noto-fonts-monochrome-emoji: Automatically collect file hashes
Parent: 149f6d07ed
Commit: e16875bfbd
@@ -229,6 +229,7 @@ rec {
     # Metadata fetched from
     # https://www.googleapis.com/webfonts/v1/webfonts?key=${GOOGLE_FONTS_TOKEN}&family=Noto+Emoji
     let metadata = with builtins; head (fromJSON (readFile ./noto-emoji.json)).items;
+        urlHashes = with builtins; fromJSON (readFile ./noto-emoji.hashes.json);
 
     in
     stdenvNoCC.mkDerivation {
@@ -245,17 +246,10 @@ rec {
       "600" = "SemiBold";
       "700" = "Bold";
     };
-    fileHashes = {
-      "NotoEmoji-Bold.ttf" = "ce426e27c6254eb515fb6f301c8aa7cb7c90be3bd9a843c6e165d899a2dc63c0";
-      "NotoEmoji-Light.ttf" = "f67750a89273b02911e8a71844d556df05d6331707fb44331604107421bcbd2a";
-      "NotoEmoji-Medium.ttf" = "c3317d90a34c7904d86764144f9a4881aba1976a8ca59da730b35378026eaad4";
-      "NotoEmoji-Regular.ttf" = "01718b75679b75dc8985328c5bf0ffead5bc38371a5eb50cf7a9b684df706258";
-      "NotoEmoji-SemiBold.ttf" = "3487a513c5fe94ab47eb24f77853d957bcd8511dd8e469cda1b01b7fb01c911d";
-    };
     in lib.mapAttrsToList
-      (variant: url: fetchurl rec { name = "NotoEmoji-${weightNames.${variant}}.ttf";
-                                    sha256 = fileHashes.${name};
-                                    inherit url; } )
+      (variant: url: fetchurl { name = "NotoEmoji-${weightNames.${variant}}.ttf";
+                                hash = urlHashes.${url};
+                                inherit url; } )
       metadata.files;
 
     installPhase = ''
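For context: both the comment in the Nix expression above and the new script below read noto-emoji.json, a dump of the Google Fonts API v1 response for the Noto Emoji family. A minimal sketch of how such a dump could be refreshed, using the URL pattern quoted in the comment; the helper name fetchMetadata is hypothetical and not part of this commit, and GOOGLE_FONTS_TOKEN stands in for a real API key:

# Hypothetical helper, not part of this commit: HTTP GET the Google Fonts API v1
# endpoint from the comment above and parse the JSON response. `token` replaces
# the GOOGLE_FONTS_TOKEN placeholder; a real API key must be supplied by the caller.
import json
from urllib import request

def fetchMetadata(token: str, family: str = "Noto+Emoji") -> dict:
    url = f"https://www.googleapis.com/webfonts/v1/webfonts?key={token}&family={family}"
    with request.urlopen(url) as resp:
        return json.load(resp)

# e.g.: json.dump(fetchMetadata("<api key>"), open("noto-emoji.json", "w"), indent=2)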
pkgs/data/fonts/noto-fonts/noto-emoji.hashes.json (new file, 7 lines)
@@ -0,0 +1,7 @@
+{
+  "http://fonts.gstatic.com/s/notoemoji/v46/bMrnmSyK7YY-MEu6aWjPDs-ar6uWaGWuob_10jwvS-FGJCMY.ttf": "sha256-9ndQqJJzsCkR6KcYRNVW3wXWMxcH+0QzFgQQdCG8vSo=",
+  "http://fonts.gstatic.com/s/notoemoji/v46/bMrnmSyK7YY-MEu6aWjPDs-ar6uWaGWuob-r0jwvS-FGJCMY.ttf": "sha256-AXGLdWebddyJhTKMW/D/6tW8ODcaXrUM96m2hN9wYlg=",
+  "http://fonts.gstatic.com/s/notoemoji/v46/bMrnmSyK7YY-MEu6aWjPDs-ar6uWaGWuob-Z0jwvS-FGJCMY.ttf": "sha256-wzF9kKNMeQTYZ2QUT5pIgauhl2qMpZ2nMLNTeAJuqtQ=",
+  "http://fonts.gstatic.com/s/notoemoji/v46/bMrnmSyK7YY-MEu6aWjPDs-ar6uWaGWuob911TwvS-FGJCMY.ttf": "sha256-NIelE8X+lKtH6yT3eFPZV7zYUR3Y5GnNobAbf7AckR0=",
+  "http://fonts.gstatic.com/s/notoemoji/v46/bMrnmSyK7YY-MEu6aWjPDs-ar6uWaGWuob9M1TwvS-FGJCMY.ttf": "sha256-zkJuJ8YlTrUV+28wHIqny3yQvjvZqEPG4WXYmaLcY8A="
+}
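A note on the values above: fetchurl's hash parameter takes SRI strings, i.e. the algorithm name, a dash, and the base64 of the raw digest. These are the same sha256 digests the removed fileHashes attrset stored in hex, only re-encoded and keyed by URL instead of by file name. A small illustration of the re-encoding (hexToSri is hypothetical, not part of the commit):

# Illustration only: convert a hex-encoded sha256 digest to the SRI form used above.
import base64

def hexToSri(hexDigest: str) -> str:
    return "sha256-" + base64.b64encode(bytes.fromhex(hexDigest)).decode()

# The hex digest that fileHashes kept for NotoEmoji-Light.ttf...
print(hexToSri("f67750a89273b02911e8a71844d556df05d6331707fb44331604107421bcbd2a"))
# ...prints the first entry above: sha256-9ndQqJJzsCkR6KcYRNVW3wXWMxcH+0QzFgQQdCG8vSo=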
pkgs/data/fonts/noto-fonts/noto-emoji.py (new executable file, 102 lines)
@@ -0,0 +1,102 @@
+#!/usr/bin/env nix-shell
+#! nix-shell -i "python3 -I" -p python3
+
+from contextlib import contextmanager
+from pathlib import Path
+from typing import Iterable
+from urllib import request
+
+import json
+
+
+def getUrls(metadata) -> Iterable[str]:
+    '''Fetch all files' URLs from Google Fonts' metadata.
+
+    The metadata must obey the API v1 schema, and can be obtained from:
+    https://www.googleapis.com/webfonts/v1/webfonts?key=${GOOGLE_FONTS_TOKEN}&family=${FAMILY}
+    '''
+    return ( url for i in metadata['items'] for _, url in i['files'].items() )
+
+
+def hashUrl(url: str, *, hash: str = 'sha256'):
+    '''Compute the hash of the data from HTTP GETing a given `url`.
+
+    The `hash` must be an algorithm name `hashlib.new` accepts.
+    '''
+    import hashlib
+    with request.urlopen(url) as req:
+        return hashlib.new(hash, req.read())
+
+def sriEncode(h) -> str:
+    '''Encode a hash in the SRI format.
+
+    Takes a `hashlib` object, and produces a string that
+    nixpkgs' `fetchurl` accepts as `hash` parameter.
+    '''
+    from base64 import b64encode
+    return f"{h.name}-{b64encode(h.digest()).decode()}"
+
+def hashUrls(
+    urls: Iterable[str],
+    knownHashes: dict[str, str] = {},
+) -> dict[str, str]:
+    '''Generate a `dict` mapping URLs to SRI-encoded hashes.
+
+    The `knownHashes` optional parameter can be used to avoid
+    re-downloading files whose URL has not changed.
+    '''
+    return {
+        url: knownHashes.get(url) or sriEncode(hashUrl(url))
+        for url in urls
+    }
+
+
+@contextmanager
+def atomicFileUpdate(target: Path):
+    '''Atomically replace the contents of a file.
+
+    Yields an open file to write into; upon exiting the context,
+    the file is closed and (atomically) replaces the `target`.
+
+    Guarantees that the `target` was either successfully overwritten
+    with new content and no exception was raised, or the temporary
+    file was cleaned up.
+    '''
+    from tempfile import mkstemp
+    fd, _p = mkstemp(
+        dir = target.parent,
+        prefix = target.name,
+    )
+    tmpPath = Path(_p)
+
+    try:
+        with open(fd, 'w') as f:
+            yield f
+
+        tmpPath.replace(target)
+
+    except Exception:
+        tmpPath.unlink(missing_ok = True)
+        raise
+
+
+if __name__ == "__main__":
+    currentDir = Path(__file__).parent
+
+    with (currentDir / 'noto-emoji.json').open() as f:
+        metadata = json.load(f)
+
+    hashPath = currentDir / 'noto-emoji.hashes.json'
+    try:
+        with hashPath.open() as hashFile:
+            hashes = json.load(hashFile)
+    except FileNotFoundError:
+        hashes = {}
+
+    with atomicFileUpdate(hashPath) as hashFile:
+        json.dump(
+            hashUrls(getUrls(metadata), knownHashes = hashes),
+            hashFile,
+            indent = 2,
+        )
+        hashFile.write("\n")  # Pacify nixpkgs' dumb editor config check
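Thanks to the nix-shell shebang, the script can be run directly (./noto-emoji.py from pkgs/data/fonts/noto-fonts/) to regenerate noto-emoji.hashes.json next to noto-emoji.json; hashUrls only downloads files whose URL is not already present in the existing hashes file. A minimal sketch of that reuse behaviour, assuming the definitions above are in scope and using an obviously fake URL and hash:

# Sketch, not part of the commit: URLs already listed in knownHashes are returned
# as-is by hashUrls, so no download is attempted for them.
known = {"http://example.invalid/NotoEmoji-Regular.ttf": "sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA="}
assert hashUrls(known.keys(), knownHashes = known) == known  # no network access happens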