chromium: fetch src from git instead of using release tarball, {ungoogled-,}chromium,chromedriver: 130.0.6723.116 -> 131.0.6778.69/85 (#357371)

commit dca46c1a6d, authored by Emily on 2024-11-20 22:47:33 +01:00, committed by GitHub
GPG key ID: B5690EEEBB952194 (no known key found for this signature in database)
12 changed files with 2005 additions and 421 deletions
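
In short, the Chromium source is no longer fetched as the official release tarball; every dependency recorded in the DEPS map of the new info.json is now fetched individually from its Git repository. A minimal sketch of the idea, simplified from the common.nix hunk below (the actual expression additionally recompresses the main src tree into a zstd tarball):

    chromiumDeps = lib.mapAttrs
      (path: args: fetchFromGitiles { inherit (args) url rev hash; })
      upstream-info.DEPS;

The resulting store paths are then unpacked or copied into place by the generated unpackPhase.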

View File

@@ -1,5 +1,5 @@
{ lib, mkChromiumDerivation
, channel, chromiumVersionAtLeast
, chromiumVersionAtLeast
, enableWideVine, ungoogled
}:
@@ -90,7 +90,7 @@ mkChromiumDerivation (base: rec {
license = if enableWideVine then lib.licenses.unfree else lib.licenses.bsd3;
platforms = lib.platforms.linux;
mainProgram = "chromium";
hydraPlatforms = lib.optionals (channel == "stable" || channel == "ungoogled-chromium") ["aarch64-linux" "x86_64-linux"];
hydraPlatforms = ["aarch64-linux" "x86_64-linux"];
timeout = 172800; # 48 hours (increased from the Hydra default of 10h)
};
})

View File

@@ -1,15 +1,19 @@
{ stdenv, lib, fetchpatch
, recompressTarball
, zstd
, fetchFromGitiles
, fetchNpmDeps
, buildPackages
, pkgsBuildBuild
# Channel data:
, channel, upstream-info
, upstream-info
# Helper functions:
, chromiumVersionAtLeast, versionRange
# Native build inputs:
, ninja, pkg-config
, python3, perl
, nodejs
, npmHooks
, which
, libuuid
, overrideCC
@@ -145,12 +149,64 @@ let
else throw "no chromium Rosetta Stone entry for os: ${platform.config}";
};
isElectron = packageName == "electron";
chromiumDeps = lib.mapAttrs (path: args: fetchFromGitiles (removeAttrs args [ "recompress" ] // lib.optionalAttrs args.recompress or false {
name = "source.tar.zstd";
downloadToTemp = false;
passthru.unpack = true;
postFetch = ''
tar \
--use-compress-program="${lib.getExe zstd} -T$NIX_BUILD_CORES" \
--sort=name \
--mtime="1970-01-01" \
--owner=root --group=root \
--numeric-owner --mode=go=rX,u+rw,a-s \
--remove-files \
--directory="$out" \
-cf "$TMPDIR/source.zstd" .
mv "$TMPDIR/source.zstd" "$out"
'';
})) upstream-info.DEPS;
unpackPhaseSnippet = lib.concatStrings (lib.mapAttrsToList (path: dep:
(if dep.unpack or false
then ''
mkdir -p ${path}
pushd ${path}
unpackFile ${dep}
popd
''
else ''
mkdir -p ${builtins.dirOf path}
cp -r ${dep}/. ${path}
''
) + ''
chmod u+w -R ${path}
'') chromiumDeps);
base = rec {
pname = "${lib.optionalString ungoogled "ungoogled-"}${packageName}-unwrapped";
inherit (upstream-info) version;
inherit packageName buildType buildPath;
src = recompressTarball { inherit version; inherit (upstream-info) hash; };
unpackPhase = ''
runHook preUnpack
${unpackPhaseSnippet}
sourceRoot=src
runHook postUnpack
'';
npmRoot = "third_party/node";
npmDeps = (fetchNpmDeps {
src = chromiumDeps."src";
sourceRoot = npmRoot;
hash = upstream-info.deps.npmHash;
}).overrideAttrs (p: {
nativeBuildInputs = p.nativeBuildInputs or [ ] ++ [ zstd ];
});
nativeBuildInputs = [
ninja pkg-config
@@ -158,6 +214,9 @@ let
which
buildPackages.rustc.llvmPackages.bintools
bison gperf
] ++ lib.optionals (!isElectron) [
nodejs
npmHooks.npmConfigHook
];
depsBuildBuild = [
@@ -317,7 +376,32 @@ let
})
];
postPatch = ''
postPatch = lib.optionalString (!isElectron) ''
ln -s ${./files/gclient_args.gni} build/config/gclient_args.gni
echo 'LASTCHANGE=${upstream-info.DEPS."src".rev}-refs/heads/master@{#0}' > build/util/LASTCHANGE
echo "$SOURCE_DATE_EPOCH" > build/util/LASTCHANGE.committime
cat << EOF > gpu/config/gpu_lists_version.h
/* Generated by lastchange.py, do not edit.*/
#ifndef GPU_CONFIG_GPU_LISTS_VERSION_H_
#define GPU_CONFIG_GPU_LISTS_VERSION_H_
#define GPU_LISTS_VERSION "${upstream-info.DEPS."src".rev}"
#endif // GPU_CONFIG_GPU_LISTS_VERSION_H_
EOF
cat << EOF > skia/ext/skia_commit_hash.h
/* Generated by lastchange.py, do not edit.*/
#ifndef SKIA_EXT_SKIA_COMMIT_HASH_H_
#define SKIA_EXT_SKIA_COMMIT_HASH_H_
#define SKIA_COMMIT_HASH "${upstream-info.DEPS."src/third_party/skia".rev}-"
#endif // SKIA_EXT_SKIA_COMMIT_HASH_H_
EOF
echo -n '${upstream-info.DEPS."src/third_party/dawn".rev}' > gpu/webgpu/DAWN_VERSION
mkdir -p third_party/jdk/current/bin
'' + ''
# Workaround/fix for https://bugs.chromium.org/p/chromium/issues/detail?id=1313361:
substituteInPlace BUILD.gn \
--replace '"//infra/orchestrator:orchestrator_all",' ""
@@ -513,6 +597,11 @@ let
# enable those features in our stable builds.
preConfigure = ''
export RUSTC_BOOTSTRAP=1
'' + lib.optionalString (!isElectron) ''
(
cd third_party/node
grep patch update_npm_deps | sh
)
'';
configurePhase = ''
@@ -570,11 +659,9 @@ let
'';
passthru = {
updateScript = ./update.py;
chromiumDeps = {
gn = gnChromium;
};
inherit recompressTarball;
updateScript = ./update.mjs;
} // lib.optionalAttrs (!isElectron) {
inherit chromiumDeps npmDeps;
};
}
# overwrite `version` with the exact same `version` from the same source,

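The newly exposed passthru.chromiumDeps and passthru.npmDeps are what the update script prefetches: each fixed-output derivation is built with a missing or dummy hash and the real hash is harvested from the "got:" line of the failing build. Illustrative invocations, mirroring the -A attribute paths used in update.mjs further down:

    nix-build -A 'chromium.browser.passthru.chromiumDeps."src"'
    nix-build -A chromium.browser.passthru.npmDeps
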
View File

@@ -10,8 +10,7 @@
# package customization
# Note: enable* flags should not require full rebuilds (i.e. only affect the wrapper)
, channel ? "stable"
, upstream-info ? (import ./upstream-info.nix).${channel}
, upstream-info ? (lib.importJSON ./info.json).${if !ungoogled then "chromium" else "ungoogled-chromium"}
, proprietaryCodecs ? true
, enableWideVine ? false
, ungoogled ? false # Whether to build chromium or ungoogled-chromium
@@ -46,13 +45,14 @@ let
inherit stdenv upstream-info;
mkChromiumDerivation = callPackage ./common.nix ({
inherit channel chromiumVersionAtLeast versionRange;
inherit chromiumVersionAtLeast versionRange;
inherit proprietaryCodecs
cupsSupport pulseSupport ungoogled;
gnChromium = buildPackages.gn.overrideAttrs (oldAttrs: {
inherit (upstream-info.deps.gn) version;
version = if (upstream-info.deps.gn ? "version") then upstream-info.deps.gn.version else "0";
src = fetchgit {
inherit (upstream-info.deps.gn) url rev hash;
url = "https://gn.googlesource.com/gn";
inherit (upstream-info.deps.gn) rev hash;
};
} // lib.optionalAttrs (chromiumVersionAtLeast "127") {
# Relax hardening as otherwise gn unstable 2024-06-06 and later fail with:
@@ -65,11 +65,10 @@ let
# As a work around until gn is updated again, we filter specifically that patch out.
patches = lib.filter (e: lib.getName e != "LFS64.patch") oldAttrs.patches;
});
recompressTarball = callPackage ./recompress-tarball.nix { inherit chromiumVersionAtLeast; };
});
browser = callPackage ./browser.nix {
inherit channel chromiumVersionAtLeast enableWideVine ungoogled;
inherit chromiumVersionAtLeast enableWideVine ungoogled;
};
# ungoogled-chromium is, contrary to its name, not a build of
@@ -80,8 +79,6 @@ let
ungoogled-chromium = pkgsBuildBuild.callPackage ./ungoogled.nix {};
};
suffix = lib.optionalString (channel != "stable" && channel != "ungoogled-chromium") ("-" + channel);
sandboxExecutableName = chromium.browser.passthru.sandboxExecutableName;
# We want users to be able to enableWideVine without rebuilding all of
@@ -99,7 +96,7 @@ let
in stdenv.mkDerivation {
pname = lib.optionalString ungoogled "ungoogled-"
+ "chromium${suffix}";
+ "chromium";
inherit (chromium.browser) version;
nativeBuildInputs = [

View File

@@ -0,0 +1,122 @@
#! /usr/bin/env nix-shell
#! nix-shell -i python -p python3
"""
This is a heavily simplified variant of electron's update.py
for use in ./update.mjs and should not be called manually.
It resolves chromium's DEPS file recursively when called with
a working depot_tools checkout and a ref to fetch and prints
the result as JSON to stdout.
"""
import base64
import json
from typing import Optional
from urllib.request import urlopen
import sys
if len(sys.argv) != 3:
print("""This internal script has been called with the wrong amount of parameters.
This script is not supposed to be called manually.
Refer to ./update.mjs instead.""")
exit(1)
_, depot_tools_checkout, chromium_version = sys.argv
sys.path.append(depot_tools_checkout)
import gclient_eval
import gclient_utils
class Repo:
fetcher: str
args: dict
def __init__(self) -> None:
self.deps: dict = {}
self.hash = "sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA="
def get_deps(self, repo_vars: dict, path: str) -> None:
print(
"evaluating " + json.dumps(self, default=vars, sort_keys=True),
file=sys.stderr,
)
deps_file = self.get_file("DEPS")
evaluated = gclient_eval.Parse(deps_file, vars_override=repo_vars, filename="DEPS")
repo_vars = dict(evaluated.get("vars", {})) | repo_vars
prefix = f"{path}/" if evaluated.get("use_relative_paths", False) else ""
self.deps = {
prefix + dep_name: repo_from_dep(dep)
for dep_name, dep in evaluated.get("deps", {}).items()
if (
gclient_eval.EvaluateCondition(dep["condition"], repo_vars)
if "condition" in dep
else True
)
and repo_from_dep(dep) != None
}
for key in evaluated.get("recursedeps", []):
dep_path = prefix + key
if dep_path in self.deps and dep_path != "src/third_party/squirrel.mac":
self.deps[dep_path].get_deps(repo_vars, dep_path)
def flatten_repr(self) -> dict:
return {"fetcher": self.fetcher, "hash": self.hash, **self.args}
def flatten(self, path: str) -> dict:
out = {path: self.flatten_repr()}
for dep_path, dep in self.deps.items():
out |= dep.flatten(dep_path)
return out
def get_file(self, filepath: str) -> str:
raise NotImplementedError
class GitilesRepo(Repo):
def __init__(self, url: str, rev: str) -> None:
super().__init__()
self.fetcher = "fetchFromGitiles"
self.args = {
"url": url,
"rev": rev,
}
def get_file(self, filepath: str) -> str:
return base64.b64decode(
urlopen(
f"{self.args['url']}/+/{self.args['rev']}/{filepath}?format=TEXT"
).read()
).decode("utf-8")
def repo_from_dep(dep: dict) -> Optional[Repo]:
if "url" in dep:
url, rev = gclient_utils.SplitUrlRevision(dep["url"])
return GitilesRepo(url, rev)
else:
# Not a git dependency; skip
return None
chromium = GitilesRepo("https://chromium.googlesource.com/chromium/src.git", chromium_version)
chromium.get_deps(
{
**{
f"checkout_{platform}": platform == "linux" or platform == "x64" or platform == "arm64" or platform == "arm"
for platform in ["ios", "chromeos", "android", "mac", "win", "linux"]
},
**{
f"checkout_{arch}": True
for arch in ["x64", "arm64", "arm", "x86", "mips", "mips64"]
},
},
"",
)
print(json.dumps(chromium.flatten("src")))
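
For reference, update.mjs drives this resolver roughly as follows (the placeholders are illustrative) and reads the JSON from stdout, while the "evaluating ..." progress messages go to stderr:

    ./depot_tools.py <path to depot_tools checkout> <chromium version or ref>
    # stdout: {"src": {"fetcher": "fetchFromGitiles", "hash": "...", "url": "...", "rev": "..."}, ...}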

View File

@@ -0,0 +1,12 @@
build_with_chromium = true
checkout_android = false
checkout_android_prebuilts_build_tools = false
checkout_clang_coverage_tools = false
checkout_copybara = false
checkout_ios_webkit = false
checkout_nacl = false
checkout_openxr = false
checkout_src_internal = false
cros_boards = ""
cros_boards_with_qemu_images = ""
generate_location_tags = true

File diff suppressed because it is too large.

View File

@@ -1,56 +0,0 @@
{ zstd
, fetchurl
, lib
, chromiumVersionAtLeast
}:
{ version
, hash ? ""
} @ args:
fetchurl ({
name = "chromium-${version}.tar.zstd";
url = "https://commondatastorage.googleapis.com/chromium-browser-official/chromium-${version}.tar.xz";
inherit hash;
# chromium xz tarballs are multiple gigabytes big and are sometimes downloaded multiple
# times for different versions as part of our update script.
# We originally inherited fetchzip's default for downloadToTemp (true).
# Given the size of the /run/user tmpfs used defaults to logind's RuntimeDirectorySize=,
# which in turn defaults to 10% of the total amount of physical RAM, this often led to
# "no space left" errors, eventually resulting in its own section in our chromium
# README.md (for users wanting to run the update script).
# Nowadays, we use fetchurl instead of fetchzip, which defaults to false instead of true.
# We just want to be explicit and provide a place to document the history and reasoning
# behind this.
downloadToTemp = false;
nativeBuildInputs = [ zstd ];
postFetch = ''
cat "$downloadedFile" \
| xz -d --threads=$NIX_BUILD_CORES \
| tar xf - \
--warning=no-timestamp \
--one-top-level=source \
--exclude=third_party/llvm \
--exclude=third_party/rust-src \
--exclude='build/linux/debian_*-sysroot' \
'' + lib.optionalString (chromiumVersionAtLeast "127") ''
--exclude='*.tar.[a-zA-Z0-9][a-zA-Z0-9]' \
--exclude='*.tar.[a-zA-Z0-9][a-zA-Z0-9][a-zA-Z0-9]' \
--exclude=third_party/llvm-build \
--exclude=third_party/rust-toolchain \
--exclude=third_party/instrumented_libs \
'' + ''
--strip-components=1
tar \
--use-compress-program "zstd -T$NIX_BUILD_CORES" \
--sort name \
--mtime "1970-01-01" \
--owner=root --group=root \
--numeric-owner --mode=go=rX,u+rw,a-s \
-cf $out source
'';
} // removeAttrs args [ "version" ])

View File

@@ -0,0 +1,227 @@
#! /usr/bin/env nix-shell
/*
#! nix-shell -i zx -p zx
*/
cd(__dirname)
const nixpkgs = (await $`git rev-parse --show-toplevel`).stdout.trim()
const $nixpkgs = $({
cwd: nixpkgs
})
const dummy_hash = 'sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA='
const lockfile_file = './info.json'
const lockfile_initial = fs.readJsonSync(lockfile_file)
function flush_to_file() {
fs.writeJsonSync(lockfile_file, lockfile, { spaces: 2 })
}
const flush_to_file_proxy = {
get(obj, prop) {
const value = obj[prop]
return typeof value == 'object' ? new Proxy(value, flush_to_file_proxy) : value
},
set(obj, prop, value) {
obj[prop] = value
flush_to_file()
return true
},
}
const lockfile = new Proxy(structuredClone(lockfile_initial), flush_to_file_proxy)
for (const attr_path of Object.keys(lockfile)) {
if (!argv[attr_path]) {
console.log(`[${attr_path}] Skipping ${attr_path}. Pass --${attr_path} as argument to update.`)
continue
}
const ungoogled = attr_path === 'ungoogled-chromium'
const version_nixpkgs = !ungoogled ? lockfile[attr_path].version : lockfile[attr_path].deps["ungoogled-patches"].rev
const version_upstream = !ungoogled ? await get_latest_chromium_release() : await get_latest_ungoogled_release()
console.log(`[${attr_path}] ${chalk.red(version_nixpkgs)} (nixpkgs)`)
console.log(`[${attr_path}] ${chalk.green(version_upstream)} (upstream)`)
if (version_greater_than(version_upstream, version_nixpkgs)) {
console.log(`[${attr_path}] ${chalk.green(version_upstream)} from upstream is newer than our ${chalk.red(version_nixpkgs)}...`)
// unconditionally remove ungoogled-chromium's epoch/sub-version (e.g. 130.0.6723.116-1 -> 130.0.6723.116)
const version_chromium = version_upstream.split('-')[0]
lockfile[attr_path] = {
version: version_chromium,
chromedriver: !ungoogled ? await fetch_chromedriver_binaries(version_chromium) : undefined,
deps: {
depot_tools: {},
gn: {},
"ungoogled-patches": ungoogled ? await fetch_ungoogled(version_upstream) : undefined,
npmHash: dummy_hash,
},
DEPS: {},
}
const depot_tools = await fetch_depot_tools(version_chromium, lockfile_initial[attr_path].deps.depot_tools)
lockfile[attr_path].deps.depot_tools = {
rev: depot_tools.rev,
hash: depot_tools.hash,
}
const gn = await fetch_gn(version_chromium, lockfile_initial[attr_path].deps.gn)
lockfile[attr_path].deps.gn = {
rev: gn.rev,
hash: gn.hash,
}
// DEPS update loop
lockfile[attr_path].DEPS = await resolve_DEPS(depot_tools.out, version_chromium)
for (const [path, value] of Object.entries(lockfile[attr_path].DEPS)) {
delete value.fetcher
delete value.postFetch
if (value.url === 'https://chromium.googlesource.com/chromium/src.git') {
value.recompress = true
}
const cache = lockfile_initial[attr_path].DEPS[path]
const cache_hit =
cache !== undefined &&
value.url === cache.url &&
value.rev === cache.rev &&
value.recompress === cache.recompress &&
cache.hash !== undefined &&
cache.hash !== '' &&
cache.hash !== dummy_hash
if (cache_hit) {
console.log(`[${chalk.green(path)}] Reusing hash from previous info.json for ${cache.url}@${cache.rev}`)
value.hash = cache.hash
continue
}
console.log(`[${chalk.red(path)}] FOD prefetching ${value.url}@${value.rev}...`)
value.hash = await prefetch_FOD('-A', `${attr_path}.browser.passthru.chromiumDeps."${path}"`)
console.log(`[${chalk.green(path)}] FOD prefetching successful`)
}
lockfile[attr_path].deps.npmHash = await prefetch_FOD('-A', `${attr_path}.browser.passthru.npmDeps`)
console.log(chalk.green(`[${attr_path}] Done updating ${attr_path} from ${version_nixpkgs} to ${version_upstream}!`))
}
}
async function fetch_gn(chromium_rev, gn_previous) {
const DEPS_file = await get_gitiles_file('https://chromium.googlesource.com/chromium/src', chromium_rev, 'DEPS')
const gn_rev = /^\s+'gn_version': 'git_revision:(?<rev>.+)',$/m.exec(DEPS_file).groups.rev
const hash = gn_rev === gn_previous.rev ? gn_previous.hash : ''
return await prefetch_gitiles('https://gn.googlesource.com/gn', gn_rev, hash)
}
async function fetch_chromedriver_binaries(chromium_version) {
// https://developer.chrome.com/docs/chromedriver/downloads/version-selection
const prefetch = async (url) => {
const expr = [`(import ./. {}).fetchzip { url = "${url}"; hash = ""; }`]
const derivation = await $nixpkgs`nix-instantiate --expr ${expr}`
return await prefetch_FOD(derivation)
}
// if the URL ever changes, the URLs in the chromedriver derivations need updating as well!
const url = (platform) => `https://storage.googleapis.com/chrome-for-testing-public/${chromium_version}/${platform}/chromedriver-${platform}.zip`
return {
hash_darwin: await prefetch(url('mac-x64')),
hash_darwin_aarch64: await prefetch(url('mac-arm64')),
}
}
async function resolve_DEPS(depot_tools_checkout, chromium_rev) {
const { stdout } = await $`./depot_tools.py ${depot_tools_checkout} ${chromium_rev}`
const deps = JSON.parse(stdout)
return Object.fromEntries(Object.entries(deps).map(([k, { url, rev, hash }]) => [k, { url, rev, hash }]))
}
async function get_latest_chromium_release() {
const url = `https://versionhistory.googleapis.com/v1/chrome/platforms/linux/channels/stable/versions/all/releases?` + new URLSearchParams({
order_by: 'version desc',
filter: 'endtime=none,fraction>=0.5'
})
const response = await (await fetch(url)).json()
return response.releases[0].version
}
async function get_latest_ungoogled_release() {
const ungoogled_tags = await (await fetch('https://api.github.com/repos/ungoogled-software/ungoogled-chromium/tags')).json()
const chromium_releases = await (await fetch('https://versionhistory.googleapis.com/v1/chrome/platforms/linux/channels/stable/versions/all/releases')).json()
const chromium_release_map = chromium_releases.releases.map((x) => x.version)
return ungoogled_tags.find((x) => chromium_release_map.includes(x.name.split('-')[0])).name
}
async function fetch_ungoogled(rev) {
const expr = (hash) => [`(import ./. {}).fetchFromGitHub { owner = "ungoogled-software"; repo = "ungoogled-chromium"; rev = "${rev}"; hash = "${hash}"; }`]
const hash = await prefetch_FOD('--expr', expr(''))
const checkout = await $nixpkgs`nix-build --expr ${expr(hash)}`
await fs.copy(`${checkout.stdout.trim()}/flags.gn`, './ungoogled-flags.toml')
return {
rev,
hash,
}
}
function version_greater_than(greater, than) {
return greater.localeCompare(than, undefined, { numeric: true, sensitivity: 'base' }) === 1
}
async function get_gitiles_file(repo, rev, path) {
const base64 = await (await fetch(`${repo}/+/${rev}/${path}?format=TEXT`)).text()
return Buffer.from(base64, 'base64').toString('utf-8')
}
async function fetch_depot_tools(chromium_rev, depot_tools_previous) {
const depot_tools_rev = await get_gitiles_file('https://chromium.googlesource.com/chromium/src', chromium_rev, 'third_party/depot_tools')
const hash = depot_tools_rev === depot_tools_previous.rev ? depot_tools_previous.hash : ''
return await prefetch_gitiles('https://chromium.googlesource.com/chromium/tools/depot_tools', depot_tools_rev, hash)
}
async function prefetch_gitiles(url, rev, hash = '') {
const expr = () => [`(import ./. {}).fetchFromGitiles { url = "${url}"; rev = "${rev}"; hash = "${hash}"; }`]
if (hash === '') {
hash = await prefetch_FOD('--expr', expr())
}
const { stdout } = await $nixpkgs`nix-build --expr ${expr()}`
return {
url,
rev,
hash,
out: stdout.trim(),
}
}
async function prefetch_FOD(...args) {
const { stderr } = await $nixpkgs`nix-build ${args}`.nothrow()
const hash = /\s+got:\s+(?<hash>.+)$/m.exec(stderr)?.groups?.hash
if (hash == undefined) {
throw new Error(chalk.red('Expected to find hash in nix-build stderr output:') + stderr)
}
return hash
}
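
For reference, the attribute names in info.json double as command-line flags, so a full update run looks roughly like

    ./update.mjs --chromium --ungoogled-chromium

with the zx environment coming from the nix-shell shebang above; attributes whose flag is omitted are skipped.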

View File

@@ -1,300 +0,0 @@
#! /usr/bin/env nix-shell
#! nix-shell -i python -p python3Packages.looseversion nix nixfmt-classic nix-prefetch-git
"""This script automatically updates chromium, google-chrome, chromedriver, and ungoogled-chromium
via upstream-info.nix."""
# Usage: ./update.py [--commit]
import base64
import csv
import json
import re
import subprocess
import sys
from codecs import iterdecode
from collections import OrderedDict
from datetime import datetime
from looseversion import LooseVersion
from os.path import abspath, dirname
from urllib.request import urlopen
RELEASES_URL = 'https://versionhistory.googleapis.com/v1/chrome/platforms/linux/channels/all/versions/all/releases'
PIN_PATH = dirname(abspath(__file__)) + '/upstream-info.nix'
UNGOOGLED_FLAGS_PATH = dirname(abspath(__file__)) + '/ungoogled-flags.toml'
COMMIT_MESSAGE_SCRIPT = dirname(abspath(__file__)) + '/get-commit-message.py'
NIXPKGS_PATH = subprocess.check_output(["git", "rev-parse", "--show-toplevel"], cwd=dirname(PIN_PATH)).strip()
def load_as_json(path):
"""Loads the given nix file as JSON."""
out = subprocess.check_output(['nix-instantiate', '--eval', '--strict', '--json', path])
return json.loads(out)
def save_dict_as_nix(path, input):
"""Saves the given dict/JSON as nix file."""
json_string = json.dumps(input)
nix = subprocess.check_output(['nix-instantiate', '--eval', '--expr', '{ json }: builtins.fromJSON json', '--argstr', 'json', json_string])
formatted = subprocess.check_output(['nixfmt'], input=nix)
with open(path, 'w') as out:
out.write(formatted.decode())
def prefetch_src_sri_hash(attr_path, version):
"""Prefetches the fixed-output-derivation source tarball and returns its SRI-Hash."""
print(f'nix-build (FOD prefetch) {attr_path} {version}')
out = subprocess.run(
["nix-build", "--expr", f'(import ./. {{}}).{attr_path}.browser.passthru.recompressTarball {{ version = "{version}"; }}'],
cwd=NIXPKGS_PATH,
stderr=subprocess.PIPE
).stderr.decode()
for line in iter(out.split("\n")):
match = re.match(r"\s+got:\s+(.+)$", line)
if match:
print(f'Hash: {match.group(1)}')
return match.group(1)
print(f'{out}\n\nError: Expected hash in nix-build stderr output.', file=sys.stderr)
sys.exit(1)
def nix_prefetch_url(url, algo='sha256'):
"""Prefetches the content of the given URL."""
print(f'nix store prefetch-file {url}')
out = subprocess.check_output(['nix', 'store', 'prefetch-file', '--json', '--hash-type', algo, url])
return json.loads(out)['hash']
def nix_prefetch_git(url, rev):
"""Prefetches the requested Git revision of the given repository URL."""
print(f'nix-prefetch-git {url} {rev}')
out = subprocess.check_output(['nix-prefetch-git', '--quiet', '--url', url, '--rev', rev])
return json.loads(out)
def get_file_revision(revision, file_path):
"""Fetches the requested Git revision of the given Chromium file."""
url = f'https://chromium.googlesource.com/chromium/src/+/refs/tags/{revision}/{file_path}?format=TEXT'
with urlopen(url) as http_response:
resp = http_response.read()
return base64.b64decode(resp)
def get_ungoogled_file_revision(revision, file_path):
"""Fetches the requested Git revision of the given Chromium file."""
url = f'https://raw.githubusercontent.com/ungoogled-software/ungoogled-chromium/{revision}/{file_path}'
with urlopen(url) as http_response:
resp = http_response.read()
return resp.decode("utf-8")
def get_chromedriver(channel):
"""Get the latest chromedriver builds given a channel"""
# See https://chromedriver.chromium.org/downloads/version-selection#h.4wiyvw42q63v
chromedriver_versions_url = f'https://googlechromelabs.github.io/chrome-for-testing/last-known-good-versions-with-downloads.json'
print(f'GET {chromedriver_versions_url}')
with urlopen(chromedriver_versions_url) as http_response:
chromedrivers = json.load(http_response)
channel = chromedrivers['channels'][channel]
downloads = channel['downloads']['chromedriver']
def get_chromedriver_url(platform):
for download in downloads:
if download['platform'] == platform:
return download['url']
return {
'version': channel['version'],
'hash_linux': nix_prefetch_url(get_chromedriver_url('linux64')),
'hash_darwin': nix_prefetch_url(get_chromedriver_url('mac-x64')),
'hash_darwin_aarch64': nix_prefetch_url(get_chromedriver_url('mac-arm64'))
}
def get_channel_dependencies(version):
"""Gets all dependencies for the given Chromium version."""
deps = get_file_revision(version, 'DEPS')
gn_pattern = b"'gn_version': 'git_revision:([0-9a-f]{40})'"
gn_commit = re.search(gn_pattern, deps).group(1).decode()
gn = nix_prefetch_git('https://gn.googlesource.com/gn', gn_commit)
return {
'gn': {
'version': datetime.fromisoformat(gn['date']).date().isoformat(),
'url': gn['url'],
'rev': gn['rev'],
'hash': gn['hash']
}
}
def get_latest_ungoogled_chromium_tag(linux_stable_versions):
"""Returns the latest ungoogled-chromium tag for linux using the GitHub API."""
api_tag_url = 'https://api.github.com/repos/ungoogled-software/ungoogled-chromium/tags'
with urlopen(api_tag_url) as http_response:
tags = json.load(http_response)
for tag in tags:
if not tag['name'].split('-')[0] in linux_stable_versions:
continue
return tag['name']
def get_latest_ungoogled_chromium_build(linux_stable_versions):
"""Returns a dictionary for the latest ungoogled-chromium build."""
tag = get_latest_ungoogled_chromium_tag(linux_stable_versions)
version = tag.split('-')[0]
return {
'name': 'chrome/platforms/linux/channels/ungoogled-chromium/versions/',
'version': version,
'ungoogled_rev': tag
}
def get_ungoogled_chromium_build_by_ref(ungoogled_chromium_ref):
"""Returns a dictionary for an ungoogled-chromium build referenced by a ref in the ungoogled-chromium repository."""
version = get_ungoogled_file_revision(ungoogled_chromium_ref, "chromium_version.txt").strip("\n ")
return {
'name': 'chrome/platforms/linux/channels/ungoogled-chromium/versions/',
'version': version,
'ungoogled_rev': ungoogled_chromium_ref
}
def get_ungoogled_chromium_gn_flags(revision):
"""Returns ungoogled-chromium's GN build flags for the given revision."""
gn_flags_url = f'https://raw.githubusercontent.com/ungoogled-software/ungoogled-chromium/{revision}/flags.gn'
return urlopen(gn_flags_url).read().decode()
def channel_name_to_attr_name(channel_name):
"""Maps a channel name to the corresponding main Nixpkgs attribute name."""
if channel_name == 'stable':
return 'chromium'
if channel_name == 'ungoogled-chromium':
return 'ungoogled-chromium'
print(f'Error: Unexpected channel: {channel_name}', file=sys.stderr)
sys.exit(1)
def get_channel_key(item):
"""Orders Chromium channels by their name."""
channel_name = item[0]
if channel_name == 'stable':
return 0
if channel_name == 'beta':
return 1
if channel_name == 'dev':
return 2
if channel_name == 'ungoogled-chromium':
return 3
print(f'Error: Unexpected channel: {channel_name}', file=sys.stderr)
sys.exit(1)
def print_updates(channels_old, channels_new):
"""Print a summary of the updates."""
print('Updates:')
for channel_name in channels_old:
version_old = channels_old[channel_name]["version"]
version_new = channels_new[channel_name]["version"]
if LooseVersion(version_old) < LooseVersion(version_new):
attr_name = channel_name_to_attr_name(channel_name)
print(f'- {attr_name}: {version_old} -> {version_new}')
channels = {}
last_channels = load_as_json(PIN_PATH)
src_hash_cache = {}
print(f'GET {RELEASES_URL}', file=sys.stderr)
with urlopen(RELEASES_URL) as resp:
releases = json.load(resp)['releases']
if len(sys.argv) == 3 and sys.argv[1] == 'ungoogled-rev':
releases.append(get_ungoogled_chromium_build_by_ref(sys.argv[2]))
else:
linux_stable_versions = [release['version'] for release in releases if release['name'].startswith('chrome/platforms/linux/channels/stable/versions/')]
releases.append(get_latest_ungoogled_chromium_build(linux_stable_versions))
for release in releases:
channel_name = re.findall("chrome/platforms/linux/channels/(.*)/versions/", release['name'])[0]
# If we've already found a newer release for this channel, we're
# no longer interested in it.
if channel_name in channels:
continue
# We only look for channels that are listed in our version pin file.
if channel_name not in last_channels:
continue
# If we're back at the last release we used, we don't need to
# keep going -- there's no new version available, and we can
# just reuse the info from last time.
if release['version'] == last_channels[channel_name]['version']:
channels[channel_name] = last_channels[channel_name]
continue
channel = {'version': release['version']}
if channel_name == 'dev':
google_chrome_suffix = 'unstable'
elif channel_name == 'ungoogled-chromium':
google_chrome_suffix = 'stable'
else:
google_chrome_suffix = channel_name
try:
version = release["version"]
existing_releases = dict(map(lambda channel: (channel[1]['version'], channel[1]['hash']), last_channels.items()))
if version in src_hash_cache:
print(f'Already got hash {src_hash_cache[version]} for {version}, skipping FOD prefetch for {channel_name_to_attr_name(channel_name)}')
channel["hash"] = src_hash_cache[version]
elif version in existing_releases:
print(f'Already got hash {existing_releases[version]} for {version} (from upstream-info.nix), skipping FOD prefetch for {channel_name_to_attr_name(channel_name)}')
channel["hash"] = existing_releases[version]
else:
channel["hash"] = prefetch_src_sri_hash(
channel_name_to_attr_name(channel_name),
version
)
src_hash_cache[version] = channel["hash"]
except subprocess.CalledProcessError:
# This release isn't actually available yet. Continue to
# the next one.
continue
channel['deps'] = get_channel_dependencies(channel['version'])
if channel_name == 'stable':
channel['chromedriver'] = get_chromedriver('Stable')
elif channel_name == 'ungoogled-chromium':
ungoogled_repo_url = 'https://github.com/ungoogled-software/ungoogled-chromium.git'
channel['deps']['ungoogled-patches'] = {
'rev': release['ungoogled_rev'],
'hash': nix_prefetch_git(ungoogled_repo_url, release['ungoogled_rev'])['hash']
}
with open(UNGOOGLED_FLAGS_PATH, 'w') as out:
out.write(get_ungoogled_chromium_gn_flags(release['ungoogled_rev']))
channels[channel_name] = channel
sorted_channels = OrderedDict(sorted(channels.items(), key=get_channel_key))
if len(sys.argv) == 2 and sys.argv[1] == '--commit':
for channel_name in sorted_channels.keys():
version_old = last_channels[channel_name]['version']
version_new = sorted_channels[channel_name]['version']
if LooseVersion(version_old) < LooseVersion(version_new):
last_channels[channel_name] = sorted_channels[channel_name]
save_dict_as_nix(PIN_PATH, last_channels)
attr_name = channel_name_to_attr_name(channel_name)
commit_message = f'{attr_name}: {version_old} -> {version_new}'
if channel_name == 'stable':
body = subprocess.check_output([COMMIT_MESSAGE_SCRIPT, version_new]).decode('utf-8')
commit_message += '\n\n' + body
elif channel_name == 'ungoogled-chromium':
subprocess.run(['git', 'add', UNGOOGLED_FLAGS_PATH], check=True)
subprocess.run(['git', 'add', JSON_PATH], check=True)
subprocess.run(['git', 'commit', '--file=-'], input=commit_message.encode(), check=True)
else:
save_dict_as_nix(PIN_PATH, sorted_channels)
print_updates(last_channels, sorted_channels)

View File

@@ -1,37 +0,0 @@
{
stable = {
chromedriver = {
hash_darwin = "sha256-+Pcd++19/nJVsqGr2jzyjMTWYfb2U9wSgnNccDyGuGU=";
hash_darwin_aarch64 =
"sha256-vrbIpHrBwbzqars7D546eJ7zhEhAB0abq7MXiqlU4ts=";
hash_linux = "sha256-NbZ1GULLWJ6L3kczz23HoUhGk6VgBOXcjZlL7t4Z6Ec=";
version = "130.0.6723.116";
};
deps = {
gn = {
hash = "sha256-iNXRq3Mr8+wmY1SR4sV7yd2fDiIZ94eReelwFI0UhGU=";
rev = "20806f79c6b4ba295274e3a589d85db41a02fdaa";
url = "https://gn.googlesource.com/gn";
version = "2024-09-09";
};
};
hash = "sha256-eOCUKhFv205MD1gEY1FQQNCwxyELNjIAxUlPcRn74Lk=";
version = "130.0.6723.116";
};
ungoogled-chromium = {
deps = {
gn = {
hash = "sha256-iNXRq3Mr8+wmY1SR4sV7yd2fDiIZ94eReelwFI0UhGU=";
rev = "20806f79c6b4ba295274e3a589d85db41a02fdaa";
url = "https://gn.googlesource.com/gn";
version = "2024-09-09";
};
ungoogled-patches = {
hash = "sha256-+94tSSaOp6vzWkXN1qF3UXMm/Rs3pKmjf+U4x+af818=";
rev = "130.0.6723.116-1";
};
};
hash = "sha256-eOCUKhFv205MD1gEY1FQQNCwxyELNjIAxUlPcRn74Lk=";
version = "130.0.6723.116";
};
}

View File

@@ -1,7 +1,7 @@
{
lib,
stdenv,
fetchurl,
fetchzip,
unzip,
testers,
chromedriver,
@@ -9,19 +9,18 @@
let
upstream-info =
(import ../../../../applications/networking/browsers/chromium/upstream-info.nix)
.stable.chromedriver;
(lib.importJSON ../../../../applications/networking/browsers/chromium/info.json).chromium;
# See ./source.nix for Linux
allSpecs = {
x86_64-darwin = {
system = "mac-x64";
hash = upstream-info.hash_darwin;
hash = upstream-info.chromedriver.hash_darwin;
};
aarch64-darwin = {
system = "mac-arm64";
hash = upstream-info.hash_darwin_aarch64;
hash = upstream-info.chromedriver.hash_darwin_aarch64;
};
};
@@ -35,7 +34,7 @@ stdenv.mkDerivation {
pname = "chromedriver";
inherit version;
src = fetchurl {
src = fetchzip {
url = "https://storage.googleapis.com/chrome-for-testing-public/${version}/${spec.system}/chromedriver-${spec.system}.zip";
inherit (spec) hash;
};

View File

@@ -16135,7 +16135,6 @@ with pkgs;
ungoogled-chromium = callPackage ../applications/networking/browsers/chromium ((config.chromium or {}) // {
ungoogled = true;
channel = "ungoogled-chromium";
});
unigine-tropics = pkgsi686Linux.callPackage ../applications/graphics/unigine-tropics { };