From 4ba1db98760f1a6661b0cfe63c74eb2670482c18 Mon Sep 17 00:00:00 2001
From: Martin Weinelt
Date: Thu, 23 Feb 2023 23:01:33 +0100
Subject: [PATCH] update-python-libraries: Allow packages to deny bulk updates

By setting `passthru.skipBulkUpdate = true` in a Python package, it
will be excluded from bulk update runs. This is useful for fragile
packages that require special attention during updates.
---
 .../update-python-libraries.py | 15 +++++++++++++++
 1 file changed, 15 insertions(+)

diff --git a/pkgs/development/interpreters/python/update-python-libraries/update-python-libraries.py b/pkgs/development/interpreters/python/update-python-libraries/update-python-libraries.py
index 396a55385824..62eca74608c9 100755
--- a/pkgs/development/interpreters/python/update-python-libraries/update-python-libraries.py
+++ b/pkgs/development/interpreters/python/update-python-libraries/update-python-libraries.py
@@ -33,6 +33,8 @@ EXTENSIONS = ['tar.gz', 'tar.bz2', 'tar', 'zip', '.whl']
 
 PRERELEASES = False
 
+BULK_UPDATE = False
+
 GIT = "git"
 
 NIXPKGS_ROOT = subprocess.check_output(["git", "rev-parse", "--show-toplevel"]).decode('utf-8').strip()
@@ -152,6 +154,13 @@ def _hash_to_sri(algorithm, value):
         value
     ]).decode().strip()
 
+
+def _skip_bulk_update(attr_name: str) -> bool:
+    return bool(_get_attr_value(
+        f"{attr_name}.skipBulkUpdate"
+    ))
+
+
 SEMVER = {
     'major' : 0,
     'minor' : 1,
@@ -369,6 +378,8 @@ def _update_package(path, target):
     # Attempt a fetch using each pname, e.g. backports-zoneinfo vs backports.zoneinfo
     successful_fetch = False
     for pname in pnames:
+        if BULK_UPDATE and _skip_bulk_update(f"python3Packages.{pname}"):
+            raise ValueError(f"Bulk update skipped for {pname}")
         try:
             new_version, new_sha256, prefix = FETCHERS[fetcher](pname, extension, version, target)
             successful_fetch = True
@@ -489,6 +500,10 @@ environment variables:
 
     packages = list(map(os.path.abspath, args.package))
 
+    if len(packages) > 1:
+        global BULK_UPDATE
+        BULK_UPDATE = True
+
     logging.info("Updating packages...")
 
     # Use threads to update packages concurrently
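
Note: as a sketch of what opting out looks like on the packaging side.
The derivation below is hypothetical; only `passthru.skipBulkUpdate =
true` is the attribute this patch evaluates, while `buildPythonPackage`,
`fetchPypi`, and `lib.fakeHash` are the usual nixpkgs idioms:

    { lib, buildPythonPackage, fetchPypi }:

    buildPythonPackage rec {
      pname = "example-fragile-package";  # hypothetical package name
      version = "1.2.3";

      src = fetchPypi {
        inherit pname version;
        hash = lib.fakeHash;  # placeholder hash
      };

      # Exclude this package from bulk runs of update-python-libraries;
      # it can still be updated by passing it to the script individually.
      passthru.skipBulkUpdate = true;
    }

Bulk mode is what the script enters when invoked with more than one
package path (the `len(packages) > 1` check above), so a single-package
invocation still updates a package that sets this attribute.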