Merge master into staging-next

This commit is contained in:
github-actions[bot] 2023-10-30 12:01:05 +00:00 committed by GitHub
commit 7f11b9b46b
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
35 changed files with 915 additions and 453 deletions

View File

@ -22,6 +22,7 @@ use JSON::PP;
use IPC::Cmd;
use Sys::Syslog qw(:standard :macros);
use Cwd qw(abs_path);
use Fcntl ':flock';
## no critic(ControlStructures::ProhibitDeepNests)
## no critic(ErrorHandling::RequireCarping)
@ -91,6 +92,8 @@ if (!-f "/etc/NIXOS" && (read_file("/etc/os-release", err_mode => "quiet") // ""
}
make_path("/run/nixos", { mode => oct(755) });
open(my $stc_lock, '>>', '/run/nixos/switch-to-configuration.lock') or die "Could not open lock - $!";
flock($stc_lock, LOCK_EX) or die "Could not acquire lock - $!";
openlog("nixos", "", LOG_USER);
# Install or update the bootloader.
@ -985,4 +988,5 @@ if ($res == 0) {
syslog(LOG_ERR, "switching to system configuration $toplevel failed (status $res)");
}
close($stc_lock) or die "Could not close lock - $!";
exit($res);

View File

@ -26,14 +26,14 @@
stdenv.mkDerivation (finalAttrs: {
pname = "qmplay2";
version = "23.08.22";
version = "23.10.22";
src = fetchFromGitHub {
owner = "zaps166";
repo = "QMPlay2";
rev = finalAttrs.version;
fetchSubmodules = true;
hash = "sha256-Ug7WAqZ+BxspQUXweL/OnVBGCsU60DOWNexbi0GpDo0=";
hash = "sha256-yDymUXuILgT4AFTt302GniPi/WNwrTCOuOfdUiKOIyk=";
};
nativeBuildInputs = [

View File

@ -80,14 +80,11 @@ stdenv.mkDerivation rec {
sourceRoot = ".";
outputs = [ "out" "sddm" ];
nativeBuildInputs = [ jdupes ];
propagatedUserEnvPkgs = [
gtk-engine-murrine
breeze-icons
plasma-framework
plasma-workspace
];
propagatedUserEnvPkgs = [ gtk-engine-murrine ];
dontWrapQtApps = true;
@ -119,15 +116,18 @@ stdenv.mkDerivation rec {
rmdir $out/share/themes/Nordic/extras{/wallpapers,}
# move kde related contents to appropriate directories
mkdir -p $out/share/{aurorae/themes,color-schemes,Kvantum,plasma,sddm/themes,icons}
mkdir -p $out/share/{aurorae/themes,color-schemes,Kvantum,plasma,icons}
mv -v $out/share/themes/Nordic/kde/aurorae/* $out/share/aurorae/themes/
mv -v $out/share/themes/Nordic/kde/colorschemes/* $out/share/color-schemes/
mv -v $out/share/themes/Nordic/kde/konsole $out/share/
mv -v $out/share/themes/Nordic/kde/kvantum/* $out/share/Kvantum/
mv -v $out/share/themes/Nordic/kde/plasma/look-and-feel $out/share/plasma/
mv -v $out/share/themes/Nordic/kde/sddm/* $out/share/sddm/themes/
mv -v $out/share/themes/Nordic/kde/folders/* $out/share/icons/
mv -v $out/share/themes/Nordic/kde/cursors/*-cursors $out/share/icons/
mkdir -p $sddm/share/sddm/themes
mv -v $out/share/themes/Nordic/kde/sddm/* $sddm/share/sddm/themes/
rm -rf $out/share/themes/Nordic/kde
# Replace duplicate files with symbolic links to the first file in
@ -137,6 +137,16 @@ stdenv.mkDerivation rec {
runHook postInstall
'';
postFixup = ''
# Propagate sddm theme dependencies to user env otherwise sddm
# does not find them. Putting them in buildInputs is not enough.
mkdir -p $sddm/nix-support
printWords ${breeze-icons} ${plasma-framework} ${plasma-workspace} \
>> $sddm/nix-support/propagated-user-env-packages
'';
meta = with lib; {
description = "Gtk and KDE themes using the Nord color palette";
homepage = "https://github.com/EliverLara/Nordic";

View File

@ -4,29 +4,25 @@
, pythonOlder
, azure-common
, azure-mgmt-core
, msrest
, typing-extensions
, isodate
}:
buildPythonPackage rec {
pname = "azure-mgmt-containerregistry";
version = "10.1.0";
version = "10.2.0";
format = "setuptools";
disabled = pythonOlder "3.7";
disabled = pythonOlder "3.8";
src = fetchPypi {
inherit pname version;
hash = "sha256-VrX9YfYNvlA8+eNqHCp35BAeQZzQKakZs7ZZKwT8oYc=";
extension = "zip";
hash = "sha256-i7i/5ofGxiF9/wTAPnUOaZ6FAgK3EaBqoHeSC8HuXCo=";
};
propagatedBuildInputs = [
azure-common
azure-mgmt-core
msrest
] ++ lib.optionals (pythonOlder "3.8") [
typing-extensions
isodate
];
# no tests included
@ -40,6 +36,7 @@ buildPythonPackage rec {
meta = with lib; {
description = "Microsoft Azure Container Registry Client Library for Python";
homepage = "https://github.com/Azure/azure-sdk-for-python";
changelog = "https://github.com/Azure/azure-sdk-for-python/blob/azure-mgmt-containerregistry_${version}/sdk/containerregistry/azure-mgmt-containerregistry/CHANGELOG.md";
license = licenses.mit;
maintainers = with maintainers; [ jonringer ];
};

View File

@ -4,28 +4,25 @@
, pythonOlder
, azure-common
, azure-mgmt-core
, msrest
, msrestazure
, isodate
}:
buildPythonPackage rec {
pname = "azure-mgmt-netapp";
version = "10.1.0";
version = "11.0.0";
format = "setuptools";
disabled = pythonOlder "3.7";
disabled = pythonOlder "3.8";
src = fetchPypi {
inherit pname version;
hash = "sha256-eJiWTOCk2C79Jotku9bKlu3vU6H8004hWrX+h76MjQM=";
extension = "zip";
hash = "sha256-00cDFHpaEciRQLHM+Kt3uOtw/geOn5+onrY7lav6EeU=";
};
propagatedBuildInputs = [
azure-common
azure-mgmt-core
msrest
msrestazure
isodate
];
# no tests included
@ -39,6 +36,7 @@ buildPythonPackage rec {
meta = with lib; {
description = "Microsoft Azure NetApp Files Management Client Library for Python";
homepage = "https://github.com/Azure/azure-sdk-for-python";
changelog = "https://github.com/Azure/azure-sdk-for-python/blob/azure-mgmt-netapp_${version}/sdk/netapp/azure-mgmt-netapp/CHANGELOG.md";
license = licenses.mit;
maintainers = with maintainers; [ jonringer ];
};

View File

@ -2,6 +2,7 @@
, buildPythonPackage
, fetchPypi
, pythonOlder
, setuptools
# extras: babel
, babel
@ -11,7 +12,6 @@
, bcrypt
, bleach
, flask-mailman
, qrcode
# extras: fsqla
, flask-sqlalchemy
@ -21,20 +21,21 @@
# extras: mfa
, cryptography
, phonenumbers
, webauthn
, qrcode
# propagates
, blinker
, email-validator
, flask
, flask-login
, flask-principal
, flask-wtf
, itsdangerous
, passlib
, importlib-resources
, wtforms
# tests
, argon2-cffi
, flask-mongoengine
, mongoengine
, mongomock
, peewee
@ -46,31 +47,30 @@
buildPythonPackage rec {
pname = "flask-security-too";
version = "5.3.0";
format = "setuptools";
version = "5.3.2";
pyproject = true;
disabled = pythonOlder "3.7";
src = fetchPypi {
pname = "Flask-Security-Too";
inherit version;
hash = "sha256-n12DCRPqxm8YhFeVrl99BEvdDYNq6rzP662rain3k1Q=";
hash = "sha256-wLUHXfDWSp7zWwTIjTH79AWlkkNzb21tChpLSEWr8+U=";
};
postPatch = ''
# This should be removed after updating to version 5.3.0.
sed -i '/filterwarnings =/a ignore:pkg_resources is deprecated:DeprecationWarning' pytest.ini
'';
nativeBuildInputs = [
setuptools
];
propagatedBuildInputs = [
blinker
email-validator
flask
flask-login
flask-principal
flask-wtf
itsdangerous
passlib
importlib-resources
wtforms
];
passthru.optional-dependencies = {
@ -82,7 +82,6 @@ buildPythonPackage rec {
bcrypt
bleach
flask-mailman
qrcode
];
fsqla = [
flask-sqlalchemy
@ -92,12 +91,13 @@ buildPythonPackage rec {
mfa = [
cryptography
phonenumbers
webauthn
qrcode
];
};
nativeCheckInputs = [
argon2-cffi
flask-mongoengine
mongoengine
mongomock
peewee
@ -112,6 +112,11 @@ buildPythonPackage rec {
++ passthru.optional-dependencies.mfa;
disabledTests = [
# needs /etc/resolv.conf
"test_login_email_whatever"
];
pythonImportsCheck = [
"flask_security"
];

View File

@ -1,25 +1,25 @@
{ lib
, buildPythonPackage
, cloudscraper
, fetchFromGitHub
, garth
, pdm-backend
, pythonOlder
, requests
, withings-sync
}:
buildPythonPackage rec {
pname = "garminconnect";
version = "0.2.8";
version = "0.2.9";
format = "pyproject";
disabled = pythonOlder "3.7";
disabled = pythonOlder "3.10";
src = fetchFromGitHub {
owner = "cyberjunky";
repo = "python-garminconnect";
rev = "refs/tags/${version}";
hash = "sha256-jNDFSA6Mz0+7UhEVrCKcKDEX3B7yk6igBf59A6YlW2M=";
hash = "sha256-wQWOksI0nfzIMdxgZehMmNytuXWD22GLUNoI7Ki0C3s=";
};
nativeBuildInputs = [
@ -27,9 +27,9 @@ buildPythonPackage rec {
];
propagatedBuildInputs = [
cloudscraper
garth
requests
withings-sync
];
# Tests require a token

View File

@ -12,14 +12,14 @@
buildPythonPackage rec {
pname = "google-cloud-vision";
version = "3.4.4";
version = "3.4.5";
format = "setuptools";
disabled = pythonOlder "3.7";
src = fetchPypi {
inherit pname version;
hash = "sha256-QFdErlCFIDTMR7MqmxuuUNP7Cc0eIWABQYKJHvV2ZpU=";
hash = "sha256-DfgkGrJ3GZuRnKODen3oUFk2P+oOPWYAYIcL587/wEc=";
};
propagatedBuildInputs = [

View File

@ -17,7 +17,7 @@
buildPythonPackage rec {
pname = "hap-python";
version = "4.9.0";
version = "4.9.1";
format = "setuptools";
disabled = pythonOlder "3.6";
@ -26,7 +26,7 @@ buildPythonPackage rec {
owner = "ikalchev";
repo = "HAP-python";
rev = "refs/tags/${version}";
hash = "sha256-bFSqMAZWE3xTfnc7FSQMfAhxhKlYm65VFpm+q3yrqpE=";
hash = "sha256-nnh8PSEcuPN1qGuInJ7uYe83zdne8axbTrHd4g1xoJs=";
};
propagatedBuildInputs = [

View File

@ -6,14 +6,14 @@
buildPythonPackage rec {
pname = "peaqevcore";
version = "19.5.10";
version = "19.5.12";
format = "setuptools";
disabled = pythonOlder "3.7";
src = fetchPypi {
inherit pname version;
hash = "sha256-izw41TUmqKOy34/RMHjBROQr88SChheKJVpPMaOubnE=";
hash = "sha256-NsQrfJQ1+WZ4wNBH8ZGGo9IMJ+yvWrVQmesDBQrfRKg=";
};
postPatch = ''

View File

@ -5,20 +5,25 @@
, pytestCheckHook
, pythonOlder
, requests
, setuptools
}:
buildPythonPackage rec {
pname = "publicsuffixlist";
version = "0.10.0.20231026";
format = "setuptools";
version = "0.10.0.20231030";
pyproject = true;
disabled = pythonOlder "3.7";
src = fetchPypi {
inherit pname version;
hash = "sha256-q2rUBjbue3I3VnRLTF7UscBs51bGxUGjMYwAkgX5UMs=";
hash = "sha256-1yRv6zg9mKJTinR57QHvCx/0mi0b2O3CkcoH1v4QuNo=";
};
nativeBuildInputs = [
setuptools
];
passthru.optional-dependencies = {
update = [
requests

View File

@ -0,0 +1,48 @@
{ lib
, buildPythonPackage
, fetchFromGitHub
, garth
, lxml
, pythonOlder
, requests
, setuptools
, wheel
}:
buildPythonPackage rec {
pname = "withings-sync";
version = "4.2.1";
pyproject = true;
disabled = pythonOlder "3.10";
src = fetchFromGitHub {
owner = "jaroslawhartman";
repo = "withings-sync";
rev = "refs/tags/v${version}";
hash = "sha256-6igjUmgIA077/1SQMt10tRpnLVKxGFNJN1GeLhQLROg=";
};
nativeBuildInputs = [
setuptools
wheel
];
propagatedBuildInputs = [
garth
lxml
requests
];
pythonImportsCheck = [
"withings_sync"
];
meta = with lib; {
description = "Synchronisation of Withings weight";
homepage = "https://github.com/jaroslawhartman/withings-sync";
changelog = "https://github.com/jaroslawhartman/withings-sync/releases/tag/v${version}";
license = licenses.mit;
maintainers = with maintainers; [ fab ];
};
}

View File

@ -9,13 +9,13 @@
stdenv.mkDerivation rec {
pname = "brogue-ce";
version = "1.12";
version = "1.13";
src = fetchFromGitHub {
owner = "tmewett";
repo = "BrogueCE";
rev = "v${version}";
hash = "sha256-bGAE0hRiKBo3ikyObGxAiPRRO24KtC+upO3XLj+f4yo=";
hash = "sha256-FUIdi1Ytn+INeD9550MW41qXtLb6in0QS3Snt8QaXUA=";
};
postPatch = ''

View File

@ -558,6 +558,8 @@ let
PERSISTENT_KEYRINGS = yes;
# enable temporary caching of the last request_key() result
KEYS_REQUEST_CACHE = whenAtLeast "5.3" yes;
# randomized slab caches
RANDOM_KMALLOC_CACHES = whenAtLeast "6.6" yes;
} // optionalAttrs stdenv.hostPlatform.isx86_64 {
# Enable Intel SGX
X86_SGX = whenAtLeast "5.11" yes;
@ -572,6 +574,8 @@ let
KVM_AMD_SEV = yes;
# AMD SEV-SNP
SEV_GUEST = whenAtLeast "5.19" module;
# Shadow stacks
X86_USER_SHADOW_STACK = whenAtLeast "6.6" yes;
};
microcode = {

View File

@ -30,5 +30,9 @@
"4.14": {
"version": "4.14.328",
"hash": "sha256:1igcpvnhwwrczfdsafmszvi0456k7f6j4cgpfw6v6afw09p95d8x"
},
"6.6": {
"version": "6.6",
"hash": "sha256:1l2nisx9lf2vdgkq910n5ldbi8z25ky1zvl67zgwg2nxcdna09nr"
}
}

View File

@ -6,14 +6,14 @@ let
# NOTE: When updating these, please also take a look at the changes done to
# kernel config in the xanmod version commit
ltsVariant = {
version = "6.1.58";
hash = "sha256-Lnp1CSh1jLbIkEx9hLfxhdIA12iQZmywhOec9uZ7UjI=";
version = "6.1.60";
hash = "sha256-KYCeONJxyFPee4pvBLRw/MBTzPU7D2oZCrAVr3t/yPM=";
variant = "lts";
};
mainVariant = {
version = "6.5.8";
hash = "sha256-lHi+O7RE6YdiqPmuxHajGkc7jS9F5cB89+JbTVKkB/c=";
version = "6.5.9";
hash = "sha256-5SFPBsDTmq7tA6pyM7rbIjBPAtPbqhUl6VfA2z5baPA=";
variant = "main";
};

View File

@ -25,13 +25,13 @@ let
in
python.pkgs.buildPythonApplication rec {
pname = "calibre-web";
version = "0.6.20";
version = "0.6.21";
src = fetchFromGitHub {
owner = "janeczku";
repo = "calibre-web";
rev = version;
hash = "sha256-0lArY1aTpO4sgIVDSqClYMGlip92f9hE/L2UouTLK8Q=";
hash = "sha256-tRrOquetn3P2NmrXq7DQHRGP1sWnLR7bV2Lw0W/lUPQ=";
};
propagatedBuildInputs = with python.pkgs; [
@ -64,8 +64,6 @@ python.pkgs.buildPythonApplication rec {
# and exit. This is gonna be used to configure calibre-web declaratively, as most of its configuration parameters
# are stored in the DB.
./db-migrations.patch
# environ in tornado.wsgi.WSGIContainer no longer a static method from 6.3 version
./static_environ.patch
];
# calibre-web doesn't follow setuptools directory structure. The following is taken from the script

View File

@ -1,25 +0,0 @@
diff --git a/cps/tornado_wsgi.py b/cps/tornado_wsgi.py
index af93219c..cf302042 100644
--- a/cps/tornado_wsgi.py
+++ b/cps/tornado_wsgi.py
@@ -53,7 +53,7 @@ class MyWSGIContainer(WSGIContainer):
return response.append
app_response = self.wsgi_application(
- MyWSGIContainer.environ(request), start_response
+ self.environ(request), start_response
)
try:
response.extend(app_response)
@@ -86,9 +86,8 @@ class MyWSGIContainer(WSGIContainer):
request.connection.finish()
self._log(status_code, request)
- @staticmethod
- def environ(request: httputil.HTTPServerRequest) -> Dict[Text, Any]:
- environ = WSGIContainer.environ(request)
+ def environ(self, request: httputil.HTTPServerRequest) -> Dict[Text, Any]:
+ environ = super().environ(request)
environ['RAW_URI'] = request.path
return environ

View File

@ -1,6 +1,6 @@
{ rustPlatform
, lib
, fetchurl
, fetchzip
, openssl
, pkg-config
, systemd
@ -8,14 +8,14 @@
rustPlatform.buildRustPackage rec {
pname = "pr-tracker";
version = "1.2.0";
version = "1.3.0";
src = fetchurl {
src = fetchzip {
url = "https://git.qyliss.net/pr-tracker/snapshot/pr-tracker-${version}.tar.xz";
sha256 = "sha256-Tru9DsitRQLiO4Ln70J9LvkEqcj2i4A+eArBvIhd/ls=";
hash = "sha256-JetfcA7Pn6nsCxCkgxP4jS6tijx89any/0GrmLa+DR0=";
};
cargoSha256 = "0q3ibxnzw8gngvrgfkv4m64dr411c511xkvb6j9k63vhy9vwarz7";
cargoSha256 = "sha256-QUr0IHmzbhFNd6rBDEX8RZul/d1TLv0t+ySCQYMlpmE=";
nativeBuildInputs = [ pkg-config ];
buildInputs = [ openssl systemd ];

View File

@ -162,6 +162,12 @@ version = "3.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7704b5fdd17b18ae31c4c1da5a2e0305a2bf17b5249300a9ee9ed7b72114c636"
[[package]]
name = "either"
version = "1.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07"
[[package]]
name = "errno"
version = "0.3.2"
@ -218,6 +224,15 @@ dependencies = [
"windows-sys",
]
[[package]]
name = "itertools"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b1c173a5686ce8bfa551b3563d0c2170bf24ca44da99c7ca4bfdab5418c3fe57"
dependencies = [
"either",
]
[[package]]
name = "itoa"
version = "1.0.9"
@ -274,6 +289,7 @@ dependencies = [
"anyhow",
"clap",
"colored",
"itertools",
"lazy_static",
"regex",
"rnix",

View File

@ -13,6 +13,7 @@ serde = { version = "1.0.185", features = ["derive"] }
anyhow = "1.0"
lazy_static = "1.4.0"
colored = "2.0.4"
itertools = "0.11.0"
[dev-dependencies]
temp-env = "0.3.5"

View File

@ -1,6 +1,6 @@
# Nixpkgs pkgs/by-name checker
This directory implements a program to check the [validity](#validity-checks) of the `pkgs/by-name` Nixpkgs directory once introduced.
This directory implements a program to check the [validity](#validity-checks) of the `pkgs/by-name` Nixpkgs directory.
It is being used by [this GitHub Actions workflow](../../../.github/workflows/check-by-name.yml).
This is part of the implementation of [RFC 140](https://github.com/NixOS/rfcs/pull/140).
@ -24,7 +24,7 @@ This API may be changed over time if the CI workflow making use of it is adjuste
- `2`: If an unexpected I/O error occurs
- Standard error:
- Informative messages
- Error messages if validation is not successful
- Detected problems if validation is not successful
## Validity checks

View File

@ -1,12 +1,12 @@
use crate::nixpkgs_problem::NixpkgsProblem;
use crate::structure;
use crate::utils::ErrorWriter;
use crate::validation::{self, Validation::Success};
use crate::Version;
use std::path::Path;
use anyhow::Context;
use serde::Deserialize;
use std::collections::HashMap;
use std::io;
use std::path::PathBuf;
use std::process;
use tempfile::NamedTempFile;
@ -40,12 +40,12 @@ const EXPR: &str = include_str!("eval.nix");
/// Check that the Nixpkgs attribute values corresponding to the packages in pkgs/by-name are
/// of the form `callPackage <package_file> { ... }`.
/// See the `eval.nix` file for how this is achieved on the Nix side
pub fn check_values<W: io::Write>(
pub fn check_values(
version: Version,
error_writer: &mut ErrorWriter<W>,
nixpkgs: &structure::Nixpkgs,
nixpkgs_path: &Path,
package_names: Vec<String>,
eval_accessible_paths: Vec<&Path>,
) -> anyhow::Result<()> {
) -> validation::Result<()> {
// Write the list of packages we need to check into a temporary JSON file.
// This can then get read by the Nix evaluation.
let attrs_file = NamedTempFile::new().context("Failed to create a temporary file")?;
@ -55,7 +55,7 @@ pub fn check_values<W: io::Write>(
// entry is needed.
let attrs_file_path = attrs_file.path().canonicalize()?;
serde_json::to_writer(&attrs_file, &nixpkgs.package_names).context(format!(
serde_json::to_writer(&attrs_file, &package_names).context(format!(
"Failed to serialise the package names to the temporary path {}",
attrs_file_path.display()
))?;
@ -87,9 +87,9 @@ pub fn check_values<W: io::Write>(
.arg(&attrs_file_path)
// Same for the nixpkgs to test
.args(["--arg", "nixpkgsPath"])
.arg(&nixpkgs.path)
.arg(nixpkgs_path)
.arg("-I")
.arg(&nixpkgs.path);
.arg(nixpkgs_path);
// Also add extra paths that need to be accessible
for path in eval_accessible_paths {
@ -111,52 +111,54 @@ pub fn check_values<W: io::Write>(
String::from_utf8_lossy(&result.stdout)
))?;
for package_name in &nixpkgs.package_names {
let relative_package_file = structure::Nixpkgs::relative_file_for_package(package_name);
let absolute_package_file = nixpkgs.path.join(&relative_package_file);
Ok(validation::sequence_(package_names.iter().map(
|package_name| {
let relative_package_file = structure::relative_file_for_package(package_name);
let absolute_package_file = nixpkgs_path.join(&relative_package_file);
if let Some(attribute_info) = actual_files.get(package_name) {
let valid = match &attribute_info.variant {
AttributeVariant::AutoCalled => true,
AttributeVariant::CallPackage { path, empty_arg } => {
let correct_file = if let Some(call_package_path) = path {
absolute_package_file == *call_package_path
} else {
false
};
// Only check for the argument to be non-empty if the version is V1 or
// higher
let non_empty = if version >= Version::V1 {
!empty_arg
} else {
true
};
correct_file && non_empty
if let Some(attribute_info) = actual_files.get(package_name) {
let valid = match &attribute_info.variant {
AttributeVariant::AutoCalled => true,
AttributeVariant::CallPackage { path, empty_arg } => {
let correct_file = if let Some(call_package_path) = path {
absolute_package_file == *call_package_path
} else {
false
};
// Only check for the argument to be non-empty if the version is V1 or
// higher
let non_empty = if version >= Version::V1 {
!empty_arg
} else {
true
};
correct_file && non_empty
}
AttributeVariant::Other => false,
};
if !valid {
NixpkgsProblem::WrongCallPackage {
relative_package_file: relative_package_file.clone(),
package_name: package_name.clone(),
}
.into()
} else if !attribute_info.is_derivation {
NixpkgsProblem::NonDerivation {
relative_package_file: relative_package_file.clone(),
package_name: package_name.clone(),
}
.into()
} else {
Success(())
}
AttributeVariant::Other => false,
};
if !valid {
error_writer.write(&format!(
"pkgs.{package_name}: This attribute is manually defined (most likely in pkgs/top-level/all-packages.nix), which is only allowed if the definition is of the form `pkgs.callPackage {} {{ ... }}` with a non-empty second argument.",
relative_package_file.display()
))?;
continue;
} else {
NixpkgsProblem::UndefinedAttr {
relative_package_file: relative_package_file.clone(),
package_name: package_name.clone(),
}
.into()
}
if !attribute_info.is_derivation {
error_writer.write(&format!(
"pkgs.{package_name}: This attribute defined by {} is not a derivation",
relative_package_file.display()
))?;
}
} else {
error_writer.write(&format!(
"pkgs.{package_name}: This attribute is not defined but it should be defined automatically as {}",
relative_package_file.display()
))?;
continue;
}
}
Ok(())
},
)))
}

View File

@ -1,16 +1,19 @@
mod eval;
mod nixpkgs_problem;
mod references;
mod structure;
mod utils;
mod validation;
use crate::structure::check_structure;
use crate::validation::Validation::Failure;
use crate::validation::Validation::Success;
use anyhow::Context;
use clap::{Parser, ValueEnum};
use colored::Colorize;
use std::io;
use std::path::{Path, PathBuf};
use std::process::ExitCode;
use structure::Nixpkgs;
use utils::ErrorWriter;
/// Program to check the validity of pkgs/by-name
#[derive(Parser, Debug)]
@ -63,8 +66,8 @@ fn main() -> ExitCode {
///
/// # Return value
/// - `Err(e)` if an I/O-related error `e` occurred.
/// - `Ok(false)` if the structure is invalid, all the structural errors have been written to `error_writer`.
/// - `Ok(true)` if the structure is valid, nothing will have been written to `error_writer`.
/// - `Ok(false)` if there are problems, all of which will be written to `error_writer`.
/// - `Ok(true)` if there are no problems
pub fn check_nixpkgs<W: io::Write>(
nixpkgs_path: &Path,
version: Version,
@ -76,31 +79,38 @@ pub fn check_nixpkgs<W: io::Write>(
nixpkgs_path.display()
))?;
// Wraps the error_writer to print everything in red, and tracks whether anything was printed
// at all. Later used to figure out if the structure was valid or not.
let mut error_writer = ErrorWriter::new(error_writer);
if !nixpkgs_path.join(structure::BASE_SUBPATH).exists() {
let check_result = if !nixpkgs_path.join(utils::BASE_SUBPATH).exists() {
eprintln!(
"Given Nixpkgs path does not contain a {} subdirectory, no check necessary.",
structure::BASE_SUBPATH
utils::BASE_SUBPATH
);
Success(())
} else {
let nixpkgs = Nixpkgs::new(&nixpkgs_path, &mut error_writer)?;
if error_writer.empty {
// Only if we could successfully parse the structure, we do the semantic checks
eval::check_values(version, &mut error_writer, &nixpkgs, eval_accessible_paths)?;
references::check_references(&mut error_writer, &nixpkgs)?;
match check_structure(&nixpkgs_path)? {
Failure(errors) => Failure(errors),
Success(package_names) =>
// Only if we could successfully parse the structure, we do the evaluation checks
{
eval::check_values(version, &nixpkgs_path, package_names, eval_accessible_paths)?
}
}
};
match check_result {
Failure(errors) => {
for error in errors {
writeln!(error_writer, "{}", error.to_string().red())?
}
Ok(false)
}
Success(_) => Ok(true),
}
Ok(error_writer.empty)
}
#[cfg(test)]
mod tests {
use crate::check_nixpkgs;
use crate::structure;
use crate::utils;
use crate::Version;
use anyhow::Context;
use std::fs;
@ -145,7 +155,7 @@ mod tests {
return Ok(());
}
let base = path.join(structure::BASE_SUBPATH);
let base = path.join(utils::BASE_SUBPATH);
fs::create_dir_all(base.join("fo/foo"))?;
fs::write(base.join("fo/foo/package.nix"), "{ someDrv }: someDrv")?;

View File

@ -0,0 +1,218 @@
use crate::utils::PACKAGE_NIX_FILENAME;
use rnix::parser::ParseError;
use std::ffi::OsString;
use std::fmt;
use std::io;
use std::path::PathBuf;
/// Any problem that can occur when checking Nixpkgs
pub enum NixpkgsProblem {
ShardNonDir {
relative_shard_path: PathBuf,
},
InvalidShardName {
relative_shard_path: PathBuf,
shard_name: String,
},
PackageNonDir {
relative_package_dir: PathBuf,
},
CaseSensitiveDuplicate {
relative_shard_path: PathBuf,
first: OsString,
second: OsString,
},
InvalidPackageName {
relative_package_dir: PathBuf,
package_name: String,
},
IncorrectShard {
relative_package_dir: PathBuf,
correct_relative_package_dir: PathBuf,
},
PackageNixNonExistent {
relative_package_dir: PathBuf,
},
PackageNixDir {
relative_package_dir: PathBuf,
},
UndefinedAttr {
relative_package_file: PathBuf,
package_name: String,
},
WrongCallPackage {
relative_package_file: PathBuf,
package_name: String,
},
NonDerivation {
relative_package_file: PathBuf,
package_name: String,
},
OutsideSymlink {
relative_package_dir: PathBuf,
subpath: PathBuf,
},
UnresolvableSymlink {
relative_package_dir: PathBuf,
subpath: PathBuf,
io_error: io::Error,
},
CouldNotParseNix {
relative_package_dir: PathBuf,
subpath: PathBuf,
error: ParseError,
},
PathInterpolation {
relative_package_dir: PathBuf,
subpath: PathBuf,
line: usize,
text: String,
},
SearchPath {
relative_package_dir: PathBuf,
subpath: PathBuf,
line: usize,
text: String,
},
OutsidePathReference {
relative_package_dir: PathBuf,
subpath: PathBuf,
line: usize,
text: String,
},
UnresolvablePathReference {
relative_package_dir: PathBuf,
subpath: PathBuf,
line: usize,
text: String,
io_error: io::Error,
},
}
impl fmt::Display for NixpkgsProblem {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
NixpkgsProblem::ShardNonDir { relative_shard_path } =>
write!(
f,
"{}: This is a file, but it should be a directory.",
relative_shard_path.display(),
),
NixpkgsProblem::InvalidShardName { relative_shard_path, shard_name } =>
write!(
f,
"{}: Invalid directory name \"{shard_name}\", must be at most 2 ASCII characters consisting of a-z, 0-9, \"-\" or \"_\".",
relative_shard_path.display()
),
NixpkgsProblem::PackageNonDir { relative_package_dir } =>
write!(
f,
"{}: This path is a file, but it should be a directory.",
relative_package_dir.display(),
),
NixpkgsProblem::CaseSensitiveDuplicate { relative_shard_path, first, second } =>
write!(
f,
"{}: Duplicate case-sensitive package directories {first:?} and {second:?}.",
relative_shard_path.display(),
),
NixpkgsProblem::InvalidPackageName { relative_package_dir, package_name } =>
write!(
f,
"{}: Invalid package directory name \"{package_name}\", must be ASCII characters consisting of a-z, A-Z, 0-9, \"-\" or \"_\".",
relative_package_dir.display(),
),
NixpkgsProblem::IncorrectShard { relative_package_dir, correct_relative_package_dir } =>
write!(
f,
"{}: Incorrect directory location, should be {} instead.",
relative_package_dir.display(),
correct_relative_package_dir.display(),
),
NixpkgsProblem::PackageNixNonExistent { relative_package_dir } =>
write!(
f,
"{}: Missing required \"{PACKAGE_NIX_FILENAME}\" file.",
relative_package_dir.display(),
),
NixpkgsProblem::PackageNixDir { relative_package_dir } =>
write!(
f,
"{}: \"{PACKAGE_NIX_FILENAME}\" must be a file.",
relative_package_dir.display(),
),
NixpkgsProblem::UndefinedAttr { relative_package_file, package_name } =>
write!(
f,
"pkgs.{package_name}: This attribute is not defined but it should be defined automatically as {}",
relative_package_file.display()
),
NixpkgsProblem::WrongCallPackage { relative_package_file, package_name } =>
write!(
f,
"pkgs.{package_name}: This attribute is manually defined (most likely in pkgs/top-level/all-packages.nix), which is only allowed if the definition is of the form `pkgs.callPackage {} {{ ... }}` with a non-empty second argument.",
relative_package_file.display()
),
NixpkgsProblem::NonDerivation { relative_package_file, package_name } =>
write!(
f,
"pkgs.{package_name}: This attribute defined by {} is not a derivation",
relative_package_file.display()
),
NixpkgsProblem::OutsideSymlink { relative_package_dir, subpath } =>
write!(
f,
"{}: Path {} is a symlink pointing to a path outside the directory of that package.",
relative_package_dir.display(),
subpath.display(),
),
NixpkgsProblem::UnresolvableSymlink { relative_package_dir, subpath, io_error } =>
write!(
f,
"{}: Path {} is a symlink which cannot be resolved: {io_error}.",
relative_package_dir.display(),
subpath.display(),
),
NixpkgsProblem::CouldNotParseNix { relative_package_dir, subpath, error } =>
write!(
f,
"{}: File {} could not be parsed by rnix: {}",
relative_package_dir.display(),
subpath.display(),
error,
),
NixpkgsProblem::PathInterpolation { relative_package_dir, subpath, line, text } =>
write!(
f,
"{}: File {} at line {line} contains the path expression \"{}\", which is not yet supported and may point outside the directory of that package.",
relative_package_dir.display(),
subpath.display(),
text
),
NixpkgsProblem::SearchPath { relative_package_dir, subpath, line, text } =>
write!(
f,
"{}: File {} at line {line} contains the nix search path expression \"{}\" which may point outside the directory of that package.",
relative_package_dir.display(),
subpath.display(),
text
),
NixpkgsProblem::OutsidePathReference { relative_package_dir, subpath, line, text } =>
write!(
f,
"{}: File {} at line {line} contains the path expression \"{}\" which may point outside the directory of that package.",
relative_package_dir.display(),
subpath.display(),
text,
),
NixpkgsProblem::UnresolvablePathReference { relative_package_dir, subpath, line, text, io_error } =>
write!(
f,
"{}: File {} at line {line} contains the path expression \"{}\" which cannot be resolved: {io_error}.",
relative_package_dir.display(),
subpath.display(),
text,
),
}
}
}

View File

@ -1,105 +1,98 @@
use crate::structure::Nixpkgs;
use crate::nixpkgs_problem::NixpkgsProblem;
use crate::utils;
use crate::utils::{ErrorWriter, LineIndex};
use crate::utils::LineIndex;
use crate::validation::{self, ResultIteratorExt, Validation::Success};
use anyhow::Context;
use rnix::{Root, SyntaxKind::NODE_PATH};
use std::ffi::OsStr;
use std::fs::read_to_string;
use std::io;
use std::path::{Path, PathBuf};
/// Small helper so we don't need to pass in the same arguments to all functions
struct PackageContext<'a, W: io::Write> {
error_writer: &'a mut ErrorWriter<W>,
/// The package directory relative to Nixpkgs, such as `pkgs/by-name/fo/foo`
relative_package_dir: &'a PathBuf,
/// The absolute package directory
absolute_package_dir: &'a PathBuf,
}
use std::path::Path;
/// Check that every package directory in pkgs/by-name doesn't link to outside that directory.
/// Both symlinks and Nix path expressions are checked.
pub fn check_references<W: io::Write>(
error_writer: &mut ErrorWriter<W>,
nixpkgs: &Nixpkgs,
) -> anyhow::Result<()> {
// Check the directories for each package separately
for package_name in &nixpkgs.package_names {
let relative_package_dir = Nixpkgs::relative_dir_for_package(package_name);
let mut context = PackageContext {
error_writer,
relative_package_dir: &relative_package_dir,
absolute_package_dir: &nixpkgs.path.join(&relative_package_dir),
};
// The empty argument here is the subpath under the package directory to check
// An empty one means the package directory itself
check_path(&mut context, Path::new("")).context(format!(
"While checking the references in package directory {}",
relative_package_dir.display()
))?;
}
Ok(())
pub fn check_references(
relative_package_dir: &Path,
absolute_package_dir: &Path,
) -> validation::Result<()> {
// The empty argument here is the subpath under the package directory to check
// An empty one means the package directory itself
check_path(relative_package_dir, absolute_package_dir, Path::new("")).context(format!(
"While checking the references in package directory {}",
relative_package_dir.display()
))
}
/// Checks for a specific path to not have references outside
fn check_path<W: io::Write>(context: &mut PackageContext<W>, subpath: &Path) -> anyhow::Result<()> {
let path = context.absolute_package_dir.join(subpath);
fn check_path(
relative_package_dir: &Path,
absolute_package_dir: &Path,
subpath: &Path,
) -> validation::Result<()> {
let path = absolute_package_dir.join(subpath);
if path.is_symlink() {
Ok(if path.is_symlink() {
// Check whether the symlink resolves to outside the package directory
match path.canonicalize() {
Ok(target) => {
// No need to handle the case of it being inside the directory, since we scan through the
// entire directory recursively anyways
if let Err(_prefix_error) = target.strip_prefix(context.absolute_package_dir) {
context.error_writer.write(&format!(
"{}: Path {} is a symlink pointing to a path outside the directory of that package.",
context.relative_package_dir.display(),
subpath.display(),
))?;
if let Err(_prefix_error) = target.strip_prefix(absolute_package_dir) {
NixpkgsProblem::OutsideSymlink {
relative_package_dir: relative_package_dir.to_path_buf(),
subpath: subpath.to_path_buf(),
}
.into()
} else {
Success(())
}
}
Err(e) => {
context.error_writer.write(&format!(
"{}: Path {} is a symlink which cannot be resolved: {e}.",
context.relative_package_dir.display(),
subpath.display(),
))?;
Err(io_error) => NixpkgsProblem::UnresolvableSymlink {
relative_package_dir: relative_package_dir.to_path_buf(),
subpath: subpath.to_path_buf(),
io_error,
}
.into(),
}
} else if path.is_dir() {
// Recursively check each entry
for entry in utils::read_dir_sorted(&path)? {
let entry_subpath = subpath.join(entry.file_name());
check_path(context, &entry_subpath)
.context(format!("Error while recursing into {}", subpath.display()))?
}
validation::sequence_(
utils::read_dir_sorted(&path)?
.into_iter()
.map(|entry| {
let entry_subpath = subpath.join(entry.file_name());
check_path(relative_package_dir, absolute_package_dir, &entry_subpath)
.context(format!("Error while recursing into {}", subpath.display()))
})
.collect_vec()?,
)
} else if path.is_file() {
// Only check Nix files
if let Some(ext) = path.extension() {
if ext == OsStr::new("nix") {
check_nix_file(context, subpath).context(format!(
"Error while checking Nix file {}",
subpath.display()
))?
check_nix_file(relative_package_dir, absolute_package_dir, subpath).context(
format!("Error while checking Nix file {}", subpath.display()),
)?
} else {
Success(())
}
} else {
Success(())
}
} else {
// This should never happen, git doesn't support other file types
anyhow::bail!("Unsupported file type for path {}", subpath.display());
}
Ok(())
})
}
/// Check whether a nix file contains path expression references pointing outside the package
/// directory
fn check_nix_file<W: io::Write>(
context: &mut PackageContext<W>,
fn check_nix_file(
relative_package_dir: &Path,
absolute_package_dir: &Path,
subpath: &Path,
) -> anyhow::Result<()> {
let path = context.absolute_package_dir.join(subpath);
) -> validation::Result<()> {
let path = absolute_package_dir.join(subpath);
let parent_dir = path.parent().context(format!(
"Could not get parent of path {}",
subpath.display()
@ -110,75 +103,73 @@ fn check_nix_file<W: io::Write>(
let root = Root::parse(&contents);
if let Some(error) = root.errors().first() {
context.error_writer.write(&format!(
"{}: File {} could not be parsed by rnix: {}",
context.relative_package_dir.display(),
subpath.display(),
error,
))?;
return Ok(());
return Ok(NixpkgsProblem::CouldNotParseNix {
relative_package_dir: relative_package_dir.to_path_buf(),
subpath: subpath.to_path_buf(),
error: error.clone(),
}
.into());
}
let line_index = LineIndex::new(&contents);
for node in root.syntax().descendants() {
// We're only interested in Path expressions
if node.kind() != NODE_PATH {
continue;
}
Ok(validation::sequence_(root.syntax().descendants().map(
|node| {
let text = node.text().to_string();
let line = line_index.line(node.text_range().start().into());
let text = node.text().to_string();
let line = line_index.line(node.text_range().start().into());
// Filters out ./foo/${bar}/baz
// TODO: We can just check ./foo
if node.children().count() != 0 {
context.error_writer.write(&format!(
"{}: File {} at line {line} contains the path expression \"{}\", which is not yet supported and may point outside the directory of that package.",
context.relative_package_dir.display(),
subpath.display(),
text
))?;
continue;
}
// Filters out search paths like <nixpkgs>
if text.starts_with('<') {
context.error_writer.write(&format!(
"{}: File {} at line {line} contains the nix search path expression \"{}\" which may point outside the directory of that package.",
context.relative_package_dir.display(),
subpath.display(),
text
))?;
continue;
}
// Resolves the reference of the Nix path
// turning `../baz` inside `/foo/bar/default.nix` to `/foo/baz`
match parent_dir.join(Path::new(&text)).canonicalize() {
Ok(target) => {
// Then checking if it's still in the package directory
// No need to handle the case of it being inside the directory, since we scan through the
// entire directory recursively anyways
if let Err(_prefix_error) = target.strip_prefix(context.absolute_package_dir) {
context.error_writer.write(&format!(
"{}: File {} at line {line} contains the path expression \"{}\" which may point outside the directory of that package.",
context.relative_package_dir.display(),
subpath.display(),
if node.kind() != NODE_PATH {
// We're only interested in Path expressions
Success(())
} else if node.children().count() != 0 {
// Filters out ./foo/${bar}/baz
// TODO: We can just check ./foo
NixpkgsProblem::PathInterpolation {
relative_package_dir: relative_package_dir.to_path_buf(),
subpath: subpath.to_path_buf(),
line,
text,
}
.into()
} else if text.starts_with('<') {
// Filters out search paths like <nixpkgs>
NixpkgsProblem::SearchPath {
relative_package_dir: relative_package_dir.to_path_buf(),
subpath: subpath.to_path_buf(),
line,
text,
}
.into()
} else {
// Resolves the reference of the Nix path
// turning `../baz` inside `/foo/bar/default.nix` to `/foo/baz`
match parent_dir.join(Path::new(&text)).canonicalize() {
Ok(target) => {
// Then checking if it's still in the package directory
// No need to handle the case of it being inside the directory, since we scan through the
// entire directory recursively anyways
if let Err(_prefix_error) = target.strip_prefix(absolute_package_dir) {
NixpkgsProblem::OutsidePathReference {
relative_package_dir: relative_package_dir.to_path_buf(),
subpath: subpath.to_path_buf(),
line,
text,
}
.into()
} else {
Success(())
}
}
Err(e) => NixpkgsProblem::UnresolvablePathReference {
relative_package_dir: relative_package_dir.to_path_buf(),
subpath: subpath.to_path_buf(),
line,
text,
))?;
io_error: e,
}
.into(),
}
}
Err(e) => {
context.error_writer.write(&format!(
"{}: File {} at line {line} contains the path expression \"{}\" which cannot be resolved: {e}.",
context.relative_package_dir.display(),
subpath.display(),
text,
))?;
}
};
}
Ok(())
},
)))
}

View File

@ -1,152 +1,170 @@
use crate::nixpkgs_problem::NixpkgsProblem;
use crate::references;
use crate::utils;
use crate::utils::ErrorWriter;
use crate::utils::{BASE_SUBPATH, PACKAGE_NIX_FILENAME};
use crate::validation::{self, ResultIteratorExt, Validation::Success};
use itertools::concat;
use lazy_static::lazy_static;
use regex::Regex;
use std::collections::HashMap;
use std::io;
use std::fs::DirEntry;
use std::path::{Path, PathBuf};
pub const BASE_SUBPATH: &str = "pkgs/by-name";
pub const PACKAGE_NIX_FILENAME: &str = "package.nix";
lazy_static! {
static ref SHARD_NAME_REGEX: Regex = Regex::new(r"^[a-z0-9_-]{1,2}$").unwrap();
static ref PACKAGE_NAME_REGEX: Regex = Regex::new(r"^[a-zA-Z0-9_-]+$").unwrap();
}
/// Contains information about the structure of the pkgs/by-name directory of a Nixpkgs
pub struct Nixpkgs {
/// The path to nixpkgs
pub path: PathBuf,
/// The names of all packages declared in pkgs/by-name
pub package_names: Vec<String>,
// Some utility functions for the basic structure
pub fn shard_for_package(package_name: &str) -> String {
package_name.to_lowercase().chars().take(2).collect()
}
impl Nixpkgs {
// Some utility functions for the basic structure
pub fn shard_for_package(package_name: &str) -> String {
package_name.to_lowercase().chars().take(2).collect()
}
pub fn relative_dir_for_shard(shard_name: &str) -> PathBuf {
PathBuf::from(format!("{BASE_SUBPATH}/{shard_name}"))
}
pub fn relative_dir_for_package(package_name: &str) -> PathBuf {
Nixpkgs::relative_dir_for_shard(&Nixpkgs::shard_for_package(package_name))
.join(package_name)
}
pub fn relative_file_for_package(package_name: &str) -> PathBuf {
Nixpkgs::relative_dir_for_package(package_name).join(PACKAGE_NIX_FILENAME)
}
pub fn relative_dir_for_shard(shard_name: &str) -> PathBuf {
PathBuf::from(format!("{BASE_SUBPATH}/{shard_name}"))
}
impl Nixpkgs {
/// Read the structure of a Nixpkgs directory, displaying errors on the writer.
/// May return early with I/O errors.
pub fn new<W: io::Write>(
path: &Path,
error_writer: &mut ErrorWriter<W>,
) -> anyhow::Result<Nixpkgs> {
let base_dir = path.join(BASE_SUBPATH);
pub fn relative_dir_for_package(package_name: &str) -> PathBuf {
relative_dir_for_shard(&shard_for_package(package_name)).join(package_name)
}
let mut package_names = Vec::new();
pub fn relative_file_for_package(package_name: &str) -> PathBuf {
relative_dir_for_package(package_name).join(PACKAGE_NIX_FILENAME)
}
for shard_entry in utils::read_dir_sorted(&base_dir)? {
/// Check the structure of Nixpkgs, returning the attribute names that are defined in
/// `pkgs/by-name`
pub fn check_structure(path: &Path) -> validation::Result<Vec<String>> {
let base_dir = path.join(BASE_SUBPATH);
let shard_results = utils::read_dir_sorted(&base_dir)?
.into_iter()
.map(|shard_entry| -> validation::Result<_> {
let shard_path = shard_entry.path();
let shard_name = shard_entry.file_name().to_string_lossy().into_owned();
let relative_shard_path = Nixpkgs::relative_dir_for_shard(&shard_name);
let relative_shard_path = relative_dir_for_shard(&shard_name);
if shard_name == "README.md" {
Ok(if shard_name == "README.md" {
// README.md is allowed to be a file and not checked
continue;
}
if !shard_path.is_dir() {
error_writer.write(&format!(
"{}: This is a file, but it should be a directory.",
relative_shard_path.display(),
))?;
Success(vec![])
} else if !shard_path.is_dir() {
NixpkgsProblem::ShardNonDir {
relative_shard_path: relative_shard_path.clone(),
}
.into()
            // we can't check for any other errors if it's a file, since there are no subdirectories to check
continue;
}
let shard_name_valid = SHARD_NAME_REGEX.is_match(&shard_name);
if !shard_name_valid {
error_writer.write(&format!(
"{}: Invalid directory name \"{shard_name}\", must be at most 2 ASCII characters consisting of a-z, 0-9, \"-\" or \"_\".",
relative_shard_path.display()
))?;
}
let mut unique_package_names = HashMap::new();
for package_entry in utils::read_dir_sorted(&shard_path)? {
let package_path = package_entry.path();
let package_name = package_entry.file_name().to_string_lossy().into_owned();
let relative_package_dir =
PathBuf::from(format!("{BASE_SUBPATH}/{shard_name}/{package_name}"));
if !package_path.is_dir() {
error_writer.write(&format!(
"{}: This path is a file, but it should be a directory.",
relative_package_dir.display(),
))?;
continue;
}
if let Some(duplicate_package_name) =
unique_package_names.insert(package_name.to_lowercase(), package_name.clone())
{
error_writer.write(&format!(
"{}: Duplicate case-sensitive package directories \"{duplicate_package_name}\" and \"{package_name}\".",
relative_shard_path.display(),
))?;
}
let package_name_valid = PACKAGE_NAME_REGEX.is_match(&package_name);
if !package_name_valid {
error_writer.write(&format!(
"{}: Invalid package directory name \"{package_name}\", must be ASCII characters consisting of a-z, A-Z, 0-9, \"-\" or \"_\".",
relative_package_dir.display(),
))?;
}
let correct_relative_package_dir = Nixpkgs::relative_dir_for_package(&package_name);
if relative_package_dir != correct_relative_package_dir {
// Only show this error if we have a valid shard and package name
// Because if one of those is wrong, you should fix that first
if shard_name_valid && package_name_valid {
error_writer.write(&format!(
"{}: Incorrect directory location, should be {} instead.",
relative_package_dir.display(),
correct_relative_package_dir.display(),
))?;
} else {
let shard_name_valid = SHARD_NAME_REGEX.is_match(&shard_name);
let result = if !shard_name_valid {
NixpkgsProblem::InvalidShardName {
relative_shard_path: relative_shard_path.clone(),
shard_name: shard_name.clone(),
}
}
.into()
} else {
Success(())
};
let package_nix_path = package_path.join(PACKAGE_NIX_FILENAME);
if !package_nix_path.exists() {
error_writer.write(&format!(
"{}: Missing required \"{PACKAGE_NIX_FILENAME}\" file.",
relative_package_dir.display(),
))?;
} else if package_nix_path.is_dir() {
error_writer.write(&format!(
"{}: \"{PACKAGE_NIX_FILENAME}\" must be a file.",
relative_package_dir.display(),
))?;
}
let entries = utils::read_dir_sorted(&shard_path)?;
package_names.push(package_name.clone());
}
}
let duplicate_results = entries
.iter()
.zip(entries.iter().skip(1))
.filter(|(l, r)| {
l.file_name().to_ascii_lowercase() == r.file_name().to_ascii_lowercase()
})
.map(|(l, r)| {
NixpkgsProblem::CaseSensitiveDuplicate {
relative_shard_path: relative_shard_path.clone(),
first: l.file_name(),
second: r.file_name(),
}
.into()
});
Ok(Nixpkgs {
path: path.to_owned(),
package_names,
let result = result.and(validation::sequence_(duplicate_results));
let package_results = entries
.into_iter()
.map(|package_entry| {
check_package(path, &shard_name, shard_name_valid, package_entry)
})
.collect_vec()?;
result.and(validation::sequence(package_results))
})
})
}
.collect_vec()?;
    // Combine the package names contained within each shard into a longer list
Ok(validation::sequence(shard_results).map(concat))
}
fn check_package(
path: &Path,
shard_name: &str,
shard_name_valid: bool,
package_entry: DirEntry,
) -> validation::Result<String> {
let package_path = package_entry.path();
let package_name = package_entry.file_name().to_string_lossy().into_owned();
let relative_package_dir = PathBuf::from(format!("{BASE_SUBPATH}/{shard_name}/{package_name}"));
Ok(if !package_path.is_dir() {
NixpkgsProblem::PackageNonDir {
relative_package_dir: relative_package_dir.clone(),
}
.into()
} else {
let package_name_valid = PACKAGE_NAME_REGEX.is_match(&package_name);
let result = if !package_name_valid {
NixpkgsProblem::InvalidPackageName {
relative_package_dir: relative_package_dir.clone(),
package_name: package_name.clone(),
}
.into()
} else {
Success(())
};
let correct_relative_package_dir = relative_dir_for_package(&package_name);
let result = result.and(if relative_package_dir != correct_relative_package_dir {
// Only show this error if we have a valid shard and package name
// Because if one of those is wrong, you should fix that first
if shard_name_valid && package_name_valid {
NixpkgsProblem::IncorrectShard {
relative_package_dir: relative_package_dir.clone(),
correct_relative_package_dir: correct_relative_package_dir.clone(),
}
.into()
} else {
Success(())
}
} else {
Success(())
});
let package_nix_path = package_path.join(PACKAGE_NIX_FILENAME);
let result = result.and(if !package_nix_path.exists() {
NixpkgsProblem::PackageNixNonExistent {
relative_package_dir: relative_package_dir.clone(),
}
.into()
} else if package_nix_path.is_dir() {
NixpkgsProblem::PackageNixDir {
relative_package_dir: relative_package_dir.clone(),
}
.into()
} else {
Success(())
});
let result = result.and(references::check_references(
&relative_package_dir,
&path.join(&relative_package_dir),
)?);
result.map(|_| package_name.clone())
})
}

View File

@ -1,9 +1,11 @@
use anyhow::Context;
use colored::Colorize;
use std::fs;
use std::io;
use std::path::Path;
pub const BASE_SUBPATH: &str = "pkgs/by-name";
pub const PACKAGE_NIX_FILENAME: &str = "package.nix";
/// Deterministic file listing so that tests are reproducible
pub fn read_dir_sorted(base_dir: &Path) -> anyhow::Result<Vec<fs::DirEntry>> {
let listing = base_dir
@ -47,26 +49,3 @@ impl LineIndex {
}
}
}
/// A small wrapper around a generic io::Write specifically for errors:
/// - Print everything in red to signal it's an error
/// - Keep track of whether anything was printed at all, so that
/// it can be queried whether any errors were encountered at all
pub struct ErrorWriter<W> {
pub writer: W,
pub empty: bool,
}
impl<W: io::Write> ErrorWriter<W> {
pub fn new(writer: W) -> ErrorWriter<W> {
ErrorWriter {
writer,
empty: true,
}
}
pub fn write(&mut self, string: &str) -> io::Result<()> {
self.empty = false;
writeln!(self.writer, "{}", string.red())
}
}

View File

@ -0,0 +1,102 @@
use crate::nixpkgs_problem::NixpkgsProblem;
use itertools::concat;
use itertools::{
Either::{Left, Right},
Itertools,
};
use Validation::*;
/// The validation result of a check.
/// Instead of exiting at the first failure,
/// this type can accumulate multiple failures.
/// This can be achieved using the functions `and`, `sequence` and `sequence_`
///
/// This leans on https://hackage.haskell.org/package/validation
pub enum Validation<A> {
Failure(Vec<NixpkgsProblem>),
Success(A),
}
impl<A> From<NixpkgsProblem> for Validation<A> {
/// Create a `Validation<A>` from a single check problem
fn from(value: NixpkgsProblem) -> Self {
Failure(vec![value])
}
}
/// A type alias representing the result of a check, either:
/// - Err(anyhow::Error): A fatal failure, typically I/O errors.
/// Such failures are not caused by the files in Nixpkgs.
/// This hints at a bug in the code or a problem with the deployment.
/// - Ok(Failure(Vec<NixpkgsProblem>)): A non-fatal validation problem with the Nixpkgs files.
/// Further checks can be run even with this result type.
/// Such problems can be fixed by changing the Nixpkgs files.
/// - Ok(Success(A)): A successful (potentially intermediate) result with an arbitrary value.
/// No fatal errors have occurred and no validation problems have been found with Nixpkgs.
pub type Result<A> = anyhow::Result<Validation<A>>;
pub trait ResultIteratorExt<A, E>: Sized + Iterator<Item = std::result::Result<A, E>> {
fn collect_vec(self) -> std::result::Result<Vec<A>, E>;
}
impl<I, A, E> ResultIteratorExt<A, E> for I
where
I: Sized + Iterator<Item = std::result::Result<A, E>>,
{
/// A convenience version of `collect` specialised to a vector
fn collect_vec(self) -> std::result::Result<Vec<A>, E> {
self.collect()
}
}
impl<A> Validation<A> {
/// Map a `Validation<A>` to a `Validation<B>` by applying a function to the
/// potentially contained value in case of success.
pub fn map<B>(self, f: impl FnOnce(A) -> B) -> Validation<B> {
match self {
Failure(err) => Failure(err),
Success(value) => Success(f(value)),
}
}
}
impl Validation<()> {
/// Combine two validations, both of which need to be successful for the return value to be successful.
/// The `NixpkgsProblem`s of both sides are returned concatenated.
pub fn and<A>(self, other: Validation<A>) -> Validation<A> {
match (self, other) {
(Success(_), Success(right_value)) => Success(right_value),
(Failure(errors), Success(_)) => Failure(errors),
(Success(_), Failure(errors)) => Failure(errors),
(Failure(errors_l), Failure(errors_r)) => Failure(concat([errors_l, errors_r])),
}
}
}
/// Combine many validations into a single one.
/// All given validations need to be successful in order for the returned validation to be successful,
/// in which case the returned validation value contains a `Vec` of each individual value.
/// Otherwise the `NixpkgsProblem`s of all validations are returned concatenated.
pub fn sequence<A>(check_results: impl IntoIterator<Item = Validation<A>>) -> Validation<Vec<A>> {
let (errors, values): (Vec<Vec<NixpkgsProblem>>, Vec<A>) = check_results
.into_iter()
.partition_map(|validation| match validation {
Failure(err) => Left(err),
Success(value) => Right(value),
});
// To combine the errors from the results we flatten all the error Vec's into a new Vec
// This is not very efficient, but doesn't matter because generally we should have no errors
let flattened_errors = errors.into_iter().concat();
if flattened_errors.is_empty() {
Success(values)
} else {
Failure(flattened_errors)
}
}
/// Like `sequence`, but without any containing value, for convenience
pub fn sequence_(validations: impl IntoIterator<Item = Validation<()>>) -> Validation<()> {
sequence(validations).map(|_| ())
}

View File

@ -9,6 +9,7 @@
, yarn
, fixup_yarn_lock
, nodejs
, fetchpatch
, server-mode ? true
}:
@ -26,7 +27,61 @@ let
# keep the scope, as it is used throughout the derivation and tests
# this also makes potential future overrides easier
pythonPackages = python3.pkgs.overrideScope (final: prev: rec { });
pythonPackages = python3.pkgs.overrideScope (final: prev: rec {
# pgadmin 7.8 is incompatible with Flask >= 2.3
flask = prev.flask.overridePythonAttrs (oldAttrs: rec {
version = "2.2.5";
src = oldAttrs.src.override {
pname = "Flask";
inherit version;
hash = "sha256-7e6bCn/yZiG9WowQ/0hK4oc3okENmbC7mmhQx/uXeqA=";
};
format = "setuptools";
});
# downgrade needed for older Flask
httpbin = prev.httpbin.overridePythonAttrs (oldAttrs: rec {
version = "0.7.0";
src = oldAttrs.src.override {
inherit version;
hash = "sha256-y7N3kMkVdfTxV1f0KtQdn3KesifV7b6J5OwXVIbbjfo=";
};
format = "setuptools";
patches = [
(fetchpatch {
          # Replaces BaseResponse class with Response class for Werkzeug 2.1.0 compatibility
# https://github.com/postmanlabs/httpbin/pull/674
url = "https://github.com/postmanlabs/httpbin/commit/5cc81ce87a3c447a127e4a1a707faf9f3b1c9b6b.patch";
hash = "sha256-SbEWjiqayMFYrbgAPZtSsXqSyCDUz3z127XgcKOcrkE=";
})
];
pytestFlagsArray = [
"test_httpbin.py"
];
propagatedBuildInputs = oldAttrs.propagatedBuildInputs ++ [ final.pythonPackages.brotlipy ];
});
# downgrade needed for older httpbin
werkzeug = prev.werkzeug.overridePythonAttrs (oldAttrs: rec {
version = "2.2.3";
format = "setuptools";
src = oldAttrs.src.override {
pname = "Werkzeug";
inherit version;
hash = "sha256-LhzMlBfU2jWLnebxdOOsCUOR6h1PvvLWZ4ZdgZ39Cv4=";
};
});
# Downgrade needed for older Flask
flask-security-too = prev.flask-security-too.overridePythonAttrs (oldAttrs: rec {
version = "5.1.2";
src = oldAttrs.src.override {
inherit version;
hash = "sha256-lZzm43m30y+2qjxNddFEeg9HDlQP9afq5VtuR25zaLc=";
};
postPatch = ''
# This should be removed after updating to version 5.3.0.
sed -i '/filterwarnings =/a ignore:pkg_resources is deprecated:DeprecationWarning' pytest.ini
'';
});
});
offlineCache = fetchYarnDeps {
yarnLock = ./yarn.lock;

View File

@ -18,11 +18,13 @@
, zlib
, openssl
, libedit
, ldns
, pkg-config
, pam
, libredirect
, etcDir ? null
, withKerberos ? true
, withLdns ? true
, libkrb5
, libfido2
, hostname
@ -64,6 +66,7 @@ stdenv.mkDerivation {
buildInputs = [ zlib openssl libedit ]
++ lib.optional withFIDO libfido2
++ lib.optional withKerberos libkrb5
++ lib.optional withLdns ldns
++ lib.optional withPAM pam;
preConfigure = ''
@ -87,6 +90,7 @@ stdenv.mkDerivation {
++ lib.optional withKerberos (assert libkrb5 != null; "--with-kerberos5=${libkrb5}")
++ lib.optional stdenv.isDarwin "--disable-libutil"
++ lib.optional (!linkOpenssl) "--without-openssl"
++ lib.optional withLdns "--with-ldns"
++ extraConfigureFlags;
${if stdenv.hostPlatform.isStatic then "NIX_LDFLAGS" else null}= [ "-laudit" ] ++ lib.optionals withKerberos [ "-lkeyutils" ];

View File

@ -20,13 +20,13 @@
let
pname = "cie-middleware-linux";
version = "1.4.4.0";
version = "1.5.0";
src = fetchFromGitHub {
owner = "M0rf30";
repo = pname;
rev = "${version}-podofo";
sha256 = "sha256-Kyr9OTiY6roJ/wVJS/1aWfrrzDNQbuRTJQqo0akbMUU=";
rev = version;
sha256 = "sha256-Z8K2Ibg5bBfSql5HEapKgdfiCf/EIKTTD15oVeysQGk=";
};
gradle = gradle_7;
@ -44,6 +44,7 @@ let
buildPhase = ''
# Run the fetchDeps task
export GRADLE_USER_HOME=$(mktemp -d)
ls -l
gradle --no-daemon -b cie-java/build.gradle fetchDeps
'';
@ -60,7 +61,7 @@ let
outputHashAlgo = "sha256";
outputHashMode = "recursive";
outputHash = "sha256-WzT5vYF9yCMU2A7EkLZyjgWrN3gD7pnkPXc3hDFqpD8=";
outputHash = "sha256-jtaH8dBpnx8KMJe+jzJfkvcx1NO4nL5jsRO4+GI+d0c=";
};
in
@ -84,7 +85,7 @@ stdenv.mkDerivation {
buildInputs = [
cryptopp
fontconfig
podofo
podofo.dev
openssl
pcsclite
curl
@ -95,6 +96,10 @@ stdenv.mkDerivation {
# substitute the cieid command with this $out/bin/cieid
substituteInPlace libs/pkcs11/src/CSP/AbilitaCIE.cpp \
--replace 'file = "cieid"' 'file = "'$out'/bin/cieid"'
# revert https://github.com/M0Rf30/cie-middleware-linux/commit/1a389d8
sed -i libs/meson.build \
-e "s@podofo_dep = .\+@podofo_dep = dependency('libpodofo')@g"
'';
# Note: we use pushd/popd to juggle between the

View File

@ -494,6 +494,7 @@ mapAliases ({
linuxPackages_6_3 = linuxKernel.packages.linux_6_3;
linuxPackages_6_4 = linuxKernel.packages.linux_6_4;
linuxPackages_6_5 = linuxKernel.packages.linux_6_5;
linuxPackages_6_6 = linuxKernel.packages.linux_6_6;
linuxPackages_rpi0 = linuxKernel.packages.linux_rpi1;
linuxPackages_rpi02w = linuxKernel.packages.linux_rpi3;
linuxPackages_rpi1 = linuxKernel.packages.linux_rpi1;
@ -518,6 +519,7 @@ mapAliases ({
linux_6_3 = linuxKernel.kernels.linux_6_3;
linux_6_4 = linuxKernel.kernels.linux_6_4;
linux_6_5 = linuxKernel.kernels.linux_6_5;
linux_6_6 = linuxKernel.kernels.linux_6_6;
linux_rpi0 = linuxKernel.kernels.linux_rpi1;
linux_rpi02w = linuxKernel.kernels.linux_rpi3;
linux_rpi1 = linuxKernel.kernels.linux_rpi1;

View File

@ -178,6 +178,14 @@ in {
];
};
linux_6_6 = callPackage ../os-specific/linux/kernel/mainline.nix {
branch = "6.6";
kernelPatches = [
kernelPatches.bridge_stp_helper
kernelPatches.request_key_helper
];
};
linux_testing = let
testing = callPackage ../os-specific/linux/kernel/mainline.nix {
# A special branch that tracks the kernel under the release process
@ -567,6 +575,7 @@ in {
linux_5_15 = recurseIntoAttrs (packagesFor kernels.linux_5_15);
linux_6_1 = recurseIntoAttrs (packagesFor kernels.linux_6_1);
linux_6_5 = recurseIntoAttrs (packagesFor kernels.linux_6_5);
linux_6_6 = recurseIntoAttrs (packagesFor kernels.linux_6_6);
} // lib.optionalAttrs config.allowAliases {
linux_4_9 = throw "linux 4.9 was removed because it will reach its end of life within 22.11"; # Added 2022-11-08
linux_4_14 = throw "linux 4.14 was removed because it will reach its end of life within 23.11"; # Added 2023-10-11
@ -627,7 +636,7 @@ in {
packageAliases = {
linux_default = packages.linux_6_1;
# Update this when adding the newest kernel major version!
linux_latest = packages.linux_6_5;
linux_latest = packages.linux_6_6;
linux_mptcp = throw "'linux_mptcp' has been moved to https://github.com/teto/mptcp-flake";
linux_rt_default = packages.linux_rt_5_4;
linux_rt_latest = packages.linux_rt_6_1;

View File

@ -15666,6 +15666,8 @@ self: super: with self; {
withings-api = callPackage ../development/python-modules/withings-api { };
withings-sync = callPackage ../development/python-modules/withings-sync { };
wktutils = callPackage ../development/python-modules/wktutils { };
wled = callPackage ../development/python-modules/wled { };