Merge branch 'master' into fix-sandbox-escape

John Ericson 2024-06-26 18:11:39 -04:00
commit 8a420162ab
274 changed files with 3295 additions and 900 deletions


@ -4,20 +4,20 @@
# Top-most EditorConfig file
root = true
- # Unix-style newlines with a newline ending every file, utf-8 charset
+ # Unix-style newlines with a newline ending every file, UTF-8 charset
[*]
end_of_line = lf
insert_final_newline = true
trim_trailing_whitespace = true
charset = utf-8
- # Match nix files, set indent to spaces with width of two
+ # Match Nix files, set indent to spaces with width of two
[*.nix]
indent_style = space
indent_size = 2
- # Match c++/shell/perl, set indent to spaces with width of four
- [*.{hpp,cc,hh,sh,pl,xs}]
+ # Match C++/C/shell/Perl, set indent to spaces with width of four
+ [*.{hpp,cc,hh,c,h,sh,pl,xs}]
indent_style = space
indent_size = 4


@ -31,6 +31,23 @@ jobs:
name: '${{ env.CACHIX_NAME }}'
signingKey: '${{ secrets.CACHIX_SIGNING_KEY }}'
authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}'
- if: matrix.os == 'ubuntu-latest'
run: |
free -h
swapon --show
swap=$(swapon --show --noheadings | head -n 1 | awk '{print $1}')
echo "Found swap: $swap"
sudo swapoff $swap
# resize it (fallocate)
sudo fallocate -l 10G $swap
sudo mkswap $swap
sudo swapon $swap
free -h
(
while sleep 60; do
free -h
done
) &
- run: nix --experimental-features 'nix-command flakes' flake check -L
# Steps to test CI automation in your own fork.
@ -175,4 +192,16 @@ jobs:
- uses: actions/checkout@v4
- uses: DeterminateSystems/nix-installer-action@main
- uses: DeterminateSystems/magic-nix-cache-action@main
- - run: nix build -L .#hydraJobs.tests.githubFlakes .#hydraJobs.tests.tarballFlakes
+ - run: nix build -L .#hydraJobs.tests.githubFlakes .#hydraJobs.tests.tarballFlakes .#hydraJobs.tests.functional_user
meson_build:
strategy:
fail-fast: false
matrix:
os: [ubuntu-latest, macos-latest]
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v4
- uses: DeterminateSystems/nix-installer-action@main
- uses: DeterminateSystems/magic-nix-cache-action@main
- run: nix build -L .#hydraJobs.build.{nix-fetchers,nix-store,nix-util}.$(nix-instantiate --eval --expr builtins.currentSystem | sed -e 's/"//g')


@ -1,12 +0,0 @@
diff --git a/include/gc_allocator.h b/include/gc_allocator.h
index 597c7f13..587286be 100644
--- a/include/gc_allocator.h
+++ b/include/gc_allocator.h
@@ -312,6 +312,7 @@ public:
template<>
class traceable_allocator<void> {
+public:
typedef size_t size_type;
typedef ptrdiff_t difference_type;
typedef void* pointer;


@ -1,7 +1,7 @@
// redirect rules for URL fragments (client-side) to prevent link rot.
// this must be done on the client side, as web servers do not see the fragment part of the URL.
// it will only work with JavaScript enabled in the browser, but this is the best we can do here.
- // see ./_redirects for path redirects (client-side)
+ // see src/_redirects for path redirects (server-side)
// redirects are declared as follows:
// each entry has as its key a path matching the requested URL path, relative to the mdBook document root.


@ -1,5 +1,5 @@
# redirect rules for paths (server-side) to prevent link rot.
- # see ./redirects.js for redirects based on URL fragments (client-side)
+ # see ../redirects.js for redirects based on URL fragments (client-side)
#
# concrete user story this supports:
# - user finds URL to the manual for Nix x.y


@ -114,6 +114,8 @@ On other platforms they wouldn't be run at all.
The functional tests reside under the `tests/functional` directory and are listed in `tests/functional/local.mk`.
Each test is a bash script.
Functional tests are run during `installCheck` in the `nix` package build, as well as separately from the build, in VM tests.
### Running the whole test suite
The whole test suite can be run with:
@ -252,13 +254,30 @@ Regressions are caught, and improvements always show up in code review.
To ensure that characterisation testing doesn't make it harder to intentionally change these interfaces, there always must be an easy way to regenerate the expected output, as we do with `_NIX_TEST_ACCEPT=1`.
### Running functional tests on NixOS
We run the functional tests not just in the build, but also in VM tests.
This helps us ensure that Nix works correctly on NixOS, and in environments with similar characteristics that are hard to reproduce in a build environment.
The recommended way to run these tests during development is:
```shell
nix build .#hydraJobs.tests.functional_user.quickBuild
```
The `quickBuild` attribute configures the test to use a `nix` package that's built without integration tests, so that you can iterate on the tests without performing recompilations due to the changed sources for `installCheck`.
Generally, this build is sufficient, but in nightly or CI we also test the attributes `functional_root` and `functional_trusted`, in which the test suite is run with different levels of authorization.
## Integration tests
The integration tests are defined in the Nix flake under the `hydraJobs.tests` attribute.
These tests include everything that needs to interact with external services or run Nix in a non-trivial distributed setup.
Because these tests are expensive and require more than what the standard github-actions setup provides, they only run on the master branch (on <https://hydra.nixos.org/jobset/nix/master>).
- You can run them manually with `nix build .#hydraJobs.tests.{testName}` or `nix-build -A hydraJobs.tests.{testName}`
+ You can run them manually with `nix build .#hydraJobs.tests.{testName}` or `nix-build -A hydraJobs.tests.{testName}`.
If you are testing a build of `nix` that you haven't compiled yet, you may iterate faster by appending the `quickBuild` attribute: `nix build .#hydraJobs.tests.{testName}.quickBuild`.
## Installer tests


@ -302,6 +302,12 @@ Derivations can declare some infrequently used optional attributes.
(associative) arrays. For example, the attribute `hardening.format = true`
ends up as the Bash associative array element `${hardening[format]}`.
> **Warning**
>
> If set to `true`, other advanced attributes such as [`allowedReferences`](#adv-attr-allowedReferences), [`allowedRequisites`](#adv-attr-allowedRequisites),
> [`disallowedReferences`](#adv-attr-disallowedReferences), [`disallowedRequisites`](#adv-attr-disallowedRequisites), `maxSize`, and `maxClosureSize`
> will have no effect.
- [`outputChecks`]{#adv-attr-outputChecks}\
  When using [structured attributes](#adv-attr-structuredAttrs), the `outputChecks`
  attribute allows defining checks per-output.
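  As an illustrative sketch (assuming a typical `stdenv`-based derivation; not taken verbatim from this diff), per-output checks look roughly like this:

  ```nix
  __structuredAttrs = true;

  outputChecks.out = {
    # The closure of the "out" output must not be larger than 256 MiB.
    maxClosureSize = 256 * 1024 * 1024;
    # It must not reference the C compiler, directly or indirectly.
    disallowedRequisites = [ stdenv.cc ];
  };
  ```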

flake.nix

@ -58,6 +58,18 @@
"stdenv"
];
/**
`flatMapAttrs attrs f` applies `f` to each attribute in `attrs` and
merges the results into a single attribute set.
This can be nested to form a build matrix where all the attributes
generated by the innermost `f` are returned as is.
(Provided that the names are unique.)
See https://nixos.org/manual/nixpkgs/stable/index.html#function-library-lib.attrsets.concatMapAttrs
*/
flatMapAttrs = attrs: f: lib.concatMapAttrs f attrs;
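# Illustration (not part of this change): given the definition above,
#   flatMapAttrs { a = 1; b = 2; } (name: value: { "${name}-x" = value; })
# evaluates to { a-x = 1; b-x = 2; }, since `lib.concatMapAttrs` merges the
# attribute set produced for each input attribute.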
forAllSystems = lib.genAttrs systems;
forAllCrossSystems = lib.genAttrs crossSystems;
@ -117,114 +129,25 @@
{
nixStable = prev.nix;
- default-busybox-sandbox-shell = final.busybox.override {
- useMusl = true;
- enableStatic = true;
- enableMinimal = true;
- extraConfig = ''
- CONFIG_FEATURE_FANCY_ECHO y
- CONFIG_FEATURE_SH_MATH y
- CONFIG_FEATURE_SH_MATH_64 y
- CONFIG_ASH y
- CONFIG_ASH_OPTIMIZE_FOR_SIZE y
- CONFIG_ASH_ALIAS y
- CONFIG_ASH_BASH_COMPAT y
- CONFIG_ASH_CMDCMD y
- CONFIG_ASH_ECHO y
- CONFIG_ASH_GETOPTS y
- CONFIG_ASH_INTERNAL_GLOB y
- CONFIG_ASH_JOB_CONTROL y
- CONFIG_ASH_PRINTF y
- CONFIG_ASH_TEST y
- '';
- };
- libgit2-nix = final.libgit2.overrideAttrs (attrs: {
- src = libgit2;
- version = libgit2.lastModifiedDate;
- cmakeFlags = attrs.cmakeFlags or []
- ++ [ "-DUSE_SSH=exec" ];
- });
- boehmgc-nix = final.boehmgc.override {
- enableLargeConfig = true;
- };
- libseccomp-nix = final.libseccomp.overrideAttrs (_: rec {
- version = "2.5.5";
- src = final.fetchurl {
- url = "https://github.com/seccomp/libseccomp/releases/download/v${version}/libseccomp-${version}.tar.gz";
- hash = "sha256-JIosik2bmFiqa69ScSw0r+/PnJ6Ut23OAsHJqiX7M3U=";
- };
- });
- nix-util = final.callPackage ./src/libutil/package.nix {
- inherit
- fileset
- stdenv
- officialRelease
- versionSuffix
- ;
- };
- nix-store = final.callPackage ./src/libstore/package.nix {
- inherit
- fileset
- stdenv
- officialRelease
- versionSuffix
- ;
- libseccomp = final.libseccomp-nix;
- busybox-sandbox-shell = final.busybox-sandbox-shell or final.default-busybox-sandbox-shell;
- };
- nix-fetchers = final.callPackage ./src/libfetchers/package.nix {
- inherit
- fileset
- stdenv
- officialRelease
- versionSuffix
- ;
- };
- nix =
- final.callPackage ./package.nix {
- inherit
- fileset
- stdenv
- officialRelease
- versionSuffix
- ;
- boehmgc = final.boehmgc-nix;
- libgit2 = final.libgit2-nix;
- libseccomp = final.libseccomp-nix;
- busybox-sandbox-shell = final.busybox-sandbox-shell or final.default-busybox-sandbox-shell;
- };
- nix-perl-bindings = final.callPackage ./src/perl/package.nix {
- inherit
- fileset
- stdenv
- versionSuffix
- ;
- };
- nix-internal-api-docs = final.callPackage ./src/internal-api-docs/package.nix {
- inherit
- fileset
- stdenv
- versionSuffix
- ;
- };
- nix-external-api-docs = final.callPackage ./src/external-api-docs/package.nix {
- inherit
- fileset
- stdenv
- versionSuffix
- ;
- };
+ # A new scope, so that we can use `callPackage` to inject our own interdependencies
+ # without "polluting" the top level "`pkgs`" attrset.
+ # This also has the benefit of providing us with a distinct set of packages
+ # we can iterate over.
+ nixComponents = lib.makeScope final.nixDependencies.newScope (import ./packaging/components.nix);
+ # The dependencies are in their own scope, so that they don't have to be
+ # in Nixpkgs top level `pkgs` or `nixComponents`.
+ nixDependencies = lib.makeScope final.newScope (import ./packaging/dependencies.nix {
+ inherit inputs stdenv versionSuffix;
+ pkgs = final;
+ });
+ nix = final.nixComponents.nix;
+ nix_noTests = final.nix.override {
+ doCheck = false;
+ doInstallCheck = false;
+ installUnitTests = false;
+ };
# See https://github.com/NixOS/nixpkgs/pull/214409
@ -241,7 +164,7 @@
# 'nix-perl-bindings' packages.
overlays.default = overlayFor (p: p.stdenv);
- hydraJobs = import ./maintainers/hydra.nix {
+ hydraJobs = import ./packaging/hydra.nix {
inherit
inputs
binaryTarball
@ -275,39 +198,61 @@
# the old build system is gone and we are back to one build
# system, we should reenable this.
#perlBindings = self.hydraJobs.perlBindings.${system};
- } // devFlake.checks.${system} or {}
+ }
+ # Add "passthru" tests
+ // flatMapAttrs ({
+ "" = nixpkgsFor.${system}.native;
+ } // lib.optionalAttrs (! nixpkgsFor.${system}.native.stdenv.hostPlatform.isDarwin) {
+ # TODO: enable static builds for darwin, blocked on:
+ # https://github.com/NixOS/nixpkgs/issues/320448
+ "static-" = nixpkgsFor.${system}.static;
+ })
+ (nixpkgsPrefix: nixpkgs:
+ flatMapAttrs nixpkgs.nixComponents
+ (pkgName: pkg:
+ flatMapAttrs pkg.tests or {}
+ (testName: test: {
+ "${nixpkgsPrefix}${pkgName}-${testName}" = test;
+ })
+ )
+ )
+ // devFlake.checks.${system} or {}
);
- packages = forAllSystems (system: {
- inherit (nixpkgsFor.${system}.native)
- changelog-d;
- default = self.packages.${system}.nix;
- nix-internal-api-docs = nixpkgsFor.${system}.native.nix-internal-api-docs;
- nix-external-api-docs = nixpkgsFor.${system}.native.nix-external-api-docs;
- } // lib.concatMapAttrs
- # We need to flatten recursive attribute sets of derivations to pass `flake check`.
- (pkgName: {}: {
- "${pkgName}" = nixpkgsFor.${system}.native.${pkgName};
- "${pkgName}-static" = nixpkgsFor.${system}.static.${pkgName};
- } // lib.concatMapAttrs
- (crossSystem: {}: {
- "${pkgName}-${crossSystem}" = nixpkgsFor.${system}.cross.${crossSystem}.${pkgName};
- })
- (lib.genAttrs crossSystems (_: { }))
- // lib.concatMapAttrs
- (stdenvName: {}: {
- "${pkgName}-${stdenvName}" = nixpkgsFor.${system}.stdenvs."${stdenvName}Packages".${pkgName};
- })
- (lib.genAttrs stdenvs (_: { })))
- {
- "nix" = { };
- # Temporarily disabled because GitHub Actions OOM issues. Once
- # the old build system is gone and we are back to one build
- # system, we should reenable these.
- #"nix-util" = { };
- #"nix-store" = { };
- #"nix-fetchers" = { };
- }
+ packages = forAllSystems (system:
+ { # Here we put attributes that map 1:1 into packages.<system>, ie
+ # for which we don't apply the full build matrix such as cross or static.
+ inherit (nixpkgsFor.${system}.native)
+ changelog-d;
+ default = self.packages.${system}.nix;
+ nix-internal-api-docs = nixpkgsFor.${system}.native.nixComponents.nix-internal-api-docs;
+ nix-external-api-docs = nixpkgsFor.${system}.native.nixComponents.nix-external-api-docs;
+ }
+ # We need to flatten recursive attribute sets of derivations to pass `flake check`.
+ // flatMapAttrs
+ { # Components we'll iterate over in the upcoming lambda
+ "nix" = { };
+ # Temporarily disabled because GitHub Actions OOM issues. Once
+ # the old build system is gone and we are back to one build
+ # system, we should reenable these.
+ #"nix-util" = { };
+ #"nix-store" = { };
+ #"nix-fetchers" = { };
+ }
+ (pkgName: {}: {
+ # These attributes go right into `packages.<system>`.
+ "${pkgName}" = nixpkgsFor.${system}.native.nixComponents.${pkgName};
+ "${pkgName}-static" = nixpkgsFor.${system}.static.nixComponents.${pkgName};
+ }
+ // flatMapAttrs (lib.genAttrs crossSystems (_: { })) (crossSystem: {}: {
+ # These attributes go right into `packages.<system>`.
+ "${pkgName}-${crossSystem}" = nixpkgsFor.${system}.cross.${crossSystem}.nixComponents.${pkgName};
+ })
+ // flatMapAttrs (lib.genAttrs stdenvs (_: { })) (stdenvName: {}: {
+ # These attributes go right into `packages.<system>`.
+ "${pkgName}-${stdenvName}" = nixpkgsFor.${system}.stdenvs."${stdenvName}Packages".nixComponents.${pkgName};
+ })
+ )
// lib.optionalAttrs (builtins.elem system linux64BitSystems) {
dockerImage =
let
@ -367,19 +312,19 @@
};
mesonFlags =
- map (transformFlag "libutil") pkgs.nix-util.mesonFlags
- ++ map (transformFlag "libstore") pkgs.nix-store.mesonFlags
- ++ map (transformFlag "libfetchers") pkgs.nix-fetchers.mesonFlags
- ++ lib.optionals havePerl (map (transformFlag "perl") pkgs.nix-perl-bindings.mesonFlags)
+ map (transformFlag "libutil") pkgs.nixComponents.nix-util.mesonFlags
+ ++ map (transformFlag "libstore") pkgs.nixComponents.nix-store.mesonFlags
+ ++ map (transformFlag "libfetchers") pkgs.nixComponents.nix-fetchers.mesonFlags
+ ++ lib.optionals havePerl (map (transformFlag "perl") pkgs.nixComponents.nix-perl-bindings.mesonFlags)
;
nativeBuildInputs = attrs.nativeBuildInputs or []
- ++ pkgs.nix-util.nativeBuildInputs
- ++ pkgs.nix-store.nativeBuildInputs
- ++ pkgs.nix-fetchers.nativeBuildInputs
- ++ lib.optionals havePerl pkgs.nix-perl-bindings.nativeBuildInputs
- ++ pkgs.nix-internal-api-docs.nativeBuildInputs
- ++ pkgs.nix-external-api-docs.nativeBuildInputs
+ ++ pkgs.nixComponents.nix-util.nativeBuildInputs
+ ++ pkgs.nixComponents.nix-store.nativeBuildInputs
+ ++ pkgs.nixComponents.nix-fetchers.nativeBuildInputs
+ ++ lib.optionals havePerl pkgs.nixComponents.nix-perl-bindings.nativeBuildInputs
+ ++ pkgs.nixComponents.nix-internal-api-docs.nativeBuildInputs
+ ++ pkgs.nixComponents.nix-external-api-docs.nativeBuildInputs
++ [
modular.pre-commit.settings.package
(pkgs.writeScriptBin "pre-commit-hooks-install"


@ -447,7 +447,6 @@
''^tests/unit/libfetchers/public-key\.cc''
''^tests/unit/libstore-support/tests/derived-path\.cc''
''^tests/unit/libstore-support/tests/derived-path\.hh''
- ''^tests/unit/libstore-support/tests/libstore\.hh''
''^tests/unit/libstore-support/tests/nix_api_store\.hh''
''^tests/unit/libstore-support/tests/outputs-spec\.cc''
''^tests/unit/libstore-support/tests/outputs-spec\.hh''
@ -522,6 +521,7 @@
''^tests/functional/ca/repl\.sh$''
''^tests/functional/ca/selfref-gc\.sh$''
''^tests/functional/ca/why-depends\.sh$''
+ ''^tests/functional/characterisation-test-infra\.sh$''
''^tests/functional/check\.sh$''
''^tests/functional/common/vars-and-functions\.sh$''
''^tests/functional/completions\.sh$''
@ -579,9 +579,7 @@
''^tests/functional/impure-env\.sh$''
''^tests/functional/impure-eval\.sh$''
''^tests/functional/install-darwin\.sh$''
- ''^tests/functional/lang-test-infra\.sh$''
''^tests/functional/lang\.sh$''
- ''^tests/functional/lang/framework\.sh$''
''^tests/functional/legacy-ssh-store\.sh$''
''^tests/functional/linux-sandbox\.sh$''
''^tests/functional/local-overlay-store/add-lower-inner\.sh$''


@ -12,3 +12,10 @@ subproject('libfetchers')
subproject('perl')
subproject('internal-api-docs')
subproject('external-api-docs')
# C wrappers
subproject('libutil-c')
# Testing
subproject('libutil-test-support')
subproject('libutil-test')


@ -1,12 +1,10 @@
{ lib
- , fetchurl
, stdenv
, releaseTools
, autoconf-archive
, autoreconfHook
, aws-sdk-cpp
, boehmgc
- , buildPackages
, nlohmann_json
, bison
, boost
@ -15,7 +13,6 @@
, curl
, editline
, readline
- , fileset
, flex
, git
, gtest
@ -50,7 +47,6 @@
, pname ? "nix"
, versionSuffix ? ""
- , officialRelease ? false
# Whether to build Nix. Useful to skip for tasks like testing existing pre-built versions of Nix
, doBuild ? true
@ -113,6 +109,8 @@
}:
let
+ inherit (lib) fileset;
version = lib.fileContents ./.version + versionSuffix;
# selected attributes with defaults, will be used to define some

packaging/components.nix (new file)

@ -0,0 +1,28 @@
scope:
let
inherit (scope) callPackage;
in
# This becomes the pkgs.nixComponents attribute set
{
nix = callPackage ../package.nix { };
nix-util = callPackage ../src/libutil/package.nix { };
nix-util-test-support = callPackage ../tests/unit/libutil-support/package.nix { };
nix-util-test = callPackage ../tests/unit/libutil/package.nix { };
nix-util-c = callPackage ../src/libutil-c/package.nix { };
nix-store = callPackage ../src/libstore/package.nix { };
nix-fetchers = callPackage ../src/libfetchers/package.nix { };
nix-perl-bindings = callPackage ../src/perl/package.nix { };
nix-internal-api-docs = callPackage ../src/internal-api-docs/package.nix { };
nix-external-api-docs = callPackage ../src/external-api-docs/package.nix { };
}


@ -0,0 +1,58 @@
# These overrides are applied to the dependencies of the Nix components.
{
# Flake inputs; used for sources
inputs,
# The raw Nixpkgs, not affected by this scope
pkgs,
stdenv,
versionSuffix,
}:
scope: {
inherit stdenv versionSuffix;
libseccomp = pkgs.libseccomp.overrideAttrs (_: rec {
version = "2.5.5";
src = pkgs.fetchurl {
url = "https://github.com/seccomp/libseccomp/releases/download/v${version}/libseccomp-${version}.tar.gz";
hash = "sha256-JIosik2bmFiqa69ScSw0r+/PnJ6Ut23OAsHJqiX7M3U=";
};
});
boehmgc = pkgs.boehmgc.override {
enableLargeConfig = true;
};
libgit2 = pkgs.libgit2.overrideAttrs (attrs: {
src = inputs.libgit2;
version = inputs.libgit2.lastModifiedDate;
cmakeFlags = attrs.cmakeFlags or []
++ [ "-DUSE_SSH=exec" ];
});
busybox-sandbox-shell = pkgs.busybox-sandbox-shell or (pkgs.busybox.override {
useMusl = true;
enableStatic = true;
enableMinimal = true;
extraConfig = ''
CONFIG_FEATURE_FANCY_ECHO y
CONFIG_FEATURE_SH_MATH y
CONFIG_FEATURE_SH_MATH_64 y
CONFIG_ASH y
CONFIG_ASH_OPTIMIZE_FOR_SIZE y
CONFIG_ASH_ALIAS y
CONFIG_ASH_BASH_COMPAT y
CONFIG_ASH_CMDCMD y
CONFIG_ASH_ECHO y
CONFIG_ASH_GETOPTS y
CONFIG_ASH_INTERNAL_GLOB y
CONFIG_ASH_JOB_CONTROL y
CONFIG_ASH_PRINTF y
CONFIG_ASH_TEST y
'';
});
}


@ -9,7 +9,6 @@
}:
let
inherit (inputs) nixpkgs nixpkgs-regression;
- inherit (lib) fileset;
installScriptFor = tarballs:
nixpkgsFor.x86_64-linux.native.callPackage ../scripts/installer.nix {
@ -25,17 +24,21 @@ let
lib.versionAtLeast client.version "2.4pre20211005")
"-${client.version}-against-${daemon.version}";
- inherit fileset;
test-client = client;
test-daemon = daemon;
doBuild = false;
};
# Technically we could just return `pkgs.nixComponents`, but for Hydra it's
# convention to transpose it, and to transpose it efficiently, we need to
# enumerate them manually, so that we don't evaluate unnecessary package sets.
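# Illustration (not part of this change): transposed this way, jobs are
# addressed as hydraJobs.build.<pkgName>.<system>, e.g.
# hydraJobs.build.nix-util.x86_64-linux, rather than by system first.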
forAllPackages = lib.genAttrs [
"nix"
"nix-util"
+ "nix-util-c"
+ "nix-util-test-support"
+ "nix-util-test"
"nix-store"
"nix-fetchers"
];
@ -43,28 +46,22 @@ in
{
# Binary package for various platforms.
build = forAllPackages (pkgName:
- forAllSystems (system: nixpkgsFor.${system}.native.${pkgName}));
+ forAllSystems (system: nixpkgsFor.${system}.native.nixComponents.${pkgName}));
shellInputs = forAllSystems (system: self.devShells.${system}.default.inputDerivation);
buildStatic = forAllPackages (pkgName:
- lib.genAttrs linux64BitSystems (system: nixpkgsFor.${system}.static.${pkgName}));
+ lib.genAttrs linux64BitSystems (system: nixpkgsFor.${system}.static.nixComponents.${pkgName}));
buildCross = forAllPackages (pkgName:
forAllCrossSystems (crossSystem:
- lib.genAttrs [ "x86_64-linux" ] (system: nixpkgsFor.${system}.cross.${crossSystem}.${pkgName})));
+ lib.genAttrs [ "x86_64-linux" ] (system: nixpkgsFor.${system}.cross.${crossSystem}.nixComponents.${pkgName})));
buildNoGc = forAllSystems (system:
self.packages.${system}.nix.override { enableGC = false; }
);
- buildNoTests = forAllSystems (system:
- self.packages.${system}.nix.override {
- doCheck = false;
- doInstallCheck = false;
- installUnitTests = false;
- }
- );
+ buildNoTests = forAllSystems (system: nixpkgsFor.${system}.native.nix_noTests);
# Toggles some settings for better coverage. Windows needs these
# library combinations, and Debian build Nix with GNU readline too.
@ -76,7 +73,7 @@ in
);
# Perl bindings for various platforms.
- perlBindings = forAllSystems (system: nixpkgsFor.${system}.native.nix-perl-bindings);
+ perlBindings = forAllSystems (system: nixpkgsFor.${system}.native.nixComponents.nix-perl-bindings);
# Binary tarball for various platforms, containing a Nix store
# with the closure of 'nix' package, and the second half of
@ -125,10 +122,10 @@ in
};
# API docs for Nix's unstable internal C++ interfaces.
- internal-api-docs = nixpkgsFor.x86_64-linux.native.nix-internal-api-docs;
+ internal-api-docs = nixpkgsFor.x86_64-linux.native.nixComponents.nix-internal-api-docs;
# API docs for Nix's C bindings.
- external-api-docs = nixpkgsFor.x86_64-linux.native.nix-external-api-docs;
+ external-api-docs = nixpkgsFor.x86_64-linux.native.nixComponents.nix-external-api-docs;
# System tests.
tests = import ../tests/nixos { inherit lib nixpkgs nixpkgsFor self; } // {


@ -1,7 +1,5 @@
{ lib
, stdenv
- , releaseTools
- , fileset
, meson
, ninja
@ -12,6 +10,10 @@
, versionSuffix ? ""
}:
+ let
+ inherit (lib) fileset;
+ in
stdenv.mkDerivation (finalAttrs: {
pname = "nix-external-api-docs";
version = lib.fileContents ./.version + versionSuffix;


@ -1,7 +1,5 @@
{ lib
, stdenv
- , releaseTools
- , fileset
, meson
, ninja
@ -12,6 +10,10 @@
, versionSuffix ? ""
}:
+ let
+ inherit (lib) fileset;
+ in
stdenv.mkDerivation (finalAttrs: {
pname = "nix-internal-api-docs";
version = lib.fileContents ./.version + versionSuffix;


@ -127,12 +127,12 @@ ref<EvalState> EvalCommand::getEvalState()
if (!evalState) {
evalState =
#if HAVE_BOEHMGC
- std::allocate_shared<EvalState>(traceable_allocator<EvalState>(),
- lookupPath, getEvalStore(), getStore())
+ std::allocate_shared<EvalState>(
+ traceable_allocator<EvalState>(),
#else
std::make_shared<EvalState>(
- lookupPath, getEvalStore(), getStore())
#endif
+ lookupPath, getEvalStore(), evalSettings, getStore())
;
evalState->repair = repair;


@ -1,6 +1,7 @@
#include "eval-settings.hh" #include "eval-settings.hh"
#include "common-eval-args.hh" #include "common-eval-args.hh"
#include "shared.hh" #include "shared.hh"
#include "config-global.hh"
#include "filetransfer.hh" #include "filetransfer.hh"
#include "eval.hh" #include "eval.hh"
#include "fetchers.hh" #include "fetchers.hh"
@ -13,6 +14,12 @@
namespace nix {
EvalSettings evalSettings {
settings.readOnlyMode
};
static GlobalConfig::Register rEvalSettings(&evalSettings);
MixEvalArgs::MixEvalArgs()
{
addFlag({


@ -12,9 +12,15 @@ namespace nix {
class Store;
class EvalState;
+ struct EvalSettings;
class Bindings;
struct SourcePath;
/**
* @todo Get rid of global settings variables
*/
extern EvalSettings evalSettings;
struct MixEvalArgs : virtual Args, virtual MixRepair
{
static constexpr auto category = "Common evaluation options";


@ -8,6 +8,7 @@
#include "ansicolor.hh" #include "ansicolor.hh"
#include "shared.hh" #include "shared.hh"
#include "config-global.hh"
#include "eval.hh" #include "eval.hh"
#include "eval-cache.hh" #include "eval-cache.hh"
#include "eval-inline.hh" #include "eval-inline.hh"


@ -7,6 +7,7 @@
#include "eval.hh" #include "eval.hh"
#include "globals.hh" #include "globals.hh"
#include "util.hh" #include "util.hh"
#include "eval-settings.hh"
#include "nix_api_expr.h" #include "nix_api_expr.h"
#include "nix_api_expr_internal.h" #include "nix_api_expr_internal.h"
@ -106,7 +107,21 @@ EvalState * nix_state_create(nix_c_context * context, const char ** lookupPath_c
for (size_t i = 0; lookupPath_c[i] != nullptr; i++)
lookupPath.push_back(lookupPath_c[i]);
- return new EvalState{nix::EvalState(nix::LookupPath::parse(lookupPath), store->ptr)};
+ void * p = ::operator new(
+ sizeof(EvalState),
+ static_cast<std::align_val_t>(alignof(EvalState)));
+ auto * p2 = static_cast<EvalState *>(p);
+ new (p) EvalState {
+ .settings = nix::EvalSettings{
+ nix::settings.readOnlyMode,
+ },
+ .state = nix::EvalState(
+ nix::LookupPath::parse(lookupPath),
+ store->ptr,
+ p2->settings),
+ };
+ loadConfFile(p2->settings);
+ return p2;
}
NIXC_CATCH_ERRS_NULL
}


@ -2,11 +2,13 @@
#define NIX_API_EXPR_INTERNAL_H
#include "eval.hh"
+ #include "eval-settings.hh"
#include "attr-set.hh"
#include "nix_api_value.h"
struct EvalState
{
+ nix::EvalSettings settings;
nix::EvalState state;
};


@ -1,4 +1,5 @@
#include "users.hh" #include "users.hh"
#include "config-global.hh"
#include "globals.hh" #include "globals.hh"
#include "profiles.hh" #include "profiles.hh"
#include "eval.hh" #include "eval.hh"
@ -44,7 +45,8 @@ static Strings parseNixPath(const std::string & s)
return res;
}
- EvalSettings::EvalSettings()
+ EvalSettings::EvalSettings(bool & readOnlyMode)
+ : readOnlyMode{readOnlyMode}
{
auto var = getEnv("NIX_PATH");
if (var) nixPath = parseNixPath(*var);
@ -54,7 +56,7 @@ EvalSettings::EvalSettings()
builtinsAbortOnWarn = true;
}
- Strings EvalSettings::getDefaultNixPath()
+ Strings EvalSettings::getDefaultNixPath() const
{
Strings res;
auto add = [&](const Path & p, const std::string & s = std::string()) {
@ -67,7 +69,7 @@ Strings EvalSettings::getDefaultNixPath()
}
};
- if (!evalSettings.restrictEval && !evalSettings.pureEval) {
+ if (!restrictEval && !pureEval) {
add(getNixDefExpr() + "/channels");
add(rootChannelsDir() + "/nixpkgs", "nixpkgs");
add(rootChannelsDir());
@ -93,16 +95,12 @@ std::string EvalSettings::resolvePseudoUrl(std::string_view url)
return std::string(url);
}
- const std::string & EvalSettings::getCurrentSystem()
+ const std::string & EvalSettings::getCurrentSystem() const
{
const auto & evalSystem = currentSystem.get();
return evalSystem != "" ? evalSystem : settings.thisSystem.get();
}
- EvalSettings evalSettings;
- static GlobalConfig::Register rEvalSettings(&evalSettings);
Path getNixDefExpr()
{
return settings.useXDGBaseDirectories


@ -7,9 +7,11 @@ namespace nix {
struct EvalSettings : Config
{
- EvalSettings();
+ EvalSettings(bool & readOnlyMode);
- static Strings getDefaultNixPath();
+ bool & readOnlyMode;
+ Strings getDefaultNixPath() const;
static bool isPseudoUrl(std::string_view s);
@ -74,7 +76,7 @@ struct EvalSettings : Config
* Implements the `eval-system` vs `system` defaulting logic
* described for `eval-system`.
*/
- const std::string & getCurrentSystem();
+ const std::string & getCurrentSystem() const;
Setting<bool> restrictEval{
this, false, "restrict-eval",
@ -193,8 +195,6 @@ struct EvalSettings : Config
)"}; )"};
}; };
extern EvalSettings evalSettings;
/** /**
* Conventionally part of the default nix path in impure mode. * Conventionally part of the default nix path in impure mode.
*/ */


@ -9,7 +9,6 @@
#include "store-api.hh" #include "store-api.hh"
#include "derivations.hh" #include "derivations.hh"
#include "downstream-placeholder.hh" #include "downstream-placeholder.hh"
#include "globals.hh"
#include "eval-inline.hh" #include "eval-inline.hh"
#include "filetransfer.hh" #include "filetransfer.hh"
#include "function-trace.hh" #include "function-trace.hh"
@ -219,8 +218,10 @@ static constexpr size_t BASE_ENV_SIZE = 128;
EvalState::EvalState(
const LookupPath & _lookupPath,
ref<Store> store,
+ const EvalSettings & settings,
std::shared_ptr<Store> buildStore)
- : sWith(symbols.create("<with>"))
+ : settings{settings}
+ , sWith(symbols.create("<with>"))
, sOutPath(symbols.create("outPath"))
, sDrvPath(symbols.create("drvPath"))
, sType(symbols.create("type"))
@ -240,6 +241,12 @@ EvalState::EvalState(
, sRight(symbols.create("right")) , sRight(symbols.create("right"))
, sWrong(symbols.create("wrong")) , sWrong(symbols.create("wrong"))
, sStructuredAttrs(symbols.create("__structuredAttrs")) , sStructuredAttrs(symbols.create("__structuredAttrs"))
, sAllowedReferences(symbols.create("allowedReferences"))
, sAllowedRequisites(symbols.create("allowedRequisites"))
, sDisallowedReferences(symbols.create("disallowedReferences"))
, sDisallowedRequisites(symbols.create("disallowedRequisites"))
, sMaxSize(symbols.create("maxSize"))
, sMaxClosureSize(symbols.create("maxClosureSize"))
, sBuilder(symbols.create("builder")) , sBuilder(symbols.create("builder"))
, sArgs(symbols.create("args")) , sArgs(symbols.create("args"))
, sContentAddressed(symbols.create("__contentAddressed")) , sContentAddressed(symbols.create("__contentAddressed"))
@ -270,10 +277,10 @@ EvalState::EvalState(
, repair(NoRepair)
, emptyBindings(0)
, rootFS(
- evalSettings.restrictEval || evalSettings.pureEval
+ settings.restrictEval || settings.pureEval
? ref<SourceAccessor>(AllowListSourceAccessor::create(getFSSourceAccessor(), {},
- [](const CanonPath & path) -> RestrictedPathError {
- auto modeInformation = evalSettings.pureEval
+ [&settings](const CanonPath & path) -> RestrictedPathError {
+ auto modeInformation = settings.pureEval
? "in pure evaluation mode (use '--impure' to override)"
: "in restricted mode";
throw RestrictedPathError("access to absolute path '%1%' is forbidden %2%", path, modeInformation);
@ -324,10 +331,10 @@ EvalState::EvalState(
vStringUnknown.mkString("unknown"); vStringUnknown.mkString("unknown");
/* Initialise the Nix expression search path. */ /* Initialise the Nix expression search path. */
if (!evalSettings.pureEval) { if (!settings.pureEval) {
for (auto & i : _lookupPath.elements) for (auto & i : _lookupPath.elements)
lookupPath.elements.emplace_back(LookupPath::Elem {i}); lookupPath.elements.emplace_back(LookupPath::Elem {i});
for (auto & i : evalSettings.nixPath.get()) for (auto & i : settings.nixPath.get())
lookupPath.elements.emplace_back(LookupPath::Elem::parse(i)); lookupPath.elements.emplace_back(LookupPath::Elem::parse(i));
} }
@ -405,9 +412,9 @@ bool isAllowedURI(std::string_view uri, const Strings & allowedUris)
void EvalState::checkURI(const std::string & uri) void EvalState::checkURI(const std::string & uri)
{ {
if (!evalSettings.restrictEval) return; if (!settings.restrictEval) return;
if (isAllowedURI(uri, evalSettings.allowedUris.get())) return; if (isAllowedURI(uri, settings.allowedUris.get())) return;
/* If the URI is a path, then check it against allowedPaths as /* If the URI is a path, then check it against allowedPaths as
well. */ well. */
@ -452,7 +459,7 @@ void EvalState::addConstant(const std::string & name, Value * v, Constant info)
constantInfos.push_back({name2, info});
- if (!(evalSettings.pureEval && info.impureOnly)) {
+ if (!(settings.pureEval && info.impureOnly)) {
/* Check the type, if possible.
We might know the type of a thunk in advance, so be allowed
@ -1407,11 +1414,11 @@ public:
void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value & vRes, const PosIdx pos) void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value & vRes, const PosIdx pos)
{ {
if (callDepth > evalSettings.maxCallDepth) if (callDepth > settings.maxCallDepth)
error<EvalError>("stack overflow; max-call-depth exceeded").atPos(pos).debugThrow(); error<EvalError>("stack overflow; max-call-depth exceeded").atPos(pos).debugThrow();
CallDepth _level(callDepth); CallDepth _level(callDepth);
auto trace = evalSettings.traceFunctionCalls auto trace = settings.traceFunctionCalls
? std::make_unique<FunctionCallTrace>(positions[pos]) ? std::make_unique<FunctionCallTrace>(positions[pos])
: nullptr; : nullptr;
@ -2297,7 +2304,7 @@ StorePath EvalState::copyPathToStore(NixStringContext & context, const SourcePat
path.resolveSymlinks(),
settings.readOnlyMode ? FetchMode::DryRun : FetchMode::Copy,
path.baseName(),
- FileIngestionMethod::Recursive,
+ ContentAddressMethod::Raw::NixArchive,
nullptr,
repair);
allowPath(dstPath);
@ -2739,7 +2746,7 @@ SourcePath EvalState::findFile(const LookupPath & lookupPath, const std::string_
return {corepkgsFS, CanonPath(path.substr(3))};
error<ThrownError>(
- evalSettings.pureEval
+ settings.pureEval
? "cannot look up '<%s>' in pure evaluation mode (use '--impure' to override)"
: "file '%s' was not found in the Nix search path (add it using $NIX_PATH or -I)",
path
@ -2819,7 +2826,7 @@ Expr * EvalState::parse(
const SourcePath & basePath, const SourcePath & basePath,
std::shared_ptr<StaticEnv> & staticEnv) std::shared_ptr<StaticEnv> & staticEnv)
{ {
auto result = parseExprFromBuf(text, length, origin, basePath, symbols, positions, rootFS, exprSymbols); auto result = parseExprFromBuf(text, length, origin, basePath, symbols, settings, positions, rootFS, exprSymbols);
result->bindVars(*this, staticEnv); result->bindVars(*this, staticEnv);


@ -30,6 +30,7 @@ namespace nix {
constexpr size_t maxPrimOpArity = 8;
class Store;
+ struct EvalSettings;
class EvalState;
class StorePath;
struct SingleDerivedPath;
@ -39,7 +40,6 @@ namespace eval_cache {
class EvalCache;
}
/**
* Function that implements a primop.
*/
@ -162,13 +162,17 @@ struct DebugTrace {
class EvalState : public std::enable_shared_from_this<EvalState> class EvalState : public std::enable_shared_from_this<EvalState>
{ {
public: public:
const EvalSettings & settings;
SymbolTable symbols; SymbolTable symbols;
PosTable positions; PosTable positions;
const Symbol sWith, sOutPath, sDrvPath, sType, sMeta, sName, sValue, const Symbol sWith, sOutPath, sDrvPath, sType, sMeta, sName, sValue,
sSystem, sOverrides, sOutputs, sOutputName, sIgnoreNulls, sSystem, sOverrides, sOutputs, sOutputName, sIgnoreNulls,
sFile, sLine, sColumn, sFunctor, sToString, sFile, sLine, sColumn, sFunctor, sToString,
sRight, sWrong, sStructuredAttrs, sBuilder, sArgs, sRight, sWrong, sStructuredAttrs,
sAllowedReferences, sAllowedRequisites, sDisallowedReferences, sDisallowedRequisites,
sMaxSize, sMaxClosureSize,
sBuilder, sArgs,
sContentAddressed, sImpure, sContentAddressed, sImpure,
sOutputHash, sOutputHashAlgo, sOutputHashMode, sOutputHash, sOutputHashAlgo, sOutputHashMode,
sRecurseForDerivations, sRecurseForDerivations,
@ -349,6 +353,7 @@ public:
EvalState(
const LookupPath & _lookupPath,
ref<Store> store,
+ const EvalSettings & settings,
std::shared_ptr<Store> buildStore = nullptr);
~EvalState();


@ -1,5 +1,5 @@
#include "users.hh" #include "users.hh"
#include "globals.hh" #include "config-global.hh"
#include "fetch-settings.hh" #include "fetch-settings.hh"
#include "flake.hh" #include "flake.hh"


@ -803,7 +803,7 @@ static void prim_getFlake(EvalState & state, const PosIdx pos, Value * * args, V
{
std::string flakeRefS(state.forceStringNoCtx(*args[0], pos, "while evaluating the argument passed to builtins.getFlake"));
auto flakeRef = parseFlakeRef(flakeRefS, {}, true);
- if (evalSettings.pureEval && !flakeRef.input.isLocked())
+ if (state.settings.pureEval && !flakeRef.input.isLocked())
throw Error("cannot call 'getFlake' on unlocked flake reference '%s', at %s (use --impure to override)", flakeRefS, state.positions[pos]);
callFlake(state,
@ -811,8 +811,8 @@ static void prim_getFlake(EvalState & state, const PosIdx pos, Value * * args, V
LockFlags {
.updateLockFile = false,
.writeLockFile = false,
- .useRegistries = !evalSettings.pureEval && fetchSettings.useRegistries,
- .allowUnlocked = !evalSettings.pureEval,
+ .useRegistries = !state.settings.pureEval && fetchSettings.useRegistries,
+ .allowUnlocked = !state.settings.pureEval,
}),
v);
}


@ -46,6 +46,7 @@ struct ParserState
PosTable::Origin origin;
const ref<SourceAccessor> rootFS;
const Expr::AstSymbols & s;
+ const EvalSettings & settings;
void dupAttr(const AttrPath & attrPath, const PosIdx pos, const PosIdx prevPos);
void dupAttr(Symbol attr, const PosIdx pos, const PosIdx prevPos);


@ -25,7 +25,6 @@
#include "nixexpr.hh" #include "nixexpr.hh"
#include "eval.hh" #include "eval.hh"
#include "eval-settings.hh" #include "eval-settings.hh"
#include "globals.hh"
#include "parser-state.hh" #include "parser-state.hh"
#define YYLTYPE ::nix::ParserLocation #define YYLTYPE ::nix::ParserLocation
@ -40,6 +39,7 @@ Expr * parseExprFromBuf(
Pos::Origin origin,
const SourcePath & basePath,
SymbolTable & symbols,
+ const EvalSettings & settings,
PosTable & positions,
const ref<SourceAccessor> rootFS,
const Expr::AstSymbols & astSymbols);
@ -294,7 +294,7 @@ path_start
$$ = new ExprPath(ref<SourceAccessor>(state->rootFS), std::move(path)); $$ = new ExprPath(ref<SourceAccessor>(state->rootFS), std::move(path));
} }
| HPATH { | HPATH {
if (evalSettings.pureEval) { if (state->settings.pureEval) {
throw Error( throw Error(
"the path '%s' can not be resolved in pure mode", "the path '%s' can not be resolved in pure mode",
std::string_view($1.p, $1.l) std::string_view($1.p, $1.l)
@ -429,6 +429,7 @@ Expr * parseExprFromBuf(
Pos::Origin origin,
const SourcePath & basePath,
SymbolTable & symbols,
+ const EvalSettings & settings,
PosTable & positions,
const ref<SourceAccessor> rootFS,
const Expr::AstSymbols & astSymbols)
@ -441,6 +442,7 @@ Expr * parseExprFromBuf(
.origin = positions.addOrigin(origin, length),
.rootFS = rootFS,
.s = astSymbols,
+ .settings = settings,
};
yylex_init(&scanner);


@ -5,7 +5,6 @@
#include "eval.hh" #include "eval.hh"
#include "eval-settings.hh" #include "eval-settings.hh"
#include "gc-small-vector.hh" #include "gc-small-vector.hh"
#include "globals.hh"
#include "json-to-value.hh" #include "json-to-value.hh"
#include "names.hh" #include "names.hh"
#include "path-references.hh" #include "path-references.hh"
@ -78,7 +77,7 @@ StringMap EvalState::realiseContext(const NixStringContext & context, StorePathS
if (drvs.empty()) return {};
- if (isIFD && !evalSettings.enableImportFromDerivation)
+ if (isIFD && !settings.enableImportFromDerivation)
error<EvalError>(
"cannot build '%1%' during evaluation because the option 'allow-import-from-derivation' is disabled",
drvs.begin()->to_string(*store)
@ -901,7 +900,7 @@ static void prim_tryEval(EvalState & state, const PosIdx pos, Value * * args, Va
MaintainCount trylevel(state.trylevel);
ReplExitStatus (* savedDebugRepl)(ref<EvalState> es, const ValMap & extraEnv) = nullptr;
- if (state.debugRepl && evalSettings.ignoreExceptionsDuringTry)
+ if (state.debugRepl && state.settings.ignoreExceptionsDuringTry)
{
/* to prevent starting the repl from exceptions withing a tryEval, null it. */
savedDebugRepl = state.debugRepl;
@ -950,7 +949,7 @@ static RegisterPrimOp primop_tryEval({
static void prim_getEnv(EvalState & state, const PosIdx pos, Value * * args, Value & v) static void prim_getEnv(EvalState & state, const PosIdx pos, Value * * args, Value & v)
{ {
std::string name(state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.getEnv")); std::string name(state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.getEnv"));
v.mkString(evalSettings.restrictEval || evalSettings.pureEval ? "" : getEnv(name).value_or("")); v.mkString(state.settings.restrictEval || state.settings.pureEval ? "" : getEnv(name).value_or(""));
} }
static RegisterPrimOp primop_getEnv({ static RegisterPrimOp primop_getEnv({
@ -1017,7 +1016,7 @@ static void prim_trace(EvalState & state, const PosIdx pos, Value * * args, Valu
printError("trace: %1%", args[0]->string_view()); printError("trace: %1%", args[0]->string_view());
else else
printError("trace: %1%", ValuePrinter(state, *args[0])); printError("trace: %1%", ValuePrinter(state, *args[0]));
if (evalSettings.builtinsTraceDebugger) { if (state.settings.builtinsTraceDebugger) {
state.runDebugRepl(nullptr); state.runDebugRepl(nullptr);
} }
state.forceValue(*args[1], pos); state.forceValue(*args[1], pos);
@ -1056,11 +1055,11 @@ static void prim_warn(EvalState & state, const PosIdx pos, Value * * args, Value
logWarning(info);
}
- if (evalSettings.builtinsAbortOnWarn) {
+ if (state.settings.builtinsAbortOnWarn) {
// Not an EvalError or subclass, which would cause the error to be stored in the eval cache.
state.error<EvalBaseError>("aborting to reveal stack trace of warning, as abort-on-warn is set").setIsFromExpr().debugThrow();
}
- if (evalSettings.builtinsTraceDebugger || evalSettings.builtinsDebuggerOnWarn) {
+ if (state.settings.builtinsTraceDebugger || state.settings.builtinsDebuggerOnWarn) {
state.runDebugRepl(nullptr);
}
state.forceValue(*args[1], pos);
@ -1163,12 +1162,34 @@ static void prim_derivationStrict(EvalState & state, const PosIdx pos, Value * *
}
}
/**
* Early validation for the derivation name, for better error message.
* It is checked again when constructing store paths.
*
* @todo Check that the `.drv` suffix also fits.
*/
static void checkDerivationName(EvalState & state, std::string_view drvName)
{
try {
checkName(drvName);
} catch (BadStorePathName & e) {
// "Please pass a different name": Users may not be aware that they can
// pass a different one, in functions like `fetchurl` where the name
// is optional.
// Note that Nixpkgs generally won't trigger this, because `mkDerivation`
// sanitizes the name.
state.error<EvalError>("invalid derivation name: %s. Please pass a different '%s'.", Uncolored(e.message()), "name").debugThrow();
}
}
static void derivationStrictInternal(
EvalState & state,
const std::string & drvName,
const Bindings * attrs,
Value & v)
{
+ checkDerivationName(state, drvName);
/* Check whether attributes should be passed as a JSON file. */
using nlohmann::json;
std::optional<json> jsonObject;
@ -1209,7 +1230,7 @@ static void derivationStrictInternal(
auto handleHashMode = [&](const std::string_view s) { auto handleHashMode = [&](const std::string_view s) {
if (s == "recursive") { if (s == "recursive") {
// back compat, new name is "nar" // back compat, new name is "nar"
ingestionMethod = FileIngestionMethod::Recursive; ingestionMethod = ContentAddressMethod::Raw::NixArchive;
} else try { } else try {
ingestionMethod = ContentAddressMethod::parse(s); ingestionMethod = ContentAddressMethod::parse(s);
} catch (UsageError &) { } catch (UsageError &) {
@ -1217,9 +1238,9 @@ static void derivationStrictInternal(
"invalid value '%s' for 'outputHashMode' attribute", s "invalid value '%s' for 'outputHashMode' attribute", s
).atPos(v).debugThrow(); ).atPos(v).debugThrow();
} }
if (ingestionMethod == TextIngestionMethod {}) if (ingestionMethod == ContentAddressMethod::Raw::Text)
experimentalFeatureSettings.require(Xp::DynamicDerivations); experimentalFeatureSettings.require(Xp::DynamicDerivations);
if (ingestionMethod == FileIngestionMethod::Git) if (ingestionMethod == ContentAddressMethod::Raw::Git)
experimentalFeatureSettings.require(Xp::GitHashing); experimentalFeatureSettings.require(Xp::GitHashing);
}; };
@ -1308,6 +1329,20 @@ static void derivationStrictInternal(
handleOutputs(ss); handleOutputs(ss);
} }
if (i->name == state.sAllowedReferences)
warn("In a derivation named '%s', 'structuredAttrs' disables the effect of the derivation attribute 'allowedReferences'; use 'outputChecks.<output>.allowedReferences' instead", drvName);
if (i->name == state.sAllowedRequisites)
warn("In a derivation named '%s', 'structuredAttrs' disables the effect of the derivation attribute 'allowedRequisites'; use 'outputChecks.<output>.allowedRequisites' instead", drvName);
if (i->name == state.sDisallowedReferences)
warn("In a derivation named '%s', 'structuredAttrs' disables the effect of the derivation attribute 'disallowedReferences'; use 'outputChecks.<output>.disallowedReferences' instead", drvName);
if (i->name == state.sDisallowedRequisites)
warn("In a derivation named '%s', 'structuredAttrs' disables the effect of the derivation attribute 'disallowedRequisites'; use 'outputChecks.<output>.disallowedRequisites' instead", drvName);
if (i->name == state.sMaxSize)
warn("In a derivation named '%s', 'structuredAttrs' disables the effect of the derivation attribute 'maxSize'; use 'outputChecks.<output>.maxSize' instead", drvName);
if (i->name == state.sMaxClosureSize)
warn("In a derivation named '%s', 'structuredAttrs' disables the effect of the derivation attribute 'maxClosureSize'; use 'outputChecks.<output>.maxClosureSize' instead", drvName);
} else {
auto s = state.coerceToString(pos, *i->value, context, context_below, true).toOwned();
drv.env.emplace(key, s);
@ -1377,7 +1412,7 @@ static void derivationStrictInternal(
/* Check whether the derivation name is valid. */
if (isDerivation(drvName) &&
- !(ingestionMethod == ContentAddressMethod { TextIngestionMethod { } } &&
+ !(ingestionMethod == ContentAddressMethod::Raw::Text &&
outputs.size() == 1 &&
*(outputs.begin()) == "out"))
{
@ -1399,7 +1434,7 @@ static void derivationStrictInternal(
auto h = newHashAllowEmpty(*outputHash, outputHashAlgo);
- auto method = ingestionMethod.value_or(FileIngestionMethod::Flat);
+ auto method = ingestionMethod.value_or(ContentAddressMethod::Raw::Flat);
DerivationOutput::CAFixed dof {
.ca = ContentAddress {
@ -1418,7 +1453,7 @@ static void derivationStrictInternal(
.atPos(v).debugThrow(); .atPos(v).debugThrow();
auto ha = outputHashAlgo.value_or(HashAlgorithm::SHA256); auto ha = outputHashAlgo.value_or(HashAlgorithm::SHA256);
auto method = ingestionMethod.value_or(FileIngestionMethod::Recursive); auto method = ingestionMethod.value_or(ContentAddressMethod::Raw::NixArchive);
for (auto & i : outputs) { for (auto & i : outputs) {
drv.env[i] = hashPlaceholder(i); drv.env[i] = hashPlaceholder(i);
@ -1564,7 +1599,7 @@ static RegisterPrimOp primop_toPath({
corner cases. */ corner cases. */
static void prim_storePath(EvalState & state, const PosIdx pos, Value * * args, Value & v) static void prim_storePath(EvalState & state, const PosIdx pos, Value * * args, Value & v)
{ {
if (evalSettings.pureEval) if (state.settings.pureEval)
state.error<EvalError>( state.error<EvalError>(
"'%s' is not allowed in pure evaluation mode", "'%s' is not allowed in pure evaluation mode",
"builtins.storePath" "builtins.storePath"
@ -2194,7 +2229,7 @@ static void prim_toFile(EvalState & state, const PosIdx pos, Value * * args, Val
}) })
: ({ : ({
StringSource s { contents }; StringSource s { contents };
state.store->addToStoreFromDump(s, name, FileSerialisationMethod::Flat, TextIngestionMethod {}, HashAlgorithm::SHA256, refs, state.repair); state.store->addToStoreFromDump(s, name, FileSerialisationMethod::Flat, ContentAddressMethod::Raw::Text, HashAlgorithm::SHA256, refs, state.repair);
}); });
/* Note: we don't need to add `context' to the context of the /* Note: we don't need to add `context' to the context of the
@ -2377,7 +2412,7 @@ static void prim_filterSource(EvalState & state, const PosIdx pos, Value * * arg
"while evaluating the second argument (the path to filter) passed to 'builtins.filterSource'"); "while evaluating the second argument (the path to filter) passed to 'builtins.filterSource'");
state.forceFunction(*args[0], pos, "while evaluating the first argument passed to builtins.filterSource"); state.forceFunction(*args[0], pos, "while evaluating the first argument passed to builtins.filterSource");
addPath(state, pos, path.baseName(), path, args[0], FileIngestionMethod::Recursive, std::nullopt, v, context); addPath(state, pos, path.baseName(), path, args[0], ContentAddressMethod::Raw::NixArchive, std::nullopt, v, context);
} }
static RegisterPrimOp primop_filterSource({ static RegisterPrimOp primop_filterSource({
@ -2440,7 +2475,7 @@ static void prim_path(EvalState & state, const PosIdx pos, Value * * args, Value
std::optional<SourcePath> path; std::optional<SourcePath> path;
std::string name; std::string name;
Value * filterFun = nullptr; Value * filterFun = nullptr;
ContentAddressMethod method = FileIngestionMethod::Recursive; auto method = ContentAddressMethod::Raw::NixArchive;
std::optional<Hash> expectedHash; std::optional<Hash> expectedHash;
NixStringContext context; NixStringContext context;
@ -2456,8 +2491,8 @@ static void prim_path(EvalState & state, const PosIdx pos, Value * * args, Value
state.forceFunction(*(filterFun = attr.value), attr.pos, "while evaluating the `filter` parameter passed to builtins.path"); state.forceFunction(*(filterFun = attr.value), attr.pos, "while evaluating the `filter` parameter passed to builtins.path");
else if (n == "recursive") else if (n == "recursive")
method = state.forceBool(*attr.value, attr.pos, "while evaluating the `recursive` attribute passed to builtins.path") method = state.forceBool(*attr.value, attr.pos, "while evaluating the `recursive` attribute passed to builtins.path")
? FileIngestionMethod::Recursive ? ContentAddressMethod::Raw::NixArchive
: FileIngestionMethod::Flat; : ContentAddressMethod::Raw::Flat;
else if (n == "sha256") else if (n == "sha256")
expectedHash = newHashAllowEmpty(state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the `sha256` attribute passed to builtins.path"), HashAlgorithm::SHA256); expectedHash = newHashAllowEmpty(state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the `sha256` attribute passed to builtins.path"), HashAlgorithm::SHA256);
else else
@ -4548,7 +4583,7 @@ void EvalState::createBaseEnv()
)", )",
}); });
if (!evalSettings.pureEval) { if (!settings.pureEval) {
v.mkInt(time(0)); v.mkInt(time(0));
} }
addConstant("__currentTime", v, { addConstant("__currentTime", v, {
@ -4575,8 +4610,8 @@ void EvalState::createBaseEnv()
.impureOnly = true, .impureOnly = true,
}); });
if (!evalSettings.pureEval) if (!settings.pureEval)
v.mkString(evalSettings.getCurrentSystem()); v.mkString(settings.getCurrentSystem());
addConstant("__currentSystem", v, { addConstant("__currentSystem", v, {
.type = nString, .type = nString,
.doc = R"( .doc = R"(
@ -4656,7 +4691,7 @@ void EvalState::createBaseEnv()
#ifndef _WIN32 // TODO implement on Windows #ifndef _WIN32 // TODO implement on Windows
// Miscellaneous // Miscellaneous
if (evalSettings.enableNativeCode) { if (settings.enableNativeCode) {
addPrimOp({ addPrimOp({
.name = "__importNative", .name = "__importNative",
.arity = 2, .arity = 2,
@ -4679,7 +4714,7 @@ void EvalState::createBaseEnv()
error if `--trace-verbose` is enabled. Then return *e2*. This function error if `--trace-verbose` is enabled. Then return *e2*. This function
is useful for debugging. is useful for debugging.
)", )",
.fun = evalSettings.traceVerbose ? prim_trace : prim_second, .fun = settings.traceVerbose ? prim_trace : prim_second,
}); });
/* Add a value containing the current Nix expression search path. */ /* Add a value containing the current Nix expression search path. */

View File

@ -53,7 +53,7 @@ static void prim_fetchMercurial(EvalState & state, const PosIdx pos, Value * * a
// whitelist. Ah well. // whitelist. Ah well.
state.checkURI(url); state.checkURI(url);
if (evalSettings.pureEval && !rev) if (state.settings.pureEval && !rev)
throw Error("in pure evaluation mode, 'fetchMercurial' requires a Mercurial revision"); throw Error("in pure evaluation mode, 'fetchMercurial' requires a Mercurial revision");
fetchers::Attrs attrs; fetchers::Attrs attrs;

View File

@ -171,10 +171,10 @@ static void fetchTree(
} }
} }
if (!evalSettings.pureEval && !input.isDirect() && experimentalFeatureSettings.isEnabled(Xp::Flakes)) if (!state.settings.pureEval && !input.isDirect() && experimentalFeatureSettings.isEnabled(Xp::Flakes))
input = lookupInRegistries(state.store, input).first; input = lookupInRegistries(state.store, input).first;
if (evalSettings.pureEval && !input.isLocked()) { if (state.settings.pureEval && !input.isLocked()) {
auto fetcher = "fetchTree"; auto fetcher = "fetchTree";
if (params.isFetchGit) if (params.isFetchGit)
fetcher = "fetchGit"; fetcher = "fetchGit";
@ -431,7 +431,10 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v
state.forceValue(*args[0], pos); state.forceValue(*args[0], pos);
if (args[0]->type() == nAttrs) { bool isArgAttrs = args[0]->type() == nAttrs;
bool nameAttrPassed = false;
if (isArgAttrs) {
for (auto & attr : *args[0]->attrs()) { for (auto & attr : *args[0]->attrs()) {
std::string_view n(state.symbols[attr.name]); std::string_view n(state.symbols[attr.name]);
@ -439,8 +442,10 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v
url = state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the url we should fetch"); url = state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the url we should fetch");
else if (n == "sha256") else if (n == "sha256")
expectedHash = newHashAllowEmpty(state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the sha256 of the content we should fetch"), HashAlgorithm::SHA256); expectedHash = newHashAllowEmpty(state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the sha256 of the content we should fetch"), HashAlgorithm::SHA256);
else if (n == "name") else if (n == "name") {
nameAttrPassed = true;
name = state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the name of the content we should fetch"); name = state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the name of the content we should fetch");
}
else else
state.error<EvalError>("unsupported argument '%s' to '%s'", n, who) state.error<EvalError>("unsupported argument '%s' to '%s'", n, who)
.atPos(pos).debugThrow(); .atPos(pos).debugThrow();
@ -453,14 +458,27 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v
url = state.forceStringNoCtx(*args[0], pos, "while evaluating the url we should fetch"); url = state.forceStringNoCtx(*args[0], pos, "while evaluating the url we should fetch");
if (who == "fetchTarball") if (who == "fetchTarball")
url = evalSettings.resolvePseudoUrl(*url); url = state.settings.resolvePseudoUrl(*url);
state.checkURI(*url); state.checkURI(*url);
if (name == "") if (name == "")
name = baseNameOf(*url); name = baseNameOf(*url);
if (evalSettings.pureEval && !expectedHash) try {
checkName(name);
} catch (BadStorePathName & e) {
auto resolution =
nameAttrPassed ? HintFmt("Please change the value for the 'name' attribute passed to '%s', so that it can create a valid store path.", who) :
isArgAttrs ? HintFmt("Please add a valid 'name' attribute to the argument for '%s', so that it can create a valid store path.", who) :
HintFmt("Please pass an attribute set with 'url' and 'name' attributes to '%s', so that it can create a valid store path.", who);
state.error<EvalError>(
std::string("invalid store path name when fetching URL '%s': %s. %s"), *url, Uncolored(e.message()), Uncolored(resolution.str()))
.atPos(pos).debugThrow();
}
if (state.settings.pureEval && !expectedHash)
state.error<EvalError>("in pure evaluation mode, '%s' requires a 'sha256' argument", who).atPos(pos).debugThrow(); state.error<EvalError>("in pure evaluation mode, '%s' requires a 'sha256' argument", who).atPos(pos).debugThrow();
// early exit if pinned and already in the store // early exit if pinned and already in the store
@ -468,7 +486,7 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v
auto expectedPath = state.store->makeFixedOutputPath( auto expectedPath = state.store->makeFixedOutputPath(
name, name,
FixedOutputInfo { FixedOutputInfo {
.method = unpack ? FileIngestionMethod::Recursive : FileIngestionMethod::Flat, .method = unpack ? FileIngestionMethod::NixArchive : FileIngestionMethod::Flat,
.hash = *expectedHash, .hash = *expectedHash,
.references = {} .references = {}
}); });

View File

@ -1,4 +1,5 @@
#include "fetch-settings.hh" #include "fetch-settings.hh"
#include "config-global.hh"
namespace nix { namespace nix {

View File

@ -18,7 +18,7 @@ StorePath fetchToStore(
const SourcePath & path, const SourcePath & path,
FetchMode mode, FetchMode mode,
std::string_view name = "source", std::string_view name = "source",
ContentAddressMethod method = FileIngestionMethod::Recursive, ContentAddressMethod method = ContentAddressMethod::Raw::NixArchive,
PathFilter * filter = nullptr, PathFilter * filter = nullptr,
RepairFlag repair = NoRepair); RepairFlag repair = NoRepair);

View File

@ -305,7 +305,7 @@ StorePath Input::computeStorePath(Store & store) const
if (!narHash) if (!narHash)
throw Error("cannot compute store path for unlocked input '%s'", to_string()); throw Error("cannot compute store path for unlocked input '%s'", to_string());
return store.makeFixedOutputPath(getName(), FixedOutputInfo { return store.makeFixedOutputPath(getName(), FixedOutputInfo {
.method = FileIngestionMethod::Recursive, .method = FileIngestionMethod::NixArchive,
.hash = *narHash, .hash = *narHash,
.references = {}, .references = {},
}); });

View File

@ -41,21 +41,6 @@ bool isCacheFileWithinTtl(time_t now, const struct stat & st)
return st.st_mtime + settings.tarballTtl > now; return st.st_mtime + settings.tarballTtl > now;
} }
bool touchCacheFile(const Path & path, time_t touch_time)
{
#ifndef _WIN32 // TODO implement
struct timeval times[2];
times[0].tv_sec = touch_time;
times[0].tv_usec = 0;
times[1].tv_sec = touch_time;
times[1].tv_usec = 0;
return lutimes(path.c_str(), times) == 0;
#else
return false;
#endif
}
Path getCachePath(std::string_view key, bool shallow) Path getCachePath(std::string_view key, bool shallow)
{ {
return getCacheDir() return getCacheDir()
@ -594,8 +579,11 @@ struct GitInputScheme : InputScheme
warn("could not update local clone of Git repository '%s'; continuing with the most recent version", repoInfo.url); warn("could not update local clone of Git repository '%s'; continuing with the most recent version", repoInfo.url);
} }
if (!touchCacheFile(localRefFile, now)) try {
warn("could not update mtime for file '%s': %s", localRefFile, strerror(errno)); setWriteTime(localRefFile, now, now);
} catch (Error & e) {
warn("could not update mtime for file '%s': %s", localRefFile, e.msg());
}
if (!originalRef && !storeCachedHead(repoInfo.url, ref)) if (!originalRef && !storeCachedHead(repoInfo.url, ref))
warn("could not update cached head '%s' for '%s'", ref, repoInfo.url); warn("could not update cached head '%s' for '%s'", ref, repoInfo.url);
} }

View File

@ -433,7 +433,7 @@ struct GitLabInputScheme : GitArchiveInputScheme
store->toRealPath( store->toRealPath(
downloadFile(store, url, "source", headers).storePath))); downloadFile(store, url, "source", headers).storePath)));
if (json.is_array() && json.size() == 1 && json[0]["id"] != nullptr) { if (json.is_array() && json.size() >= 1 && json[0]["id"] != nullptr) {
return RefInfo { return RefInfo {
.rev = Hash::parseAny(std::string(json[0]["id"]), HashAlgorithm::SHA1) .rev = Hash::parseAny(std::string(json[0]["id"]), HashAlgorithm::SHA1)
}; };

View File

@ -213,7 +213,7 @@ struct MercurialInputScheme : InputScheme
auto storePath = store->addToStore( auto storePath = store->addToStore(
input.getName(), input.getName(),
{getFSSourceAccessor(), CanonPath(actualPath)}, {getFSSourceAccessor(), CanonPath(actualPath)},
FileIngestionMethod::Recursive, HashAlgorithm::SHA256, {}, ContentAddressMethod::Raw::NixArchive, HashAlgorithm::SHA256, {},
filter); filter);
return storePath; return storePath;

View File

@ -1,7 +1,6 @@
{ lib { lib
, stdenv , stdenv
, releaseTools , releaseTools
, fileset
, meson , meson
, ninja , ninja
@ -16,17 +15,16 @@
# Configuration Options # Configuration Options
, versionSuffix ? "" , versionSuffix ? ""
, officialRelease ? false
# Check test coverage of Nix. Probably want to use with at least # one of `doCheck` or `doInstallCheck` enabled.
# one of `doCheck` or `doInstallCheck` enabled. # one of `doCheck` or `doInstallCheck` enabled.
, withCoverageChecks ? false , withCoverageChecks ? false
# Avoid setting things that would interfere with a functioning devShell
, forDevShell ? false
}: }:
let let
inherit (lib) fileset;
version = lib.fileContents ./.version + versionSuffix; version = lib.fileContents ./.version + versionSuffix;
mkDerivation = mkDerivation =

View File

@ -1,5 +1,6 @@
#include "common-args.hh" #include "common-args.hh"
#include "args/root.hh" #include "args/root.hh"
#include "config-global.hh"
#include "globals.hh" #include "globals.hh"
#include "logging.hh" #include "logging.hh"
#include "loggers.hh" #include "loggers.hh"

View File

@ -322,7 +322,7 @@ StorePath BinaryCacheStore::addToStoreFromDump(
if (static_cast<FileIngestionMethod>(dumpMethod) == hashMethod.getFileIngestionMethod()) if (static_cast<FileIngestionMethod>(dumpMethod) == hashMethod.getFileIngestionMethod())
caHash = hashString(HashAlgorithm::SHA256, dump2.s); caHash = hashString(HashAlgorithm::SHA256, dump2.s);
switch (dumpMethod) { switch (dumpMethod) {
case FileSerialisationMethod::Recursive: case FileSerialisationMethod::NixArchive:
// The dump is already NAR in this case, just use it. // The dump is already NAR in this case, just use it.
nar = dump2.s; nar = dump2.s;
break; break;
@ -339,7 +339,7 @@ StorePath BinaryCacheStore::addToStoreFromDump(
} else { } else {
// Otherwise, we have to do the same hashing as NAR so our single // hash will suffice for both purposes.
// hash will suffice for both purposes. // hash will suffice for both purposes.
if (dumpMethod != FileSerialisationMethod::Recursive || hashAlgo != HashAlgorithm::SHA256) if (dumpMethod != FileSerialisationMethod::NixArchive || hashAlgo != HashAlgorithm::SHA256)
unsupported("addToStoreFromDump"); unsupported("addToStoreFromDump");
} }
StringSource narDump { nar }; StringSource narDump { nar };

View File

@ -3,6 +3,7 @@
# include "hook-instance.hh" # include "hook-instance.hh"
#endif #endif
#include "processes.hh" #include "processes.hh"
#include "config-global.hh"
#include "worker.hh" #include "worker.hh"
#include "builtins.hh" #include "builtins.hh"
#include "builtins/buildenv.hh" #include "builtins/buildenv.hh"

View File

@ -19,7 +19,6 @@ Worker::Worker(Store & store, Store & evalStore)
, store(store) , store(store)
, evalStore(evalStore) , evalStore(evalStore)
{ {
/* Debugging: prevent recursive workers. */
nrLocalBuilds = 0; nrLocalBuilds = 0;
nrSubstitutions = 0; nrSubstitutions = 0;
lastWokenUp = steady_time_point::min(); lastWokenUp = steady_time_point::min();
@ -530,7 +529,7 @@ bool Worker::pathContentsGood(const StorePath & path)
else { else {
auto current = hashPath( auto current = hashPath(
{store.getFSAccessor(), CanonPath(store.printStorePath(path))}, {store.getFSAccessor(), CanonPath(store.printStorePath(path))},
FileIngestionMethod::Recursive, info->narHash.algo).first; FileIngestionMethod::NixArchive, info->narHash.algo).first;
Hash nullHash(HashAlgorithm::SHA256); Hash nullHash(HashAlgorithm::SHA256);
res = info->narHash == nullHash || info->narHash == current; res = info->narHash == nullHash || info->narHash == current;
} }

View File

@ -59,7 +59,7 @@ struct HookInstance;
#endif #endif
/** /**
* The worker class. * Coordinates one or more realisations and their interdependencies.
*/ */
class Worker class Worker
{ {

View File

@ -8,98 +8,136 @@ std::string_view makeFileIngestionPrefix(FileIngestionMethod m)
{ {
switch (m) { switch (m) {
case FileIngestionMethod::Flat: case FileIngestionMethod::Flat:
// Not prefixed for back compat
return ""; return "";
case FileIngestionMethod::Recursive: case FileIngestionMethod::NixArchive:
return "r:"; return "r:";
case FileIngestionMethod::Git: case FileIngestionMethod::Git:
experimentalFeatureSettings.require(Xp::GitHashing); experimentalFeatureSettings.require(Xp::GitHashing);
return "git:"; return "git:";
default: default:
throw Error("impossible, caught both cases"); assert(false);
} }
} }
std::string_view ContentAddressMethod::render() const std::string_view ContentAddressMethod::render() const
{ {
return std::visit(overloaded { switch (raw) {
[](TextIngestionMethod) -> std::string_view { return "text"; }, case ContentAddressMethod::Raw::Text:
[](FileIngestionMethod m2) { return "text";
/* Not prefixed for back compat with things that couldn't produce text before. */ case ContentAddressMethod::Raw::Flat:
return renderFileIngestionMethod(m2); case ContentAddressMethod::Raw::NixArchive:
}, case ContentAddressMethod::Raw::Git:
}, raw); return renderFileIngestionMethod(getFileIngestionMethod());
default:
assert(false);
}
}
/**
* **Not surjective**
*
* This is not exposed because `FileIngestionMethod::Flat` maps to
* `ContentAddressMethod::Raw::Flat` and
* `ContentAddressMethod::Raw::Text` alike. We can thus only safely use
* this when the latter is ruled out (e.g. because it is already
* handled).
*/
static ContentAddressMethod fileIngestionMethodToContentAddressMethod(FileIngestionMethod m)
{
switch (m) {
case FileIngestionMethod::Flat:
return ContentAddressMethod::Raw::Flat;
case FileIngestionMethod::NixArchive:
return ContentAddressMethod::Raw::NixArchive;
case FileIngestionMethod::Git:
return ContentAddressMethod::Raw::Git;
default:
assert(false);
}
} }
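As an illustration of the non-surjectivity the comment above warns about, a minimal sketch (assuming the definitions introduced in this hunk; `fileIngestionMethodToContentAddressMethod` is file-local, so this would have to live in the same translation unit):

    ContentAddressMethod m = ContentAddressMethod::Raw::Text;
    FileIngestionMethod fim = m.getFileIngestionMethod();                       // degrades to FileIngestionMethod::Flat
    ContentAddressMethod back = fileIngestionMethodToContentAddressMethod(fim); // yields Raw::Flat
    assert(back == ContentAddressMethod::Raw::Flat);                            // the "Text" tag is lost on the round trip

This is exactly why the helper is only used once `Raw::Text` has already been ruled out or handled.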
ContentAddressMethod ContentAddressMethod::parse(std::string_view m) ContentAddressMethod ContentAddressMethod::parse(std::string_view m)
{ {
if (m == "text") if (m == "text")
return TextIngestionMethod {}; return ContentAddressMethod::Raw::Text;
else else
return parseFileIngestionMethod(m); return fileIngestionMethodToContentAddressMethod(
parseFileIngestionMethod(m));
} }
std::string_view ContentAddressMethod::renderPrefix() const std::string_view ContentAddressMethod::renderPrefix() const
{ {
return std::visit(overloaded { switch (raw) {
[](TextIngestionMethod) -> std::string_view { return "text:"; }, case ContentAddressMethod::Raw::Text:
[](FileIngestionMethod m2) { return "text:";
/* Not prefixed for back compat with things that couldn't produce text before. */ case ContentAddressMethod::Raw::Flat:
return makeFileIngestionPrefix(m2); case ContentAddressMethod::Raw::NixArchive:
}, case ContentAddressMethod::Raw::Git:
}, raw); return makeFileIngestionPrefix(getFileIngestionMethod());
default:
assert(false);
}
} }
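For illustration, the prefixes the new switch produces, per the `makeFileIngestionPrefix` cases shown above (a sketch, not part of the patch):

    ContentAddressMethod text = ContentAddressMethod::Raw::Text;
    ContentAddressMethod flat = ContentAddressMethod::Raw::Flat;
    ContentAddressMethod nar  = ContentAddressMethod::Raw::NixArchive;
    text.renderPrefix();   // "text:"
    flat.renderPrefix();   // ""    (deliberately unprefixed for backwards compatibility)
    nar.renderPrefix();    // "r:"
    // Raw::Git renders as "git:", but only with the git-hashing experimental feature enabled.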
ContentAddressMethod ContentAddressMethod::parsePrefix(std::string_view & m) ContentAddressMethod ContentAddressMethod::parsePrefix(std::string_view & m)
{ {
if (splitPrefix(m, "r:")) { if (splitPrefix(m, "r:")) {
return FileIngestionMethod::Recursive; return ContentAddressMethod::Raw::NixArchive;
} }
else if (splitPrefix(m, "git:")) { else if (splitPrefix(m, "git:")) {
experimentalFeatureSettings.require(Xp::GitHashing); experimentalFeatureSettings.require(Xp::GitHashing);
return FileIngestionMethod::Git; return ContentAddressMethod::Raw::Git;
} }
else if (splitPrefix(m, "text:")) { else if (splitPrefix(m, "text:")) {
return TextIngestionMethod {}; return ContentAddressMethod::Raw::Text;
}
return ContentAddressMethod::Raw::Flat;
}
/**
* This is slightly more mindful of forward compat in that it uses `fixed:`
* rather than just doing a raw empty prefix or `r:`, which doesn't "save room"
* for future changes very well.
*/
static std::string renderPrefixModern(const ContentAddressMethod & ca)
{
switch (ca.raw) {
case ContentAddressMethod::Raw::Text:
return "text:";
case ContentAddressMethod::Raw::Flat:
case ContentAddressMethod::Raw::NixArchive:
case ContentAddressMethod::Raw::Git:
return "fixed:" + makeFileIngestionPrefix(ca.getFileIngestionMethod());
default:
assert(false);
} }
return FileIngestionMethod::Flat;
} }
std::string ContentAddressMethod::renderWithAlgo(HashAlgorithm ha) const std::string ContentAddressMethod::renderWithAlgo(HashAlgorithm ha) const
{ {
return std::visit(overloaded { return renderPrefixModern(*this) + printHashAlgo(ha);
[&](const TextIngestionMethod & th) {
return std::string{"text:"} + printHashAlgo(ha);
},
[&](const FileIngestionMethod & fim) {
return "fixed:" + makeFileIngestionPrefix(fim) + printHashAlgo(ha);
}
}, raw);
} }
FileIngestionMethod ContentAddressMethod::getFileIngestionMethod() const FileIngestionMethod ContentAddressMethod::getFileIngestionMethod() const
{ {
return std::visit(overloaded { switch (raw) {
[&](const TextIngestionMethod & th) { case ContentAddressMethod::Raw::Flat:
return FileIngestionMethod::Flat; return FileIngestionMethod::Flat;
}, case ContentAddressMethod::Raw::NixArchive:
[&](const FileIngestionMethod & fim) { return FileIngestionMethod::NixArchive;
return fim; case ContentAddressMethod::Raw::Git:
} return FileIngestionMethod::Git;
}, raw); case ContentAddressMethod::Raw::Text:
return FileIngestionMethod::Flat;
default:
assert(false);
}
} }
std::string ContentAddress::render() const std::string ContentAddress::render() const
{ {
return std::visit(overloaded { return renderPrefixModern(method) + this->hash.to_string(HashFormat::Nix32, true);
[](const TextIngestionMethod &) -> std::string {
return "text:";
},
[](const FileIngestionMethod & method) {
return "fixed:"
+ makeFileIngestionPrefix(method);
},
}, method.raw)
+ this->hash.to_string(HashFormat::Nix32, true);
} }
/** /**
@ -130,17 +168,17 @@ static std::pair<ContentAddressMethod, HashAlgorithm> parseContentAddressMethodP
// No parsing of the ingestion method, "text" only supports flat. // No parsing of the ingestion method, "text" only supports flat.
HashAlgorithm hashAlgo = parseHashAlgorithm_(); HashAlgorithm hashAlgo = parseHashAlgorithm_();
return { return {
TextIngestionMethod {}, ContentAddressMethod::Raw::Text,
std::move(hashAlgo), std::move(hashAlgo),
}; };
} else if (prefix == "fixed") { } else if (prefix == "fixed") {
// Parse method // Parse method
auto method = FileIngestionMethod::Flat; auto method = ContentAddressMethod::Raw::Flat;
if (splitPrefix(rest, "r:")) if (splitPrefix(rest, "r:"))
method = FileIngestionMethod::Recursive; method = ContentAddressMethod::Raw::NixArchive;
else if (splitPrefix(rest, "git:")) { else if (splitPrefix(rest, "git:")) {
experimentalFeatureSettings.require(Xp::GitHashing); experimentalFeatureSettings.require(Xp::GitHashing);
method = FileIngestionMethod::Git; method = ContentAddressMethod::Raw::Git;
} }
HashAlgorithm hashAlgo = parseHashAlgorithm_(); HashAlgorithm hashAlgo = parseHashAlgorithm_();
return { return {
@ -201,57 +239,58 @@ size_t StoreReferences::size() const
ContentAddressWithReferences ContentAddressWithReferences::withoutRefs(const ContentAddress & ca) noexcept ContentAddressWithReferences ContentAddressWithReferences::withoutRefs(const ContentAddress & ca) noexcept
{ {
return std::visit(overloaded { switch (ca.method.raw) {
[&](const TextIngestionMethod &) -> ContentAddressWithReferences { case ContentAddressMethod::Raw::Text:
return TextInfo { return TextInfo {
.hash = ca.hash, .hash = ca.hash,
.references = {}, .references = {},
}; };
}, case ContentAddressMethod::Raw::Flat:
[&](const FileIngestionMethod & method) -> ContentAddressWithReferences { case ContentAddressMethod::Raw::NixArchive:
return FixedOutputInfo { case ContentAddressMethod::Raw::Git:
.method = method, return FixedOutputInfo {
.hash = ca.hash, .method = ca.method.getFileIngestionMethod(),
.references = {}, .hash = ca.hash,
}; .references = {},
}, };
}, ca.method.raw); default:
assert(false);
}
} }
ContentAddressWithReferences ContentAddressWithReferences::fromParts( ContentAddressWithReferences ContentAddressWithReferences::fromParts(
ContentAddressMethod method, Hash hash, StoreReferences refs) ContentAddressMethod method, Hash hash, StoreReferences refs)
{ {
return std::visit(overloaded { switch (method.raw) {
[&](TextIngestionMethod _) -> ContentAddressWithReferences { case ContentAddressMethod::Raw::Text:
if (refs.self) if (refs.self)
throw Error("self-reference not allowed with text hashing"); throw Error("self-reference not allowed with text hashing");
return ContentAddressWithReferences { return TextInfo {
TextInfo { .hash = std::move(hash),
.hash = std::move(hash), .references = std::move(refs.others),
.references = std::move(refs.others), };
} case ContentAddressMethod::Raw::Flat:
}; case ContentAddressMethod::Raw::NixArchive:
}, case ContentAddressMethod::Raw::Git:
[&](FileIngestionMethod m2) -> ContentAddressWithReferences { return FixedOutputInfo {
return ContentAddressWithReferences { .method = method.getFileIngestionMethod(),
FixedOutputInfo { .hash = std::move(hash),
.method = m2, .references = std::move(refs),
.hash = std::move(hash), };
.references = std::move(refs), default:
} assert(false);
}; }
},
}, method.raw);
} }
ContentAddressMethod ContentAddressWithReferences::getMethod() const ContentAddressMethod ContentAddressWithReferences::getMethod() const
{ {
return std::visit(overloaded { return std::visit(overloaded {
[](const TextInfo & th) -> ContentAddressMethod { [](const TextInfo & th) -> ContentAddressMethod {
return TextIngestionMethod {}; return ContentAddressMethod::Raw::Text;
}, },
[](const FixedOutputInfo & fsh) -> ContentAddressMethod { [](const FixedOutputInfo & fsh) -> ContentAddressMethod {
return fsh.method; return fileIngestionMethodToContentAddressMethod(
fsh.method);
}, },
}, raw); }, raw);
} }

View File

@ -5,7 +5,6 @@
#include "hash.hh" #include "hash.hh"
#include "path.hh" #include "path.hh"
#include "file-content-address.hh" #include "file-content-address.hh"
#include "comparator.hh"
#include "variant-wrapper.hh" #include "variant-wrapper.hh"
namespace nix { namespace nix {
@ -14,24 +13,6 @@ namespace nix {
* Content addressing method * Content addressing method
*/ */
/* We only have one way to hash text with references, so this is a single-value
type, mainly useful with std::variant.
*/
/**
* The single way we can serialize "text" file system objects.
*
* Somewhat obscure, used by \ref Derivation derivations and
* `builtins.toFile` currently.
*
* TextIngestionMethod is identical to FileIngestionMethod::Fixed except that
* the former may not have self-references and is tagged `text:${algo}:${hash}`
* rather than `fixed:${algo}:${hash}`. The contents of the store path are
* ingested and hashed identically, aside from the slightly different tag and
* restriction on self-references.
*/
struct TextIngestionMethod : std::monostate { };
/** /**
* Compute the prefix to the hash algorithm which indicates how the * Compute the prefix to the hash algorithm which indicates how the
* files were ingested. * files were ingested.
@ -48,14 +29,51 @@ std::string_view makeFileIngestionPrefix(FileIngestionMethod m);
*/ */
struct ContentAddressMethod struct ContentAddressMethod
{ {
typedef std::variant< enum struct Raw {
TextIngestionMethod, /**
FileIngestionMethod * Calculate a store path using the `FileIngestionMethod::Flat`
> Raw; * hash of the file system objects, and references.
*
* See `store-object/content-address.md#method-flat` in the
* manual.
*/
Flat,
/**
* Calculate a store path using the
* `FileIngestionMethod::NixArchive` hash of the file system
* objects, and references.
*
* See `store-object/content-address.md#method-nix-archive` in the
* manual.
*/
NixArchive,
/**
* Calculate a store path using the `FileIngestionMethod::Git`
* hash of the file system objects, and references.
*
* Part of `ExperimentalFeature::GitHashing`.
*
* See `store-object/content-address.md#method-git` in the
* manual.
*/
Git,
/**
* Calculate a store path using the `FileIngestionMethod::Flat`
* hash of the file system objects, and references, but in a
* different way than `ContentAddressMethod::Raw::Flat`.
*
* See `store-object/content-address.md#method-text` in the
* manual.
*/
Text,
};
Raw raw; Raw raw;
GENERATE_CMP(ContentAddressMethod, me->raw); auto operator <=>(const ContentAddressMethod &) const = default;
MAKE_WRAPPER_CONSTRUCTOR(ContentAddressMethod); MAKE_WRAPPER_CONSTRUCTOR(ContentAddressMethod);
@ -141,7 +159,7 @@ struct ContentAddress
*/ */
Hash hash; Hash hash;
GENERATE_CMP(ContentAddress, me->method, me->hash); auto operator <=>(const ContentAddress &) const = default;
/** /**
* Compute the content-addressability assertion * Compute the content-addressability assertion
@ -200,7 +218,7 @@ struct StoreReferences
*/ */
size_t size() const; size_t size() const;
GENERATE_CMP(StoreReferences, me->self, me->others); auto operator <=>(const StoreReferences &) const = default;
}; };
// This matches the additional info that we need for makeTextPath // This matches the additional info that we need for makeTextPath
@ -217,7 +235,7 @@ struct TextInfo
*/ */
StorePathSet references; StorePathSet references;
GENERATE_CMP(TextInfo, me->hash, me->references); auto operator <=>(const TextInfo &) const = default;
}; };
struct FixedOutputInfo struct FixedOutputInfo
@ -237,7 +255,7 @@ struct FixedOutputInfo
*/ */
StoreReferences references; StoreReferences references;
GENERATE_CMP(FixedOutputInfo, me->hash, me->references); auto operator <=>(const FixedOutputInfo &) const = default;
}; };
/** /**
@ -254,7 +272,7 @@ struct ContentAddressWithReferences
Raw raw; Raw raw;
GENERATE_CMP(ContentAddressWithReferences, me->raw); auto operator <=>(const ContentAddressWithReferences &) const = default;
MAKE_WRAPPER_CONSTRUCTOR(ContentAddressWithReferences); MAKE_WRAPPER_CONSTRUCTOR(ContentAddressWithReferences);

View File

@ -415,12 +415,12 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
case FileIngestionMethod::Flat: case FileIngestionMethod::Flat:
dumpMethod = FileSerialisationMethod::Flat; dumpMethod = FileSerialisationMethod::Flat;
break; break;
case FileIngestionMethod::Recursive: case FileIngestionMethod::NixArchive:
dumpMethod = FileSerialisationMethod::Recursive; dumpMethod = FileSerialisationMethod::NixArchive;
break; break;
case FileIngestionMethod::Git: case FileIngestionMethod::Git:
// Use NAR; Git is not a serialization method // Use NAR; Git is not a serialization method
dumpMethod = FileSerialisationMethod::Recursive; dumpMethod = FileSerialisationMethod::NixArchive;
break; break;
default: default:
assert(false); assert(false);
@ -435,19 +435,21 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
} else { } else {
HashAlgorithm hashAlgo; HashAlgorithm hashAlgo;
std::string baseName; std::string baseName;
FileIngestionMethod method; ContentAddressMethod method;
{ {
bool fixed; bool fixed;
uint8_t recursive; uint8_t recursive;
std::string hashAlgoRaw; std::string hashAlgoRaw;
from >> baseName >> fixed /* obsolete */ >> recursive >> hashAlgoRaw; from >> baseName >> fixed /* obsolete */ >> recursive >> hashAlgoRaw;
if (recursive > (uint8_t) FileIngestionMethod::Recursive) if (recursive > true)
throw Error("unsupported FileIngestionMethod with value of %i; you may need to upgrade nix-daemon", recursive); throw Error("unsupported FileIngestionMethod with value of %i; you may need to upgrade nix-daemon", recursive);
method = FileIngestionMethod { recursive }; method = recursive
? ContentAddressMethod::Raw::NixArchive
: ContentAddressMethod::Raw::Flat;
/* Compatibility hack. */ /* Compatibility hack. */
if (!fixed) { if (!fixed) {
hashAlgoRaw = "sha256"; hashAlgoRaw = "sha256";
method = FileIngestionMethod::Recursive; method = ContentAddressMethod::Raw::NixArchive;
} }
hashAlgo = parseHashAlgo(hashAlgoRaw); hashAlgo = parseHashAlgo(hashAlgoRaw);
} }
@ -468,7 +470,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
}); });
logger->startWork(); logger->startWork();
auto path = store->addToStoreFromDump( auto path = store->addToStoreFromDump(
*dumpSource, baseName, FileSerialisationMethod::Recursive, method, hashAlgo); *dumpSource, baseName, FileSerialisationMethod::NixArchive, method, hashAlgo);
logger->stopWork(); logger->stopWork();
to << store->printStorePath(path); to << store->printStorePath(path);
@ -500,7 +502,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
logger->startWork(); logger->startWork();
auto path = ({ auto path = ({
StringSource source { s }; StringSource source { s };
store->addToStoreFromDump(source, suffix, FileSerialisationMethod::Flat, TextIngestionMethod {}, HashAlgorithm::SHA256, refs, NoRepair); store->addToStoreFromDump(source, suffix, FileSerialisationMethod::Flat, ContentAddressMethod::Raw::Text, HashAlgorithm::SHA256, refs, NoRepair);
}); });
logger->stopWork(); logger->stopWork();
to << store->printStorePath(path); to << store->printStorePath(path);

View File

@ -150,7 +150,7 @@ StorePath writeDerivation(Store & store,
}) })
: ({ : ({
StringSource s { contents }; StringSource s { contents };
store.addToStoreFromDump(s, suffix, FileSerialisationMethod::Flat, TextIngestionMethod {}, HashAlgorithm::SHA256, references, repair); store.addToStoreFromDump(s, suffix, FileSerialisationMethod::Flat, ContentAddressMethod::Raw::Text, HashAlgorithm::SHA256, references, repair);
}); });
} }
@ -274,7 +274,7 @@ static DerivationOutput parseDerivationOutput(
{ {
if (hashAlgoStr != "") { if (hashAlgoStr != "") {
ContentAddressMethod method = ContentAddressMethod::parsePrefix(hashAlgoStr); ContentAddressMethod method = ContentAddressMethod::parsePrefix(hashAlgoStr);
if (method == TextIngestionMethod {}) if (method == ContentAddressMethod::Raw::Text)
xpSettings.require(Xp::DynamicDerivations); xpSettings.require(Xp::DynamicDerivations);
const auto hashAlgo = parseHashAlgo(hashAlgoStr); const auto hashAlgo = parseHashAlgo(hashAlgoStr);
if (hashS == "impure") { if (hashS == "impure") {
@ -1249,7 +1249,7 @@ DerivationOutput DerivationOutput::fromJSON(
auto methodAlgo = [&]() -> std::pair<ContentAddressMethod, HashAlgorithm> { auto methodAlgo = [&]() -> std::pair<ContentAddressMethod, HashAlgorithm> {
auto & method_ = getString(valueAt(json, "method")); auto & method_ = getString(valueAt(json, "method"));
ContentAddressMethod method = ContentAddressMethod::parse(method_); ContentAddressMethod method = ContentAddressMethod::parse(method_);
if (method == TextIngestionMethod {}) if (method == ContentAddressMethod::Raw::Text)
xpSettings.require(Xp::DynamicDerivations); xpSettings.require(Xp::DynamicDerivations);
auto & hashAlgo_ = getString(valueAt(json, "hashAlgo")); auto & hashAlgo_ = getString(valueAt(json, "hashAlgo"));

View File

@ -64,8 +64,8 @@ struct DummyStore : public virtual DummyStoreConfig, public virtual Store
virtual StorePath addToStoreFromDump( virtual StorePath addToStoreFromDump(
Source & dump, Source & dump,
std::string_view name, std::string_view name,
FileSerialisationMethod dumpMethod = FileSerialisationMethod::Recursive, FileSerialisationMethod dumpMethod = FileSerialisationMethod::NixArchive,
ContentAddressMethod hashMethod = FileIngestionMethod::Recursive, ContentAddressMethod hashMethod = FileIngestionMethod::NixArchive,
HashAlgorithm hashAlgo = HashAlgorithm::SHA256, HashAlgorithm hashAlgo = HashAlgorithm::SHA256,
const StorePathSet & references = StorePathSet(), const StorePathSet & references = StorePathSet(),
RepairFlag repair = NoRepair) override RepairFlag repair = NoRepair) override

View File

@ -1,5 +1,6 @@
#include "filetransfer.hh" #include "filetransfer.hh"
#include "globals.hh" #include "globals.hh"
#include "config-global.hh"
#include "store-api.hh" #include "store-api.hh"
#include "s3.hh" #include "s3.hh"
#include "compression.hh" #include "compression.hh"

View File

@ -1,4 +1,5 @@
#include "globals.hh" #include "globals.hh"
#include "config-global.hh"
#include "current-process.hh" #include "current-process.hh"
#include "archive.hh" #include "archive.hh"
#include "args.hh" #include "args.hh"
@ -123,12 +124,12 @@ Settings::Settings()
}; };
} }
void loadConfFile() void loadConfFile(AbstractConfig & config)
{ {
auto applyConfigFile = [&](const Path & path) { auto applyConfigFile = [&](const Path & path) {
try { try {
std::string contents = readFile(path); std::string contents = readFile(path);
globalConfig.applyConfig(contents, path); config.applyConfig(contents, path);
} catch (SystemError &) { } } catch (SystemError &) { }
}; };
@ -136,7 +137,7 @@ void loadConfFile()
/* We only want to send overrides to the daemon, i.e. stuff from /* We only want to send overrides to the daemon, i.e. stuff from
~/.nix/nix.conf or the command line. */ ~/.nix/nix.conf or the command line. */
globalConfig.resetOverridden(); config.resetOverridden();
auto files = settings.nixUserConfFiles; auto files = settings.nixUserConfFiles;
for (auto file = files.rbegin(); file != files.rend(); file++) { for (auto file = files.rbegin(); file != files.rend(); file++) {
@ -145,7 +146,7 @@ void loadConfFile()
auto nixConfEnv = getEnv("NIX_CONFIG"); auto nixConfEnv = getEnv("NIX_CONFIG");
if (nixConfEnv.has_value()) { if (nixConfEnv.has_value()) {
globalConfig.applyConfig(nixConfEnv.value(), "NIX_CONFIG"); config.applyConfig(nixConfEnv.value(), "NIX_CONFIG");
} }
} }
@ -437,7 +438,7 @@ void initLibStore(bool loadConfig) {
initLibUtil(); initLibUtil();
if (loadConfig) if (loadConfig)
loadConfFile(); loadConfFile(globalConfig);
preloadNSS(); preloadNSS();

View File

@ -1284,7 +1284,13 @@ extern Settings settings;
*/ */
void initPlugins(); void initPlugins();
void loadConfFile(); /**
* Load the configuration (from `nix.conf`, `NIX_CONFIG`, etc.) into the
* given configuration object.
*
* Usually called with `globalConfig`.
*/
void loadConfFile(AbstractConfig & config);
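For reference, a typical call with the new signature looks like the one in `initLibStore` earlier in this patch; passing a different `AbstractConfig` instance is a hypothetical use the new parameter allows:

    // Load nix.conf, the user config files, and NIX_CONFIG into the process-wide settings:
    loadConfFile(globalConfig);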
// Used by the Settings constructor // Used by the Settings constructor
std::vector<Path> getUserConfigFiles(); std::vector<Path> getUserConfigFiles();

View File

@ -76,8 +76,8 @@ struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Stor
virtual StorePath addToStoreFromDump( virtual StorePath addToStoreFromDump(
Source & dump, Source & dump,
std::string_view name, std::string_view name,
FileSerialisationMethod dumpMethod = FileSerialisationMethod::Recursive, FileSerialisationMethod dumpMethod = FileSerialisationMethod::NixArchive,
ContentAddressMethod hashMethod = FileIngestionMethod::Recursive, ContentAddressMethod hashMethod = FileIngestionMethod::NixArchive,
HashAlgorithm hashAlgo = HashAlgorithm::SHA256, HashAlgorithm hashAlgo = HashAlgorithm::SHA256,
const StorePathSet & references = StorePathSet(), const StorePathSet & references = StorePathSet(),
RepairFlag repair = NoRepair) override RepairFlag repair = NoRepair) override

View File

@ -1155,7 +1155,7 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
auto fim = specified.method.getFileIngestionMethod(); auto fim = specified.method.getFileIngestionMethod();
switch (fim) { switch (fim) {
case FileIngestionMethod::Flat: case FileIngestionMethod::Flat:
case FileIngestionMethod::Recursive: case FileIngestionMethod::NixArchive:
{ {
HashModuloSink caSink { HashModuloSink caSink {
specified.hash.algo, specified.hash.algo,
@ -1253,7 +1253,7 @@ StorePath LocalStore::addToStoreFromDump(
std::filesystem::path tempDir; std::filesystem::path tempDir;
AutoCloseFD tempDirFd; AutoCloseFD tempDirFd;
bool methodsMatch = ContentAddressMethod(FileIngestionMethod(dumpMethod)) == hashMethod; bool methodsMatch = static_cast<FileIngestionMethod>(dumpMethod) == hashMethod.getFileIngestionMethod();
/* If the methods don't match, our streaming hash of the dump is the /* If the methods don't match, our streaming hash of the dump is the
wrong sort, and we need to rehash. */ wrong sort, and we need to rehash. */
@ -1314,7 +1314,7 @@ StorePath LocalStore::addToStoreFromDump(
auto fim = hashMethod.getFileIngestionMethod(); auto fim = hashMethod.getFileIngestionMethod();
switch (fim) { switch (fim) {
case FileIngestionMethod::Flat: case FileIngestionMethod::Flat:
case FileIngestionMethod::Recursive: case FileIngestionMethod::NixArchive:
restorePath(realPath, dumpSource, (FileSerialisationMethod) fim); restorePath(realPath, dumpSource, (FileSerialisationMethod) fim);
break; break;
case FileIngestionMethod::Git: case FileIngestionMethod::Git:
@ -1330,7 +1330,7 @@ StorePath LocalStore::addToStoreFromDump(
/* For computing the nar hash. In recursive SHA-256 mode, this /* For computing the nar hash. In recursive SHA-256 mode, this
is the same as the store hash, so no need to do it again. */ is the same as the store hash, so no need to do it again. */
auto narHash = std::pair { dumpHash, size }; auto narHash = std::pair { dumpHash, size };
if (dumpMethod != FileSerialisationMethod::Recursive || hashAlgo != HashAlgorithm::SHA256) { if (dumpMethod != FileSerialisationMethod::NixArchive || hashAlgo != HashAlgorithm::SHA256) {
HashSink narSink { HashAlgorithm::SHA256 }; HashSink narSink { HashAlgorithm::SHA256 };
dumpPath(realPath, narSink); dumpPath(realPath, narSink);
narHash = narSink.finish(); narHash = narSink.finish();
@ -1423,7 +1423,7 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
PosixSourceAccessor accessor; PosixSourceAccessor accessor;
std::string hash = hashPath( std::string hash = hashPath(
PosixSourceAccessor::createAtRoot(link.path()), PosixSourceAccessor::createAtRoot(link.path()),
FileIngestionMethod::Recursive, HashAlgorithm::SHA256).first.to_string(HashFormat::Nix32, false); FileIngestionMethod::NixArchive, HashAlgorithm::SHA256).first.to_string(HashFormat::Nix32, false);
if (hash != name.string()) { if (hash != name.string()) {
printError("link '%s' was modified! expected hash '%s', got '%s'", printError("link '%s' was modified! expected hash '%s', got '%s'",
link.path(), name, hash); link.path(), name, hash);

View File

@ -52,7 +52,7 @@ std::map<StorePath, StorePath> makeContentAddressed(
dstStore, dstStore,
path.name(), path.name(),
FixedOutputInfo { FixedOutputInfo {
.method = FileIngestionMethod::Recursive, .method = FileIngestionMethod::NixArchive,
.hash = narModuloHash, .hash = narModuloHash,
.references = std::move(refs), .references = std::move(refs),
}, },

View File

@ -151,7 +151,7 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats,
Hash hash = ({ Hash hash = ({
hashPath( hashPath(
{make_ref<PosixSourceAccessor>(), CanonPath(path)}, {make_ref<PosixSourceAccessor>(), CanonPath(path)},
FileSerialisationMethod::Recursive, HashAlgorithm::SHA256).first; FileSerialisationMethod::NixArchive, HashAlgorithm::SHA256).first;
}); });
debug("'%1%' has hash '%2%'", path, hash.to_string(HashFormat::Nix32, true)); debug("'%1%' has hash '%2%'", path, hash.to_string(HashFormat::Nix32, true));
@ -165,7 +165,7 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats,
|| (repair && hash != ({ || (repair && hash != ({
hashPath( hashPath(
PosixSourceAccessor::createAtRoot(linkPath), PosixSourceAccessor::createAtRoot(linkPath),
FileSerialisationMethod::Recursive, HashAlgorithm::SHA256).first; FileSerialisationMethod::NixArchive, HashAlgorithm::SHA256).first;
}))) })))
{ {
// XXX: Consider overwriting linkPath with our valid version. // XXX: Consider overwriting linkPath with our valid version.

View File

@ -1,7 +1,6 @@
{ lib { lib
, stdenv , stdenv
, releaseTools , releaseTools
, fileset
, meson , meson
, ninja , ninja
@ -13,7 +12,6 @@
, aws-sdk-cpp , aws-sdk-cpp
, libseccomp , libseccomp
, nlohmann_json , nlohmann_json
, man
, sqlite , sqlite
, busybox-sandbox-shell ? null , busybox-sandbox-shell ? null
@ -21,7 +19,6 @@
# Configuration Options # Configuration Options
, versionSuffix ? "" , versionSuffix ? ""
, officialRelease ? false
# Check test coverage of Nix. Probably want to use with at least # Check test coverage of Nix. Probably want to use with at least
# one of `doCheck` or `doInstallCheck` enabled. # one of `doCheck` or `doInstallCheck` enabled.
@ -32,6 +29,8 @@
}: }:
let let
inherit (lib) fileset;
version = lib.fileContents ./.version + versionSuffix; version = lib.fileContents ./.version + versionSuffix;
mkDerivation = mkDerivation =

View File

@ -48,15 +48,21 @@ std::optional<ContentAddressWithReferences> ValidPathInfo::contentAddressWithRef
if (! ca) if (! ca)
return std::nullopt; return std::nullopt;
return std::visit(overloaded { switch (ca->method.raw) {
[&](const TextIngestionMethod &) -> ContentAddressWithReferences { case ContentAddressMethod::Raw::Text:
{
assert(references.count(path) == 0); assert(references.count(path) == 0);
return TextInfo { return TextInfo {
.hash = ca->hash, .hash = ca->hash,
.references = references, .references = references,
}; };
}, }
[&](const FileIngestionMethod & m2) -> ContentAddressWithReferences {
case ContentAddressMethod::Raw::Flat:
case ContentAddressMethod::Raw::NixArchive:
case ContentAddressMethod::Raw::Git:
default:
{
auto refs = references; auto refs = references;
bool hasSelfReference = false; bool hasSelfReference = false;
if (refs.count(path)) { if (refs.count(path)) {
@ -64,15 +70,15 @@ std::optional<ContentAddressWithReferences> ValidPathInfo::contentAddressWithRef
refs.erase(path); refs.erase(path);
} }
return FixedOutputInfo { return FixedOutputInfo {
.method = m2, .method = ca->method.getFileIngestionMethod(),
.hash = ca->hash, .hash = ca->hash,
.references = { .references = {
.others = std::move(refs), .others = std::move(refs),
.self = hasSelfReference, .self = hasSelfReference,
}, },
}; };
}, }
}, ca->method.raw); }
} }
bool ValidPathInfo::isContentAddressed(const Store & store) const bool ValidPathInfo::isContentAddressed(const Store & store) const
@ -127,22 +133,18 @@ ValidPathInfo::ValidPathInfo(
: UnkeyedValidPathInfo(narHash) : UnkeyedValidPathInfo(narHash)
, path(store.makeFixedOutputPathFromCA(name, ca)) , path(store.makeFixedOutputPathFromCA(name, ca))
{ {
this->ca = ContentAddress {
.method = ca.getMethod(),
.hash = ca.getHash(),
};
std::visit(overloaded { std::visit(overloaded {
[this](TextInfo && ti) { [this](TextInfo && ti) {
this->references = std::move(ti.references); this->references = std::move(ti.references);
this->ca = ContentAddress {
.method = TextIngestionMethod {},
.hash = std::move(ti.hash),
};
}, },
[this](FixedOutputInfo && foi) { [this](FixedOutputInfo && foi) {
this->references = std::move(foi.references.others); this->references = std::move(foi.references.others);
if (foi.references.self) if (foi.references.self)
this->references.insert(path); this->references.insert(path);
this->ca = ContentAddress {
.method = std::move(foi.method),
.hash = std::move(foi.hash),
};
}, },
}, std::move(ca).raw); }, std::move(ca).raw);
} }

View File

@ -2,25 +2,24 @@
namespace nix { namespace nix {
static void checkName(std::string_view path, std::string_view name) void checkName(std::string_view name)
{ {
if (name.empty()) if (name.empty())
throw BadStorePath("store path '%s' has an empty name", path); throw BadStorePathName("name must not be empty");
if (name.size() > StorePath::MaxPathLen) if (name.size() > StorePath::MaxPathLen)
throw BadStorePath("store path '%s' has a name longer than %d characters", throw BadStorePathName("name '%s' must be no longer than %d characters", name, StorePath::MaxPathLen);
path, StorePath::MaxPathLen);
// See nameRegexStr for the definition // See nameRegexStr for the definition
if (name[0] == '.') { if (name[0] == '.') {
// check against "." and "..", followed by end or dash // check against "." and "..", followed by end or dash
if (name.size() == 1) if (name.size() == 1)
throw BadStorePath("store path '%s' has invalid name '%s'", path, name); throw BadStorePathName("name '%s' is not valid", name);
if (name[1] == '-') if (name[1] == '-')
throw BadStorePath("store path '%s' has invalid name '%s': first dash-separated component must not be '%s'", path, name, "."); throw BadStorePathName("name '%s' is not valid: first dash-separated component must not be '%s'", name, ".");
if (name[1] == '.') { if (name[1] == '.') {
if (name.size() == 2) if (name.size() == 2)
throw BadStorePath("store path '%s' has invalid name '%s'", path, name); throw BadStorePathName("name '%s' is not valid", name);
if (name[2] == '-') if (name[2] == '-')
throw BadStorePath("store path '%s' has invalid name '%s': first dash-separated component must not be '%s'", path, name, ".."); throw BadStorePathName("name '%s' is not valid: first dash-separated component must not be '%s'", name, "..");
} }
} }
for (auto c : name) for (auto c : name)
@ -28,7 +27,16 @@ static void checkName(std::string_view path, std::string_view name)
|| (c >= 'a' && c <= 'z') || (c >= 'a' && c <= 'z')
|| (c >= 'A' && c <= 'Z') || (c >= 'A' && c <= 'Z')
|| c == '+' || c == '-' || c == '.' || c == '_' || c == '?' || c == '=')) || c == '+' || c == '-' || c == '.' || c == '_' || c == '?' || c == '='))
throw BadStorePath("store path '%s' contains illegal character '%s'", path, c); throw BadStorePathName("name '%s' contains illegal character '%s'", name, c);
}
static void checkPathName(std::string_view path, std::string_view name)
{
try {
checkName(name);
} catch (BadStorePathName & e) {
throw BadStorePath("path '%s' is not a valid store path: %s", path, Uncolored(e.message()));
}
} }
StorePath::StorePath(std::string_view _baseName) StorePath::StorePath(std::string_view _baseName)
@ -40,13 +48,13 @@ StorePath::StorePath(std::string_view _baseName)
if (c == 'e' || c == 'o' || c == 'u' || c == 't' if (c == 'e' || c == 'o' || c == 'u' || c == 't'
|| !((c >= '0' && c <= '9') || (c >= 'a' && c <= 'z'))) || !((c >= '0' && c <= '9') || (c >= 'a' && c <= 'z')))
throw BadStorePath("store path '%s' contains illegal base-32 character '%s'", baseName, c); throw BadStorePath("store path '%s' contains illegal base-32 character '%s'", baseName, c);
checkName(baseName, name()); checkPathName(baseName, name());
} }
StorePath::StorePath(const Hash & hash, std::string_view _name) StorePath::StorePath(const Hash & hash, std::string_view _name)
: baseName((hash.to_string(HashFormat::Nix32, false) + "-").append(std::string(_name))) : baseName((hash.to_string(HashFormat::Nix32, false) + "-").append(std::string(_name)))
{ {
checkName(baseName, name()); checkPathName(baseName, name());
} }
bool StorePath::isDerivation() const noexcept bool StorePath::isDerivation() const noexcept

View File

@ -9,6 +9,13 @@ namespace nix {
struct Hash; struct Hash;
/**
* Check whether a name is a valid store path name.
*
* @throws BadStorePathName if the name is invalid. The message is of the format "name %s is not valid, for this specific reason".
*/
void checkName(std::string_view name);
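A usage sketch, mirroring the `checkPathName` wrapper added in `path.cc` above; callers are expected to catch `BadStorePathName` and re-throw with their own context:

    try {
        checkName(name);
    } catch (BadStorePathName & e) {
        // Wrap the name-level error with path-level context, as checkPathName does:
        throw BadStorePath("path '%s' is not a valid store path: %s", path, Uncolored(e.message()));
    }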
/** /**
* \ref StorePath "Store path" is the fundamental reference type of Nix. * \ref StorePath "Store path" is the fundamental reference type of Nix.
* A store path refers to a Store object. * A store path refers to a Store object.
@ -31,8 +38,10 @@ public:
StorePath() = delete; StorePath() = delete;
/** @throws BadStorePath */
StorePath(std::string_view baseName); StorePath(std::string_view baseName);
/** @throws BadStorePath */
StorePath(const Hash & hash, std::string_view name); StorePath(const Hash & hash, std::string_view name);
std::string_view to_string() const noexcept std::string_view to_string() const noexcept

View File

@ -33,19 +33,9 @@ static void canonicaliseTimestampAndPermissions(const Path & path, const struct
#ifndef _WIN32 // TODO implement #ifndef _WIN32 // TODO implement
if (st.st_mtime != mtimeStore) { if (st.st_mtime != mtimeStore) {
struct timeval times[2]; struct stat st2 = st;
times[0].tv_sec = st.st_atime; st2.st_mtime = mtimeStore,
times[0].tv_usec = 0; setWriteTime(path, st2);
times[1].tv_sec = mtimeStore;
times[1].tv_usec = 0;
#if HAVE_LUTIMES
if (lutimes(path.c_str(), times) == -1)
if (errno != ENOSYS ||
(!S_ISLNK(st.st_mode) && utimes(path.c_str(), times) == -1))
#else
if (!S_ISLNK(st.st_mode) && utimes(path.c_str(), times) == -1)
#endif
throw SysError("changing modification time of '%1%'", path);
} }
#endif #endif
} }

View File

@ -392,8 +392,9 @@ ref<const ValidPathInfo> RemoteStore::addCAToStore(
else { else {
if (repair) throw Error("repairing is not supported when building through the Nix daemon protocol < 1.25"); if (repair) throw Error("repairing is not supported when building through the Nix daemon protocol < 1.25");
std::visit(overloaded { switch (caMethod.raw) {
[&](const TextIngestionMethod & thm) -> void { case ContentAddressMethod::Raw::Text:
{
if (hashAlgo != HashAlgorithm::SHA256) if (hashAlgo != HashAlgorithm::SHA256)
throw UnimplementedError("When adding text-hashed data called '%s', only SHA-256 is supported but '%s' was given", throw UnimplementedError("When adding text-hashed data called '%s', only SHA-256 is supported but '%s' was given",
name, printHashAlgo(hashAlgo)); name, printHashAlgo(hashAlgo));
@ -401,13 +402,19 @@ ref<const ValidPathInfo> RemoteStore::addCAToStore(
conn->to << WorkerProto::Op::AddTextToStore << name << s; conn->to << WorkerProto::Op::AddTextToStore << name << s;
WorkerProto::write(*this, *conn, references); WorkerProto::write(*this, *conn, references);
conn.processStderr(); conn.processStderr();
}, break;
[&](const FileIngestionMethod & fim) -> void { }
case ContentAddressMethod::Raw::Flat:
case ContentAddressMethod::Raw::NixArchive:
case ContentAddressMethod::Raw::Git:
default:
{
auto fim = caMethod.getFileIngestionMethod();
conn->to conn->to
<< WorkerProto::Op::AddToStore << WorkerProto::Op::AddToStore
<< name << name
<< ((hashAlgo == HashAlgorithm::SHA256 && fim == FileIngestionMethod::Recursive) ? 0 : 1) /* backwards compatibility hack */ << ((hashAlgo == HashAlgorithm::SHA256 && fim == FileIngestionMethod::NixArchive) ? 0 : 1) /* backwards compatibility hack */
<< (fim == FileIngestionMethod::Recursive ? 1 : 0) << (fim == FileIngestionMethod::NixArchive ? 1 : 0)
<< printHashAlgo(hashAlgo); << printHashAlgo(hashAlgo);
try { try {
@ -415,7 +422,7 @@ ref<const ValidPathInfo> RemoteStore::addCAToStore(
connections->incCapacity(); connections->incCapacity();
{ {
Finally cleanup([&]() { connections->decCapacity(); }); Finally cleanup([&]() { connections->decCapacity(); });
if (fim == FileIngestionMethod::Recursive) { if (fim == FileIngestionMethod::NixArchive) {
dump.drainInto(conn->to); dump.drainInto(conn->to);
} else { } else {
std::string contents = dump.drain(); std::string contents = dump.drain();
@ -432,9 +439,9 @@ ref<const ValidPathInfo> RemoteStore::addCAToStore(
} catch (EndOfFile & e) { } } catch (EndOfFile & e) { }
throw; throw;
} }
break;
} }
}, caMethod.raw); }
auto path = parseStorePath(readString(conn->from)); auto path = parseStorePath(readString(conn->from));
// Release our connection to prevent a deadlock in queryPathInfo(). // Release our connection to prevent a deadlock in queryPathInfo().
conn_.reset(); conn_.reset();
@ -457,12 +464,12 @@ StorePath RemoteStore::addToStoreFromDump(
case FileIngestionMethod::Flat: case FileIngestionMethod::Flat:
fsm = FileSerialisationMethod::Flat; fsm = FileSerialisationMethod::Flat;
break; break;
case FileIngestionMethod::Recursive: case FileIngestionMethod::NixArchive:
fsm = FileSerialisationMethod::Recursive; fsm = FileSerialisationMethod::NixArchive;
break; break;
case FileIngestionMethod::Git: case FileIngestionMethod::Git:
// Use NAR; Git is not a serialization method // Use NAR; Git is not a serialization method
fsm = FileSerialisationMethod::Recursive; fsm = FileSerialisationMethod::NixArchive;
break; break;
default: default:
assert(false); assert(false);
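The hunks above trade the old std::visit(overloaded{...}) dispatch and the Recursive spelling for a plain switch over the renamed enum values. Below is a small self-contained sketch of that dispatch pattern; the enums are simplified stand-ins for the ones in this diff, and the Text-to-Flat mapping is inferred from the nix-env hunk further down rather than stated here.

    #include <cassert>

    // Simplified stand-ins for the enums touched above.
    enum class Raw { Text, Flat, NixArchive, Git };
    enum class FileSerialisationMethod { Flat, NixArchive };

    // Pick how to serialise the data for a given content-address method,
    // mirroring the switch now used instead of std::visit.
    static FileSerialisationMethod serialisationFor(Raw method)
    {
        switch (method) {
        case Raw::Text:
        case Raw::Flat:
            return FileSerialisationMethod::Flat;
        case Raw::NixArchive:
        case Raw::Git: // Git objects still travel as a NAR; Git is not a serialisation method
            return FileSerialisationMethod::NixArchive;
        }
        assert(false); // unreachable: all enum values handled above
        return FileSerialisationMethod::NixArchive;
    }

    int main()
    {
        assert(serialisationFor(Raw::Git) == FileSerialisationMethod::NixArchive);
    }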

View File

@ -87,8 +87,8 @@ public:
StorePath addToStoreFromDump( StorePath addToStoreFromDump(
Source & dump, Source & dump,
std::string_view name, std::string_view name,
FileSerialisationMethod dumpMethod = FileSerialisationMethod::Recursive, FileSerialisationMethod dumpMethod = FileSerialisationMethod::NixArchive,
ContentAddressMethod hashMethod = FileIngestionMethod::Recursive, ContentAddressMethod hashMethod = FileIngestionMethod::NixArchive,
HashAlgorithm hashAlgo = HashAlgorithm::SHA256, HashAlgorithm hashAlgo = HashAlgorithm::SHA256,
const StorePathSet & references = StorePathSet(), const StorePathSet & references = StorePathSet(),
RepairFlag repair = NoRepair) override; RepairFlag repair = NoRepair) override;

View File

@ -19,6 +19,7 @@
#include "signals.hh" #include "signals.hh"
#include "users.hh" #include "users.hh"
#include <filesystem>
#include <nlohmann/json.hpp> #include <nlohmann/json.hpp>
using json = nlohmann::json; using json = nlohmann::json;
@ -121,7 +122,7 @@ StorePath StoreDirConfig::makeFixedOutputPath(std::string_view name, const Fixed
if (info.method == FileIngestionMethod::Git && info.hash.algo != HashAlgorithm::SHA1) if (info.method == FileIngestionMethod::Git && info.hash.algo != HashAlgorithm::SHA1)
throw Error("Git file ingestion must use SHA-1 hash"); throw Error("Git file ingestion must use SHA-1 hash");
if (info.hash.algo == HashAlgorithm::SHA256 && info.method == FileIngestionMethod::Recursive) { if (info.hash.algo == HashAlgorithm::SHA256 && info.method == FileIngestionMethod::NixArchive) {
return makeStorePath(makeType(*this, "source", info.references), info.hash, name); return makeStorePath(makeType(*this, "source", info.references), info.hash, name);
} else { } else {
if (!info.references.empty()) { if (!info.references.empty()) {
@ -199,12 +200,12 @@ StorePath Store::addToStore(
case FileIngestionMethod::Flat: case FileIngestionMethod::Flat:
fsm = FileSerialisationMethod::Flat; fsm = FileSerialisationMethod::Flat;
break; break;
case FileIngestionMethod::Recursive: case FileIngestionMethod::NixArchive:
fsm = FileSerialisationMethod::Recursive; fsm = FileSerialisationMethod::NixArchive;
break; break;
case FileIngestionMethod::Git: case FileIngestionMethod::Git:
// Use NAR; Git is not a serialization method // Use NAR; Git is not a serialization method
fsm = FileSerialisationMethod::Recursive; fsm = FileSerialisationMethod::NixArchive;
break; break;
} }
auto source = sinkToSource([&](Sink & sink) { auto source = sinkToSource([&](Sink & sink) {
@ -355,7 +356,7 @@ ValidPathInfo Store::addToStoreSlow(
RegularFileSink fileSink { caHashSink }; RegularFileSink fileSink { caHashSink };
TeeSink unusualHashTee { narHashSink, caHashSink }; TeeSink unusualHashTee { narHashSink, caHashSink };
auto & narSink = method == FileIngestionMethod::Recursive && hashAlgo != HashAlgorithm::SHA256 auto & narSink = method == ContentAddressMethod::Raw::NixArchive && hashAlgo != HashAlgorithm::SHA256
? static_cast<Sink &>(unusualHashTee) ? static_cast<Sink &>(unusualHashTee)
: narHashSink; : narHashSink;
@ -383,9 +384,9 @@ ValidPathInfo Store::addToStoreSlow(
finish. */ finish. */
auto [narHash, narSize] = narHashSink.finish(); auto [narHash, narSize] = narHashSink.finish();
auto hash = method == FileIngestionMethod::Recursive && hashAlgo == HashAlgorithm::SHA256 auto hash = method == ContentAddressMethod::Raw::NixArchive && hashAlgo == HashAlgorithm::SHA256
? narHash ? narHash
: method == FileIngestionMethod::Git : method == ContentAddressMethod::Raw::Git
? git::dumpHash(hashAlgo, srcPath).hash ? git::dumpHash(hashAlgo, srcPath).hash
: caHashSink.finish().first; : caHashSink.finish().first;
@ -1303,7 +1304,7 @@ ref<Store> openStore(StoreReference && storeURI)
if (!pathExists(chrootStore)) { if (!pathExists(chrootStore)) {
try { try {
createDirs(chrootStore); createDirs(chrootStore);
} catch (Error & e) { } catch (SystemError & e) {
return std::make_shared<LocalStore>(params); return std::make_shared<LocalStore>(params);
} }
warn("'%s' does not exist, so Nix will use '%s' as a chroot store", stateDir, chrootStore); warn("'%s' does not exist, so Nix will use '%s' as a chroot store", stateDir, chrootStore);

View File

@ -441,7 +441,7 @@ public:
virtual StorePath addToStore( virtual StorePath addToStore(
std::string_view name, std::string_view name,
const SourcePath & path, const SourcePath & path,
ContentAddressMethod method = FileIngestionMethod::Recursive, ContentAddressMethod method = ContentAddressMethod::Raw::NixArchive,
HashAlgorithm hashAlgo = HashAlgorithm::SHA256, HashAlgorithm hashAlgo = HashAlgorithm::SHA256,
const StorePathSet & references = StorePathSet(), const StorePathSet & references = StorePathSet(),
PathFilter & filter = defaultPathFilter, PathFilter & filter = defaultPathFilter,
@ -455,7 +455,7 @@ public:
ValidPathInfo addToStoreSlow( ValidPathInfo addToStoreSlow(
std::string_view name, std::string_view name,
const SourcePath & path, const SourcePath & path,
ContentAddressMethod method = FileIngestionMethod::Recursive, ContentAddressMethod method = ContentAddressMethod::Raw::NixArchive,
HashAlgorithm hashAlgo = HashAlgorithm::SHA256, HashAlgorithm hashAlgo = HashAlgorithm::SHA256,
const StorePathSet & references = StorePathSet(), const StorePathSet & references = StorePathSet(),
std::optional<Hash> expectedCAHash = {}); std::optional<Hash> expectedCAHash = {});
@ -470,7 +470,7 @@ public:
* *
* @param dumpMethod What serialisation format is `dump`, i.e. how * @param dumpMethod What serialisation format is `dump`, i.e. how
* to deserialize it. Must either match hashMethod or be * to deserialize it. Must either match hashMethod or be
* `FileSerialisationMethod::Recursive`. * `FileSerialisationMethod::NixArchive`.
* *
* @param hashMethod How content addressing? Need not match be the * @param hashMethod How content addressing? Need not match be the
* same as `dumpMethod`. * same as `dumpMethod`.
@ -480,8 +480,8 @@ public:
virtual StorePath addToStoreFromDump( virtual StorePath addToStoreFromDump(
Source & dump, Source & dump,
std::string_view name, std::string_view name,
FileSerialisationMethod dumpMethod = FileSerialisationMethod::Recursive, FileSerialisationMethod dumpMethod = FileSerialisationMethod::NixArchive,
ContentAddressMethod hashMethod = FileIngestionMethod::Recursive, ContentAddressMethod hashMethod = ContentAddressMethod::Raw::NixArchive,
HashAlgorithm hashAlgo = HashAlgorithm::SHA256, HashAlgorithm hashAlgo = HashAlgorithm::SHA256,
const StorePathSet & references = StorePathSet(), const StorePathSet & references = StorePathSet(),
RepairFlag repair = NoRepair) = 0; RepairFlag repair = NoRepair) = 0;

View File

@ -16,6 +16,7 @@ namespace nix {
struct SourcePath; struct SourcePath;
MakeError(BadStorePath, Error); MakeError(BadStorePath, Error);
MakeError(BadStorePathName, BadStorePath);
struct StoreDirConfig : public Config struct StoreDirConfig : public Config
{ {
@ -97,7 +98,7 @@ struct StoreDirConfig : public Config
std::pair<StorePath, Hash> computeStorePath( std::pair<StorePath, Hash> computeStorePath(
std::string_view name, std::string_view name,
const SourcePath & path, const SourcePath & path,
ContentAddressMethod method = FileIngestionMethod::Recursive, ContentAddressMethod method = FileIngestionMethod::NixArchive,
HashAlgorithm hashAlgo = HashAlgorithm::SHA256, HashAlgorithm hashAlgo = HashAlgorithm::SHA256,
const StorePathSet & references = {}, const StorePathSet & references = {},
PathFilter & filter = defaultPathFilter) const; PathFilter & filter = defaultPathFilter) const;

View File

@ -1,4 +1,5 @@
#include "globals.hh" #include "globals.hh"
#include "config-global.hh"
#include "hook-instance.hh" #include "hook-instance.hh"
#include "file-system.hh" #include "file-system.hh"
#include "child.hh" #include "child.hh"

View File

@ -2499,7 +2499,7 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
auto fim = outputHash.method.getFileIngestionMethod(); auto fim = outputHash.method.getFileIngestionMethod();
switch (fim) { switch (fim) {
case FileIngestionMethod::Flat: case FileIngestionMethod::Flat:
case FileIngestionMethod::Recursive: case FileIngestionMethod::NixArchive:
{ {
HashModuloSink caSink { outputHash.hashAlgo, oldHashPart }; HashModuloSink caSink { outputHash.hashAlgo, oldHashPart };
auto fim = outputHash.method.getFileIngestionMethod(); auto fim = outputHash.method.getFileIngestionMethod();
@ -2541,7 +2541,7 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
{ {
HashResult narHashAndSize = hashPath( HashResult narHashAndSize = hashPath(
{getFSSourceAccessor(), CanonPath(actualPath)}, {getFSSourceAccessor(), CanonPath(actualPath)},
FileSerialisationMethod::Recursive, HashAlgorithm::SHA256); FileSerialisationMethod::NixArchive, HashAlgorithm::SHA256);
newInfo0.narHash = narHashAndSize.first; newInfo0.narHash = narHashAndSize.first;
newInfo0.narSize = narHashAndSize.second; newInfo0.narSize = narHashAndSize.second;
} }
@ -2564,7 +2564,7 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
rewriteOutput(outputRewrites); rewriteOutput(outputRewrites);
HashResult narHashAndSize = hashPath( HashResult narHashAndSize = hashPath(
{getFSSourceAccessor(), CanonPath(actualPath)}, {getFSSourceAccessor(), CanonPath(actualPath)},
FileSerialisationMethod::Recursive, HashAlgorithm::SHA256); FileSerialisationMethod::NixArchive, HashAlgorithm::SHA256);
ValidPathInfo newInfo0 { requiredFinalPath, narHashAndSize.first }; ValidPathInfo newInfo0 { requiredFinalPath, narHashAndSize.first };
newInfo0.narSize = narHashAndSize.second; newInfo0.narSize = narHashAndSize.second;
auto refs = rewriteRefs(); auto refs = rewriteRefs();
@ -2914,6 +2914,24 @@ void LocalDerivationGoal::checkOutputs(const std::map<std::string, ValidPathInfo
}; };
if (auto structuredAttrs = parsedDrv->getStructuredAttrs()) { if (auto structuredAttrs = parsedDrv->getStructuredAttrs()) {
if (get(*structuredAttrs, "allowedReferences")){
warn("'structuredAttrs' disables the effect of the top-level attribute 'allowedReferences'; use 'outputChecks' instead");
}
if (get(*structuredAttrs, "allowedRequisites")){
warn("'structuredAttrs' disables the effect of the top-level attribute 'allowedRequisites'; use 'outputChecks' instead");
}
if (get(*structuredAttrs, "disallowedRequisites")){
warn("'structuredAttrs' disables the effect of the top-level attribute 'disallowedRequisites'; use 'outputChecks' instead");
}
if (get(*structuredAttrs, "disallowedReferences")){
warn("'structuredAttrs' disables the effect of the top-level attribute 'disallowedReferences'; use 'outputChecks' instead");
}
if (get(*structuredAttrs, "maxSize")){
warn("'structuredAttrs' disables the effect of the top-level attribute 'maxSize'; use 'outputChecks' instead");
}
if (get(*structuredAttrs, "maxClosureSize")){
warn("'structuredAttrs' disables the effect of the top-level attribute 'maxClosureSize'; use 'outputChecks' instead");
}
if (auto outputChecks = get(*structuredAttrs, "outputChecks")) { if (auto outputChecks = get(*structuredAttrs, "outputChecks")) {
if (auto output = get(*outputChecks, outputName)) { if (auto output = get(*outputChecks, outputName)) {
Checks checks; Checks checks;

src/libutil-c/.version Symbolic link
View File

@ -0,0 +1 @@
../../.version

src/libutil-c/meson.build Normal file
View File

@ -0,0 +1,117 @@
project('nix-util-c', 'cpp',
version : files('.version'),
default_options : [
'cpp_std=c++2a',
# TODO(Qyriad): increase the warning level
'warning_level=1',
'debug=true',
'optimization=2',
'errorlogs=true', # Please print logs for tests that fail
],
meson_version : '>= 1.1',
license : 'LGPL-2.1-or-later',
)
cxx = meson.get_compiler('cpp')
# See note in ../nix-util/meson.build
deps_private = [ ]
# See note in ../nix-util/meson.build
deps_public = [ ]
# See note in ../nix-util/meson.build
deps_other = [ ]
configdata = configuration_data()
add_project_arguments(
# TODO(Qyriad): Yes this is how the autoconf+Make system did it.
# It would be nice for our headers to be idempotent instead.
'-include', 'config-util.h',
# '-include', 'config-store.h',
'-Wno-deprecated-declarations',
'-Wimplicit-fallthrough',
'-Werror=switch',
'-Werror=switch-enum',
'-Wdeprecated-copy',
'-Wignored-qualifiers',
# Enable assertions in libstdc++ by default. Harmless on libc++. Benchmarked
# at ~1% overhead in `nix search`.
#
# FIXME: remove when we get meson 1.4.0 which will default this to on for us:
# https://mesonbuild.com/Release-notes-for-1-4-0.html#ndebug-setting-now-controls-c-stdlib-assertions
'-D_GLIBCXX_ASSERTIONS=1',
language : 'cpp',
)
sources = files(
'nix_api_util.cc',
)
include_dirs = [include_directories('.')]
headers = files(
'nix_api_util.h',
'nix_api_util_internal.h',
)
if host_machine.system() == 'cygwin' or host_machine.system() == 'windows'
# Windows DLLs are stricter about symbol visibility than Unix shared
# objects --- see https://gcc.gnu.org/wiki/Visibility for details.
# This is a temporary sledgehammer to export everything like on Unix,
  # and not deal with this yet.
#
# TODO do not do this, and instead do fine-grained export annotations.
linker_export_flags = ['-Wl,--export-all-symbols']
else
linker_export_flags = []
endif
nix_util = dependency('nix-util')
if nix_util.type_name() == 'internal'
# subproject sadly no good for pkg-config module
deps_other += nix_util
else
deps_public += nix_util
endif
# TODO rename, because it will conflict with downstream projects
configdata.set_quoted('PACKAGE_VERSION', meson.project_version())
config_h = configure_file(
configuration : configdata,
output : 'config-util.h',
)
this_library = library(
'nixutilc',
sources,
dependencies : deps_public + deps_private + deps_other,
include_directories : include_dirs,
link_args: linker_export_flags,
install : true,
)
install_headers(headers, subdir : 'nix', preserve_path : true)
libraries_private = []
import('pkgconfig').generate(
this_library,
filebase : meson.project_name(),
name : 'Nix',
description : 'Nix Package Manager',
subdirs : ['nix'],
extra_cflags : ['-std=c++2a'],
requires : deps_public,
requires_private : deps_private,
libraries_private : libraries_private,
)
meson.override_dependency(meson.project_name(), declare_dependency(
include_directories : include_dirs,
link_with : this_library,
compile_args : ['-std=c++2a'],
dependencies : [],
))

View File

@ -0,0 +1 @@
# vim: filetype=meson

View File

@ -0,0 +1,9 @@
prefix=@prefix@
libdir=@libdir@
includedir=@includedir@
Name: Nix libutil C API
Description: Common functions for the Nix C API, such as error handling
Version: @PACKAGE_VERSION@
Libs: -L${libdir} -lnixutil
Cflags: -I${includedir}/nix -std=c++2a
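To illustrate what the new pkg-config module is for: a downstream project queries it for flags and links against libnixutil. The compile line below follows directly from the .pc file above; the specific C API calls (nix_c_context_create, nix_libutil_init, nix_version_get, nix_c_context_free) are assumptions about nix_api_util.h and are not taken from this diff.

    // Build sketch (flags come from the .pc file above):
    //   c++ demo.cc $(pkg-config --cflags --libs nix-util-c) -o demo
    //
    // The C API calls below are assumed and may differ in this revision.
    #include <cstdio>
    #include "nix_api_util.h"

    int main()
    {
        nix_c_context * ctx = nix_c_context_create(); // per-call error context
        nix_libutil_init(ctx);                        // initialise libutil
        std::printf("Nix libutil version: %s\n", nix_version_get());
        nix_c_context_free(ctx);
        return 0;
    }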

View File

@ -1,5 +1,5 @@
#include "nix_api_util.h" #include "nix_api_util.h"
#include "config.hh" #include "config-global.hh"
#include "error.hh" #include "error.hh"
#include "nix_api_util_internal.h" #include "nix_api_util_internal.h"
#include "util.hh" #include "util.hh"

src/libutil-c/package.nix Normal file
View File

@ -0,0 +1,97 @@
{ lib
, stdenv
, releaseTools
, meson
, ninja
, pkg-config
, nix-util
# Configuration Options
, versionSuffix ? ""
# Check test coverage of Nix. Probably want to use with at least
# one of `doCheck` or `doInstallCheck` enabled.
, withCoverageChecks ? false
}:
let
inherit (lib) fileset;
version = lib.fileContents ./.version + versionSuffix;
mkDerivation =
if withCoverageChecks
then
# TODO support `finalAttrs` args function in
# `releaseTools.coverageAnalysis`.
argsFun:
releaseTools.coverageAnalysis (let args = argsFun args; in args)
else stdenv.mkDerivation;
in
mkDerivation (finalAttrs: {
pname = "nix-util-c";
inherit version;
src = fileset.toSource {
root = ./.;
fileset = fileset.unions [
./meson.build
./meson.options
(fileset.fileFilter (file: file.hasExt "cc") ./.)
(fileset.fileFilter (file: file.hasExt "hh") ./.)
(fileset.fileFilter (file: file.hasExt "h") ./.)
];
};
outputs = [ "out" "dev" ];
nativeBuildInputs = [
meson
ninja
pkg-config
];
buildInputs = [
nix-util
]
;
propagatedBuildInputs = [
nix-util
];
preConfigure =
# "Inline" .version so it's not a symlink, and includes the suffix
''
echo ${version} > .version
'';
mesonFlags = [
];
env = lib.optionalAttrs (stdenv.isLinux && !(stdenv.hostPlatform.isStatic && stdenv.system == "aarch64-linux")) {
LDFLAGS = "-fuse-ld=gold";
};
enableParallelBuilding = true;
separateDebugInfo = !stdenv.hostPlatform.isStatic;
# TODO Always true after https://github.com/NixOS/nixpkgs/issues/318564
strictDeps = !withCoverageChecks;
hardeningDisable = lib.optional stdenv.hostPlatform.isStatic "pie";
meta = {
platforms = lib.platforms.unix ++ lib.platforms.windows;
};
} // lib.optionalAttrs withCoverageChecks {
lcovFilter = [ "*/boost/*" "*-tab.*" ];
hardeningDisable = [ "fortify" ];
})

src/libutil-test Symbolic link
View File

@ -0,0 +1 @@
../tests/unit/libutil/

src/libutil-test-support Symbolic link
View File

@ -0,0 +1 @@
../tests/unit/libutil-support/

View File

@ -6,7 +6,7 @@
#include <strings.h> // for strcasecmp #include <strings.h> // for strcasecmp
#include "archive.hh" #include "archive.hh"
#include "config.hh" #include "config-global.hh"
#include "posix-source-accessor.hh" #include "posix-source-accessor.hh"
#include "source-path.hh" #include "source-path.hh"
#include "file-system.hh" #include "file-system.hh"

View File

@ -0,0 +1,67 @@
#include "config-global.hh"
namespace nix {
bool GlobalConfig::set(const std::string & name, const std::string & value)
{
for (auto & config : *configRegistrations)
if (config->set(name, value))
return true;
unknownSettings.emplace(name, value);
return false;
}
void GlobalConfig::getSettings(std::map<std::string, SettingInfo> & res, bool overriddenOnly)
{
for (auto & config : *configRegistrations)
config->getSettings(res, overriddenOnly);
}
void GlobalConfig::resetOverridden()
{
for (auto & config : *configRegistrations)
config->resetOverridden();
}
nlohmann::json GlobalConfig::toJSON()
{
auto res = nlohmann::json::object();
for (const auto & config : *configRegistrations)
res.update(config->toJSON());
return res;
}
std::string GlobalConfig::toKeyValue()
{
std::string res;
std::map<std::string, Config::SettingInfo> settings;
globalConfig.getSettings(settings);
for (const auto & s : settings)
res += fmt("%s = %s\n", s.first, s.second.value);
return res;
}
void GlobalConfig::convertToArgs(Args & args, const std::string & category)
{
for (auto & config : *configRegistrations)
config->convertToArgs(args, category);
}
GlobalConfig globalConfig;
GlobalConfig::ConfigRegistrations * GlobalConfig::configRegistrations;
GlobalConfig::Register::Register(Config * config)
{
if (!configRegistrations)
configRegistrations = new ConfigRegistrations;
configRegistrations->emplace_back(config);
}
ExperimentalFeatureSettings experimentalFeatureSettings;
static GlobalConfig::Register rSettings(&experimentalFeatureSettings);
}

View File

@ -0,0 +1,33 @@
#pragma once
///@file
#include "config.hh"
namespace nix {
struct GlobalConfig : public AbstractConfig
{
typedef std::vector<Config *> ConfigRegistrations;
static ConfigRegistrations * configRegistrations;
bool set(const std::string & name, const std::string & value) override;
void getSettings(std::map<std::string, SettingInfo> & res, bool overriddenOnly = false) override;
void resetOverridden() override;
nlohmann::json toJSON() override;
std::string toKeyValue() override;
void convertToArgs(Args & args, const std::string & category) override;
struct Register
{
Register(Config * config);
};
};
extern GlobalConfig globalConfig;
}
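The relocated GlobalConfig machinery is used by registering each Config group once, exactly as the moved rSettings line does for experimentalFeatureSettings. A hedged sketch of that registration pattern (MySettings and the setting name are illustrative, not part of this diff):

    #include "config-global.hh"

    using namespace nix;

    // Illustrative settings group; only the registration pattern comes from this diff.
    struct MySettings : Config
    {
        Setting<bool> verboseFoo{this, false, "verbose-foo",
            "Whether to log foo verbosely."};
    };

    MySettings mySettings;

    // Hooks the group into globalConfig so GlobalConfig::set()/toJSON() see it,
    // just like rSettings registers experimentalFeatureSettings above.
    static GlobalConfig::Register rMySettings(&mySettings);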

View File

@ -443,67 +443,6 @@ void OptionalPathSetting::operator =(const std::optional<Path> & v)
this->assign(v); this->assign(v);
} }
bool GlobalConfig::set(const std::string & name, const std::string & value)
{
for (auto & config : *configRegistrations)
if (config->set(name, value)) return true;
unknownSettings.emplace(name, value);
return false;
}
void GlobalConfig::getSettings(std::map<std::string, SettingInfo> & res, bool overriddenOnly)
{
for (auto & config : *configRegistrations)
config->getSettings(res, overriddenOnly);
}
void GlobalConfig::resetOverridden()
{
for (auto & config : *configRegistrations)
config->resetOverridden();
}
nlohmann::json GlobalConfig::toJSON()
{
auto res = nlohmann::json::object();
for (const auto & config : *configRegistrations)
res.update(config->toJSON());
return res;
}
std::string GlobalConfig::toKeyValue()
{
std::string res;
std::map<std::string, Config::SettingInfo> settings;
globalConfig.getSettings(settings);
for (const auto & s : settings)
res += fmt("%s = %s\n", s.first, s.second.value);
return res;
}
void GlobalConfig::convertToArgs(Args & args, const std::string & category)
{
for (auto & config : *configRegistrations)
config->convertToArgs(args, category);
}
GlobalConfig globalConfig;
GlobalConfig::ConfigRegistrations * GlobalConfig::configRegistrations;
GlobalConfig::Register::Register(Config * config)
{
if (!configRegistrations)
configRegistrations = new ConfigRegistrations;
configRegistrations->emplace_back(config);
}
ExperimentalFeatureSettings experimentalFeatureSettings;
static GlobalConfig::Register rSettings(&experimentalFeatureSettings);
bool ExperimentalFeatureSettings::isEnabled(const ExperimentalFeature & feature) const bool ExperimentalFeatureSettings::isEnabled(const ExperimentalFeature & feature) const
{ {
auto & f = experimentalFeatures.get(); auto & f = experimentalFeatures.get();

View File

@ -375,31 +375,6 @@ public:
void operator =(const std::optional<Path> & v); void operator =(const std::optional<Path> & v);
}; };
struct GlobalConfig : public AbstractConfig
{
typedef std::vector<Config*> ConfigRegistrations;
static ConfigRegistrations * configRegistrations;
bool set(const std::string & name, const std::string & value) override;
void getSettings(std::map<std::string, SettingInfo> & res, bool overriddenOnly = false) override;
void resetOverridden() override;
nlohmann::json toJSON() override;
std::string toKeyValue() override;
void convertToArgs(Args & args, const std::string & category) override;
struct Register
{
Register(Config * config);
};
};
extern GlobalConfig globalConfig;
struct ExperimentalFeatureSettings : Config { struct ExperimentalFeatureSettings : Config {

View File

@ -155,6 +155,7 @@ public:
: err(e) : err(e)
{ } { }
/** The error message without "error: " prefixed to it. */
std::string message() { std::string message() {
return err.msg.str(); return err.msg.str();
} }

View File

@ -10,7 +10,7 @@ static std::optional<FileSerialisationMethod> parseFileSerialisationMethodOpt(st
if (input == "flat") { if (input == "flat") {
return FileSerialisationMethod::Flat; return FileSerialisationMethod::Flat;
} else if (input == "nar") { } else if (input == "nar") {
return FileSerialisationMethod::Recursive; return FileSerialisationMethod::NixArchive;
} else { } else {
return std::nullopt; return std::nullopt;
} }
@ -45,7 +45,7 @@ std::string_view renderFileSerialisationMethod(FileSerialisationMethod method)
switch (method) { switch (method) {
case FileSerialisationMethod::Flat: case FileSerialisationMethod::Flat:
return "flat"; return "flat";
case FileSerialisationMethod::Recursive: case FileSerialisationMethod::NixArchive:
return "nar"; return "nar";
default: default:
assert(false); assert(false);
@ -57,7 +57,7 @@ std::string_view renderFileIngestionMethod(FileIngestionMethod method)
{ {
switch (method) { switch (method) {
case FileIngestionMethod::Flat: case FileIngestionMethod::Flat:
case FileIngestionMethod::Recursive: case FileIngestionMethod::NixArchive:
return renderFileSerialisationMethod( return renderFileSerialisationMethod(
static_cast<FileSerialisationMethod>(method)); static_cast<FileSerialisationMethod>(method));
case FileIngestionMethod::Git: case FileIngestionMethod::Git:
@ -78,7 +78,7 @@ void dumpPath(
case FileSerialisationMethod::Flat: case FileSerialisationMethod::Flat:
path.readFile(sink); path.readFile(sink);
break; break;
case FileSerialisationMethod::Recursive: case FileSerialisationMethod::NixArchive:
path.dumpPath(sink, filter); path.dumpPath(sink, filter);
break; break;
} }
@ -94,7 +94,7 @@ void restorePath(
case FileSerialisationMethod::Flat: case FileSerialisationMethod::Flat:
writeFile(path, source); writeFile(path, source);
break; break;
case FileSerialisationMethod::Recursive: case FileSerialisationMethod::NixArchive:
restorePath(path, source); restorePath(path, source);
break; break;
} }
@ -119,7 +119,7 @@ std::pair<Hash, std::optional<uint64_t>> hashPath(
{ {
switch (method) { switch (method) {
case FileIngestionMethod::Flat: case FileIngestionMethod::Flat:
case FileIngestionMethod::Recursive: { case FileIngestionMethod::NixArchive: {
auto res = hashPath(path, (FileSerialisationMethod) method, ht, filter); auto res = hashPath(path, (FileSerialisationMethod) method, ht, filter);
return {res.first, {res.second}}; return {res.first, {res.second}};
} }
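Elsewhere in this diff, callers compute a path's NAR hash by passing the renamed FileSerialisationMethod::NixArchive, as the registerOutputs and nix-store hunks do. A hedged sketch of such a call; the accessor helper is taken from those hunks, while the include set is an assumption.

    #include "file-content-address.hh"
    #include "hash.hh"

    using namespace nix;

    // Hash the NAR ("nar") serialisation of a filesystem path with SHA-256,
    // mirroring the hashPath(..., FileSerialisationMethod::NixArchive, ...)
    // calls updated in this diff. getFSSourceAccessor() is the accessor used
    // by the build hunks; the headers above are assumptions.
    Hash narSha256Of(const Path & actualPath)
    {
        HashResult narHashAndSize = hashPath(
            {getFSSourceAccessor(), CanonPath(actualPath)},
            FileSerialisationMethod::NixArchive, HashAlgorithm::SHA256);
        return narHashAndSize.first;
    }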

View File

@ -35,14 +35,14 @@ enum struct FileSerialisationMethod : uint8_t {
* See `file-system-object/content-address.md#serial-nix-archive` in * See `file-system-object/content-address.md#serial-nix-archive` in
* the manual. * the manual.
*/ */
Recursive, NixArchive,
}; };
/** /**
* Parse a `FileSerialisationMethod` by name. Choice of: * Parse a `FileSerialisationMethod` by name. Choice of:
* *
* - `flat`: `FileSerialisationMethod::Flat` * - `flat`: `FileSerialisationMethod::Flat`
* - `nar`: `FileSerialisationMethod::Recursive` * - `nar`: `FileSerialisationMethod::NixArchive`
* *
* Opposite of `renderFileSerialisationMethod`. * Opposite of `renderFileSerialisationMethod`.
*/ */
@ -107,16 +107,18 @@ enum struct FileIngestionMethod : uint8_t {
Flat, Flat,
/** /**
* Hash `FileSerialisationMethod::Recursive` serialisation. * Hash `FileSerialisationMethod::NixArchive` serialisation.
* *
* See `file-system-object/content-address.md#serial-flat` in the * See `file-system-object/content-address.md#serial-flat` in the
* manual. * manual.
*/ */
Recursive, NixArchive,
/** /**
* Git hashing. * Git hashing.
* *
* Part of `ExperimentalFeature::GitHashing`.
*
* See `file-system-object/content-address.md#serial-git` in the * See `file-system-object/content-address.md#serial-git` in the
* manual. * manual.
*/ */
@ -127,7 +129,7 @@ enum struct FileIngestionMethod : uint8_t {
* Parse a `FileIngestionMethod` by name. Choice of: * Parse a `FileIngestionMethod` by name. Choice of:
* *
* - `flat`: `FileIngestionMethod::Flat` * - `flat`: `FileIngestionMethod::Flat`
* - `nar`: `FileIngestionMethod::Recursive` * - `nar`: `FileIngestionMethod::NixArchive`
* - `git`: `FileIngestionMethod::Git` * - `git`: `FileIngestionMethod::Git`
* *
* Opposite of `renderFileIngestionMethod`. * Opposite of `renderFileIngestionMethod`.
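Since the rename is internal only, the externally visible names are unchanged: `nar` still selects the (renamed) NixArchive variants. A tiny check using the render helpers from the .cc hunk above; the `git` result is inferred from the parse documentation here rather than shown in the hunk.

    #include <cassert>
    #include "file-content-address.hh"

    using namespace nix;

    int main()
    {
        // "nar" still names the renamed NixArchive variants...
        assert(renderFileSerialisationMethod(FileSerialisationMethod::NixArchive) == "nar");
        assert(renderFileIngestionMethod(FileIngestionMethod::NixArchive) == "nar");
        // ...and "git" names the Git ingestion method (per the parse docs above).
        assert(renderFileIngestionMethod(FileIngestionMethod::Git) == "git");
    }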

View File

@ -418,30 +418,13 @@ void createDir(const Path & path, mode_t mode)
throw SysError("creating directory '%1%'", path); throw SysError("creating directory '%1%'", path);
} }
Paths createDirs(const Path & path) void createDirs(const Path & path)
{ {
Paths created; try {
if (path == "/") return created; fs::create_directories(path);
} catch (fs::filesystem_error & e) {
struct stat st; throw SysError("creating directory '%1%'", path);
if (STAT(path.c_str(), &st) == -1) {
created = createDirs(dirOf(path));
if (mkdir(path.c_str()
#ifndef _WIN32 // TODO abstract mkdir perms for Windows
, 0777
#endif
) == -1 && errno != EEXIST)
throw SysError("creating directory '%1%'", path);
st = STAT(path);
created.push_back(path);
} }
if (S_ISLNK(st.st_mode) && stat(path.c_str(), &st) == -1)
throw SysError("statting symlink '%1%'", path);
if (!S_ISDIR(st.st_mode)) throw Error("'%1%' is not a directory", path);
return created;
} }
@ -579,29 +562,69 @@ void replaceSymlink(const Path & target, const Path & link)
} }
} }
#ifndef _WIN32 void setWriteTime(
static void setWriteTime(const fs::path & p, const struct stat & st) const std::filesystem::path & path,
time_t accessedTime,
time_t modificationTime,
std::optional<bool> optIsSymlink)
{ {
struct timeval times[2]; #ifndef _WIN32
times[0] = { struct timeval times[2] = {
.tv_sec = st.st_atime, {
.tv_usec = 0, .tv_sec = accessedTime,
.tv_usec = 0,
},
{
.tv_sec = modificationTime,
.tv_usec = 0,
},
}; };
times[1] = {
.tv_sec = st.st_mtime,
.tv_usec = 0,
};
if (lutimes(p.c_str(), times) != 0)
throw SysError("changing modification time of '%s'", p);
}
#endif #endif
auto nonSymlink = [&]{
bool isSymlink = optIsSymlink
? *optIsSymlink
: fs::is_symlink(path);
if (!isSymlink) {
#ifdef _WIN32
// FIXME use `fs::last_write_time`.
//
// Would be nice to use std::filesystem unconditionally, but
// it doesn't support access time, just modification time.
//
// System clock vs File clock issues also make that annoying.
warn("Changing file times is not yet implemented on Windows, path is '%s'", path);
#else
if (utimes(path.c_str(), times) == -1) {
throw SysError("changing modification time of '%s' (not a symlink)", path);
}
#endif
} else {
throw Error("Cannot modification time of symlink '%s'", path);
}
};
#if HAVE_LUTIMES
if (lutimes(path.c_str(), times) == -1) {
if (errno == ENOSYS)
nonSymlink();
else
throw SysError("changing modification time of '%s'", path);
}
#else
nonSymlink();
#endif
}
void setWriteTime(const fs::path & path, const struct stat & st)
{
setWriteTime(path, st.st_atime, st.st_mtime, S_ISLNK(st.st_mode));
}
void copyFile(const fs::path & from, const fs::path & to, bool andDelete) void copyFile(const fs::path & from, const fs::path & to, bool andDelete)
{ {
#ifndef _WIN32
// TODO: Rewrite the `is_*` to use `symlink_status()`
auto statOfFrom = lstat(from.c_str());
#endif
auto fromStatus = fs::symlink_status(from); auto fromStatus = fs::symlink_status(from);
// Mark the directory as writable so that we can delete its children // Mark the directory as writable so that we can delete its children
@ -621,9 +644,7 @@ void copyFile(const fs::path & from, const fs::path & to, bool andDelete)
throw Error("file '%s' has an unsupported type", from); throw Error("file '%s' has an unsupported type", from);
} }
#ifndef _WIN32 setWriteTime(to, lstat(from.string().c_str()));
setWriteTime(to, statOfFrom);
#endif
if (andDelete) { if (andDelete) {
if (!fs::is_symlink(fromStatus)) if (!fs::is_symlink(fromStatus))
fs::permissions(from, fs::perms::owner_write, fs::perm_options::add | fs::perm_options::nofollow); fs::permissions(from, fs::perms::owner_write, fs::perm_options::add | fs::perm_options::nofollow);

View File

@ -148,11 +148,10 @@ void deletePath(const std::filesystem::path & path);
void deletePath(const std::filesystem::path & path, uint64_t & bytesFreed); void deletePath(const std::filesystem::path & path, uint64_t & bytesFreed);
/** /**
* Create a directory and all its parents, if necessary. Returns the * Create a directory and all its parents, if necessary.
* list of created directories, in order of creation.
*/ */
Paths createDirs(const Path & path); void createDirs(const Path & path);
inline Paths createDirs(PathView path) inline void createDirs(PathView path)
{ {
return createDirs(Path(path)); return createDirs(Path(path));
} }
@ -162,6 +161,30 @@ inline Paths createDirs(PathView path)
*/ */
void createDir(const Path & path, mode_t mode = 0755); void createDir(const Path & path, mode_t mode = 0755);
/**
* Set the access and modification times of the given path, not
* following symlinks.
*
* @param accessedTime Specified in seconds.
*
* @param modificationTime Specified in seconds.
*
* @param isSymlink Whether the file in question is a symlink. Used for
* fallback code where we don't have `lutimes` or similar. If
* `std::nullopt` is passed, the information will be recomputed if it
* is needed. Race conditions are possible, so be careful!
*/
void setWriteTime(
const std::filesystem::path & path,
time_t accessedTime,
time_t modificationTime,
std::optional<bool> isSymlink = std::nullopt);
/**
* Convenience wrapper that takes all arguments from the `struct stat`.
*/
void setWriteTime(const std::filesystem::path & path, const struct stat & st);
/** /**
* Create a symlink. * Create a symlink.
*/ */
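A short usage sketch of the new setWriteTime overload declared above; the timestamps, path, and helper name are illustrative, and the header name is the one used elsewhere in this diff.

    #include <ctime>
    #include "file-system.hh"

    using namespace nix;

    void touchExample(const std::filesystem::path & p)
    {
        // Set both access and modification time to "now", explicitly stating
        // that the path is not a symlink so no fallback lookup is needed.
        time_t now = std::time(nullptr);
        setWriteTime(p, /*accessedTime=*/now, /*modificationTime=*/now, /*isSymlink=*/false);
    }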

View File

@ -111,6 +111,8 @@ std::ostream & operator<<(std::ostream & out, const Magenta<T> & y)
/** /**
* Values wrapped in this class are printed without coloring. * Values wrapped in this class are printed without coloring.
* *
* Specifically, the color is reset to normal before printing the value.
*
* By default, arguments to `HintFmt` are printed in magenta (see `Magenta`). * By default, arguments to `HintFmt` are printed in magenta (see `Magenta`).
*/ */
template <class T> template <class T>

View File

@ -1,7 +1,7 @@
#include <fcntl.h> #include <fcntl.h>
#include "error.hh" #include "error.hh"
#include "config.hh" #include "config-global.hh"
#include "fs-sink.hh" #include "fs-sink.hh"
#if _WIN32 #if _WIN32

View File

@ -3,7 +3,7 @@
#include "environment-variables.hh" #include "environment-variables.hh"
#include "terminal.hh" #include "terminal.hh"
#include "util.hh" #include "util.hh"
#include "config.hh" #include "config-global.hh"
#include "source-path.hh" #include "source-path.hh"
#include "position.hh" #include "position.hh"

View File

@ -161,6 +161,7 @@ sources = files(
'compression.cc', 'compression.cc',
'compute-levels.cc', 'compute-levels.cc',
'config.cc', 'config.cc',
'config-global.cc',
'current-process.cc', 'current-process.cc',
'english.cc', 'english.cc',
'environment-variables.cc', 'environment-variables.cc',
@ -211,6 +212,7 @@ headers = [config_h] + files(
'comparator.hh', 'comparator.hh',
'compression.hh', 'compression.hh',
'compute-levels.hh', 'compute-levels.hh',
'config-global.hh',
'config-impl.hh', 'config-impl.hh',
'config.hh', 'config.hh',
'current-process.hh', 'current-process.hh',

View File

@ -1,7 +1,6 @@
{ lib { lib
, stdenv , stdenv
, releaseTools , releaseTools
, fileset
, meson , meson
, ninja , ninja
@ -18,7 +17,6 @@
# Configuration Options # Configuration Options
, versionSuffix ? "" , versionSuffix ? ""
, officialRelease ? false
# Check test coverage of Nix. Probably want to use with at least # Check test coverage of Nix. Probably want to use with at least
# one of `doCheck` or `doInstallCheck` enabled. # one of `doCheck` or `doInstallCheck` enabled.
@ -26,6 +24,8 @@
}: }:
let let
inherit (lib) fileset;
version = lib.fileContents ./.version + versionSuffix; version = lib.fileContents ./.version + versionSuffix;
mkDerivation = mkDerivation =

View File

@ -3,6 +3,7 @@
#include "types.hh" #include "types.hh"
#include "error.hh" #include "error.hh"
#include "file-descriptor.hh"
#include "logging.hh" #include "logging.hh"
#include "ansicolor.hh" #include "ansicolor.hh"
@ -23,26 +24,36 @@ namespace nix {
struct Sink; struct Sink;
struct Source; struct Source;
#ifndef _WIN32
class Pid class Pid
{ {
#ifndef _WIN32
pid_t pid = -1; pid_t pid = -1;
bool separatePG = false; bool separatePG = false;
int killSignal = SIGKILL; int killSignal = SIGKILL;
#else
AutoCloseFD pid = INVALID_DESCRIPTOR;
#endif
public: public:
Pid(); Pid();
#ifndef _WIN32
Pid(pid_t pid); Pid(pid_t pid);
~Pid();
void operator =(pid_t pid); void operator =(pid_t pid);
operator pid_t(); operator pid_t();
#else
Pid(AutoCloseFD pid);
void operator =(AutoCloseFD pid);
#endif
~Pid();
int kill(); int kill();
int wait(); int wait();
// TODO: Implement for Windows
#ifndef _WIN32
void setSeparatePG(bool separatePG); void setSeparatePG(bool separatePG);
void setKillSignal(int signal); void setKillSignal(int signal);
pid_t release(); pid_t release();
};
#endif #endif
};
#ifndef _WIN32 #ifndef _WIN32

View File

@ -1,9 +1,15 @@
#include "current-process.hh" #include "current-process.hh"
#include "environment-variables.hh" #include "environment-variables.hh"
#include "error.hh"
#include "file-descriptor.hh"
#include "file-path.hh"
#include "signals.hh" #include "signals.hh"
#include "processes.hh" #include "processes.hh"
#include "finally.hh" #include "finally.hh"
#include "serialise.hh" #include "serialise.hh"
#include "file-system.hh"
#include "util.hh"
#include "windows-error.hh"
#include <cerrno> #include <cerrno>
#include <cstdlib> #include <cstdlib>
@ -16,25 +22,347 @@
#include <sys/types.h> #include <sys/types.h>
#include <unistd.h> #include <unistd.h>
#define WIN32_LEAN_AND_MEAN
#include <windows.h>
namespace nix { namespace nix {
std::string runProgram(Path program, bool lookupPath, const Strings & args, using namespace nix::windows;
const std::optional<std::string> & input, bool isInteractive)
Pid::Pid() {}
Pid::Pid(AutoCloseFD pid)
: pid(std::move(pid))
{ {
throw UnimplementedError("Cannot shell out to git on Windows yet");
} }
Pid::~Pid()
{
if (pid.get() != INVALID_DESCRIPTOR)
kill();
}
void Pid::operator=(AutoCloseFD pid)
{
if (this->pid.get() != INVALID_DESCRIPTOR && this->pid.get() != pid.get())
kill();
this->pid = std::move(pid);
}
// TODO: Implement (not needed for process spawning yet)
int Pid::kill()
{
assert(pid.get() != INVALID_DESCRIPTOR);
debug("killing process %1%", pid.get());
throw UnimplementedError("Pid::kill unimplemented");
}
int Pid::wait()
{
// https://github.com/nix-windows/nix/blob/windows-meson/src/libutil/util.cc#L1938
assert(pid.get() != INVALID_DESCRIPTOR);
DWORD status = WaitForSingleObject(pid.get(), INFINITE);
if (status != WAIT_OBJECT_0) {
debug("WaitForSingleObject returned %1%", status);
}
DWORD exitCode = 0;
if (GetExitCodeProcess(pid.get(), &exitCode) == FALSE) {
debug("GetExitCodeProcess failed on pid %1%", pid.get());
}
pid.close();
return exitCode;
}
// TODO: Merge this with Unix's runProgram since it's identical logic.
std::string runProgram(
Path program, bool lookupPath, const Strings & args, const std::optional<std::string> & input, bool isInteractive)
{
auto res = runProgram(RunOptions{
.program = program, .lookupPath = lookupPath, .args = args, .input = input, .isInteractive = isInteractive});
if (!statusOk(res.first))
throw ExecError(res.first, "program '%1%' %2%", program, statusToString(res.first));
return res.second;
}
std::optional<Path> getProgramInterpreter(const Path & program)
{
// These extensions are automatically handled by Windows and don't require an interpreter.
static constexpr const char * exts[] = {".exe", ".cmd", ".bat"};
for (const auto ext : exts) {
if (hasSuffix(program, ext)) {
return {};
}
}
// TODO: Open file and read the shebang
throw UnimplementedError("getProgramInterpreter unimplemented");
}
// TODO: Not sure if this is needed in the unix version but it might be useful as a member func
void setFDInheritable(AutoCloseFD & fd, bool inherit)
{
if (fd.get() != INVALID_DESCRIPTOR) {
if (!SetHandleInformation(fd.get(), HANDLE_FLAG_INHERIT, inherit ? HANDLE_FLAG_INHERIT : 0)) {
throw WinError("Couldn't disable inheriting of handle");
}
}
}
AutoCloseFD nullFD()
{
// Create null handle to discard reads / writes
// https://stackoverflow.com/a/25609668
// https://github.com/nix-windows/nix/blob/windows-meson/src/libutil/util.cc#L2228
AutoCloseFD nul = CreateFileW(
L"NUL",
GENERIC_READ | GENERIC_WRITE,
// We don't care who reads / writes / deletes this file since it's NUL anyways
FILE_SHARE_READ | FILE_SHARE_WRITE | FILE_SHARE_DELETE,
NULL,
OPEN_EXISTING,
0,
NULL);
if (!nul.get()) {
throw WinError("Couldn't open NUL device");
}
// Let this handle be inheritable by child processes
setFDInheritable(nul, true);
return nul;
}
// Adapted from
// https://blogs.msdn.microsoft.com/twistylittlepassagesallalike/2011/04/23/everyone-quotes-command-line-arguments-the-wrong-way/
std::string windowsEscape(const std::string & str, bool cmd)
{
// TODO: This doesn't handle cmd.exe escaping.
if (cmd) {
throw UnimplementedError("cmd.exe escaping is not implemented");
}
if (str.find_first_of(" \t\n\v\"") == str.npos && !str.empty()) {
// No need to escape this one, the nonempty contents don't have a special character
return str;
}
std::string buffer;
// Add the opening quote
buffer += '"';
for (auto iter = str.begin();; ++iter) {
size_t backslashes = 0;
while (iter != str.end() && *iter == '\\') {
++iter;
++backslashes;
}
// We only escape backslashes if:
// - They come immediately before the closing quote
// - They come immediately before a quote in the middle of the string
// Both of these cases break the escaping if not handled. Otherwise backslashes are fine as-is
if (iter == str.end()) {
// Need to escape each backslash
buffer.append(backslashes * 2, '\\');
// Exit since we've reached the end of the string
break;
} else if (*iter == '"') {
// Need to escape each backslash and the intermediate quote character
buffer.append(backslashes * 2, '\\');
buffer += "\\\"";
} else {
// Don't escape the backslashes since they won't break the delimiter
buffer.append(backslashes, '\\');
buffer += *iter;
}
}
// Add the closing quote
return buffer + '"';
}
Pid spawnProcess(const Path & realProgram, const RunOptions & options, Pipe & out, Pipe & in)
{
// Setup pipes.
if (options.standardOut) {
// Don't inherit the read end of the output pipe
setFDInheritable(out.readSide, false);
} else {
out.writeSide = nullFD();
}
if (options.standardIn) {
// Don't inherit the write end of the input pipe
setFDInheritable(in.writeSide, false);
} else {
in.readSide = nullFD();
}
STARTUPINFOW startInfo = {0};
startInfo.cb = sizeof(startInfo);
startInfo.dwFlags = STARTF_USESTDHANDLES;
startInfo.hStdInput = in.readSide.get();
startInfo.hStdOutput = out.writeSide.get();
startInfo.hStdError = out.writeSide.get();
std::string envline;
// Retain the current processes' environment variables.
for (const auto & envVar : getEnv()) {
envline += (envVar.first + '=' + envVar.second + '\0');
}
// Also add new ones specified in options.
if (options.environment) {
for (const auto & envVar : *options.environment) {
envline += (envVar.first + '=' + envVar.second + '\0');
}
}
std::string cmdline = windowsEscape(realProgram, false);
for (const auto & arg : options.args) {
        // TODO: This isn't the right way to escape a Windows command line
// See https://learn.microsoft.com/en-us/windows/win32/api/shellapi/nf-shellapi-commandlinetoargvw
cmdline += ' ' + windowsEscape(arg, false);
}
PROCESS_INFORMATION procInfo = {0};
if (CreateProcessW(
// EXE path is provided in the cmdline
NULL,
string_to_os_string(cmdline).data(),
NULL,
NULL,
TRUE,
CREATE_UNICODE_ENVIRONMENT | CREATE_SUSPENDED,
string_to_os_string(envline).data(),
options.chdir.has_value() ? string_to_os_string(*options.chdir).data() : NULL,
&startInfo,
&procInfo)
== 0) {
throw WinError("CreateProcessW failed (%1%)", cmdline);
}
// Convert these to use RAII
AutoCloseFD process = procInfo.hProcess;
AutoCloseFD thread = procInfo.hThread;
// Add current process and child to job object so child terminates when parent terminates
// TODO: This spawns one job per child process. We can probably keep this as a global, and
// add children a single job so we don't use so many jobs at once.
Descriptor job = CreateJobObjectW(NULL, NULL);
if (job == NULL) {
TerminateProcess(procInfo.hProcess, 0);
throw WinError("Couldn't create job object for child process");
}
if (AssignProcessToJobObject(job, procInfo.hProcess) == FALSE) {
TerminateProcess(procInfo.hProcess, 0);
throw WinError("Couldn't assign child process to job object");
}
if (ResumeThread(procInfo.hThread) == (DWORD) -1) {
TerminateProcess(procInfo.hProcess, 0);
throw WinError("Couldn't resume child process thread");
}
return process;
}
// TODO: Merge this with Unix's runProgram since it's identical logic.
// Output = error code + "standard out" output stream // Output = error code + "standard out" output stream
std::pair<int, std::string> runProgram(RunOptions && options) std::pair<int, std::string> runProgram(RunOptions && options)
{ {
throw UnimplementedError("Cannot shell out to git on Windows yet"); StringSink sink;
} options.standardOut = &sink;
int status = 0;
try {
runProgram2(options);
} catch (ExecError & e) {
status = e.status;
}
return {status, std::move(sink.s)};
}
void runProgram2(const RunOptions & options) void runProgram2(const RunOptions & options)
{ {
throw UnimplementedError("Cannot shell out to git on Windows yet"); checkInterrupt();
assert(!(options.standardIn && options.input));
std::unique_ptr<Source> source_;
Source * source = options.standardIn;
if (options.input) {
source_ = std::make_unique<StringSource>(*options.input);
source = source_.get();
}
/* Create a pipe. */
Pipe out, in;
// TODO: I copied this from unix but this is handled again in spawnProcess, so might be weird to split it up like
// this
if (options.standardOut)
out.create();
if (source)
in.create();
Path realProgram = options.program;
// TODO: Implement shebang / program interpreter lookup on Windows
auto interpreter = getProgramInterpreter(realProgram);
std::optional<Finally<std::function<void()>>> resumeLoggerDefer;
if (options.isInteractive) {
logger->pause();
resumeLoggerDefer.emplace([]() { logger->resume(); });
}
Pid pid = spawnProcess(interpreter.has_value() ? *interpreter : realProgram, options, out, in);
// TODO: This is identical to unix, deduplicate?
out.writeSide.close();
std::thread writerThread;
std::promise<void> promise;
Finally doJoin([&] {
if (writerThread.joinable())
writerThread.join();
});
if (source) {
in.readSide.close();
writerThread = std::thread([&] {
try {
std::vector<char> buf(8 * 1024);
while (true) {
size_t n;
try {
n = source->read(buf.data(), buf.size());
} catch (EndOfFile &) {
break;
}
writeFull(in.writeSide.get(), {buf.data(), n});
}
promise.set_value();
} catch (...) {
promise.set_exception(std::current_exception());
}
in.writeSide.close();
});
}
if (options.standardOut)
drainFD(out.readSide.get(), *options.standardOut);
/* Wait for the child to finish. */
int status = pid.wait();
/* Wait for the writer thread to finish. */
if (source)
promise.get_future().get();
if (status)
throw ExecError(status, "program '%1%' %2%", options.program, statusToString(status));
} }
std::string statusToString(int status) std::string statusToString(int status)
@ -45,10 +373,8 @@ std::string statusToString(int status)
return "succeeded"; return "succeeded";
} }
bool statusOk(int status) bool statusOk(int status)
{ {
return status == 0; return status == 0;
} }
} }
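With the Windows stubs replaced, callers can use the same runProgram interface on both platforms. A hedged usage sketch follows; the program and arguments are illustrative, and the RunOptions fields and helpers are the ones referenced in the hunk above.

    #include "processes.hh"

    using namespace nix;

    // Run `git --version` and capture its standard output, going through the
    // new CreateProcessW-based spawnProcess/runProgram2 path above.
    std::string gitVersion()
    {
        auto [status, output] = runProgram(RunOptions{
            .program = "git",
            .lookupPath = true,
            .args = {"--version"},
        });
        if (!statusOk(status))
            throw ExecError(status, "program 'git' %1%", statusToString(status));
        return output;
    }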

View File

@ -259,7 +259,7 @@ static void main_nix_build(int argc, char * * argv)
auto store = openStore(); auto store = openStore();
auto evalStore = myArgs.evalStoreUrl ? openStore(*myArgs.evalStoreUrl) : store; auto evalStore = myArgs.evalStoreUrl ? openStore(*myArgs.evalStoreUrl) : store;
auto state = std::make_unique<EvalState>(myArgs.lookupPath, evalStore, store); auto state = std::make_unique<EvalState>(myArgs.lookupPath, evalStore, evalSettings, store);
state->repair = myArgs.repair; state->repair = myArgs.repair;
if (myArgs.repair) buildMode = bmRepair; if (myArgs.repair) buildMode = bmRepair;

View File

@ -1525,7 +1525,7 @@ static int main_nix_env(int argc, char * * argv)
auto store = openStore(); auto store = openStore();
globals.state = std::shared_ptr<EvalState>(new EvalState(myArgs.lookupPath, store)); globals.state = std::shared_ptr<EvalState>(new EvalState(myArgs.lookupPath, store, evalSettings));
globals.state->repair = myArgs.repair; globals.state->repair = myArgs.repair;
globals.instSource.nixExprPath = std::make_shared<SourcePath>( globals.instSource.nixExprPath = std::make_shared<SourcePath>(

View File

@ -115,7 +115,7 @@ bool createUserEnv(EvalState & state, PackageInfos & elems,
std::string str2 = str.str(); std::string str2 = str.str();
StringSource source { str2 }; StringSource source { str2 };
state.store->addToStoreFromDump( state.store->addToStoreFromDump(
source, "env-manifest.nix", FileSerialisationMethod::Flat, TextIngestionMethod {}, HashAlgorithm::SHA256, references); source, "env-manifest.nix", FileSerialisationMethod::Flat, ContentAddressMethod::Raw::Text, HashAlgorithm::SHA256, references);
}); });
/* Get the environment builder expression. */ /* Get the environment builder expression. */

View File

@ -157,7 +157,7 @@ static int main_nix_instantiate(int argc, char * * argv)
auto store = openStore(); auto store = openStore();
auto evalStore = myArgs.evalStoreUrl ? openStore(*myArgs.evalStoreUrl) : store; auto evalStore = myArgs.evalStoreUrl ? openStore(*myArgs.evalStoreUrl) : store;
auto state = std::make_unique<EvalState>(myArgs.lookupPath, evalStore, store); auto state = std::make_unique<EvalState>(myArgs.lookupPath, evalStore, evalSettings, store);
state->repair = myArgs.repair; state->repair = myArgs.repair;
Bindings & autoArgs = *myArgs.getAutoArgs(*state); Bindings & autoArgs = *myArgs.getAutoArgs(*state);

View File

@ -194,10 +194,10 @@ static void opAdd(Strings opFlags, Strings opArgs)
store. */ store. */
static void opAddFixed(Strings opFlags, Strings opArgs) static void opAddFixed(Strings opFlags, Strings opArgs)
{ {
auto method = FileIngestionMethod::Flat; ContentAddressMethod method = ContentAddressMethod::Raw::Flat;
for (auto & i : opFlags) for (auto & i : opFlags)
if (i == "--recursive") method = FileIngestionMethod::Recursive; if (i == "--recursive") method = ContentAddressMethod::Raw::NixArchive;
else throw UsageError("unknown flag '%1%'", i); else throw UsageError("unknown flag '%1%'", i);
if (opArgs.empty()) if (opArgs.empty())
@ -223,7 +223,7 @@ static void opPrintFixedPath(Strings opFlags, Strings opArgs)
auto method = FileIngestionMethod::Flat; auto method = FileIngestionMethod::Flat;
for (auto i : opFlags) for (auto i : opFlags)
if (i == "--recursive") method = FileIngestionMethod::Recursive; if (i == "--recursive") method = FileIngestionMethod::NixArchive;
else throw UsageError("unknown flag '%1%'", i); else throw UsageError("unknown flag '%1%'", i);
if (opArgs.size() != 3) if (opArgs.size() != 3)
@ -563,7 +563,7 @@ static void registerValidity(bool reregister, bool hashGiven, bool canonicalise)
if (!hashGiven) { if (!hashGiven) {
HashResult hash = hashPath( HashResult hash = hashPath(
{store->getFSAccessor(false), CanonPath { store->printStorePath(info->path) }}, {store->getFSAccessor(false), CanonPath { store->printStorePath(info->path) }},
FileSerialisationMethod::Recursive, HashAlgorithm::SHA256); FileSerialisationMethod::NixArchive, HashAlgorithm::SHA256);
info->narHash = hash.first; info->narHash = hash.first;
info->narSize = hash.second; info->narSize = hash.second;
} }

View File

@ -12,7 +12,7 @@ struct CmdAddToStore : MixDryRun, StoreCommand
{ {
Path path; Path path;
std::optional<std::string> namePart; std::optional<std::string> namePart;
ContentAddressMethod caMethod = FileIngestionMethod::Recursive; ContentAddressMethod caMethod = ContentAddressMethod::Raw::NixArchive;
HashAlgorithm hashAlgo = HashAlgorithm::SHA256; HashAlgorithm hashAlgo = HashAlgorithm::SHA256;
CmdAddToStore() CmdAddToStore()
@ -68,7 +68,7 @@ struct CmdAddFile : CmdAddToStore
{ {
CmdAddFile() CmdAddFile()
{ {
caMethod = FileIngestionMethod::Flat; caMethod = ContentAddressMethod::Raw::Flat;
} }
std::string description() override std::string description() override

Some files were not shown because too many files have changed in this diff.