Mirror of https://github.com/NixOS/nix.git, synced 2024-11-26 16:52:27 +00:00

Merge remote-tracking branch 'upstream/master' into overlayfs-store

This commit is contained in commit 28398e6d02
.github/labeler.yml (vendored, 2 changes)
@@ -16,7 +16,7 @@
 "new-cli":
   - src/nix/**/*
 
-"tests":
+"with-tests":
   # Unit tests
   - src/*/tests/**/*
   # Functional and integration tests
.gitignore (vendored, 3 changes)
@@ -18,7 +18,7 @@ perl/Makefile.config
 /doc/manual/generated/*
 /doc/manual/nix.json
 /doc/manual/conf-file.json
-/doc/manual/builtins.json
+/doc/manual/language.json
 /doc/manual/xp-features.json
 /doc/manual/src/SUMMARY.md
 /doc/manual/src/command-ref/new-cli

@@ -26,6 +26,7 @@ perl/Makefile.config
 /doc/manual/src/command-ref/experimental-features-shortlist.md
 /doc/manual/src/contributing/experimental-feature-descriptions.md
 /doc/manual/src/language/builtins.md
+/doc/manual/src/language/builtin-constants.md
 
 # /scripts/
 /scripts/nix-profile.sh
doc/manual/generate-builtin-constants.nix (new file, 29 lines)
@@ -0,0 +1,29 @@
let
  inherit (builtins) concatStringsSep attrValues mapAttrs;
  inherit (import ./utils.nix) optionalString squash;
in

builtinsInfo:
let
  showBuiltin = name: { doc, type, impure-only }:
    let
      type' = optionalString (type != null) " (${type})";

      impureNotice = optionalString impure-only ''
        Not available in [pure evaluation mode](@docroot@/command-ref/conf-file.md#conf-pure-eval).
      '';
    in
    squash ''
      <dt id="builtin-constants-${name}">
        <a href="#builtin-constants-${name}"><code>${name}</code>${type'}</a>
      </dt>
      <dd>

      ${doc}

      ${impureNotice}

      </dd>
    '';
in
concatStringsSep "\n" (attrValues (mapAttrs showBuiltin builtinsInfo))
@@ -1,24 +1,28 @@
 let
-  inherit (builtins) concatStringsSep attrNames;
+  inherit (builtins) concatStringsSep attrValues mapAttrs;
+  inherit (import ./utils.nix) optionalString squash;
 in
 
 builtinsInfo:
 let
-  showBuiltin = name:
+  showBuiltin = name: { doc, args, arity, experimental-feature }:
     let
-      inherit (builtinsInfo.${name}) doc args;
+      experimentalNotice = optionalString (experimental-feature != null) ''
+        This function is only available if the [${experimental-feature}](@docroot@/contributing/experimental-features.md#xp-feature-${experimental-feature}) experimental feature is enabled.
+      '';
     in
-    ''
+    squash ''
       <dt id="builtins-${name}">
         <a href="#builtins-${name}"><code>${name} ${listArgs args}</code></a>
       </dt>
       <dd>
 
       ${doc}
 
+      ${experimentalNotice}
+
       </dd>
     '';
   listArgs = args: concatStringsSep " " (map (s: "<var>${s}</var>") args);
 in
-concatStringsSep "\n" (map showBuiltin (attrNames builtinsInfo))
+concatStringsSep "\n" (attrValues (mapAttrs showBuiltin builtinsInfo))
 
@@ -128,14 +128,20 @@ $(d)/xp-features.json: $(bindir)/nix
 	$(trace-gen) $(dummy-env) NIX_PATH=nix/corepkgs=corepkgs $(bindir)/nix __dump-xp-features > $@.tmp
 	@mv $@.tmp $@
 
-$(d)/src/language/builtins.md: $(d)/builtins.json $(d)/generate-builtins.nix $(d)/src/language/builtins-prefix.md $(bindir)/nix
+$(d)/src/language/builtins.md: $(d)/language.json $(d)/generate-builtins.nix $(d)/src/language/builtins-prefix.md $(bindir)/nix
 	@cat doc/manual/src/language/builtins-prefix.md > $@.tmp
-	$(trace-gen) $(nix-eval) --expr 'import doc/manual/generate-builtins.nix (builtins.fromJSON (builtins.readFile $<))' >> $@.tmp;
+	$(trace-gen) $(nix-eval) --expr 'import doc/manual/generate-builtins.nix (builtins.fromJSON (builtins.readFile $<)).builtins' >> $@.tmp;
 	@cat doc/manual/src/language/builtins-suffix.md >> $@.tmp
 	@mv $@.tmp $@
 
-$(d)/builtins.json: $(bindir)/nix
-	$(trace-gen) $(dummy-env) NIX_PATH=nix/corepkgs=corepkgs $(bindir)/nix __dump-builtins > $@.tmp
+$(d)/src/language/builtin-constants.md: $(d)/language.json $(d)/generate-builtin-constants.nix $(d)/src/language/builtin-constants-prefix.md $(bindir)/nix
+	@cat doc/manual/src/language/builtin-constants-prefix.md > $@.tmp
+	$(trace-gen) $(nix-eval) --expr 'import doc/manual/generate-builtin-constants.nix (builtins.fromJSON (builtins.readFile $<)).constants' >> $@.tmp;
+	@cat doc/manual/src/language/builtin-constants-suffix.md >> $@.tmp
+	@mv $@.tmp $@
+
+$(d)/language.json: $(bindir)/nix
+	$(trace-gen) $(dummy-env) NIX_PATH=nix/corepkgs=corepkgs $(bindir)/nix __dump-language > $@.tmp
 	@mv $@.tmp $@
 
 # Generate the HTML manual.

@@ -167,7 +173,7 @@ doc/manual/generated/man1/nix3-manpages: $(d)/src/command-ref/new-cli
 	done
 	@touch $@
 
-$(docdir)/manual/index.html: $(MANUAL_SRCS) $(d)/book.toml $(d)/anchors.jq $(d)/custom.css $(d)/src/SUMMARY.md $(d)/src/command-ref/new-cli $(d)/src/contributing/experimental-feature-descriptions.md $(d)/src/command-ref/conf-file.md $(d)/src/language/builtins.md
+$(docdir)/manual/index.html: $(MANUAL_SRCS) $(d)/book.toml $(d)/anchors.jq $(d)/custom.css $(d)/src/SUMMARY.md $(d)/src/command-ref/new-cli $(d)/src/contributing/experimental-feature-descriptions.md $(d)/src/command-ref/conf-file.md $(d)/src/language/builtins.md $(d)/src/language/builtin-constants.md
 	$(trace-gen) \
 		tmp="$$(mktemp -d)"; \
 		cp -r doc/manual "$$tmp"; \
@@ -330,19 +330,31 @@ const redirects = {
     "ssec-relnotes-2.0": "release-notes/rl-2.0.html",
     "ssec-relnotes-2.1": "release-notes/rl-2.1.html",
     "ssec-relnotes-2.2": "release-notes/rl-2.2.html",
-    "ssec-relnotes-2.3": "release-notes/rl-2.3.html"
+    "ssec-relnotes-2.3": "release-notes/rl-2.3.html",
   },
   "language/values.html": {
     "simple-values": "#primitives",
     "lists": "#list",
     "strings": "#string",
     "lists": "#list",
-    "attribute-sets": "#attribute-set"
+    "attribute-sets": "#attribute-set",
   },
   "installation/installing-binary.html": {
     "linux": "uninstall.html#linux",
     "macos": "uninstall.html#macos",
-    "uninstalling": "uninstall.html"
+    "uninstalling": "uninstall.html",
+  }
+  "contributing/hacking.html": {
+    "nix-with-flakes": "#building-nix-with-flakes",
+    "classic-nix": "#building-nix",
+    "running-tests": "testing.html#running-tests",
+    "unit-tests": "testing.html#unit-tests",
+    "functional-tests": "testing.html#functional-tests",
+    "debugging-failing-functional-tests": "testing.html#debugging-failing-functional-tests",
+    "integration-tests": "testing.html#integration-tests",
+    "installer-tests": "testing.html#installer-tests",
+    "one-time-setup": "testing.html#one-time-setup",
+    "using-the-ci-generated-installer-for-manual-testing": "testing.html#using-the-ci-generated-installer-for-manual-testing",
   }
 };
 
@@ -104,6 +104,7 @@
 - [Glossary](glossary.md)
 - [Contributing](contributing/contributing.md)
   - [Hacking](contributing/hacking.md)
+  - [Testing](contributing/testing.md)
   - [Experimental Features](contributing/experimental-features.md)
   - [CLI guideline](contributing/cli-guideline.md)
   - [C++ style guide](contributing/cxx.md)
@@ -70,6 +70,8 @@ except for `--arg` and `--attr` / `-A` which are passed to [`nix-instantiate`](n
   Change the name of the symlink to the output path created from
   `result` to *outlink*.
 
+{{#include ./status-build-failure.md}}
+
 {{#include ./opt-common.md}}
 
 {{#include ./env-common.md}}
@@ -57,7 +57,7 @@ These options are for deleting old [profiles] prior to deleting unreachable [sto
   Delete all generations of profiles older than the specified amount (except for the generations that were active at that point in time).
   *period* is a value such as `30d`, which would mean 30 days.
 
-  This is the equivalent of invoking [`nix-env --delete-generations <period>`](@docroot@/command-ref/nix-env/delete-generations.md#generations-days) on each found profile.
+  This is the equivalent of invoking [`nix-env --delete-generations <period>`](@docroot@/command-ref/nix-env/delete-generations.md#generations-time) on each found profile.
   See the documentation of that command for additional information about the *period* argument.
 
 {{#include ./opt-common.md}}
@@ -20,22 +20,30 @@ This operation deletes the specified generations of the current profile.
 
 - The special value <span id="generations-old">`old`</span>
 
-  Delete all generations older than the current one.
+  Delete all generations except the current one.
 
-- <span id="generations-days">`<days>d`</span>:\
-  The last *days* days
+  > **WARNING**
+  >
+  > Older *and newer* generations will be deleted by this operation.
+  >
+  > One might expect this to just delete older generations than the curent one, but that is only true if the current generation is also the latest.
+  > Because one can roll back to a previous generation, it is possible to have generations newer than the current one.
+  > They will also be deleted.
+
+- <span id="generations-time">`<number>d`</span>:\
+  The last *number* days
 
   *Example*: `30d`
 
-  Delete all generations older than *days* days.
-  The generation that was active at that point in time is excluded, and will not be deleted.
+  Delete all generations created more than *number* days ago, except the most recent one of them.
+  This allows rolling back to generations that were available within the specified period.
 
-- <span id="generations-count">`+<count>`</span>:\
-  The last *count* generations up to the present
+- <span id="generations-count">`+<number>`</span>:\
+  The last *number* generations up to the present
 
   *Example*: `+5`
 
-  Keep the last *count* generations, along with any newer than current.
+  Keep the last *number* generations, along with any newer than current.
 
 Periodically deleting old generations is important to make garbage collection
 effective.

@@ -61,7 +69,7 @@ $ nix-env --delete-generations 3 4 8
 
 Delete the generations numbered 3, 4, and 8, so long as the current active generation is not any of those.
 
-## Keep most-recent by count count
+## Keep most-recent by count (number of generations)
 
 ```console
 $ nix-env --delete-generations +5

@@ -72,7 +80,7 @@ Suppose `30` is the current generation, and we currently have generations number
 Then this command will delete generations `20` through `25` (`<= 30 - 5`),
 and keep generations `26` through `31` (`> 30 - 5`).
 
-## Keep most-recent in days
+## Keep most-recent by time (number of days)
 
 ```console
 $ nix-env --delete-generations 30d
@@ -54,36 +54,7 @@ The following flags are available:
     previous build, the new output path is left in
     `/nix/store/name.check.`
 
-    Special exit codes:
-
-    - `100`\
-      Generic build failure, the builder process returned with a non-zero
-      exit code.
-
-    - `101`\
-      Build timeout, the build was aborted because it did not complete
-      within the specified `timeout`.
-
-    - `102`\
-      Hash mismatch, the build output was rejected because it does not
-      match the [`outputHash` attribute of the
-      derivation](@docroot@/language/advanced-attributes.md).
-
-    - `104`\
-      Not deterministic, the build succeeded in check mode but the
-      resulting output is not binary reproducible.
-
-    With the `--keep-going` flag it's possible for multiple failures to
-    occur, in this case the 1xx status codes are or combined using binary
-    or.
-
-        1100100
-           ^^^^
-           |||`- timeout
-           ||`-- output hash mismatch
-           |`--- build failure
-           `---- not deterministic
-
+{{#include ../status-build-failure.md}}
 
 {{#include ./opt-common.md}}
 
doc/manual/src/command-ref/status-build-failure.md (new file, 34 lines)
@@ -0,0 +1,34 @@
# Special exit codes for build failure

1xx status codes are used when requested builds failed.
The following codes are in use:

- `100` Generic build failure

  The builder process returned with a non-zero exit code.

- `101` Build timeout

  The build was aborted because it did not complete within the specified `timeout`.

- `102` Hash mismatch

  The build output was rejected because it does not match the
  [`outputHash` attribute of the derivation](@docroot@/language/advanced-attributes.md).

- `104` Not deterministic

  The build succeeded in check mode but the resulting output is not binary reproducible.

With the `--keep-going` flag it's possible for multiple failures to occur.
In this case the 1xx status codes are or combined using
[bitwise OR](https://en.wikipedia.org/wiki/Bitwise_operation#OR).

```
0b1100100
     ^^^^
     |||`- timeout
     ||`-- output hash mismatch
     |`--- build failure
     `---- not deterministic
```
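The new page above explains that, under `--keep-going`, the 1xx codes of several failures are combined with a bitwise OR. As a minimal, self-contained illustration (not part of this commit), the sketch below decodes such a combined status back into individual failure kinds, assuming the bit layout shown in the `0b1100100` diagram:

```cpp
// Sketch: decode a combined 1xx build-failure status (assumed bit layout
// from the diagram above: low bits = timeout, hash mismatch, build
// failure, not deterministic).
#include <cstdio>

int main()
{
    int status = 101 | 102;       // e.g. a timeout and a hash mismatch under --keep-going
    if (status < 100 || status > 111) {
        std::puts("not a 1xx build-failure status");
        return 1;
    }
    int bits = status & 0b1111;   // keep only the failure-kind bits
    std::printf("timeout:           %s\n", (bits & 0b0001) ? "yes" : "no");
    std::printf("hash mismatch:     %s\n", (bits & 0b0010) ? "yes" : "no");
    std::printf("build failure:     %s\n", (bits & 0b0100) ? "yes" : "no");
    std::printf("not deterministic: %s\n", (bits & 0b1000) ? "yes" : "no");
    return 0;
}
```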
@@ -12,14 +12,15 @@ The following instructions assume you already have some version of Nix installed
 
 [installation instructions]: ../installation/installation.md
 
-## Nix with flakes
+## Building Nix with flakes
 
-This section assumes you are using Nix with [flakes] enabled. See the [next section](#classic-nix) for equivalent instructions which don't require flakes.
+This section assumes you are using Nix with the [`flakes`] and [`nix-command`] experimental features enabled.
+See the [Building Nix](#building-nix) section for equivalent instructions using stable Nix interfaces.
 
-[flakes]: ../command-ref/new-cli/nix3-flake.md#description
+[`flakes`]: @docroot@/contributing/experimental-features.md#xp-feature-flakes
+[`nix-command`]: @docroot@/contributing/experimental-features.md#xp-nix-command
 
-To build all dependencies and start a shell in which all environment
-variables are set up so that those dependencies can be found:
+To build all dependencies and start a shell in which all environment variables are set up so that those dependencies can be found:
 
 ```console
 $ nix develop

@@ -55,20 +56,17 @@ To install it in `$(pwd)/outputs` and test it:
 nix (Nix) 2.12
 ```
 
-To build a release version of Nix:
+To build a release version of Nix for the current operating system and CPU architecture:
 
 ```console
 $ nix build
 ```
 
-You can also build Nix for one of the [supported target platforms](#target-platforms).
+You can also build Nix for one of the [supported platforms](#platforms).
 
-## Classic Nix
+## Building Nix
 
-This section is for Nix without [flakes].
-
-To build all dependencies and start a shell in which all environment
-variables are set up so that those dependencies can be found:
+To build all dependencies and start a shell in which all environment variables are set up so that those dependencies can be found:
 
 ```console
 $ nix-shell

@@ -102,13 +100,13 @@ To install it in `$(pwd)/outputs` and test it:
 nix (Nix) 2.12
 ```
 
-To build Nix for the current operating system and CPU architecture use
+To build a release version of Nix for the current operating system and CPU architecture:
 
 ```console
 $ nix-build
 ```
 
-You can also build Nix for one of the [supported target platforms](#target-platforms).
+You can also build Nix for one of the [supported platforms](#platforms).
 
 ## Platforms
 

@@ -192,171 +190,6 @@ Configure your editor to use the `clangd` from the shell, either by running it i
 > Some other editors (e.g. Emacs, Vim) need a plugin to support LSP servers in general (e.g. [lsp-mode](https://github.com/emacs-lsp/lsp-mode) for Emacs and [vim-lsp](https://github.com/prabirshrestha/vim-lsp) for vim).
 > Editor-specific setup is typically opinionated, so we will not cover it here in more detail.
 
-## Running tests
-
-### Unit-tests
-
-The unit-tests for each Nix library (`libexpr`, `libstore`, etc..) are defined
-under `src/{library_name}/tests` using the
-[googletest](https://google.github.io/googletest/) and
-[rapidcheck](https://github.com/emil-e/rapidcheck) frameworks.
-
-You can run the whole testsuite with `make check`, or the tests for a specific component with `make libfoo-tests_RUN`. Finer-grained filtering is also possible using the [--gtest_filter](https://google.github.io/googletest/advanced.html#running-a-subset-of-the-tests) command-line option.
-
-### Functional tests
-
-The functional tests reside under the `tests` directory and are listed in `tests/local.mk`.
-Each test is a bash script.
-
-The whole test suite can be run with:
-
-```shell-session
-$ make install && make installcheck
-ran test tests/foo.sh... [PASS]
-ran test tests/bar.sh... [PASS]
-...
-```
-
-Individual tests can be run with `make`:
-
-```shell-session
-$ make tests/${testName}.sh.test
-ran test tests/${testName}.sh... [PASS]
-```
-
-or without `make`:
-
-```shell-session
-$ ./mk/run-test.sh tests/${testName}.sh
-ran test tests/${testName}.sh... [PASS]
-```
-
-To see the complete output, one can also run:
-
-```shell-session
-$ ./mk/debug-test.sh tests/${testName}.sh
-+ foo
-output from foo
-+ bar
-output from bar
-...
-```
-
-The test script will then be traced with `set -x` and the output displayed as it happens, regardless of whether the test succeeds or fails.
-
-#### Debugging failing functional tests
-
-When a functional test fails, it usually does so somewhere in the middle of the script.
-
-To figure out what's wrong, it is convenient to run the test regularly up to the failing `nix` command, and then run that command with a debugger like GDB.
-
-For example, if the script looks like:
-
-```bash
-foo
-nix blah blub
-bar
-```
-edit it like so:
-
-```diff
- foo
--nix blah blub
-+gdb --args nix blah blub
- bar
-```
-
-Then, running the test with `./mk/debug-test.sh` will drop you into GDB once the script reaches that point:
-
-```shell-session
-$ ./mk/debug-test.sh tests/${testName}.sh
-...
-+ gdb blash blub
-GNU gdb (GDB) 12.1
-...
-(gdb)
-```
-
-One can debug the Nix invocation in all the usual ways.
-For example, enter `run` to start the Nix invocation.
-
-### Integration tests
-
-The integration tests are defined in the Nix flake under the `hydraJobs.tests` attribute.
-These tests include everything that needs to interact with external services or run Nix in a non-trivial distributed setup.
-Because these tests are expensive and require more than what the standard github-actions setup provides, they only run on the master branch (on <https://hydra.nixos.org/jobset/nix/master>).
-
-You can run them manually with `nix build .#hydraJobs.tests.{testName}` or `nix-build -A hydraJobs.tests.{testName}`
-
-### Installer tests
-
-After a one-time setup, the Nix repository's GitHub Actions continuous integration (CI) workflow can test the installer each time you push to a branch.
-
-Creating a Cachix cache for your installer tests and adding its authorization token to GitHub enables [two installer-specific jobs in the CI workflow](https://github.com/NixOS/nix/blob/88a45d6149c0e304f6eb2efcc2d7a4d0d569f8af/.github/workflows/ci.yml#L50-L91):
-
-- The `installer` job generates installers for the platforms below and uploads them to your Cachix cache:
-  - `x86_64-linux`
-  - `armv6l-linux`
-  - `armv7l-linux`
-  - `x86_64-darwin`
-
-- The `installer_test` job (which runs on `ubuntu-latest` and `macos-latest`) will try to install Nix with the cached installer and run a trivial Nix command.
-
-#### One-time setup
-
-1. Have a GitHub account with a fork of the [Nix repository](https://github.com/NixOS/nix).
-2. At cachix.org:
-   - Create or log in to an account.
-   - Create a Cachix cache using the format `<github-username>-nix-install-tests`.
-   - Navigate to the new cache > Settings > Auth Tokens.
-   - Generate a new Cachix auth token and copy the generated value.
-3. At github.com:
-   - Navigate to your Nix fork > Settings > Secrets > Actions > New repository secret.
-   - Name the secret `CACHIX_AUTH_TOKEN`.
-   - Paste the copied value of the Cachix cache auth token.
-
-#### Using the CI-generated installer for manual testing
-
-After the CI run completes, you can check the output to extract the installer URL:
-1. Click into the detailed view of the CI run.
-2. Click into any `installer_test` run (the URL you're here to extract will be the same in all of them).
-3. Click into the `Run cachix/install-nix-action@v...` step and click the detail triangle next to the first log line (it will also be `Run cachix/install-nix-action@v...`)
-4. Copy the value of `install_url`
-5. To generate an install command, plug this `install_url` and your GitHub username into this template:
-
-   ```console
-   curl -L <install_url> | sh -s -- --tarball-url-prefix https://<github-username>-nix-install-tests.cachix.org/serve
-   ```
-
-<!-- #### Manually generating test installers
-
-There's obviously a manual way to do this, and it's still the only way for
-platforms that lack GA runners.
-
-I did do this back in Fall 2020 (before the GA approach encouraged here). I'll
-sketch what I recall in case it encourages someone to fill in detail, but: I
-didn't know what I was doing at the time and had to fumble/ask around a lot--
-so I don't want to uphold any of it as "right". It may have been dumb or
-the _hard_ way from the getgo. Fundamentals may have changed since.
-
-Here's the build command I used to do this on and for x86_64-darwin:
-    nix build --out-link /tmp/foo ".#checks.x86_64-darwin.binaryTarball"
-
-I used the stable out-link to make it easier to script the next steps:
-    link=$(readlink /tmp/foo)
-    cp $link/*-darwin.tar.xz ~/somewheres
-
-I've lost the last steps and am just going from memory:
-
-From here, I think I had to extract and modify the `install` script to point
-it at this tarball (which I scped to my own site, but it might make more sense
-to just share them locally). I extracted this script once and then just
-search/replaced in it for each new build.
-
-The installer now supports a `--tarball-url-prefix` flag which _may_ have
-solved this need?
--->
 
 ### Checking links in the manual
 
 The build checks for broken internal links.
doc/manual/src/contributing/testing.md (new file, 167 lines)
@@ -0,0 +1,167 @@
# Running tests

## Unit-tests

The unit-tests for each Nix library (`libexpr`, `libstore`, etc..) are defined
under `src/{library_name}/tests` using the
[googletest](https://google.github.io/googletest/) and
[rapidcheck](https://github.com/emil-e/rapidcheck) frameworks.

You can run the whole testsuite with `make check`, or the tests for a specific component with `make libfoo-tests_RUN`. Finer-grained filtering is also possible using the [--gtest_filter](https://google.github.io/googletest/advanced.html#running-a-subset-of-the-tests) command-line option.

## Functional tests

The functional tests reside under the `tests` directory and are listed in `tests/local.mk`.
Each test is a bash script.

The whole test suite can be run with:

```shell-session
$ make install && make installcheck
ran test tests/foo.sh... [PASS]
ran test tests/bar.sh... [PASS]
...
```

Individual tests can be run with `make`:

```shell-session
$ make tests/${testName}.sh.test
ran test tests/${testName}.sh... [PASS]
```

or without `make`:

```shell-session
$ ./mk/run-test.sh tests/${testName}.sh
ran test tests/${testName}.sh... [PASS]
```

To see the complete output, one can also run:

```shell-session
$ ./mk/debug-test.sh tests/${testName}.sh
+ foo
output from foo
+ bar
output from bar
...
```

The test script will then be traced with `set -x` and the output displayed as it happens, regardless of whether the test succeeds or fails.

### Debugging failing functional tests

When a functional test fails, it usually does so somewhere in the middle of the script.

To figure out what's wrong, it is convenient to run the test regularly up to the failing `nix` command, and then run that command with a debugger like GDB.

For example, if the script looks like:

```bash
foo
nix blah blub
bar
```
edit it like so:

```diff
 foo
-nix blah blub
+gdb --args nix blah blub
 bar
```

Then, running the test with `./mk/debug-test.sh` will drop you into GDB once the script reaches that point:

```shell-session
$ ./mk/debug-test.sh tests/${testName}.sh
...
+ gdb blash blub
GNU gdb (GDB) 12.1
...
(gdb)
```

One can debug the Nix invocation in all the usual ways.
For example, enter `run` to start the Nix invocation.

## Integration tests

The integration tests are defined in the Nix flake under the `hydraJobs.tests` attribute.
These tests include everything that needs to interact with external services or run Nix in a non-trivial distributed setup.
Because these tests are expensive and require more than what the standard github-actions setup provides, they only run on the master branch (on <https://hydra.nixos.org/jobset/nix/master>).

You can run them manually with `nix build .#hydraJobs.tests.{testName}` or `nix-build -A hydraJobs.tests.{testName}`

## Installer tests

After a one-time setup, the Nix repository's GitHub Actions continuous integration (CI) workflow can test the installer each time you push to a branch.

Creating a Cachix cache for your installer tests and adding its authorization token to GitHub enables [two installer-specific jobs in the CI workflow](https://github.com/NixOS/nix/blob/88a45d6149c0e304f6eb2efcc2d7a4d0d569f8af/.github/workflows/ci.yml#L50-L91):

- The `installer` job generates installers for the platforms below and uploads them to your Cachix cache:
  - `x86_64-linux`
  - `armv6l-linux`
  - `armv7l-linux`
  - `x86_64-darwin`

- The `installer_test` job (which runs on `ubuntu-latest` and `macos-latest`) will try to install Nix with the cached installer and run a trivial Nix command.

### One-time setup

1. Have a GitHub account with a fork of the [Nix repository](https://github.com/NixOS/nix).
2. At cachix.org:
   - Create or log in to an account.
   - Create a Cachix cache using the format `<github-username>-nix-install-tests`.
   - Navigate to the new cache > Settings > Auth Tokens.
   - Generate a new Cachix auth token and copy the generated value.
3. At github.com:
   - Navigate to your Nix fork > Settings > Secrets > Actions > New repository secret.
   - Name the secret `CACHIX_AUTH_TOKEN`.
   - Paste the copied value of the Cachix cache auth token.

## Working on documentation

### Using the CI-generated installer for manual testing

After the CI run completes, you can check the output to extract the installer URL:
1. Click into the detailed view of the CI run.
2. Click into any `installer_test` run (the URL you're here to extract will be the same in all of them).
3. Click into the `Run cachix/install-nix-action@v...` step and click the detail triangle next to the first log line (it will also be `Run cachix/install-nix-action@v...`)
4. Copy the value of `install_url`
5. To generate an install command, plug this `install_url` and your GitHub username into this template:

   ```console
   curl -L <install_url> | sh -s -- --tarball-url-prefix https://<github-username>-nix-install-tests.cachix.org/serve
   ```

<!-- #### Manually generating test installers

There's obviously a manual way to do this, and it's still the only way for
platforms that lack GA runners.

I did do this back in Fall 2020 (before the GA approach encouraged here). I'll
sketch what I recall in case it encourages someone to fill in detail, but: I
didn't know what I was doing at the time and had to fumble/ask around a lot--
so I don't want to uphold any of it as "right". It may have been dumb or
the _hard_ way from the getgo. Fundamentals may have changed since.

Here's the build command I used to do this on and for x86_64-darwin:
    nix build --out-link /tmp/foo ".#checks.x86_64-darwin.binaryTarball"

I used the stable out-link to make it easier to script the next steps:
    link=$(readlink /tmp/foo)
    cp $link/*-darwin.tar.xz ~/somewheres

I've lost the last steps and am just going from memory:

From here, I think I had to extract and modify the `install` script to point
it at this tarball (which I scped to my own site, but it might make more sense
to just share them locally). I extracted this script once and then just
search/replaced in it for each new build.

The installer now supports a `--tarball-url-prefix` flag which _may_ have
solved this need?
-->
doc/manual/src/language/builtin-constants-prefix.md (new file, 5 lines)
@@ -0,0 +1,5 @@
# Built-in Constants

These constants are built into the Nix language evaluator:

<dl>
doc/manual/src/language/builtin-constants-suffix.md (new file, 1 line)
@@ -0,0 +1 @@
</dl>
@@ -1,43 +0,0 @@
# Built-in Constants

These constants are built into the Nix language evaluator:

- [`builtins`]{#builtins-builtins} (attribute set)

  Contains all the [built-in functions](./builtins.md) and values, in order to avoid polluting the global scope.

  Since built-in functions were added over time, [testing for attributes](./operators.md#has-attribute) in `builtins` can be used for graceful fallback on older Nix installations:

  ```nix
  if builtins ? getEnv then builtins.getEnv "PATH" else ""
  ```

- [`builtins.currentSystem`]{#builtins-currentSystem} (string)

  The built-in value `currentSystem` evaluates to the Nix platform
  identifier for the Nix installation on which the expression is being
  evaluated, such as `"i686-linux"` or `"x86_64-darwin"`.

  Not available in [pure evaluation mode](@docroot@/command-ref/conf-file.md#conf-pure-eval).

- [`builtins.currentTime`]{#builtins-currentTime} (integer)

  Return the [Unix time](https://en.wikipedia.org/wiki/Unix_time) at first evaluation.
  Repeated references to that name will re-use the initially obtained value.

  Example:

  ```console
  $ nix repl
  Welcome to Nix 2.15.1 Type :? for help.

  nix-repl> builtins.currentTime
  1683705525

  nix-repl> builtins.currentTime
  1683705525
  ```

  The [store path](@docroot@/glossary.md#gloss-store-path) of a derivation depending on `currentTime` will differ for each evaluation.

  Not available in [pure evaluation mode](@docroot@/command-ref/conf-file.md#conf-pure-eval).
@@ -1,3 +1,6 @@
 # Release X.Y (202?-??-??)
 
 - [`nix-channel`](../command-ref/nix-channel.md) now supports a `--list-generations` subcommand
+
+- Nix now allows unprivileged/[`allowed-users`](../command-ref/conf-file.md#conf-allowed-users) to sign paths.
+  Previously, only [`trusted-users`](../command-ref/conf-file.md#conf-trusted-users) users could sign paths.
@@ -151,7 +151,7 @@ DerivedPathsWithInfo InstallableFlake::toDerivedPaths()
             },
             ExtraPathInfoFlake::Flake {
                 .originalRef = flakeRef,
-                .resolvedRef = getLockedFlake()->flake.lockedRef,
+                .lockedRef = getLockedFlake()->flake.lockedRef,
             }),
         }};
 }
@@ -19,7 +19,7 @@ struct ExtraPathInfoFlake : ExtraPathInfoValue
      */
     struct Flake {
         FlakeRef originalRef;
-        FlakeRef resolvedRef;
+        FlakeRef lockedRef;
     };
 
     Flake flake;
@@ -95,11 +95,16 @@ RootValue allocRootValue(Value * v)
 #endif
 }
 
-void Value::print(const SymbolTable & symbols, std::ostream & str,
-    std::set<const void *> * seen) const
+void Value::print(const SymbolTable &symbols, std::ostream &str,
+    std::set<const void *> *seen, int depth) const
+
 {
     checkInterrupt();
 
+    if (depth <= 0) {
+        str << "«too deep»";
+        return;
+    }
     switch (internalType) {
     case tInt:
         str << integer;

@@ -123,7 +128,7 @@ void Value::print(const SymbolTable & symbols, std::ostream & str,
         str << "{ ";
         for (auto & i : attrs->lexicographicOrder(symbols)) {
             str << symbols[i->name] << " = ";
-            i->value->print(symbols, str, seen);
+            i->value->print(symbols, str, seen, depth - 1);
             str << "; ";
         }
         str << "}";

@@ -139,7 +144,7 @@ void Value::print(const SymbolTable & symbols, std::ostream & str,
         str << "[ ";
         for (auto v2 : listItems()) {
             if (v2)
-                v2->print(symbols, str, seen);
+                v2->print(symbols, str, seen, depth - 1);
             else
                 str << "(nullptr)";
             str << " ";

@@ -181,11 +186,10 @@ void Value::print(const SymbolTable & symbols, std::ostream & str,
     }
 }
 
-void Value::print(const SymbolTable & symbols, std::ostream & str, bool showRepeated) const
-{
+void Value::print(const SymbolTable &symbols, std::ostream &str,
+                  bool showRepeated, int depth) const {
     std::set<const void *> seen;
-    print(symbols, str, showRepeated ? nullptr : &seen);
+    print(symbols, str, showRepeated ? nullptr : &seen, depth);
 }
 
 // Pretty print types for assertion errors

@@ -211,20 +215,21 @@ const Value * getPrimOp(const Value &v) {
     return primOp;
 }
 
-std::string_view showType(ValueType type)
+std::string_view showType(ValueType type, bool withArticle)
 {
+    #define WA(a, w) withArticle ? a " " w : w
     switch (type) {
-        case nInt: return "an integer";
-        case nBool: return "a Boolean";
-        case nString: return "a string";
-        case nPath: return "a path";
+        case nInt: return WA("an", "integer");
+        case nBool: return WA("a", "Boolean");
+        case nString: return WA("a", "string");
+        case nPath: return WA("a", "path");
         case nNull: return "null";
-        case nAttrs: return "a set";
-        case nList: return "a list";
-        case nFunction: return "a function";
-        case nExternal: return "an external value";
-        case nFloat: return "a float";
-        case nThunk: return "a thunk";
+        case nAttrs: return WA("a", "set");
+        case nList: return WA("a", "list");
+        case nFunction: return WA("a", "function");
+        case nExternal: return WA("an", "external value");
+        case nFloat: return WA("a", "float");
+        case nThunk: return WA("a", "thunk");
     }
     abort();
 }

@@ -702,28 +707,34 @@ Path EvalState::toRealPath(const Path & path, const NixStringContext & context)
 }
 
 
-Value * EvalState::addConstant(const std::string & name, Value & v)
+Value * EvalState::addConstant(const std::string & name, Value & v, Constant info)
 {
     Value * v2 = allocValue();
     *v2 = v;
-    addConstant(name, v2);
+    addConstant(name, v2, info);
     return v2;
 }
 
 
-void EvalState::addConstant(const std::string & name, Value * v)
+void EvalState::addConstant(const std::string & name, Value * v, Constant info)
 {
-    staticBaseEnv->vars.emplace_back(symbols.create(name), baseEnvDispl);
-    baseEnv.values[baseEnvDispl++] = v;
     auto name2 = name.substr(0, 2) == "__" ? name.substr(2) : name;
-    baseEnv.values[0]->attrs->push_back(Attr(symbols.create(name2), v));
-}
 
+    constantInfos.push_back({name2, info});
 
-Value * EvalState::addPrimOp(const std::string & name,
-    size_t arity, PrimOpFun primOp)
-{
-    return addPrimOp(PrimOp { .fun = primOp, .arity = arity, .name = name });
+    if (!(evalSettings.pureEval && info.impureOnly)) {
+        /* Check the type, if possible.
+
+           We might know the type of a thunk in advance, so be allowed
+           to just write it down in that case. */
+        if (auto gotType = v->type(true); gotType != nThunk)
+            assert(info.type == gotType);
+
+        /* Install value the base environment. */
+        staticBaseEnv->vars.emplace_back(symbols.create(name), baseEnvDispl);
+        baseEnv.values[baseEnvDispl++] = v;
+        baseEnv.values[0]->attrs->push_back(Attr(symbols.create(name2), v));
+    }
 }
 
 

@@ -737,7 +748,10 @@ Value * EvalState::addPrimOp(PrimOp && primOp)
         vPrimOp->mkPrimOp(new PrimOp(primOp));
         Value v;
         v.mkApp(vPrimOp, vPrimOp);
-        return addConstant(primOp.name, v);
+        return addConstant(primOp.name, v, {
+            .type = nThunk, // FIXME
+            .doc = primOp.doc,
+        });
     }
 
     auto envName = symbols.create(primOp.name);

@@ -763,13 +777,13 @@ std::optional<EvalState::Doc> EvalState::getDoc(Value & v)
 {
     if (v.isPrimOp()) {
         auto v2 = &v;
-        if (v2->primOp->doc)
+        if (auto * doc = v2->primOp->doc)
             return Doc {
                 .pos = {},
                 .name = v2->primOp->name,
                 .arity = v2->primOp->arity,
                 .args = v2->primOp->args,
-                .doc = v2->primOp->doc,
+                .doc = doc,
             };
     }
     return {};
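The `Value::print` hunks above thread a `depth` budget through the recursive printer and emit `«too deep»` once it is exhausted. The following self-contained sketch (an assumption-level illustration, not code from this commit) shows the same cut-off pattern on a toy recursive structure:

```cpp
// Sketch: depth-limited recursive printing, mirroring the
// str << "«too deep»" branch added to Value::print above.
#include <iostream>
#include <vector>

struct Node { std::vector<Node> children; };

void print(const Node & n, std::ostream & str, int depth)
{
    if (depth <= 0) { str << "«too deep»"; return; }
    str << "[ ";
    for (auto & c : n.children) {
        print(c, str, depth - 1);   // each nesting level consumes one unit of budget
        str << " ";
    }
    str << "]";
}

int main()
{
    Node n{{ Node{{ Node{} }} }};   // three levels of nesting
    print(n, std::cout, 2);         // prints: [ [ «too deep» ] ]
    std::cout << "\n";
}
```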
@ -25,15 +25,72 @@ struct DerivedPath;
|
|||||||
enum RepairFlag : bool;
|
enum RepairFlag : bool;
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Function that implements a primop.
|
||||||
|
*/
|
||||||
typedef void (* PrimOpFun) (EvalState & state, const PosIdx pos, Value * * args, Value & v);
|
typedef void (* PrimOpFun) (EvalState & state, const PosIdx pos, Value * * args, Value & v);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Info about a primitive operation, and its implementation
|
||||||
|
*/
|
||||||
struct PrimOp
|
struct PrimOp
|
||||||
{
|
{
|
||||||
PrimOpFun fun;
|
/**
|
||||||
size_t arity;
|
* Name of the primop. `__` prefix is treated specially.
|
||||||
|
*/
|
||||||
std::string name;
|
std::string name;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Names of the parameters of a primop, for primops that take a
|
||||||
|
* fixed number of arguments to be substituted for these parameters.
|
||||||
|
*/
|
||||||
std::vector<std::string> args;
|
std::vector<std::string> args;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Aritiy of the primop.
|
||||||
|
*
|
||||||
|
* If `args` is not empty, this field will be computed from that
|
||||||
|
* field instead, so it doesn't need to be manually set.
|
||||||
|
*/
|
||||||
|
size_t arity = 0;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Optional free-form documentation about the primop.
|
||||||
|
*/
|
||||||
const char * doc = nullptr;
|
const char * doc = nullptr;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Implementation of the primop.
|
||||||
|
*/
|
||||||
|
PrimOpFun fun;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Optional experimental for this to be gated on.
|
||||||
|
*/
|
||||||
|
std::optional<ExperimentalFeature> experimentalFeature;
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Info about a constant
|
||||||
|
*/
|
||||||
|
struct Constant
|
||||||
|
{
|
||||||
|
/**
|
||||||
|
* Optional type of the constant (known since it is a fixed value).
|
||||||
|
*
|
||||||
|
* @todo we should use an enum for this.
|
||||||
|
*/
|
||||||
|
ValueType type = nThunk;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Optional free-form documentation about the constant.
|
||||||
|
*/
|
||||||
|
const char * doc = nullptr;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Whether the constant is impure, and not available in pure mode.
|
||||||
|
*/
|
||||||
|
bool impureOnly = false;
|
||||||
};
|
};
|
||||||
|
|
||||||
#if HAVE_BOEHMGC
|
#if HAVE_BOEHMGC
|
||||||
@ -65,8 +122,12 @@ std::string printValue(const EvalState & state, const Value & v);
|
|||||||
std::ostream & operator << (std::ostream & os, const ValueType t);
|
std::ostream & operator << (std::ostream & os, const ValueType t);
|
||||||
|
|
||||||
|
|
||||||
// FIXME: maybe change this to an std::variant<SourcePath, URL>.
|
struct SearchPathElem
|
||||||
typedef std::pair<std::string, std::string> SearchPathElem;
|
{
|
||||||
|
std::string prefix;
|
||||||
|
// FIXME: maybe change this to an std::variant<SourcePath, URL>.
|
||||||
|
std::string path;
|
||||||
|
};
|
||||||
typedef std::list<SearchPathElem> SearchPath;
|
typedef std::list<SearchPathElem> SearchPath;
|
||||||
|
|
||||||
|
|
||||||
@ -509,18 +570,23 @@ public:
|
|||||||
*/
|
*/
|
||||||
std::shared_ptr<StaticEnv> staticBaseEnv; // !!! should be private
|
std::shared_ptr<StaticEnv> staticBaseEnv; // !!! should be private
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Name and documentation about every constant.
|
||||||
|
*
|
||||||
|
* Constants from primops are hard to crawl, and their docs will go
|
||||||
|
* here too.
|
||||||
|
*/
|
||||||
|
std::vector<std::pair<std::string, Constant>> constantInfos;
|
||||||
|
|
||||||
private:
|
private:
|
||||||
|
|
||||||
unsigned int baseEnvDispl = 0;
|
unsigned int baseEnvDispl = 0;
|
||||||
|
|
||||||
void createBaseEnv();
|
void createBaseEnv();
|
||||||
|
|
||||||
Value * addConstant(const std::string & name, Value & v);
|
Value * addConstant(const std::string & name, Value & v, Constant info);
|
||||||
|
|
||||||
void addConstant(const std::string & name, Value * v);
|
void addConstant(const std::string & name, Value * v, Constant info);
|
||||||
|
|
||||||
Value * addPrimOp(const std::string & name,
|
|
||||||
size_t arity, PrimOpFun primOp);
|
|
||||||
|
|
||||||
Value * addPrimOp(PrimOp && primOp);
|
Value * addPrimOp(PrimOp && primOp);
|
||||||
|
|
||||||
@ -534,6 +600,10 @@ public:
|
|||||||
std::optional<std::string> name;
|
std::optional<std::string> name;
|
||||||
size_t arity;
|
size_t arity;
|
||||||
std::vector<std::string> args;
|
std::vector<std::string> args;
|
||||||
|
/**
|
||||||
|
* Unlike the other `doc` fields in this file, this one should never be
|
||||||
|
* `null`.
|
||||||
|
*/
|
||||||
const char * doc;
|
const char * doc;
|
||||||
};
|
};
|
||||||
|
|
||||||
@ -700,8 +770,11 @@ struct DebugTraceStacker {
|
|||||||
|
|
||||||
/**
|
/**
|
||||||
* @return A string representing the type of the value `v`.
|
* @return A string representing the type of the value `v`.
|
||||||
|
*
|
||||||
|
* @param withArticle Whether to begin with an english article, e.g. "an
|
||||||
|
* integer" vs "integer".
|
||||||
*/
|
*/
|
||||||
std::string_view showType(ValueType type);
|
std::string_view showType(ValueType type, bool withArticle = true);
|
||||||
std::string showType(const Value & v);
|
std::string showType(const Value & v);
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@ -733,7 +806,12 @@ struct EvalSettings : Config
|
|||||||
|
|
||||||
Setting<Strings> nixPath{
|
Setting<Strings> nixPath{
|
||||||
this, getDefaultNixPath(), "nix-path",
|
this, getDefaultNixPath(), "nix-path",
|
||||||
"List of directories to be searched for `<...>` file references."};
|
R"(
|
||||||
|
List of directories to be searched for `<...>` file references
|
||||||
|
|
||||||
|
In particular, outside of [pure evaluation mode](#conf-pure-evaluation), this determines the value of
|
||||||
|
[`builtins.nixPath`](@docroot@/language/builtin-constants.md#builtin-constants-nixPath).
|
||||||
|
)"};
|
||||||
|
|
||||||
Setting<bool> restrictEval{
|
Setting<bool> restrictEval{
|
||||||
this, false, "restrict-eval",
|
this, false, "restrict-eval",
|
||||||
|
@ -788,9 +788,6 @@ static RegisterPrimOp r2({
|
|||||||
```nix
|
```nix
|
||||||
(builtins.getFlake "github:edolstra/dwarffs").rev
|
(builtins.getFlake "github:edolstra/dwarffs").rev
|
||||||
```
|
```
|
||||||
|
|
||||||
This function is only available if you enable the experimental feature
|
|
||||||
`flakes`.
|
|
||||||
)",
|
)",
|
||||||
.fun = prim_getFlake,
|
.fun = prim_getFlake,
|
||||||
.experimentalFeature = Xp::Flakes,
|
.experimentalFeature = Xp::Flakes,
|
||||||
|
@ -36,7 +36,7 @@ static inline PosIdx makeCurPos(const YYLTYPE & loc, ParseData * data)
|
|||||||
#define CUR_POS makeCurPos(*yylloc, data)
|
#define CUR_POS makeCurPos(*yylloc, data)
|
||||||
|
|
||||||
// backup to recover from yyless(0)
|
// backup to recover from yyless(0)
|
||||||
YYLTYPE prev_yylloc;
|
thread_local YYLTYPE prev_yylloc;
|
||||||
|
|
||||||
static void initLoc(YYLTYPE * loc)
|
static void initLoc(YYLTYPE * loc)
|
||||||
{
|
{
|
||||||
|
@ -275,7 +275,12 @@ static Expr * stripIndentation(const PosIdx pos, SymbolTable & symbols,
|
|||||||
}
|
}
|
||||||
|
|
||||||
/* If this is a single string, then don't do a concatenation. */
|
/* If this is a single string, then don't do a concatenation. */
|
||||||
return es2->size() == 1 && dynamic_cast<ExprString *>((*es2)[0].second) ? (*es2)[0].second : new ExprConcatStrings(pos, true, es2);
|
if (es2->size() == 1 && dynamic_cast<ExprString *>((*es2)[0].second)) {
|
||||||
|
auto *const result = (*es2)[0].second;
|
||||||
|
delete es2;
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
return new ExprConcatStrings(pos, true, es2);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
@ -330,7 +335,7 @@ void yyerror(YYLTYPE * loc, yyscan_t scanner, ParseData * data, const char * err
|
|||||||
%type <ind_string_parts> ind_string_parts
|
%type <ind_string_parts> ind_string_parts
|
||||||
%type <e> path_start string_parts string_attr
|
%type <e> path_start string_parts string_attr
|
||||||
%type <id> attr
|
%type <id> attr
|
||||||
%token <id> ID ATTRPATH
|
%token <id> ID
|
||||||
%token <str> STR IND_STR
|
%token <str> STR IND_STR
|
||||||
%token <n> INT
|
%token <n> INT
|
||||||
%token <nf> FLOAT
|
%token <nf> FLOAT
|
||||||
@ -741,7 +746,10 @@ void EvalState::addToSearchPath(const std::string & s)
|
|||||||
path = std::string(s, pos + 1);
|
path = std::string(s, pos + 1);
|
||||||
}
|
}
|
||||||
|
|
||||||
searchPath.emplace_back(prefix, path);
|
searchPath.emplace_back(SearchPathElem {
|
||||||
|
.prefix = prefix,
|
||||||
|
.path = path,
|
||||||
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
@ -755,11 +763,11 @@ SourcePath EvalState::findFile(SearchPath & searchPath, const std::string_view p
 {
     for (auto & i : searchPath) {
         std::string suffix;
-        if (i.first.empty())
+        if (i.prefix.empty())
             suffix = concatStrings("/", path);
         else {
-            auto s = i.first.size();
-            if (path.compare(0, s, i.first) != 0 ||
+            auto s = i.prefix.size();
+            if (path.compare(0, s, i.prefix) != 0 ||
                 (path.size() > s && path[s] != '/'))
                 continue;
             suffix = path.size() == s ? "" : concatStrings("/", path.substr(s));
@ -785,47 +793,47 @@ SourcePath EvalState::findFile(SearchPath & searchPath, const std::string_view p
 
 std::pair<bool, std::string> EvalState::resolveSearchPathElem(const SearchPathElem & elem)
 {
-    auto i = searchPathResolved.find(elem.second);
+    auto i = searchPathResolved.find(elem.path);
     if (i != searchPathResolved.end()) return i->second;
 
     std::pair<bool, std::string> res;
 
-    if (EvalSettings::isPseudoUrl(elem.second)) {
+    if (EvalSettings::isPseudoUrl(elem.path)) {
         try {
             auto storePath = fetchers::downloadTarball(
-                store, EvalSettings::resolvePseudoUrl(elem.second), "source", false).tree.storePath;
+                store, EvalSettings::resolvePseudoUrl(elem.path), "source", false).tree.storePath;
             res = { true, store->toRealPath(storePath) };
         } catch (FileTransferError & e) {
             logWarning({
-                .msg = hintfmt("Nix search path entry '%1%' cannot be downloaded, ignoring", elem.second)
+                .msg = hintfmt("Nix search path entry '%1%' cannot be downloaded, ignoring", elem.path)
             });
             res = { false, "" };
         }
     }
 
-    else if (hasPrefix(elem.second, "flake:")) {
+    else if (hasPrefix(elem.path, "flake:")) {
         experimentalFeatureSettings.require(Xp::Flakes);
-        auto flakeRef = parseFlakeRef(elem.second.substr(6), {}, true, false);
-        debug("fetching flake search path element '%s''", elem.second);
+        auto flakeRef = parseFlakeRef(elem.path.substr(6), {}, true, false);
+        debug("fetching flake search path element '%s''", elem.path);
         auto storePath = flakeRef.resolve(store).fetchTree(store).first.storePath;
         res = { true, store->toRealPath(storePath) };
     }
 
     else {
-        auto path = absPath(elem.second);
+        auto path = absPath(elem.path);
         if (pathExists(path))
             res = { true, path };
         else {
             logWarning({
-                .msg = hintfmt("Nix search path entry '%1%' does not exist, ignoring", elem.second)
+                .msg = hintfmt("Nix search path entry '%1%' does not exist, ignoring", elem.path)
             });
             res = { false, "" };
         }
     }
 
-    debug("resolved search path element '%s' to '%s'", elem.second, res.second);
+    debug("resolved search path element '%s' to '%s'", elem.path, res.second);
 
-    searchPathResolved[elem.second] = res;
+    searchPathResolved[elem.path] = res;
     return res;
 }
 
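For reference, these hunks replace the old string-pair search-path entries with named fields. A hedged sketch of the entry shape implied by the `.prefix`/`.path` designated initializers (the real declaration lives elsewhere in the tree and may carry more members):

```cpp
#include <string>
#include <vector>

// Sketch only: field names are taken from the hunks above; everything
// else (location, extra members) is an assumption.
struct SearchPathElem
{
    std::string prefix; // e.g. "nixpkgs" for a `nixpkgs=/path` entry, "" for bare paths
    std::string path;   // filesystem path, pseudo-URL, or "flake:..." reference
};

using SearchPath = std::vector<SearchPathElem>;

// Usage as in EvalState::addToSearchPath above:
// searchPath.emplace_back(SearchPathElem { .prefix = prefix, .path = path });
```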
@ -238,7 +238,7 @@ static void import(EvalState & state, const PosIdx pos, Value & vPath, Value * v
     }
 }
 
-static RegisterPrimOp primop_scopedImport(RegisterPrimOp::Info {
+static RegisterPrimOp primop_scopedImport(PrimOp {
     .name = "scopedImport",
     .arity = 2,
     .fun = [](EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
|||||||
v.listElems()[n++] = i;
|
v.listElems()[n++] = i;
|
||||||
}
|
}
|
||||||
|
|
||||||
static RegisterPrimOp primop_genericClosure(RegisterPrimOp::Info {
|
static RegisterPrimOp primop_genericClosure(PrimOp {
|
||||||
.name = "__genericClosure",
|
.name = "__genericClosure",
|
||||||
.args = {"attrset"},
|
.args = {"attrset"},
|
||||||
.arity = 1,
|
.arity = 1,
|
||||||
@ -809,7 +809,7 @@ static void prim_addErrorContext(EvalState & state, const PosIdx pos, Value * *
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
static RegisterPrimOp primop_addErrorContext(RegisterPrimOp::Info {
|
static RegisterPrimOp primop_addErrorContext(PrimOp {
|
||||||
.name = "__addErrorContext",
|
.name = "__addErrorContext",
|
||||||
.arity = 2,
|
.arity = 2,
|
||||||
.fun = prim_addErrorContext,
|
.fun = prim_addErrorContext,
|
||||||
@ -1400,7 +1400,7 @@ drvName, Bindings * attrs, Value & v)
|
|||||||
v.mkAttrs(result);
|
v.mkAttrs(result);
|
||||||
}
|
}
|
||||||
|
|
||||||
static RegisterPrimOp primop_derivationStrict(RegisterPrimOp::Info {
|
static RegisterPrimOp primop_derivationStrict(PrimOp {
|
||||||
.name = "derivationStrict",
|
.name = "derivationStrict",
|
||||||
.arity = 1,
|
.arity = 1,
|
||||||
.fun = prim_derivationStrict,
|
.fun = prim_derivationStrict,
|
||||||
@ -1656,7 +1656,10 @@ static void prim_findFile(EvalState & state, const PosIdx pos, Value * * args, V
|
|||||||
}));
|
}));
|
||||||
}
|
}
|
||||||
|
|
||||||
searchPath.emplace_back(prefix, path);
|
searchPath.emplace_back(SearchPathElem {
|
||||||
|
.prefix = prefix,
|
||||||
|
.path = path,
|
||||||
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
auto path = state.forceStringNoCtx(*args[1], pos, "while evaluating the second argument passed to builtins.findFile");
|
auto path = state.forceStringNoCtx(*args[1], pos, "while evaluating the second argument passed to builtins.findFile");
|
||||||
@ -1664,9 +1667,52 @@ static void prim_findFile(EvalState & state, const PosIdx pos, Value * * args, V
|
|||||||
v.mkPath(state.checkSourcePath(state.findFile(searchPath, path, pos)));
|
v.mkPath(state.checkSourcePath(state.findFile(searchPath, path, pos)));
|
||||||
}
|
}
|
||||||
|
|
||||||
static RegisterPrimOp primop_findFile(RegisterPrimOp::Info {
|
static RegisterPrimOp primop_findFile(PrimOp {
|
||||||
.name = "__findFile",
|
.name = "__findFile",
|
||||||
.arity = 2,
|
.args = {"search path", "lookup path"},
|
||||||
|
.doc = R"(
|
||||||
|
Look up the given path with the given search path.
|
||||||
|
|
||||||
|
A search path is represented list of [attribute sets](./values.md#attribute-set) with two attributes, `prefix`, and `path`.
|
||||||
|
`prefix` is a relative path.
|
||||||
|
`path` denotes a file system location; the exact syntax depends on the command line interface.
|
||||||
|
|
||||||
|
Examples of search path attribute sets:
|
||||||
|
|
||||||
|
- ```
|
||||||
|
{
|
||||||
|
prefix = "nixos-config";
|
||||||
|
path = "/etc/nixos/configuration.nix";
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
- ```
|
||||||
|
{
|
||||||
|
prefix = "";
|
||||||
|
path = "/nix/var/nix/profiles/per-user/root/channels";
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
The lookup algorithm checks each entry until a match is found, returning a [path value](@docroot@/language/values.html#type-path) of the match.
|
||||||
|
|
||||||
|
This is the process for each entry:
|
||||||
|
If the lookup path matches `prefix`, then the remainder of the lookup path (the "suffix") is searched for within the directory denoted by `patch`.
|
||||||
|
Note that the `path` may need to be downloaded at this point to look inside.
|
||||||
|
If the suffix is found inside that directory, then the entry is a match;
|
||||||
|
the combined absolute path of the directory (now downloaded if need be) and the suffix is returned.
|
||||||
|
|
||||||
|
The syntax
|
||||||
|
|
||||||
|
```nix
|
||||||
|
<nixpkgs>
|
||||||
|
```
|
||||||
|
|
||||||
|
is equivalent to:
|
||||||
|
|
||||||
|
```nix
|
||||||
|
builtins.findFile builtins.nixPath "nixpkgs"
|
||||||
|
```
|
||||||
|
)",
|
||||||
.fun = prim_findFile,
|
.fun = prim_findFile,
|
||||||
});
|
});
|
||||||
|
|
||||||
@ -2385,7 +2431,7 @@ static void prim_unsafeGetAttrPos(EvalState & state, const PosIdx pos, Value * *
|
|||||||
state.mkPos(v, i->pos);
|
state.mkPos(v, i->pos);
|
||||||
}
|
}
|
||||||
|
|
||||||
static RegisterPrimOp primop_unsafeGetAttrPos(RegisterPrimOp::Info {
|
static RegisterPrimOp primop_unsafeGetAttrPos(PrimOp {
|
||||||
.name = "__unsafeGetAttrPos",
|
.name = "__unsafeGetAttrPos",
|
||||||
.arity = 2,
|
.arity = 2,
|
||||||
.fun = prim_unsafeGetAttrPos,
|
.fun = prim_unsafeGetAttrPos,
|
||||||
@ -4058,10 +4104,10 @@ static RegisterPrimOp primop_splitVersion({
|
|||||||
RegisterPrimOp::PrimOps * RegisterPrimOp::primOps;
|
RegisterPrimOp::PrimOps * RegisterPrimOp::primOps;
|
||||||
|
|
||||||
|
|
||||||
RegisterPrimOp::RegisterPrimOp(Info && info)
|
RegisterPrimOp::RegisterPrimOp(PrimOp && primOp)
|
||||||
{
|
{
|
||||||
if (!primOps) primOps = new PrimOps;
|
if (!primOps) primOps = new PrimOps;
|
||||||
primOps->push_back(std::move(info));
|
primOps->push_back(std::move(primOp));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
@ -4074,54 +4120,202 @@ void EvalState::createBaseEnv()
|
|||||||
|
|
||||||
/* `builtins' must be first! */
|
/* `builtins' must be first! */
|
||||||
v.mkAttrs(buildBindings(128).finish());
|
v.mkAttrs(buildBindings(128).finish());
|
||||||
addConstant("builtins", v);
|
addConstant("builtins", v, {
|
||||||
|
.type = nAttrs,
|
||||||
|
.doc = R"(
|
||||||
|
Contains all the [built-in functions](@docroot@/language/builtins.md) and values.
|
||||||
|
|
||||||
|
Since built-in functions were added over time, [testing for attributes](./operators.md#has-attribute) in `builtins` can be used for graceful fallback on older Nix installations:
|
||||||
|
|
||||||
|
```nix
|
||||||
|
# if hasContext is not available, we assume `s` has a context
|
||||||
|
if builtins ? hasContext then builtins.hasContext s else true
|
||||||
|
```
|
||||||
|
)",
|
||||||
|
});
|
||||||
|
|
||||||
v.mkBool(true);
|
v.mkBool(true);
|
||||||
addConstant("true", v);
|
addConstant("true", v, {
|
||||||
|
.type = nBool,
|
||||||
|
.doc = R"(
|
||||||
|
Primitive value.
|
||||||
|
|
||||||
|
It can be returned by
|
||||||
|
[comparison operators](@docroot@/language/operators.md#Comparison)
|
||||||
|
and used in
|
||||||
|
[conditional expressions](@docroot@/language/constructs.md#Conditionals).
|
||||||
|
|
||||||
|
The name `true` is not special, and can be shadowed:
|
||||||
|
|
||||||
|
```nix-repl
|
||||||
|
nix-repl> let true = 1; in true
|
||||||
|
1
|
||||||
|
```
|
||||||
|
)",
|
||||||
|
});
|
||||||
|
|
||||||
v.mkBool(false);
|
v.mkBool(false);
|
||||||
addConstant("false", v);
|
addConstant("false", v, {
|
||||||
|
.type = nBool,
|
||||||
|
.doc = R"(
|
||||||
|
Primitive value.
|
||||||
|
|
||||||
|
It can be returned by
|
||||||
|
[comparison operators](@docroot@/language/operators.md#Comparison)
|
||||||
|
and used in
|
||||||
|
[conditional expressions](@docroot@/language/constructs.md#Conditionals).
|
||||||
|
|
||||||
|
The name `false` is not special, and can be shadowed:
|
||||||
|
|
||||||
|
```nix-repl
|
||||||
|
nix-repl> let false = 1; in false
|
||||||
|
1
|
||||||
|
```
|
||||||
|
)",
|
||||||
|
});
|
||||||
|
|
||||||
v.mkNull();
|
v.mkNull();
|
||||||
addConstant("null", v);
|
addConstant("null", v, {
|
||||||
|
.type = nNull,
|
||||||
|
.doc = R"(
|
||||||
|
Primitive value.
|
||||||
|
|
||||||
|
The name `null` is not special, and can be shadowed:
|
||||||
|
|
||||||
|
```nix-repl
|
||||||
|
nix-repl> let null = 1; in null
|
||||||
|
1
|
||||||
|
```
|
||||||
|
)",
|
||||||
|
});
|
||||||
|
|
||||||
if (!evalSettings.pureEval) {
|
if (!evalSettings.pureEval) {
|
||||||
v.mkInt(time(0));
|
v.mkInt(time(0));
|
||||||
addConstant("__currentTime", v);
|
|
||||||
|
|
||||||
v.mkString(settings.thisSystem.get());
|
|
||||||
addConstant("__currentSystem", v);
|
|
||||||
}
|
}
|
||||||
|
addConstant("__currentTime", v, {
|
||||||
|
.type = nInt,
|
||||||
|
.doc = R"(
|
||||||
|
Return the [Unix time](https://en.wikipedia.org/wiki/Unix_time) at first evaluation.
|
||||||
|
Repeated references to that name will re-use the initially obtained value.
|
||||||
|
|
||||||
|
Example:
|
||||||
|
|
||||||
|
```console
|
||||||
|
$ nix repl
|
||||||
|
Welcome to Nix 2.15.1 Type :? for help.
|
||||||
|
|
||||||
|
nix-repl> builtins.currentTime
|
||||||
|
1683705525
|
||||||
|
|
||||||
|
nix-repl> builtins.currentTime
|
||||||
|
1683705525
|
||||||
|
```
|
||||||
|
|
||||||
|
The [store path](@docroot@/glossary.md#gloss-store-path) of a derivation depending on `currentTime` will differ for each evaluation, unless both evaluate `builtins.currentTime` in the same second.
|
||||||
|
)",
|
||||||
|
.impureOnly = true,
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!evalSettings.pureEval) {
|
||||||
|
v.mkString(settings.thisSystem.get());
|
||||||
|
}
|
||||||
|
addConstant("__currentSystem", v, {
|
||||||
|
.type = nString,
|
||||||
|
.doc = R"(
|
||||||
|
The value of the [`system` configuration option](@docroot@/command-ref/conf-file.md#conf-pure-eval).
|
||||||
|
|
||||||
|
It can be used to set the `system` attribute for [`builtins.derivation`](@docroot@/language/derivations.md) such that the resulting derivation can be built on the same system that evaluates the Nix expression:
|
||||||
|
|
||||||
|
```nix
|
||||||
|
builtins.derivation {
|
||||||
|
# ...
|
||||||
|
system = builtins.currentSystem;
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
It can be overridden in order to create derivations for different system than the current one:
|
||||||
|
|
||||||
|
```console
|
||||||
|
$ nix-instantiate --system "mips64-linux" --eval --expr 'builtins.currentSystem'
|
||||||
|
"mips64-linux"
|
||||||
|
```
|
||||||
|
)",
|
||||||
|
.impureOnly = true,
|
||||||
|
});
|
||||||
|
|
||||||
v.mkString(nixVersion);
|
v.mkString(nixVersion);
|
||||||
addConstant("__nixVersion", v);
|
addConstant("__nixVersion", v, {
|
||||||
|
.type = nString,
|
||||||
|
.doc = R"(
|
||||||
|
The version of Nix.
|
||||||
|
|
||||||
|
For example, where the command line returns the current Nix version,
|
||||||
|
|
||||||
|
```shell-session
|
||||||
|
$ nix --version
|
||||||
|
nix (Nix) 2.16.0
|
||||||
|
```
|
||||||
|
|
||||||
|
the Nix language evaluator returns the same value:
|
||||||
|
|
||||||
|
```nix-repl
|
||||||
|
nix-repl> builtins.nixVersion
|
||||||
|
"2.16.0"
|
||||||
|
```
|
||||||
|
)",
|
||||||
|
});
|
||||||
|
|
||||||
v.mkString(store->storeDir);
|
v.mkString(store->storeDir);
|
||||||
addConstant("__storeDir", v);
|
addConstant("__storeDir", v, {
|
||||||
|
.type = nString,
|
||||||
|
.doc = R"(
|
||||||
|
Logical file system location of the [Nix store](@docroot@/glossary.md#gloss-store) currently in use.
|
||||||
|
|
||||||
|
This value is determined by the `store` parameter in [Store URLs](@docroot@/command-ref/new-cli/nix3-help-stores.md):
|
||||||
|
|
||||||
|
```shell-session
|
||||||
|
$ nix-instantiate --store 'dummy://?store=/blah' --eval --expr builtins.storeDir
|
||||||
|
"/blah"
|
||||||
|
```
|
||||||
|
)",
|
||||||
|
});
|
||||||
|
|
||||||
/* Language version. This should be increased every time a new
|
/* Language version. This should be increased every time a new
|
||||||
language feature gets added. It's not necessary to increase it
|
language feature gets added. It's not necessary to increase it
|
||||||
when primops get added, because you can just use `builtins ?
|
when primops get added, because you can just use `builtins ?
|
||||||
primOp' to check. */
|
primOp' to check. */
|
||||||
v.mkInt(6);
|
v.mkInt(6);
|
||||||
addConstant("__langVersion", v);
|
addConstant("__langVersion", v, {
|
||||||
|
.type = nInt,
|
||||||
|
.doc = R"(
|
||||||
|
The current version of the Nix language.
|
||||||
|
)",
|
||||||
|
});
|
||||||
|
|
||||||
// Miscellaneous
|
// Miscellaneous
|
||||||
if (evalSettings.enableNativeCode) {
|
if (evalSettings.enableNativeCode) {
|
||||||
addPrimOp("__importNative", 2, prim_importNative);
|
addPrimOp({
|
||||||
addPrimOp("__exec", 1, prim_exec);
|
.name = "__importNative",
|
||||||
|
.arity = 2,
|
||||||
|
.fun = prim_importNative,
|
||||||
|
});
|
||||||
|
addPrimOp({
|
||||||
|
.name = "__exec",
|
||||||
|
.arity = 1,
|
||||||
|
.fun = prim_exec,
|
||||||
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
addPrimOp({
|
addPrimOp({
|
||||||
.fun = evalSettings.traceVerbose ? prim_trace : prim_second,
|
|
||||||
.arity = 2,
|
|
||||||
.name = "__traceVerbose",
|
.name = "__traceVerbose",
|
||||||
.args = { "e1", "e2" },
|
.args = { "e1", "e2" },
|
||||||
|
.arity = 2,
|
||||||
.doc = R"(
|
.doc = R"(
|
||||||
Evaluate *e1* and print its abstract syntax representation on standard
|
Evaluate *e1* and print its abstract syntax representation on standard
|
||||||
error if `--trace-verbose` is enabled. Then return *e2*. This function
|
error if `--trace-verbose` is enabled. Then return *e2*. This function
|
||||||
is useful for debugging.
|
is useful for debugging.
|
||||||
)",
|
)",
|
||||||
|
.fun = evalSettings.traceVerbose ? prim_trace : prim_second,
|
||||||
});
|
});
|
||||||
|
|
||||||
/* Add a value containing the current Nix expression search path. */
|
/* Add a value containing the current Nix expression search path. */
|
||||||
@ -4129,30 +4323,50 @@ void EvalState::createBaseEnv()
|
|||||||
int n = 0;
|
int n = 0;
|
||||||
for (auto & i : searchPath) {
|
for (auto & i : searchPath) {
|
||||||
auto attrs = buildBindings(2);
|
auto attrs = buildBindings(2);
|
||||||
attrs.alloc("path").mkString(i.second);
|
attrs.alloc("path").mkString(i.path);
|
||||||
attrs.alloc("prefix").mkString(i.first);
|
attrs.alloc("prefix").mkString(i.prefix);
|
||||||
(v.listElems()[n++] = allocValue())->mkAttrs(attrs);
|
(v.listElems()[n++] = allocValue())->mkAttrs(attrs);
|
||||||
}
|
}
|
||||||
addConstant("__nixPath", v);
|
addConstant("__nixPath", v, {
|
||||||
|
.type = nList,
|
||||||
|
.doc = R"(
|
||||||
|
The search path used to resolve angle bracket path lookups.
|
||||||
|
|
||||||
|
Angle bracket expressions can be
|
||||||
|
[desugared](https://en.wikipedia.org/wiki/Syntactic_sugar)
|
||||||
|
using this and
|
||||||
|
[`builtins.findFile`](./builtins.html#builtins-findFile):
|
||||||
|
|
||||||
|
```nix
|
||||||
|
<nixpkgs>
|
||||||
|
```
|
||||||
|
|
||||||
|
is equivalent to:
|
||||||
|
|
||||||
|
```nix
|
||||||
|
builtins.findFile builtins.nixPath "nixpkgs"
|
||||||
|
```
|
||||||
|
)",
|
||||||
|
});
|
||||||
|
|
||||||
if (RegisterPrimOp::primOps)
|
if (RegisterPrimOp::primOps)
|
||||||
for (auto & primOp : *RegisterPrimOp::primOps)
|
for (auto & primOp : *RegisterPrimOp::primOps)
|
||||||
if (!primOp.experimentalFeature
|
if (experimentalFeatureSettings.isEnabled(primOp.experimentalFeature))
|
||||||
|| experimentalFeatureSettings.isEnabled(*primOp.experimentalFeature))
|
|
||||||
{
|
{
|
||||||
addPrimOp({
|
auto primOpAdjusted = primOp;
|
||||||
.fun = primOp.fun,
|
primOpAdjusted.arity = std::max(primOp.args.size(), primOp.arity);
|
||||||
.arity = std::max(primOp.args.size(), primOp.arity),
|
addPrimOp(std::move(primOpAdjusted));
|
||||||
.name = primOp.name,
|
|
||||||
.args = primOp.args,
|
|
||||||
.doc = primOp.doc,
|
|
||||||
});
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/* Add a wrapper around the derivation primop that computes the
|
/* Add a wrapper around the derivation primop that computes the
|
||||||
`drvPath' and `outPath' attributes lazily. */
|
`drvPath' and `outPath' attributes lazily.
|
||||||
|
|
||||||
|
Null docs because it is documented separately.
|
||||||
|
*/
|
||||||
auto vDerivation = allocValue();
|
auto vDerivation = allocValue();
|
||||||
addConstant("derivation", vDerivation);
|
addConstant("derivation", vDerivation, {
|
||||||
|
.type = nFunction,
|
||||||
|
});
|
||||||
|
|
||||||
/* Now that we've added all primops, sort the `builtins' set,
|
/* Now that we've added all primops, sort the `builtins' set,
|
||||||
because attribute lookups expect it to be sorted. */
|
because attribute lookups expect it to be sorted. */
|
||||||
|
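The new addConstant overload takes a per-constant info record alongside the value. A hedged sketch of the record implied by the calls above (field names come from the hunks; the struct's real name, location, and the ValueType enum are assumed from the surrounding headers):

```cpp
// Sketch only: the shape of the constant-info record used by the
// addConstant(...) calls above; the real declaration lives elsewhere.
struct Constant
{
    ValueType type;              // e.g. nInt, nString, nAttrs
    const char * doc = nullptr;  // Markdown rendered into the manual
    bool impureOnly = false;     // meaningful only outside pure evaluation mode
};

// Assumed usage, mirroring the __storeDir registration above:
// v.mkString(store->storeDir);
// addConstant("__storeDir", v, { .type = nString, .doc = "..." });
```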
@ -10,17 +10,7 @@ namespace nix {
 
 struct RegisterPrimOp
 {
-    struct Info
-    {
-        std::string name;
-        std::vector<std::string> args;
-        size_t arity = 0;
-        const char * doc;
-        PrimOpFun fun;
-        std::optional<ExperimentalFeature> experimentalFeature;
-    };
-
-    typedef std::vector<Info> PrimOps;
+    typedef std::vector<PrimOp> PrimOps;
     static PrimOps * primOps;
 
     /**
@ -28,7 +18,7 @@ struct RegisterPrimOp
      * will get called during EvalState initialization, so there
      * may be primops not yet added and builtins is not yet sorted.
      */
-    RegisterPrimOp(Info && info);
+    RegisterPrimOp(PrimOp && primOp);
 };
 
 /* These primops are disabled without enableNativeCode, but plugins
@ -154,9 +154,6 @@ static RegisterPrimOp primop_fetchClosure({
       specifying a binary cache from which the path can be fetched.
       Also, requiring a content-addressed final store path avoids the
       need for users to configure binary cache public keys.
-
-      This function is only available if you enable the experimental
-      feature `fetch-closure`.
    )",
    .fun = prim_fetchClosure,
    .experimentalFeature = Xp::FetchClosure,
@ -22,7 +22,7 @@ void emitTreeAttrs(
 {
     assert(input.isLocked());
 
-    auto attrs = state.buildBindings(8);
+    auto attrs = state.buildBindings(10);
 
 
     state.mkStorePathString(tree.storePath, attrs.alloc(state.sOutPath));
@ -56,6 +56,11 @@ void emitTreeAttrs(
 
     }
 
+    if (auto dirtyRev = fetchers::maybeGetStrAttr(input.attrs, "dirtyRev")) {
+        attrs.alloc("dirtyRev").mkString(*dirtyRev);
+        attrs.alloc("dirtyShortRev").mkString(*fetchers::maybeGetStrAttr(input.attrs, "dirtyShortRev"));
+    }
+
     if (auto lastModified = input.getLastModified()) {
         attrs.alloc("lastModified").mkInt(*lastModified);
         attrs.alloc("lastModifiedDate").mkString(
src/libexpr/tests/value/print.cc (new file, 236 lines)
@ -0,0 +1,236 @@
#include "tests/libexpr.hh"

#include "value.hh"

namespace nix {

using namespace testing;

struct ValuePrintingTests : LibExprTest
{
    template<class... A>
    void test(Value v, std::string_view expected, A... args)
    {
        std::stringstream out;
        v.print(state.symbols, out, args...);
        ASSERT_EQ(out.str(), expected);
    }
};

TEST_F(ValuePrintingTests, tInt)
{
    Value vInt;
    vInt.mkInt(10);
    test(vInt, "10");
}

TEST_F(ValuePrintingTests, tBool)
{
    Value vBool;
    vBool.mkBool(true);
    test(vBool, "true");
}

TEST_F(ValuePrintingTests, tString)
{
    Value vString;
    vString.mkString("some-string");
    test(vString, "\"some-string\"");
}

TEST_F(ValuePrintingTests, tPath)
{
    Value vPath;
    vPath.mkString("/foo");
    test(vPath, "\"/foo\"");
}

TEST_F(ValuePrintingTests, tNull)
{
    Value vNull;
    vNull.mkNull();
    test(vNull, "null");
}

TEST_F(ValuePrintingTests, tAttrs)
{
    Value vOne;
    vOne.mkInt(1);

    Value vTwo;
    vTwo.mkInt(2);

    BindingsBuilder builder(state, state.allocBindings(10));
    builder.insert(state.symbols.create("one"), &vOne);
    builder.insert(state.symbols.create("two"), &vTwo);

    Value vAttrs;
    vAttrs.mkAttrs(builder.finish());

    test(vAttrs, "{ one = 1; two = 2; }");
}

TEST_F(ValuePrintingTests, tList)
{
    Value vOne;
    vOne.mkInt(1);

    Value vTwo;
    vTwo.mkInt(2);

    Value vList;
    state.mkList(vList, 5);
    vList.bigList.elems[0] = &vOne;
    vList.bigList.elems[1] = &vTwo;
    vList.bigList.size = 3;

    test(vList, "[ 1 2 (nullptr) ]");
}

TEST_F(ValuePrintingTests, vThunk)
{
    Value vThunk;
    vThunk.mkThunk(nullptr, nullptr);

    test(vThunk, "<CODE>");
}

TEST_F(ValuePrintingTests, vApp)
{
    Value vApp;
    vApp.mkApp(nullptr, nullptr);

    test(vApp, "<CODE>");
}

TEST_F(ValuePrintingTests, vLambda)
{
    Value vLambda;
    vLambda.mkLambda(nullptr, nullptr);

    test(vLambda, "<LAMBDA>");
}

TEST_F(ValuePrintingTests, vPrimOp)
{
    Value vPrimOp;
    vPrimOp.mkPrimOp(nullptr);

    test(vPrimOp, "<PRIMOP>");
}

TEST_F(ValuePrintingTests, vPrimOpApp)
{
    Value vPrimOpApp;
    vPrimOpApp.mkPrimOpApp(nullptr, nullptr);

    test(vPrimOpApp, "<PRIMOP-APP>");
}

TEST_F(ValuePrintingTests, vExternal)
{
    struct MyExternal : ExternalValueBase
    {
    public:
        std::string showType() const override
        {
            return "";
        }
        std::string typeOf() const override
        {
            return "";
        }
        virtual std::ostream & print(std::ostream & str) const override
        {
            str << "testing-external!";
            return str;
        }
    } myExternal;
    Value vExternal;
    vExternal.mkExternal(&myExternal);

    test(vExternal, "testing-external!");
}

TEST_F(ValuePrintingTests, vFloat)
{
    Value vFloat;
    vFloat.mkFloat(2.0);

    test(vFloat, "2");
}

TEST_F(ValuePrintingTests, vBlackhole)
{
    Value vBlackhole;
    vBlackhole.mkBlackhole();
    test(vBlackhole, "«potential infinite recursion»");
}

TEST_F(ValuePrintingTests, depthAttrs)
{
    Value vOne;
    vOne.mkInt(1);

    Value vTwo;
    vTwo.mkInt(2);

    BindingsBuilder builder(state, state.allocBindings(10));
    builder.insert(state.symbols.create("one"), &vOne);
    builder.insert(state.symbols.create("two"), &vTwo);

    Value vAttrs;
    vAttrs.mkAttrs(builder.finish());

    BindingsBuilder builder2(state, state.allocBindings(10));
    builder2.insert(state.symbols.create("one"), &vOne);
    builder2.insert(state.symbols.create("two"), &vTwo);
    builder2.insert(state.symbols.create("nested"), &vAttrs);

    Value vNested;
    vNested.mkAttrs(builder2.finish());

    test(vNested, "{ nested = «too deep»; one = «too deep»; two = «too deep»; }", false, 1);
    test(vNested, "{ nested = { one = «too deep»; two = «too deep»; }; one = 1; two = 2; }", false, 2);
    test(vNested, "{ nested = { one = 1; two = 2; }; one = 1; two = 2; }", false, 3);
    test(vNested, "{ nested = { one = 1; two = 2; }; one = 1; two = 2; }", false, 4);
}

TEST_F(ValuePrintingTests, depthList)
{
    Value vOne;
    vOne.mkInt(1);

    Value vTwo;
    vTwo.mkInt(2);

    BindingsBuilder builder(state, state.allocBindings(10));
    builder.insert(state.symbols.create("one"), &vOne);
    builder.insert(state.symbols.create("two"), &vTwo);

    Value vAttrs;
    vAttrs.mkAttrs(builder.finish());

    BindingsBuilder builder2(state, state.allocBindings(10));
    builder2.insert(state.symbols.create("one"), &vOne);
    builder2.insert(state.symbols.create("two"), &vTwo);
    builder2.insert(state.symbols.create("nested"), &vAttrs);

    Value vNested;
    vNested.mkAttrs(builder2.finish());

    Value vList;
    state.mkList(vList, 5);
    vList.bigList.elems[0] = &vOne;
    vList.bigList.elems[1] = &vTwo;
    vList.bigList.elems[2] = &vNested;
    vList.bigList.size = 3;

    test(vList, "[ «too deep» «too deep» «too deep» ]", false, 1);
    test(vList, "[ 1 2 { nested = «too deep»; one = «too deep»; two = «too deep»; } ]", false, 2);
    test(vList, "[ 1 2 { nested = { one = «too deep»; two = «too deep»; }; one = 1; two = 2; } ]", false, 3);
    test(vList, "[ 1 2 { nested = { one = 1; two = 2; }; one = 1; two = 2; } ]", false, 4);
    test(vList, "[ 1 2 { nested = { one = 1; two = 2; }; one = 1; two = 2; } ]", false, 5);
}

} // namespace nix
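These tests drive the depth argument that the value.hh hunk below adds to Value::print. A small self-contained sketch of the same cut-off idea, with no dependency on the real nix::Value API (all names here are invented for illustration):

```cpp
#include <iostream>
#include <string>
#include <vector>

// Minimal stand-in for nested attribute sets; pointers keep the type complete.
struct Node
{
    std::string name;
    int value = 0;
    std::vector<const Node *> children; // empty => leaf
};

// Depth-limited printing: members below the cut-off render as «too deep»,
// the same marker the tests above check for.
void print(std::ostream & out, const Node & n, int depth)
{
    if (n.children.empty()) { out << n.value; return; }
    out << "{ ";
    for (auto * child : n.children) {
        out << child->name << " = ";
        if (depth <= 1)
            out << "«too deep»";
        else
            print(out, *child, depth - 1);
        out << "; ";
    }
    out << "}";
}

int main()
{
    Node one{"one", 1}, two{"two", 2};
    Node nested{"nested", 0, {&two}};
    Node top{"", 0, {&nested, &one}};
    print(std::cout, top, 1); std::cout << '\n'; // { nested = «too deep»; one = «too deep»; }
    print(std::cout, top, 2); std::cout << '\n'; // { nested = { two = «too deep»; }; one = 1; }
}
```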
@ -2,6 +2,7 @@
 ///@file
 
 #include <cassert>
+#include <climits>
 
 #include "symbol-table.hh"
 #include "value/context.hh"
@ -137,11 +138,11 @@ private:
 
     friend std::string showType(const Value & v);
 
-    void print(const SymbolTable & symbols, std::ostream & str, std::set<const void *> * seen) const;
+    void print(const SymbolTable &symbols, std::ostream &str, std::set<const void *> *seen, int depth) const;
 
 public:
 
-    void print(const SymbolTable & symbols, std::ostream & str, bool showRepeated = false) const;
+    void print(const SymbolTable &symbols, std::ostream &str, bool showRepeated = false, int depth = INT_MAX) const;
 
     // Functions needed to distinguish the type
     // These should be removed eventually, by putting the functionality that's
@ -218,8 +219,11 @@ public:
     /**
      * Returns the normal type of a Value. This only returns nThunk if
      * the Value hasn't been forceValue'd
+     *
+     * @param invalidIsThunk Instead of aborting on an invalid (probably
+     * 0, so uninitialized) internal type, return `nThunk`.
      */
-    inline ValueType type() const
+    inline ValueType type(bool invalidIsThunk = false) const
     {
         switch (internalType) {
             case tInt: return nInt;
@ -234,7 +238,10 @@ public:
             case tFloat: return nFloat;
             case tThunk: case tApp: case tBlackhole: return nThunk;
         }
-        abort();
+        if (invalidIsThunk)
+            return nThunk;
+        else
+            abort();
     }
 
     /**
@ -243,6 +243,13 @@ std::pair<StorePath, Input> fetchFromWorkdir(ref<Store> store, Input & input, co
             "lastModified",
             workdirInfo.hasHead ? std::stoull(runProgram("git", true, { "-C", actualPath, "--git-dir", gitDir, "log", "-1", "--format=%ct", "--no-show-signature", "HEAD" })) : 0);
 
+        if (workdirInfo.hasHead) {
+            input.attrs.insert_or_assign("dirtyRev", chomp(
+                runProgram("git", true, { "-C", actualPath, "--git-dir", gitDir, "rev-parse", "--verify", "HEAD" })) + "-dirty");
+            input.attrs.insert_or_assign("dirtyShortRev", chomp(
+                runProgram("git", true, { "-C", actualPath, "--git-dir", gitDir, "rev-parse", "--verify", "--short", "HEAD" })) + "-dirty");
+        }
+
         return {std::move(storePath), input};
     }
 } // end namespace
@ -283,7 +290,7 @@ struct GitInputScheme : InputScheme
         if (maybeGetStrAttr(attrs, "type") != "git") return {};
 
         for (auto & [name, value] : attrs)
-            if (name != "type" && name != "url" && name != "ref" && name != "rev" && name != "shallow" && name != "submodules" && name != "lastModified" && name != "revCount" && name != "narHash" && name != "allRefs" && name != "name")
+            if (name != "type" && name != "url" && name != "ref" && name != "rev" && name != "shallow" && name != "submodules" && name != "lastModified" && name != "revCount" && name != "narHash" && name != "allRefs" && name != "name" && name != "dirtyRev" && name != "dirtyShortRev")
                 throw Error("unsupported Git input attribute '%s'", name);
 
         parseURL(getStrAttr(attrs, "url"));
@ -31,11 +31,11 @@ void Store::buildPaths(const std::vector<DerivedPath> & reqs, BuildMode buildMod
     }
 
     if (failed.size() == 1 && ex) {
-        ex->status = worker.exitStatus();
+        ex->status = worker.failingExitStatus();
         throw std::move(*ex);
     } else if (!failed.empty()) {
         if (ex) logError(ex->info());
-        throw Error(worker.exitStatus(), "build of %s failed", showPaths(failed));
+        throw Error(worker.failingExitStatus(), "build of %s failed", showPaths(failed));
     }
 }
 
@ -102,10 +102,10 @@ void Store::ensurePath(const StorePath & path)
 
     if (goal->exitCode != Goal::ecSuccess) {
         if (goal->ex) {
-            goal->ex->status = worker.exitStatus();
+            goal->ex->status = worker.failingExitStatus();
             throw std::move(*goal->ex);
         } else
-            throw Error(worker.exitStatus(), "path '%s' does not exist and cannot be created", printStorePath(path));
+            throw Error(worker.failingExitStatus(), "path '%s' does not exist and cannot be created", printStorePath(path));
     }
 }
 
@ -128,7 +128,7 @@ void Store::repairPath(const StorePath & path)
             goals.insert(worker.makeDerivationGoal(*info->deriver, OutputsSpec::All { }, bmRepair));
             worker.run(goals);
         } else
-            throw Error(worker.exitStatus(), "cannot repair path '%s'", printStorePath(path));
+            throw Error(worker.failingExitStatus(), "cannot repair path '%s'", printStorePath(path));
     }
 }
 
@ -5,14 +5,14 @@ namespace nix {
 
 HookInstance::HookInstance()
 {
-    debug("starting build hook '%s'", settings.buildHook);
+    debug("starting build hook '%s'", concatStringsSep(" ", settings.buildHook.get()));
 
-    auto buildHookArgs = tokenizeString<std::list<std::string>>(settings.buildHook.get());
+    auto buildHookArgs = settings.buildHook.get();
 
     if (buildHookArgs.empty())
         throw Error("'build-hook' setting is empty");
 
-    auto buildHook = buildHookArgs.front();
+    auto buildHook = canonPath(buildHookArgs.front());
     buildHookArgs.pop_front();
 
     Strings args;
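Since build-hook is now a list-valued setting, the program and its arguments no longer pass through string tokenization. A self-contained sketch of the front/rest split the constructor above performs (the example path and helper names here are illustrative, not taken from the source):

```cpp
#include <iostream>
#include <list>
#include <stdexcept>
#include <string>

// Sketch: consume a list-valued build-hook setting, first element = program,
// remaining elements = leading arguments, as in the hunk above.
int main()
{
    std::list<std::string> buildHook{"/run/current-system/sw/bin/nix", "__build-remote"};

    if (buildHook.empty())
        throw std::runtime_error("'build-hook' setting is empty");

    auto program = buildHook.front();   // the real code also canonicalises this path
    buildHook.pop_front();

    std::cout << "program: " << program << '\n';
    for (auto & arg : buildHook)
        std::cout << "arg: " << arg << '\n';
}
```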
@ -64,8 +64,9 @@ void handleDiffHook(
     const Path & tryA, const Path & tryB,
     const Path & drvPath, const Path & tmpDir)
 {
-    auto diffHook = settings.diffHook;
-    if (diffHook != "" && settings.runDiffHook) {
+    auto & diffHookOpt = settings.diffHook.get();
+    if (diffHookOpt && settings.runDiffHook) {
+        auto & diffHook = *diffHookOpt;
         try {
             auto diffRes = runProgram(RunOptions {
                 .program = diffHook,
@ -394,8 +395,9 @@ static void linkOrCopy(const Path & from, const Path & to)
           bind-mount in this case?
 
           It can also fail with EPERM in BeegFS v7 and earlier versions
+          or fail with EXDEV in OpenAFS
           which don't allow hard-links to other directories */
-        if (errno != EMLINK && errno != EPERM)
+        if (errno != EMLINK && errno != EPERM && errno != EXDEV)
             throw SysError("linking '%s' to '%s'", to, from);
         copyPath(from, to);
     }
@ -1422,7 +1424,8 @@ void LocalDerivationGoal::startDaemon()
     Store::Params params;
     params["path-info-cache-size"] = "0";
     params["store"] = worker.store.storeDir;
-    params["root"] = getLocalStore().rootDir;
+    if (auto & optRoot = getLocalStore().rootDir.get())
+        params["root"] = *optRoot;
     params["state"] = "/no-such-path";
     params["log"] = "/no-such-path";
     auto store = make_ref<RestrictedStore>(params,
@ -468,16 +468,9 @@ void Worker::waitForInput()
 }
 
 
-unsigned int Worker::exitStatus()
+unsigned int Worker::failingExitStatus()
 {
-    /*
-     * 1100100
-     *    ^^^^
-     *    |||`- timeout
-     *    ||`-- output hash mismatch
-     *    |`--- build failure
-     *    `---- not deterministic
-     */
+    // See API docs in header for explanation
     unsigned int mask = 0;
     bool buildFailure = permanentFailure || timedOut || hashMismatch;
     if (buildFailure)
@ -280,7 +280,28 @@ public:
      */
     void waitForInput();
 
-    unsigned int exitStatus();
+    /***
+     * The exit status in case of failure.
+     *
+     * In the case of a build failure, the returned value follows this
+     * bitmask:
+     *
+     * ```
+     * 0b1100100
+     *      ^^^^
+     *      |||`- timeout
+     *      ||`-- output hash mismatch
+     *      |`--- build failure
+     *      `---- not deterministic
+     * ```
+     *
+     * In other words, the failure code is at least 100 (0b1100100), but
+     * might also be greater.
+     *
+     * Otherwise (no build failure, but some other sort of failure by
+     * assumption), the returned value is 1.
+     */
+    unsigned int failingExitStatus();
 
     /**
      * Check whether the given valid path exists and has the right
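The doc block above spells out the bit layout of the failure status. A self-contained sketch of how such a status could be composed from the failure flags mentioned in these hunks (the free function and flag parameters are assumptions; the real Worker keeps them as members):

```cpp
#include <cstdio>

// Sketch only: compose the documented 100-and-up failure codes.
unsigned int failingExitStatus(bool permanentFailure, bool timedOut,
                               bool hashMismatch, bool checkMismatch)
{
    unsigned int mask = 0;
    bool buildFailure = permanentFailure || timedOut || hashMismatch;
    if (buildFailure)
        mask |= 0x04;   // base: at least one build failed -> 100
    if (timedOut)
        mask |= 0x01;   // -> 101
    if (hashMismatch)
        mask |= 0x02;   // -> 102
    if (checkMismatch)
        mask |= 0x08;   // not deterministic -> 104
    if (mask)
        mask += 96;     // shift the bit pattern into the 100.. range
    return mask ? mask : 1;
}

int main()
{
    std::printf("%u\n", failingExitStatus(true, false, false, false));  // 100
    std::printf("%u\n", failingExitStatus(false, true, false, false));  // 101
    std::printf("%u\n", failingExitStatus(true, false, true, false));   // 102
}
```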
@ -864,8 +864,6 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
         auto path = store->parseStorePath(readString(from));
         StringSet sigs = readStrings<StringSet>(from);
         logger->startWork();
-        if (!trusted)
-            throw Error("you are not privileged to add signatures");
         store->addSignatures(path, sigs);
         logger->stopWork();
         to << 1;
@ -100,7 +100,10 @@ Settings::Settings()
     if (!pathExists(nixExePath)) {
         nixExePath = getSelfExe().value_or("nix");
     }
-    buildHook = nixExePath + " __build-remote";
+    buildHook = {
+        nixExePath,
+        "__build-remote",
+    };
 }
 
 void loadConfFile()
@ -236,7 +236,7 @@ public:
         )",
         {"build-timeout"}};
 
-    PathSetting buildHook{this, true, "", "build-hook",
+    Setting<Strings> buildHook{this, {}, "build-hook",
         R"(
           The path to the helper program that executes remote builds.
 
@ -575,8 +575,8 @@ public:
           line.
         )"};
 
-    PathSetting diffHook{
-        this, true, "", "diff-hook",
+    OptionalPathSetting diffHook{
+        this, std::nullopt, "diff-hook",
         R"(
           Absolute path to an executable capable of diffing build
           results. The hook is executed if `run-diff-hook` is true, and the
@ -719,8 +719,8 @@ public:
 
           At least one of the following conditions must be met for Nix to use a substituter:
 
-          - the substituter is in the [`trusted-substituters`](#conf-trusted-substituters) list
-          - the user calling Nix is in the [`trusted-users`](#conf-trusted-users) list
+          - The substituter is in the [`trusted-substituters`](#conf-trusted-substituters) list
+          - The user calling Nix is in the [`trusted-users`](#conf-trusted-users) list
 
           In addition, each store path should be trusted as described in [`trusted-public-keys`](#conf-trusted-public-keys)
         )",
@ -729,12 +729,10 @@ public:
     Setting<StringSet> trustedSubstituters{
         this, {}, "trusted-substituters",
         R"(
-          A list of [URLs of Nix stores](@docroot@/command-ref/new-cli/nix3-help-stores.md#store-url-format),
-          separated by whitespace. These are
-          not used by default, but can be enabled by users of the Nix daemon
-          by specifying `--option substituters urls` on the command
-          line. Unprivileged users are only allowed to pass a subset of the
-          URLs listed in `substituters` and `trusted-substituters`.
+          A list of [Nix store URLs](@docroot@/command-ref/new-cli/nix3-help-stores.md#store-url-format), separated by whitespace.
+          These are not used by default, but users of the Nix daemon can enable them by specifying [`substituters`](#conf-substituters).
+
+          Unprivileged users (those set in only [`allowed-users`](#conf-allowed-users) but not [`trusted-users`](#conf-trusted-users)) can pass as `substituters` only those URLs listed in `trusted-substituters`.
         )",
         {"trusted-binary-caches"}};
 
|
@ -15,22 +15,22 @@ struct LocalFSStoreConfig : virtual StoreConfig
|
|||||||
// it to omit the call to the Setting constructor. Clang works fine
|
// it to omit the call to the Setting constructor. Clang works fine
|
||||||
// either way.
|
// either way.
|
||||||
|
|
||||||
const PathSetting rootDir{(StoreConfig*) this, true, "",
|
const OptionalPathSetting rootDir{(StoreConfig*) this, std::nullopt,
|
||||||
"root",
|
"root",
|
||||||
"Directory prefixed to all other paths."};
|
"Directory prefixed to all other paths."};
|
||||||
|
|
||||||
const PathSetting stateDir{(StoreConfig*) this, false,
|
const PathSetting stateDir{(StoreConfig*) this,
|
||||||
rootDir != "" ? rootDir + "/nix/var/nix" : settings.nixStateDir,
|
rootDir.get() ? *rootDir.get() + "/nix/var/nix" : settings.nixStateDir,
|
||||||
"state",
|
"state",
|
||||||
"Directory where Nix will store state."};
|
"Directory where Nix will store state."};
|
||||||
|
|
||||||
const PathSetting logDir{(StoreConfig*) this, false,
|
const PathSetting logDir{(StoreConfig*) this,
|
||||||
rootDir != "" ? rootDir + "/nix/var/log/nix" : settings.nixLogDir,
|
rootDir.get() ? *rootDir.get() + "/nix/var/log/nix" : settings.nixLogDir,
|
||||||
"log",
|
"log",
|
||||||
"directory where Nix will store log files."};
|
"directory where Nix will store log files."};
|
||||||
|
|
||||||
const PathSetting realStoreDir{(StoreConfig*) this, false,
|
const PathSetting realStoreDir{(StoreConfig*) this,
|
||||||
rootDir != "" ? rootDir + "/nix/store" : storeDir, "real",
|
rootDir.get() ? *rootDir.get() + "/nix/store" : storeDir, "real",
|
||||||
"Physical path of the Nix store."};
|
"Physical path of the Nix store."};
|
||||||
};
|
};
|
||||||
|
|
||||||
|
@ -114,7 +114,7 @@ struct StoreConfig : public Config
         return "";
     }
 
-    const PathSetting storeDir_{this, false, settings.nixStore,
+    const PathSetting storeDir_{this, settings.nixStore,
         "store",
         R"(
           Logical location of the Nix store, usually
@ -3,6 +3,7 @@
 
 #include <nlohmann/json.hpp>
 #include "config.hh"
+#include "json-utils.hh"
 
 namespace nix {
 template<typename T>
@ -1,10 +1,9 @@
 #include "args.hh"
 #include "hash.hh"
+#include "json-utils.hh"
 
 #include <glob.h>
 
-#include <nlohmann/json.hpp>
-
 namespace nix {
 
 void Args::addFlag(Flag && flag_)
@ -247,11 +246,7 @@ nlohmann::json Args::toJSON()
         j["arity"] = flag->handler.arity;
         if (!flag->labels.empty())
             j["labels"] = flag->labels;
-        // TODO With C++23 use `std::optional::tranform`
-        if (auto & xp = flag->experimentalFeature)
-            j["experimental-feature"] = showExperimentalFeature(*xp);
-        else
-            j["experimental-feature"] = nullptr;
+        j["experimental-feature"] = flag->experimentalFeature;
         flags[name] = std::move(j);
     }
 
@ -416,11 +411,7 @@ nlohmann::json MultiCommand::toJSON()
         cat["id"] = command->category();
         cat["description"] = trim(categories[command->category()]);
         j["category"] = std::move(cat);
-        // TODO With C++23 use `std::optional::tranform`
-        if (auto xp = command->experimentalFeature())
-            cat["experimental-feature"] = showExperimentalFeature(*xp);
-        else
-            cat["experimental-feature"] = nullptr;
+        cat["experimental-feature"] = command->experimentalFeature();
        cmds[name] = std::move(j);
    }
 
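Assigning an std::optional straight into a JSON object, as the simplified toJSON() code above does, only works if a serializer for optionals is available (here presumably via json-utils.hh). A hedged sketch of what such an overload can look like using nlohmann::json's adl_serializer extension point (the specialization below is an illustration, not the library's built-in behaviour):

```cpp
#include <nlohmann/json.hpp>
#include <optional>
#include <string>

// Sketch: with this specialization, `j["x"] = opt;` emits either the
// contained value or JSON null.
namespace nlohmann {
template<typename T>
struct adl_serializer<std::optional<T>>
{
    static void to_json(json & j, const std::optional<T> & opt)
    {
        if (opt)
            j = *opt;
        else
            j = nullptr;
    }
};
}

int main()
{
    nlohmann::json j;
    j["experimental-feature"] = std::optional<std::string>{};          // null
    j["other"] = std::optional<std::string>{"flakes"};                 // "flakes"
}
```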
@ -53,8 +53,11 @@ template<> void BaseSetting<std::set<ExperimentalFeature>>::appendOrSet(std::set
 template<typename T>
 void BaseSetting<T>::appendOrSet(T && newValue, bool append)
 {
-    static_assert(!trait::appendable, "using default `appendOrSet` implementation with an appendable type");
+    static_assert(
+        !trait::appendable,
+        "using default `appendOrSet` implementation with an appendable type");
     assert(!append);
 
     value = std::move(newValue);
 }
 
@ -71,4 +74,60 @@ void BaseSetting<T>::set(const std::string & str, bool append)
     }
 }
 
+template<> void BaseSetting<bool>::convertToArg(Args & args, const std::string & category);
+
+template<typename T>
+void BaseSetting<T>::convertToArg(Args & args, const std::string & category)
+{
+    args.addFlag({
+        .longName = name,
+        .description = fmt("Set the `%s` setting.", name),
+        .category = category,
+        .labels = {"value"},
+        .handler = {[this](std::string s) { overridden = true; set(s); }},
+        .experimentalFeature = experimentalFeature,
+    });
+
+    if (isAppendable())
+        args.addFlag({
+            .longName = "extra-" + name,
+            .description = fmt("Append to the `%s` setting.", name),
+            .category = category,
+            .labels = {"value"},
+            .handler = {[this](std::string s) { overridden = true; set(s, true); }},
+            .experimentalFeature = experimentalFeature,
+        });
+}
+
+#define DECLARE_CONFIG_SERIALISER(TY) \
+    template<> TY BaseSetting< TY >::parse(const std::string & str) const; \
+    template<> std::string BaseSetting< TY >::to_string() const;
+
+DECLARE_CONFIG_SERIALISER(std::string)
+DECLARE_CONFIG_SERIALISER(std::optional<std::string>)
+DECLARE_CONFIG_SERIALISER(bool)
+DECLARE_CONFIG_SERIALISER(Strings)
+DECLARE_CONFIG_SERIALISER(StringSet)
+DECLARE_CONFIG_SERIALISER(StringMap)
+DECLARE_CONFIG_SERIALISER(std::set<ExperimentalFeature>)
+
+template<typename T>
+T BaseSetting<T>::parse(const std::string & str) const
+{
+    static_assert(std::is_integral<T>::value, "Integer required.");
+
+    if (auto n = string2Int<T>(str))
+        return *n;
+    else
+        throw UsageError("setting '%s' has invalid value '%s'", name, str);
+}
+
+template<typename T>
+std::string BaseSetting<T>::to_string() const
+{
+    static_assert(std::is_integral<T>::value, "Integer required.");
+
+    return std::to_string(value);
+}
+
 }
@ -219,29 +219,6 @@ void AbstractSetting::convertToArg(Args & args, const std::string & category)
 {
 }
 
-template<typename T>
-void BaseSetting<T>::convertToArg(Args & args, const std::string & category)
-{
-    args.addFlag({
-        .longName = name,
-        .description = fmt("Set the `%s` setting.", name),
-        .category = category,
-        .labels = {"value"},
-        .handler = {[this](std::string s) { overridden = true; set(s); }},
-        .experimentalFeature = experimentalFeature,
-    });
-
-    if (isAppendable())
-        args.addFlag({
-            .longName = "extra-" + name,
-            .description = fmt("Append to the `%s` setting.", name),
-            .category = category,
-            .labels = {"value"},
-            .handler = {[this](std::string s) { overridden = true; set(s, true); }},
-            .experimentalFeature = experimentalFeature,
-        });
-}
-
 template<> std::string BaseSetting<std::string>::parse(const std::string & str) const
 {
     return str;
@ -252,21 +229,17 @@ template<> std::string BaseSetting<std::string>::to_string() const
     return value;
 }
 
-template<typename T>
-T BaseSetting<T>::parse(const std::string & str) const
+template<> std::optional<std::string> BaseSetting<std::optional<std::string>>::parse(const std::string & str) const
 {
-    static_assert(std::is_integral<T>::value, "Integer required.");
-    if (auto n = string2Int<T>(str))
-        return *n;
+    if (str == "")
+        return std::nullopt;
     else
-        throw UsageError("setting '%s' has invalid value '%s'", name, str);
+        return { str };
 }
 
-template<typename T>
-std::string BaseSetting<T>::to_string() const
+template<> std::string BaseSetting<std::optional<std::string>>::to_string() const
 {
-    static_assert(std::is_integral<T>::value, "Integer required.");
-    return std::to_string(value);
+    return value ? *value : "";
 }
 
 template<> bool BaseSetting<bool>::parse(const std::string & str) const
@ -403,15 +376,25 @@ template class BaseSetting<StringSet>;
 template class BaseSetting<StringMap>;
 template class BaseSetting<std::set<ExperimentalFeature>>;
 
+static Path parsePath(const AbstractSetting & s, const std::string & str)
+{
+    if (str == "")
+        throw UsageError("setting '%s' is a path and paths cannot be empty", s.name);
+    else
+        return canonPath(str);
+}
+
 Path PathSetting::parse(const std::string & str) const
 {
-    if (str == "") {
-        if (allowEmpty)
-            return "";
-        else
-            throw UsageError("setting '%s' cannot be empty", name);
-    } else
-        return canonPath(str);
+    return parsePath(*this, str);
+}
+
+std::optional<Path> OptionalPathSetting::parse(const std::string & str) const
+{
+    if (str == "")
+        return std::nullopt;
+    else
+        return parsePath(*this, str);
 }
 
 bool GlobalConfig::set(const std::string & name, const std::string & value)
@ -353,21 +353,20 @@ public:
 /**
  * A special setting for Paths. These are automatically canonicalised
  * (e.g. "/foo//bar/" becomes "/foo/bar").
+ *
+ * It is mandatory to specify a path; i.e. the empty string is not
+ * permitted.
  */
 class PathSetting : public BaseSetting<Path>
 {
-    bool allowEmpty;
-
 public:
 
     PathSetting(Config * options,
-        bool allowEmpty,
         const Path & def,
         const std::string & name,
         const std::string & description,
         const std::set<std::string> & aliases = {})
         : BaseSetting<Path>(def, true, name, description, aliases)
-        , allowEmpty(allowEmpty)
     {
         options->addSetting(this);
     }

@ -379,6 +378,30 @@ public:
     void operator =(const Path & v) { this->assign(v); }
 };
 
+/**
+ * Like `PathSetting`, but the absence of a path is also allowed.
+ *
+ * `std::optional` is used instead of the empty string for clarity.
+ */
+class OptionalPathSetting : public BaseSetting<std::optional<Path>>
+{
+public:
+
+    OptionalPathSetting(Config * options,
+        const std::optional<Path> & def,
+        const std::string & name,
+        const std::string & description,
+        const std::set<std::string> & aliases = {})
+        : BaseSetting<std::optional<Path>>(def, true, name, description, aliases)
+    {
+        options->addSetting(this);
+    }
+
+    std::optional<Path> parse(const std::string & str) const override;
+
+    void operator =(const std::optional<Path> & v) { this->assign(v); }
+};
+
 struct GlobalConfig : public AbstractConfig
 {
     typedef std::vector<Config*> ConfigRegistrations;
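For illustration only (not part of this commit): a minimal sketch of how the new `OptionalPathSetting` could be declared and read, assuming the `nix::Config` machinery from `config.hh` and a `get()` accessor on settings; the `ExampleConfig` struct and the `log-dir` setting name are made up for this example.

```cpp
#include <iostream>
#include "config.hh"   // declares nix::Config and nix::OptionalPathSetting (added above)

// Hypothetical config struct, for illustration only.
struct ExampleConfig : nix::Config
{
    // Defaults to std::nullopt; per OptionalPathSetting::parse above, an empty
    // string in nix.conf or on the command line also yields std::nullopt.
    nix::OptionalPathSetting logDir{
        this, std::nullopt, "log-dir",
        "Optional directory to write extra logs to."};
};

void report(ExampleConfig & cfg)
{
    // Assumes BaseSetting exposes get(); the value is a std::optional<Path>.
    if (auto dir = cfg.logDir.get())
        std::cout << "logging to " << *dir << "\n";
    else
        std::cout << "no log directory configured\n";
}
```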
@ -3,7 +3,7 @@
 
 #include "comparator.hh"
 #include "error.hh"
-#include "nlohmann/json_fwd.hpp"
+#include "json-utils.hh"
 #include "types.hh"
 
 namespace nix {

@ -94,4 +94,10 @@ public:
 void to_json(nlohmann::json &, const ExperimentalFeature &);
 void from_json(const nlohmann::json &, ExperimentalFeature &);
 
+/**
+ * It is always rendered as a string
+ */
+template<>
+struct json_avoids_null<ExperimentalFeature> : std::true_type {};
+
 }
19
src/libutil/json-utils.cc
Normal file

@ -0,0 +1,19 @@
+#include "json-utils.hh"
+
+namespace nix {
+
+const nlohmann::json * get(const nlohmann::json & map, const std::string & key)
+{
+    auto i = map.find(key);
+    if (i == map.end()) return nullptr;
+    return &*i;
+}
+
+nlohmann::json * get(nlohmann::json & map, const std::string & key)
+{
+    auto i = map.find(key);
+    if (i == map.end()) return nullptr;
+    return &*i;
+}
+
+}
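As a side note (an illustration, not part of the commit): these `get` helpers give pointer-style lookup into a JSON object, returning `nullptr` instead of throwing when a key is missing. A rough usage sketch, with a made-up `parseName` helper:

```cpp
#include <optional>
#include <string>
#include <nlohmann/json.hpp>
#include "json-utils.hh"   // declares nix::get as shown above

// Hypothetical helper: return the "name" field if it is present and a string.
std::optional<std::string> parseName(const nlohmann::json & obj)
{
    if (auto * name = nix::get(obj, "name"); name && name->is_string())
        return name->get<std::string>();
    return std::nullopt;
}
```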
@ -2,21 +2,77 @@
 ///@file
 
 #include <nlohmann/json.hpp>
+#include <list>
 
 namespace nix {
 
-const nlohmann::json * get(const nlohmann::json & map, const std::string & key)
-{
-    auto i = map.find(key);
-    if (i == map.end()) return nullptr;
-    return &*i;
-}
+const nlohmann::json * get(const nlohmann::json & map, const std::string & key);
 
-nlohmann::json * get(nlohmann::json & map, const std::string & key)
-{
-    auto i = map.find(key);
-    if (i == map.end()) return nullptr;
-    return &*i;
-}
+nlohmann::json * get(nlohmann::json & map, const std::string & key);
+
+/**
+ * For `adl_serializer<std::optional<T>>` below, we need to track what
+ * types are not already using `null`. Only for them can we use `null`
+ * to represent `std::nullopt`.
+ */
+template<typename T>
+struct json_avoids_null;
+
+/**
+ * Handle numbers in default impl
+ */
+template<typename T>
+struct json_avoids_null : std::bool_constant<std::is_integral<T>::value> {};
+
+template<>
+struct json_avoids_null<std::nullptr_t> : std::false_type {};
+
+template<>
+struct json_avoids_null<bool> : std::true_type {};
+
+template<>
+struct json_avoids_null<std::string> : std::true_type {};
+
+template<typename T>
+struct json_avoids_null<std::vector<T>> : std::true_type {};
+
+template<typename T>
+struct json_avoids_null<std::list<T>> : std::true_type {};
+
+template<typename K, typename V>
+struct json_avoids_null<std::map<K, V>> : std::true_type {};
+
+}
+
+namespace nlohmann {
+
+/**
+ * This "instance" is widely requested, see
+ * https://github.com/nlohmann/json/issues/1749, but momentum has stalled
+ * out. Writing there here in Nix as a stop-gap.
+ *
+ * We need to make sure the underlying type does not use `null` for this to
+ * round trip. We do that with a static assert.
+ */
+template<typename T>
+struct adl_serializer<std::optional<T>> {
+    static std::optional<T> from_json(const json & json) {
+        static_assert(
+            nix::json_avoids_null<T>::value,
+            "null is already in use for underlying type's JSON");
+        return json.is_null()
+            ? std::nullopt
+            : std::optional { adl_serializer<T>::from_json(json) };
+    }
+    static void to_json(json & json, std::optional<T> t) {
+        static_assert(
+            nix::json_avoids_null<T>::value,
+            "null is already in use for underlying type's JSON");
+        if (t)
+            adl_serializer<T>::to_json(json, *t);
+        else
+            json = nullptr;
+    }
+};
 
 }
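A minimal sketch of what this buys callers (illustration only, not part of the commit), assuming the header above is available as `json-utils.hh`: with the `adl_serializer<std::optional<T>>` specialization, `std::optional<T>` round-trips through `nlohmann::json`, using `null` for `std::nullopt` as long as `T` itself never serializes to `null` (enforced by the `json_avoids_null` static assert).

```cpp
#include <cassert>
#include <optional>
#include <string>
#include "json-utils.hh"   // the header changed above

int main()
{
    // std::nullopt serializes to JSON null...
    nlohmann::json j = std::optional<std::string>{};
    assert(j.is_null());

    // ...and a present value serializes to the underlying type's JSON.
    j = std::optional<std::string>{"hello"};
    auto back = j.get<std::optional<std::string>>();
    assert(back && *back == "hello");
}
```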
@ -56,19 +56,16 @@ struct AuthorizationSettings : Config {
     Setting<Strings> trustedUsers{
         this, {"root"}, "trusted-users",
         R"(
-          A list of names of users (separated by whitespace) that have
-          additional rights when connecting to the Nix daemon, such as the
-          ability to specify additional binary caches, or to import unsigned
-          NARs. You can also specify groups by prefixing them with `@`; for
-          instance, `@wheel` means all users in the `wheel` group. The default
-          is `root`.
+          A list of user names, separated by whitespace.
+          These users will have additional rights when connecting to the Nix daemon, such as the ability to specify additional [substituters](#conf-substituters), or to import unsigned [NARs](@docroot@/glossary.md#gloss-nar).
+
+          You can also specify groups by prefixing names with `@`.
+          For instance, `@wheel` means all users in the `wheel` group.
 
           > **Warning**
           >
-          > Adding a user to `trusted-users` is essentially equivalent to
-          > giving that user root access to the system. For example, the user
-          > can set `sandbox-paths` and thereby obtain read access to
-          > directories that are otherwise inacessible to them.
+          > Adding a user to `trusted-users` is essentially equivalent to giving that user root access to the system.
+          > For example, the user can access or replace store path contents that are critical for system security.
         )"};
 
     /**

@ -77,12 +74,16 @@ struct AuthorizationSettings : Config {
     Setting<Strings> allowedUsers{
         this, {"*"}, "allowed-users",
         R"(
-          A list of names of users (separated by whitespace) that are allowed
-          to connect to the Nix daemon. As with the `trusted-users` option,
-          you can specify groups by prefixing them with `@`. Also, you can
-          allow all users by specifying `*`. The default is `*`.
-
-          Note that trusted users are always allowed to connect.
+          A list user names, separated by whitespace.
+          These users are allowed to connect to the Nix daemon.
+
+          You can specify groups by prefixing names with `@`.
+          For instance, `@wheel` means all users in the `wheel` group.
+          Also, you can allow all users by specifying `*`.
+
+          > **Note**
+          >
+          > Trusted users (set in [`trusted-users`](#conf-trusted-users)) can always connect to the Nix daemon.
         )"};
 };
 
@ -179,6 +179,8 @@ struct CmdFlakeMetadata : FlakeCommand, MixJSON
             j["locked"] = fetchers::attrsToJSON(flake.lockedRef.toAttrs());
             if (auto rev = flake.lockedRef.input.getRev())
                 j["revision"] = rev->to_string(Base16, false);
+            if (auto dirtyRev = fetchers::maybeGetStrAttr(flake.lockedRef.toAttrs(), "dirtyRev"))
+                j["dirtyRevision"] = *dirtyRev;
             if (auto revCount = flake.lockedRef.input.getRevCount())
                 j["revCount"] = *revCount;
             if (auto lastModified = flake.lockedRef.input.getLastModified())

@ -204,6 +206,10 @@ struct CmdFlakeMetadata : FlakeCommand, MixJSON
                 logger->cout(
                     ANSI_BOLD "Revision:" ANSI_NORMAL " %s",
                     rev->to_string(Base16, false));
+            if (auto dirtyRev = fetchers::maybeGetStrAttr(flake.lockedRef.toAttrs(), "dirtyRev"))
+                logger->cout(
+                    ANSI_BOLD "Revision:" ANSI_NORMAL " %s",
+                    *dirtyRev);
             if (auto revCount = flake.lockedRef.input.getRevCount())
                 logger->cout(
                     ANSI_BOLD "Revisions:" ANSI_NORMAL " %s",

@ -380,8 +386,10 @@ struct CmdFlakeCheck : FlakeCommand
         auto checkOverlay = [&](const std::string & attrPath, Value & v, const PosIdx pos) {
             try {
                 state->forceValue(v, pos);
-                if (!v.isLambda()
-                    || v.lambda.fun->hasFormals()
+                if (!v.isLambda()) {
+                    throw Error("overlay is not a function, but %s instead", showType(v));
+                }
+                if (v.lambda.fun->hasFormals()
                     || !argHasName(v.lambda.fun->arg, "final"))
                     throw Error("overlay does not take an argument named 'final'");
                 auto body = dynamic_cast<ExprLambda *>(v.lambda.fun->body);
@ -71,8 +71,6 @@ inputs.nixpkgs = {
 
 Here are some examples of flake references in their URL-like representation:
 
-* `.`: The flake in the current directory.
-* `/home/alice/src/patchelf`: A flake in some other directory.
 * `nixpkgs`: The `nixpkgs` entry in the flake registry.
 * `nixpkgs/a3a3dda3bacf61e8a39258a0ed9c924eeca8e293`: The `nixpkgs`
   entry in the flake registry, with its Git revision overridden to a

@ -93,6 +91,23 @@ Here are some examples of flake references in their URL-like representation:
 * `https://github.com/NixOS/patchelf/archive/master.tar.gz`: A tarball
   flake.
 
+## Path-like syntax
+
+Flakes corresponding to a local path can also be referred to by a direct path reference, either `/absolute/path/to/the/flake` or `./relative/path/to/the/flake` (note that the leading `./` is mandatory for relative paths to avoid any ambiguity).
+
+The semantic of such a path is as follows:
+
+* If the directory is part of a Git repository, then the input will be treated as a `git+file:` URL, otherwise it will be treated as a `path:` url;
+* If the directory doesn't contain a `flake.nix` file, then Nix will search for such a file upwards in the file system hierarchy until it finds any of:
+  1. The Git repository root, or
+  2. The filesystem root (/), or
+  3. A folder on a different mount point.
+
+### Examples
+
+* `.`: The flake to which the current directory belongs to.
+* `/home/alice/src/patchelf`: A flake in some other directory.
+
 ## Flake reference attributes
 
 The following generic flake reference attributes are supported:
|
@ -352,7 +352,7 @@ void mainWrapped(int argc, char * * argv)
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (argc == 2 && std::string(argv[1]) == "__dump-builtins") {
|
if (argc == 2 && std::string(argv[1]) == "__dump-language") {
|
||||||
experimentalFeatureSettings.experimentalFeatures = {
|
experimentalFeatureSettings.experimentalFeatures = {
|
||||||
Xp::Flakes,
|
Xp::Flakes,
|
||||||
Xp::FetchClosure,
|
Xp::FetchClosure,
|
||||||
@ -360,17 +360,34 @@ void mainWrapped(int argc, char * * argv)
|
|||||||
evalSettings.pureEval = false;
|
evalSettings.pureEval = false;
|
||||||
EvalState state({}, openStore("dummy://"));
|
EvalState state({}, openStore("dummy://"));
|
||||||
auto res = nlohmann::json::object();
|
auto res = nlohmann::json::object();
|
||||||
auto builtins = state.baseEnv.values[0]->attrs;
|
res["builtins"] = ({
|
||||||
for (auto & builtin : *builtins) {
|
auto builtinsJson = nlohmann::json::object();
|
||||||
auto b = nlohmann::json::object();
|
auto builtins = state.baseEnv.values[0]->attrs;
|
||||||
if (!builtin.value->isPrimOp()) continue;
|
for (auto & builtin : *builtins) {
|
||||||
auto primOp = builtin.value->primOp;
|
auto b = nlohmann::json::object();
|
||||||
if (!primOp->doc) continue;
|
if (!builtin.value->isPrimOp()) continue;
|
||||||
b["arity"] = primOp->arity;
|
auto primOp = builtin.value->primOp;
|
||||||
b["args"] = primOp->args;
|
if (!primOp->doc) continue;
|
||||||
b["doc"] = trim(stripIndentation(primOp->doc));
|
b["arity"] = primOp->arity;
|
||||||
res[state.symbols[builtin.name]] = std::move(b);
|
b["args"] = primOp->args;
|
||||||
}
|
b["doc"] = trim(stripIndentation(primOp->doc));
|
||||||
|
b["experimental-feature"] = primOp->experimentalFeature;
|
||||||
|
builtinsJson[state.symbols[builtin.name]] = std::move(b);
|
||||||
|
}
|
||||||
|
std::move(builtinsJson);
|
||||||
|
});
|
||||||
|
res["constants"] = ({
|
||||||
|
auto constantsJson = nlohmann::json::object();
|
||||||
|
for (auto & [name, info] : state.constantInfos) {
|
||||||
|
auto c = nlohmann::json::object();
|
||||||
|
if (!info.doc) continue;
|
||||||
|
c["doc"] = trim(stripIndentation(info.doc));
|
||||||
|
c["type"] = showType(info.type, false);
|
||||||
|
c["impure-only"] = info.impureOnly;
|
||||||
|
constantsJson[name] = std::move(c);
|
||||||
|
}
|
||||||
|
std::move(constantsJson);
|
||||||
|
});
|
||||||
logger->cout("%s", res);
|
logger->cout("%s", res);
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
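A rough sketch of how the `__dump-language` output could be consumed (illustration only, not part of the commit; it assumes the JSON shape produced above has been saved to a hypothetical `language.json` file):

```cpp
#include <fstream>
#include <iostream>
#include <nlohmann/json.hpp>

int main()
{
    // Assumes `nix __dump-language > language.json` was run beforehand.
    std::ifstream in("language.json");
    auto dump = nlohmann::json::parse(in);

    // Builtins carry "arity", "args", "doc" and "experimental-feature".
    for (auto & [name, b] : dump.at("builtins").items())
        std::cout << "builtin " << name << " (arity " << b.at("arity").get<int>() << ")\n";

    // Constants carry "doc", "type" and "impure-only".
    for (auto & [name, c] : dump.at("constants").items())
        std::cout << "constant " << name << " : " << c.at("type").get<std::string>()
                  << (c.at("impure-only").get<bool>() ? " (impure only)" : "") << "\n";
}
```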
|
@ -6,26 +6,48 @@ R""(
|
|||||||
|
|
||||||
```console
|
```console
|
||||||
# nix profile list
|
# nix profile list
|
||||||
0 flake:nixpkgs#legacyPackages.x86_64-linux.spotify github:NixOS/nixpkgs/c23db78bbd474c4d0c5c3c551877523b4a50db06#legacyPackages.x86_64-linux.spotify /nix/store/akpdsid105phbbvknjsdh7hl4v3fhjkr-spotify-1.1.46.916.g416cacf1
|
Index: 0
|
||||||
1 flake:nixpkgs#legacyPackages.x86_64-linux.zoom-us github:NixOS/nixpkgs/c23db78bbd474c4d0c5c3c551877523b4a50db06#legacyPackages.x86_64-linux.zoom-us /nix/store/89pmjmbih5qpi7accgacd17ybpgp4xfm-zoom-us-5.4.53350.1027
|
Flake attribute: legacyPackages.x86_64-linux.gdb
|
||||||
2 flake:blender-bin#packages.x86_64-linux.default github:edolstra/nix-warez/d09d7eea893dcb162e89bc67f6dc1ced14abfc27?dir=blender#packages.x86_64-linux.default /nix/store/zfgralhqjnam662kqsgq6isjw8lhrflz-blender-bin-2.91.0
|
Original flake URL: flake:nixpkgs
|
||||||
|
Locked flake URL: github:NixOS/nixpkgs/7b38b03d76ab71bdc8dc325e3f6338d984cc35ca
|
||||||
|
Store paths: /nix/store/indzcw5wvlhx6vwk7k4iq29q15chvr3d-gdb-11.1
|
||||||
|
|
||||||
|
Index: 1
|
||||||
|
Flake attribute: packages.x86_64-linux.default
|
||||||
|
Original flake URL: flake:blender-bin
|
||||||
|
Locked flake URL: github:edolstra/nix-warez/91f2ffee657bf834e4475865ae336e2379282d34?dir=blender
|
||||||
|
Store paths: /nix/store/i798sxl3j40wpdi1rgf391id1b5klw7g-blender-bin-3.1.2
|
||||||
```
|
```
|
||||||
|
|
||||||
|
Note that you can unambiguously rebuild a package from a profile
|
||||||
|
through its locked flake URL and flake attribute, e.g.
|
||||||
|
|
||||||
|
```console
|
||||||
|
# nix build github:edolstra/nix-warez/91f2ffee657bf834e4475865ae336e2379282d34?dir=blender#packages.x86_64-linux.default
|
||||||
|
```
|
||||||
|
|
||||||
|
will build the package with index 1 shown above.
|
||||||
|
|
||||||
# Description
|
# Description
|
||||||
|
|
||||||
This command shows what packages are currently installed in a
|
This command shows what packages are currently installed in a
|
||||||
profile. The output consists of one line per package, with the
|
profile. For each installed package, it shows the following
|
||||||
following fields:
|
information:
|
||||||
|
|
||||||
* An integer that can be used to unambiguously identify the package in
|
* `Index`: An integer that can be used to unambiguously identify the
|
||||||
invocations of `nix profile remove` and `nix profile upgrade`.
|
package in invocations of `nix profile remove` and `nix profile
|
||||||
|
upgrade`.
|
||||||
|
|
||||||
* The original ("unlocked") flake reference and output attribute path
|
* `Flake attribute`: The flake output attribute path that provides the
|
||||||
used at installation time.
|
package (e.g. `packages.x86_64-linux.hello`).
|
||||||
|
|
||||||
* The locked flake reference to which the unlocked flake reference was
|
* `Original flake URL`: The original ("unlocked") flake reference
|
||||||
resolved.
|
specified by the user when the package was first installed via `nix
|
||||||
|
profile install`.
|
||||||
|
|
||||||
* The store path(s) of the package.
|
* `Locked flake URL`: The locked flake reference to which the original
|
||||||
|
flake reference was resolved.
|
||||||
|
|
||||||
|
* `Store paths`: The store path(s) of the package.
|
||||||
|
|
||||||
)""
|
)""
|
||||||
|
@ -21,7 +21,7 @@ struct ProfileElementSource
|
|||||||
{
|
{
|
||||||
FlakeRef originalRef;
|
FlakeRef originalRef;
|
||||||
// FIXME: record original attrpath.
|
// FIXME: record original attrpath.
|
||||||
FlakeRef resolvedRef;
|
FlakeRef lockedRef;
|
||||||
std::string attrPath;
|
std::string attrPath;
|
||||||
ExtendedOutputsSpec outputs;
|
ExtendedOutputsSpec outputs;
|
||||||
|
|
||||||
@ -168,7 +168,7 @@ struct ProfileManifest
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
std::string toJSON(Store & store) const
|
nlohmann::json toJSON(Store & store) const
|
||||||
{
|
{
|
||||||
auto array = nlohmann::json::array();
|
auto array = nlohmann::json::array();
|
||||||
for (auto & element : elements) {
|
for (auto & element : elements) {
|
||||||
@ -181,7 +181,7 @@ struct ProfileManifest
|
|||||||
obj["priority"] = element.priority;
|
obj["priority"] = element.priority;
|
||||||
if (element.source) {
|
if (element.source) {
|
||||||
obj["originalUrl"] = element.source->originalRef.to_string();
|
obj["originalUrl"] = element.source->originalRef.to_string();
|
||||||
obj["url"] = element.source->resolvedRef.to_string();
|
obj["url"] = element.source->lockedRef.to_string();
|
||||||
obj["attrPath"] = element.source->attrPath;
|
obj["attrPath"] = element.source->attrPath;
|
||||||
obj["outputs"] = element.source->outputs;
|
obj["outputs"] = element.source->outputs;
|
||||||
}
|
}
|
||||||
@ -190,7 +190,7 @@ struct ProfileManifest
|
|||||||
nlohmann::json json;
|
nlohmann::json json;
|
||||||
json["version"] = 2;
|
json["version"] = 2;
|
||||||
json["elements"] = array;
|
json["elements"] = array;
|
||||||
return json.dump();
|
return json;
|
||||||
}
|
}
|
||||||
|
|
||||||
StorePath build(ref<Store> store)
|
StorePath build(ref<Store> store)
|
||||||
@ -210,7 +210,7 @@ struct ProfileManifest
|
|||||||
|
|
||||||
buildProfile(tempDir, std::move(pkgs));
|
buildProfile(tempDir, std::move(pkgs));
|
||||||
|
|
||||||
writeFile(tempDir + "/manifest.json", toJSON(*store));
|
writeFile(tempDir + "/manifest.json", toJSON(*store).dump());
|
||||||
|
|
||||||
/* Add the symlink tree to the store. */
|
/* Add the symlink tree to the store. */
|
||||||
StringSink sink;
|
StringSink sink;
|
||||||
@ -349,7 +349,7 @@ struct CmdProfileInstall : InstallablesCommand, MixDefaultProfile
|
|||||||
if (auto * info2 = dynamic_cast<ExtraPathInfoFlake *>(&*info)) {
|
if (auto * info2 = dynamic_cast<ExtraPathInfoFlake *>(&*info)) {
|
||||||
element.source = ProfileElementSource {
|
element.source = ProfileElementSource {
|
||||||
.originalRef = info2->flake.originalRef,
|
.originalRef = info2->flake.originalRef,
|
||||||
.resolvedRef = info2->flake.resolvedRef,
|
.lockedRef = info2->flake.lockedRef,
|
||||||
.attrPath = info2->value.attrPath,
|
.attrPath = info2->value.attrPath,
|
||||||
.outputs = info2->value.extendedOutputsSpec,
|
.outputs = info2->value.extendedOutputsSpec,
|
||||||
};
|
};
|
||||||
@ -588,14 +588,14 @@ struct CmdProfileUpgrade : virtual SourceExprCommand, MixDefaultProfile, MixProf
|
|||||||
assert(infop);
|
assert(infop);
|
||||||
auto & info = *infop;
|
auto & info = *infop;
|
||||||
|
|
||||||
if (element.source->resolvedRef == info.flake.resolvedRef) continue;
|
if (element.source->lockedRef == info.flake.lockedRef) continue;
|
||||||
|
|
||||||
printInfo("upgrading '%s' from flake '%s' to '%s'",
|
printInfo("upgrading '%s' from flake '%s' to '%s'",
|
||||||
element.source->attrPath, element.source->resolvedRef, info.flake.resolvedRef);
|
element.source->attrPath, element.source->lockedRef, info.flake.lockedRef);
|
||||||
|
|
||||||
element.source = ProfileElementSource {
|
element.source = ProfileElementSource {
|
||||||
.originalRef = installable->flakeRef,
|
.originalRef = installable->flakeRef,
|
||||||
.resolvedRef = info.flake.resolvedRef,
|
.lockedRef = info.flake.lockedRef,
|
||||||
.attrPath = info.value.attrPath,
|
.attrPath = info.value.attrPath,
|
||||||
.outputs = installable->extendedOutputsSpec,
|
.outputs = installable->extendedOutputsSpec,
|
||||||
};
|
};
|
||||||
@ -635,7 +635,7 @@ struct CmdProfileUpgrade : virtual SourceExprCommand, MixDefaultProfile, MixProf
|
|||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
struct CmdProfileList : virtual EvalCommand, virtual StoreCommand, MixDefaultProfile
|
struct CmdProfileList : virtual EvalCommand, virtual StoreCommand, MixDefaultProfile, MixJSON
|
||||||
{
|
{
|
||||||
std::string description() override
|
std::string description() override
|
||||||
{
|
{
|
||||||
@ -653,12 +653,22 @@ struct CmdProfileList : virtual EvalCommand, virtual StoreCommand, MixDefaultPro
|
|||||||
{
|
{
|
||||||
ProfileManifest manifest(*getEvalState(), *profile);
|
ProfileManifest manifest(*getEvalState(), *profile);
|
||||||
|
|
||||||
for (size_t i = 0; i < manifest.elements.size(); ++i) {
|
if (json) {
|
||||||
auto & element(manifest.elements[i]);
|
std::cout << manifest.toJSON(*store).dump() << "\n";
|
||||||
logger->cout("%d %s %s %s", i,
|
} else {
|
||||||
element.source ? element.source->originalRef.to_string() + "#" + element.source->attrPath + element.source->outputs.to_string() : "-",
|
for (size_t i = 0; i < manifest.elements.size(); ++i) {
|
||||||
element.source ? element.source->resolvedRef.to_string() + "#" + element.source->attrPath + element.source->outputs.to_string() : "-",
|
auto & element(manifest.elements[i]);
|
||||||
concatStringsSep(" ", store->printStorePathSet(element.storePaths)));
|
if (i) logger->cout("");
|
||||||
|
logger->cout("Index: " ANSI_BOLD "%s" ANSI_NORMAL "%s",
|
||||||
|
i,
|
||||||
|
element.active ? "" : " " ANSI_RED "(inactive)" ANSI_NORMAL);
|
||||||
|
if (element.source) {
|
||||||
|
logger->cout("Flake attribute: %s%s", element.source->attrPath, element.source->outputs.to_string());
|
||||||
|
logger->cout("Original flake URL: %s", element.source->originalRef.to_string());
|
||||||
|
logger->cout("Locked flake URL: %s", element.source->lockedRef.to_string());
|
||||||
|
}
|
||||||
|
logger->cout("Store paths: %s", concatStringsSep(" ", store->printStorePathSet(element.storePaths)));
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
@ -18,6 +18,9 @@ clearStore
|
|||||||
nix-build dependencies.nix --no-out-link
|
nix-build dependencies.nix --no-out-link
|
||||||
nix-build dependencies.nix --no-out-link --check
|
nix-build dependencies.nix --no-out-link --check
|
||||||
|
|
||||||
|
# Build failure exit codes (100, 104, etc.) are from
|
||||||
|
# doc/manual/src/command-ref/status-build-failure.md
|
||||||
|
|
||||||
# check for dangling temporary build directories
|
# check for dangling temporary build directories
|
||||||
# only retain if build fails and --keep-failed is specified, or...
|
# only retain if build fails and --keep-failed is specified, or...
|
||||||
# ...build is non-deterministic and --check and --keep-failed are both specified
|
# ...build is non-deterministic and --check and --keep-failed are both specified
|
||||||
|
@ -105,6 +105,8 @@ path2=$(nix eval --impure --raw --expr "(builtins.fetchGit $repo).outPath")
 [[ $(cat $path2/dir1/foo) = foo ]]
 
 [[ $(nix eval --impure --raw --expr "(builtins.fetchGit $repo).rev") = 0000000000000000000000000000000000000000 ]]
+[[ $(nix eval --impure --raw --expr "(builtins.fetchGit $repo).dirtyRev") = "${rev2}-dirty" ]]
+[[ $(nix eval --impure --raw --expr "(builtins.fetchGit $repo).dirtyShortRev") = "${rev2:0:7}-dirty" ]]
 
 # ... unless we're using an explicit ref or rev.
 path3=$(nix eval --impure --raw --expr "(builtins.fetchGit { url = $repo; ref = \"master\"; }).outPath")

@ -119,6 +121,10 @@ git -C $repo commit -m 'Bla3' -a
 path4=$(nix eval --impure --refresh --raw --expr "(builtins.fetchGit file://$repo).outPath")
 [[ $path2 = $path4 ]]
 
+[[ $(nix eval --impure --expr "builtins.hasAttr \"rev\" (builtins.fetchGit $repo)") == "true" ]]
+[[ $(nix eval --impure --expr "builtins.hasAttr \"dirtyRev\" (builtins.fetchGit $repo)") == "false" ]]
+[[ $(nix eval --impure --expr "builtins.hasAttr \"dirtyShortRev\" (builtins.fetchGit $repo)") == "false" ]]
+
 status=0
 nix eval --impure --raw --expr "(builtins.fetchGit { url = $repo; rev = \"$rev2\"; narHash = \"sha256-B5yIPHhEm0eysJKEsO7nqxprh9vcblFxpJG11gXJus1=\"; }).outPath" || status=$?
 [[ "$status" = "102" ]]
@ -25,6 +25,18 @@ EOF
 
 (! nix flake check $flakeDir)
 
+cat > $flakeDir/flake.nix <<EOF
+{
+  outputs = { self, ... }: {
+    overlays.x86_64-linux.foo = final: prev: {
+    };
+  };
+}
+EOF
+
+checkRes=$(nix flake check $flakeDir 2>&1 && fail "nix flake check --all-systems should have failed" || true)
+echo "$checkRes" | grepQuiet "error: overlay is not a function, but a set instead"
+
 cat > $flakeDir/flake.nix <<EOF
 {
   outputs = { self }: {
@ -95,11 +95,16 @@ json=$(nix flake metadata flake1 --json | jq .)
 [[ $(echo "$json" | jq -r .lastModified) = $(git -C $flake1Dir log -n1 --format=%ct) ]]
 hash1=$(echo "$json" | jq -r .revision)
 
+echo foo > $flake1Dir/foo
+git -C $flake1Dir add $flake1Dir/foo
+[[ $(nix flake metadata flake1 --json --refresh | jq -r .dirtyRevision) == "$hash1-dirty" ]]
+
 echo -n '# foo' >> $flake1Dir/flake.nix
 flake1OriginalCommit=$(git -C $flake1Dir rev-parse HEAD)
 git -C $flake1Dir commit -a -m 'Foo'
 flake1NewCommit=$(git -C $flake1Dir rev-parse HEAD)
 hash2=$(nix flake metadata flake1 --json --refresh | jq -r .revision)
+[[ $(nix flake metadata flake1 --json --refresh | jq -r .dirtyRevision) == "null" ]]
 [[ $hash1 != $hash2 ]]
 
 # Test 'nix build' on a flake.
@ -1,29 +1,30 @@
-{ fixed-output }:
+{ mode }:
 
 with import ./config.nix;
 
-mkDerivation ({
-  name = "ssl-export";
-  buildCommand = ''
-    # Add some indirection, otherwise grepping into the debug output finds the string.
-    report () { echo CERT_$1_IN_SANDBOX; }
+mkDerivation (
+  {
+    name = "ssl-export";
+    buildCommand = ''
+      # Add some indirection, otherwise grepping into the debug output finds the string.
+      report () { echo CERT_$1_IN_SANDBOX; }
 
       if [ -f /etc/ssl/certs/ca-certificates.crt ]; then
         content=$(</etc/ssl/certs/ca-certificates.crt)
         if [ "$content" == CERT_CONTENT ]; then
           report present
+        fi
+      else
+        report missing
       fi
-    else
-      report missing
-    fi
 
       # Always fail, because we do not want to bother with fixed-output
       # derivations being cached, and do not want to compute the right hash.
       false;
     '';
-} // (
-  if fixed-output == "fixed-output"
-  then { outputHash = "sha256:0000000000000000000000000000000000000000000000000000000000000000"; }
-  else { }
-))
+  } // {
+    fixed-output = { outputHash = "sha256:0000000000000000000000000000000000000000000000000000000000000000"; };
+    normal = { };
+  }.${mode}
+)
 
@ -11,6 +11,8 @@ requireSandboxSupport
 # otherwise things get complicated (e.g. if it's in /bin, do we need
 # /lib as well?).
 if [[ ! $SHELL =~ /nix/store ]]; then skipTest "Shell is not from Nix store"; fi
+# An alias to automatically bind-mount the $SHELL on nix-build invocations
+nix-sandbox-build () { nix-build --no-out-link --sandbox-paths /nix/store "$@"; }
 
 chmod -R u+w $TEST_ROOT/store0 || true
 rm -rf $TEST_ROOT/store0

@ -18,7 +20,7 @@ rm -rf $TEST_ROOT/store0
 export NIX_STORE_DIR=/my/store
 export NIX_REMOTE=$TEST_ROOT/store0
 
-outPath=$(nix-build dependencies.nix --no-out-link --sandbox-paths /nix/store)
+outPath=$(nix-sandbox-build dependencies.nix)
 
 [[ $outPath =~ /my/store/.*-dependencies ]]
 

@ -29,24 +31,31 @@ nix store ls -R -l $outPath | grep foobar
 nix store cat $outPath/foobar | grep FOOBAR
 
 # Test --check without hash rewriting.
-nix-build dependencies.nix --no-out-link --check --sandbox-paths /nix/store
+nix-sandbox-build dependencies.nix --check
 
 # Test that sandboxed builds with --check and -K can move .check directory to store
-nix-build check.nix -A nondeterministic --sandbox-paths /nix/store --no-out-link
+nix-sandbox-build check.nix -A nondeterministic
 
-(! nix-build check.nix -A nondeterministic --sandbox-paths /nix/store --no-out-link --check -K 2> $TEST_ROOT/log)
-if grepQuiet 'error: renaming' $TEST_ROOT/log; then false; fi
+# `100 + 4` means non-determinstic, see doc/manual/src/command-ref/status-build-failure.md
+expectStderr 104 nix-sandbox-build check.nix -A nondeterministic --check -K > $TEST_ROOT/log
+grepQuietInverse 'error: renaming' $TEST_ROOT/log
 grepQuiet 'may not be deterministic' $TEST_ROOT/log
 
 # Test that sandboxed builds cannot write to /etc easily
-(! nix-build -E 'with import ./config.nix; mkDerivation { name = "etc-write"; buildCommand = "echo > /etc/test"; }' --no-out-link --sandbox-paths /nix/store)
+# `100` means build failure without extra info, see doc/manual/src/command-ref/status-build-failure.md
+expectStderr 100 nix-sandbox-build -E 'with import ./config.nix; mkDerivation { name = "etc-write"; buildCommand = "echo > /etc/test"; }' |
+    grepQuiet "/etc/test: Permission denied"
 
 
 ## Test mounting of SSL certificates into the sandbox
 testCert () {
-    (! nix-build linux-sandbox-cert-test.nix --argstr fixed-output "$2" --no-out-link --sandbox-paths /nix/store --option ssl-cert-file "$3" 2> $TEST_ROOT/log)
-    cat $TEST_ROOT/log
-    grepQuiet "CERT_${1}_IN_SANDBOX" $TEST_ROOT/log
+    expectation=$1 # "missing" | "present"
+    mode=$2 # "normal" | "fixed-output"
+    certFile=$3 # a string that can be the path to a cert file
+    # `100` means build failure without extra info, see doc/manual/src/command-ref/status-build-failure.md
+    [ "$mode" == fixed-output ] && ret=1 || ret=100
+    expectStderr $ret nix-sandbox-build linux-sandbox-cert-test.nix --argstr mode "$mode" --option ssl-cert-file "$certFile" |
+        grepQuiet "CERT_${expectation}_IN_SANDBOX"
 }
 
 nocert=$TEST_ROOT/no-cert-file.pem
@ -47,8 +47,9 @@ cp ./config.nix $flake1Dir/
 
 # Test upgrading from nix-env.
 nix-env -f ./user-envs.nix -i foo-1.0
-nix profile list | grep '0 - - .*-foo-1.0'
+nix profile list | grep -A2 'Index:.*0' | grep 'Store paths:.*foo-1.0'
 nix profile install $flake1Dir -L
+nix profile list | grep -A4 'Index:.*1' | grep 'Locked flake URL:.*narHash'
 [[ $($TEST_HOME/.nix-profile/bin/hello) = "Hello World" ]]
 [ -e $TEST_HOME/.nix-profile/share/man ]
 (! [ -e $TEST_HOME/.nix-profile/include ])
@ -75,5 +75,20 @@
         su --login bob -c '(! nix-store --verify --repair 2>&1)' | tee diag 1>&2
         grep -F "you are not privileged to repair paths" diag
         """)
+
+        machine.succeed("""
+            set -x
+            su --login mallory -c '
+                nix-store --generate-binary-cache-key cache1.example.org sk1 pk1
+                (! nix store sign --key-file sk1 ${pathFour} 2>&1)' | tee diag 1>&2
+            grep -F "cannot open connection to remote store 'daemon'" diag
+        """)
+
+        machine.succeed("""
+            su --login bob -c '
+                nix-store --generate-binary-cache-key cache1.example.org sk1 pk1
+                nix store sign --key-file sk1 ${pathFour}
+            '
+        """)
   '';
 }
@ -2,6 +2,9 @@ programs += test-libstoreconsumer
 
 test-libstoreconsumer_DIR := $(d)
 
+# do not install
+test-libstoreconsumer_INSTALL_DIR :=
+
 test-libstoreconsumer_SOURCES := \
   $(wildcard $(d)/*.cc) \
 