Mirror of https://github.com/NixOS/nix.git

Commit cb4f85f11c: Merge branch 'master' into overlayfs-store

.github/workflows/ci.yml (vendored, 8 changed lines)
@ -64,7 +64,7 @@ jobs:
- run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
- uses: cachix/install-nix-action@v25
with:
install_url: https://releases.nixos.org/nix/nix-2.13.3/install
install_url: https://releases.nixos.org/nix/nix-2.20.3/install
- uses: cachix/cachix-action@v14
with:
name: '${{ env.CACHIX_NAME }}'
@ -116,7 +116,7 @@ jobs:
fetch-depth: 0
- uses: cachix/install-nix-action@v25
with:
install_url: https://releases.nixos.org/nix/nix-2.13.3/install
install_url: https://releases.nixos.org/nix/nix-2.20.3/install
- run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
- run: echo NIX_VERSION="$(nix --experimental-features 'nix-command flakes' eval .\#default.version | tr -d \")" >> $GITHUB_ENV
- uses: cachix/cachix-action@v14
@ -153,6 +153,8 @@ jobs:
IMAGE_ID=$(echo $IMAGE_ID | tr '[A-Z]' '[a-z]')

docker tag nix:$NIX_VERSION $IMAGE_ID:$NIX_VERSION
docker tag nix:$NIX_VERSION $IMAGE_ID:master
docker tag nix:$NIX_VERSION $IMAGE_ID:latest
docker push $IMAGE_ID:$NIX_VERSION
docker push $IMAGE_ID:latest
# deprecated 2024-02-24
docker push $IMAGE_ID:master

.gitignore (vendored, 4 changed lines)
@ -45,13 +45,16 @@ perl/Makefile.config
/src/libexpr/parser-tab.hh
/src/libexpr/parser-tab.output
/src/libexpr/nix.tbl
/src/libexpr/tests
/tests/unit/libexpr/libnixexpr-tests

# /src/libstore/
*.gen.*
/src/libstore/tests
/tests/unit/libstore/libnixstore-tests

# /src/libutil/
/src/libutil/tests
/tests/unit/libutil/libnixutil-tests

/src/nix/nix
@ -94,6 +97,7 @@ perl/Makefile.config
/tests/functional/ca/config.nix
/tests/functional/dyn-drv/config.nix
/tests/functional/repl-result-out
/tests/functional/debugger-test-out
/tests/functional/test-libstoreconsumer/test-libstoreconsumer

# /tests/functional/lang/

@ -63,7 +63,7 @@ Check out the [security policy](https://github.com/NixOS/nix/security/policy).
- Functional tests – [`tests/functional/**.sh`](./tests/functional)
- Unit tests – [`src/*/tests`](./src/)
- Integration tests – [`tests/nixos/*`](./tests/nixos)
- [ ] User documentation in the [manual](..doc/manual/src)
- [ ] User documentation in the [manual](./doc/manual/src)
- [ ] API documentation in header files
- [ ] Code and comments are self-explanatory
- [ ] Commit message explains **why** the change was made

Makefile (29 changed lines)
@ -42,12 +42,24 @@ ifeq ($(ENABLE_FUNCTIONAL_TESTS), yes)
makefiles += \
tests/functional/local.mk \
tests/functional/ca/local.mk \
tests/functional/git-hashing/local.mk \
tests/functional/dyn-drv/local.mk \
tests/functional/local-overlay-store/local.mk \
tests/functional/test-libstoreconsumer/local.mk \
tests/functional/plugins/local.mk
endif

# Some makefiles require access to built programs and must be included late.
makefiles-late =

ifeq ($(ENABLE_DOC_GEN), yes)
makefiles-late += doc/manual/local.mk
endif

ifeq ($(ENABLE_INTERNAL_API_DOCS), yes)
makefiles-late += doc/internal-api/local.mk
endif

# Miscellaneous global Flags

OPTIMIZE = 1
@ -57,6 +69,7 @@ ifeq ($(OPTIMIZE), 1)
GLOBAL_LDFLAGS += $(CXXLTO)
else
GLOBAL_CXXFLAGS += -O0 -U_FORTIFY_SOURCE
unexport NIX_HARDENING_ENABLE
endif

include mk/platform.mk
@ -71,7 +84,7 @@ ifdef HOST_WINDOWS
GLOBAL_LDFLAGS += -Wl,--export-all-symbols
endif

GLOBAL_CXXFLAGS += -g -Wall -include $(buildprefix)config.h -std=c++2a -I src
GLOBAL_CXXFLAGS += -g -Wall -Wimplicit-fallthrough -include $(buildprefix)config.h -std=c++2a -I src

# Include the main lib, causing rules to be defined

@ -96,24 +109,16 @@ installcheck:
@exit 1
endif

# Documentation or else fallback stub rules.
#
# The documentation makefiles be included after `mk/lib.mk` so rules
# refer to variables defined by `mk/lib.mk`. Rules are not "lazy" like
# variables, unfortunately.
# Documentation fallback stub rules.

ifeq ($(ENABLE_DOC_GEN), yes)
$(eval $(call include-sub-makefile, doc/manual/local.mk))
else
ifneq ($(ENABLE_DOC_GEN), yes)
.PHONY: manual-html manpages
manual-html manpages:
@echo "Generated docs are disabled. Configure without '--disable-doc-gen', or avoid calling 'make manpages' and 'make manual-html'."
@exit 1
endif

ifeq ($(ENABLE_INTERNAL_API_DOCS), yes)
$(eval $(call include-sub-makefile, doc/internal-api/local.mk))
else
ifneq ($(ENABLE_INTERNAL_API_DOCS), yes)
.PHONY: internal-api-html
internal-api-html:
@echo "Internal API docs are disabled. Configure with '--enable-internal-api-docs', or avoid calling 'make internal-api-html'."

@ -47,6 +47,10 @@ AC_DEFINE_UNQUOTED(SYSTEM, ["$system"], [platform identifier ('cpu-os')])
# State should be stored in /nix/var, unless the user overrides it explicitly.
test "$localstatedir" = '${prefix}/var' && localstatedir=/nix/var

# Assign a default value to C{,XX}FLAGS as the default configure script sets them
# to -O2 otherwise, which we don't want to have hardcoded
CFLAGS=${CFLAGS-""}
CXXFLAGS=${CXXFLAGS-""}

AC_PROG_CC
AC_PROG_CXX

@ -6,6 +6,8 @@ additional-css = ["custom.css"]
additional-js = ["redirects.js"]
edit-url-template = "https://github.com/NixOS/nix/tree/master/doc/manual/{path}"
git-repository-url = "https://github.com/NixOS/nix"
fold.enable = true
fold.level = 1

[preprocessor.anchors]
renderers = ["html"]

@ -18,7 +18,7 @@ const redirects = {
"chap-tuning-cores-and-jobs": "advanced-topics/cores-vs-jobs.html",
"chap-diff-hook": "advanced-topics/diff-hook.html",
"check-dirs-are-unregistered": "advanced-topics/diff-hook.html#check-dirs-are-unregistered",
"chap-distributed-builds": "advanced-topics/distributed-builds.html",
"chap-distributed-builds": "command-ref/conf-file.html#conf-builders",
"chap-post-build-hook": "advanced-topics/post-build-hook.html",
"chap-post-build-hook-caveats": "advanced-topics/post-build-hook.html#implementation-caveats",
"chap-writing-nix-expressions": "language/index.html",
@ -358,7 +358,11 @@ const redirects = {
"one-time-setup": "testing.html#one-time-setup",
"using-the-ci-generated-installer-for-manual-testing": "testing.html#using-the-ci-generated-installer-for-manual-testing",
"characterization-testing": "#characterisation-testing-unit",
}
},
"glossary.html": {
"gloss-local-store": "store/types/local-store.html",
"gloss-chroot-store": "store/types/local-store.html",
},
};

// the following code matches the current page's URL against the set of redirects.

doc/manual/rl-next/better-errors-in-nix-repl.md (new file, 40 lines)
@ -0,0 +1,40 @@
---
synopsis: Concise error printing in `nix repl`
prs: 9928
---

Previously, if an element of a list or attribute set threw an error while
evaluating, `nix repl` would print the entire error (including source location
information) inline. This output was clumsy and difficult to parse:

```
nix-repl> { err = builtins.throw "uh oh!"; }
{ err = «error:
… while calling the 'throw' builtin
at «string»:1:9:
1| { err = builtins.throw "uh oh!"; }
|         ^

error: uh oh!»; }
```

Now, only the error message is displayed, making the output much more readable.
```
nix-repl> { err = builtins.throw "uh oh!"; }
{ err = «error: uh oh!»; }
```

However, if the whole expression being evaluated throws an error, source
locations and (if applicable) a stack trace are printed, just like you'd expect:

```
nix-repl> builtins.throw "uh oh!"
error:
… while calling the 'throw' builtin
at «string»:1:1:
1| builtins.throw "uh oh!"
| ^

error: uh oh!
```

@ -0,0 +1,9 @@
---
synopsis: "`--debugger` can now access bindings from `let` expressions"
prs: 9918
issues: 8827
---

Breakpoints and errors in the bindings of a `let` expression can now access
those bindings in the debugger. Previously, only the body of `let` expressions
could access those bindings.

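To make the change concrete, here is a small, hypothetical expression (not taken from the release note). With `--debugger`, evaluation stops at the breakpoint, and the sibling `let` binding `greeting` is now visible in the debug REPL:

```nix
let
  greeting = "hello from the let block";
  # builtins.break pauses evaluation here when the debugger is active
  result = builtins.break (builtins.stringLength greeting);
in
  result
```
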
doc/manual/rl-next/debugger-on-trace.md (new file, 9 lines)
@ -0,0 +1,9 @@
---
synopsis: Enter the `--debugger` when `builtins.trace` is called if `debugger-on-trace` is set
prs: 9914
---

If the `debugger-on-trace` option is set and `--debugger` is given,
`builtins.trace` calls will behave similarly to `builtins.break` and will enter
the debug REPL. This is useful for determining where warnings are being emitted
from.

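As a minimal illustration (a hypothetical expression, assuming `debugger-on-trace = true` is set and Nix is run with `--debugger`), each `builtins.trace` call below would now drop into the debug REPL instead of only printing its message:

```nix
let
  x = builtins.trace "computing x" 40;
  y = builtins.trace "computing y" 2;
in
  x + y
```
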
doc/manual/rl-next/debugger-positions.md (new file, 25 lines)
@ -0,0 +1,25 @@
---
synopsis: Debugger prints source position information
prs: 9913
---

The `--debugger` now prints source location information, instead of the
pointers of source location information. Before:

```
nix-repl> :bt
0: while evaluating the attribute 'python311.pythonForBuild.pkgs'
0x600001522598
```

After:

```
0: while evaluating the attribute 'python311.pythonForBuild.pkgs'
/nix/store/hg65h51xnp74ikahns9hyf3py5mlbbqq-source/overrides/default.nix:132:27

131|
132| bootstrappingBase = pkgs.${self.python.pythonAttr}.pythonForBuild.pkgs;
| ^
133| in
```

@ -0,0 +1,25 @@
---
synopsis: The `--debugger` will start more reliably in `let` expressions and function calls
prs: 9917
issues: 6649
---

Previously, if you attempted to evaluate this file with the debugger:

```nix
let
a = builtins.trace "before inner break" (
builtins.break "hello"
);
b = builtins.trace "before outer break" (
builtins.break a
);
in
b
```

Nix would correctly enter the debugger at `builtins.break a`, but if you asked
it to `:continue`, it would skip over the `builtins.break "hello"` expression
entirely.

Now, Nix will correctly enter the debugger at both breakpoints.

doc/manual/rl-next/inherit-from-by-need.md (new file, 7 lines)
@ -0,0 +1,7 @@
---
synopsis: "`inherit (x) ...` evaluates `x` only once"
prs: 9847
---

`inherit (x) a b ...` now evaluates the expression `x` only once for all inherited attributes rather than once for each inherited attribute.
This does not usually have a measurable impact, but side-effects (such as `builtins.trace`) would be duplicated and expensive expressions (such as derivations) could cause a measurable slowdown.

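A small sketch (not part of the release note) makes the change observable: when the attribute set below is fully evaluated, the trace message is now printed once rather than once per inherited attribute.

```nix
let
  x = builtins.trace "evaluating x" { a = 1; b = 2; c = 3; };
in
{
  # All three attributes are inherited from the single evaluation of x.
  inherit (x) a b c;
}
```
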
doc/manual/rl-next/lambda-printing.md (new file, 50 lines)
@ -0,0 +1,50 @@
---
synopsis: Functions are printed with more detail
prs: 9606
issues: 7145
---

Functions and `builtins` are printed with more detail in `nix repl`, `nix
eval`, `builtins.trace`, and most other places values are printed.

Before:

```
$ nix repl nixpkgs
nix-repl> builtins.map
«primop»

nix-repl> builtins.map lib.id
«primop-app»

nix-repl> builtins.trace lib.id "my-value"
trace: <LAMBDA>
"my-value"

$ nix eval --file functions.nix
{ id = <LAMBDA>; primop = <PRIMOP>; primop-app = <PRIMOP-APP>; }
```

After:

```
$ nix repl nixpkgs
nix-repl> builtins.map
«primop map»

nix-repl> builtins.map lib.id
«partially applied primop map»

nix-repl> builtins.trace lib.id "my-value"
trace: «lambda id @ /nix/store/8rrzq23h2zq7sv5l2vhw44kls5w0f654-source/lib/trivial.nix:26:5»
"my-value"

$ nix eval --file functions.nix
{ id = «lambda id @ /Users/wiggles/nix/functions.nix:2:8»; primop = «primop map»; primop-app = «partially applied primop map»; }
```

This was actually released in Nix 2.20, but wasn't added to the release notes
so we're announcing it here. The historical release notes have been updated as well.

[type-error]: https://github.com/NixOS/nix/pull/9753
[coercion-error]: https://github.com/NixOS/nix/pull/9754

doc/manual/rl-next/more-commands-respect-ctrl-c.md (new file, 13 lines)
@ -0,0 +1,13 @@
---
synopsis: Nix commands respect Ctrl-C
prs: 9687 6995
issues: 7245
---

Previously, many Nix commands would hang indefinitely if Ctrl-C was pressed
while performing various operations (including `nix develop`, `nix flake
update`, and so on). With several fixes to Nix's signal handlers, Nix commands
will now exit quickly after Ctrl-C is pressed.

This was actually released in Nix 2.20, but wasn't added to the release notes
so we're announcing it here. The historical release notes have been updated as well.

doc/manual/rl-next/pretty-print-in-nix-repl.md (new file, 24 lines)
@ -0,0 +1,24 @@
---
synopsis: "`nix repl` pretty-prints values"
prs: 9931
---

`nix repl` will now pretty-print values:

```
{
  attrs = {
    a = {
      b = {
        c = { };
      };
    };
  };
  list = [ 1 ];
  list' = [
    1
    2
    3
  ];
}
```

doc/manual/rl-next/reduce-debugger-clutter.md (new file, 37 lines)
@ -0,0 +1,37 @@
---
synopsis: "Visual clutter in `--debugger` is reduced"
prs: 9919
---

Before:
```
info: breakpoint reached


Starting REPL to allow you to inspect the current state of the evaluator.

Welcome to Nix 2.20.0pre20231222_dirty. Type :? for help.

nix-repl> :continue
error: uh oh


Starting REPL to allow you to inspect the current state of the evaluator.

Welcome to Nix 2.20.0pre20231222_dirty. Type :? for help.

nix-repl>
```

After:

```
info: breakpoint reached

Nix 2.20.0pre20231222_dirty debugger
Type :? for help.
nix-repl> :continue
error: uh oh

nix-repl>
```

doc/manual/rl-next/repl-ctrl-c-while-printing.md (new file, 8 lines)
@ -0,0 +1,8 @@
---
synopsis: "`nix repl` now respects Ctrl-C while printing values"
prs: 9927
---

`nix repl` will now halt immediately when Ctrl-C is pressed while it's printing
a value. This is useful if you got curious about what would happen if you
printed all of Nixpkgs.

doc/manual/rl-next/repl-cycle-detection.md (new file, 22 lines)
@ -0,0 +1,22 @@
---
synopsis: Cycle detection in `nix repl` is simpler and more reliable
prs: 9926
issues: 8672
---

The cycle detection in `nix repl`, `nix eval`, `builtins.trace`, and everywhere
else values are printed is now simpler and matches the cycle detection in
`nix-instantiate --eval` output.

Before:

```
nix eval --expr 'let self = { inherit self; }; in self'
{ self = { self = «repeated»; }; }
```

After:

```
{ self = «repeated»; }
```

@ -0,0 +1,23 @@
---
synopsis: "In the debugger, `while evaluating the attribute` errors now include position information"
prs: 9915
---

Before:

```
0: while evaluating the attribute 'python311.pythonForBuild.pkgs'
0x600001522598
```

After:

```
0: while evaluating the attribute 'python311.pythonForBuild.pkgs'
/nix/store/hg65h51xnp74ikahns9hyf3py5mlbbqq-source/overrides/default.nix:132:27

131|
132| bootstrappingBase = pkgs.${self.python.pythonAttr}.pythonForBuild.pkgs;
| ^
133| in
```

doc/manual/rl-next/stack-size-macos.md (new file, 9 lines)
@ -0,0 +1,9 @@
---
synopsis: Stack size is increased on macOS
prs: 9860
---

Previously, Nix would set the stack size to 64MiB on Linux, but would leave the
stack size set to the default (approximately 8KiB) on macOS. Now, the stack
size is correctly set to 64MiB on macOS as well, which should reduce stack
overflow segfaults in deeply-recursive Nix expressions.

@ -104,11 +104,12 @@
- [Channels](command-ref/files/channels.md)
- [Default Nix expression](command-ref/files/default-nix-expression.md)
- [Architecture and Design](architecture/architecture.md)
- [JSON Formats](json/index.md)
- [Store Object Info](json/store-object-info.md)
- [Derivation](json/derivation.md)
- [Protocols](protocols/index.md)
- [Formats and Protocols](protocols/index.md)
- [JSON Formats](protocols/json/index.md)
- [Store Object Info](protocols/json/store-object-info.md)
- [Derivation](protocols/json/derivation.md)
- [Serving Tarball Flakes](protocols/tarball-fetcher.md)
- [Store Path Specification](protocols/store-path.md)
- [Derivation "ATerm" file format](protocols/derivation-aterm.md)
- [Glossary](glossary.md)
- [Contributing](contributing/index.md)

@ -36,5 +36,6 @@
/package-management/s3-substituter /store/types/s3-binary-cache-store 301!

/protocols/protocols /protocols 301!
/json/* /protocols/json/:splat 301!

/release-notes/release-notes /release-notes 301!

@ -36,16 +36,8 @@ error: cannot connect to 'mac'
then you need to ensure that the `PATH` of non-interactive login shells
contains Nix.

> **Warning**
>
> If you are building via the Nix daemon, it is the Nix daemon user account (that is, `root`) that should have SSH access to a user (not necessarily `root`) on the remote machine.
>
> If you can’t or don’t want to configure `root` to be able to access the remote machine, you can use a private Nix store instead by passing e.g. `--store ~/my-nix` when running a Nix command from the local machine.

The list of remote machines can be specified on the command line or in
the Nix configuration file. The former is convenient for testing. For
example, the following command allows you to build a derivation for
`x86_64-darwin` on a Linux machine:
The [list of remote build machines](@docroot@/command-ref/conf-file.md#conf-builders) can be specified on the command line or in the Nix configuration file.
For example, the following command allows you to build a derivation for `x86_64-darwin` on a Linux machine:

```console
$ uname
@ -60,97 +52,20 @@ $ cat ./result
Darwin
```

It is possible to specify multiple builders separated by a semicolon or
a newline, e.g.
It is possible to specify multiple build machines separated by a semicolon or a newline, e.g.

```console
--builders 'ssh://mac x86_64-darwin ; ssh://beastie x86_64-freebsd'
```

Each machine specification consists of the following elements, separated
by spaces. Only the first element is required. To leave a field at its
default, set it to `-`.

1. The URI of the remote store in the format
`ssh://[username@]hostname`, e.g. `ssh://nix@mac` or `ssh://mac`.
For backward compatibility, `ssh://` may be omitted. The hostname
may be an alias defined in your `~/.ssh/config`.

2. A comma-separated list of Nix platform type identifiers, such as
`x86_64-darwin`. It is possible for a machine to support multiple
platform types, e.g., `i686-linux,x86_64-linux`. If omitted, this
defaults to the local platform type.

3. The SSH identity file to be used to log in to the remote machine. If
omitted, SSH will use its regular identities.

4. The maximum number of builds that Nix will execute in parallel on
the machine. Typically this should be equal to the number of CPU
cores. For instance, the machine `itchy` in the example will execute
up to 8 builds in parallel.

5. The “speed factor”, indicating the relative speed of the machine. If
there are multiple machines of the right type, Nix will prefer the
fastest, taking load into account.

6. A comma-separated list of *supported features*. If a derivation has
the `requiredSystemFeatures` attribute, then Nix will only perform
the derivation on a machine that has the specified features. For
instance, the attribute

```nix
requiredSystemFeatures = [ "kvm" ];
```

will cause the build to be performed on a machine that has the `kvm`
feature.

7. A comma-separated list of *mandatory features*. A machine will only
be used to build a derivation if all of the machine’s mandatory
features appear in the derivation’s `requiredSystemFeatures`
attribute.

8. The (base64-encoded) public host key of the remote machine. If omitted, SSH
will use its regular known-hosts file. Specifically, the field is calculated
via `base64 -w0 /etc/ssh/ssh_host_ed25519_key.pub`.

For example, the machine specification

nix@scratchy.labs.cs.uu.nl i686-linux /home/nix/.ssh/id_scratchy_auto 8 1 kvm
nix@itchy.labs.cs.uu.nl i686-linux /home/nix/.ssh/id_scratchy_auto 8 2
nix@poochie.labs.cs.uu.nl i686-linux /home/nix/.ssh/id_scratchy_auto 1 2 kvm benchmark

specifies several machines that can perform `i686-linux` builds.
However, `poochie` will only do builds that have the attribute

```nix
requiredSystemFeatures = [ "benchmark" ];
```

or

```nix
requiredSystemFeatures = [ "benchmark" "kvm" ];
```

`itchy` cannot do builds that require `kvm`, but `scratchy` does support
such builds. For regular builds, `itchy` will be preferred over
`scratchy` because it has a higher speed factor.

Remote builders can also be configured in `nix.conf`, e.g.
Remote build machines can also be configured in [`nix.conf`](@docroot@/command-ref/conf-file.md), e.g.

builders = ssh://mac x86_64-darwin ; ssh://beastie x86_64-freebsd

Finally, remote builders can be configured in a separate configuration
file included in `builders` via the syntax `@file`. For example,
Finally, remote build machines can be configured in a separate configuration
file included in `builders` via the syntax `@/path/to/file`. For example,

builders = @/etc/nix/machines

causes the list of machines in `/etc/nix/machines` to be included. (This
is the default.)

If you want the builders to use caches, you likely want to set the
option `builders-use-substitutes` in your local `nix.conf`.

To build only on remote builders and disable building on the local
machine, you can use the option `--max-jobs 0`.
causes the list of machines in `/etc/nix/machines` to be included.
(This is the default.)

@ -51,7 +51,7 @@ These options are for deleting old [profiles] prior to deleting unreachable [sto
- <span id="opt-delete-old">[`--delete-old`](#opt-delete-old)</span> / `-d`\
Delete all old generations of profiles.

This is the equivalent of invoking `nix-env --delete-generations old` on each found profile.
This is the equivalent of invoking [`nix-env --delete-generations old`](@docroot@/command-ref/nix-env/delete-generations.md#generations-old) on each found profile.

- <span id="opt-delete-older-than">[`--delete-older-than`](#opt-delete-older-than)</span> *period*\
Delete all generations of profiles older than the specified amount (except for the generations that were active at that point in time).

@ -12,13 +12,13 @@ This operation deletes the specified generations of the current profile.

*generations* can be a one of the following:

- <span id="generations-list">`<number>...`</span>:\
- <span id="generations-list">[`<number>...`](#generations-list)</span>:\
A list of generation numbers, each one a separate command-line argument.

Delete exactly the profile generations given by their generation number.
Deleting the current generation is not allowed.

- The special value <span id="generations-old">`old`</span>
- <span id="generations-old">[The special value `old`](#generations-old)</span>

Delete all generations except the current one.

@ -30,7 +30,7 @@ This operation deletes the specified generations of the current profile.
> Because one can roll back to a previous generation, it is possible to have generations newer than the current one.
> They will also be deleted.

- <span id="generations-time">`<number>d`</span>:\
- <span id="generations-time">[`<number>d`](#generations-time)</span>:\
The last *number* days

*Example*: `30d`
@ -38,7 +38,7 @@ This operation deletes the specified generations of the current profile.
Delete all generations created more than *number* days ago, except the most recent one of them.
This allows rolling back to generations that were available within the specified period.

- <span id="generations-count">`+<number>`</span>:\
- <span id="generations-count">[`+<number>`](#generations-count)</span>:\
The last *number* generations up to the present

*Example*: `+5`

@ -44,13 +44,13 @@ To build Nix itself in this shell:

```console
[nix-shell]$ autoreconfPhase
[nix-shell]$ configurePhase
[nix-shell]$ make -j $NIX_BUILD_CORES
[nix-shell]$ make -j $NIX_BUILD_CORES OPTIMIZE=0
```

To install it in `$(pwd)/outputs` and test it:

```console
[nix-shell]$ make install
[nix-shell]$ make install OPTIMIZE=0
[nix-shell]$ make installcheck check -j $NIX_BUILD_CORES
[nix-shell]$ nix --version
nix (Nix) 2.12
@ -147,10 +147,10 @@ Nix can be built for various platforms, as specified in [`flake.nix`]:

In order to build Nix for a different platform than the one you're currently
on, you need a way for your current Nix installation to build code for that
platform. Common solutions include [remote builders] and [binary format emulation]
platform. Common solutions include [remote build machines] and [binary format emulation]
(only supported on NixOS).

[remote builders]: ../advanced-topics/distributed-builds.md
[remote builders]: @docroot@/language/derivations.md#attr-builder
[binary format emulation]: https://nixos.org/manual/nixos/stable/options.html#opt-boot.binfmt.emulatedSystems

Given such a setup, executing the build only requires selecting the respective attribute.

@ -37,7 +37,7 @@
This can be achieved by:
- Fetching a pre-built [store object] from a [substituter]
- Running the [`builder`](@docroot@/language/derivations.md#attr-builder) executable as specified in the corresponding [derivation]
- Delegating to a [remote builder](@docroot@/advanced-topics/distributed-builds.html) and retrieving the outputs
- Delegating to a [remote machine](@docroot@/command-ref/conf-file.md#conf-builders) and retrieving the outputs
<!-- TODO: link [running] to build process page, #8888 -->

See [`nix-store --realise`](@docroot@/command-ref/nix-store/realise.md) for a detailed description of the algorithm.
@ -59,23 +59,12 @@

- [store]{#gloss-store}

A collection of store objects, with operations to manipulate that collection.
See [Nix store](./store/index.md) for details.
A collection of [store objects][store object], with operations to manipulate that collection.
See [Nix Store](./store/index.md) for details.

There are many types of stores.
See [`nix help-stores`](@docroot@/command-ref/new-cli/nix3-help-stores.md) for a complete list.

From the perspective of the location where Nix is invoked, the Nix store can be referred to _local_ or _remote_.
Only a [local store]{#gloss-local-store} exposes a location in the file system of the machine where Nix is invoked that allows access to store objects, typically `/nix/store`.
Local stores can be used for building [derivations](#gloss-derivation).
See [Local Store](@docroot@/command-ref/new-cli/nix3-help-stores.md#local-store) for details.
There are many types of stores, see [Store Types](./store/types/index.md) for details.

[store]: #gloss-store
[local store]: #gloss-local-store

- [chroot store]{#gloss-chroot-store}

A [local store] whose canonical path is anything other than `/nix/store`.

- [binary cache]{#gloss-binary-cache}

@ -87,7 +76,7 @@

- [store path]{#gloss-store-path}

The location of a [store object](@docroot@/store/index.md#store-object) in the file system, i.e., an immediate child of the Nix store directory.
The location of a [store object] in the file system, i.e., an immediate child of the Nix store directory.

> **Example**
>
@ -243,6 +232,7 @@
- All paths in the store path's [closure] are valid.

[validity]: #gloss-validity
[local store]: @docroot@/store/types/local-store.md

- [user environment]{#gloss-user-env}

@ -16,7 +16,7 @@ nix (Nix) 2.18.1
> Writing to the [local store](@docroot@/store/types/local-store.md) with a newer version of Nix, for example by building derivations with [`nix-build`](@docroot@/command-ref/nix-build.md) or [`nix-store --realise`](@docroot@/command-ref/nix-store/realise.md), may change the database schema!
> Reverting to an older version of Nix may therefore require purging the store database before it can be used.

### Linux multi-user
## Linux multi-user

```console
$ sudo su

@ -257,7 +257,7 @@ Derivations can declare some infrequently used optional attributes.
of the environment (typically, a few hundred kilobyte).

- [`preferLocalBuild`]{#adv-attr-preferLocalBuild}\
If this attribute is set to `true` and [distributed building is enabled](../advanced-topics/distributed-builds.md), then, if possible, the derivation will be built locally instead of being forwarded to a remote machine.
If this attribute is set to `true` and [distributed building is enabled](@docroot@/command-ref/conf-file.md#conf-builders), then, if possible, the derivation will be built locally instead of being forwarded to a remote machine.
This is useful for derivations that are cheapest to build locally.

- [`allowSubstitutes`]{#adv-attr-allowSubstitutes}\

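For illustration of the `preferLocalBuild` attribute discussed in the hunk above, here is a minimal, hypothetical derivation that opts out of being forwarded to remote build machines (the builder and its arguments are placeholders, not from the manual):

```nix
derivation {
  name = "generate-small-config";
  system = builtins.currentSystem;
  builder = "/bin/sh";                            # placeholder builder
  args = [ "-c" "echo 'cheap to build' > $out" ];
  preferLocalBuild = true;                        # build locally when possible
}
```
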
@ -36,7 +36,7 @@ It outputs an attribute set, and produces a [store derivation] as a side effect
The system type on which the [`builder`](#attr-builder) executable is meant to be run.

A necessary condition for Nix to build derivations locally is that the `system` attribute matches the current [`system` configuration option].
It can automatically [build on other platforms](../advanced-topics/distributed-builds.md) by forwarding build requests to other machines.
It can automatically [build on other platforms](@docroot@/language/derivations.md#attr-builder) by forwarding build requests to other machines.

[`system` configuration option]: @docroot@/command-ref/conf-file.md#conf-system

@ -1,6 +1,8 @@
# Import From Derivation

The value of a Nix expression can depend on the contents of a [store object](@docroot@/glossary.md#gloss-store-object).
The value of a Nix expression can depend on the contents of a [store object].

[store object]: @docroot@/glossary.md#gloss-store-object

Passing an expression `expr` that evaluates to a [store path](@docroot@/glossary.md#gloss-store-path) to any built-in function which reads from the filesystem constitutes Import From Derivation (IFD):

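The hunk cuts off before listing those functions. A rough sketch of the pattern it describes (a hypothetical expression, not part of the patch, assuming a POSIX shell at `/bin/sh`): `import` reads from the store path produced by `config`, so the derivation must be built before evaluation can continue.

```nix
let
  config = derivation {
    name = "generated-config";
    system = builtins.currentSystem;
    builder = "/bin/sh";
    args = [ "-c" "echo '{ answer = 42; }' > $out" ];
  };
in
  # Importing the derivation's output forces a build at evaluation time (IFD).
  (import config).answer
```
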
@ -84,7 +84,7 @@ The `+` operator is overloaded to also work on strings and paths.
>
> *string* `+` *string*

Concatenate two [string]s and merge their string contexts.
Concatenate two [strings][string] and merge their string contexts.

[String concatenation]: #string-concatenation

@ -94,7 +94,7 @@ Concatenate two [string]s and merge their string contexts.
>
> *path* `+` *path*

Concatenate two [path]s.
Concatenate two [paths][path].
The result is a path.

[Path concatenation]: #path-concatenation
@ -150,9 +150,9 @@ If an attribute name is present in both, the attribute value from the latter is

Comparison is

- [arithmetic] for [number]s
- lexicographic for [string]s and [path]s
- item-wise lexicographic for [list]s:
- [arithmetic] for [numbers][number]
- lexicographic for [strings][string] and [paths][path]
- item-wise lexicographic for [lists][list]:
elements at the same index in both lists are compared according to their type and skipped if they are equal.

All comparison operators are implemented in terms of `<`, and the following equivalencies hold:
@ -163,12 +163,12 @@ All comparison operators are implemented in terms of `<`, and the following equi
| *a* `>` *b* | *b* `<` *a* |
| *a* `>=` *b* | `! (` *a* `<` *b* `)` |

[Comparison]: #comparison-operators
[Comparison]: #comparison

## Equality

- [Attribute sets][attribute set] and [list]s are compared recursively, and therefore are fully evaluated.
- Comparison of [function]s always returns `false`.
- [Attribute sets][attribute set] and [lists][list] are compared recursively, and therefore are fully evaluated.
- Comparison of [functions][function] always returns `false`.
- Numbers are type-compatible, see [arithmetic] operators.
- Floating point numbers only differ up to a limited precision.

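The following throwaway expression (not part of the manual) exercises the rules touched above: string concatenation, item-wise lexicographic list comparison, and the fact that comparing functions always yields `false`.

```nix
{
  concat  = "foo" + "bar";       # "foobar"; string contexts are merged
  lists   = [ 1 2 ] < [ 1 3 ];   # true: first elements are equal and skipped, then 2 < 3
  lambdas = (x: x) == (x: x);    # false: function comparison always returns false
}
```
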
@ -20,6 +20,8 @@ Rather than writing

(where `freetype` is a [derivation]), you can instead write

[derivation]: ../glossary.md#gloss-derivation

```nix
"--with-freetype2-library=${freetype}/lib"
```

@ -156,6 +156,8 @@ function and the fifth being a set.

Note that lists are only lazy in values, and they are strict in length.

Elements in a list can be accessed using [`builtins.elemAt`](./builtins.md#builtins-elemAt).

## Attribute Set

An attribute set is a collection of name-value-pairs (called *attributes*) enclosed in curly brackets (`{ }`).

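Returning to the list semantics added in the hunk above, a short example (not from the manual) shows both properties: `builtins.length` never forces the elements, while `builtins.elemAt` forces only the element it returns.

```nix
let
  xs = [ (throw "element 0 is never forced") 20 30 ];
in
{
  len   = builtins.length xs;    # 3; length is strict, elements stay unevaluated
  third = builtins.elemAt xs 2;  # 30; only element 2 is forced
}
```
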
@ -14,11 +14,11 @@ Info about a [store object].

* `narHash`:

Hash of the [file system object] part of the store object when serialized as a [Nix Archive](#gloss-nar).
Hash of the [file system object] part of the store object when serialized as a [Nix Archive].

* `narSize`:

Size of the [file system object] part of the store object when serialized as a [Nix Archive](#gloss-nar).
Size of the [file system object] part of the store object when serialized as a [Nix Archive].

* `references`:

@ -30,6 +30,7 @@ Info about a [store object].

[store path]: @docroot@/glossary.md#gloss-store-path
[file system object]: @docroot@/store/file-system-object.md
[Nix Archive]: @docroot@/glossary.md#gloss-nar

## Impure fields

doc/manual/src/protocols/store-path.md (new file, 131 lines)
@ -0,0 +1,131 @@
# Complete Store Path Calculation

This is the complete specification for how store paths are calculated.

The format of this specification is close to [Extended Backus–Naur form](https://en.wikipedia.org/wiki/Extended_Backus%E2%80%93Naur_form), but must deviate for a few things such as hash functions which we treat as bidirectional for specification purposes.

Regular users do *not* need to know this information --- store paths can be treated as black boxes computed from the properties of the store objects they refer to.
But for those interested in exactly how Nix works, e.g. if they are reimplementing it, this information can be useful.

## Store path proper

```ebnf
store-path = store-dir "/" digest "-" name
```
where

- `name` = the name of the store object.

- `store-dir` = the [store directory](@docroot@/store/store-path.md#store-directory)

- `digest` = base-32 representation of the first 160 bits of a [SHA-256] hash of `fingerprint`

This the hash part of the store name

## Fingerprint

- ```ebnf
fingerprint = type ":" sha256 ":" inner-digest ":" store ":" name
```

Note that it includes the location of the store as well as the name to make sure that changes to either of those are reflected in the hash
(e.g. you won't get `/nix/store/<digest>-name1` and `/nix/store/<digest>-name2`, or `/gnu/store/<digest>-name1`, with equal hash parts).

- `type` = one of:

- ```ebnf
| "text" ( ":" store-path )*
```

for encoded derivations written to the store.
The optional trailing store paths are the references of the store object.

- ```ebnf
| "source" ( ":" store-path )*
```

For paths copied to the store and hashed via a [Nix Archive (NAR)] and [SHA-256][sha-256].
Just like in the text case, we can have the store objects referenced by their paths.
Additionally, we can have an optional `:self` label to denote self reference.

- ```ebnf
| "output:" id
```

For either the outputs built from derivations,
paths copied to the store hashed that area single file hashed directly, or the via a hash algorithm other than [SHA-256][sha-256].
(in that case "source" is used; this is only necessary for compatibility).

`id` is the name of the output (usually, "out").
For content-addressed store objects, `id`, is always "out".

- `inner-digest` = base-16 representation of a SHA-256 hash of `inner-fingerprint`

## Inner fingerprint

- `inner-fingerprint` = one of the following based on `type`:

- if `type` = `"text:" ...`:

the string written to the resulting store path.

- if `type` = `"source:" ...`:

the the hash of the [Nix Archive (NAR)] serialization of the [file system object](@docroot@/store/file-system-object.md) of the store object.

- if `type` = `"output:" id`:

- For input-addressed derivation outputs:

the [ATerm](@docroot@/protocols/derivation-aterm.md) serialization of the derivation modulo fixed output derivations.

- For content-addressed store paths:

```ebnf
"fixed:out:" rec algo ":" hash ":"
```

where

- `rec` = one of:

- ```ebnf
| ""
```
(empty string) for hashes of the flat (single file) serialization

- ```ebnf
| "r:"
```
hashes of the for [Nix Archive (NAR)] (arbitrary file system object) serialization

- ```ebnf
| "git:"
```
hashes of the [Git blob/tree](https://git-scm.com/book/en/v2/Git-Internals-Git-Objects) [Merkel tree](https://en.wikipedia.org/wiki/Merkle_tree) format

- ```ebnf
algo = "md5" | "sha1" | "sha256"
```

- `hash` = base-16 representation of the path or flat hash of the contents of the path (or expected contents of the path for fixed-output derivations).

Note that `id` = `"out"`, regardless of the name part of the store path.
Also note that NAR + SHA-256 must not use this case, and instead must use the `type` = `"source:" ...` case.

[Nix Archive (NAR)]: @docroot@/glossary.md#gloss-NAR
[sha-256]: https://en.m.wikipedia.org/wiki/SHA-256

### Historical Note

The `type` = `"source:" ...` and `type` = `"output:out"` grammars technically overlap in purpose,
in that both can represent data hashed by its SHA-256 NAR serialization.

The original reason for this way of computing names was to prevent name collisions (for security).
For instance, the thinking was that it shouldn't be feasible to come up with a derivation whose output path collides with the path for a copied source.
The former would have an `inner-fingerprint` starting with `output:out:`, while the latter would have an `inner-fingerprint` starting with `source:`.

Since `64519cfd657d024ae6e2bb74cb21ad21b886fd2a` (2008), however, it was decided that separating derivation-produced vs manually-hashed content-addressed data like this was not useful.
Now, data that is content-addressed with SHA-256 + NAR-serialization always uses the `source:...` construction, regardless of how it was produced (manually or by derivation).
This allows freely switching between using [fixed-output derivations](@docroot@/glossary.md#gloss-fixed-output-derivation) for fetching, and fetching out-of-band and then manually adding.
It also removes the ambiguity from the grammar.

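As a rough illustration of the fingerprint grammar specified above (a sketch only, with made-up contents; for a real "source" path the inner digest would be the SHA-256 of the NAR serialization, and the final step of taking the first 160 bits of the outer hash and base-32 encoding them is not shown, since plain `builtins.hashString` cannot express it):

```nix
let
  storeDir    = "/nix/store";
  name        = "example-source";
  # Stand-in for the SHA-256 of the NAR serialization of the file system object.
  innerDigest = builtins.hashString "sha256" "<NAR serialization goes here>";
  # fingerprint = type ":" sha256 ":" inner-digest ":" store ":" name
  fingerprint = "source:sha256:${innerDigest}:${storeDir}:${name}";
in
  # The first 160 bits of this hash, base-32 encoded, would form the hash part.
  builtins.hashString "sha256" fingerprint
```
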
@ -167,3 +167,36 @@

error: expected a set but found an integer
```

- Functions are printed with more detail [#7145](https://github.com/NixOS/nix/issues/7145) [#9606](https://github.com/NixOS/nix/pull/9606)

`nix repl`, `nix eval`, `builtins.trace`, and most other places values are
printed will now include function names and source location information:

```
$ nix repl nixpkgs
nix-repl> builtins.map
«primop map»

nix-repl> builtins.map lib.id
«partially applied primop map»

nix-repl> builtins.trace lib.id "my-value"
trace: «lambda id @ /nix/store/8rrzq23h2zq7sv5l2vhw44kls5w0f654-source/lib/trivial.nix:26:5»
"my-value"
```

- Flake operations like `nix develop` will no longer fail when run in a Git
repository where the `flake.lock` file is `.gitignore`d
[#8854](https://github.com/NixOS/nix/issues/8854)
[#9324](https://github.com/NixOS/nix/pull/9324)

- Nix commands will now respect Ctrl-C
[#7145](https://github.com/NixOS/nix/issues/7145)
[#6995](https://github.com/NixOS/nix/pull/6995)
[#9687](https://github.com/NixOS/nix/pull/9687)

Previously, many Nix commands would hang indefinitely if Ctrl-C was pressed
while performing various operations (including `nix develop`, `nix flake
update`, and so on). With several fixes to Nix's signal handlers, Nix
commands will now exit quickly after Ctrl-C is pressed.

@ -34,16 +34,16 @@
},
"nixpkgs": {
"locked": {
"lastModified": 1705033721,
"narHash": "sha256-K5eJHmL1/kev6WuqyqqbS1cdNnSidIZ3jeqJ7GbrYnQ=",
"lastModified": 1709083642,
"narHash": "sha256-7kkJQd4rZ+vFrzWu8sTRtta5D1kBG0LSRYAfhtmMlSo=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "a1982c92d8980a0114372973cbdfe0a307f1bdea",
"rev": "b550fe4b4776908ac2a861124307045f8e717c8e",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "nixos-23.05-small",
"ref": "release-23.11",
"repo": "nixpkgs",
"type": "github"
}

flake.nix (26 changed lines)
@ -1,7 +1,9 @@
{
description = "The purely functional package manager";

inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-23.05-small";
# TODO switch to nixos-23.11-small
# https://nixpk.gs/pr-tracker.html?pr=291954
inputs.nixpkgs.url = "github:NixOS/nixpkgs/release-23.11";
inputs.nixpkgs-regression.url = "github:NixOS/nixpkgs/215d4d0fd80ca5163643b03a33fde804a29cc1e2";
inputs.flake-compat = { url = "github:edolstra/flake-compat"; flake = false; };
inputs.libgit2 = { url = "github:libgit2/libgit2"; flake = false; };
@ -10,20 +12,10 @@

let
inherit (nixpkgs) lib;

# Experimental fileset library: https://github.com/NixOS/nixpkgs/pull/222981
# Not an "idiomatic" flake input because:
# - Propagation to dependent locks: https://github.com/NixOS/nix/issues/7730
# - Subflake would download redundant and huge parent flake
# - No git tree hash support: https://github.com/NixOS/nix/issues/6044
inherit (import (builtins.fetchTarball { url = "https://github.com/NixOS/nix/archive/1bdcd7fc8a6a40b2e805bad759b36e64e911036b.tar.gz"; sha256 = "sha256:14ljlpdsp4x7h1fkhbmc4bd3vsqnx8zdql4h3037wh09ad6a0893"; }))
fileset;
inherit (lib) fileset;

officialRelease = false;

# Set to true to build the release notes for the next release.
buildUnreleasedNotes = false;

version = lib.fileContents ./.version + versionSuffix;
versionSuffix =
if officialRelease
@ -404,8 +396,11 @@
# Make bash completion work.
XDG_DATA_DIRS+=:$out/share
'';

nativeBuildInputs = attrs.nativeBuildInputs or []
++ lib.optional stdenv.cc.isClang pkgs.buildPackages.bear
# TODO: Remove the darwin check once
# https://github.com/NixOS/nixpkgs/pull/291814 is available
++ lib.optional (stdenv.cc.isClang && !stdenv.buildPlatform.isDarwin) pkgs.buildPackages.bear
++ lib.optional (stdenv.cc.isClang && stdenv.hostPlatform == stdenv.buildPlatform) pkgs.buildPackages.clang-tools;
});
in
@ -417,8 +412,9 @@
(forAllStdenvs (stdenvName: makeShell pkgs pkgs.${stdenvName}));
in
(makeShells "native" nixpkgsFor.${system}.native) //
(makeShells "static" nixpkgsFor.${system}.static) //
(lib.genAttrs shellCrossSystems (crossSystem: let pkgs = nixpkgsFor.${system}.cross.${crossSystem}; in makeShell pkgs pkgs.stdenv)) //
(lib.optionalAttrs (!nixpkgsFor.${system}.native.stdenv.isDarwin)
(makeShells "static" nixpkgsFor.${system}.static)) //
(lib.genAttrs shellCrossSystems (crossSystem: let pkgs = nixpkgsFor.${system}.cross.${crossSystem}; in makeShell pkgs pkgs.stdenv)) //
{
default = self.devShells.${system}.native-stdenvPackages;
}

@ -97,6 +97,10 @@ $(foreach test-group, $(install-tests-groups), \
$(eval $(call run-test,$(test),$(install_test_init))) \
$(eval $(test-group).test-group: $(test).test)))

# Include makefiles requiring built programs.
$(foreach mf, $(makefiles-late), $(eval $(call include-sub-makefile,$(mf))))


$(foreach file, $(man-pages), $(eval $(call install-data-in, $(file), $(mandir)/man$(patsubst .%,%,$(suffix $(file))))))

@ -10,10 +10,10 @@ endef

ifneq ($(MAKECMDGOALS), clean)

$(buildprefix)%.h: %.h.in
$(buildprefix)%.h: %.h.in $(buildprefix)config.status
$(trace-gen) rm -f $@ && cd $(buildprefixrel) && ./config.status --quiet --header=$(@:$(buildprefix)%=%)

$(buildprefix)%: %.in
$(buildprefix)%: %.in $(buildprefix)config.status
$(trace-gen) rm -f $@ && cd $(buildprefixrel) && ./config.status --quiet --file=$(@:$(buildprefix)%=%)

endif

package.nix (12 changed lines)
@ -154,7 +154,7 @@ in {
in
fileset.toSource {
root = ./.;
fileset = fileset.intersect baseFiles (fileset.unions ([
fileset = fileset.intersection baseFiles (fileset.unions ([
# For configure
./.version
./configure.ac
@ -209,6 +209,10 @@ in {
(lib.getBin lowdown)
mdbook
mdbook-linkcheck
] ++ lib.optionals doInstallCheck [
git
mercurial
openssh
] ++ lib.optionals (doInstallCheck || enableManual) [
jq # Also for custom mdBook preprocessor.
] ++ lib.optional stdenv.hostPlatform.isLinux util-linux
@ -249,12 +253,6 @@ in {
dontBuild = !attrs.doBuild;
doCheck = attrs.doCheck;

nativeCheckInputs = [
git
mercurial
openssh
];

disallowedReferences = [ boost ];

preConfigure = lib.optionalString (doBuild && ! stdenv.hostPlatform.isStatic) (

perl/.yath.rc (new file, 2 lines)
@ -0,0 +1,2 @@
[test]
-I=rel(lib/Nix)

@ -5,12 +5,12 @@
, nix, curl, bzip2, xz, boost, libsodium, darwin
}:

perl.pkgs.toPerlModule (stdenv.mkDerivation {
perl.pkgs.toPerlModule (stdenv.mkDerivation (finalAttrs: {
name = "nix-perl-${nix.version}";

src = fileset.toSource {
root = ../.;
fileset = fileset.unions [
fileset = fileset.unions ([
../.version
../m4
../mk
@ -20,7 +20,10 @@ perl.pkgs.toPerlModule (stdenv.mkDerivation {
./configure.ac
./lib
./local.mk
];
] ++ lib.optionals finalAttrs.doCheck [
./.yath.rc
./t
]);
};

nativeBuildInputs =
@ -40,6 +43,13 @@ perl.pkgs.toPerlModule (stdenv.mkDerivation {
++ lib.optional (stdenv.isLinux || stdenv.isDarwin) libsodium
++ lib.optional stdenv.isDarwin darwin.apple_sdk.frameworks.Security;

# `perlPackages.Test2Harness` is marked broken for Darwin
doCheck = !stdenv.isDarwin;

nativeCheckInputs = [
perlPackages.Test2Harness
];

configureFlags = [
"--with-dbi=${perlPackages.DBI}/${perl.libPrefix}"
"--with-dbd-sqlite=${perlPackages.DBDSQLite}/${perl.libPrefix}"
@ -48,4 +58,4 @@ perl.pkgs.toPerlModule (stdenv.mkDerivation {
enableParallelBuilding = true;

postUnpack = "sourceRoot=$sourceRoot/perl";
})
}))

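The `finalAttrs:` pattern introduced in the hunk above lets later attributes refer to the derivation's final attribute set. A minimal standalone sketch (a hypothetical package, assuming `lib` and `stdenv` are in scope) of the same idiom:

```nix
stdenv.mkDerivation (finalAttrs: {
  pname = "example";
  version = "0.1";
  doCheck = true;
  # Test sources are included only when the checks will actually run,
  # mirroring the `lib.optionals finalAttrs.doCheck [ ./t ]` change above.
  src = lib.fileset.toSource {
    root = ./.;
    fileset = lib.fileset.unions ([ ./lib ] ++ lib.optionals finalAttrs.doCheck [ ./t ]);
  };
})
```
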
@ -12,17 +12,20 @@ our %EXPORT_TAGS = ( 'all' => [ qw( ) ] );
our @EXPORT_OK = ( @{ $EXPORT_TAGS{'all'} } );

our @EXPORT = qw(
setVerbosity
isValidPath queryReferences queryPathInfo queryDeriver queryPathHash
queryPathFromHashPart
topoSortPaths computeFSClosure followLinksToStorePath exportPaths importPaths
StoreWrapper
StoreWrapper::new
StoreWrapper::isValidPath StoreWrapper::queryReferences StoreWrapper::queryPathInfo StoreWrapper::queryDeriver StoreWrapper::queryPathHash
StoreWrapper::queryPathFromHashPart
StoreWrapper::topoSortPaths StoreWrapper::computeFSClosure followLinksToStorePath StoreWrapper::exportPaths StoreWrapper::importPaths
StoreWrapper::addToStore StoreWrapper::makeFixedOutputPath
StoreWrapper::derivationFromPath
StoreWrapper::addTempRoot
StoreWrapper::queryRawRealisation

hashPath hashFile hashString convertHash
signString checkSignature
addToStore makeFixedOutputPath
derivationFromPath
addTempRoot
getBinDir getStoreDir
queryRawRealisation
setVerbosity
);

our $VERSION = '0.15';

@ -17,47 +17,61 @@
#include <sodium.h>
#include <nlohmann/json.hpp>


using namespace nix;

static bool libStoreInitialized = false;

static ref<Store> store()
{
static std::shared_ptr<Store> _store;
if (!_store) {
try {
initLibStore();
_store = openStore();
} catch (Error & e) {
croak("%s", e.what());
}
}
return ref<Store>(_store);
}

struct StoreWrapper {
ref<Store> store;
};

MODULE = Nix::Store PACKAGE = Nix::Store
PROTOTYPES: ENABLE

TYPEMAP: <<HERE
StoreWrapper * O_OBJECT

OUTPUT
O_OBJECT
sv_setref_pv( $arg, CLASS, (void*)$var );

INPUT
O_OBJECT
if ( sv_isobject($arg) && (SvTYPE(SvRV($arg)) == SVt_PVMG) ) {
$var = ($type)SvIV((SV*)SvRV( $arg ));
}
else {
warn( \"${Package}::$func_name() -- \"
\"$var not a blessed SV reference\");
XSRETURN_UNDEF;
}
HERE

#undef dNOOP // Hack to work around "error: declaration of 'Perl___notused' has a different language linkage" error message on clang.
#define dNOOP

void
StoreWrapper::DESTROY()

void init()
CODE:
store();


void setVerbosity(int level)
CODE:
verbosity = (Verbosity) level;


int isValidPath(char * path)
StoreWrapper *
StoreWrapper::new(char * s = nullptr)
CODE:
static std::shared_ptr<Store> _store;
try {
RETVAL = store()->isValidPath(store()->parseStorePath(path));
if (!libStoreInitialized) {
initLibStore();
libStoreInitialized = true;
}
if (items == 1) {
_store = openStore();
RETVAL = new StoreWrapper {
.store = ref<Store>{_store}
};
} else {
RETVAL = new StoreWrapper {
.store = openStore(s)
};
}
} catch (Error & e) {
croak("%s", e.what());
}
@ -65,52 +79,81 @@ int isValidPath(char * path)
RETVAL


SV * queryReferences(char * path)
void init()
CODE:
if (!libStoreInitialized) {
initLibStore();
libStoreInitialized = true;
}


void setVerbosity(int level)
CODE:
verbosity = (Verbosity) level;


int
StoreWrapper::isValidPath(char * path)
CODE:
try {
RETVAL = THIS->store->isValidPath(THIS->store->parseStorePath(path));
} catch (Error & e) {
croak("%s", e.what());
}
OUTPUT:
RETVAL


SV *
StoreWrapper::queryReferences(char * path)
PPCODE:
try {
for (auto & i : store()->queryPathInfo(store()->parseStorePath(path))->references)
XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(i).c_str(), 0)));
for (auto & i : THIS->store->queryPathInfo(THIS->store->parseStorePath(path))->references)
XPUSHs(sv_2mortal(newSVpv(THIS->store->printStorePath(i).c_str(), 0)));
} catch (Error & e) {
croak("%s", e.what());
}


SV * queryPathHash(char * path)
SV *
StoreWrapper::queryPathHash(char * path)
PPCODE:
try {
auto s = store()->queryPathInfo(store()->parseStorePath(path))->narHash.to_string(HashFormat::Nix32, true);
auto s = THIS->store->queryPathInfo(THIS->store->parseStorePath(path))->narHash.to_string(HashFormat::Nix32, true);
XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
} catch (Error & e) {
croak("%s", e.what());
}


SV * queryDeriver(char * path)
SV *
StoreWrapper::queryDeriver(char * path)
PPCODE:
try {
auto info = store()->queryPathInfo(store()->parseStorePath(path));
auto info = THIS->store->queryPathInfo(THIS->store->parseStorePath(path));
if (!info->deriver) XSRETURN_UNDEF;
XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(*info->deriver).c_str(), 0)));
XPUSHs(sv_2mortal(newSVpv(THIS->store->printStorePath(*info->deriver).c_str(), 0)));
} catch (Error & e) {
croak("%s", e.what());
}


SV * queryPathInfo(char * path, int base32)
SV *
StoreWrapper::queryPathInfo(char * path, int base32)
PPCODE:
try {
auto info = store()->queryPathInfo(store()->parseStorePath(path));
auto info = THIS->store->queryPathInfo(THIS->store->parseStorePath(path));
if (!info->deriver)
XPUSHs(&PL_sv_undef);
else
XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(*info->deriver).c_str(), 0)));
XPUSHs(sv_2mortal(newSVpv(THIS->store->printStorePath(*info->deriver).c_str(), 0)));
auto s = info->narHash.to_string(base32 ? HashFormat::Nix32 : HashFormat::Base16, true);
XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
mXPUSHi(info->registrationTime);
mXPUSHi(info->narSize);
AV * refs = newAV();
for (auto & i : info->references)
av_push(refs, newSVpv(store()->printStorePath(i).c_str(), 0));
av_push(refs, newSVpv(THIS->store->printStorePath(i).c_str(), 0));
XPUSHs(sv_2mortal(newRV((SV *) refs)));
AV * sigs = newAV();
for (auto & i : info->sigs)
@ -120,10 +163,11 @@ SV * queryPathInfo(char * path, int base32)
croak("%s", e.what());
}

SV * queryRawRealisation(char * outputId)
SV *
StoreWrapper::queryRawRealisation(char * outputId)
PPCODE:
try {
auto realisation = store()->queryRealisation(DrvOutput::parse(outputId));
auto realisation = THIS->store->queryRealisation(DrvOutput::parse(outputId));
if (realisation)
XPUSHs(sv_2mortal(newSVpv(realisation->toJSON().dump().c_str(), 0)));
else
@ -133,46 +177,50 @@ SV * queryRawRealisation(char * outputId)
|
||||
}
|
||||
|
||||
|
||||
SV * queryPathFromHashPart(char * hashPart)
|
||||
SV *
|
||||
StoreWrapper::queryPathFromHashPart(char * hashPart)
|
||||
PPCODE:
|
||||
try {
|
||||
auto path = store()->queryPathFromHashPart(hashPart);
|
||||
XPUSHs(sv_2mortal(newSVpv(path ? store()->printStorePath(*path).c_str() : "", 0)));
|
||||
auto path = THIS->store->queryPathFromHashPart(hashPart);
|
||||
XPUSHs(sv_2mortal(newSVpv(path ? THIS->store->printStorePath(*path).c_str() : "", 0)));
|
||||
} catch (Error & e) {
|
||||
croak("%s", e.what());
|
||||
}
|
||||
|
||||
|
||||
SV * computeFSClosure(int flipDirection, int includeOutputs, ...)
|
||||
SV *
|
||||
StoreWrapper::computeFSClosure(int flipDirection, int includeOutputs, ...)
|
||||
PPCODE:
|
||||
try {
|
||||
StorePathSet paths;
|
||||
for (int n = 2; n < items; ++n)
|
||||
store()->computeFSClosure(store()->parseStorePath(SvPV_nolen(ST(n))), paths, flipDirection, includeOutputs);
|
||||
THIS->store->computeFSClosure(THIS->store->parseStorePath(SvPV_nolen(ST(n))), paths, flipDirection, includeOutputs);
|
||||
for (auto & i : paths)
|
||||
XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(i).c_str(), 0)));
|
||||
XPUSHs(sv_2mortal(newSVpv(THIS->store->printStorePath(i).c_str(), 0)));
|
||||
} catch (Error & e) {
|
||||
croak("%s", e.what());
|
||||
}
|
||||
|
||||
|
||||
SV * topoSortPaths(...)
|
||||
SV *
|
||||
StoreWrapper::topoSortPaths(...)
|
||||
PPCODE:
|
||||
try {
|
||||
StorePathSet paths;
|
||||
for (int n = 0; n < items; ++n) paths.insert(store()->parseStorePath(SvPV_nolen(ST(n))));
|
||||
auto sorted = store()->topoSortPaths(paths);
|
||||
for (int n = 0; n < items; ++n) paths.insert(THIS->store->parseStorePath(SvPV_nolen(ST(n))));
|
||||
auto sorted = THIS->store->topoSortPaths(paths);
|
||||
for (auto & i : sorted)
|
||||
XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(i).c_str(), 0)));
|
||||
XPUSHs(sv_2mortal(newSVpv(THIS->store->printStorePath(i).c_str(), 0)));
|
||||
} catch (Error & e) {
|
||||
croak("%s", e.what());
|
||||
}
|
||||
|
||||
|
||||
SV * followLinksToStorePath(char * path)
|
||||
SV *
|
||||
StoreWrapper::followLinksToStorePath(char * path)
|
||||
CODE:
|
||||
try {
|
||||
RETVAL = newSVpv(store()->printStorePath(store()->followLinksToStorePath(path)).c_str(), 0);
|
||||
RETVAL = newSVpv(THIS->store->printStorePath(THIS->store->followLinksToStorePath(path)).c_str(), 0);
|
||||
} catch (Error & e) {
|
||||
croak("%s", e.what());
|
||||
}
|
||||
@ -180,35 +228,38 @@ SV * followLinksToStorePath(char * path)
|
||||
RETVAL
|
||||
|
||||
|
||||
void exportPaths(int fd, ...)
|
||||
void
|
||||
StoreWrapper::exportPaths(int fd, ...)
|
||||
PPCODE:
|
||||
try {
|
||||
StorePathSet paths;
|
||||
for (int n = 1; n < items; ++n) paths.insert(store()->parseStorePath(SvPV_nolen(ST(n))));
|
||||
for (int n = 1; n < items; ++n) paths.insert(THIS->store->parseStorePath(SvPV_nolen(ST(n))));
|
||||
FdSink sink(fd);
|
||||
store()->exportPaths(paths, sink);
|
||||
THIS->store->exportPaths(paths, sink);
|
||||
} catch (Error & e) {
|
||||
croak("%s", e.what());
|
||||
}
|
||||
|
||||
|
||||
void importPaths(int fd, int dontCheckSigs)
|
||||
void
|
||||
StoreWrapper::importPaths(int fd, int dontCheckSigs)
|
||||
PPCODE:
|
||||
try {
|
||||
FdSource source(fd);
|
||||
store()->importPaths(source, dontCheckSigs ? NoCheckSigs : CheckSigs);
|
||||
THIS->store->importPaths(source, dontCheckSigs ? NoCheckSigs : CheckSigs);
|
||||
} catch (Error & e) {
|
||||
croak("%s", e.what());
|
||||
}
|
||||
|
||||
|
||||
SV * hashPath(char * algo, int base32, char * path)
|
||||
SV *
|
||||
hashPath(char * algo, int base32, char * path)
|
||||
PPCODE:
|
||||
try {
|
||||
PosixSourceAccessor accessor;
|
||||
auto [accessor, canonPath] = PosixSourceAccessor::createAtRoot(path);
|
||||
Hash h = hashPath(
|
||||
accessor, CanonPath::fromCwd(path),
|
||||
FileIngestionMethod::Recursive, parseHashAlgo(algo)).first;
|
||||
accessor, canonPath,
|
||||
FileIngestionMethod::Recursive, parseHashAlgo(algo));
|
||||
auto s = h.to_string(base32 ? HashFormat::Nix32 : HashFormat::Base16, false);
|
||||
XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
|
||||
} catch (Error & e) {
|
||||
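The hunk above switches the XS `hashPath` wrapper from `CanonPath::fromCwd(path)` to `PosixSourceAccessor::createAtRoot(path)`, which yields an accessor plus the canonical path inside it, and the free `hashPath` now returns the `Hash` directly instead of a pair. A sketch of the new pattern in plain C++ (names of locals are stand-ins; headers as used elsewhere in libstore/libutil):

// Sketch only -- accessor-based hashing as used in the wrapper above.
std::string path = "/some/path";            // stand-in input
auto [accessor, canonPath] = PosixSourceAccessor::createAtRoot(path);
Hash h = hashPath(accessor, canonPath,
                  FileIngestionMethod::Recursive, parseHashAlgo("sha256"));
auto printable = h.to_string(HashFormat::Nix32, /* include algo prefix: */ false);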
@ -280,64 +331,67 @@ int checkSignature(SV * publicKey_, SV * sig_, char * msg)
|
||||
RETVAL
|
||||
|
||||
|
||||
SV * addToStore(char * srcPath, int recursive, char * algo)
|
||||
SV *
|
||||
StoreWrapper::addToStore(char * srcPath, int recursive, char * algo)
|
||||
PPCODE:
|
||||
try {
|
||||
auto method = recursive ? FileIngestionMethod::Recursive : FileIngestionMethod::Flat;
|
||||
PosixSourceAccessor accessor;
|
||||
auto path = store()->addToStore(
|
||||
auto [accessor, canonPath] = PosixSourceAccessor::createAtRoot(srcPath);
|
||||
auto path = THIS->store->addToStore(
|
||||
std::string(baseNameOf(srcPath)),
|
||||
accessor, CanonPath::fromCwd(srcPath),
|
||||
accessor, canonPath,
|
||||
method, parseHashAlgo(algo));
|
||||
XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(path).c_str(), 0)));
|
||||
XPUSHs(sv_2mortal(newSVpv(THIS->store->printStorePath(path).c_str(), 0)));
|
||||
} catch (Error & e) {
|
||||
croak("%s", e.what());
|
||||
}
|
||||
|
||||
|
||||
SV * makeFixedOutputPath(int recursive, char * algo, char * hash, char * name)
|
||||
SV *
|
||||
StoreWrapper::makeFixedOutputPath(int recursive, char * algo, char * hash, char * name)
|
||||
PPCODE:
|
||||
try {
|
||||
auto h = Hash::parseAny(hash, parseHashAlgo(algo));
|
||||
auto method = recursive ? FileIngestionMethod::Recursive : FileIngestionMethod::Flat;
|
||||
auto path = store()->makeFixedOutputPath(name, FixedOutputInfo {
|
||||
auto path = THIS->store->makeFixedOutputPath(name, FixedOutputInfo {
|
||||
.method = method,
|
||||
.hash = h,
|
||||
.references = {},
|
||||
});
|
||||
XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(path).c_str(), 0)));
|
||||
XPUSHs(sv_2mortal(newSVpv(THIS->store->printStorePath(path).c_str(), 0)));
|
||||
} catch (Error & e) {
|
||||
croak("%s", e.what());
|
||||
}
|
||||
|
||||
|
||||
SV * derivationFromPath(char * drvPath)
|
||||
SV *
|
||||
StoreWrapper::derivationFromPath(char * drvPath)
|
||||
PREINIT:
|
||||
HV *hash;
|
||||
CODE:
|
||||
try {
|
||||
Derivation drv = store()->derivationFromPath(store()->parseStorePath(drvPath));
|
||||
Derivation drv = THIS->store->derivationFromPath(THIS->store->parseStorePath(drvPath));
|
||||
hash = newHV();
|
||||
|
||||
HV * outputs = newHV();
|
||||
for (auto & i : drv.outputsAndOptPaths(*store())) {
|
||||
for (auto & i : drv.outputsAndOptPaths(*THIS->store)) {
|
||||
hv_store(
|
||||
outputs, i.first.c_str(), i.first.size(),
|
||||
!i.second.second
|
||||
? newSV(0) /* null value */
|
||||
: newSVpv(store()->printStorePath(*i.second.second).c_str(), 0),
|
||||
: newSVpv(THIS->store->printStorePath(*i.second.second).c_str(), 0),
|
||||
0);
|
||||
}
|
||||
hv_stores(hash, "outputs", newRV((SV *) outputs));
|
||||
|
||||
AV * inputDrvs = newAV();
|
||||
for (auto & i : drv.inputDrvs.map)
|
||||
av_push(inputDrvs, newSVpv(store()->printStorePath(i.first).c_str(), 0)); // !!! ignores i->second
|
||||
av_push(inputDrvs, newSVpv(THIS->store->printStorePath(i.first).c_str(), 0)); // !!! ignores i->second
|
||||
hv_stores(hash, "inputDrvs", newRV((SV *) inputDrvs));
|
||||
|
||||
AV * inputSrcs = newAV();
|
||||
for (auto & i : drv.inputSrcs)
|
||||
av_push(inputSrcs, newSVpv(store()->printStorePath(i).c_str(), 0));
|
||||
av_push(inputSrcs, newSVpv(THIS->store->printStorePath(i).c_str(), 0));
|
||||
hv_stores(hash, "inputSrcs", newRV((SV *) inputSrcs));
|
||||
|
||||
hv_stores(hash, "platform", newSVpv(drv.platform.c_str(), 0));
|
||||
@ -361,10 +415,11 @@ SV * derivationFromPath(char * drvPath)
|
||||
RETVAL
|
||||
|
||||
|
||||
void addTempRoot(char * storePath)
|
||||
void
|
||||
StoreWrapper::addTempRoot(char * storePath)
|
||||
PPCODE:
|
||||
try {
|
||||
store()->addTempRoot(store()->parseStorePath(storePath));
|
||||
THIS->store->addTempRoot(THIS->store->parseStorePath(storePath));
|
||||
} catch (Error & e) {
|
||||
croak("%s", e.what());
|
||||
}
|
||||
|
@ -41,3 +41,6 @@ Store_FORCE_INSTALL = 1
|
||||
Store_INSTALL_DIR = $(perllibdir)/auto/Nix/Store
|
||||
|
||||
clean-files += lib/Nix/Config.pm lib/Nix/Store.cc Makefile.config
|
||||
|
||||
check: all
|
||||
yath test
|
||||
|
13
perl/t/init.t
Normal file
@ -0,0 +1,13 @@
use strict;
use warnings;
use Test2::V0;

use Nix::Store;

my $s = new Nix::Store("dummy://");

my $res = $s->isValidPath("/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar");

ok(!$res, "should not have path");

done_testing;
@ -102,7 +102,7 @@ poly_extra_try_me_commands() {
|
||||
poly_configure_nix_daemon_service() {
|
||||
task "Setting up the nix-daemon LaunchDaemon"
|
||||
_sudo "to set up the nix-daemon as a LaunchDaemon" \
|
||||
/usr/bin/install -m -rw-r--r-- "/nix/var/nix/profiles/default$NIX_DAEMON_DEST" "$NIX_DAEMON_DEST"
|
||||
/usr/bin/install -m "u=rw,go=r" "/nix/var/nix/profiles/default$NIX_DAEMON_DEST" "$NIX_DAEMON_DEST"
|
||||
|
||||
_sudo "to load the LaunchDaemon plist for nix-daemon" \
|
||||
launchctl load /Library/LaunchDaemons/org.nixos.nix-daemon.plist
|
||||
|
@ -58,6 +58,31 @@ readonly EXTRACTED_NIX_PATH="$(dirname "$0")"
|
||||
|
||||
readonly ROOT_HOME=~root
|
||||
|
||||
readonly PROXY_ENVIRONMENT_VARIABLES=(
|
||||
http_proxy
|
||||
https_proxy
|
||||
ftp_proxy
|
||||
no_proxy
|
||||
HTTP_PROXY
|
||||
HTTPS_PROXY
|
||||
FTP_PROXY
|
||||
NO_PROXY
|
||||
)
|
||||
|
||||
SUDO_EXTRA_ENVIRONMENT_VARIABLES=()
|
||||
|
||||
setup_sudo_extra_environment_variables() {
|
||||
local i=${#SUDO_EXTRA_ENVIRONMENT_VARIABLES[@]}
|
||||
for variable in "${PROXY_ENVIRONMENT_VARIABLES[@]}"; do
|
||||
if [ "x${!variable:-}" != "x" ]; then
|
||||
SUDO_EXTRA_ENVIRONMENT_VARIABLES[i]="$variable=${!variable}"
|
||||
i=$((i + 1))
|
||||
fi
|
||||
done
|
||||
}
|
||||
|
||||
setup_sudo_extra_environment_variables
|
||||
|
||||
if [ -t 0 ] && [ -z "${NIX_INSTALLER_YES:-}" ]; then
|
||||
readonly IS_HEADLESS='no'
|
||||
else
|
||||
@ -361,7 +386,7 @@ _sudo() {
|
||||
if is_root; then
|
||||
env "$@"
|
||||
else
|
||||
sudo "$@"
|
||||
sudo "${SUDO_EXTRA_ENVIRONMENT_VARIABLES[@]}" "$@"
|
||||
fi
|
||||
}
|
||||
|
||||
|
@ -202,7 +202,7 @@ static int main_build_remote(int argc, char * * argv)
|
||||
else
|
||||
drvstr = "<unknown>";
|
||||
|
||||
auto error = hintformat(errorText);
|
||||
auto error = HintFmt(errorText);
|
||||
error
|
||||
% drvstr
|
||||
% neededSystem
|
||||
|
@ -9,6 +9,7 @@
|
||||
#include "store-api.hh"
|
||||
#include "command.hh"
|
||||
#include "tarball.hh"
|
||||
#include "fetch-to-store.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
@ -156,7 +157,7 @@ Bindings * MixEvalArgs::getAutoArgs(EvalState & state)
|
||||
for (auto & i : autoArgs) {
|
||||
auto v = state.allocValue();
|
||||
if (i.second[0] == 'E')
|
||||
state.mkThunk_(*v, state.parseExprFromString(i.second.substr(1), state.rootPath(CanonPath::fromCwd())));
|
||||
state.mkThunk_(*v, state.parseExprFromString(i.second.substr(1), state.rootPath(".")));
|
||||
else
|
||||
v->mkString(((std::string_view) i.second).substr(1));
|
||||
res.insert(state.symbols.create(i.first), v);
|
||||
@ -164,11 +165,12 @@ Bindings * MixEvalArgs::getAutoArgs(EvalState & state)
|
||||
return res.finish();
|
||||
}
|
||||
|
||||
SourcePath lookupFileArg(EvalState & state, std::string_view s, CanonPath baseDir)
|
||||
SourcePath lookupFileArg(EvalState & state, std::string_view s, const Path * baseDir)
|
||||
{
|
||||
if (EvalSettings::isPseudoUrl(s)) {
|
||||
auto storePath = fetchers::downloadTarball(
|
||||
state.store, EvalSettings::resolvePseudoUrl(s), "source", false).storePath;
|
||||
auto accessor = fetchers::downloadTarball(
|
||||
EvalSettings::resolvePseudoUrl(s)).accessor;
|
||||
auto storePath = fetchToStore(*state.store, SourcePath(accessor), FetchMode::Copy);
|
||||
return state.rootPath(CanonPath(state.store->toRealPath(storePath)));
|
||||
}
|
||||
|
||||
@ -185,7 +187,7 @@ SourcePath lookupFileArg(EvalState & state, std::string_view s, CanonPath baseDi
|
||||
}
|
||||
|
||||
else
|
||||
return state.rootPath(CanonPath(s, baseDir));
|
||||
return state.rootPath(baseDir ? absPath(s, *baseDir) : absPath(s));
|
||||
}
|
||||
|
||||
}
|
||||
|
@ -29,6 +29,6 @@ private:
|
||||
std::map<std::string, std::string> autoArgs;
|
||||
};
|
||||
|
||||
SourcePath lookupFileArg(EvalState & state, std::string_view s, CanonPath baseDir = CanonPath::fromCwd());
|
||||
SourcePath lookupFileArg(EvalState & state, std::string_view s, const Path * baseDir = nullptr);
|
||||
|
||||
}
|
||||
|
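The `lookupFileArg` signature above replaces the eagerly computed `CanonPath::fromCwd()` default with a nullable `const Path *`, so callers only depend on the current working directory when no base directory is supplied. A minimal sketch of the two call shapes, assuming the Nix headers and an initialized `EvalState` as in the surrounding libcmd code (the wrapper function is hypothetical):

// Sketch only -- illustrates the new nullable base-directory parameter.
#include "common-eval-args.hh"   // lookupFileArg
#include "eval.hh"               // EvalState
#include "file-system.hh"        // absPath (assumed location of this helper)

static SourcePath resolveFileArg(EvalState & state, std::string_view arg,
                                 const Path * baseDir /* may be nullptr */)
{
    // nullptr means "resolve relative paths against the current working
    // directory", mirroring the old CanonPath::fromCwd() default.
    return lookupFileArg(state, arg, baseDir);
}

// Usage, as in SourceExprCommand::parseInstallables above:
//   Path dir = absPath(getCommandBaseDir());
//   auto p = resolveFileArg(*state, *file, &dir);         // explicit base directory
//   auto q = resolveFileArg(*state, "foo.nix", nullptr);  // fall back to the CWD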
@ -17,7 +17,7 @@ Strings editorFor(const SourcePath & file, uint32_t line)
|
||||
editor.find("vim") != std::string::npos ||
|
||||
editor.find("kak") != std::string::npos))
|
||||
args.push_back(fmt("+%d", line));
|
||||
args.push_back(path->abs());
|
||||
args.push_back(path->string());
|
||||
return args;
|
||||
}
|
||||
|
||||
|
@ -45,7 +45,7 @@ ref<InstallableValue> InstallableValue::require(ref<Installable> installable)
|
||||
std::optional<DerivedPathWithInfo> InstallableValue::trySinglePathToDerivedPaths(Value & v, const PosIdx pos, std::string_view errorCtx)
|
||||
{
|
||||
if (v.type() == nPath) {
|
||||
auto storePath = fetchToStore(*state->store, v.path());
|
||||
auto storePath = fetchToStore(*state->store, v.path(), FetchMode::Copy);
|
||||
return {{
|
||||
.path = DerivedPath::Opaque {
|
||||
.path = std::move(storePath),
|
||||
|
@ -487,10 +487,11 @@ Installables SourceExprCommand::parseInstallables(
|
||||
state->eval(e, *vFile);
|
||||
}
|
||||
else if (file) {
|
||||
state->evalFile(lookupFileArg(*state, *file, CanonPath::fromCwd(getCommandBaseDir())), *vFile);
|
||||
auto dir = absPath(getCommandBaseDir());
|
||||
state->evalFile(lookupFileArg(*state, *file, &dir), *vFile);
|
||||
}
|
||||
else {
|
||||
CanonPath dir(CanonPath::fromCwd(getCommandBaseDir()));
|
||||
Path dir = absPath(getCommandBaseDir());
|
||||
auto e = state->parseExprFromString(*expr, state->rootPath(dir));
|
||||
state->eval(e, *vFile);
|
||||
}
|
||||
|
121
src/libcmd/misc-store-flags.cc
Normal file
@ -0,0 +1,121 @@
|
||||
#include "misc-store-flags.hh"
|
||||
|
||||
namespace nix::flag
|
||||
{
|
||||
|
||||
static void hashFormatCompleter(AddCompletions & completions, size_t index, std::string_view prefix)
|
||||
{
|
||||
for (auto & format : hashFormats) {
|
||||
if (hasPrefix(format, prefix)) {
|
||||
completions.add(format);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Args::Flag hashFormatWithDefault(std::string && longName, HashFormat * hf)
|
||||
{
|
||||
assert(*hf == nix::HashFormat::SRI);
|
||||
return Args::Flag {
|
||||
.longName = std::move(longName),
|
||||
.description = "Hash format (`base16`, `nix32`, `base64`, `sri`). Default: `sri`.",
|
||||
.labels = {"hash-format"},
|
||||
.handler = {[hf](std::string s) {
|
||||
*hf = parseHashFormat(s);
|
||||
}},
|
||||
.completer = hashFormatCompleter,
|
||||
};
|
||||
}
|
||||
|
||||
Args::Flag hashFormatOpt(std::string && longName, std::optional<HashFormat> * ohf)
|
||||
{
|
||||
return Args::Flag {
|
||||
.longName = std::move(longName),
|
||||
.description = "Hash format (`base16`, `nix32`, `base64`, `sri`).",
|
||||
.labels = {"hash-format"},
|
||||
.handler = {[ohf](std::string s) {
|
||||
*ohf = std::optional<HashFormat>{parseHashFormat(s)};
|
||||
}},
|
||||
.completer = hashFormatCompleter,
|
||||
};
|
||||
}
|
||||
|
||||
static void hashAlgoCompleter(AddCompletions & completions, size_t index, std::string_view prefix)
|
||||
{
|
||||
for (auto & algo : hashAlgorithms)
|
||||
if (hasPrefix(algo, prefix))
|
||||
completions.add(algo);
|
||||
}
|
||||
|
||||
Args::Flag hashAlgo(std::string && longName, HashAlgorithm * ha)
|
||||
{
|
||||
return Args::Flag {
|
||||
.longName = std::move(longName),
|
||||
.description = "Hash algorithm (`md5`, `sha1`, `sha256`, or `sha512`).",
|
||||
.labels = {"hash-algo"},
|
||||
.handler = {[ha](std::string s) {
|
||||
*ha = parseHashAlgo(s);
|
||||
}},
|
||||
.completer = hashAlgoCompleter,
|
||||
};
|
||||
}
|
||||
|
||||
Args::Flag hashAlgoOpt(std::string && longName, std::optional<HashAlgorithm> * oha)
|
||||
{
|
||||
return Args::Flag {
|
||||
.longName = std::move(longName),
|
||||
.description = "Hash algorithm (`md5`, `sha1`, `sha256`, or `sha512`). Can be omitted for SRI hashes.",
|
||||
.labels = {"hash-algo"},
|
||||
.handler = {[oha](std::string s) {
|
||||
*oha = std::optional<HashAlgorithm>{parseHashAlgo(s)};
|
||||
}},
|
||||
.completer = hashAlgoCompleter,
|
||||
};
|
||||
}
|
||||
|
||||
Args::Flag fileIngestionMethod(FileIngestionMethod * method)
|
||||
{
|
||||
return Args::Flag {
|
||||
.longName = "mode",
|
||||
// FIXME indentation carefully made for context, this is messed up.
|
||||
.description = R"(
|
||||
How to compute the hash of the input.
|
||||
One of:
|
||||
|
||||
- `nar` (the default): Serialises the input as an archive (following the [_Nix Archive Format_](https://edolstra.github.io/pubs/phd-thesis.pdf#page=101)) and passes that to the hash function.
|
||||
|
||||
- `flat`: Assumes that the input is a single file and directly passes it to the hash function;
|
||||
)",
|
||||
.labels = {"file-ingestion-method"},
|
||||
.handler = {[method](std::string s) {
|
||||
*method = parseFileIngestionMethod(s);
|
||||
}},
|
||||
};
|
||||
}
|
||||
|
||||
Args::Flag contentAddressMethod(ContentAddressMethod * method)
|
||||
{
|
||||
return Args::Flag {
|
||||
.longName = "mode",
|
||||
// FIXME indentation carefully made for context, this is messed up.
|
||||
.description = R"(
|
||||
How to compute the content-address of the store object.
|
||||
One of:
|
||||
|
||||
- `nar` (the default): Serialises the input as an archive (following the [_Nix Archive Format_](https://edolstra.github.io/pubs/phd-thesis.pdf#page=101)) and passes that to the hash function.
|
||||
|
||||
- `flat`: Assumes that the input is a single file and directly passes it to the hash function;
|
||||
|
||||
- `text`: Like `flat`, but used for
[derivations](@docroot@/glossary.md#store-derivation) serialized in store objects and
[`builtins.toFile`](@docroot@/language/builtins.html#builtins-toFile).
For advanced use-cases only;
for regular usage prefer `nar` and `flat`.
)",
|
||||
.labels = {"content-address-method"},
|
||||
.handler = {[method](std::string s) {
|
||||
*method = ContentAddressMethod::parse(s);
|
||||
}},
|
||||
};
|
||||
}
|
||||
|
||||
}
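The helpers in `misc-store-flags.cc` centralise the `--hash-algo`, `--hash-format` and `--mode` flags shared by several store-related commands. A hedged sketch of how a command might wire them up via the existing `Args::addFlag` API (the command struct and flag spellings below are illustrative, not taken from this diff):

// Sketch only -- hypothetical command using the shared flag helpers.
#include "misc-store-flags.hh"

struct CmdHashSketch : Args
{
    HashAlgorithm hashAlgo = HashAlgorithm::SHA256;
    std::optional<HashFormat> hashFormat;
    FileIngestionMethod mode = FileIngestionMethod::Recursive;

    CmdHashSketch()
    {
        addFlag(flag::hashAlgo(&hashAlgo));              // --hash-algo md5|sha1|sha256|sha512
        addFlag(flag::hashFormatOpt("to", &hashFormat)); // --to base16|nix32|base64|sri
        addFlag(flag::fileIngestionMethod(&mode));       // --mode nar|flat
    }
};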
|
21
src/libcmd/misc-store-flags.hh
Normal file
@ -0,0 +1,21 @@
#include "args.hh"
#include "content-address.hh"

namespace nix::flag {

Args::Flag hashAlgo(std::string && longName, HashAlgorithm * ha);
static inline Args::Flag hashAlgo(HashAlgorithm * ha)
{
    return hashAlgo("hash-algo", ha);
}
Args::Flag hashAlgoOpt(std::string && longName, std::optional<HashAlgorithm> * oha);
Args::Flag hashFormatWithDefault(std::string && longName, HashFormat * hf);
Args::Flag hashFormatOpt(std::string && longName, std::optional<HashFormat> * ohf);
static inline Args::Flag hashAlgoOpt(std::optional<HashAlgorithm> * oha)
{
    return hashAlgoOpt("hash-algo", oha);
}
Args::Flag fileIngestionMethod(FileIngestionMethod * method);
Args::Flag contentAddressMethod(ContentAddressMethod * method);

}
@ -52,6 +52,27 @@ extern "C" {

namespace nix {

/**
 * Returned by `NixRepl::processLine`.
 */
enum class ProcessLineResult {
    /**
     * The user exited with `:quit`. The REPL should exit. The surrounding
     * program or evaluation (e.g., if the REPL was acting as the debugger)
     * should also exit.
     */
    Quit,
    /**
     * The user exited with `:continue`. The REPL should exit, but the program
     * should continue running.
     */
    Continue,
    /**
     * The user did not exit. The REPL should request another line of input.
     */
    PromptAgain,
};

struct NixRepl
|
||||
: AbstractNixRepl
|
||||
#if HAVE_BOEHMGC
|
||||
@ -75,13 +96,13 @@ struct NixRepl
|
||||
std::function<AnnotatedValues()> getValues);
|
||||
virtual ~NixRepl();
|
||||
|
||||
void mainLoop() override;
|
||||
ReplExitStatus mainLoop() override;
|
||||
void initEnv() override;
|
||||
|
||||
StringSet completePrefix(const std::string & prefix);
|
||||
bool getLine(std::string & input, const std::string & prompt);
|
||||
StorePath getDerivationPath(Value & v);
|
||||
bool processLine(std::string line);
|
||||
ProcessLineResult processLine(std::string line);
|
||||
|
||||
void loadFile(const Path & path);
|
||||
void loadFlake(const std::string & flakeRef);
|
||||
@ -101,7 +122,8 @@ struct NixRepl
|
||||
.ansiColors = true,
|
||||
.force = true,
|
||||
.derivationPaths = true,
|
||||
.maxDepth = maxDepth
|
||||
.maxDepth = maxDepth,
|
||||
.prettyIndent = 2
|
||||
});
|
||||
}
|
||||
};
|
||||
@ -232,7 +254,7 @@ static std::ostream & showDebugTrace(std::ostream & out, const PosTable & positi
|
||||
: positions[dt.expr.getPos() ? dt.expr.getPos() : noPos];
|
||||
|
||||
if (pos) {
|
||||
out << pos;
|
||||
out << *pos;
|
||||
if (auto loc = pos->getCodeLines()) {
|
||||
out << "\n";
|
||||
printCodeLines(out, "", *pos, *loc);
|
||||
@ -243,10 +265,19 @@ static std::ostream & showDebugTrace(std::ostream & out, const PosTable & positi
|
||||
return out;
|
||||
}
|
||||
|
||||
void NixRepl::mainLoop()
|
||||
static bool isFirstRepl = true;
|
||||
|
||||
ReplExitStatus NixRepl::mainLoop()
|
||||
{
|
||||
std::string error = ANSI_RED "error:" ANSI_NORMAL " ";
|
||||
notice("Welcome to Nix " + nixVersion + ". Type :? for help.\n");
|
||||
if (isFirstRepl) {
|
||||
std::string_view debuggerNotice = "";
|
||||
if (state->debugRepl) {
|
||||
debuggerNotice = " debugger";
|
||||
}
|
||||
notice("Nix %1%%2%\nType :? for help.", nixVersion, debuggerNotice);
|
||||
}
|
||||
|
||||
isFirstRepl = false;
|
||||
|
||||
loadFiles();
|
||||
|
||||
@ -277,15 +308,25 @@ void NixRepl::mainLoop()
|
||||
// When continuing input from previous lines, don't print a prompt, just align to the same
|
||||
// number of chars as the prompt.
|
||||
if (!getLine(input, input.empty() ? "nix-repl> " : " ")) {
|
||||
// ctrl-D should exit the debugger.
|
||||
// Ctrl-D should exit the debugger.
|
||||
state->debugStop = false;
|
||||
state->debugQuit = true;
|
||||
logger->cout("");
|
||||
break;
|
||||
// TODO: Should Ctrl-D exit just the current debugger session or
|
||||
// the entire program?
|
||||
return ReplExitStatus::QuitAll;
|
||||
}
|
||||
logger->resume();
|
||||
try {
|
||||
if (!removeWhitespace(input).empty() && !processLine(input)) return;
|
||||
switch (processLine(input)) {
|
||||
case ProcessLineResult::Quit:
|
||||
return ReplExitStatus::QuitAll;
|
||||
case ProcessLineResult::Continue:
|
||||
return ReplExitStatus::Continue;
|
||||
case ProcessLineResult::PromptAgain:
|
||||
break;
|
||||
default:
|
||||
abort();
|
||||
}
|
||||
} catch (ParseError & e) {
|
||||
if (e.msg().find("unexpected end of file") != std::string::npos) {
|
||||
// For parse errors on incomplete input, we continue waiting for the next line of
|
||||
@ -342,7 +383,6 @@ bool NixRepl::getLine(std::string & input, const std::string & prompt)
|
||||
};
|
||||
|
||||
setupSignals();
|
||||
Finally resetTerminal([&]() { rl_deprep_terminal(); });
|
||||
char * s = readline(prompt.c_str());
|
||||
Finally doFree([&]() { free(s); });
|
||||
restoreSignals();
|
||||
@ -422,8 +462,6 @@ StringSet NixRepl::completePrefix(const std::string & prefix)
|
||||
// Quietly ignore parse errors.
|
||||
} catch (EvalError & e) {
|
||||
// Quietly ignore evaluation errors.
|
||||
} catch (UndefinedVarError & e) {
|
||||
// Quietly ignore undefined variable errors.
|
||||
} catch (BadURL & e) {
|
||||
// Quietly ignore BadURL flake-related errors.
|
||||
}
|
||||
@ -475,10 +513,11 @@ void NixRepl::loadDebugTraceEnv(DebugTrace & dt)
|
||||
}
|
||||
}
|
||||
|
||||
bool NixRepl::processLine(std::string line)
|
||||
ProcessLineResult NixRepl::processLine(std::string line)
|
||||
{
|
||||
line = trim(line);
|
||||
if (line == "") return true;
|
||||
if (line.empty())
|
||||
return ProcessLineResult::PromptAgain;
|
||||
|
||||
_isInterrupted = false;
|
||||
|
||||
@ -573,13 +612,13 @@ bool NixRepl::processLine(std::string line)
|
||||
else if (state->debugRepl && (command == ":s" || command == ":step")) {
|
||||
// set flag to stop at next DebugTrace; exit repl.
|
||||
state->debugStop = true;
|
||||
return false;
|
||||
return ProcessLineResult::Continue;
|
||||
}
|
||||
|
||||
else if (state->debugRepl && (command == ":c" || command == ":continue")) {
|
||||
// set flag to run to next breakpoint or end of program; exit repl.
|
||||
state->debugStop = false;
|
||||
return false;
|
||||
return ProcessLineResult::Continue;
|
||||
}
|
||||
|
||||
else if (command == ":a" || command == ":add") {
|
||||
@ -722,8 +761,7 @@ bool NixRepl::processLine(std::string line)
|
||||
|
||||
else if (command == ":q" || command == ":quit") {
|
||||
state->debugStop = false;
|
||||
state->debugQuit = true;
|
||||
return false;
|
||||
return ProcessLineResult::Quit;
|
||||
}
|
||||
|
||||
else if (command == ":doc") {
|
||||
@ -784,7 +822,7 @@ bool NixRepl::processLine(std::string line)
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
return ProcessLineResult::PromptAgain;
|
||||
}
|
||||
|
||||
void NixRepl::loadFile(const Path & path)
|
||||
@ -890,7 +928,7 @@ void NixRepl::addVarToScope(const Symbol name, Value & v)
|
||||
|
||||
Expr * NixRepl::parseString(std::string s)
|
||||
{
|
||||
return state->parseExprFromString(std::move(s), state->rootPath(CanonPath::fromCwd()), staticEnv);
|
||||
return state->parseExprFromString(std::move(s), state->rootPath("."), staticEnv);
|
||||
}
|
||||
|
||||
|
||||
@ -915,7 +953,7 @@ std::unique_ptr<AbstractNixRepl> AbstractNixRepl::create(
|
||||
}
|
||||
|
||||
|
||||
void AbstractNixRepl::runSimple(
|
||||
ReplExitStatus AbstractNixRepl::runSimple(
|
||||
ref<EvalState> evalState,
|
||||
const ValMap & extraEnv)
|
||||
{
|
||||
@ -937,7 +975,7 @@ void AbstractNixRepl::runSimple(
|
||||
for (auto & [name, value] : extraEnv)
|
||||
repl->addVarToScope(repl->state->symbols.create(name), *value);
|
||||
|
||||
repl->mainLoop();
|
||||
return repl->mainLoop();
|
||||
}
|
||||
|
||||
}
|
||||
|
@ -28,13 +28,13 @@ struct AbstractNixRepl
|
||||
const SearchPath & searchPath, nix::ref<Store> store, ref<EvalState> state,
|
||||
std::function<AnnotatedValues()> getValues);
|
||||
|
||||
static void runSimple(
|
||||
static ReplExitStatus runSimple(
|
||||
ref<EvalState> evalState,
|
||||
const ValMap & extraEnv);
|
||||
|
||||
virtual void initEnv() = 0;
|
||||
|
||||
virtual void mainLoop() = 0;
|
||||
virtual ReplExitStatus mainLoop() = 0;
|
||||
};
|
||||
|
||||
}
|
||||
|
@ -65,10 +65,10 @@ std::pair<Value *, PosIdx> findAlongAttrPath(EvalState & state, const std::strin
|
||||
if (!attrIndex) {
|
||||
|
||||
if (v->type() != nAttrs)
|
||||
throw TypeError(
|
||||
state.error<TypeError>(
|
||||
"the expression selected by the selection path '%1%' should be a set but is %2%",
|
||||
attrPath,
|
||||
showType(*v));
|
||||
showType(*v)).debugThrow();
|
||||
if (attr.empty())
|
||||
throw Error("empty attribute name in selection path '%1%'", attrPath);
|
||||
|
||||
@ -88,10 +88,10 @@ std::pair<Value *, PosIdx> findAlongAttrPath(EvalState & state, const std::strin
|
||||
else {
|
||||
|
||||
if (!v->isList())
|
||||
throw TypeError(
|
||||
state.error<TypeError>(
|
||||
"the expression selected by the selection path '%1%' should be a list but is %2%",
|
||||
attrPath,
|
||||
showType(*v));
|
||||
showType(*v)).debugThrow();
|
||||
if (*attrIndex >= v->listSize())
|
||||
throw AttrPathNotFound("list index %1% in selection path '%2%' is out of range", *attrIndex, attrPath);
|
||||
|
||||
|
@ -491,7 +491,7 @@ std::shared_ptr<AttrCursor> AttrCursor::maybeGetAttr(Symbol name, bool forceErro
|
||||
if (forceErrors)
|
||||
debug("reevaluating failed cached attribute '%s'", getAttrPathStr(name));
|
||||
else
|
||||
throw CachedEvalError("cached failure of attribute '%s'", getAttrPathStr(name));
|
||||
throw CachedEvalError(root->state, "cached failure of attribute '%s'", getAttrPathStr(name));
|
||||
} else
|
||||
return std::make_shared<AttrCursor>(root,
|
||||
std::make_pair(shared_from_this(), name), nullptr, std::move(attr));
|
||||
@ -500,7 +500,7 @@ std::shared_ptr<AttrCursor> AttrCursor::maybeGetAttr(Symbol name, bool forceErro
|
||||
// evaluate to see whether 'name' exists
|
||||
} else
|
||||
return nullptr;
|
||||
//throw TypeError("'%s' is not an attribute set", getAttrPathStr());
|
||||
//error<TypeError>("'%s' is not an attribute set", getAttrPathStr()).debugThrow();
|
||||
}
|
||||
}
|
||||
|
||||
@ -508,7 +508,7 @@ std::shared_ptr<AttrCursor> AttrCursor::maybeGetAttr(Symbol name, bool forceErro
|
||||
|
||||
if (v.type() != nAttrs)
|
||||
return nullptr;
|
||||
//throw TypeError("'%s' is not an attribute set", getAttrPathStr());
|
||||
//error<TypeError>("'%s' is not an attribute set", getAttrPathStr()).debugThrow();
|
||||
|
||||
auto attr = v.attrs->get(name);
|
||||
|
||||
@ -574,14 +574,14 @@ std::string AttrCursor::getString()
|
||||
debug("using cached string attribute '%s'", getAttrPathStr());
|
||||
return s->first;
|
||||
} else
|
||||
root->state.error("'%s' is not a string", getAttrPathStr()).debugThrow<TypeError>();
|
||||
root->state.error<TypeError>("'%s' is not a string", getAttrPathStr()).debugThrow();
|
||||
}
|
||||
}
|
||||
|
||||
auto & v = forceValue();
|
||||
|
||||
if (v.type() != nString && v.type() != nPath)
|
||||
root->state.error("'%s' is not a string but %s", getAttrPathStr()).debugThrow<TypeError>();
|
||||
root->state.error<TypeError>("'%s' is not a string but %s", getAttrPathStr()).debugThrow();
|
||||
|
||||
return v.type() == nString ? v.c_str() : v.path().to_string();
|
||||
}
|
||||
@ -616,7 +616,7 @@ string_t AttrCursor::getStringWithContext()
|
||||
return *s;
|
||||
}
|
||||
} else
|
||||
root->state.error("'%s' is not a string", getAttrPathStr()).debugThrow<TypeError>();
|
||||
root->state.error<TypeError>("'%s' is not a string", getAttrPathStr()).debugThrow();
|
||||
}
|
||||
}
|
||||
|
||||
@ -630,7 +630,7 @@ string_t AttrCursor::getStringWithContext()
|
||||
else if (v.type() == nPath)
|
||||
return {v.path().to_string(), {}};
|
||||
else
|
||||
root->state.error("'%s' is not a string but %s", getAttrPathStr()).debugThrow<TypeError>();
|
||||
root->state.error<TypeError>("'%s' is not a string but %s", getAttrPathStr()).debugThrow();
|
||||
}
|
||||
|
||||
bool AttrCursor::getBool()
|
||||
@ -643,14 +643,14 @@ bool AttrCursor::getBool()
|
||||
debug("using cached Boolean attribute '%s'", getAttrPathStr());
|
||||
return *b;
|
||||
} else
|
||||
root->state.error("'%s' is not a Boolean", getAttrPathStr()).debugThrow<TypeError>();
|
||||
root->state.error<TypeError>("'%s' is not a Boolean", getAttrPathStr()).debugThrow();
|
||||
}
|
||||
}
|
||||
|
||||
auto & v = forceValue();
|
||||
|
||||
if (v.type() != nBool)
|
||||
root->state.error("'%s' is not a Boolean", getAttrPathStr()).debugThrow<TypeError>();
|
||||
root->state.error<TypeError>("'%s' is not a Boolean", getAttrPathStr()).debugThrow();
|
||||
|
||||
return v.boolean;
|
||||
}
|
||||
@ -665,14 +665,14 @@ NixInt AttrCursor::getInt()
|
||||
debug("using cached integer attribute '%s'", getAttrPathStr());
|
||||
return i->x;
|
||||
} else
|
||||
throw TypeError("'%s' is not an integer", getAttrPathStr());
|
||||
root->state.error<TypeError>("'%s' is not an integer", getAttrPathStr()).debugThrow();
|
||||
}
|
||||
}
|
||||
|
||||
auto & v = forceValue();
|
||||
|
||||
if (v.type() != nInt)
|
||||
throw TypeError("'%s' is not an integer", getAttrPathStr());
|
||||
root->state.error<TypeError>("'%s' is not an integer", getAttrPathStr()).debugThrow();
|
||||
|
||||
return v.integer;
|
||||
}
|
||||
@ -687,7 +687,7 @@ std::vector<std::string> AttrCursor::getListOfStrings()
|
||||
debug("using cached list of strings attribute '%s'", getAttrPathStr());
|
||||
return *l;
|
||||
} else
|
||||
throw TypeError("'%s' is not a list of strings", getAttrPathStr());
|
||||
root->state.error<TypeError>("'%s' is not a list of strings", getAttrPathStr()).debugThrow();
|
||||
}
|
||||
}
|
||||
|
||||
@ -697,7 +697,7 @@ std::vector<std::string> AttrCursor::getListOfStrings()
|
||||
root->state.forceValue(v, noPos);
|
||||
|
||||
if (v.type() != nList)
|
||||
throw TypeError("'%s' is not a list", getAttrPathStr());
|
||||
root->state.error<TypeError>("'%s' is not a list", getAttrPathStr()).debugThrow();
|
||||
|
||||
std::vector<std::string> res;
|
||||
|
||||
@ -720,14 +720,14 @@ std::vector<Symbol> AttrCursor::getAttrs()
|
||||
debug("using cached attrset attribute '%s'", getAttrPathStr());
|
||||
return *attrs;
|
||||
} else
|
||||
root->state.error("'%s' is not an attribute set", getAttrPathStr()).debugThrow<TypeError>();
|
||||
root->state.error<TypeError>("'%s' is not an attribute set", getAttrPathStr()).debugThrow();
|
||||
}
|
||||
}
|
||||
|
||||
auto & v = forceValue();
|
||||
|
||||
if (v.type() != nAttrs)
|
||||
root->state.error("'%s' is not an attribute set", getAttrPathStr()).debugThrow<TypeError>();
|
||||
root->state.error<TypeError>("'%s' is not an attribute set", getAttrPathStr()).debugThrow();
|
||||
|
||||
std::vector<Symbol> attrs;
|
||||
for (auto & attr : *getValue().attrs)
|
||||
|
105
src/libexpr/eval-error.cc
Normal file
@ -0,0 +1,105 @@
|
||||
#include "eval-error.hh"
|
||||
#include "eval.hh"
|
||||
#include "value.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
template<class T>
|
||||
EvalErrorBuilder<T> & EvalErrorBuilder<T>::withExitStatus(unsigned int exitStatus)
|
||||
{
|
||||
error.withExitStatus(exitStatus);
|
||||
return *this;
|
||||
}
|
||||
|
||||
template<class T>
|
||||
EvalErrorBuilder<T> & EvalErrorBuilder<T>::atPos(PosIdx pos)
|
||||
{
|
||||
error.err.pos = error.state.positions[pos];
|
||||
return *this;
|
||||
}
|
||||
|
||||
template<class T>
|
||||
EvalErrorBuilder<T> & EvalErrorBuilder<T>::atPos(Value & value, PosIdx fallback)
|
||||
{
|
||||
return atPos(value.determinePos(fallback));
|
||||
}
|
||||
|
||||
template<class T>
|
||||
EvalErrorBuilder<T> & EvalErrorBuilder<T>::withTrace(PosIdx pos, const std::string_view text)
|
||||
{
|
||||
error.err.traces.push_front(
|
||||
Trace{.pos = error.state.positions[pos], .hint = HintFmt(std::string(text))});
|
||||
return *this;
|
||||
}
|
||||
|
||||
template<class T>
|
||||
EvalErrorBuilder<T> & EvalErrorBuilder<T>::withSuggestions(Suggestions & s)
|
||||
{
|
||||
error.err.suggestions = s;
|
||||
return *this;
|
||||
}
|
||||
|
||||
template<class T>
|
||||
EvalErrorBuilder<T> & EvalErrorBuilder<T>::withFrame(const Env & env, const Expr & expr)
|
||||
{
|
||||
// NOTE: This is abusing side-effects.
|
||||
// TODO: check compatibility with nested debugger calls.
|
||||
// TODO: What side-effects??
|
||||
error.state.debugTraces.push_front(DebugTrace{
|
||||
.pos = error.state.positions[expr.getPos()],
|
||||
.expr = expr,
|
||||
.env = env,
|
||||
.hint = HintFmt("Fake frame for debugging purposes"),
|
||||
.isError = true});
|
||||
return *this;
|
||||
}
|
||||
|
||||
template<class T>
|
||||
EvalErrorBuilder<T> & EvalErrorBuilder<T>::addTrace(PosIdx pos, HintFmt hint)
|
||||
{
|
||||
error.addTrace(error.state.positions[pos], hint);
|
||||
return *this;
|
||||
}
|
||||
|
||||
template<class T>
|
||||
template<typename... Args>
|
||||
EvalErrorBuilder<T> &
|
||||
EvalErrorBuilder<T>::addTrace(PosIdx pos, std::string_view formatString, const Args &... formatArgs)
|
||||
{
|
||||
|
||||
addTrace(error.state.positions[pos], HintFmt(std::string(formatString), formatArgs...));
|
||||
return *this;
|
||||
}
|
||||
|
||||
template<class T>
|
||||
void EvalErrorBuilder<T>::debugThrow()
|
||||
{
|
||||
if (error.state.debugRepl && !error.state.debugTraces.empty()) {
|
||||
const DebugTrace & last = error.state.debugTraces.front();
|
||||
const Env * env = &last.env;
|
||||
const Expr * expr = &last.expr;
|
||||
error.state.runDebugRepl(&error, *env, *expr);
|
||||
}
|
||||
|
||||
// `EvalState` is the only class that can construct an `EvalErrorBuilder`,
|
||||
// and it does so in dynamic storage. This is the final method called on
|
||||
// any such instance and must delete itself before throwing the underlying
|
||||
// error.
|
||||
auto error = std::move(this->error);
|
||||
delete this;
|
||||
|
||||
throw error;
|
||||
}
|
||||
|
||||
template class EvalErrorBuilder<EvalError>;
|
||||
template class EvalErrorBuilder<AssertionError>;
|
||||
template class EvalErrorBuilder<ThrownError>;
|
||||
template class EvalErrorBuilder<Abort>;
|
||||
template class EvalErrorBuilder<TypeError>;
|
||||
template class EvalErrorBuilder<UndefinedVarError>;
|
||||
template class EvalErrorBuilder<MissingArgumentError>;
|
||||
template class EvalErrorBuilder<InfiniteRecursionError>;
|
||||
template class EvalErrorBuilder<CachedEvalError>;
|
||||
template class EvalErrorBuilder<InvalidPathError>;
|
||||
|
||||
}
|
104
src/libexpr/eval-error.hh
Normal file
@ -0,0 +1,104 @@
|
||||
#pragma once
|
||||
|
||||
#include <algorithm>
|
||||
|
||||
#include "error.hh"
|
||||
#include "pos-idx.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
struct Env;
|
||||
struct Expr;
|
||||
struct Value;
|
||||
|
||||
class EvalState;
|
||||
template<class T>
|
||||
class EvalErrorBuilder;
|
||||
|
||||
class EvalError : public Error
|
||||
{
|
||||
template<class T>
|
||||
friend class EvalErrorBuilder;
|
||||
public:
|
||||
EvalState & state;
|
||||
|
||||
EvalError(EvalState & state, ErrorInfo && errorInfo)
|
||||
: Error(errorInfo)
|
||||
, state(state)
|
||||
{
|
||||
}
|
||||
|
||||
template<typename... Args>
|
||||
explicit EvalError(EvalState & state, const std::string & formatString, const Args &... formatArgs)
|
||||
: Error(formatString, formatArgs...)
|
||||
, state(state)
|
||||
{
|
||||
}
|
||||
};
|
||||
|
||||
MakeError(ParseError, Error);
|
||||
MakeError(AssertionError, EvalError);
|
||||
MakeError(ThrownError, AssertionError);
|
||||
MakeError(Abort, EvalError);
|
||||
MakeError(TypeError, EvalError);
|
||||
MakeError(UndefinedVarError, EvalError);
|
||||
MakeError(MissingArgumentError, EvalError);
|
||||
MakeError(CachedEvalError, EvalError);
|
||||
MakeError(InfiniteRecursionError, EvalError);
|
||||
|
||||
struct InvalidPathError : public EvalError
|
||||
{
|
||||
public:
|
||||
Path path;
|
||||
InvalidPathError(EvalState & state, const Path & path)
|
||||
: EvalError(state, "path '%s' is not valid", path)
|
||||
{
|
||||
}
|
||||
};
|
||||
|
||||
/**
 * `EvalErrorBuilder`s may only be constructed by `EvalState`. The `debugThrow`
 * method must be the final method in any such `EvalErrorBuilder` usage, and it
 * handles deleting the object.
 */
template<class T>
class EvalErrorBuilder final
{
    friend class EvalState;
|
||||
template<typename... Args>
|
||||
explicit EvalErrorBuilder(EvalState & state, const Args &... args)
|
||||
: error(T(state, args...))
|
||||
{
|
||||
}
|
||||
|
||||
public:
|
||||
T error;
|
||||
|
||||
[[nodiscard, gnu::noinline]] EvalErrorBuilder<T> & withExitStatus(unsigned int exitStatus);
|
||||
|
||||
[[nodiscard, gnu::noinline]] EvalErrorBuilder<T> & atPos(PosIdx pos);
|
||||
|
||||
[[nodiscard, gnu::noinline]] EvalErrorBuilder<T> & atPos(Value & value, PosIdx fallback = noPos);
|
||||
|
||||
[[nodiscard, gnu::noinline]] EvalErrorBuilder<T> & withTrace(PosIdx pos, const std::string_view text);
|
||||
|
||||
[[nodiscard, gnu::noinline]] EvalErrorBuilder<T> & withFrameTrace(PosIdx pos, const std::string_view text);
|
||||
|
||||
[[nodiscard, gnu::noinline]] EvalErrorBuilder<T> & withSuggestions(Suggestions & s);
|
||||
|
||||
[[nodiscard, gnu::noinline]] EvalErrorBuilder<T> & withFrame(const Env & e, const Expr & ex);
|
||||
|
||||
[[nodiscard, gnu::noinline]] EvalErrorBuilder<T> & addTrace(PosIdx pos, HintFmt hint);
|
||||
|
||||
template<typename... Args>
|
||||
[[nodiscard, gnu::noinline]] EvalErrorBuilder<T> &
|
||||
addTrace(PosIdx pos, std::string_view formatString, const Args &... formatArgs);
|
||||
|
||||
/**
|
||||
* Delete the `EvalErrorBuilder` and throw the underlying exception.
|
||||
*/
|
||||
[[gnu::noinline, gnu::noreturn]] void debugThrow();
|
||||
};
|
||||
|
||||
}
|
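The new `error<T>(...)` builder returned by `EvalState` is meant to be used fluently, with `debugThrow()` as the mandatory final call (it enters the debugger when one is attached, deletes the heap-allocated builder, and then throws). A sketch of the call pattern, matching the call sites converted elsewhere in this diff (`state`, `v`, `pos` and the message are stand-ins):

// Sketch only -- typical use of the EvalErrorBuilder API inside evaluator code.
state.error<TypeError>(
    "expected a set but found %1%: %2%",
    showType(v),
    ValuePrinter(state, v, errorPrintOptions)
).atPos(pos).withTrace(pos, "while evaluating the argument").debugThrow();
// debugThrow() never returns: it throws TypeError (or hands control to the
// debugger first, if --debugger is active and a debug trace is available).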
@ -3,6 +3,7 @@
|
||||
|
||||
#include "print.hh"
|
||||
#include "eval.hh"
|
||||
#include "eval-error.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
@ -115,10 +116,11 @@ inline void EvalState::forceAttrs(Value & v, Callable getPos, std::string_view e
|
||||
PosIdx pos = getPos();
|
||||
forceValue(v, pos);
|
||||
if (v.type() != nAttrs) {
|
||||
error("expected a set but found %1%: %2%",
|
||||
showType(v),
|
||||
ValuePrinter(*this, v, errorPrintOptions))
|
||||
.withTrace(pos, errorCtx).debugThrow<TypeError>();
|
||||
error<TypeError>(
|
||||
"expected a set but found %1%: %2%",
|
||||
showType(v),
|
||||
ValuePrinter(*this, v, errorPrintOptions)
|
||||
).withTrace(pos, errorCtx).debugThrow();
|
||||
}
|
||||
}
|
||||
|
||||
@ -128,10 +130,11 @@ inline void EvalState::forceList(Value & v, const PosIdx pos, std::string_view e
|
||||
{
|
||||
forceValue(v, pos);
|
||||
if (!v.isList()) {
|
||||
error("expected a list but found %1%: %2%",
|
||||
showType(v),
|
||||
ValuePrinter(*this, v, errorPrintOptions))
|
||||
.withTrace(pos, errorCtx).debugThrow<TypeError>();
|
||||
error<TypeError>(
|
||||
"expected a list but found %1%: %2%",
|
||||
showType(v),
|
||||
ValuePrinter(*this, v, errorPrintOptions)
|
||||
).withTrace(pos, errorCtx).debugThrow();
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -127,6 +127,16 @@ struct EvalSettings : Config
|
||||
|
||||
Setting<unsigned int> maxCallDepth{this, 10000, "max-call-depth",
|
||||
"The maximum function call depth to allow before erroring."};
|
||||
|
||||
Setting<bool> builtinsTraceDebugger{this, false, "debugger-on-trace",
|
||||
R"(
|
||||
If set to true and the `--debugger` flag is given,
|
||||
[`builtins.trace`](@docroot@/language/builtins.md#builtins-trace) will
|
||||
enter the debugger like
|
||||
[`builtins.break`](@docroot@/language/builtins.md#builtins-break).
|
||||
|
||||
This is useful for debugging warnings in third-party Nix code.
|
||||
)"};
|
||||
};
|
||||
|
||||
extern EvalSettings evalSettings;
|
||||
|
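The new `debugger-on-trace` setting turns every `builtins.trace` call into a debugger breakpoint when `--debugger` is active. A hedged sketch of the consuming side (the actual wiring lives in `primops.cc`, which is outside this excerpt; the call shape follows `runDebugRepl` as declared in this diff):

// Sketch only -- assumed shape of how builtins.trace consults the setting.
if (evalSettings.builtinsTraceDebugger && state.debugRepl)
    state.runDebugRepl(nullptr, env, expr);  // drop into the debugger, then resume tracing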
@ -3,6 +3,7 @@
|
||||
#include "hash.hh"
|
||||
#include "primops.hh"
|
||||
#include "print-options.hh"
|
||||
#include "shared.hh"
|
||||
#include "types.hh"
|
||||
#include "util.hh"
|
||||
#include "store-api.hh"
|
||||
@ -339,46 +340,6 @@ void initGC()
|
||||
gcInitialised = true;
|
||||
}
|
||||
|
||||
|
||||
ErrorBuilder & ErrorBuilder::atPos(PosIdx pos)
|
||||
{
|
||||
info.errPos = state.positions[pos];
|
||||
return *this;
|
||||
}
|
||||
|
||||
ErrorBuilder & ErrorBuilder::withTrace(PosIdx pos, const std::string_view text)
|
||||
{
|
||||
info.traces.push_front(Trace{ .pos = state.positions[pos], .hint = hintformat(std::string(text)), .frame = false });
|
||||
return *this;
|
||||
}
|
||||
|
||||
ErrorBuilder & ErrorBuilder::withFrameTrace(PosIdx pos, const std::string_view text)
|
||||
{
|
||||
info.traces.push_front(Trace{ .pos = state.positions[pos], .hint = hintformat(std::string(text)), .frame = true });
|
||||
return *this;
|
||||
}
|
||||
|
||||
ErrorBuilder & ErrorBuilder::withSuggestions(Suggestions & s)
|
||||
{
|
||||
info.suggestions = s;
|
||||
return *this;
|
||||
}
|
||||
|
||||
ErrorBuilder & ErrorBuilder::withFrame(const Env & env, const Expr & expr)
|
||||
{
|
||||
// NOTE: This is abusing side-effects.
|
||||
// TODO: check compatibility with nested debugger calls.
|
||||
state.debugTraces.push_front(DebugTrace {
|
||||
.pos = nullptr,
|
||||
.expr = expr,
|
||||
.env = env,
|
||||
.hint = hintformat("Fake frame for debugging purposes"),
|
||||
.isError = true
|
||||
});
|
||||
return *this;
|
||||
}
|
||||
|
||||
|
||||
EvalState::EvalState(
|
||||
const SearchPath & _searchPath,
|
||||
ref<Store> store,
|
||||
@ -434,14 +395,14 @@ EvalState::EvalState(
|
||||
, emptyBindings(0)
|
||||
, rootFS(
|
||||
evalSettings.restrictEval || evalSettings.pureEval
|
||||
? ref<InputAccessor>(AllowListInputAccessor::create(makeFSInputAccessor(CanonPath::root), {},
|
||||
? ref<InputAccessor>(AllowListInputAccessor::create(makeFSInputAccessor(), {},
|
||||
[](const CanonPath & path) -> RestrictedPathError {
|
||||
auto modeInformation = evalSettings.pureEval
|
||||
? "in pure evaluation mode (use '--impure' to override)"
|
||||
: "in restricted mode";
|
||||
throw RestrictedPathError("access to absolute path '%1%' is forbidden %2%", path, modeInformation);
|
||||
}))
|
||||
: makeFSInputAccessor(CanonPath::root))
|
||||
: makeFSInputAccessor())
|
||||
, corepkgsFS(makeMemoryInputAccessor())
|
||||
, internalFS(makeMemoryInputAccessor())
|
||||
, derivationInternal{corepkgsFS->addFile(
|
||||
@ -456,7 +417,6 @@ EvalState::EvalState(
|
||||
, buildStore(buildStore ? buildStore : store)
|
||||
, debugRepl(nullptr)
|
||||
, debugStop(false)
|
||||
, debugQuit(false)
|
||||
, trylevel(0)
|
||||
, regexCache(makeRegexCache())
|
||||
#if HAVE_BOEHMGC
|
||||
@ -507,13 +467,13 @@ EvalState::~EvalState()
|
||||
void EvalState::allowPath(const Path & path)
|
||||
{
|
||||
if (auto rootFS2 = rootFS.dynamic_pointer_cast<AllowListInputAccessor>())
|
||||
rootFS2->allowPath(CanonPath(path));
|
||||
rootFS2->allowPrefix(CanonPath(path));
|
||||
}
|
||||
|
||||
void EvalState::allowPath(const StorePath & storePath)
|
||||
{
|
||||
if (auto rootFS2 = rootFS.dynamic_pointer_cast<AllowListInputAccessor>())
|
||||
rootFS2->allowPath(CanonPath(store->toRealPath(storePath)));
|
||||
rootFS2->allowPrefix(CanonPath(store->toRealPath(storePath)));
|
||||
}
|
||||
|
||||
void EvalState::allowAndSetStorePathString(const StorePath & storePath, Value & v)
|
||||
@ -744,7 +704,8 @@ void printEnvBindings(const SymbolTable & st, const StaticEnv & se, const Env &
|
||||
if (se.up && env.up) {
|
||||
std::cout << "static: ";
|
||||
printStaticEnvBindings(st, se);
|
||||
printWithBindings(st, env);
|
||||
if (se.isWith)
|
||||
printWithBindings(st, env);
|
||||
std::cout << std::endl;
|
||||
printEnvBindings(st, *se.up, *env.up, ++lvl);
|
||||
} else {
|
||||
@ -756,7 +717,8 @@ void printEnvBindings(const SymbolTable & st, const StaticEnv & se, const Env &
|
||||
std::cout << st[i.first] << " ";
|
||||
std::cout << ANSI_NORMAL;
|
||||
std::cout << std::endl;
|
||||
printWithBindings(st, env); // probably nothing there for the top level.
|
||||
if (se.isWith)
|
||||
printWithBindings(st, env); // probably nothing there for the top level.
|
||||
std::cout << std::endl;
|
||||
|
||||
}
|
||||
@ -778,7 +740,7 @@ void mapStaticEnvBindings(const SymbolTable & st, const StaticEnv & se, const En
|
||||
if (env.up && se.up) {
|
||||
mapStaticEnvBindings(st, *se.up, *env.up, vm);
|
||||
|
||||
if (!env.values[0]->isThunk()) {
|
||||
if (se.isWith && !env.values[0]->isThunk()) {
|
||||
// add 'with' bindings.
|
||||
Bindings::iterator j = env.values[0]->attrs->begin();
|
||||
while (j != env.values[0]->attrs->end()) {
|
||||
@ -811,7 +773,7 @@ void EvalState::runDebugRepl(const Error * error, const Env & env, const Expr &
|
||||
? std::make_unique<DebugTraceStacker>(
|
||||
*this,
|
||||
DebugTrace {
|
||||
.pos = error->info().errPos ? error->info().errPos : positions[expr.getPos()],
|
||||
.pos = error->info().pos ? error->info().pos : positions[expr.getPos()],
|
||||
.expr = expr,
|
||||
.env = env,
|
||||
.hint = error->info().msg,
|
||||
@ -821,45 +783,55 @@ void EvalState::runDebugRepl(const Error * error, const Env & env, const Expr &
|
||||
|
||||
if (error)
|
||||
{
|
||||
printError("%s\n\n", error->what());
|
||||
printError("%s\n", error->what());
|
||||
|
||||
if (trylevel > 0 && error->info().level != lvlInfo)
|
||||
printError("This exception occurred in a 'tryEval' call. Use " ANSI_GREEN "--ignore-try" ANSI_NORMAL " to skip these.\n");
|
||||
|
||||
printError(ANSI_BOLD "Starting REPL to allow you to inspect the current state of the evaluator.\n" ANSI_NORMAL);
|
||||
}
|
||||
|
||||
auto se = getStaticEnv(expr);
|
||||
if (se) {
|
||||
auto vm = mapStaticEnvBindings(symbols, *se.get(), env);
|
||||
(debugRepl)(ref<EvalState>(shared_from_this()), *vm);
|
||||
auto exitStatus = (debugRepl)(ref<EvalState>(shared_from_this()), *vm);
|
||||
switch (exitStatus) {
|
||||
case ReplExitStatus::QuitAll:
|
||||
if (error)
|
||||
throw *error;
|
||||
throw Exit(0);
|
||||
case ReplExitStatus::Continue:
|
||||
break;
|
||||
default:
|
||||
abort();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void EvalState::addErrorTrace(Error & e, const char * s, const std::string & s2) const
|
||||
template<typename... Args>
|
||||
void EvalState::addErrorTrace(Error & e, const Args & ... formatArgs) const
|
||||
{
|
||||
e.addTrace(nullptr, s, s2);
|
||||
e.addTrace(nullptr, HintFmt(formatArgs...));
|
||||
}
|
||||
|
||||
void EvalState::addErrorTrace(Error & e, const PosIdx pos, const char * s, const std::string & s2, bool frame) const
|
||||
template<typename... Args>
|
||||
void EvalState::addErrorTrace(Error & e, const PosIdx pos, const Args & ... formatArgs) const
|
||||
{
|
||||
e.addTrace(positions[pos], hintfmt(s, s2), frame);
|
||||
e.addTrace(positions[pos], HintFmt(formatArgs...));
|
||||
}
|
||||
|
||||
template<typename... Args>
|
||||
static std::unique_ptr<DebugTraceStacker> makeDebugTraceStacker(
|
||||
EvalState & state,
|
||||
Expr & expr,
|
||||
Env & env,
|
||||
std::shared_ptr<Pos> && pos,
|
||||
const char * s,
|
||||
const std::string & s2)
|
||||
const Args & ... formatArgs)
|
||||
{
|
||||
return std::make_unique<DebugTraceStacker>(state,
|
||||
DebugTrace {
|
||||
.pos = std::move(pos),
|
||||
.expr = expr,
|
||||
.env = env,
|
||||
.hint = hintfmt(s, s2),
|
||||
.hint = HintFmt(formatArgs...),
|
||||
.isError = false
|
||||
});
|
||||
}
|
||||
@ -930,7 +902,7 @@ inline Value * EvalState::lookupVar(Env * env, const ExprVar & var, bool noEval)
|
||||
return j->value;
|
||||
}
|
||||
if (!fromWith->parentWith)
|
||||
error("undefined variable '%1%'", symbols[var.name]).atPos(var.pos).withFrame(*env, var).debugThrow<UndefinedVarError>();
|
||||
error<UndefinedVarError>("undefined variable '%1%'", symbols[var.name]).atPos(var.pos).withFrame(*env, var).debugThrow();
|
||||
for (size_t l = fromWith->prevWith; l; --l, env = env->up) ;
|
||||
fromWith = fromWith->parentWith;
|
||||
}
|
||||
@ -1136,7 +1108,7 @@ void EvalState::evalFile(const SourcePath & path, Value & v, bool mustBeTrivial)
|
||||
// computation.
|
||||
if (mustBeTrivial &&
|
||||
!(dynamic_cast<ExprAttrs *>(e)))
|
||||
error("file '%s' must be an attribute set", path).debugThrow<EvalError>();
|
||||
error<EvalError>("file '%s' must be an attribute set", path).debugThrow();
|
||||
eval(e, v);
|
||||
} catch (Error & e) {
|
||||
addErrorTrace(e, "while evaluating the file '%1%':", resolvedPath.to_string());
|
||||
@ -1167,10 +1139,11 @@ inline bool EvalState::evalBool(Env & env, Expr * e, const PosIdx pos, std::stri
|
||||
Value v;
|
||||
e->eval(*this, env, v);
|
||||
if (v.type() != nBool)
|
||||
error("expected a Boolean but found %1%: %2%",
|
||||
showType(v),
|
||||
ValuePrinter(*this, v, errorPrintOptions))
|
||||
.withFrame(env, *e).debugThrow<TypeError>();
|
||||
error<TypeError>(
|
||||
"expected a Boolean but found %1%: %2%",
|
||||
showType(v),
|
||||
ValuePrinter(*this, v, errorPrintOptions)
|
||||
).atPos(pos).withFrame(env, *e).debugThrow();
|
||||
return v.boolean;
|
||||
} catch (Error & e) {
|
||||
e.addTrace(positions[pos], errorCtx);
|
||||
@ -1184,10 +1157,11 @@ inline void EvalState::evalAttrs(Env & env, Expr * e, Value & v, const PosIdx po
|
||||
try {
|
||||
e->eval(*this, env, v);
|
||||
if (v.type() != nAttrs)
|
||||
error("expected a set but found %1%: %2%",
|
||||
showType(v),
|
||||
ValuePrinter(*this, v, errorPrintOptions))
|
||||
.withFrame(env, *e).debugThrow<TypeError>();
|
||||
error<TypeError>(
|
||||
"expected a set but found %1%: %2%",
|
||||
showType(v),
|
||||
ValuePrinter(*this, v, errorPrintOptions)
|
||||
).withFrame(env, *e).debugThrow();
|
||||
} catch (Error & e) {
|
||||
e.addTrace(positions[pos], errorCtx);
|
||||
throw;
|
||||
@ -1224,6 +1198,18 @@ void ExprPath::eval(EvalState & state, Env & env, Value & v)
|
||||
}
|
||||
|
||||
|
||||
Env * ExprAttrs::buildInheritFromEnv(EvalState & state, Env & up)
|
||||
{
|
||||
Env & inheritEnv = state.allocEnv(inheritFromExprs->size());
|
||||
inheritEnv.up = &up;
|
||||
|
||||
Displacement displ = 0;
|
||||
for (auto from : *inheritFromExprs)
|
||||
inheritEnv.values[displ++] = from->maybeThunk(state, up);
|
||||
|
||||
return &inheritEnv;
|
||||
}
|
||||
|
||||
void ExprAttrs::eval(EvalState & state, Env & env, Value & v)
|
||||
{
|
||||
v.mkAttrs(state.buildBindings(attrs.size() + dynamicAttrs.size()).finish());
|
||||
@ -1235,6 +1221,7 @@ void ExprAttrs::eval(EvalState & state, Env & env, Value & v)
|
||||
Env & env2(state.allocEnv(attrs.size()));
|
||||
env2.up = &env;
|
||||
dynamicEnv = &env2;
|
||||
Env * inheritEnv = inheritFromExprs ? buildInheritFromEnv(state, env2) : nullptr;
|
||||
|
||||
AttrDefs::iterator overrides = attrs.find(state.sOverrides);
|
||||
bool hasOverrides = overrides != attrs.end();
|
||||
@ -1245,11 +1232,11 @@ void ExprAttrs::eval(EvalState & state, Env & env, Value & v)
|
||||
Displacement displ = 0;
|
||||
for (auto & i : attrs) {
|
||||
Value * vAttr;
|
||||
if (hasOverrides && !i.second.inherited) {
|
||||
if (hasOverrides && i.second.kind != AttrDef::Kind::Inherited) {
|
||||
vAttr = state.allocValue();
|
||||
mkThunk(*vAttr, env2, i.second.e);
|
||||
mkThunk(*vAttr, *i.second.chooseByKind(&env2, &env, inheritEnv), i.second.e);
|
||||
} else
|
||||
vAttr = i.second.e->maybeThunk(state, i.second.inherited ? env : env2);
|
||||
vAttr = i.second.e->maybeThunk(state, *i.second.chooseByKind(&env2, &env, inheritEnv));
|
||||
env2.values[displ++] = vAttr;
|
||||
v.attrs->push_back(Attr(i.first, vAttr, i.second.pos));
|
||||
}
|
||||
@ -1281,9 +1268,15 @@ void ExprAttrs::eval(EvalState & state, Env & env, Value & v)
|
||||
}
|
||||
}
|
||||
|
||||
else
|
||||
for (auto & i : attrs)
|
||||
v.attrs->push_back(Attr(i.first, i.second.e->maybeThunk(state, env), i.second.pos));
|
||||
else {
|
||||
Env * inheritEnv = inheritFromExprs ? buildInheritFromEnv(state, env) : nullptr;
|
||||
for (auto & i : attrs) {
|
||||
v.attrs->push_back(Attr(
|
||||
i.first,
|
||||
i.second.e->maybeThunk(state, *i.second.chooseByKind(&env, &env, inheritEnv)),
|
||||
i.second.pos));
|
||||
}
|
||||
}
|
||||
|
||||
/* Dynamic attrs apply *after* rec and __overrides. */
|
||||
for (auto & i : dynamicAttrs) {
|
||||
@ -1296,7 +1289,7 @@ void ExprAttrs::eval(EvalState & state, Env & env, Value & v)
|
||||
auto nameSym = state.symbols.create(nameVal.string_view());
|
||||
Bindings::iterator j = v.attrs->find(nameSym);
|
||||
if (j != v.attrs->end())
|
||||
state.error("dynamic attribute '%1%' already defined at %2%", state.symbols[nameSym], state.positions[j->pos]).atPos(i.pos).withFrame(env, *this).debugThrow<EvalError>();
|
||||
state.error<EvalError>("dynamic attribute '%1%' already defined at %2%", state.symbols[nameSym], state.positions[j->pos]).atPos(i.pos).withFrame(env, *this).debugThrow();
|
||||
|
||||
i.valueExpr->setName(nameSym);
|
||||
/* Keep sorted order so find can catch duplicates */
|
||||
@ -1315,12 +1308,30 @@ void ExprLet::eval(EvalState & state, Env & env, Value & v)
|
||||
Env & env2(state.allocEnv(attrs->attrs.size()));
|
||||
env2.up = &env;
|
||||
|
||||
Env * inheritEnv = attrs->inheritFromExprs ? attrs->buildInheritFromEnv(state, env2) : nullptr;
|
||||
|
||||
/* The recursive attributes are evaluated in the new environment,
|
||||
while the inherited attributes are evaluated in the original
|
||||
environment. */
|
||||
Displacement displ = 0;
|
||||
for (auto & i : attrs->attrs)
|
||||
env2.values[displ++] = i.second.e->maybeThunk(state, i.second.inherited ? env : env2);
|
||||
for (auto & i : attrs->attrs) {
|
||||
env2.values[displ++] = i.second.e->maybeThunk(
|
||||
state,
|
||||
*i.second.chooseByKind(&env2, &env, inheritEnv));
|
||||
}
|
||||
|
||||
auto dts = state.debugRepl
|
||||
? makeDebugTraceStacker(
|
||||
state,
|
||||
*this,
|
||||
env2,
|
||||
getPos()
|
||||
? std::make_shared<Pos>(state.positions[getPos()])
|
||||
: nullptr,
|
||||
"while evaluating a '%1%' expression",
|
||||
"let"
|
||||
)
|
||||
: nullptr;
|
||||
|
||||
body->eval(state, env2, v);
|
||||
}
|
||||
@ -1384,7 +1395,7 @@ void ExprSelect::eval(EvalState & state, Env & env, Value & v)
|
||||
state,
|
||||
*this,
|
||||
env,
|
||||
state.positions[pos2],
|
||||
state.positions[getPos()],
|
||||
"while evaluating the attribute '%1%'",
|
||||
showAttrPath(state, env, attrPath))
|
||||
: nullptr;
|
||||
@ -1408,8 +1419,8 @@ void ExprSelect::eval(EvalState & state, Env & env, Value & v)
|
||||
for (auto & attr : *vAttrs->attrs)
|
||||
allAttrNames.insert(state.symbols[attr.name]);
|
||||
auto suggestions = Suggestions::bestMatches(allAttrNames, state.symbols[name]);
|
||||
state.error("attribute '%1%' missing", state.symbols[name])
|
||||
.atPos(pos).withSuggestions(suggestions).withFrame(env, *this).debugThrow<EvalError>();
|
||||
state.error<EvalError>("attribute '%1%' missing", state.symbols[name])
|
||||
.atPos(pos).withSuggestions(suggestions).withFrame(env, *this).debugThrow();
|
||||
}
|
||||
}
|
||||
vAttrs = j->value;
|
||||
@ -1482,7 +1493,7 @@ public:
|
||||
void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value & vRes, const PosIdx pos)
|
||||
{
|
||||
if (callDepth > evalSettings.maxCallDepth)
|
||||
error("stack overflow; max-call-depth exceeded").atPos(pos).template debugThrow<EvalError>();
|
||||
error<EvalError>("stack overflow; max-call-depth exceeded").atPos(pos).debugThrow();
|
||||
CallDepth _level(callDepth);
|
||||
|
||||
auto trace = evalSettings.traceFunctionCalls
|
||||
@ -1540,13 +1551,13 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value &
|
||||
auto j = args[0]->attrs->get(i.name);
|
||||
if (!j) {
|
||||
if (!i.def) {
|
||||
error("function '%1%' called without required argument '%2%'",
|
||||
error<TypeError>("function '%1%' called without required argument '%2%'",
|
||||
(lambda.name ? std::string(symbols[lambda.name]) : "anonymous lambda"),
|
||||
symbols[i.name])
|
||||
.atPos(lambda.pos)
|
||||
.withTrace(pos, "from call site")
|
||||
.withFrame(*fun.lambda.env, lambda)
|
||||
.debugThrow<TypeError>();
|
||||
.debugThrow();
|
||||
}
|
||||
env2.values[displ++] = i.def->maybeThunk(*this, env2);
|
||||
} else {
|
||||
@ -1566,14 +1577,14 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value &
|
||||
for (auto & formal : lambda.formals->formals)
|
||||
formalNames.insert(symbols[formal.name]);
|
||||
auto suggestions = Suggestions::bestMatches(formalNames, symbols[i.name]);
|
||||
error("function '%1%' called with unexpected argument '%2%'",
|
||||
error<TypeError>("function '%1%' called with unexpected argument '%2%'",
|
||||
(lambda.name ? std::string(symbols[lambda.name]) : "anonymous lambda"),
|
||||
symbols[i.name])
|
||||
.atPos(lambda.pos)
|
||||
.withTrace(pos, "from call site")
|
||||
.withSuggestions(suggestions)
|
||||
.withFrame(*fun.lambda.env, lambda)
|
||||
.debugThrow<TypeError>();
|
||||
.debugThrow();
|
||||
}
|
||||
abort(); // can't happen
|
||||
}
|
||||
@ -1602,9 +1613,8 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value &
|
||||
"while calling %s",
|
||||
lambda.name
|
||||
? concatStrings("'", symbols[lambda.name], "'")
|
||||
: "anonymous lambda",
|
||||
true);
|
||||
if (pos) addErrorTrace(e, pos, "from call site%s", "", true);
|
||||
: "anonymous lambda");
|
||||
if (pos) addErrorTrace(e, pos, "from call site");
|
||||
}
|
||||
throw;
|
||||
}
|
||||
@ -1705,11 +1715,12 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value &
|
||||
}
|
||||
|
||||
else
|
||||
error("attempt to call something which is not a function but %1%: %2%",
|
||||
error<TypeError>(
|
||||
"attempt to call something which is not a function but %1%: %2%",
|
||||
showType(vCur),
|
||||
ValuePrinter(*this, vCur, errorPrintOptions))
|
||||
.atPos(pos)
|
||||
.debugThrow<TypeError>();
|
||||
.debugThrow();
|
||||
}
|
||||
|
||||
vRes = vCur;
|
||||
@ -1718,6 +1729,18 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value &
|
||||
|
||||
void ExprCall::eval(EvalState & state, Env & env, Value & v)
|
||||
{
|
||||
auto dts = state.debugRepl
|
||||
? makeDebugTraceStacker(
|
||||
state,
|
||||
*this,
|
||||
env,
|
||||
getPos()
|
||||
? std::make_shared<Pos>(state.positions[getPos()])
|
||||
: nullptr,
|
||||
"while calling a function"
|
||||
)
|
||||
: nullptr;
|
||||
|
||||
Value vFun;
|
||||
fun->eval(state, env, vFun);
|
||||
|
||||
@ -1779,12 +1802,12 @@ void EvalState::autoCallFunction(Bindings & args, Value & fun, Value & res)
|
||||
if (j != args.end()) {
|
||||
attrs.insert(*j);
|
||||
} else if (!i.def) {
|
||||
error(R"(cannot evaluate a function that has an argument without a value ('%1%')
|
||||
error<MissingArgumentError>(R"(cannot evaluate a function that has an argument without a value ('%1%')
|
||||
Nix attempted to evaluate a function as a top level expression; in
|
||||
this case it must have its arguments supplied either by default
|
||||
values, or passed explicitly with '--arg' or '--argstr'. See
|
||||
https://nixos.org/manual/nix/stable/language/constructs.html#functions.)", symbols[i.name])
|
||||
.atPos(i.pos).withFrame(*fun.lambda.env, *fun.lambda.fun).debugThrow<MissingArgumentError>();
|
||||
.atPos(i.pos).withFrame(*fun.lambda.env, *fun.lambda.fun).debugThrow();
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1815,7 +1838,7 @@ void ExprAssert::eval(EvalState & state, Env & env, Value & v)
|
||||
if (!state.evalBool(env, cond, pos, "in the condition of the assert statement")) {
|
||||
std::ostringstream out;
|
||||
cond->show(state.symbols, out);
|
||||
state.error("assertion '%1%' failed", out.str()).atPos(pos).withFrame(env, *this).debugThrow<AssertionError>();
|
||||
state.error<AssertionError>("assertion '%1%' failed", out.str()).atPos(pos).withFrame(env, *this).debugThrow();
|
||||
}
|
||||
body->eval(state, env, v);
|
||||
}
|
||||
@ -1993,14 +2016,14 @@ void ExprConcatStrings::eval(EvalState & state, Env & env, Value & v)
|
||||
nf = n;
|
||||
nf += vTmp.fpoint;
|
||||
} else
|
||||
state.error("cannot add %1% to an integer", showType(vTmp)).atPos(i_pos).withFrame(env, *this).debugThrow<EvalError>();
|
||||
state.error<EvalError>("cannot add %1% to an integer", showType(vTmp)).atPos(i_pos).withFrame(env, *this).debugThrow();
|
||||
} else if (firstType == nFloat) {
|
||||
if (vTmp.type() == nInt) {
|
||||
nf += vTmp.integer;
|
||||
} else if (vTmp.type() == nFloat) {
|
||||
nf += vTmp.fpoint;
|
||||
} else
|
||||
state.error("cannot add %1% to a float", showType(vTmp)).atPos(i_pos).withFrame(env, *this).debugThrow<EvalError>();
|
||||
state.error<EvalError>("cannot add %1% to a float", showType(vTmp)).atPos(i_pos).withFrame(env, *this).debugThrow();
|
||||
} else {
|
||||
if (s.empty()) s.reserve(es->size());
|
||||
/* skip canonization of first path, which would only be not
|
||||
@ -2022,7 +2045,7 @@ void ExprConcatStrings::eval(EvalState & state, Env & env, Value & v)
|
||||
v.mkFloat(nf);
|
||||
else if (firstType == nPath) {
|
||||
if (!context.empty())
|
||||
state.error("a string that refers to a store path cannot be appended to a path").atPos(pos).withFrame(env, *this).debugThrow<EvalError>();
|
||||
state.error<EvalError>("a string that refers to a store path cannot be appended to a path").atPos(pos).withFrame(env, *this).debugThrow();
|
||||
v.mkPath(state.rootPath(CanonPath(canonPath(str()))));
|
||||
} else
|
||||
v.mkStringMove(c_str(), context);
|
||||
@ -2037,8 +2060,9 @@ void ExprPos::eval(EvalState & state, Env & env, Value & v)
|
||||
|
||||
void ExprBlackHole::eval(EvalState & state, Env & env, Value & v)
|
||||
{
|
||||
state.error("infinite recursion encountered")
|
||||
.debugThrow<InfiniteRecursionError>();
|
||||
state.error<InfiniteRecursionError>("infinite recursion encountered")
|
||||
.atPos(v.determinePos(noPos))
|
||||
.debugThrow();
|
||||
}
|
||||
|
||||
// always force this to be separate, otherwise forceValue may inline it and take
|
||||
@ -2052,7 +2076,7 @@ void EvalState::tryFixupBlackHolePos(Value & v, PosIdx pos)
|
||||
try {
|
||||
std::rethrow_exception(e);
|
||||
} catch (InfiniteRecursionError & e) {
|
||||
e.err.errPos = positions[pos];
|
||||
e.atPos(positions[pos]);
|
||||
} catch (...) {
|
||||
}
|
||||
}
|
||||
@ -2100,15 +2124,18 @@ NixInt EvalState::forceInt(Value & v, const PosIdx pos, std::string_view errorCt
|
||||
try {
|
||||
forceValue(v, pos);
|
||||
if (v.type() != nInt)
|
||||
error("expected an integer but found %1%: %2%",
|
||||
showType(v),
|
||||
ValuePrinter(*this, v, errorPrintOptions))
|
||||
.debugThrow<TypeError>();
|
||||
error<TypeError>(
|
||||
"expected an integer but found %1%: %2%",
|
||||
showType(v),
|
||||
ValuePrinter(*this, v, errorPrintOptions)
|
||||
).atPos(pos).debugThrow();
|
||||
return v.integer;
|
||||
} catch (Error & e) {
|
||||
e.addTrace(positions[pos], errorCtx);
|
||||
throw;
|
||||
}
|
||||
|
||||
return v.integer;
|
||||
}
|
||||
|
||||
|
||||
@ -2119,10 +2146,11 @@ NixFloat EvalState::forceFloat(Value & v, const PosIdx pos, std::string_view err
|
||||
if (v.type() == nInt)
|
||||
return v.integer;
|
||||
else if (v.type() != nFloat)
|
||||
error("expected a float but found %1%: %2%",
|
||||
showType(v),
|
||||
ValuePrinter(*this, v, errorPrintOptions))
|
||||
.debugThrow<TypeError>();
|
||||
error<TypeError>(
|
||||
"expected a float but found %1%: %2%",
|
||||
showType(v),
|
||||
ValuePrinter(*this, v, errorPrintOptions)
|
||||
).atPos(pos).debugThrow();
|
||||
return v.fpoint;
|
||||
} catch (Error & e) {
|
||||
e.addTrace(positions[pos], errorCtx);
|
||||
@ -2136,15 +2164,18 @@ bool EvalState::forceBool(Value & v, const PosIdx pos, std::string_view errorCtx
|
||||
try {
|
||||
forceValue(v, pos);
|
||||
if (v.type() != nBool)
|
||||
error("expected a Boolean but found %1%: %2%",
|
||||
showType(v),
|
||||
ValuePrinter(*this, v, errorPrintOptions))
|
||||
.debugThrow<TypeError>();
|
||||
error<TypeError>(
|
||||
"expected a Boolean but found %1%: %2%",
|
||||
showType(v),
|
||||
ValuePrinter(*this, v, errorPrintOptions)
|
||||
).atPos(pos).debugThrow();
|
||||
return v.boolean;
|
||||
} catch (Error & e) {
|
||||
e.addTrace(positions[pos], errorCtx);
|
||||
throw;
|
||||
}
|
||||
|
||||
return v.boolean;
|
||||
}
|
||||
|
||||
|
||||
@ -2159,10 +2190,11 @@ void EvalState::forceFunction(Value & v, const PosIdx pos, std::string_view erro
|
||||
try {
|
||||
forceValue(v, pos);
|
||||
if (v.type() != nFunction && !isFunctor(v))
|
||||
error("expected a function but found %1%: %2%",
|
||||
showType(v),
|
||||
ValuePrinter(*this, v, errorPrintOptions))
|
||||
.debugThrow<TypeError>();
|
||||
error<TypeError>(
|
||||
"expected a function but found %1%: %2%",
|
||||
showType(v),
|
||||
ValuePrinter(*this, v, errorPrintOptions)
|
||||
).atPos(pos).debugThrow();
|
||||
} catch (Error & e) {
|
||||
e.addTrace(positions[pos], errorCtx);
|
||||
throw;
|
||||
@ -2175,10 +2207,11 @@ std::string_view EvalState::forceString(Value & v, const PosIdx pos, std::string
|
||||
try {
|
||||
forceValue(v, pos);
|
||||
if (v.type() != nString)
|
||||
error("expected a string but found %1%: %2%",
|
||||
showType(v),
|
||||
ValuePrinter(*this, v, errorPrintOptions))
|
||||
.debugThrow<TypeError>();
|
||||
error<TypeError>(
|
||||
"expected a string but found %1%: %2%",
|
||||
showType(v),
|
||||
ValuePrinter(*this, v, errorPrintOptions)
|
||||
).atPos(pos).debugThrow();
|
||||
return v.string_view();
|
||||
} catch (Error & e) {
|
||||
e.addTrace(positions[pos], errorCtx);
|
||||
@ -2207,7 +2240,7 @@ std::string_view EvalState::forceStringNoCtx(Value & v, const PosIdx pos, std::s
|
||||
{
|
||||
auto s = forceString(v, pos, errorCtx);
|
||||
if (v.context()) {
|
||||
error("the string '%1%' is not allowed to refer to a store path (such as '%2%')", v.string_view(), v.context()[0]).withTrace(pos, errorCtx).debugThrow<EvalError>();
|
||||
error<EvalError>("the string '%1%' is not allowed to refer to a store path (such as '%2%')", v.string_view(), v.context()[0]).withTrace(pos, errorCtx).debugThrow();
|
||||
}
|
||||
return s;
|
||||
}
|
||||
@ -2272,11 +2305,13 @@ BackedStringView EvalState::coerceToString(
|
||||
return std::move(*maybeString);
|
||||
auto i = v.attrs->find(sOutPath);
|
||||
if (i == v.attrs->end()) {
|
||||
error("cannot coerce %1% to a string: %2%",
|
||||
showType(v),
|
||||
ValuePrinter(*this, v, errorPrintOptions))
|
||||
error<TypeError>(
|
||||
"cannot coerce %1% to a string: %2%",
|
||||
showType(v),
|
||||
ValuePrinter(*this, v, errorPrintOptions)
|
||||
)
|
||||
.withTrace(pos, errorCtx)
|
||||
.debugThrow<TypeError>();
|
||||
.debugThrow();
|
||||
}
|
||||
return coerceToString(pos, *i->value, context, errorCtx,
|
||||
coerceMore, copyToStore, canonicalizePath);
|
||||
@ -2284,7 +2319,7 @@ BackedStringView EvalState::coerceToString(
|
||||
|
||||
if (v.type() == nExternal) {
|
||||
try {
|
||||
return v.external->coerceToString(positions[pos], context, coerceMore, copyToStore);
|
||||
return v.external->coerceToString(*this, pos, context, coerceMore, copyToStore);
|
||||
} catch (Error & e) {
|
||||
e.addTrace(nullptr, errorCtx);
|
||||
throw;
|
||||
@ -2320,25 +2355,33 @@ BackedStringView EvalState::coerceToString(
|
||||
}
|
||||
}
|
||||
|
||||
error("cannot coerce %1% to a string: %2%",
|
||||
showType(v),
|
||||
ValuePrinter(*this, v, errorPrintOptions))
|
||||
error<TypeError>("cannot coerce %1% to a string: %2%",
|
||||
showType(v),
|
||||
ValuePrinter(*this, v, errorPrintOptions)
|
||||
)
|
||||
.withTrace(pos, errorCtx)
|
||||
.debugThrow<TypeError>();
|
||||
.debugThrow();
|
||||
}
|
||||
|
||||
|
||||
StorePath EvalState::copyPathToStore(NixStringContext & context, const SourcePath & path)
|
||||
{
|
||||
if (nix::isDerivation(path.path.abs()))
|
||||
error("file names are not allowed to end in '%1%'", drvExtension).debugThrow<EvalError>();
|
||||
error<EvalError>("file names are not allowed to end in '%1%'", drvExtension).debugThrow();
|
||||
|
||||
auto i = srcToStore.find(path);
|
||||
|
||||
auto dstPath = i != srcToStore.end()
|
||||
? i->second
|
||||
: [&]() {
|
||||
auto dstPath = fetchToStore(*store, path.resolveSymlinks(), path.baseName(), FileIngestionMethod::Recursive, nullptr, repair);
|
||||
auto dstPath = fetchToStore(
|
||||
*store,
|
||||
path.resolveSymlinks(),
|
||||
settings.readOnlyMode ? FetchMode::DryRun : FetchMode::Copy,
|
||||
path.baseName(),
|
||||
FileIngestionMethod::Recursive,
|
||||
nullptr,
|
||||
repair);
|
||||
allowPath(dstPath);
|
||||
srcToStore.insert_or_assign(path, dstPath);
|
||||
printMsg(lvlChatty, "copied source '%1%' -> '%2%'", path, store->printStorePath(dstPath));
|
||||
@ -2380,7 +2423,7 @@ SourcePath EvalState::coerceToPath(const PosIdx pos, Value & v, NixStringContext
|
||||
relative to the root filesystem. */
|
||||
auto path = coerceToString(pos, v, context, errorCtx, false, false, true).toOwned();
|
||||
if (path == "" || path[0] != '/')
|
||||
error("string '%1%' doesn't represent an absolute path", path).withTrace(pos, errorCtx).debugThrow<EvalError>();
|
||||
error<EvalError>("string '%1%' doesn't represent an absolute path", path).withTrace(pos, errorCtx).debugThrow();
|
||||
return rootPath(CanonPath(path));
|
||||
}
|
||||
|
||||
@ -2390,7 +2433,7 @@ StorePath EvalState::coerceToStorePath(const PosIdx pos, Value & v, NixStringCon
|
||||
auto path = coerceToString(pos, v, context, errorCtx, false, false, true).toOwned();
|
||||
if (auto storePath = store->maybeParseStorePath(path))
|
||||
return *storePath;
|
||||
error("path '%1%' is not in the Nix store", path).withTrace(pos, errorCtx).debugThrow<EvalError>();
|
||||
error<EvalError>("path '%1%' is not in the Nix store", path).withTrace(pos, errorCtx).debugThrow();
|
||||
}
|
||||
|
||||
|
||||
@ -2400,18 +2443,18 @@ std::pair<SingleDerivedPath, std::string_view> EvalState::coerceToSingleDerivedP
|
||||
auto s = forceString(v, context, pos, errorCtx);
|
||||
auto csize = context.size();
|
||||
if (csize != 1)
|
||||
error(
|
||||
error<EvalError>(
|
||||
"string '%s' has %d entries in its context. It should only have exactly one entry",
|
||||
s, csize)
|
||||
.withTrace(pos, errorCtx).debugThrow<EvalError>();
|
||||
.withTrace(pos, errorCtx).debugThrow();
|
||||
auto derivedPath = std::visit(overloaded {
|
||||
[&](NixStringContextElem::Opaque && o) -> SingleDerivedPath {
|
||||
return std::move(o);
|
||||
},
|
||||
[&](NixStringContextElem::DrvDeep &&) -> SingleDerivedPath {
|
||||
error(
|
||||
error<EvalError>(
|
||||
"string '%s' has a context which refers to a complete source and binary closure. This is not supported at this time",
|
||||
s).withTrace(pos, errorCtx).debugThrow<EvalError>();
|
||||
s).withTrace(pos, errorCtx).debugThrow();
|
||||
},
|
||||
[&](NixStringContextElem::Built && b) -> SingleDerivedPath {
|
||||
return std::move(b);
|
||||
@ -2434,16 +2477,16 @@ SingleDerivedPath EvalState::coerceToSingleDerivedPath(const PosIdx pos, Value &
|
||||
error message. */
|
||||
std::visit(overloaded {
|
||||
[&](const SingleDerivedPath::Opaque & o) {
|
||||
error(
|
||||
error<EvalError>(
|
||||
"path string '%s' has context with the different path '%s'",
|
||||
s, sExpected)
|
||||
.withTrace(pos, errorCtx).debugThrow<EvalError>();
|
||||
.withTrace(pos, errorCtx).debugThrow();
|
||||
},
|
||||
[&](const SingleDerivedPath::Built & b) {
|
||||
error(
|
||||
error<EvalError>(
|
||||
"string '%s' has context with the output '%s' from derivation '%s', but the string is not the right placeholder for this derivation output. It should be '%s'",
|
||||
s, b.output, b.drvPath->to_string(*store), sExpected)
|
||||
.withTrace(pos, errorCtx).debugThrow<EvalError>();
|
||||
.withTrace(pos, errorCtx).debugThrow();
|
||||
}
|
||||
}, derivedPath.raw());
|
||||
}
|
||||
@ -2528,7 +2571,7 @@ bool EvalState::eqValues(Value & v1, Value & v2, const PosIdx pos, std::string_v
|
||||
|
||||
case nThunk: // Must not be left by forceValue
|
||||
default:
|
||||
error("cannot compare %1% with %2%", showType(v1), showType(v2)).withTrace(pos, errorCtx).debugThrow<EvalError>();
|
||||
error<EvalError>("cannot compare %1% with %2%", showType(v1), showType(v2)).withTrace(pos, errorCtx).debugThrow();
|
||||
}
|
||||
}
|
||||
|
||||
@ -2689,14 +2732,14 @@ SourcePath resolveExprPath(SourcePath path)
|
||||
// Basic cycle/depth limit to avoid infinite loops.
|
||||
if (++followCount >= maxFollow)
|
||||
throw Error("too many symbolic links encountered while traversing the path '%s'", path);
|
||||
auto p = path.parent().resolveSymlinks() + path.baseName();
|
||||
auto p = path.parent().resolveSymlinks() / path.baseName();
|
||||
if (p.lstat().type != InputAccessor::tSymlink) break;
|
||||
path = {path.accessor, CanonPath(p.readLink(), path.path.parent().value_or(CanonPath::root))};
|
||||
}
|
||||
|
||||
/* If `path' refers to a directory, append `/default.nix'. */
|
||||
if (path.resolveSymlinks().lstat().type == InputAccessor::tDirectory)
|
||||
return path + "default.nix";
|
||||
return path / "default.nix";
|
||||
|
||||
return path;
|
||||
}
|
||||
@ -2738,7 +2781,7 @@ Expr * EvalState::parseStdin()
|
||||
// drainFD should have left some extra space for terminators
|
||||
buffer.append("\0\0", 2);
|
||||
auto s = make_ref<std::string>(std::move(buffer));
|
||||
return parse(s->data(), s->size(), Pos::Stdin{.source = s}, rootPath(CanonPath::fromCwd()), staticBaseEnv);
|
||||
return parse(s->data(), s->size(), Pos::Stdin{.source = s}, rootPath("."), staticBaseEnv);
|
||||
}
|
||||
|
||||
|
||||
@ -2767,13 +2810,12 @@ SourcePath EvalState::findFile(const SearchPath & searchPath, const std::string_
|
||||
if (hasPrefix(path, "nix/"))
|
||||
return {corepkgsFS, CanonPath(path.substr(3))};
|
||||
|
||||
debugThrow(ThrownError({
|
||||
.msg = hintfmt(evalSettings.pureEval
|
||||
error<ThrownError>(
|
||||
evalSettings.pureEval
|
||||
? "cannot look up '<%s>' in pure evaluation mode (use '--impure' to override)"
|
||||
: "file '%s' was not found in the Nix search path (add it using $NIX_PATH or -I)",
|
||||
path),
|
||||
.errPos = positions[pos]
|
||||
}), 0, 0);
|
||||
path
|
||||
).atPos(pos).debugThrow();
|
||||
}
|
||||
|
||||
|
||||
@ -2787,12 +2829,13 @@ std::optional<std::string> EvalState::resolveSearchPathPath(const SearchPath::Pa
|
||||
|
||||
if (EvalSettings::isPseudoUrl(value)) {
|
||||
try {
|
||||
auto storePath = fetchers::downloadTarball(
|
||||
store, EvalSettings::resolvePseudoUrl(value), "source", false).storePath;
|
||||
auto accessor = fetchers::downloadTarball(
|
||||
EvalSettings::resolvePseudoUrl(value)).accessor;
|
||||
auto storePath = fetchToStore(*store, SourcePath(accessor), FetchMode::Copy);
|
||||
res = { store->toRealPath(storePath) };
|
||||
} catch (FileTransferError & e) {
|
||||
} catch (Error & e) {
|
||||
logWarning({
|
||||
.msg = hintfmt("Nix search path entry '%1%' cannot be downloaded, ignoring", value)
|
||||
.msg = HintFmt("Nix search path entry '%1%' cannot be downloaded, ignoring", value)
|
||||
});
|
||||
}
|
||||
}
|
||||
@ -2825,7 +2868,7 @@ std::optional<std::string> EvalState::resolveSearchPathPath(const SearchPath::Pa
|
||||
res = { path };
|
||||
else {
|
||||
logWarning({
|
||||
.msg = hintfmt("Nix search path entry '%1%' does not exist, ignoring", value)
|
||||
.msg = HintFmt("Nix search path entry '%1%' does not exist, ignoring", value)
|
||||
});
|
||||
res = std::nullopt;
|
||||
}
|
||||
@ -2856,11 +2899,11 @@ Expr * EvalState::parse(
|
||||
}
|
||||
|
||||
|
||||
std::string ExternalValueBase::coerceToString(const Pos & pos, NixStringContext & context, bool copyMore, bool copyToStore) const
|
||||
std::string ExternalValueBase::coerceToString(EvalState & state, const PosIdx & pos, NixStringContext & context, bool copyMore, bool copyToStore) const
|
||||
{
|
||||
throw TypeError({
|
||||
.msg = hintfmt("cannot coerce %1% to a string: %2%", showType(), *this)
|
||||
});
|
||||
state.error<TypeError>(
|
||||
"cannot coerce %1% to a string: %2%", showType(), *this
|
||||
).atPos(pos).debugThrow();
|
||||
}
@ -2,6 +2,7 @@
///@file

#include "attr-set.hh"
#include "eval-error.hh"
#include "types.hh"
#include "value.hh"
#include "nixexpr.hh"
@ -10,6 +11,7 @@
#include "experimental-features.hh"
#include "input-accessor.hh"
#include "search-path.hh"
#include "repl-exit-status.hh"

#include <map>
#include <optional>
@ -147,49 +149,10 @@ struct DebugTrace {
std::shared_ptr<Pos> pos;
const Expr & expr;
const Env & env;
hintformat hint;
HintFmt hint;
bool isError;
};

void debugError(Error * e, Env & env, Expr & expr);

class ErrorBuilder
|
||||
{
|
||||
private:
|
||||
EvalState & state;
|
||||
ErrorInfo info;
|
||||
|
||||
ErrorBuilder(EvalState & s, ErrorInfo && i): state(s), info(i) { }
|
||||
|
||||
public:
|
||||
template<typename... Args>
|
||||
[[nodiscard, gnu::noinline]]
|
||||
static ErrorBuilder * create(EvalState & s, const Args & ... args)
|
||||
{
|
||||
return new ErrorBuilder(s, ErrorInfo { .msg = hintfmt(args...) });
|
||||
}
|
||||
|
||||
[[nodiscard, gnu::noinline]]
|
||||
ErrorBuilder & atPos(PosIdx pos);
|
||||
|
||||
[[nodiscard, gnu::noinline]]
|
||||
ErrorBuilder & withTrace(PosIdx pos, const std::string_view text);
|
||||
|
||||
[[nodiscard, gnu::noinline]]
|
||||
ErrorBuilder & withFrameTrace(PosIdx pos, const std::string_view text);
|
||||
|
||||
[[nodiscard, gnu::noinline]]
|
||||
ErrorBuilder & withSuggestions(Suggestions & s);
|
||||
|
||||
[[nodiscard, gnu::noinline]]
|
||||
ErrorBuilder & withFrame(const Env & e, const Expr & ex);
|
||||
|
||||
template<class ErrorType>
|
||||
[[gnu::noinline, gnu::noreturn]]
|
||||
void debugThrow();
|
||||
};
|
||||
|
||||
|
||||
class EvalState : public std::enable_shared_from_this<EvalState>
|
||||
{
|
||||
public:
|
||||
@ -257,9 +220,8 @@ public:
|
||||
/**
|
||||
* Debugger
|
||||
*/
|
||||
void (* debugRepl)(ref<EvalState> es, const ValMap & extraEnv);
|
||||
ReplExitStatus (* debugRepl)(ref<EvalState> es, const ValMap & extraEnv);
|
||||
bool debugStop;
|
||||
bool debugQuit;
|
||||
int trylevel;
|
||||
std::list<DebugTrace> debugTraces;
|
||||
std::map<const Expr*, const std::shared_ptr<const StaticEnv>> exprEnvs;
|
||||
@ -274,39 +236,11 @@ public:
|
||||
|
||||
void runDebugRepl(const Error * error, const Env & env, const Expr & expr);
|
||||
|
||||
template<class E>
|
||||
[[gnu::noinline, gnu::noreturn]]
|
||||
void debugThrowLastTrace(E && error)
|
||||
{
|
||||
debugThrow(error, nullptr, nullptr);
|
||||
}
|
||||
|
||||
template<class E>
|
||||
[[gnu::noinline, gnu::noreturn]]
|
||||
void debugThrow(E && error, const Env * env, const Expr * expr)
|
||||
{
|
||||
if (debugRepl && ((env && expr) || !debugTraces.empty())) {
|
||||
if (!env || !expr) {
|
||||
const DebugTrace & last = debugTraces.front();
|
||||
env = &last.env;
|
||||
expr = &last.expr;
|
||||
}
|
||||
runDebugRepl(&error, *env, *expr);
|
||||
}
|
||||
|
||||
throw std::move(error);
|
||||
}
|
||||
|
||||
// This is dangerous, but gets in line with the idea that error creation and
|
||||
// throwing should not allocate on the stack of hot functions.
|
||||
// as long as errors are immediately thrown, it works.
|
||||
ErrorBuilder * errorBuilder;
|
||||
|
||||
template<typename... Args>
|
||||
template<class T, typename... Args>
|
||||
[[nodiscard, gnu::noinline]]
|
||||
ErrorBuilder & error(const Args & ... args) {
|
||||
errorBuilder = ErrorBuilder::create(*this, args...);
|
||||
return *errorBuilder;
|
||||
EvalErrorBuilder<T> & error(const Args & ... args) {
|
||||
// `EvalErrorBuilder::debugThrow` performs the corresponding `delete`.
|
||||
return *new EvalErrorBuilder<T>(*this, args...);
|
||||
}
|
||||
|
||||
private:
|
||||
@ -372,6 +306,11 @@ public:
|
||||
*/
|
||||
SourcePath rootPath(CanonPath path);
|
||||
|
||||
/**
|
||||
* Variant which accepts relative paths too.
|
||||
*/
|
||||
SourcePath rootPath(PathView path);
|
||||
|
||||
/**
|
||||
* Allow access to a path.
|
||||
*/
|
||||
@ -493,10 +432,12 @@ public:
|
||||
std::string_view forceString(Value & v, NixStringContext & context, const PosIdx pos, std::string_view errorCtx);
|
||||
std::string_view forceStringNoCtx(Value & v, const PosIdx pos, std::string_view errorCtx);
|
||||
|
||||
template<typename... Args>
|
||||
[[gnu::noinline]]
|
||||
void addErrorTrace(Error & e, const char * s, const std::string & s2) const;
|
||||
void addErrorTrace(Error & e, const Args & ... formatArgs) const;
|
||||
template<typename... Args>
|
||||
[[gnu::noinline]]
|
||||
void addErrorTrace(Error & e, const PosIdx pos, const char * s, const std::string & s2, bool frame = false) const;
|
||||
void addErrorTrace(Error & e, const PosIdx pos, const Args & ... formatArgs) const;
|
||||
|
||||
public:
|
||||
/**
|
||||
@ -819,7 +760,6 @@ struct DebugTraceStacker {
|
||||
DebugTraceStacker(EvalState & evalState, DebugTrace t);
|
||||
~DebugTraceStacker()
|
||||
{
|
||||
// assert(evalState.debugTraces.front() == trace);
|
||||
evalState.debugTraces.pop_front();
|
||||
}
|
||||
EvalState & evalState;
|
||||
@ -845,22 +785,6 @@ SourcePath resolveExprPath(SourcePath path);
|
||||
*/
|
||||
bool isAllowedURI(std::string_view uri, const Strings & allowedPaths);
|
||||
|
||||
struct InvalidPathError : EvalError
|
||||
{
|
||||
Path path;
|
||||
InvalidPathError(const Path & path);
|
||||
#ifdef EXCEPTION_NEEDS_THROW_SPEC
|
||||
~InvalidPathError() throw () { };
|
||||
#endif
|
||||
};
|
||||
|
||||
template<class ErrorType>
|
||||
void ErrorBuilder::debugThrow()
|
||||
{
|
||||
// NOTE: We always use the -LastTrace version as we push the new trace in withFrame()
|
||||
state.debugThrowLastTrace(ErrorType(info));
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
#include "eval-inline.hh"
|
||||
|
@ -1,20 +1,52 @@
lockFileStr: rootSrc: rootSubdir:
# This is a helper to callFlake() to lazily fetch flake inputs.

# The contents of the lock file, in JSON format.
lockFileStr:

# A mapping of lock file node IDs to { sourceInfo, subdir } attrsets,
# with sourceInfo.outPath providing an InputAccessor to a previously
# fetched tree. This is necessary for possibly unlocked inputs, in
# particular the root input, but also --override-inputs pointing to
# unlocked trees.
overrides:

let

lockFile = builtins.fromJSON lockFileStr;

# Resolve a input spec into a node name. An input spec is
# either a node name, or a 'follows' path from the root
# node.
resolveInput = inputSpec:
if builtins.isList inputSpec
then getInputByPath lockFile.root inputSpec
else inputSpec;

# Follow an input path (e.g. ["dwarffs" "nixpkgs"]) from the
# root node, returning the final node.
getInputByPath = nodeName: path:
if path == []
then nodeName
else
getInputByPath
# Since this could be a 'follows' input, call resolveInput.
(resolveInput lockFile.nodes.${nodeName}.inputs.${builtins.head path})
(builtins.tail path);

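# A worked example (hypothetical lock data, not from this commit): with
# nodes.root.inputs = { dwarffs = "dwarffs"; } and
# nodes.dwarffs.inputs = { nixpkgs = "nixpkgs"; }, the call
#   getInputByPath lockFile.root [ "dwarffs" "nixpkgs" ]
# first resolves "dwarffs", then "nixpkgs", and returns the node name
# "nixpkgs".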
allNodes =
builtins.mapAttrs
(key: node:
let

sourceInfo =
if key == lockFile.root
then rootSrc
else fetchTree (node.info or {} // removeAttrs node.locked ["dir"]);
if overrides ? ${key}
then
overrides.${key}.sourceInfo
else
# FIXME: remove obsolete node.info.
fetchTree (node.info or {} // removeAttrs node.locked ["dir"]);

subdir = if key == lockFile.root then rootSubdir else node.locked.dir or "";
subdir = overrides.${key}.dir or node.locked.dir or "";

outPath = sourceInfo + ((if subdir == "" then "" else "/") + subdir);

@ -24,25 +56,6 @@ let
(inputName: inputSpec: allNodes.${resolveInput inputSpec})
(node.inputs or {});

# Resolve a input spec into a node name. An input spec is
# either a node name, or a 'follows' path from the root
# node.
resolveInput = inputSpec:
if builtins.isList inputSpec
then getInputByPath lockFile.root inputSpec
else inputSpec;

# Follow an input path (e.g. ["dwarffs" "nixpkgs"]) from the
# root node, returning the final node.
getInputByPath = nodeName: path:
if path == []
then nodeName
else
getInputByPath
# Since this could be a 'follows' input, call resolveInput.
(resolveInput lockFile.nodes.${nodeName}.inputs.${builtins.head path})
(builtins.tail path);

outputs = flake.outputs (inputs // { self = result; });

result =
@ -147,15 +147,15 @@ static FlakeInput parseFlakeInput(EvalState & state,
|
||||
NixStringContext emptyContext = {};
|
||||
attrs.emplace(state.symbols[attr.name], printValueAsJSON(state, true, *attr.value, pos, emptyContext).dump());
|
||||
} else
|
||||
throw TypeError("flake input attribute '%s' is %s while a string, Boolean, or integer is expected",
|
||||
state.symbols[attr.name], showType(*attr.value));
|
||||
state.error<TypeError>("flake input attribute '%s' is %s while a string, Boolean, or integer is expected",
|
||||
state.symbols[attr.name], showType(*attr.value)).debugThrow();
|
||||
}
|
||||
#pragma GCC diagnostic pop
|
||||
}
|
||||
} catch (Error & e) {
|
||||
e.addTrace(
|
||||
state.positions[attr.pos],
|
||||
hintfmt("while evaluating flake attribute '%s'", state.symbols[attr.name]));
|
||||
HintFmt("while evaluating flake attribute '%s'", state.symbols[attr.name]));
|
||||
throw;
|
||||
}
|
||||
}
|
||||
@ -164,7 +164,7 @@ static FlakeInput parseFlakeInput(EvalState & state,
|
||||
try {
|
||||
input.ref = FlakeRef::fromAttrs(attrs);
|
||||
} catch (Error & e) {
|
||||
e.addTrace(state.positions[pos], hintfmt("while evaluating flake input"));
|
||||
e.addTrace(state.positions[pos], HintFmt("while evaluating flake input"));
|
||||
throw;
|
||||
}
|
||||
else {
|
||||
@ -295,15 +295,15 @@ static Flake getFlake(
|
||||
std::vector<std::string> ss;
|
||||
for (auto elem : setting.value->listItems()) {
|
||||
if (elem->type() != nString)
|
||||
throw TypeError("list element in flake configuration setting '%s' is %s while a string is expected",
|
||||
state.symbols[setting.name], showType(*setting.value));
|
||||
state.error<TypeError>("list element in flake configuration setting '%s' is %s while a string is expected",
|
||||
state.symbols[setting.name], showType(*setting.value)).debugThrow();
|
||||
ss.emplace_back(state.forceStringNoCtx(*elem, setting.pos, ""));
|
||||
}
|
||||
flake.config.settings.emplace(state.symbols[setting.name], ss);
|
||||
}
|
||||
else
|
||||
throw TypeError("flake configuration setting '%s' is %s",
|
||||
state.symbols[setting.name], showType(*setting.value));
|
||||
state.error<TypeError>("flake configuration setting '%s' is %s",
|
||||
state.symbols[setting.name], showType(*setting.value)).debugThrow();
|
||||
}
|
||||
}
|
||||
|
||||
@ -365,6 +365,7 @@ LockedFlake lockFlake(
|
||||
std::map<InputPath, FlakeInput> overrides;
|
||||
std::set<InputPath> explicitCliOverrides;
|
||||
std::set<InputPath> overridesUsed, updatesUsed;
|
||||
std::map<ref<Node>, StorePath> nodePaths;
|
||||
|
||||
for (auto & i : lockFlags.inputOverrides) {
|
||||
overrides.insert_or_assign(i.first, FlakeInput { .ref = i.second });
|
||||
@ -535,11 +536,13 @@ LockedFlake lockFlake(
|
||||
}
|
||||
}
|
||||
|
||||
computeLocks(
|
||||
mustRefetch
|
||||
? getFlake(state, oldLock->lockedRef, false, flakeCache, inputPath).inputs
|
||||
: fakeInputs,
|
||||
childNode, inputPath, oldLock, lockRootPath, parentPath, !mustRefetch);
|
||||
if (mustRefetch) {
|
||||
auto inputFlake = getFlake(state, oldLock->lockedRef, false, flakeCache, inputPath);
|
||||
nodePaths.emplace(childNode, inputFlake.storePath);
|
||||
computeLocks(inputFlake.inputs, childNode, inputPath, oldLock, lockRootPath, parentPath, false);
|
||||
} else {
|
||||
computeLocks(fakeInputs, childNode, inputPath, oldLock, lockRootPath, parentPath, true);
|
||||
}
|
||||
|
||||
} else {
|
||||
/* We need to create a new lock file entry. So fetch
|
||||
@ -584,6 +587,7 @@ LockedFlake lockFlake(
|
||||
flake. Also, unless we already have this flake
|
||||
in the top-level lock file, use this flake's
|
||||
own lock file. */
|
||||
nodePaths.emplace(childNode, inputFlake.storePath);
|
||||
computeLocks(
|
||||
inputFlake.inputs, childNode, inputPath,
|
||||
oldLock
|
||||
@ -596,11 +600,13 @@ LockedFlake lockFlake(
|
||||
}
|
||||
|
||||
else {
|
||||
auto [sourceInfo, resolvedRef, lockedRef] = fetchOrSubstituteTree(
|
||||
auto [storePath, resolvedRef, lockedRef] = fetchOrSubstituteTree(
|
||||
state, *input.ref, useRegistries, flakeCache);
|
||||
|
||||
auto childNode = make_ref<LockedNode>(lockedRef, ref, false);
|
||||
|
||||
nodePaths.emplace(childNode, storePath);
|
||||
|
||||
node->inputs.insert_or_assign(id, childNode);
|
||||
}
|
||||
}
|
||||
@ -615,6 +621,8 @@ LockedFlake lockFlake(
|
||||
// Bring in the current ref for relative path resolution if we have it
|
||||
auto parentPath = canonPath(state.store->toRealPath(flake.storePath) + "/" + flake.lockedRef.subdir, true);
|
||||
|
||||
nodePaths.emplace(newLockFile.root, flake.storePath);
|
||||
|
||||
computeLocks(
|
||||
flake.inputs,
|
||||
newLockFile.root,
|
||||
@ -707,14 +715,6 @@ LockedFlake lockFlake(
|
||||
flake.lockedRef.input.getRev() &&
|
||||
prevLockedRef.input.getRev() != flake.lockedRef.input.getRev())
|
||||
warn("committed new revision '%s'", flake.lockedRef.input.getRev()->gitRev());
|
||||
|
||||
/* Make sure that we picked up the change,
|
||||
i.e. the tree should usually be dirty
|
||||
now. Corner case: we could have reverted from a
|
||||
dirty to a clean tree! */
|
||||
if (flake.lockedRef.input == prevLockedRef.input
|
||||
&& !flake.lockedRef.input.isLocked())
|
||||
throw Error("'%s' did not change after I updated its 'flake.lock' file; is 'flake.lock' under version control?", flake.originalRef);
|
||||
}
|
||||
} else
|
||||
throw Error("cannot write modified lock file of flake '%s' (use '--no-write-lock-file' to ignore)", topRef);
|
||||
@ -724,7 +724,11 @@ LockedFlake lockFlake(
|
||||
}
|
||||
}
|
||||
|
||||
return LockedFlake { .flake = std::move(flake), .lockFile = std::move(newLockFile) };
|
||||
return LockedFlake {
|
||||
.flake = std::move(flake),
|
||||
.lockFile = std::move(newLockFile),
|
||||
.nodePaths = std::move(nodePaths)
|
||||
};
|
||||
|
||||
} catch (Error & e) {
|
||||
e.addTrace({}, "while updating the lock file of flake '%s'", flake.lockedRef.to_string());
|
||||
@ -736,30 +740,48 @@ void callFlake(EvalState & state,
|
||||
const LockedFlake & lockedFlake,
|
||||
Value & vRes)
|
||||
{
|
||||
auto vLocks = state.allocValue();
|
||||
auto vRootSrc = state.allocValue();
|
||||
auto vRootSubdir = state.allocValue();
|
||||
auto vTmp1 = state.allocValue();
|
||||
auto vTmp2 = state.allocValue();
|
||||
experimentalFeatureSettings.require(Xp::Flakes);
|
||||
|
||||
vLocks->mkString(lockedFlake.lockFile.to_string());
|
||||
auto [lockFileStr, keyMap] = lockedFlake.lockFile.to_string();
|
||||
|
||||
emitTreeAttrs(
|
||||
state,
|
||||
lockedFlake.flake.storePath,
|
||||
lockedFlake.flake.lockedRef.input,
|
||||
*vRootSrc,
|
||||
false,
|
||||
lockedFlake.flake.forceDirty);
|
||||
auto overrides = state.buildBindings(lockedFlake.nodePaths.size());
|
||||
|
||||
vRootSubdir->mkString(lockedFlake.flake.lockedRef.subdir);
|
||||
for (auto & [node, storePath] : lockedFlake.nodePaths) {
|
||||
auto override = state.buildBindings(2);
|
||||
|
||||
auto & vSourceInfo = override.alloc(state.symbols.create("sourceInfo"));
|
||||
|
||||
auto lockedNode = node.dynamic_pointer_cast<const LockedNode>();
|
||||
|
||||
emitTreeAttrs(
|
||||
state,
|
||||
storePath,
|
||||
lockedNode ? lockedNode->lockedRef.input : lockedFlake.flake.lockedRef.input,
|
||||
vSourceInfo,
|
||||
false,
|
||||
!lockedNode && lockedFlake.flake.forceDirty);
|
||||
|
||||
auto key = keyMap.find(node);
|
||||
assert(key != keyMap.end());
|
||||
|
||||
override
|
||||
.alloc(state.symbols.create("dir"))
|
||||
.mkString(lockedNode ? lockedNode->lockedRef.subdir : lockedFlake.flake.lockedRef.subdir);
|
||||
|
||||
overrides.alloc(state.symbols.create(key->second)).mkAttrs(override);
|
||||
}
|
||||
|
||||
auto & vOverrides = state.allocValue()->mkAttrs(overrides);
|
||||
|
||||
auto vCallFlake = state.allocValue();
|
||||
state.evalFile(state.callFlakeInternal, *vCallFlake);
|
||||
|
||||
auto vTmp1 = state.allocValue();
|
||||
auto vLocks = state.allocValue();
|
||||
vLocks->mkString(lockFileStr);
|
||||
state.callFunction(*vCallFlake, *vLocks, *vTmp1, noPos);
|
||||
state.callFunction(*vTmp1, *vRootSrc, *vTmp2, noPos);
|
||||
state.callFunction(*vTmp2, *vRootSubdir, vRes, noPos);
|
||||
|
||||
state.callFunction(*vTmp1, vOverrides, vRes, noPos);
|
||||
}
|
||||
|
||||
static void prim_getFlake(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||
@ -865,11 +887,11 @@ static void prim_flakeRefToString(
|
||||
attrs.emplace(state.symbols[attr.name],
|
||||
std::string(attr.value->string_view()));
|
||||
} else {
|
||||
state.error(
|
||||
state.error<EvalError>(
|
||||
"flake reference attribute sets may only contain integers, Booleans, "
|
||||
"and strings, but attribute '%s' is %s",
|
||||
state.symbols[attr.name],
|
||||
showType(*attr.value)).debugThrow<EvalError>();
|
||||
showType(*attr.value)).debugThrow();
|
||||
}
|
||||
}
|
||||
auto flakeRef = FlakeRef::fromAttrs(attrs);
@ -103,6 +103,13 @@ struct LockedFlake
Flake flake;
LockFile lockFile;

/**
* Store paths of nodes that have been fetched in
* lockFlake(); in particular, the root node and the overriden
* inputs.
*/
std::map<ref<Node>, StorePath> nodePaths;

Fingerprint getFingerprint() const;
};

@ -38,7 +38,7 @@ LockedNode::LockedNode(const nlohmann::json & json)
|
||||
, isFlake(json.find("flake") != json.end() ? (bool) json["flake"] : true)
|
||||
{
|
||||
if (!lockedRef.input.isLocked())
|
||||
throw Error("lock file contains mutable lock '%s'",
|
||||
throw Error("lock file contains unlocked input '%s'",
|
||||
fetchers::attrsToJSON(lockedRef.input.toAttrs()));
|
||||
}
|
||||
|
||||
@ -107,7 +107,7 @@ LockFile::LockFile(const nlohmann::json & json, const Path & path)
|
||||
std::string inputKey = i.value();
|
||||
auto k = nodeMap.find(inputKey);
|
||||
if (k == nodeMap.end()) {
|
||||
auto nodes = json["nodes"];
|
||||
auto & nodes = json["nodes"];
|
||||
auto jsonNode2 = nodes.find(inputKey);
|
||||
if (jsonNode2 == nodes.end())
|
||||
throw Error("lock file references missing node '%s'", inputKey);
|
||||
@ -134,10 +134,10 @@ LockFile::LockFile(const nlohmann::json & json, const Path & path)
|
||||
// a bit since we don't need to worry about cycles.
|
||||
}
|
||||
|
||||
nlohmann::json LockFile::toJSON() const
|
||||
std::pair<nlohmann::json, LockFile::KeyMap> LockFile::toJSON() const
|
||||
{
|
||||
nlohmann::json nodes;
|
||||
std::unordered_map<std::shared_ptr<const Node>, std::string> nodeKeys;
|
||||
KeyMap nodeKeys;
|
||||
std::unordered_set<std::string> keys;
|
||||
|
||||
std::function<std::string(const std::string & key, ref<const Node> node)> dumpNode;
|
||||
@ -194,12 +194,13 @@ nlohmann::json LockFile::toJSON() const
|
||||
json["root"] = dumpNode("root", root);
|
||||
json["nodes"] = std::move(nodes);
|
||||
|
||||
return json;
|
||||
return {json, std::move(nodeKeys)};
|
||||
}
|
||||
|
||||
std::string LockFile::to_string() const
|
||||
std::pair<std::string, LockFile::KeyMap> LockFile::to_string() const
|
||||
{
|
||||
return toJSON().dump(2);
|
||||
auto [json, nodeKeys] = toJSON();
|
||||
return {json.dump(2), std::move(nodeKeys)};
|
||||
}
|
||||
|
||||
LockFile LockFile::read(const Path & path)
|
||||
@ -210,7 +211,7 @@ LockFile LockFile::read(const Path & path)
|
||||
|
||||
std::ostream & operator <<(std::ostream & stream, const LockFile & lockFile)
|
||||
{
|
||||
stream << lockFile.toJSON().dump(2);
|
||||
stream << lockFile.toJSON().first.dump(2);
|
||||
return stream;
|
||||
}
|
||||
|
||||
@ -243,7 +244,7 @@ std::optional<FlakeRef> LockFile::isUnlocked() const
|
||||
bool LockFile::operator ==(const LockFile & other) const
|
||||
{
|
||||
// FIXME: slow
|
||||
return toJSON() == other.toJSON();
|
||||
return toJSON().first == other.toJSON().first;
|
||||
}
|
||||
|
||||
bool LockFile::operator !=(const LockFile & other) const
|
||||
|
@ -59,14 +59,15 @@ struct LockFile

typedef std::map<ref<const Node>, std::string> KeyMap;

nlohmann::json toJSON() const;
std::pair<nlohmann::json, KeyMap> toJSON() const;

std::string to_string() const;
std::pair<std::string, KeyMap> to_string() const;

static LockFile read(const Path & path);

/**
* Check whether this lock file has any unlocked inputs.
* Check whether this lock file has any unlocked inputs. If so,
* return one.
*/
std::optional<FlakeRef> isUnlocked() const;

@ -5,13 +5,12 @@
namespace nix {

static const std::string attributeNamePattern("[a-zA-Z0-9_-]+");
static const std::regex lastAttributeRegex("(?:" + attributeNamePattern + "\\.)*(?!default)(" + attributeNamePattern +")(\\^.*)?");
static const std::regex lastAttributeRegex("^((?:" + attributeNamePattern + "\\.)*)(" + attributeNamePattern +")(\\^.*)?$");
static const std::string pathSegmentPattern("[a-zA-Z0-9_-]+");
static const std::regex lastPathSegmentRegex(".*/(" + pathSegmentPattern +")");
static const std::regex secondPathSegmentRegex("(?:" + pathSegmentPattern + ")/(" + pathSegmentPattern +")(?:/.*)?");
static const std::regex gitProviderRegex("github|gitlab|sourcehut");
static const std::regex gitSchemeRegex("git($|\\+.*)");
static const std::regex defaultOutputRegex(".*\\.default($|\\^.*)");

std::optional<std::string> getNameFromURL(const ParsedURL & url)
{
@ -22,8 +21,11 @@ std::optional<std::string> getNameFromURL(const ParsedURL & url)
return url.query.at("dir");

/* If the fragment isn't a "default" and contains two attribute elements, use the last one */
if (std::regex_match(url.fragment, match, lastAttributeRegex))
return match.str(1);
if (std::regex_match(url.fragment, match, lastAttributeRegex)
&& match.str(1) != "defaultPackage."
&& match.str(2) != "default") {
return match.str(2);
}

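/* Worked example (hypothetical URL, for illustration only): for a fragment
   such as "packages.x86_64-linux.hello^out" the regex above captures
   match.str(1) == "packages.x86_64-linux." and match.str(2) == "hello",
   so "hello" is returned; fragments like "default" or
   "defaultPackage.x86_64-linux" fall through to the rules below. */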
/* If this is a github/gitlab/sourcehut flake, use the repo name */
if (std::regex_match(url.scheme, gitProviderRegex) && std::regex_match(url.path, match, secondPathSegmentRegex))
@ -33,10 +35,6 @@ std::optional<std::string> getNameFromURL(const ParsedURL & url)
if (std::regex_match(url.scheme, gitSchemeRegex) && std::regex_match(url.path, match, lastPathSegmentRegex))
return match.str(1);

/* If everything failed but there is a non-default fragment, use it in full */
if (!url.fragment.empty() && !std::regex_match(url.fragment, defaultOutputRegex))
return url.fragment;

/* If there is no fragment, take the last element of the path */
if (std::regex_match(url.path, match, lastPathSegmentRegex))
return match.str(1);
@ -49,7 +49,7 @@ std::string PackageInfo::queryName() const
{
if (name == "" && attrs) {
auto i = attrs->find(state->sName);
if (i == attrs->end()) throw TypeError("derivation name missing");
if (i == attrs->end()) state->error<TypeError>("derivation name missing").debugThrow();
name = state->forceStringNoCtx(*i->value, noPos, "while evaluating the 'name' attribute of a derivation");
}
return name;
@ -396,7 +396,8 @@ static void getDerivations(EvalState & state, Value & vIn,
}
}

else throw TypeError("expression does not evaluate to a derivation (or a set or list of those)");
else
state.error<TypeError>("expression does not evaluate to a derivation (or a set or list of those)").debugThrow();
}

@ -1,4 +1,6 @@
#include "json-to-value.hh"
#include "value.hh"
#include "eval.hh"

#include <variant>
#include <nlohmann/json.hpp>
@ -159,7 +161,7 @@ public:
}

bool parse_error(std::size_t, const std::string&, const nlohmann::detail::exception& ex) {
throw JSONParseError(ex.what());
throw JSONParseError("%s", ex.what());
}
};

@ -1,13 +1,16 @@
#pragma once
///@file

#include "eval.hh"
#include "error.hh"

#include <string>

namespace nix {

MakeError(JSONParseError, EvalError);
class EvalState;
struct Value;

MakeError(JSONParseError, Error);

void parseJSON(EvalState & state, const std::string_view & s, Value & v);

@ -94,6 +94,9 @@ static StringToken unescapeStr(SymbolTable & symbols, char * s, size_t length)

}

// yacc generates code that uses unannotated fallthrough.
#pragma GCC diagnostic ignored "-Wimplicit-fallthrough"

#define YY_USER_INIT initLoc(yylloc)
#define YY_USER_ACTION adjustLoc(yylloc, yytext, yyleng);

@ -146,9 +149,9 @@ or { return OR_KW; }
try {
yylval->n = boost::lexical_cast<int64_t>(yytext);
} catch (const boost::bad_lexical_cast &) {
throw ParseError({
.msg = hintfmt("invalid integer '%1%'", yytext),
.errPos = state->positions[CUR_POS],
throw ParseError(ErrorInfo{
.msg = HintFmt("invalid integer '%1%'", yytext),
.pos = state->positions[CUR_POS],
});
}
return INT_LIT;
@ -156,9 +159,9 @@ or { return OR_KW; }
{FLOAT} { errno = 0;
yylval->nf = strtod(yytext, 0);
if (errno != 0)
throw ParseError({
.msg = hintfmt("invalid float '%1%'", yytext),
.errPos = state->positions[CUR_POS],
throw ParseError(ErrorInfo{
.msg = HintFmt("invalid float '%1%'", yytext),
.pos = state->positions[CUR_POS],
});
return FLOAT_LIT;
}
@ -285,9 +288,9 @@ or { return OR_KW; }

<INPATH_SLASH>{ANY} |
<INPATH_SLASH><<EOF>> {
throw ParseError({
.msg = hintfmt("path has a trailing slash"),
.errPos = state->positions[CUR_POS],
throw ParseError(ErrorInfo{
.msg = HintFmt("path has a trailing slash"),
.pos = state->positions[CUR_POS],
});
}

@ -70,10 +70,8 @@ void ExprOpHasAttr::show(const SymbolTable & symbols, std::ostream & str) const
|
||||
str << ") ? " << showAttrPath(symbols, attrPath) << ")";
|
||||
}
|
||||
|
||||
void ExprAttrs::show(const SymbolTable & symbols, std::ostream & str) const
|
||||
void ExprAttrs::showBindings(const SymbolTable & symbols, std::ostream & str) const
|
||||
{
|
||||
if (recursive) str << "rec ";
|
||||
str << "{ ";
|
||||
typedef const decltype(attrs)::value_type * Attr;
|
||||
std::vector<Attr> sorted;
|
||||
for (auto & i : attrs) sorted.push_back(&i);
|
||||
@ -81,10 +79,37 @@ void ExprAttrs::show(const SymbolTable & symbols, std::ostream & str) const
|
||||
std::string_view sa = symbols[a->first], sb = symbols[b->first];
|
||||
return sa < sb;
|
||||
});
|
||||
std::vector<Symbol> inherits;
|
||||
std::map<ExprInheritFrom *, std::vector<Symbol>> inheritsFrom;
|
||||
for (auto & i : sorted) {
|
||||
if (i->second.inherited)
|
||||
str << "inherit " << symbols[i->first] << " " << "; ";
|
||||
else {
|
||||
switch (i->second.kind) {
|
||||
case AttrDef::Kind::Plain:
|
||||
break;
|
||||
case AttrDef::Kind::Inherited:
|
||||
inherits.push_back(i->first);
|
||||
break;
|
||||
case AttrDef::Kind::InheritedFrom: {
|
||||
auto & select = dynamic_cast<ExprSelect &>(*i->second.e);
|
||||
auto & from = dynamic_cast<ExprInheritFrom &>(*select.e);
|
||||
inheritsFrom[&from].push_back(i->first);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (!inherits.empty()) {
|
||||
str << "inherit";
|
||||
for (auto sym : inherits) str << " " << symbols[sym];
|
||||
str << "; ";
|
||||
}
|
||||
for (const auto & [from, syms] : inheritsFrom) {
|
||||
str << "inherit (";
|
||||
(*inheritFromExprs)[from->displ]->show(symbols, str);
|
||||
str << ")";
|
||||
for (auto sym : syms) str << " " << symbols[sym];
|
||||
str << "; ";
|
||||
}
|
||||
for (auto & i : sorted) {
|
||||
if (i->second.kind == AttrDef::Kind::Plain) {
|
||||
str << symbols[i->first] << " = ";
|
||||
i->second.e->show(symbols, str);
|
||||
str << "; ";
|
||||
@ -97,6 +122,13 @@ void ExprAttrs::show(const SymbolTable & symbols, std::ostream & str) const
|
||||
i.valueExpr->show(symbols, str);
|
||||
str << "; ";
|
||||
}
|
||||
}
|
||||
|
||||
void ExprAttrs::show(const SymbolTable & symbols, std::ostream & str) const
|
||||
{
|
||||
if (recursive) str << "rec ";
|
||||
str << "{ ";
|
||||
showBindings(symbols, str);
|
||||
str << "}";
|
||||
}
|
||||
|
||||
@ -152,15 +184,7 @@ void ExprCall::show(const SymbolTable & symbols, std::ostream & str) const
|
||||
void ExprLet::show(const SymbolTable & symbols, std::ostream & str) const
|
||||
{
|
||||
str << "(let ";
|
||||
for (auto & i : attrs->attrs)
|
||||
if (i.second.inherited) {
|
||||
str << "inherit " << symbols[i.first] << "; ";
|
||||
}
|
||||
else {
|
||||
str << symbols[i.first] << " = ";
|
||||
i.second.e->show(symbols, str);
|
||||
str << "; ";
|
||||
}
|
||||
attrs->showBindings(symbols, str);
|
||||
str << "in ";
|
||||
body->show(symbols, str);
|
||||
str << ")";
|
||||
@ -296,15 +320,21 @@ void ExprVar::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> &
|
||||
enclosing `with'. If there is no `with', then we can issue an
|
||||
"undefined variable" error now. */
|
||||
if (withLevel == -1)
|
||||
throw UndefinedVarError({
|
||||
.msg = hintfmt("undefined variable '%1%'", es.symbols[name]),
|
||||
.errPos = es.positions[pos]
|
||||
});
|
||||
es.error<UndefinedVarError>(
|
||||
"undefined variable '%1%'",
|
||||
es.symbols[name]
|
||||
).atPos(pos).debugThrow();
|
||||
for (auto * e = env.get(); e && !fromWith; e = e->up)
|
||||
fromWith = e->isWith;
|
||||
this->level = withLevel;
|
||||
}
|
||||
|
||||
void ExprInheritFrom::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env)
|
||||
{
|
||||
if (es.debugRepl)
|
||||
es.exprEnvs.insert(std::make_pair(this, env));
|
||||
}
|
||||
|
||||
void ExprSelect::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env)
|
||||
{
|
||||
if (es.debugRepl)
|
||||
@ -328,22 +358,47 @@ void ExprOpHasAttr::bindVars(EvalState & es, const std::shared_ptr<const StaticE
|
||||
i.expr->bindVars(es, env);
|
||||
}
|
||||
|
||||
std::shared_ptr<const StaticEnv> ExprAttrs::bindInheritSources(
|
||||
EvalState & es, const std::shared_ptr<const StaticEnv> & env)
|
||||
{
|
||||
if (!inheritFromExprs)
|
||||
return nullptr;
|
||||
|
||||
// the inherit (from) source values are inserted into an env of its own, which
|
||||
// does not introduce any variable names.
|
||||
// analysis must see an empty env, or an env that contains only entries with
|
||||
// otherwise unused names to not interfere with regular names. the parser
|
||||
// has already filled all exprs that access this env with appropriate level
|
||||
// and displacement, and nothing else is allowed to access it. ideally we'd
|
||||
// not even *have* an expr that grabs anything from this env since it's fully
|
||||
// invisible, but the evaluator does not allow for this yet.
|
||||
auto inner = std::make_shared<StaticEnv>(nullptr, env.get(), 0);
|
||||
for (auto from : *inheritFromExprs)
|
||||
from->bindVars(es, env);
|
||||
|
||||
return inner;
|
||||
}
|
||||
|
||||
void ExprAttrs::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env)
|
||||
{
|
||||
if (es.debugRepl)
|
||||
es.exprEnvs.insert(std::make_pair(this, env));
|
||||
|
||||
if (recursive) {
|
||||
auto newEnv = std::make_shared<StaticEnv>(nullptr, env.get(), recursive ? attrs.size() : 0);
|
||||
auto newEnv = [&] () -> std::shared_ptr<const StaticEnv> {
|
||||
auto newEnv = std::make_shared<StaticEnv>(nullptr, env.get(), attrs.size());
|
||||
|
||||
Displacement displ = 0;
|
||||
for (auto & i : attrs)
|
||||
newEnv->vars.emplace_back(i.first, i.second.displ = displ++);
|
||||
Displacement displ = 0;
|
||||
for (auto & i : attrs)
|
||||
newEnv->vars.emplace_back(i.first, i.second.displ = displ++);
|
||||
return newEnv;
|
||||
}();
|
||||
|
||||
// No need to sort newEnv since attrs is in sorted order.
|
||||
|
||||
auto inheritFromEnv = bindInheritSources(es, newEnv);
|
||||
for (auto & i : attrs)
|
||||
i.second.e->bindVars(es, i.second.inherited ? env : newEnv);
|
||||
i.second.e->bindVars(es, i.second.chooseByKind(newEnv, env, inheritFromEnv));
|
||||
|
||||
for (auto & i : dynamicAttrs) {
|
||||
i.nameExpr->bindVars(es, newEnv);
|
||||
@ -351,8 +406,10 @@ void ExprAttrs::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv>
|
||||
}
|
||||
}
|
||||
else {
|
||||
auto inheritFromEnv = bindInheritSources(es, env);
|
||||
|
||||
for (auto & i : attrs)
|
||||
i.second.e->bindVars(es, env);
|
||||
i.second.e->bindVars(es, i.second.chooseByKind(env, env, inheritFromEnv));
|
||||
|
||||
for (auto & i : dynamicAttrs) {
|
||||
i.nameExpr->bindVars(es, env);
|
||||
@ -409,19 +466,23 @@ void ExprCall::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> &
|
||||
|
||||
void ExprLet::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env)
|
||||
{
|
||||
if (es.debugRepl)
|
||||
es.exprEnvs.insert(std::make_pair(this, env));
|
||||
auto newEnv = [&] () -> std::shared_ptr<const StaticEnv> {
|
||||
auto newEnv = std::make_shared<StaticEnv>(nullptr, env.get(), attrs->attrs.size());
|
||||
|
||||
auto newEnv = std::make_shared<StaticEnv>(nullptr, env.get(), attrs->attrs.size());
|
||||
|
||||
Displacement displ = 0;
|
||||
for (auto & i : attrs->attrs)
|
||||
newEnv->vars.emplace_back(i.first, i.second.displ = displ++);
|
||||
Displacement displ = 0;
|
||||
for (auto & i : attrs->attrs)
|
||||
newEnv->vars.emplace_back(i.first, i.second.displ = displ++);
|
||||
return newEnv;
|
||||
}();
|
||||
|
||||
// No need to sort newEnv since attrs->attrs is in sorted order.
|
||||
|
||||
auto inheritFromEnv = attrs->bindInheritSources(es, newEnv);
|
||||
for (auto & i : attrs->attrs)
|
||||
i.second.e->bindVars(es, i.second.inherited ? env : newEnv);
|
||||
i.second.e->bindVars(es, i.second.chooseByKind(newEnv, env, inheritFromEnv));
|
||||
|
||||
if (es.debugRepl)
|
||||
es.exprEnvs.insert(std::make_pair(this, newEnv));
|
||||
|
||||
body->bindVars(es, newEnv);
|
||||
}
|
||||
@ -447,9 +508,6 @@ void ExprWith::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> &
|
||||
break;
|
||||
}
|
||||
|
||||
if (es.debugRepl)
|
||||
es.exprEnvs.insert(std::make_pair(this, env));
|
||||
|
||||
attrs->bindVars(es, env);
|
||||
auto newEnv = std::make_shared<StaticEnv>(this, env.get());
|
||||
body->bindVars(es, newEnv);
|
||||
|
@ -9,110 +9,13 @@
|
||||
#include "error.hh"
|
||||
#include "chunked-vector.hh"
|
||||
#include "position.hh"
|
||||
#include "eval-error.hh"
|
||||
#include "pos-idx.hh"
|
||||
#include "pos-table.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
|
||||
MakeError(EvalError, Error);
|
||||
MakeError(ParseError, Error);
|
||||
MakeError(AssertionError, EvalError);
|
||||
MakeError(ThrownError, AssertionError);
|
||||
MakeError(Abort, EvalError);
|
||||
MakeError(TypeError, EvalError);
|
||||
MakeError(UndefinedVarError, Error);
|
||||
MakeError(MissingArgumentError, EvalError);
|
||||
|
||||
class InfiniteRecursionError : public EvalError
|
||||
{
|
||||
friend class EvalState;
|
||||
public:
|
||||
using EvalError::EvalError;
|
||||
};
|
||||
|
||||
class PosIdx {
|
||||
friend class PosTable;
|
||||
|
||||
private:
|
||||
uint32_t id;
|
||||
|
||||
explicit PosIdx(uint32_t id): id(id) {}
|
||||
|
||||
public:
|
||||
PosIdx() : id(0) {}
|
||||
|
||||
explicit operator bool() const { return id > 0; }
|
||||
|
||||
bool operator <(const PosIdx other) const { return id < other.id; }
|
||||
|
||||
bool operator ==(const PosIdx other) const { return id == other.id; }
|
||||
|
||||
bool operator !=(const PosIdx other) const { return id != other.id; }
|
||||
};
|
||||
|
||||
class PosTable
|
||||
{
|
||||
public:
|
||||
class Origin {
|
||||
friend PosTable;
|
||||
private:
|
||||
// must always be invalid by default, add() replaces this with the actual value.
|
||||
// subsequent add() calls use this index as a token to quickly check whether the
|
||||
// current origins.back() can be reused or not.
|
||||
mutable uint32_t idx = std::numeric_limits<uint32_t>::max();
|
||||
|
||||
// Used for searching in PosTable::[].
|
||||
explicit Origin(uint32_t idx): idx(idx), origin{std::monostate()} {}
|
||||
|
||||
public:
|
||||
const Pos::Origin origin;
|
||||
|
||||
Origin(Pos::Origin origin): origin(origin) {}
|
||||
};
|
||||
|
||||
struct Offset {
|
||||
uint32_t line, column;
|
||||
};
|
||||
|
||||
private:
|
||||
std::vector<Origin> origins;
|
||||
ChunkedVector<Offset, 8192> offsets;
|
||||
|
||||
public:
|
||||
PosTable(): offsets(1024)
|
||||
{
|
||||
origins.reserve(1024);
|
||||
}
|
||||
|
||||
PosIdx add(const Origin & origin, uint32_t line, uint32_t column)
|
||||
{
|
||||
const auto idx = offsets.add({line, column}).second;
|
||||
if (origins.empty() || origins.back().idx != origin.idx) {
|
||||
origin.idx = idx;
|
||||
origins.push_back(origin);
|
||||
}
|
||||
return PosIdx(idx + 1);
|
||||
}
|
||||
|
||||
Pos operator[](PosIdx p) const
|
||||
{
|
||||
if (p.id == 0 || p.id > offsets.size())
|
||||
return {};
|
||||
const auto idx = p.id - 1;
|
||||
/* we want the last key <= idx, so we'll take prev(first key > idx).
|
||||
this is guaranteed to never rewind origin.begin because the first
|
||||
key is always 0. */
|
||||
const auto pastOrigin = std::upper_bound(
|
||||
origins.begin(), origins.end(), Origin(idx),
|
||||
[] (const auto & a, const auto & b) { return a.idx < b.idx; });
|
||||
const auto origin = *std::prev(pastOrigin);
|
||||
const auto offset = offsets[idx];
|
||||
return {offset.line, offset.column, origin.origin};
|
||||
}
|
||||
};
|
||||
|
||||
inline PosIdx noPos = {};
|
||||
|
||||
|
||||
struct Env;
|
||||
struct Value;
|
||||
class EvalState;
|
||||
@ -232,6 +135,23 @@ struct ExprVar : Expr
|
||||
COMMON_METHODS
|
||||
};
|
||||
|
||||
/**
* A pseudo-expression for the purpose of evaluating the `from` expression in `inherit (from)` syntax.
* Unlike normal variable references, the displacement is set during parsing, and always refers to
* `ExprAttrs::inheritFromExprs` (by itself or in `ExprLet`), whose values are put into their own `Env`.
*/
struct ExprInheritFrom : ExprVar
{
ExprInheritFrom(PosIdx pos, Displacement displ): ExprVar(pos, {})
{
this->level = 0;
this->displ = displ;
this->fromWith = nullptr;
}

void bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env);
};
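Illustrative note (not part of the patch): a minimal Nix sketch of the construct this node exists for. `inherit (from) a b;` behaves like `a = from.a; b = from.b;`, except that `from` is evaluated once and kept in its own environment, which is what `ExprInheritFrom` indexes into.

```nix
let
  from = { a = 1; b = 2; c = 3; };
in {
  # Both attributes below are parsed as ExprSelect nodes sharing one
  # ExprInheritFrom head, so `from` is evaluated only once.
  inherit (from) a b;
}
```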
|
||||
|
||||
struct ExprSelect : Expr
|
||||
{
|
||||
PosIdx pos;
|
||||
@ -257,16 +177,40 @@ struct ExprAttrs : Expr
|
||||
bool recursive;
|
||||
PosIdx pos;
|
||||
struct AttrDef {
bool inherited;
enum class Kind {
/** `attr = expr;` */
Plain,
/** `inherit attr1 attrn;` */
Inherited,
/** `inherit (expr) attr1 attrn;` */
InheritedFrom,
};

Kind kind;
Expr * e;
PosIdx pos;
Displacement displ; // displacement
AttrDef(Expr * e, const PosIdx & pos, bool inherited=false)
: inherited(inherited), e(e), pos(pos) { };
AttrDef(Expr * e, const PosIdx & pos, Kind kind = Kind::Plain)
: kind(kind), e(e), pos(pos) { };
AttrDef() { };

template<typename T>
const T & chooseByKind(const T & plain, const T & inherited, const T & inheritedFrom) const
{
switch (kind) {
case Kind::Plain:
return plain;
case Kind::Inherited:
return inherited;
default:
case Kind::InheritedFrom:
return inheritedFrom;
}
}
};
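For orientation (not from the patch): the three `AttrDef::Kind` values correspond to the three binding forms of the Nix language, and `chooseByKind` later selects which static environment each form is bound in. A minimal expression containing all three:

```nix
let
  x = 1;
  pkgs = { hello = "hello"; };
in {
  plain = 2;            # Kind::Plain
  inherit x;            # Kind::Inherited
  inherit (pkgs) hello; # Kind::InheritedFrom
}
```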
|
||||
typedef std::map<Symbol, AttrDef> AttrDefs;
|
||||
AttrDefs attrs;
|
||||
std::unique_ptr<std::vector<Expr *>> inheritFromExprs;
|
||||
struct DynamicAttrDef {
|
||||
Expr * nameExpr, * valueExpr;
|
||||
PosIdx pos;
|
||||
@ -279,6 +223,11 @@ struct ExprAttrs : Expr
|
||||
ExprAttrs() : recursive(false) { };
|
||||
PosIdx getPos() const override { return pos; }
|
||||
COMMON_METHODS
|
||||
|
||||
std::shared_ptr<const StaticEnv> bindInheritSources(
|
||||
EvalState & es, const std::shared_ptr<const StaticEnv> & env);
|
||||
Env * buildInheritFromEnv(EvalState & state, Env & up);
|
||||
void showBindings(const SymbolTable & symbols, std::ostream & str) const;
|
||||
};
|
||||
|
||||
struct ExprList : Expr
|
||||
|
@ -64,17 +64,17 @@ struct ParserState
|
||||
inline void ParserState::dupAttr(const AttrPath & attrPath, const PosIdx pos, const PosIdx prevPos)
|
||||
{
|
||||
throw ParseError({
|
||||
.msg = hintfmt("attribute '%1%' already defined at %2%",
|
||||
.msg = HintFmt("attribute '%1%' already defined at %2%",
|
||||
showAttrPath(symbols, attrPath), positions[prevPos]),
|
||||
.errPos = positions[pos]
|
||||
.pos = positions[pos]
|
||||
});
|
||||
}
|
||||
|
||||
inline void ParserState::dupAttr(Symbol attr, const PosIdx pos, const PosIdx prevPos)
|
||||
{
|
||||
throw ParseError({
|
||||
.msg = hintfmt("attribute '%1%' already defined at %2%", symbols[attr], positions[prevPos]),
|
||||
.errPos = positions[pos]
|
||||
.msg = HintFmt("attribute '%1%' already defined at %2%", symbols[attr], positions[prevPos]),
|
||||
.pos = positions[pos]
|
||||
});
|
||||
}
|
||||
|
||||
@ -89,7 +89,7 @@ inline void ParserState::addAttr(ExprAttrs * attrs, AttrPath && attrPath, Expr *
|
||||
if (i->symbol) {
|
||||
ExprAttrs::AttrDefs::iterator j = attrs->attrs.find(i->symbol);
|
||||
if (j != attrs->attrs.end()) {
|
||||
if (!j->second.inherited) {
|
||||
if (j->second.kind != ExprAttrs::AttrDef::Kind::Inherited) {
|
||||
ExprAttrs * attrs2 = dynamic_cast<ExprAttrs *>(j->second.e);
|
||||
if (!attrs2) dupAttr(attrPath, pos, j->second.pos);
|
||||
attrs = attrs2;
|
||||
@ -118,13 +118,24 @@ inline void ParserState::addAttr(ExprAttrs * attrs, AttrPath && attrPath, Expr *
|
||||
auto ae = dynamic_cast<ExprAttrs *>(e);
|
||||
auto jAttrs = dynamic_cast<ExprAttrs *>(j->second.e);
|
||||
if (jAttrs && ae) {
|
||||
if (ae->inheritFromExprs && !jAttrs->inheritFromExprs)
|
||||
jAttrs->inheritFromExprs = std::make_unique<std::vector<Expr *>>();
|
||||
for (auto & ad : ae->attrs) {
|
||||
auto j2 = jAttrs->attrs.find(ad.first);
|
||||
if (j2 != jAttrs->attrs.end()) // Attr already defined in iAttrs, error.
|
||||
dupAttr(ad.first, j2->second.pos, ad.second.pos);
|
||||
jAttrs->attrs.emplace(ad.first, ad.second);
|
||||
if (ad.second.kind == ExprAttrs::AttrDef::Kind::InheritedFrom) {
|
||||
auto & sel = dynamic_cast<ExprSelect &>(*ad.second.e);
|
||||
auto & from = dynamic_cast<ExprInheritFrom &>(*sel.e);
|
||||
from.displ += jAttrs->inheritFromExprs->size();
|
||||
}
|
||||
}
|
||||
jAttrs->dynamicAttrs.insert(jAttrs->dynamicAttrs.end(), ae->dynamicAttrs.begin(), ae->dynamicAttrs.end());
|
||||
if (ae->inheritFromExprs) {
|
||||
jAttrs->inheritFromExprs->insert(jAttrs->inheritFromExprs->end(),
|
||||
ae->inheritFromExprs->begin(), ae->inheritFromExprs->end());
|
||||
}
|
||||
} else {
|
||||
dupAttr(attrPath, pos, j->second.pos);
|
||||
}
|
||||
@ -154,14 +165,14 @@ inline Formals * ParserState::validateFormals(Formals * formals, PosIdx pos, Sym
|
||||
}
|
||||
if (duplicate)
|
||||
throw ParseError({
|
||||
.msg = hintfmt("duplicate formal function argument '%1%'", symbols[duplicate->first]),
|
||||
.errPos = positions[duplicate->second]
|
||||
.msg = HintFmt("duplicate formal function argument '%1%'", symbols[duplicate->first]),
|
||||
.pos = positions[duplicate->second]
|
||||
});
|
||||
|
||||
if (arg && formals->has(arg))
|
||||
throw ParseError({
|
||||
.msg = hintfmt("duplicate formal function argument '%1%'", symbols[arg]),
|
||||
.errPos = positions[pos]
|
||||
.msg = HintFmt("duplicate formal function argument '%1%'", symbols[arg]),
|
||||
.pos = positions[pos]
|
||||
});
|
||||
|
||||
return formals;
|
||||
|
@ -65,8 +65,8 @@ using namespace nix;
|
||||
void yyerror(YYLTYPE * loc, yyscan_t scanner, ParserState * state, const char * error)
|
||||
{
|
||||
throw ParseError({
|
||||
.msg = hintfmt(error),
|
||||
.errPos = state->positions[state->at(*loc)]
|
||||
.msg = HintFmt(error),
|
||||
.pos = state->positions[state->at(*loc)]
|
||||
});
|
||||
}
|
||||
|
||||
@ -154,8 +154,8 @@ expr_function
|
||||
| LET binds IN_KW expr_function
|
||||
{ if (!$2->dynamicAttrs.empty())
|
||||
throw ParseError({
|
||||
.msg = hintfmt("dynamic attributes not allowed in let"),
|
||||
.errPos = state->positions[CUR_POS]
|
||||
.msg = HintFmt("dynamic attributes not allowed in let"),
|
||||
.pos = state->positions[CUR_POS]
|
||||
});
|
||||
$$ = new ExprLet($2, $4);
|
||||
}
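Illustration (not part of the change): the parse error this guard produces, and the static form that is accepted.

```nix
# Rejected at parse time with "dynamic attributes not allowed in let":
#   let ${"x"} = 1; in x
# A statically named binding is accepted:
let x = 1; in x
```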
|
||||
@ -244,8 +244,8 @@ expr_simple
|
||||
static bool noURLLiterals = experimentalFeatureSettings.isEnabled(Xp::NoUrlLiterals);
|
||||
if (noURLLiterals)
|
||||
throw ParseError({
|
||||
.msg = hintfmt("URL literals are disabled"),
|
||||
.errPos = state->positions[CUR_POS]
|
||||
.msg = HintFmt("URL literals are disabled"),
|
||||
.pos = state->positions[CUR_POS]
|
||||
});
|
||||
$$ = new ExprString(std::string($1));
|
||||
}
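For context (not from the patch): with the `no-url-literals` experimental feature enabled, a bare URL literal is rejected here and must be written as a string.

```nix
# With no-url-literals:  let homepage = https://nixos.org; in homepage  # parse error
let homepage = "https://nixos.org"; in homepage
```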
|
||||
@ -313,17 +313,27 @@ binds
|
||||
if ($$->attrs.find(i.symbol) != $$->attrs.end())
|
||||
state->dupAttr(i.symbol, state->at(@3), $$->attrs[i.symbol].pos);
|
||||
auto pos = state->at(@3);
|
||||
$$->attrs.emplace(i.symbol, ExprAttrs::AttrDef(new ExprVar(CUR_POS, i.symbol), pos, true));
|
||||
$$->attrs.emplace(
|
||||
i.symbol,
|
||||
ExprAttrs::AttrDef(new ExprVar(CUR_POS, i.symbol), pos, ExprAttrs::AttrDef::Kind::Inherited));
|
||||
}
|
||||
delete $3;
|
||||
}
|
||||
| binds INHERIT '(' expr ')' attrs ';'
|
||||
{ $$ = $1;
|
||||
/* !!! Should ensure sharing of the expression in $4. */
|
||||
if (!$$->inheritFromExprs)
|
||||
$$->inheritFromExprs = std::make_unique<std::vector<Expr *>>();
|
||||
$$->inheritFromExprs->push_back($4);
|
||||
auto from = new nix::ExprInheritFrom(state->at(@4), $$->inheritFromExprs->size() - 1);
|
||||
for (auto & i : *$6) {
|
||||
if ($$->attrs.find(i.symbol) != $$->attrs.end())
|
||||
state->dupAttr(i.symbol, state->at(@6), $$->attrs[i.symbol].pos);
|
||||
$$->attrs.emplace(i.symbol, ExprAttrs::AttrDef(new ExprSelect(CUR_POS, $4, i.symbol), state->at(@6)));
|
||||
$$->attrs.emplace(
|
||||
i.symbol,
|
||||
ExprAttrs::AttrDef(
|
||||
new ExprSelect(CUR_POS, from, i.symbol),
|
||||
state->at(@6),
|
||||
ExprAttrs::AttrDef::Kind::InheritedFrom));
|
||||
}
|
||||
delete $6;
|
||||
}
|
||||
@ -340,8 +350,8 @@ attrs
|
||||
delete str;
|
||||
} else
|
||||
throw ParseError({
|
||||
.msg = hintfmt("dynamic attributes not allowed in inherit"),
|
||||
.errPos = state->positions[state->at(@2)]
|
||||
.msg = HintFmt("dynamic attributes not allowed in inherit"),
|
||||
.pos = state->positions[state->at(@2)]
|
||||
});
|
||||
}
|
||||
| { $$ = new AttrPath; }
|
||||
|
@ -1,5 +1,4 @@
|
||||
#include "eval.hh"
|
||||
#include "fs-input-accessor.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
@ -8,4 +7,9 @@ SourcePath EvalState::rootPath(CanonPath path)
|
||||
return {rootFS, std::move(path)};
|
||||
}
|
||||
|
||||
SourcePath EvalState::rootPath(PathView path)
|
||||
{
|
||||
return {rootFS, CanonPath(absPath(path))};
|
||||
}
|
||||
|
||||
}
|
||||
|
48
src/libexpr/pos-idx.hh
Normal file
@ -0,0 +1,48 @@
|
||||
#pragma once
|
||||
|
||||
#include <cinttypes>
|
||||
|
||||
namespace nix {
|
||||
|
||||
class PosIdx
|
||||
{
|
||||
friend class PosTable;
|
||||
|
||||
private:
|
||||
uint32_t id;
|
||||
|
||||
explicit PosIdx(uint32_t id)
|
||||
: id(id)
|
||||
{
|
||||
}
|
||||
|
||||
public:
|
||||
PosIdx()
|
||||
: id(0)
|
||||
{
|
||||
}
|
||||
|
||||
explicit operator bool() const
|
||||
{
|
||||
return id > 0;
|
||||
}
|
||||
|
||||
bool operator<(const PosIdx other) const
|
||||
{
|
||||
return id < other.id;
|
||||
}
|
||||
|
||||
bool operator==(const PosIdx other) const
|
||||
{
|
||||
return id == other.id;
|
||||
}
|
||||
|
||||
bool operator!=(const PosIdx other) const
|
||||
{
|
||||
return id != other.id;
|
||||
}
|
||||
};
|
||||
|
||||
inline PosIdx noPos = {};
|
||||
|
||||
}
|
83
src/libexpr/pos-table.hh
Normal file
@ -0,0 +1,83 @@
|
||||
#pragma once
|
||||
|
||||
#include <cinttypes>
|
||||
#include <numeric>
|
||||
#include <vector>
|
||||
|
||||
#include "chunked-vector.hh"
|
||||
#include "pos-idx.hh"
|
||||
#include "position.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
class PosTable
|
||||
{
|
||||
public:
|
||||
class Origin
|
||||
{
|
||||
friend PosTable;
|
||||
private:
|
||||
// must always be invalid by default, add() replaces this with the actual value.
|
||||
// subsequent add() calls use this index as a token to quickly check whether the
|
||||
// current origins.back() can be reused or not.
|
||||
mutable uint32_t idx = std::numeric_limits<uint32_t>::max();
|
||||
|
||||
// Used for searching in PosTable::[].
|
||||
explicit Origin(uint32_t idx)
|
||||
: idx(idx)
|
||||
, origin{std::monostate()}
|
||||
{
|
||||
}
|
||||
|
||||
public:
|
||||
const Pos::Origin origin;
|
||||
|
||||
Origin(Pos::Origin origin)
|
||||
: origin(origin)
|
||||
{
|
||||
}
|
||||
};
|
||||
|
||||
struct Offset
|
||||
{
|
||||
uint32_t line, column;
|
||||
};
|
||||
|
||||
private:
|
||||
std::vector<Origin> origins;
|
||||
ChunkedVector<Offset, 8192> offsets;
|
||||
|
||||
public:
|
||||
PosTable()
|
||||
: offsets(1024)
|
||||
{
|
||||
origins.reserve(1024);
|
||||
}
|
||||
|
||||
PosIdx add(const Origin & origin, uint32_t line, uint32_t column)
|
||||
{
|
||||
const auto idx = offsets.add({line, column}).second;
|
||||
if (origins.empty() || origins.back().idx != origin.idx) {
|
||||
origin.idx = idx;
|
||||
origins.push_back(origin);
|
||||
}
|
||||
return PosIdx(idx + 1);
|
||||
}
|
||||
|
||||
Pos operator[](PosIdx p) const
|
||||
{
|
||||
if (p.id == 0 || p.id > offsets.size())
|
||||
return {};
|
||||
const auto idx = p.id - 1;
|
||||
/* we want the last key <= idx, so we'll take prev(first key > idx).
|
||||
this is guaranteed to never rewind origin.begin because the first
|
||||
key is always 0. */
|
||||
const auto pastOrigin = std::upper_bound(
|
||||
origins.begin(), origins.end(), Origin(idx), [](const auto & a, const auto & b) { return a.idx < b.idx; });
|
||||
const auto origin = *std::prev(pastOrigin);
|
||||
const auto offset = offsets[idx];
|
||||
return {offset.line, offset.column, origin.origin};
|
||||
}
|
||||
};
|
||||
|
||||
}
|
@ -39,10 +39,6 @@ namespace nix {
|
||||
* Miscellaneous
|
||||
*************************************************************/
|
||||
|
||||
|
||||
InvalidPathError::InvalidPathError(const Path & path) :
|
||||
EvalError("path '%s' is not valid", path), path(path) {}
|
||||
|
||||
StringMap EvalState::realiseContext(const NixStringContext & context)
|
||||
{
|
||||
std::vector<DerivedPath::Built> drvs;
|
||||
@ -51,7 +47,7 @@ StringMap EvalState::realiseContext(const NixStringContext & context)
|
||||
for (auto & c : context) {
|
||||
auto ensureValid = [&](const StorePath & p) {
|
||||
if (!store->isValidPath(p))
|
||||
debugThrowLastTrace(InvalidPathError(store->printStorePath(p)));
|
||||
error<InvalidPathError>(store->printStorePath(p)).debugThrow();
|
||||
};
|
||||
std::visit(overloaded {
|
||||
[&](const NixStringContextElem::Built & b) {
|
||||
@ -78,9 +74,10 @@ StringMap EvalState::realiseContext(const NixStringContext & context)
|
||||
if (drvs.empty()) return {};
|
||||
|
||||
if (!evalSettings.enableImportFromDerivation)
|
||||
debugThrowLastTrace(Error(
|
||||
error<EvalError>(
|
||||
"cannot build '%1%' during evaluation because the option 'allow-import-from-derivation' is disabled",
|
||||
drvs.begin()->to_string(*store)));
|
||||
drvs.begin()->to_string(*store)
|
||||
).debugThrow();
|
||||
|
||||
/* Build/substitute the context. */
|
||||
std::vector<DerivedPath> buildReqs;
|
||||
@ -118,7 +115,7 @@ StringMap EvalState::realiseContext(const NixStringContext & context)
|
||||
return res;
|
||||
}
|
||||
|
||||
static SourcePath realisePath(EvalState & state, const PosIdx pos, Value & v, bool resolveSymlinks = true)
|
||||
static SourcePath realisePath(EvalState & state, const PosIdx pos, Value & v, std::optional<SymlinkResolution> resolveSymlinks = SymlinkResolution::Full)
|
||||
{
|
||||
NixStringContext context;
|
||||
|
||||
@ -130,7 +127,7 @@ static SourcePath realisePath(EvalState & state, const PosIdx pos, Value & v, bo
|
||||
auto realPath = state.toRealPath(rewriteStrings(path.path.abs(), rewrites), context);
|
||||
path = {path.accessor, CanonPath(realPath)};
|
||||
}
|
||||
return resolveSymlinks ? path.resolveSymlinks() : path;
|
||||
return resolveSymlinks ? path.resolveSymlinks(*resolveSymlinks) : path;
|
||||
} catch (Error & e) {
|
||||
e.addTrace(state.positions[pos], "while realising the context of path '%s'", path);
|
||||
throw;
|
||||
@ -170,7 +167,7 @@ static void mkOutputString(
|
||||
argument. */
|
||||
static void import(EvalState & state, const PosIdx pos, Value & vPath, Value * vScope, Value & v)
|
||||
{
|
||||
auto path = realisePath(state, pos, vPath, false);
|
||||
auto path = realisePath(state, pos, vPath, std::nullopt);
|
||||
auto path2 = path.path.abs();
|
||||
|
||||
// FIXME
|
||||
@ -340,16 +337,16 @@ void prim_importNative(EvalState & state, const PosIdx pos, Value * * args, Valu
|
||||
|
||||
void *handle = dlopen(path.path.c_str(), RTLD_LAZY | RTLD_LOCAL);
|
||||
if (!handle)
|
||||
state.debugThrowLastTrace(EvalError("could not open '%1%': %2%", path, dlerror()));
|
||||
state.error<EvalError>("could not open '%1%': %2%", path, dlerror()).debugThrow();
|
||||
|
||||
dlerror();
|
||||
ValueInitializer func = (ValueInitializer) dlsym(handle, sym.c_str());
|
||||
if(!func) {
|
||||
char *message = dlerror();
|
||||
if (message)
|
||||
state.debugThrowLastTrace(EvalError("could not load symbol '%1%' from '%2%': %3%", sym, path, message));
|
||||
state.error<EvalError>("could not load symbol '%1%' from '%2%': %3%", sym, path, message).debugThrow();
|
||||
else
|
||||
state.debugThrowLastTrace(EvalError("symbol '%1%' from '%2%' resolved to NULL when a function pointer was expected", sym, path));
|
||||
state.error<EvalError>("symbol '%1%' from '%2%' resolved to NULL when a function pointer was expected", sym, path).debugThrow();
|
||||
}
|
||||
|
||||
(func)(state, v);
|
||||
@ -365,7 +362,7 @@ void prim_exec(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||
auto elems = args[0]->listElems();
|
||||
auto count = args[0]->listSize();
|
||||
if (count == 0)
|
||||
state.error("at least one argument to 'exec' required").atPos(pos).debugThrow<EvalError>();
|
||||
state.error<EvalError>("at least one argument to 'exec' required").atPos(pos).debugThrow();
|
||||
NixStringContext context;
|
||||
auto program = state.coerceToString(pos, *elems[0], context,
|
||||
"while evaluating the first element of the argument passed to builtins.exec",
|
||||
@ -380,7 +377,7 @@ void prim_exec(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||
try {
|
||||
auto _ = state.realiseContext(context); // FIXME: Handle CA derivations
|
||||
} catch (InvalidPathError & e) {
|
||||
state.error("cannot execute '%1%', since path '%2%' is not valid", program, e.path).atPos(pos).debugThrow<EvalError>();
|
||||
state.error<EvalError>("cannot execute '%1%', since path '%2%' is not valid", program, e.path).atPos(pos).debugThrow();
|
||||
}
|
||||
|
||||
auto output = runProgram(program, true, commandArgs);
|
||||
@ -582,7 +579,7 @@ struct CompareValues
|
||||
if (v1->type() == nInt && v2->type() == nFloat)
|
||||
return v1->integer < v2->fpoint;
|
||||
if (v1->type() != v2->type())
|
||||
state.error("cannot compare %s with %s", showType(*v1), showType(*v2)).debugThrow<EvalError>();
|
||||
state.error<EvalError>("cannot compare %s with %s", showType(*v1), showType(*v2)).debugThrow();
|
||||
// Allow selecting a subset of enum values
|
||||
#pragma GCC diagnostic push
|
||||
#pragma GCC diagnostic ignored "-Wswitch-enum"
|
||||
@ -610,7 +607,7 @@ struct CompareValues
|
||||
}
|
||||
}
|
||||
default:
|
||||
state.error("cannot compare %s with %s; values of that type are incomparable", showType(*v1), showType(*v2)).debugThrow<EvalError>();
|
||||
state.error<EvalError>("cannot compare %s with %s; values of that type are incomparable", showType(*v1), showType(*v2)).debugThrow();
|
||||
#pragma GCC diagnostic pop
|
||||
}
|
||||
} catch (Error & e) {
|
||||
@ -637,7 +634,7 @@ static Bindings::iterator getAttr(
|
||||
{
|
||||
Bindings::iterator value = attrSet->find(attrSym);
|
||||
if (value == attrSet->end()) {
|
||||
state.error("attribute '%s' missing", state.symbols[attrSym]).withTrace(noPos, errorCtx).debugThrow<TypeError>();
|
||||
state.error<TypeError>("attribute '%s' missing", state.symbols[attrSym]).withTrace(noPos, errorCtx).debugThrow();
|
||||
}
|
||||
return value;
|
||||
}
|
||||
@ -708,38 +705,53 @@ static RegisterPrimOp primop_genericClosure(PrimOp {
|
||||
.args = {"attrset"},
|
||||
.arity = 1,
|
||||
.doc = R"(
|
||||
Take an *attrset* with values named `startSet` and `operator` in order to
return a *list of attrsets* by starting with the `startSet` and recursively
applying the `operator` function to each `item`. The *attrsets* in the
`startSet` and the *attrsets* produced by `operator` must contain a value
named `key` which is comparable. The result is produced by calling `operator`
for each `item` with a value for `key` that has not been called yet including
newly produced `item`s. The function terminates when no new `item`s are
produced. The resulting *list of attrsets* contains only *attrsets* with a
unique key. For example,
`builtins.genericClosure` iteratively computes the transitive closure over an arbitrary relation defined by a function.

```
builtins.genericClosure {
startSet = [ {key = 5;} ];
operator = item: [{
key = if (item.key / 2 ) * 2 == item.key
then item.key / 2
else 3 * item.key + 1;
}];
}
```
evaluates to
```
[ { key = 5; } { key = 16; } { key = 8; } { key = 4; } { key = 2; } { key = 1; } ]
```
It takes an *attrset* with two attributes named `startSet` and `operator`, and returns a list of attribute sets:

- `startSet`:
The initial list of attribute sets.

- `operator`:
A function that takes an attribute set and returns a list of attribute sets.
It defines how each item in the current set is processed and expanded into more items.

Each attribute set in the list `startSet` and the list returned by `operator` must have an attribute `key`, which must support equality comparison.
The value of `key` can be one of the following types:

- [Number](@docroot@/language/values.md#type-number)
- [Boolean](@docroot@/language/values.md#type-boolean)
- [String](@docroot@/language/values.md#type-string)
- [Path](@docroot@/language/values.md#type-path)
- [List](@docroot@/language/values.md#list)

The result is produced by calling the `operator` on each `item` that has not been called yet, including newly added items, until no new items are added.
Items are compared by their `key` attribute.

Common usages are:

- Generating unique collections of items, such as dependency graphs.
- Traversing through structures that may contain cycles or loops.
- Processing data structures with complex internal relationships.

> **Example**
>
> ```nix
> builtins.genericClosure {
>   startSet = [ {key = 5;} ];
>   operator = item: [{
>     key = if (item.key / 2 ) * 2 == item.key
>       then item.key / 2
>       else 3 * item.key + 1;
>   }];
> }
> ```
>
> evaluates to
>
> ```nix
> [ { key = 5; } { key = 16; } { key = 8; } { key = 4; } { key = 2; } { key = 1; } ]
> ```
)",
|
||||
.fun = prim_genericClosure,
|
||||
});
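A further illustrative use (not part of the documentation above; the dependency graph is hypothetical): computing the reachable set of a small graph.

```nix
builtins.genericClosure {
  startSet = [ { key = "a"; } ];
  operator = item:
    map (dep: { key = dep; }) ({
      a = [ "b" "c" ];
      b = [ "c" ];
      c = [ ];
    }.${item.key});
}
# => [ { key = "a"; } { key = "b"; } { key = "c"; } ]
```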
|
||||
@ -757,21 +769,12 @@ static RegisterPrimOp primop_break({
|
||||
if (state.debugRepl && !state.debugTraces.empty()) {
|
||||
auto error = Error(ErrorInfo {
|
||||
.level = lvlInfo,
|
||||
.msg = hintfmt("breakpoint reached"),
|
||||
.errPos = state.positions[pos],
|
||||
.msg = HintFmt("breakpoint reached"),
|
||||
.pos = state.positions[pos],
|
||||
});
|
||||
|
||||
auto & dt = state.debugTraces.front();
|
||||
state.runDebugRepl(&error, dt.env, dt.expr);
|
||||
|
||||
if (state.debugQuit) {
|
||||
// If the user elects to quit the repl, throw an exception.
|
||||
throw Error(ErrorInfo{
|
||||
.level = lvlInfo,
|
||||
.msg = hintfmt("quit the debugger"),
|
||||
.errPos = nullptr,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Return the value we were passed.
|
||||
@ -790,7 +793,7 @@ static RegisterPrimOp primop_abort({
|
||||
NixStringContext context;
|
||||
auto s = state.coerceToString(pos, *args[0], context,
|
||||
"while evaluating the error message passed to builtins.abort").toOwned();
|
||||
state.debugThrowLastTrace(Abort("evaluation aborted with the following error message: '%1%'", s));
|
||||
state.error<Abort>("evaluation aborted with the following error message: '%1%'", s).debugThrow();
|
||||
}
|
||||
});
|
||||
|
||||
@ -809,7 +812,7 @@ static RegisterPrimOp primop_throw({
|
||||
NixStringContext context;
|
||||
auto s = state.coerceToString(pos, *args[0], context,
|
||||
"while evaluating the error message passed to builtin.throw").toOwned();
|
||||
state.debugThrowLastTrace(ThrownError(s));
|
||||
state.error<ThrownError>(s).debugThrow();
|
||||
}
|
||||
});
|
||||
|
||||
@ -823,7 +826,7 @@ static void prim_addErrorContext(EvalState & state, const PosIdx pos, Value * *
|
||||
auto message = state.coerceToString(pos, *args[0], context,
|
||||
"while evaluating the error message passed to builtins.addErrorContext",
|
||||
false, false).toOwned();
|
||||
e.addTrace(nullptr, hintfmt(message), true);
|
||||
e.addTrace(nullptr, HintFmt(message));
|
||||
throw;
|
||||
}
|
||||
}
|
||||
@ -882,7 +885,7 @@ static void prim_tryEval(EvalState & state, const PosIdx pos, Value * * args, Va
|
||||
/* increment state.trylevel, and decrement it when this function returns. */
|
||||
MaintainCount trylevel(state.trylevel);
|
||||
|
||||
void (* savedDebugRepl)(ref<EvalState> es, const ValMap & extraEnv) = nullptr;
|
||||
ReplExitStatus (* savedDebugRepl)(ref<EvalState> es, const ValMap & extraEnv) = nullptr;
|
||||
if (state.debugRepl && evalSettings.ignoreExceptionsDuringTry)
|
||||
{
|
||||
/* to prevent starting the repl from exceptions within a tryEval, null it. */
|
||||
@ -998,6 +1001,10 @@ static void prim_trace(EvalState & state, const PosIdx pos, Value * * args, Valu
|
||||
printError("trace: %1%", args[0]->string_view());
|
||||
else
|
||||
printError("trace: %1%", ValuePrinter(state, *args[0]));
|
||||
if (evalSettings.builtinsTraceDebugger && state.debugRepl && !state.debugTraces.empty()) {
|
||||
const DebugTrace & last = state.debugTraces.front();
|
||||
state.runDebugRepl(nullptr, last.env, last.expr);
|
||||
}
|
||||
state.forceValue(*args[1], pos);
|
||||
v = *args[1];
|
||||
}
|
||||
@ -1009,6 +1016,12 @@ static RegisterPrimOp primop_trace({
|
||||
Evaluate *e1* and print its abstract syntax representation on
|
||||
standard error. Then return *e2*. This function is useful for
|
||||
debugging.
|
||||
|
||||
If the
[`debugger-on-trace`](@docroot@/command-ref/conf-file.md#conf-debugger-on-trace)
option is set to `true` and the `--debugger` flag is given, the
interactive debugger will be started when `trace` is called (like
[`break`](@docroot@/language/builtins.md#builtins-break)).
)",
|
||||
.fun = prim_trace,
|
||||
});
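A quick usage sketch (not from the patch), assuming default settings; with `debugger-on-trace = true` and `--debugger`, the same call would stop in the debugger instead of only printing.

```nix
let x = 2 + 2;
in builtins.trace "x = ${toString x}" x
# prints "trace: x = 4" to stderr and evaluates to 4
```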
|
||||
@ -1074,10 +1087,10 @@ static void prim_derivationStrict(EvalState & state, const PosIdx pos, Value * *
|
||||
* often results from the composition of several functions
|
||||
* (derivationStrict, derivation, mkDerivation, mkPythonModule, etc.)
|
||||
*/
|
||||
e.addTrace(nullptr, hintfmt(
|
||||
e.addTrace(nullptr, HintFmt(
|
||||
"while evaluating derivation '%s'\n"
|
||||
" whose name attribute is located at %s",
|
||||
drvName, pos), true);
|
||||
drvName, pos));
|
||||
throw;
|
||||
}
|
||||
}
|
||||
@ -1088,9 +1101,10 @@ drvName, Bindings * attrs, Value & v)
|
||||
/* Check whether attributes should be passed as a JSON file. */
|
||||
using nlohmann::json;
|
||||
std::optional<json> jsonObject;
|
||||
auto pos = v.determinePos(noPos);
|
||||
auto attr = attrs->find(state.sStructuredAttrs);
|
||||
if (attr != attrs->end() &&
|
||||
state.forceBool(*attr->value, noPos,
|
||||
state.forceBool(*attr->value, pos,
|
||||
"while evaluating the `__structuredAttrs` "
|
||||
"attribute passed to builtins.derivationStrict"))
|
||||
jsonObject = json::object();
|
||||
@ -1099,7 +1113,7 @@ drvName, Bindings * attrs, Value & v)
|
||||
bool ignoreNulls = false;
|
||||
attr = attrs->find(state.sIgnoreNulls);
|
||||
if (attr != attrs->end())
|
||||
ignoreNulls = state.forceBool(*attr->value, noPos, "while evaluating the `__ignoreNulls` attribute " "passed to builtins.derivationStrict");
|
||||
ignoreNulls = state.forceBool(*attr->value, pos, "while evaluating the `__ignoreNulls` attribute " "passed to builtins.derivationStrict");
|
||||
|
||||
/* Build the derivation expression by processing the attributes. */
|
||||
Derivation drv;
|
||||
@ -1124,41 +1138,40 @@ drvName, Bindings * attrs, Value & v)
|
||||
auto handleHashMode = [&](const std::string_view s) {
|
||||
if (s == "recursive") ingestionMethod = FileIngestionMethod::Recursive;
|
||||
else if (s == "flat") ingestionMethod = FileIngestionMethod::Flat;
|
||||
else if (s == "text") {
|
||||
else if (s == "git") {
|
||||
experimentalFeatureSettings.require(Xp::GitHashing);
|
||||
ingestionMethod = FileIngestionMethod::Git;
|
||||
} else if (s == "text") {
|
||||
experimentalFeatureSettings.require(Xp::DynamicDerivations);
|
||||
ingestionMethod = TextIngestionMethod {};
|
||||
} else
|
||||
state.debugThrowLastTrace(EvalError({
|
||||
.msg = hintfmt("invalid value '%s' for 'outputHashMode' attribute", s),
|
||||
.errPos = state.positions[noPos]
|
||||
}));
|
||||
state.error<EvalError>(
|
||||
"invalid value '%s' for 'outputHashMode' attribute", s
|
||||
).atPos(v).debugThrow();
|
||||
};
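Sketch of how the new `git` mode would be selected from the language side (not part of this hunk; it assumes the `git-hashing` experimental feature is enabled, and the system, builder and hash values are placeholders only).

```nix
derivation {
  name = "example-src";
  system = "x86_64-linux";
  builder = "/bin/sh";
  args = [ "-c" ": placeholder builder" ];
  outputHashMode = "git";   # newly accepted alongside "flat", "recursive" and "text"
  outputHashAlgo = "sha1";
  outputHash = "sha1-2jmj7l5rSw0yVb/vlWAYkK/YBwk=";  # placeholder value
}
```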
|
||||
|
||||
auto handleOutputs = [&](const Strings & ss) {
|
||||
outputs.clear();
|
||||
for (auto & j : ss) {
|
||||
if (outputs.find(j) != outputs.end())
|
||||
state.debugThrowLastTrace(EvalError({
|
||||
.msg = hintfmt("duplicate derivation output '%1%'", j),
|
||||
.errPos = state.positions[noPos]
|
||||
}));
|
||||
state.error<EvalError>("duplicate derivation output '%1%'", j)
|
||||
.atPos(v)
|
||||
.debugThrow();
|
||||
/* !!! Check whether j is a valid attribute
|
||||
name. */
|
||||
/* Derivations cannot be named ‘drv’, because
|
||||
then we'd have an attribute ‘drvPath’ in
|
||||
the resulting set. */
|
||||
if (j == "drv")
|
||||
state.debugThrowLastTrace(EvalError({
|
||||
.msg = hintfmt("invalid derivation output name 'drv'" ),
|
||||
.errPos = state.positions[noPos]
|
||||
}));
|
||||
state.error<EvalError>("invalid derivation output name 'drv'")
|
||||
.atPos(v)
|
||||
.debugThrow();
|
||||
outputs.insert(j);
|
||||
}
|
||||
if (outputs.empty())
|
||||
state.debugThrowLastTrace(EvalError({
|
||||
.msg = hintfmt("derivation cannot have an empty set of outputs"),
|
||||
.errPos = state.positions[noPos]
|
||||
}));
|
||||
state.error<EvalError>("derivation cannot have an empty set of outputs")
|
||||
.atPos(v)
|
||||
.debugThrow();
|
||||
};
|
||||
|
||||
try {
|
||||
@ -1167,16 +1180,16 @@ drvName, Bindings * attrs, Value & v)
|
||||
const std::string_view context_below("");
|
||||
|
||||
if (ignoreNulls) {
|
||||
state.forceValue(*i->value, noPos);
|
||||
state.forceValue(*i->value, pos);
|
||||
if (i->value->type() == nNull) continue;
|
||||
}
|
||||
|
||||
if (i->name == state.sContentAddressed && state.forceBool(*i->value, noPos, context_below)) {
|
||||
if (i->name == state.sContentAddressed && state.forceBool(*i->value, pos, context_below)) {
|
||||
contentAddressed = true;
|
||||
experimentalFeatureSettings.require(Xp::CaDerivations);
|
||||
}
|
||||
|
||||
else if (i->name == state.sImpure && state.forceBool(*i->value, noPos, context_below)) {
|
||||
else if (i->name == state.sImpure && state.forceBool(*i->value, pos, context_below)) {
|
||||
isImpure = true;
|
||||
experimentalFeatureSettings.require(Xp::ImpureDerivations);
|
||||
}
|
||||
@ -1184,9 +1197,9 @@ drvName, Bindings * attrs, Value & v)
|
||||
/* The `args' attribute is special: it supplies the
|
||||
command-line arguments to the builder. */
|
||||
else if (i->name == state.sArgs) {
|
||||
state.forceList(*i->value, noPos, context_below);
|
||||
state.forceList(*i->value, pos, context_below);
|
||||
for (auto elem : i->value->listItems()) {
|
||||
auto s = state.coerceToString(noPos, *elem, context,
|
||||
auto s = state.coerceToString(pos, *elem, context,
|
||||
"while evaluating an element of the argument list",
|
||||
true).toOwned();
|
||||
drv.args.push_back(s);
|
||||
@ -1201,29 +1214,29 @@ drvName, Bindings * attrs, Value & v)
|
||||
|
||||
if (i->name == state.sStructuredAttrs) continue;
|
||||
|
||||
(*jsonObject)[key] = printValueAsJSON(state, true, *i->value, noPos, context);
|
||||
(*jsonObject)[key] = printValueAsJSON(state, true, *i->value, pos, context);
|
||||
|
||||
if (i->name == state.sBuilder)
|
||||
drv.builder = state.forceString(*i->value, context, noPos, context_below);
|
||||
drv.builder = state.forceString(*i->value, context, pos, context_below);
|
||||
else if (i->name == state.sSystem)
|
||||
drv.platform = state.forceStringNoCtx(*i->value, noPos, context_below);
|
||||
drv.platform = state.forceStringNoCtx(*i->value, pos, context_below);
|
||||
else if (i->name == state.sOutputHash)
|
||||
outputHash = state.forceStringNoCtx(*i->value, noPos, context_below);
|
||||
outputHash = state.forceStringNoCtx(*i->value, pos, context_below);
|
||||
else if (i->name == state.sOutputHashAlgo)
|
||||
outputHashAlgo = state.forceStringNoCtx(*i->value, noPos, context_below);
|
||||
outputHashAlgo = state.forceStringNoCtx(*i->value, pos, context_below);
|
||||
else if (i->name == state.sOutputHashMode)
|
||||
handleHashMode(state.forceStringNoCtx(*i->value, noPos, context_below));
|
||||
handleHashMode(state.forceStringNoCtx(*i->value, pos, context_below));
|
||||
else if (i->name == state.sOutputs) {
|
||||
/* Require ‘outputs’ to be a list of strings. */
|
||||
state.forceList(*i->value, noPos, context_below);
|
||||
state.forceList(*i->value, pos, context_below);
|
||||
Strings ss;
|
||||
for (auto elem : i->value->listItems())
|
||||
ss.emplace_back(state.forceStringNoCtx(*elem, noPos, context_below));
|
||||
ss.emplace_back(state.forceStringNoCtx(*elem, pos, context_below));
|
||||
handleOutputs(ss);
|
||||
}
|
||||
|
||||
} else {
|
||||
auto s = state.coerceToString(noPos, *i->value, context, context_below, true).toOwned();
|
||||
auto s = state.coerceToString(pos, *i->value, context, context_below, true).toOwned();
|
||||
drv.env.emplace(key, s);
|
||||
if (i->name == state.sBuilder) drv.builder = std::move(s);
|
||||
else if (i->name == state.sSystem) drv.platform = std::move(s);
|
||||
@ -1238,8 +1251,7 @@ drvName, Bindings * attrs, Value & v)
|
||||
|
||||
} catch (Error & e) {
|
||||
e.addTrace(state.positions[i->pos],
|
||||
hintfmt("while evaluating attribute '%1%' of derivation '%2%'", key, drvName),
|
||||
true);
|
||||
HintFmt("while evaluating attribute '%1%' of derivation '%2%'", key, drvName));
|
||||
throw;
|
||||
}
|
||||
}
|
||||
@ -1281,16 +1293,14 @@ drvName, Bindings * attrs, Value & v)
|
||||
|
||||
/* Do we have all required attributes? */
|
||||
if (drv.builder == "")
|
||||
state.debugThrowLastTrace(EvalError({
|
||||
.msg = hintfmt("required attribute 'builder' missing"),
|
||||
.errPos = state.positions[noPos]
|
||||
}));
|
||||
state.error<EvalError>("required attribute 'builder' missing")
|
||||
.atPos(v)
|
||||
.debugThrow();
|
||||
|
||||
if (drv.platform == "")
|
||||
state.debugThrowLastTrace(EvalError({
|
||||
.msg = hintfmt("required attribute 'system' missing"),
|
||||
.errPos = state.positions[noPos]
|
||||
}));
|
||||
state.error<EvalError>("required attribute 'system' missing")
|
||||
.atPos(v)
|
||||
.debugThrow();
|
||||
|
||||
/* Check whether the derivation name is valid. */
|
||||
if (isDerivation(drvName) &&
|
||||
@ -1298,10 +1308,10 @@ drvName, Bindings * attrs, Value & v)
|
||||
outputs.size() == 1 &&
|
||||
*(outputs.begin()) == "out"))
|
||||
{
|
||||
state.debugThrowLastTrace(EvalError({
|
||||
.msg = hintfmt("derivation names are allowed to end in '%s' only if they produce a single derivation file", drvExtension),
|
||||
.errPos = state.positions[noPos]
|
||||
}));
|
||||
state.error<EvalError>(
|
||||
"derivation names are allowed to end in '%s' only if they produce a single derivation file",
|
||||
drvExtension
|
||||
).atPos(v).debugThrow();
|
||||
}
|
||||
|
||||
if (outputHash) {
|
||||
@ -1310,10 +1320,9 @@ drvName, Bindings * attrs, Value & v)
|
||||
Ignore `__contentAddressed` because fixed output derivations are
|
||||
already content addressed. */
|
||||
if (outputs.size() != 1 || *(outputs.begin()) != "out")
|
||||
state.debugThrowLastTrace(Error({
|
||||
.msg = hintfmt("multiple outputs are not supported in fixed-output derivations"),
|
||||
.errPos = state.positions[noPos]
|
||||
}));
|
||||
state.error<EvalError>(
|
||||
"multiple outputs are not supported in fixed-output derivations"
|
||||
).atPos(v).debugThrow();
|
||||
|
||||
auto h = newHashAllowEmpty(*outputHash, parseHashAlgoOpt(outputHashAlgo));
|
||||
|
||||
@ -1332,10 +1341,8 @@ drvName, Bindings * attrs, Value & v)
|
||||
|
||||
else if (contentAddressed || isImpure) {
|
||||
if (contentAddressed && isImpure)
|
||||
throw EvalError({
|
||||
.msg = hintfmt("derivation cannot be both content-addressed and impure"),
|
||||
.errPos = state.positions[noPos]
|
||||
});
|
||||
state.error<EvalError>("derivation cannot be both content-addressed and impure")
|
||||
.atPos(v).debugThrow();
|
||||
|
||||
auto ha = parseHashAlgoOpt(outputHashAlgo).value_or(HashAlgorithm::SHA256);
|
||||
auto method = ingestionMethod.value_or(FileIngestionMethod::Recursive);
|
||||
@ -1376,10 +1383,10 @@ drvName, Bindings * attrs, Value & v)
|
||||
for (auto & i : outputs) {
|
||||
auto h = get(hashModulo.hashes, i);
|
||||
if (!h)
|
||||
throw AssertionError({
|
||||
.msg = hintfmt("derivation produced no hash for output '%s'", i),
|
||||
.errPos = state.positions[noPos],
|
||||
});
|
||||
state.error<AssertionError>(
|
||||
"derivation produced no hash for output '%s'",
|
||||
i
|
||||
).atPos(v).debugThrow();
|
||||
auto outPath = state.store->makeOutputPath(i, *h, drvName);
|
||||
drv.env[i] = state.store->printStorePath(outPath);
|
||||
drv.outputs.insert_or_assign(
|
||||
@ -1485,10 +1492,10 @@ static RegisterPrimOp primop_toPath({
|
||||
static void prim_storePath(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||
{
|
||||
if (evalSettings.pureEval)
|
||||
state.debugThrowLastTrace(EvalError({
|
||||
.msg = hintfmt("'%s' is not allowed in pure evaluation mode", "builtins.storePath"),
|
||||
.errPos = state.positions[pos]
|
||||
}));
|
||||
state.error<EvalError>(
|
||||
"'%s' is not allowed in pure evaluation mode",
|
||||
"builtins.storePath"
|
||||
).atPos(pos).debugThrow();
|
||||
|
||||
NixStringContext context;
|
||||
auto path = state.coerceToPath(pos, *args[0], context, "while evaluating the first argument passed to 'builtins.storePath'").path;
|
||||
@ -1498,10 +1505,8 @@ static void prim_storePath(EvalState & state, const PosIdx pos, Value * * args,
|
||||
if (!state.store->isStorePath(path.abs()))
|
||||
path = CanonPath(canonPath(path.abs(), true));
|
||||
if (!state.store->isInStore(path.abs()))
|
||||
state.debugThrowLastTrace(EvalError({
|
||||
.msg = hintfmt("path '%1%' is not in the Nix store", path),
|
||||
.errPos = state.positions[pos]
|
||||
}));
|
||||
state.error<EvalError>("path '%1%' is not in the Nix store", path)
|
||||
.atPos(pos).debugThrow();
|
||||
auto path2 = state.store->toStorePath(path.abs()).first;
|
||||
if (!settings.readOnlyMode)
|
||||
state.store->ensurePath(path2);
|
||||
@ -1534,13 +1539,16 @@ static void prim_pathExists(EvalState & state, const PosIdx pos, Value * * args,
|
||||
try {
auto & arg = *args[0];

auto path = realisePath(state, pos, arg);

/* SourcePath doesn't know about trailing slash. */
state.forceValue(arg, pos);
auto mustBeDir = arg.type() == nString
&& (arg.string_view().ends_with("/")
|| arg.string_view().ends_with("/."));

auto symlinkResolution =
mustBeDir ? SymlinkResolution::Full : SymlinkResolution::Ancestors;
auto path = realisePath(state, pos, arg, symlinkResolution);

auto st = path.maybeLstat();
auto exists = st && (!mustBeDir || st->type == SourceAccessor::tDirectory);
v.mkBool(exists);
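Illustration of the behaviour this hunk implements (not from the patch; `./foo` is a hypothetical path): a trailing slash can only be expressed via a string, and then the path must resolve to a directory.

```nix
{
  anyKind = builtins.pathExists ./foo;                    # true if ./foo exists at all
  dirOnly = builtins.pathExists (toString ./foo + "/");   # true only if ./foo is a directory
}
```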
|
||||
@ -1616,7 +1624,10 @@ static void prim_readFile(EvalState & state, const PosIdx pos, Value * * args, V
|
||||
auto path = realisePath(state, pos, *args[0]);
|
||||
auto s = path.readFile();
|
||||
if (s.find((char) 0) != std::string::npos)
|
||||
state.debugThrowLastTrace(Error("the contents of the file '%1%' cannot be represented as a Nix string", path));
|
||||
state.error<EvalError>(
|
||||
"the contents of the file '%1%' cannot be represented as a Nix string",
|
||||
path
|
||||
).atPos(pos).debugThrow();
|
||||
StorePathSet refs;
|
||||
if (state.store->isInStore(path.path.abs())) {
|
||||
try {
|
||||
@ -1673,10 +1684,11 @@ static void prim_findFile(EvalState & state, const PosIdx pos, Value * * args, V
|
||||
auto rewrites = state.realiseContext(context);
|
||||
path = rewriteStrings(path, rewrites);
|
||||
} catch (InvalidPathError & e) {
|
||||
state.debugThrowLastTrace(EvalError({
|
||||
.msg = hintfmt("cannot find '%1%', since path '%2%' is not valid", path, e.path),
|
||||
.errPos = state.positions[pos]
|
||||
}));
|
||||
state.error<EvalError>(
|
||||
"cannot find '%1%', since path '%2%' is not valid",
|
||||
path,
|
||||
e.path
|
||||
).atPos(pos).debugThrow();
|
||||
}
|
||||
|
||||
searchPath.elements.emplace_back(SearchPath::Elem {
|
||||
@ -1745,10 +1757,7 @@ static void prim_hashFile(EvalState & state, const PosIdx pos, Value * * args, V
|
||||
auto algo = state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.hashFile");
|
||||
std::optional<HashAlgorithm> ha = parseHashAlgo(algo);
|
||||
if (!ha)
|
||||
state.debugThrowLastTrace(Error({
|
||||
.msg = hintfmt("unknown hash algo '%1%'", algo),
|
||||
.errPos = state.positions[pos]
|
||||
}));
|
||||
state.error<EvalError>("unknown hash algorithm '%1%'", algo).atPos(pos).debugThrow();
|
||||
|
||||
auto path = realisePath(state, pos, *args[1]);
|
||||
|
||||
@ -1777,7 +1786,7 @@ static std::string_view fileTypeToString(InputAccessor::Type type)
|
||||
|
||||
static void prim_readFileType(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||
{
|
||||
auto path = realisePath(state, pos, *args[0], false);
|
||||
auto path = realisePath(state, pos, *args[0], std::nullopt);
|
||||
/* Retrieve the directory entry type and stringize it. */
|
||||
v.mkString(fileTypeToString(path.lstat().type));
|
||||
}
|
||||
@ -1816,7 +1825,7 @@ static void prim_readDir(EvalState & state, const PosIdx pos, Value * * args, Va
|
||||
// detailed node info quickly in this case we produce a thunk to
|
||||
// query the file type lazily.
|
||||
auto epath = state.allocValue();
|
||||
epath->mkPath(path + name);
|
||||
epath->mkPath(path / name);
|
||||
if (!readFileType)
|
||||
readFileType = &state.getBuiltin("readFileType");
|
||||
attr.mkApp(readFileType, epath);
|
||||
@ -2068,13 +2077,12 @@ static void prim_toFile(EvalState & state, const PosIdx pos, Value * * args, Val
|
||||
if (auto p = std::get_if<NixStringContextElem::Opaque>(&c.raw))
|
||||
refs.insert(p->path);
|
||||
else
|
||||
state.debugThrowLastTrace(EvalError({
|
||||
.msg = hintfmt(
|
||||
"in 'toFile': the file named '%1%' must not contain a reference "
|
||||
"to a derivation but contains (%2%)",
|
||||
name, c.to_string()),
|
||||
.errPos = state.positions[pos]
|
||||
}));
|
||||
state.error<EvalError>(
|
||||
"files created by %1% may not reference derivations, but %2% references %3%",
|
||||
"builtins.toFile",
|
||||
name,
|
||||
c.to_string()
|
||||
).atPos(pos).debugThrow();
|
||||
}
|
||||
|
||||
auto storePath = settings.readOnlyMode
|
||||
@ -2084,7 +2092,7 @@ static void prim_toFile(EvalState & state, const PosIdx pos, Value * * args, Val
|
||||
})
|
||||
: ({
|
||||
StringSource s { contents };
|
||||
state.store->addToStoreFromDump(s, name, TextIngestionMethod {}, HashAlgorithm::SHA256, refs, state.repair);
|
||||
state.store->addToStoreFromDump(s, name, FileSerialisationMethod::Flat, TextIngestionMethod {}, HashAlgorithm::SHA256, refs, state.repair);
|
||||
});
|
||||
|
||||
/* Note: we don't need to add `context' to the context of the
|
||||
@ -2241,9 +2249,19 @@ static void addPath(
|
||||
});
|
||||
|
||||
if (!expectedHash || !state.store->isValidPath(*expectedStorePath)) {
|
||||
auto dstPath = fetchToStore(*state.store, path.resolveSymlinks(), name, method, filter.get(), state.repair);
|
||||
auto dstPath = fetchToStore(
|
||||
*state.store,
|
||||
path.resolveSymlinks(),
|
||||
settings.readOnlyMode ? FetchMode::DryRun : FetchMode::Copy,
|
||||
name,
|
||||
method,
|
||||
filter.get(),
|
||||
state.repair);
|
||||
if (expectedHash && expectedStorePath != dstPath)
|
||||
state.debugThrowLastTrace(Error("store path mismatch in (possibly filtered) path added from '%s'", path));
|
||||
state.error<EvalError>(
|
||||
"store path mismatch in (possibly filtered) path added from '%s'",
|
||||
path
|
||||
).atPos(pos).debugThrow();
|
||||
state.allowAndSetStorePathString(dstPath, v);
|
||||
} else
|
||||
state.allowAndSetStorePathString(*expectedStorePath, v);
|
||||
@ -2343,16 +2361,15 @@ static void prim_path(EvalState & state, const PosIdx pos, Value * * args, Value
|
||||
else if (n == "sha256")
|
||||
expectedHash = newHashAllowEmpty(state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the `sha256` attribute passed to builtins.path"), HashAlgorithm::SHA256);
|
||||
else
|
||||
state.debugThrowLastTrace(EvalError({
|
||||
.msg = hintfmt("unsupported argument '%1%' to 'addPath'", state.symbols[attr.name]),
|
||||
.errPos = state.positions[attr.pos]
|
||||
}));
|
||||
state.error<EvalError>(
|
||||
"unsupported argument '%1%' to 'addPath'",
|
||||
state.symbols[attr.name]
|
||||
).atPos(attr.pos).debugThrow();
|
||||
}
|
||||
if (!path)
|
||||
state.debugThrowLastTrace(EvalError({
|
||||
.msg = hintfmt("missing required 'path' attribute in the first argument to builtins.path"),
|
||||
.errPos = state.positions[pos]
|
||||
}));
|
||||
state.error<EvalError>(
|
||||
"missing required 'path' attribute in the first argument to builtins.path"
|
||||
).atPos(pos).debugThrow();
|
||||
if (name.empty())
|
||||
name = path->baseName();
|
||||
|
||||
@ -2770,10 +2787,7 @@ static void prim_functionArgs(EvalState & state, const PosIdx pos, Value * * arg
|
||||
return;
|
||||
}
|
||||
if (!args[0]->isLambda())
|
||||
state.debugThrowLastTrace(TypeError({
|
||||
.msg = hintfmt("'functionArgs' requires a function"),
|
||||
.errPos = state.positions[pos]
|
||||
}));
|
||||
state.error<TypeError>("'functionArgs' requires a function").atPos(pos).debugThrow();
|
||||
|
||||
if (!args[0]->lambda.fun->hasFormals()) {
|
||||
v.mkAttrs(&state.emptyBindings);
|
||||
@ -2943,10 +2957,10 @@ static void elemAt(EvalState & state, const PosIdx pos, Value & list, int n, Val
|
||||
{
|
||||
state.forceList(list, pos, "while evaluating the first argument passed to builtins.elemAt");
|
||||
if (n < 0 || (unsigned int) n >= list.listSize())
|
||||
state.debugThrowLastTrace(Error({
|
||||
.msg = hintfmt("list index %1% is out of bounds", n),
|
||||
.errPos = state.positions[pos]
|
||||
}));
|
||||
state.error<EvalError>(
|
||||
"list index %1% is out of bounds",
|
||||
n
|
||||
).atPos(pos).debugThrow();
|
||||
state.forceValue(*list.listElems()[n], pos);
|
||||
v = *list.listElems()[n];
|
||||
}
|
||||
@ -2991,10 +3005,7 @@ static void prim_tail(EvalState & state, const PosIdx pos, Value * * args, Value
|
||||
{
|
||||
state.forceList(*args[0], pos, "while evaluating the first argument passed to builtins.tail");
|
||||
if (args[0]->listSize() == 0)
|
||||
state.debugThrowLastTrace(Error({
|
||||
.msg = hintfmt("'tail' called on an empty list"),
|
||||
.errPos = state.positions[pos]
|
||||
}));
|
||||
state.error<EvalError>("'tail' called on an empty list").atPos(pos).debugThrow();
|
||||
|
||||
state.mkList(v, args[0]->listSize() - 1);
|
||||
for (unsigned int n = 0; n < v.listSize(); ++n)
|
||||
@ -3251,7 +3262,7 @@ static void prim_genList(EvalState & state, const PosIdx pos, Value * * args, Va
|
||||
auto len = state.forceInt(*args[1], pos, "while evaluating the second argument passed to builtins.genList");
|
||||
|
||||
if (len < 0)
|
||||
state.error("cannot create list of size %1%", len).debugThrow<EvalError>();
|
||||
state.error<EvalError>("cannot create list of size %1%", len).atPos(pos).debugThrow();
|
||||
|
||||
// More strict than striclty (!) necessary, but acceptable
|
||||
// as evaluating map without accessing any values makes little sense.
|
||||
@ -3568,10 +3579,7 @@ static void prim_div(EvalState & state, const PosIdx pos, Value * * args, Value

NixFloat f2 = state.forceFloat(*args[1], pos, "while evaluating the second operand of the division");
if (f2 == 0)
state.debugThrowLastTrace(EvalError({
.msg = hintfmt("division by zero"),
.errPos = state.positions[pos]
}));
state.error<EvalError>("division by zero").atPos(pos).debugThrow();

if (args[0]->type() == nFloat || args[1]->type() == nFloat) {
v.mkFloat(state.forceFloat(*args[0], pos, "while evaluating the first operand of the division") / f2);

@ -3580,10 +3588,7 @@ static void prim_div(EvalState & state, const PosIdx pos, Value * * args, Value
NixInt i2 = state.forceInt(*args[1], pos, "while evaluating the second operand of the division");
/* Avoid division overflow as it might raise SIGFPE. */
if (i1 == std::numeric_limits<NixInt>::min() && i2 == -1)
state.debugThrowLastTrace(EvalError({
.msg = hintfmt("overflow in integer division"),
.errPos = state.positions[pos]
}));
state.error<EvalError>("overflow in integer division").atPos(pos).debugThrow();

v.mkInt(i1 / i2);
}
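The comment in the hunk above ("Avoid division overflow as it might raise SIGFPE") refers to the one case where two's-complement integer division overflows: the most negative value divided by -1. A small standalone program, independent of Nix, that demonstrates the guard:

// Standalone illustration of the INT64_MIN / -1 edge case guarded in prim_div.
// On common hardware, evaluating INT64_MIN / -1 at run time traps (SIGFPE),
// because the mathematically correct result (2^63) does not fit in int64_t.
#include <cstdint>
#include <cstdio>
#include <limits>

bool safeDivide(int64_t a, int64_t b, int64_t & result) {
    if (b == 0)
        return false;                                    // division by zero
    if (a == std::numeric_limits<int64_t>::min() && b == -1)
        return false;                                    // would overflow: -INT64_MIN is not representable
    result = a / b;
    return true;
}

int main() {
    int64_t r;
    if (!safeDivide(std::numeric_limits<int64_t>::min(), -1, r))
        std::puts("refused: overflow in integer division");
    if (safeDivide(7, -2, r))
        std::printf("7 / -2 = %lld\n", static_cast<long long>(r));
}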
@ -3714,10 +3719,7 @@ static void prim_substring(EvalState & state, const PosIdx pos, Value * * args,
int start = state.forceInt(*args[0], pos, "while evaluating the first argument (the start offset) passed to builtins.substring");

if (start < 0)
state.debugThrowLastTrace(EvalError({
.msg = hintfmt("negative start position in 'substring'"),
.errPos = state.positions[pos]
}));
state.error<EvalError>("negative start position in 'substring'").atPos(pos).debugThrow();


int len = state.forceInt(*args[1], pos, "while evaluating the second argument (the substring length) passed to builtins.substring");

@ -3782,10 +3784,7 @@ static void prim_hashString(EvalState & state, const PosIdx pos, Value * * args,
auto algo = state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.hashString");
std::optional<HashAlgorithm> ha = parseHashAlgo(algo);
if (!ha)
state.debugThrowLastTrace(Error({
.msg = hintfmt("unknown hash algo '%1%'", algo),
.errPos = state.positions[pos]
}));
state.error<EvalError>("unknown hash algorithm '%1%'", algo).atPos(pos).debugThrow();

NixStringContext context; // discarded
auto s = state.forceString(*args[1], context, pos, "while evaluating the second argument passed to builtins.hashString");

@ -3951,15 +3950,13 @@ void prim_match(EvalState & state, const PosIdx pos, Value * * args, Value & v)
} catch (std::regex_error & e) {
if (e.code() == std::regex_constants::error_space) {
// limit is _GLIBCXX_REGEX_STATE_LIMIT for libstdc++
state.debugThrowLastTrace(EvalError({
.msg = hintfmt("memory limit exceeded by regular expression '%s'", re),
.errPos = state.positions[pos]
}));
state.error<EvalError>("memory limit exceeded by regular expression '%s'", re)
.atPos(pos)
.debugThrow();
} else
state.debugThrowLastTrace(EvalError({
.msg = hintfmt("invalid regular expression '%s'", re),
.errPos = state.positions[pos]
}));
state.error<EvalError>("invalid regular expression '%s'", re)
.atPos(pos)
.debugThrow();
}
}

@ -4055,15 +4052,13 @@ void prim_split(EvalState & state, const PosIdx pos, Value * * args, Value & v)
} catch (std::regex_error & e) {
if (e.code() == std::regex_constants::error_space) {
// limit is _GLIBCXX_REGEX_STATE_LIMIT for libstdc++
state.debugThrowLastTrace(EvalError({
.msg = hintfmt("memory limit exceeded by regular expression '%s'", re),
.errPos = state.positions[pos]
}));
state.error<EvalError>("memory limit exceeded by regular expression '%s'", re)
.atPos(pos)
.debugThrow();
} else
state.debugThrowLastTrace(EvalError({
.msg = hintfmt("invalid regular expression '%s'", re),
.errPos = state.positions[pos]
}));
state.error<EvalError>("invalid regular expression '%s'", re)
.atPos(pos)
.debugThrow();
}
}
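Both `match` and `split` above distinguish `std::regex_constants::error_space` (the regex engine ran out of memory or hit its internal state limit, `_GLIBCXX_REGEX_STATE_LIMIT` on libstdc++) from every other `std::regex_error`, which is reported as an invalid expression. A small standalone sketch of the same classification, with no Nix dependencies:

// Classify std::regex_error the same way the hunks above do:
// error_space -> "memory limit exceeded", anything else -> "invalid expression".
#include <cstdio>
#include <regex>
#include <string>

void tryRegex(const std::string & re) {
    try {
        std::regex compiled(re, std::regex::extended);
        std::printf("'%s' compiled fine\n", re.c_str());
    } catch (std::regex_error & e) {
        if (e.code() == std::regex_constants::error_space)
            std::printf("memory limit exceeded by regular expression '%s'\n", re.c_str());
        else
            std::printf("invalid regular expression '%s'\n", re.c_str());
    }
}

int main() {
    tryRegex("a(b|c)*d");   // valid
    tryRegex("a(bc");       // unbalanced parenthesis -> invalid
}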
@ -4139,7 +4134,9 @@ static void prim_replaceStrings(EvalState & state, const PosIdx pos, Value * * a
state.forceList(*args[0], pos, "while evaluating the first argument passed to builtins.replaceStrings");
state.forceList(*args[1], pos, "while evaluating the second argument passed to builtins.replaceStrings");
if (args[0]->listSize() != args[1]->listSize())
state.error("'from' and 'to' arguments passed to builtins.replaceStrings have different lengths").atPos(pos).debugThrow<EvalError>();
state.error<EvalError>(
"'from' and 'to' arguments passed to builtins.replaceStrings have different lengths"
).atPos(pos).debugThrow();

std::vector<std::string> from;
from.reserve(args[0]->listSize());
@ -98,30 +98,30 @@ static void prim_addDrvOutputDependencies(EvalState & state, const PosIdx pos, V

auto contextSize = context.size();
if (contextSize != 1) {
throw EvalError({
.msg = hintfmt("context of string '%s' must have exactly one element, but has %d", *s, contextSize),
.errPos = state.positions[pos]
});
state.error<EvalError>(
"context of string '%s' must have exactly one element, but has %d",
*s,
contextSize
).atPos(pos).debugThrow();
}
NixStringContext context2 {
(NixStringContextElem { std::visit(overloaded {
[&](const NixStringContextElem::Opaque & c) -> NixStringContextElem::DrvDeep {
if (!c.path.isDerivation()) {
throw EvalError({
.msg = hintfmt("path '%s' is not a derivation",
state.store->printStorePath(c.path)),
.errPos = state.positions[pos],
});
state.error<EvalError>(
"path '%s' is not a derivation",
state.store->printStorePath(c.path)
).atPos(pos).debugThrow();
}
return NixStringContextElem::DrvDeep {
.drvPath = c.path,
};
},
[&](const NixStringContextElem::Built & c) -> NixStringContextElem::DrvDeep {
throw EvalError({
.msg = hintfmt("`addDrvOutputDependencies` can only act on derivations, not on a derivation output such as '%1%'", c.output),
.errPos = state.positions[pos],
});
state.error<EvalError>(
"`addDrvOutputDependencies` can only act on derivations, not on a derivation output such as '%1%'",
c.output
).atPos(pos).debugThrow();
},
[&](const NixStringContextElem::DrvDeep & c) -> NixStringContextElem::DrvDeep {
/* Reuse original item because we want this to be idempotent. */

@ -261,10 +261,10 @@ static void prim_appendContext(EvalState & state, const PosIdx pos, Value * * ar
for (auto & i : *args[1]->attrs) {
const auto & name = state.symbols[i.name];
if (!state.store->isStorePath(name))
throw EvalError({
.msg = hintfmt("context key '%s' is not a store path", name),
.errPos = state.positions[i.pos]
});
state.error<EvalError>(
"context key '%s' is not a store path",
name
).atPos(i.pos).debugThrow();
auto namePath = state.store->parseStorePath(name);
if (!settings.readOnlyMode)
state.store->ensurePath(namePath);

@ -281,10 +281,10 @@ static void prim_appendContext(EvalState & state, const PosIdx pos, Value * * ar
if (iter != i.value->attrs->end()) {
if (state.forceBool(*iter->value, iter->pos, "while evaluating the `allOutputs` attribute of a string context")) {
if (!isDerivation(name)) {
throw EvalError({
.msg = hintfmt("tried to add all-outputs context of %s, which is not a derivation, to a string", name),
.errPos = state.positions[i.pos]
});
state.error<EvalError>(
"tried to add all-outputs context of %s, which is not a derivation, to a string",
name
).atPos(i.pos).debugThrow();
}
context.emplace(NixStringContextElem::DrvDeep {
.drvPath = namePath,

@ -296,10 +296,10 @@ static void prim_appendContext(EvalState & state, const PosIdx pos, Value * * ar
if (iter != i.value->attrs->end()) {
state.forceList(*iter->value, iter->pos, "while evaluating the `outputs` attribute of a string context");
if (iter->value->listSize() && !isDerivation(name)) {
throw EvalError({
.msg = hintfmt("tried to add derivation output context of %s, which is not a derivation, to a string", name),
.errPos = state.positions[i.pos]
});
state.error<EvalError>(
"tried to add derivation output context of %s, which is not a derivation, to a string",
name
).atPos(i.pos).debugThrow();
}
for (auto elem : iter->value->listItems()) {
auto outputName = state.forceStringNoCtx(*elem, iter->pos, "while evaluating an output name within a string context");
@ -23,20 +23,20 @@ static void runFetchClosureWithRewrite(EvalState & state, const PosIdx pos, Stor
auto rewrittenPath = makeContentAddressed(fromStore, *state.store, fromPath);
if (toPathMaybe && *toPathMaybe != rewrittenPath)
throw Error({
.msg = hintfmt("rewriting '%s' to content-addressed form yielded '%s', while '%s' was expected",
.msg = HintFmt("rewriting '%s' to content-addressed form yielded '%s', while '%s' was expected",
state.store->printStorePath(fromPath),
state.store->printStorePath(rewrittenPath),
state.store->printStorePath(*toPathMaybe)),
.errPos = state.positions[pos]
.pos = state.positions[pos]
});
if (!toPathMaybe)
throw Error({
.msg = hintfmt(
.msg = HintFmt(
"rewriting '%s' to content-addressed form yielded '%s'\n"
"Use this value for the 'toPath' attribute passed to 'fetchClosure'",
state.store->printStorePath(fromPath),
state.store->printStorePath(rewrittenPath)),
.errPos = state.positions[pos]
.pos = state.positions[pos]
});
}

@ -50,11 +50,11 @@ static void runFetchClosureWithRewrite(EvalState & state, const PosIdx pos, Stor
// We don't perform the rewriting when outPath already exists, as an optimisation.
// However, we can quickly detect a mistake if the toPath is input addressed.
throw Error({
.msg = hintfmt(
.msg = HintFmt(
"The 'toPath' value '%s' is input-addressed, so it can't possibly be the result of rewriting to a content-addressed path.\n\n"
"Set 'toPath' to an empty string to make Nix report the correct content-addressed path.",
state.store->printStorePath(toPath)),
.errPos = state.positions[pos]
.pos = state.positions[pos]
});
}

@ -73,14 +73,14 @@ static void runFetchClosureWithContentAddressedPath(EvalState & state, const Pos

if (!info->isContentAddressed(*state.store)) {
throw Error({
.msg = hintfmt(
.msg = HintFmt(
"The 'fromPath' value '%s' is input-addressed, but 'inputAddressed' is set to 'false' (default).\n\n"
"If you do intend to fetch an input-addressed store path, add\n\n"
"    inputAddressed = true;\n\n"
"to the 'fetchClosure' arguments.\n\n"
"Note that to ensure authenticity input-addressed store paths, users must configure a trusted binary cache public key on their systems. This is not needed for content-addressed paths.",
state.store->printStorePath(fromPath)),
.errPos = state.positions[pos]
.pos = state.positions[pos]
});
}

@ -99,11 +99,11 @@ static void runFetchClosureWithInputAddressedPath(EvalState & state, const PosId

if (info->isContentAddressed(*state.store)) {
throw Error({
.msg = hintfmt(
.msg = HintFmt(
"The store object referred to by 'fromPath' at '%s' is not input-addressed, but 'inputAddressed' is set to 'true'.\n\n"
"Remove the 'inputAddressed' attribute (it defaults to 'false') to expect 'fromPath' to be content-addressed",
state.store->printStorePath(fromPath)),
.errPos = state.positions[pos]
.pos = state.positions[pos]
});
}

@ -153,15 +153,15 @@ static void prim_fetchClosure(EvalState & state, const PosIdx pos, Value * * arg

else
throw Error({
.msg = hintfmt("attribute '%s' isn't supported in call to 'fetchClosure'", attrName),
.errPos = state.positions[pos]
.msg = HintFmt("attribute '%s' isn't supported in call to 'fetchClosure'", attrName),
.pos = state.positions[pos]
});
}

if (!fromPath)
throw Error({
.msg = hintfmt("attribute '%s' is missing in call to 'fetchClosure'", "fromPath"),
.errPos = state.positions[pos]
.msg = HintFmt("attribute '%s' is missing in call to 'fetchClosure'", "fromPath"),
.pos = state.positions[pos]
});

bool inputAddressed = inputAddressedMaybe.value_or(false);

@ -169,17 +169,17 @@ static void prim_fetchClosure(EvalState & state, const PosIdx pos, Value * * arg
if (inputAddressed) {
if (toPath)
throw Error({
.msg = hintfmt("attribute '%s' is set to true, but '%s' is also set. Please remove one of them",
.msg = HintFmt("attribute '%s' is set to true, but '%s' is also set. Please remove one of them",
"inputAddressed",
"toPath"),
.errPos = state.positions[pos]
.pos = state.positions[pos]
});
}

if (!fromStoreUrl)
throw Error({
.msg = hintfmt("attribute '%s' is missing in call to 'fetchClosure'", "fromStore"),
.errPos = state.positions[pos]
.msg = HintFmt("attribute '%s' is missing in call to 'fetchClosure'", "fromStore"),
.pos = state.positions[pos]
});

auto parsedURL = parseURL(*fromStoreUrl);

@ -188,14 +188,14 @@ static void prim_fetchClosure(EvalState & state, const PosIdx pos, Value * * arg
parsedURL.scheme != "https" &&
!(getEnv("_NIX_IN_TEST").has_value() && parsedURL.scheme == "file"))
throw Error({
.msg = hintfmt("'fetchClosure' only supports http:// and https:// stores"),
.errPos = state.positions[pos]
.msg = HintFmt("'fetchClosure' only supports http:// and https:// stores"),
.pos = state.positions[pos]
});

if (!parsedURL.query.empty())
throw Error({
.msg = hintfmt("'fetchClosure' does not support URL query parameters (in '%s')", *fromStoreUrl),
.errPos = state.positions[pos]
.msg = HintFmt("'fetchClosure' does not support URL query parameters (in '%s')", *fromStoreUrl),
.pos = state.positions[pos]
});

auto fromStore = openStore(parsedURL.to_string());
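Besides swapping `hintfmt`/`errPos` for `HintFmt`/`pos`, the `fetchClosure` hunks above keep their validation logic: the `fromStore` URL must use http:// or https:// (file:// is only tolerated inside the test suite) and must not carry query parameters. A standalone sketch of that kind of check, using a trivial string inspection instead of Nix's `parseURL`:

// Simplified stand-in for the fromStore URL validation shown above.
// Real Nix parses the URL properly (parseURL); here we only look at the
// scheme prefix and a '?' to keep the example self-contained.
#include <cstdio>
#include <optional>
#include <string>

std::optional<std::string> checkFromStoreUrl(const std::string & url) {
    auto schemeEnd = url.find("://");
    if (schemeEnd == std::string::npos)
        return "not a URL: '" + url + "'";
    auto scheme = url.substr(0, schemeEnd);
    if (scheme != "http" && scheme != "https")
        return std::string("'fetchClosure' only supports http:// and https:// stores");
    if (url.find('?') != std::string::npos)
        return "'fetchClosure' does not support URL query parameters (in '" + url + "')";
    return std::nullopt; // OK
}

int main() {
    for (auto url : { "https://cache.nixos.org", "ftp://example.org", "https://cache.nixos.org?priority=1" }) {
        if (auto err = checkFromStoreUrl(url))
            std::printf("rejected %s: %s\n", url, err->c_str());
        else
            std::printf("accepted %s\n", url);
    }
}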
@ -38,17 +38,11 @@ static void prim_fetchMercurial(EvalState & state, const PosIdx pos, Value * * a
else if (n == "name")
name = state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the `name` attribute passed to builtins.fetchMercurial");
else
throw EvalError({
.msg = hintfmt("unsupported argument '%s' to 'fetchMercurial'", state.symbols[attr.name]),
.errPos = state.positions[attr.pos]
});
state.error<EvalError>("unsupported argument '%s' to 'fetchMercurial'", state.symbols[attr.name]).atPos(attr.pos).debugThrow();
}

if (url.empty())
throw EvalError({
.msg = hintfmt("'url' argument required"),
.errPos = state.positions[pos]
});
state.error<EvalError>("'url' argument required").atPos(pos).debugThrow();

} else
url = state.coerceToString(pos, *args[0], context,
@ -9,6 +9,7 @@
#include "tarball.hh"
#include "url.hh"
#include "value-to-json.hh"
#include "fetch-to-store.hh"

#include <ctime>
#include <iomanip>

@ -24,8 +25,6 @@ void emitTreeAttrs(
bool emptyRevFallback,
bool forceDirty)
{
assert(input.isLocked());

auto attrs = state.buildBindings(100);

state.mkStorePathString(storePath, attrs.alloc(state.sOutPath));

@ -100,16 +99,14 @@ static void fetchTree(

if (auto aType = args[0]->attrs->get(state.sType)) {
if (type)
state.debugThrowLastTrace(EvalError({
.msg = hintfmt("unexpected attribute 'type'"),
.errPos = state.positions[pos]
}));
state.error<EvalError>(
"unexpected attribute 'type'"
).atPos(pos).debugThrow();
type = state.forceStringNoCtx(*aType->value, aType->pos, "while evaluating the `type` attribute passed to builtins.fetchTree");
} else if (!type)
state.debugThrowLastTrace(EvalError({
.msg = hintfmt("attribute 'type' is missing in call to 'fetchTree'"),
.errPos = state.positions[pos]
}));
state.error<EvalError>(
"attribute 'type' is missing in call to 'fetchTree'"
).atPos(pos).debugThrow();

attrs.emplace("type", type.value());

@ -132,8 +129,8 @@ static void fetchTree(
attrs.emplace(state.symbols[attr.name], printValueAsJSON(state, true, *attr.value, pos, context).dump());
}
else
state.debugThrowLastTrace(TypeError("fetchTree argument '%s' is %s while a string, Boolean or integer is expected",
state.symbols[attr.name], showType(*attr.value)));
state.error<TypeError>("fetchTree argument '%s' is %s while a string, Boolean or integer is expected",
state.symbols[attr.name], showType(*attr.value)).debugThrow();
}

if (params.isFetchGit && !attrs.contains("exportIgnore") && (!attrs.contains("submodules") || !*fetchers::maybeGetBoolAttr(attrs, "submodules"))) {

@ -142,10 +139,9 @@ static void fetchTree(

if (!params.allowNameArgument)
if (auto nameIter = attrs.find("name"); nameIter != attrs.end())
state.debugThrowLastTrace(EvalError({
.msg = hintfmt("attribute 'name' isn’t supported in call to 'fetchTree'"),
.errPos = state.positions[pos]
}));
state.error<EvalError>(
"attribute 'name' isn’t supported in call to 'fetchTree'"
).atPos(pos).debugThrow();

input = fetchers::Input::fromAttrs(std::move(attrs));
} else {

@ -163,10 +159,9 @@ static void fetchTree(
input = fetchers::Input::fromAttrs(std::move(attrs));
} else {
if (!experimentalFeatureSettings.isEnabled(Xp::Flakes))
state.debugThrowLastTrace(EvalError({
.msg = hintfmt("passing a string argument to 'fetchTree' requires the 'flakes' experimental feature"),
.errPos = state.positions[pos]
}));
state.error<EvalError>(
"passing a string argument to 'fetchTree' requires the 'flakes' experimental feature"
).atPos(pos).debugThrow();
input = fetchers::Input::fromURL(url);
}
}

@ -175,10 +170,14 @@ static void fetchTree(
input = lookupInRegistries(state.store, input).first;

if (evalSettings.pureEval && !input.isLocked()) {
auto fetcher = "fetchTree";
if (params.isFetchGit)
state.debugThrowLastTrace(EvalError("in pure evaluation mode, 'fetchGit' requires a locked input, at %s", state.positions[pos]));
else
state.debugThrowLastTrace(EvalError("in pure evaluation mode, 'fetchTree' requires a locked input, at %s", state.positions[pos]));
fetcher = "fetchGit";

state.error<EvalError>(
"in pure evaluation mode, '%s' will not fetch unlocked input '%s'",
fetcher, input.to_string()
).atPos(pos).debugThrow();
}

state.checkURI(input.toURLString());

@ -432,17 +431,13 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v
else if (n == "name")
name = state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the name of the content we should fetch");
else
state.debugThrowLastTrace(EvalError({
.msg = hintfmt("unsupported argument '%s' to '%s'", n, who),
.errPos = state.positions[attr.pos]
}));
state.error<EvalError>("unsupported argument '%s' to '%s'", n, who)
.atPos(pos).debugThrow();
}

if (!url)
state.debugThrowLastTrace(EvalError({
.msg = hintfmt("'url' argument required"),
.errPos = state.positions[pos]
}));
state.error<EvalError>(
"'url' argument required").atPos(pos).debugThrow();
} else
url = state.forceStringNoCtx(*args[0], pos, "while evaluating the url we should fetch");

@ -455,7 +450,7 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v
name = baseNameOf(*url);

if (evalSettings.pureEval && !expectedHash)
state.debugThrowLastTrace(EvalError("in pure evaluation mode, '%s' requires a 'sha256' argument", who));
state.error<EvalError>("in pure evaluation mode, '%s' requires a 'sha256' argument", who).atPos(pos).debugThrow();

// early exit if pinned and already in the store
if (expectedHash && expectedHash->algo == HashAlgorithm::SHA256) {

@ -477,16 +472,22 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v
// https://github.com/NixOS/nix/issues/4313
auto storePath =
unpack
? fetchers::downloadTarball(state.store, *url, name, (bool) expectedHash).storePath
? fetchToStore(*state.store, fetchers::downloadTarball(*url).accessor, FetchMode::Copy, name)
: fetchers::downloadFile(state.store, *url, name, (bool) expectedHash).storePath;

if (expectedHash) {
auto hash = unpack
? state.store->queryPathInfo(storePath)->narHash
: hashFile(HashAlgorithm::SHA256, state.store->toRealPath(storePath));
if (hash != *expectedHash)
state.debugThrowLastTrace(EvalError((unsigned int) 102, "hash mismatch in file downloaded from '%s':\n  specified: %s\n  got:       %s",
*url, expectedHash->to_string(HashFormat::Nix32, true), hash.to_string(HashFormat::Nix32, true)));
if (hash != *expectedHash) {
state.error<EvalError>(
"hash mismatch in file downloaded from '%s':\n  specified: %s\n  got:       %s",
*url,
expectedHash->to_string(HashFormat::Nix32, true),
hash.to_string(HashFormat::Nix32, true)
).withExitStatus(102)
.debugThrow();
}
}

state.allowAndSetStorePathString(storePath, v);
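The last hunk above changes the hash-mismatch failure from a positional `EvalError((unsigned int) 102, ...)` constructor to the explicit `.withExitStatus(102)` builder call, which makes the special exit code visible at the call site. Below is a standalone sketch of the same idea, comparing an expected against an actual hash string and propagating a distinct exit status for mismatches; the helper names are made up for the example and the hash values are placeholders.

// Illustrative only: verify a download against an expected hash string and
// report a distinct exit status (102, as in the hunk above) on mismatch.
#include <cstdio>
#include <string>

struct FetchResult {
    int exitStatus;          // 0 = ok, 102 = hash mismatch (mirroring the diff)
    std::string message;
};

FetchResult verifyHash(const std::string & url, const std::string & expected, const std::string & got) {
    if (expected != got)
        return {102, "hash mismatch in file downloaded from '" + url +
                     "':\n  specified: " + expected + "\n  got:       " + got};
    return {0, "hash verified"};
}

int main() {
    auto r = verifyHash("https://example.org/src.tar.gz",
                        "sha256-placeholderExpected", "sha256-placeholderActual");
    std::printf("%s\n", r.message.c_str());
    return r.exitStatus;
}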
@ -83,10 +83,7 @@ static void prim_fromTOML(EvalState & state, const PosIdx pos, Value * * args, V
try {
visit(val, toml::parse(tomlStream, "fromTOML" /* the "filename" */));
} catch (std::exception & e) { // TODO: toml::syntax_error
throw EvalError({
.msg = hintfmt("while parsing a TOML string: %s", e.what()),
.errPos = state.positions[pos]
});
state.error<EvalError>("while parsing TOML: %s", e.what()).atPos(pos).debugThrow();
}
}
@ -17,24 +17,29 @@ struct PrintOptions
* If true, output ANSI color sequences.
*/
bool ansiColors = false;

/**
* If true, force values.
*/
bool force = false;

/**
* If true and `force` is set, print derivations as
* `«derivation /nix/store/...»` instead of as attribute sets.
*/
bool derivationPaths = false;

/**
* If true, track which values have been printed and skip them on
* subsequent encounters. Useful for self-referential values.
*/
bool trackRepeated = true;

/**
* Maximum depth to evaluate to.
*/
size_t maxDepth = std::numeric_limits<size_t>::max();

/**
* Maximum number of attributes in attribute sets to print.
*

@ -42,6 +47,7 @@ struct PrintOptions
* attribute set encountered.
*/
size_t maxAttrs = std::numeric_limits<size_t>::max();

/**
* Maximum number of list items to print.
*

@ -49,10 +55,26 @@ struct PrintOptions
* list encountered.
*/
size_t maxListItems = std::numeric_limits<size_t>::max();

/**
* Maximum string length to print.
*/
size_t maxStringLength = std::numeric_limits<size_t>::max();

/**
* Indentation width for pretty-printing.
*
* If set to 0 (the default), values are not pretty-printed.
*/
size_t prettyIndent = 0;

/**
* True if pretty-printing is enabled.
*/
inline bool shouldPrettyPrint()
{
return prettyIndent > 0;
}
};

/**
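The `PrintOptions` hunks above add `trackRepeated` and `prettyIndent` alongside the existing limits, and `shouldPrettyPrint()` simply reports whether an indent width was configured. The sketch below uses a trimmed copy of the fields shown above to demonstrate how a caller might fill the struct in; aggregate initialization is an assumption about intended use, not something the diff itself demonstrates, and this is not the full header.

// Trimmed copy of the PrintOptions fields shown in the hunks above, just to
// show how prettyIndent drives shouldPrettyPrint(). Illustration only.
#include <cstddef>
#include <cstdio>
#include <limits>

struct PrintOptions {
    bool ansiColors = false;
    bool force = false;
    bool derivationPaths = false;
    bool trackRepeated = true;
    size_t maxDepth = std::numeric_limits<size_t>::max();
    size_t maxAttrs = std::numeric_limits<size_t>::max();
    size_t maxListItems = std::numeric_limits<size_t>::max();
    size_t maxStringLength = std::numeric_limits<size_t>::max();
    size_t prettyIndent = 0;

    bool shouldPrettyPrint() { return prettyIndent > 0; }
};

int main() {
    // Compact output: default options leave prettyIndent at 0.
    PrintOptions compact;

    // Pretty-printed output with a 2-space indent and a cap on list items,
    // roughly what an interactive printer might ask for.
    PrintOptions pretty { .force = true, .trackRepeated = true, .maxListItems = 32, .prettyIndent = 2 };

    std::printf("compact pretty-prints: %s\n", compact.shouldPrettyPrint() ? "yes" : "no");
    std::printf("pretty  pretty-prints: %s\n", pretty.shouldPrettyPrint() ? "yes" : "no");
}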
@ -152,7 +152,8 @@ struct ImportantFirstAttrNameCmp
}
};

typedef std::set<Value *> ValuesSeen;
typedef std::set<const void *> ValuesSeen;
typedef std::vector<std::pair<std::string, Value *>> AttrVec;

class Printer
{

@ -163,6 +164,37 @@ private:
std::optional<ValuesSeen> seen;
size_t attrsPrinted = 0;
size_t listItemsPrinted = 0;
std::string indent;

void increaseIndent()
{
if (options.shouldPrettyPrint()) {
indent.append(options.prettyIndent, ' ');
}
}

void decreaseIndent()
{
if (options.shouldPrettyPrint()) {
assert(indent.size() >= options.prettyIndent);
indent.resize(indent.size() - options.prettyIndent);
}
}

/**
* Print a space (for separating items or attributes).
*
* If pretty-printing is enabled, a newline and the current `indent` is
* printed instead.
*/
void printSpace(bool prettyPrint)
{
if (prettyPrint) {
output << "\n" << indent;
} else {
output << " ";
}
}

void printRepeated()
{

@ -255,14 +287,36 @@ private:
output << "»";
if (options.ansiColors)
output << ANSI_NORMAL;
} catch (BaseError & e) {
} catch (Error & e) {
printError_(e);
}
}

bool shouldPrettyPrintAttrs(AttrVec & v)
{
if (!options.shouldPrettyPrint() || v.empty()) {
return false;
}

// Pretty-print attrsets with more than one item.
if (v.size() > 1) {
return true;
}

auto item = v[0].second;
if (!item) {
return true;
}

// Pretty-print single-item attrsets only if they contain nested
// structures.
auto itemType = item->type();
return itemType == nList || itemType == nAttrs || itemType == nThunk;
}

void printAttrs(Value & v, size_t depth)
{
if (seen && !seen->insert(&v).second) {
if (seen && !seen->insert(v.attrs).second) {
printRepeated();
return;
}

@ -270,9 +324,10 @@ private:
if (options.force && options.derivationPaths && state.isDerivation(v)) {
printDerivation(v);
} else if (depth < options.maxDepth) {
output << "{ ";
increaseIndent();
output << "{";

std::vector<std::pair<std::string, Value *>> sorted;
AttrVec sorted;
for (auto & i : *v.attrs)
sorted.emplace_back(std::pair(state.symbols[i.name], i.value));

@ -281,7 +336,11 @@ private:
else
std::sort(sorted.begin(), sorted.end(), ImportantFirstAttrNameCmp());

auto prettyPrint = shouldPrettyPrintAttrs(sorted);

for (auto & i : sorted) {
printSpace(prettyPrint);

if (attrsPrinted >= options.maxAttrs) {
printElided(sorted.size() - attrsPrinted, "attribute", "attributes");
break;

@ -290,13 +349,38 @@ private:
printAttributeName(output, i.first);
output << " = ";
print(*i.second, depth + 1);
output << "; ";
output << ";";
attrsPrinted++;
}

decreaseIndent();
printSpace(prettyPrint);
output << "}";
} else
} else {
output << "{ ... }";
}
}

bool shouldPrettyPrintList(std::span<Value * const> list)
{
if (!options.shouldPrettyPrint() || list.empty()) {
return false;
}

// Pretty-print lists with more than one item.
if (list.size() > 1) {
return true;
}

auto item = list[0];
if (!item) {
return true;
}

// Pretty-print single-item lists only if they contain nested
// structures.
auto itemType = item->type();
return itemType == nList || itemType == nAttrs || itemType == nThunk;
}

void printList(Value & v, size_t depth)

@ -306,11 +390,16 @@ private:
return;
}

output << "[ ";
if (depth < options.maxDepth) {
for (auto elem : v.listItems()) {
increaseIndent();
output << "[";
auto listItems = v.listItems();
auto prettyPrint = shouldPrettyPrintList(listItems);
for (auto elem : listItems) {
printSpace(prettyPrint);

if (listItemsPrinted >= options.maxListItems) {
printElided(v.listSize() - listItemsPrinted, "item", "items");
printElided(listItems.size() - listItemsPrinted, "item", "items");
break;
}

@ -319,13 +408,15 @@ private:
} else {
printNullptr();
}
output << " ";
listItemsPrinted++;
}

decreaseIndent();
printSpace(prettyPrint);
output << "]";
} else {
output << "[ ... ]";
}
else
output << "... ";
output << "]";
}

void printFunction(Value & v)

@ -405,11 +496,11 @@ private:
output << ANSI_NORMAL;
}

void printError_(BaseError & e)
void printError_(Error & e)
{
if (options.ansiColors)
output << ANSI_RED;
output << "«" << e.msg() << "»";
output << "«error: " << filterANSIEscapes(e.info().msg.str(), true) << "»";
if (options.ansiColors)
output << ANSI_NORMAL;
}

@ -422,7 +513,7 @@ private:
if (options.force) {
try {
state.forceValue(v, v.determinePos(noPos));
} catch (BaseError & e) {
} catch (Error & e) {
printError_(e);
return;
}

@ -488,6 +579,7 @@ public:
{
attrsPrinted = 0;
listItemsPrinted = 0;
indent.clear();

if (options.trackRepeated) {
seen.emplace();

@ -511,4 +603,11 @@ std::ostream & operator<<(std::ostream & output, const ValuePrinter & printer)
return output;
}

template<>
HintFmt & HintFmt::operator%(const ValuePrinter & value)
{
fmt % value;
return *this;
}

}
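Two details in the `Printer` changes above are worth spelling out: repetition tracking now keys on the underlying allocation (`v.attrs`, hence `std::set<const void *>`) rather than on the `Value *` wrapper, and the indent string grows and shrinks around each nested attrset or list. The self-contained sketch below imitates both ideas on a toy tree type; it is an illustration of the technique, not Nix's printer.

// Toy pretty-printer: tracks already-visited nodes by pointer to detect
// self-reference, and maintains an indent string while descending.
#include <iostream>
#include <set>
#include <string>
#include <vector>

struct Node {
    std::string name;
    std::vector<Node *> children;
};

class Printer {
    std::ostream & out;
    size_t indentWidth;
    std::string indent;
    std::set<const void *> seen;    // like ValuesSeen after the change above

public:
    Printer(std::ostream & out, size_t indentWidth) : out(out), indentWidth(indentWidth) {}

    void print(Node & n) {
        if (!seen.insert(&n).second) {       // already printed: avoid infinite recursion
            out << indent << n.name << " «repeated»\n";
            return;
        }
        out << indent << n.name << "\n";
        indent.append(indentWidth, ' ');                 // increaseIndent
        for (auto * child : n.children)
            print(*child);
        indent.resize(indent.size() - indentWidth);      // decreaseIndent
    }
};

int main() {
    Node root{"root", {}}, a{"a", {}}, b{"b", {}};
    root.children = {&a, &b};
    a.children = {&root};                    // a cycle back to the root
    Printer(std::cout, 2).print(root);
}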
@ -9,6 +9,7 @@

#include <iostream>

#include "fmt.hh"
#include "print-options.hh"

namespace nix {

@ -78,4 +79,13 @@ public:
};

std::ostream & operator<<(std::ostream & output, const ValuePrinter & printer);


/**
* `ValuePrinter` does its own ANSI formatting, so we don't color it
* magenta.
*/
template<>
HintFmt & HintFmt::operator%(const ValuePrinter & value);

}
20
src/libexpr/repl-exit-status.hh
Normal file

@ -0,0 +1,20 @@
#pragma once

namespace nix {

/**
* Exit status returned from the REPL.
*/
enum class ReplExitStatus {
/**
* The user exited with `:quit`. The program (e.g., if the REPL was acting
* as the debugger) should exit.
*/
QuitAll,
/**
* The user exited with `:continue`. The program should continue running.
*/
Continue,
};

}
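The new `repl-exit-status.hh` header above gives the debugger-aware REPL a two-valued result instead of a bare integer. A hedged sketch of how a caller might branch on it follows; only the enum itself comes from the header above, while `runRepl` is a hypothetical stand-in.

// Illustration only: branching on the ReplExitStatus introduced above.
// The enum body is copied from the new header; runRepl() is a stand-in.
#include <cstdio>

namespace nix {

enum class ReplExitStatus {
    QuitAll,   // user typed :quit — the whole program should exit
    Continue,  // user typed :continue — resume normal execution
};

}

// Hypothetical REPL entry point; a real one would evaluate user input.
nix::ReplExitStatus runRepl() {
    return nix::ReplExitStatus::Continue;
}

int main() {
    switch (runRepl()) {
        case nix::ReplExitStatus::QuitAll:
            std::puts("exiting at the user's request");
            return 0;
        case nix::ReplExitStatus::Continue:
            std::puts("continuing evaluation");
            break;
    }
    // ... the rest of the program would carry on here ...
    return 0;
}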
@ -64,7 +64,7 @@ json printValueAsJSON(EvalState & state, bool strict,
out[j] = printValueAsJSON(state, strict, *a.value, a.pos, context, copyToStore);
} catch (Error & e) {
e.addTrace(state.positions[a.pos],
hintfmt("while evaluating attribute '%1%'", j));
HintFmt("while evaluating attribute '%1%'", j));
throw;
}
}

@ -80,8 +80,8 @@ json printValueAsJSON(EvalState & state, bool strict,
try {
out.push_back(printValueAsJSON(state, strict, *elem, pos, context, copyToStore));
} catch (Error & e) {
e.addTrace({},
hintfmt("while evaluating list element at index %1%", i));
e.addTrace(state.positions[pos],
HintFmt("while evaluating list element at index %1%", i));
throw;
}
i++;

@ -99,13 +99,12 @@ json printValueAsJSON(EvalState & state, bool strict,

case nThunk:
case nFunction:
auto e = TypeError({
.msg = hintfmt("cannot convert %1% to JSON", showType(v)),
.errPos = state.positions[v.determinePos(pos)]
});
e.addTrace(state.positions[pos], hintfmt("message for the trace"));
state.debugThrowLastTrace(e);
throw e;
state.error<TypeError>(
"cannot convert %1% to JSON",
showType(v)
)
.atPos(v.determinePos(pos))
.debugThrow();
}
return out;
}

@ -119,7 +118,8 @@ void printValueAsJSON(EvalState & state, bool strict,
json ExternalValueBase::printValueAsJSON(EvalState & state, bool strict,
NixStringContext & context, bool copyToStore) const
{
state.debugThrowLastTrace(TypeError("cannot convert %1% to JSON", showType()));
state.error<TypeError>("cannot convert %1% to JSON", showType())
.debugThrow();
}
@ -105,7 +105,7 @@ class ExternalValueBase
* Coerce the value to a string. Defaults to uncoercable, i.e. throws an
* error.
*/
virtual std::string coerceToString(const Pos & pos, NixStringContext & context, bool copyMore, bool copyToStore) const;
virtual std::string coerceToString(EvalState & state, const PosIdx & pos, NixStringContext & context, bool copyMore, bool copyToStore) const;

/**
* Compare to another value of the same type. Defaults to uncomparable,
@ -19,8 +19,8 @@ public:
: Error("")
{
raw = raw_;
auto hf = hintfmt(args...);
err.msg = hintfmt("Bad String Context element: %1%: %2%", normaltxt(hf.str()), raw);
auto hf = HintFmt(args...);
err.msg = HintFmt("Bad String Context element: %1%: %2%", Uncolored(hf.str()), raw);
}
};
Some files were not shown because too many files have changed in this diff.