Merge remote-tracking branch 'upstream/master' into overlayfs-store

Commit d5fe828d76
.github/labeler.yml (vendored) | 17

@@ -2,5 +2,22 @@
- doc/manual/*
- src/nix/**/*.md

"store":
- src/libstore/store-api.*
- src/libstore/*-store.*

"fetching":
- src/libfetchers/**/*

"repl":
- src/libcmd/repl.*
- src/nix/repl.*

"new-cli":
- src/nix/**/*

"tests":
# Unit tests
- src/*/tests/**/*
# Functional and integration tests
- tests/**/*

.gitignore (vendored) | 3

@@ -19,9 +19,12 @@ perl/Makefile.config
/doc/manual/nix.json
/doc/manual/conf-file.json
/doc/manual/builtins.json
/doc/manual/xp-features.json
/doc/manual/src/SUMMARY.md
/doc/manual/src/command-ref/new-cli
/doc/manual/src/command-ref/conf-file.md
/doc/manual/src/command-ref/experimental-features-shortlist.md
/doc/manual/src/contributing/experimental-feature-descriptions.md
/doc/manual/src/language/builtins.md

# /scripts/

@@ -184,7 +184,7 @@ fi

# Look for OpenSSL, a required dependency. FIXME: this is only (maybe)
# used by S3BinaryCacheStore.
PKG_CHECK_MODULES([OPENSSL], [libcrypto], [CXXFLAGS="$OPENSSL_CFLAGS $CXXFLAGS"])
PKG_CHECK_MODULES([OPENSSL], [libcrypto >= 1.1.1], [CXXFLAGS="$OPENSSL_CFLAGS $CXXFLAGS"])


# Look for libarchive.

@@ -1,8 +1,12 @@
builtinsDump:
let
inherit (builtins) concatStringsSep attrNames;
in

builtinsInfo:
let
showBuiltin = name:
let
inherit (builtinsDump.${name}) doc args;
inherit (builtinsInfo.${name}) doc args;
in
''
<dt id="builtins-${name}">

@@ -14,7 +18,7 @@ let

</dd>
'';
listArgs = args: builtins.concatStringsSep " " (map (s: "<var>${s}</var>") args);
listArgs = args: concatStringsSep " " (map (s: "<var>${s}</var>") args);
in
with builtins; concatStringsSep "\n" (map showBuiltin (attrNames builtinsDump))
concatStringsSep "\n" (map showBuiltin (attrNames builtinsInfo))

@@ -1,16 +1,24 @@
cliDumpStr:
let
inherit (builtins)
attrNames attrValues fromJSON listToAttrs mapAttrs
concatStringsSep concatMap length lessThan replaceStrings sort;
inherit (import ./utils.nix) concatStrings optionalString filterAttrs trim squash unique showSettings;
in

with builtins;
with import ./utils.nix;
commandDump:

let

commandInfo = fromJSON commandDump;

showCommand = { command, details, filename, toplevel }:
let

result = ''
> **Warning** \
> This program is **experimental** and its interface is subject to change.
> This program is
> [**experimental**](@docroot@/contributing/experimental-features.md#xp-feature-nix-command)
> and its interface is subject to change.

# Name

@@ -29,19 +37,18 @@ let

showSynopsis = command: args:
let
showArgument = arg: "*${arg.label}*" + (if arg ? arity then "" else "...");
showArgument = arg: "*${arg.label}*" + optionalString (! arg ? arity) "...";
arguments = concatStringsSep " " (map showArgument args);
in ''
`${command}` [*option*...] ${arguments}
'';

maybeSubcommands = if details ? commands && details.commands != {}
then ''
maybeSubcommands = optionalString (details ? commands && details.commands != {})
''
where *subcommand* is one of the following:

${subcommands}
''
else "";
'';

subcommands = if length categories > 1
then listCategories
@@ -63,12 +70,11 @@ let
* [`${command} ${name}`](./${appendName filename name}.md) - ${subcmd.description}
'';

maybeDocumentation =
if details ? doc
then replaceStrings ["@stores@"] [storeDocs] details.doc
else "";
maybeDocumentation = optionalString
(details ? doc)
(replaceStrings ["@stores@"] [storeDocs] details.doc);

maybeOptions = if details.flags == {} then "" else ''
maybeOptions = optionalString (details.flags != {}) ''
# Options

${showOptions details.flags toplevel.flags}
@@ -78,21 +84,25 @@ let
let
allOptions = options // commonOptions;
showCategory = cat: ''
${if cat != "" then "**${cat}:**" else ""}
${optionalString (cat != "") "**${cat}:**"}

${listOptions (filterAttrs (n: v: v.category == cat) allOptions)}
'';
listOptions = opts: concatStringsSep "\n" (attrValues (mapAttrs showOption opts));
showOption = name: option:
let
shortName = if option ? shortName then "/ `-${option.shortName}`" else "";
labels = if option ? labels then (concatStringsSep " " (map (s: "*${s}*") option.labels)) else "";
shortName = optionalString
(option ? shortName)
("/ `-${option.shortName}`");
labels = optionalString
(option ? labels)
(concatStringsSep " " (map (s: "*${s}*") option.labels));
in trim ''
- `--${name}` ${shortName} ${labels}

${option.description}
'';
categories = sort builtins.lessThan (unique (map (cmd: cmd.category) (attrValues allOptions)));
categories = sort lessThan (unique (map (cmd: cmd.category) (attrValues allOptions)));
in concatStrings (map showCategory categories);
in squash result;

@@ -113,13 +123,11 @@ let
};
in [ cmd ] ++ concatMap subcommand (attrNames details.commands or {});

cliDump = builtins.fromJSON cliDumpStr;

manpages = processCommand {
command = "nix";
details = cliDump.args;
details = commandInfo.args;
filename = "nix";
toplevel = cliDump.args;
toplevel = commandInfo.args;
};

tableOfContents = let

@@ -139,6 +147,6 @@ let

${showSettings { useAnchors = false; } settings}
'';
in concatStrings (attrValues (mapAttrs showStore cliDump.stores));
in concatStrings (attrValues (mapAttrs showStore commandInfo.stores));

in (listToAttrs manpages) // { "SUMMARY.md" = tableOfContents; }

doc/manual/generate-xp-features-shortlist.nix (new file) | 9

@@ -0,0 +1,9 @@
with builtins;
with import ./utils.nix;

let
showExperimentalFeature = name: doc:
''
- [`${name}`](@docroot@/contributing/experimental-features.md#xp-feature-${name})
'';
in xps: indent " " (concatStrings (attrValues (mapAttrs showExperimentalFeature xps)))

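As a rough illustration of the generator above — using a hypothetical pair of feature names, not the real feature list — the shortlist it emits is an indented Markdown list of links along these lines:

```
  - [`ca-derivations`](@docroot@/contributing/experimental-features.md#xp-feature-ca-derivations)
  - [`flakes`](@docroot@/contributing/experimental-features.md#xp-feature-flakes)
```
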
doc/manual/generate-xp-features.nix (new file) | 11

@@ -0,0 +1,11 @@
with builtins;
with import ./utils.nix;

let
showExperimentalFeature = name: doc:
squash ''
## [`${name}`]{#xp-feature-${name}}

${doc}
'';
in xps: (concatStringsSep "\n" (attrValues (mapAttrs showExperimentalFeature xps)))

@@ -81,19 +81,20 @@ $(d)/%.8: $(d)/src/command-ref/%.md
$(d)/nix.conf.5: $(d)/src/command-ref/conf-file.md
@printf "Title: %s\n\n" "$$(basename $@ .5)" > $^.tmp
@cat $^ >> $^.tmp
@$(call process-includes,$^,$^.tmp)
$(trace-gen) lowdown -sT man --nroff-nolinks -M section=5 $^.tmp -o $@
@rm $^.tmp

$(d)/src/SUMMARY.md: $(d)/src/SUMMARY.md.in $(d)/src/command-ref/new-cli
$(d)/src/SUMMARY.md: $(d)/src/SUMMARY.md.in $(d)/src/command-ref/new-cli $(d)/src/contributing/experimental-feature-descriptions.md
@cp $< $@
@$(call process-includes,$@,$@)

$(d)/src/command-ref/new-cli: $(d)/nix.json $(d)/generate-manpage.nix $(bindir)/nix
$(d)/src/command-ref/new-cli: $(d)/nix.json $(d)/utils.nix $(d)/generate-manpage.nix $(bindir)/nix
@rm -rf $@ $@.tmp
$(trace-gen) $(nix-eval) --write-to $@.tmp --expr 'import doc/manual/generate-manpage.nix (builtins.readFile $<)'
@mv $@.tmp $@

$(d)/src/command-ref/conf-file.md: $(d)/conf-file.json $(d)/utils.nix $(d)/src/command-ref/conf-file-prefix.md $(bindir)/nix
$(d)/src/command-ref/conf-file.md: $(d)/conf-file.json $(d)/utils.nix $(d)/src/command-ref/conf-file-prefix.md $(d)/src/command-ref/experimental-features-shortlist.md $(bindir)/nix
@cat doc/manual/src/command-ref/conf-file-prefix.md > $@.tmp
$(trace-gen) $(nix-eval) --expr '(import doc/manual/utils.nix).showSettings { useAnchors = true; } (builtins.fromJSON (builtins.readFile $<))' >> $@.tmp;
@mv $@.tmp $@
@@ -106,6 +107,20 @@ $(d)/conf-file.json: $(bindir)/nix
$(trace-gen) $(dummy-env) $(bindir)/nix show-config --json --experimental-features nix-command > $@.tmp
@mv $@.tmp $@

$(d)/src/contributing/experimental-feature-descriptions.md: $(d)/xp-features.json $(d)/utils.nix $(d)/generate-xp-features.nix $(bindir)/nix
@rm -rf $@ $@.tmp
$(trace-gen) $(nix-eval) --write-to $@.tmp --expr 'import doc/manual/generate-xp-features.nix (builtins.fromJSON (builtins.readFile $<))'
@mv $@.tmp $@

$(d)/src/command-ref/experimental-features-shortlist.md: $(d)/xp-features.json $(d)/utils.nix $(d)/generate-xp-features-shortlist.nix $(bindir)/nix
@rm -rf $@ $@.tmp
$(trace-gen) $(nix-eval) --write-to $@.tmp --expr 'import doc/manual/generate-xp-features-shortlist.nix (builtins.fromJSON (builtins.readFile $<))'
@mv $@.tmp $@

$(d)/xp-features.json: $(bindir)/nix
$(trace-gen) $(dummy-env) NIX_PATH=nix/corepkgs=corepkgs $(bindir)/nix __dump-xp-features > $@.tmp
@mv $@.tmp $@

$(d)/src/language/builtins.md: $(d)/builtins.json $(d)/generate-builtins.nix $(d)/src/language/builtins-prefix.md $(bindir)/nix
@cat doc/manual/src/language/builtins-prefix.md > $@.tmp
$(trace-gen) $(nix-eval) --expr 'import doc/manual/generate-builtins.nix (builtins.fromJSON (builtins.readFile $<))' >> $@.tmp;
@@ -145,7 +160,7 @@ doc/manual/generated/man1/nix3-manpages: $(d)/src/command-ref/new-cli
done
@touch $@

$(docdir)/manual/index.html: $(MANUAL_SRCS) $(d)/book.toml $(d)/anchors.jq $(d)/custom.css $(d)/src/SUMMARY.md $(d)/src/command-ref/new-cli $(d)/src/command-ref/conf-file.md $(d)/src/language/builtins.md
$(docdir)/manual/index.html: $(MANUAL_SRCS) $(d)/book.toml $(d)/anchors.jq $(d)/custom.css $(d)/src/SUMMARY.md $(d)/src/command-ref/new-cli $(d)/src/contributing/experimental-feature-descriptions.md $(d)/src/command-ref/conf-file.md $(d)/src/language/builtins.md
$(trace-gen) \
tmp="$$(mktemp -d)"; \
cp -r doc/manual "$$tmp"; \

@@ -338,6 +338,9 @@ const redirects = {
"strings": "#string",
"lists": "#list",
"attribute-sets": "#attribute-set"
},
"installation/installing-binary.html": {
"uninstalling": "uninstall.html"
}
};

@@ -15,6 +15,7 @@
- [Multi-User Mode](installation/multi-user.md)
- [Environment Variables](installation/env-variables.md)
- [Upgrading Nix](installation/upgrading.md)
- [Uninstalling Nix](installation/uninstall.md)
- [Package Management](package-management/package-management.md)
- [Basic Package Management](package-management/basic-package-mgmt.md)
- [Profiles](package-management/profiles.md)
@@ -99,6 +100,7 @@
- [CLI guideline](contributing/cli-guideline.md)
- [Release Notes](release-notes/release-notes.md)
- [Release X.Y (202?-??-??)](release-notes/rl-next.md)
- [Release 2.15 (2023-04-11)](release-notes/rl-2.15.md)
- [Release 2.14 (2023-02-28)](release-notes/rl-2.14.md)
- [Release 2.13 (2023-01-17)](release-notes/rl-2.13.md)
- [Release 2.12 (2022-12-06)](release-notes/rl-2.12.md)

@@ -1,6 +1,6 @@
# Experimental Commands

This section lists experimental commands.
This section lists [experimental commands](@docroot@/contributing/experimental-features.md#xp-feature-nix-command).

> **Warning**
>

@@ -203,10 +203,9 @@ Most Nix commands accept the following command-line options:
instead.

- <span id="opt-I">[`-I`](#opt-I)</span> *path*\
Add a path to the Nix expression search path. This option may be
given multiple times. See the `NIX_PATH` environment variable for
information on the semantics of the Nix search path. Paths added
through `-I` take precedence over `NIX_PATH`.
Add an entry to the [Nix expression search path](@docroot@/command-ref/conf-file.md#conf-nix-path).
This option may be given multiple times.
Paths added through `-I` take precedence over [`NIX_PATH`](@docroot@/command-ref/env-common.md#env-NIX_PATH).

- <span id="opt-option">[`--option`](#opt-option)</span> *name* *value*\
Set the Nix configuration option *name* to *value*. This overrides
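For instance — a sketch only, with a hypothetical local checkout path — `-I` can pin the `nixpkgs` search-path entry for a single invocation:

```console
$ nix-build '<nixpkgs>' -A hello -I nixpkgs=/path/to/my/nixpkgs
```
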
@@ -89,3 +89,7 @@ However they serve different purposes:
It is primarily an issue of *design* and *communication*, targeting the broader community.

This means that experimental features and RFCs are orthogonal mechanisms, and can be used independently or together as needed.

# Currently available experimental features

{{#include ./experimental-feature-descriptions.md}}

@@ -127,7 +127,7 @@
builder can rely on external inputs such as the network or the
system time) but the Nix model assumes it.

- Nix database{#gloss-nix-database}\
- [Nix database]{#gloss-nix-database}\
An SQlite database to track [reference]s between [store object]s.
This is an implementation detail of the [local store].

@@ -225,3 +225,9 @@
[string]: ./language/values.md#type-string
[path]: ./language/values.md#type-path
[attribute name]: ./language/values.md#attribute-set

- [experimental feature]{#gloss-experimental-feature}\
Not yet stabilized functionality guarded by named experimental feature flags.
These flags are enabled or disabled with the [`experimental-features`](./command-ref/conf-file.html#conf-experimental-features) setting.

See the contribution guide on the [purpose and lifecycle of experimental feaures](@docroot@/contributing/experimental-features.md).

@@ -47,12 +47,6 @@ The install script will modify the first writable file from amongst
`NIX_INSTALLER_NO_MODIFY_PROFILE` environment variable before executing
the install script to disable this behaviour.

You can uninstall Nix simply by running:

```console
$ rm -rf /nix
```

# Multi User Installation

The multi-user Nix installation creates system users, and a system
@@ -84,154 +78,8 @@ The installer will modify `/etc/bashrc`, and `/etc/zshrc` if they exist.
The installer will first back up these files with a `.backup-before-nix`
extension. The installer will also create `/etc/profile.d/nix.sh`.

## Uninstalling

### Linux

If you are on Linux with systemd:

1. Remove the Nix daemon service:

```console
sudo systemctl stop nix-daemon.service
sudo systemctl disable nix-daemon.socket nix-daemon.service
sudo systemctl daemon-reload
```

1. Remove systemd service files:

```console
sudo rm /etc/systemd/system/nix-daemon.service /etc/systemd/system/nix-daemon.socket
```

1. The installer script uses systemd-tmpfiles to create the socket directory.
You may also want to remove the configuration for that:

```console
sudo rm /etc/tmpfiles.d/nix-daemon.conf
```

Remove files created by Nix:

```console
sudo rm -rf /nix /etc/nix /etc/profile/nix.sh ~root/.nix-profile ~root/.nix-defexpr ~root/.nix-channels ~/.nix-profile ~/.nix-defexpr ~/.nix-channels
```

Remove build users and their group:

```console
for i in $(seq 1 32); do
sudo userdel nixbld$i
done
sudo groupdel nixbld
```

There may also be references to Nix in

- `/etc/profile`
- `/etc/bashrc`
- `/etc/zshrc`

which you may remove.

### macOS

1. Edit `/etc/zshrc` and `/etc/bashrc` to remove the lines sourcing
`nix-daemon.sh`, which should look like this:

```bash
# Nix
if [ -e '/nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh' ]; then
. '/nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh'
fi
# End Nix
```

If these files haven't been altered since installing Nix you can simply put
the backups back in place:

```console
sudo mv /etc/zshrc.backup-before-nix /etc/zshrc
sudo mv /etc/bashrc.backup-before-nix /etc/bashrc
```

This will stop shells from sourcing the file and bringing everything you
installed using Nix in scope.

2. Stop and remove the Nix daemon services:

```console
sudo launchctl unload /Library/LaunchDaemons/org.nixos.nix-daemon.plist
sudo rm /Library/LaunchDaemons/org.nixos.nix-daemon.plist
sudo launchctl unload /Library/LaunchDaemons/org.nixos.darwin-store.plist
sudo rm /Library/LaunchDaemons/org.nixos.darwin-store.plist
```

This stops the Nix daemon and prevents it from being started next time you
boot the system.

3. Remove the `nixbld` group and the `_nixbuildN` users:

```console
sudo dscl . -delete /Groups/nixbld
for u in $(sudo dscl . -list /Users | grep _nixbld); do sudo dscl . -delete /Users/$u; done
```

This will remove all the build users that no longer serve a purpose.

4. Edit fstab using `sudo vifs` to remove the line mounting the Nix Store
volume on `/nix`, which looks like
`UUID=<uuid> /nix apfs rw,noauto,nobrowse,suid,owners` or
`LABEL=Nix\040Store /nix apfs rw,nobrowse`. This will prevent automatic
mounting of the Nix Store volume.

5. Edit `/etc/synthetic.conf` to remove the `nix` line. If this is the only
line in the file you can remove it entirely, `sudo rm /etc/synthetic.conf`.
This will prevent the creation of the empty `/nix` directory to provide a
mountpoint for the Nix Store volume.

6. Remove the files Nix added to your system:

```console
sudo rm -rf /etc/nix /var/root/.nix-profile /var/root/.nix-defexpr /var/root/.nix-channels ~/.nix-profile ~/.nix-defexpr ~/.nix-channels
```

This gets rid of any data Nix may have created except for the store which is
removed next.

7. Remove the Nix Store volume:

```console
sudo diskutil apfs deleteVolume /nix
```

This will remove the Nix Store volume and everything that was added to the
store.

If the output indicates that the command couldn't remove the volume, you should
make sure you don't have an _unmounted_ Nix Store volume. Look for a
"Nix Store" volume in the output of the following command:

```console
diskutil list
```

If you _do_ see a "Nix Store" volume, delete it by re-running the diskutil
deleteVolume command, but replace `/nix` with the store volume's `diskXsY`
identifier.

> **Note**
>
> After you complete the steps here, you will still have an empty `/nix`
> directory. This is an expected sign of a successful uninstall. The empty
> `/nix` directory will disappear the next time you reboot.
>
> You do not have to reboot to finish uninstalling Nix. The uninstall is
> complete. macOS (Catalina+) directly controls root directories and its
> read-only root will prevent you from manually deleting the empty `/nix`
> mountpoint.

# macOS Installation

[]{#sect-macos-installation-change-store-prefix}[]{#sect-macos-installation-encrypted-volume}[]{#sect-macos-installation-symlink}[]{#sect-macos-installation-recommended-notes}
<!-- Note: anchors above to catch permalinks to old explanations -->

@@ -280,19 +128,16 @@ this to run the installer, but it may help if you run into trouble:

# Installing a pinned Nix version from a URL

NixOS.org hosts version-specific installation URLs for all Nix versions
since 1.11.16, at `https://releases.nixos.org/nix/nix-version/install`.
Version-specific installation URLs for all Nix versions
since 1.11.16 can be found at [releases.nixos.org](https://releases.nixos.org/?prefix=nix/).
The corresponding SHA-256 hash can be found in the directory for the given version.

These install scripts can be used the same as the main NixOS.org
installation script:
These install scripts can be used the same as usual:

```console
$ curl -L https://nixos.org/nix/install | sh
$ curl -L https://releases.nixos.org/nix/nix-<version>/install | sh
```

In the same directory of the install script are sha256 sums, and gpg
signature files.

# Installing from a binary tarball

You can also download a binary tarball that contains Nix and all its

doc/manual/src/installation/uninstall.md (new file) | 159

@@ -0,0 +1,159 @@
# Uninstalling Nix

## Single User

If you have a [single-user installation](./installing-binary.md#single-user-installation) of Nix, uninstall it by running:

```console
$ rm -rf /nix
```

## Multi User

Removing a [multi-user installation](./installing-binary.md#multi-user-installation) of Nix is more involved, and depends on the operating system.

### Linux

If you are on Linux with systemd:

1. Remove the Nix daemon service:

```console
sudo systemctl stop nix-daemon.service
sudo systemctl disable nix-daemon.socket nix-daemon.service
sudo systemctl daemon-reload
```

1. Remove systemd service files:

```console
sudo rm /etc/systemd/system/nix-daemon.service /etc/systemd/system/nix-daemon.socket
```

1. The installer script uses systemd-tmpfiles to create the socket directory.
You may also want to remove the configuration for that:

```console
sudo rm /etc/tmpfiles.d/nix-daemon.conf
```

Remove files created by Nix:

```console
sudo rm -rf /nix /etc/nix /etc/profile/nix.sh ~root/.nix-profile ~root/.nix-defexpr ~root/.nix-channels ~/.nix-profile ~/.nix-defexpr ~/.nix-channels
```

Remove build users and their group:

```console
for i in $(seq 1 32); do
sudo userdel nixbld$i
done
sudo groupdel nixbld
```

There may also be references to Nix in

- `/etc/profile`
- `/etc/bashrc`
- `/etc/zshrc`

which you may remove.

### macOS

1. Edit `/etc/zshrc`, `/etc/bashrc`, and `/etc/bash.bashrc` to remove the lines sourcing `nix-daemon.sh`, which should look like this:

```bash
# Nix
if [ -e '/nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh' ]; then
. '/nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh'
fi
# End Nix
```

If these files haven't been altered since installing Nix you can simply put
the backups back in place:

```console
sudo mv /etc/zshrc.backup-before-nix /etc/zshrc
sudo mv /etc/bashrc.backup-before-nix /etc/bashrc
sudo mv /etc/bash.bashrc.backup-before-nix /etc/bash.bashrc
```

This will stop shells from sourcing the file and bringing everything you
installed using Nix in scope.

2. Stop and remove the Nix daemon services:

```console
sudo launchctl unload /Library/LaunchDaemons/org.nixos.nix-daemon.plist
sudo rm /Library/LaunchDaemons/org.nixos.nix-daemon.plist
sudo launchctl unload /Library/LaunchDaemons/org.nixos.darwin-store.plist
sudo rm /Library/LaunchDaemons/org.nixos.darwin-store.plist
```

This stops the Nix daemon and prevents it from being started next time you
boot the system.

3. Remove the `nixbld` group and the `_nixbuildN` users:

```console
sudo dscl . -delete /Groups/nixbld
for u in $(sudo dscl . -list /Users | grep _nixbld); do sudo dscl . -delete /Users/$u; done
```

This will remove all the build users that no longer serve a purpose.

4. Edit fstab using `sudo vifs` to remove the line mounting the Nix Store
volume on `/nix`, which looks like
`UUID=<uuid> /nix apfs rw,noauto,nobrowse,suid,owners` or
`LABEL=Nix\040Store /nix apfs rw,nobrowse`. This will prevent automatic
mounting of the Nix Store volume.

5. Edit `/etc/synthetic.conf` to remove the `nix` line. If this is the only
line in the file you can remove it entirely, `sudo rm /etc/synthetic.conf`.
This will prevent the creation of the empty `/nix` directory to provide a
mountpoint for the Nix Store volume.

6. Remove the files Nix added to your system:

```console
sudo rm -rf /etc/nix /var/root/.nix-profile /var/root/.nix-defexpr /var/root/.nix-channels ~/.nix-profile ~/.nix-defexpr ~/.nix-channels
```

This gets rid of any data Nix may have created except for the store which is
removed next.

7. Remove the Nix Store volume:

```console
sudo diskutil apfs deleteVolume /nix
```

This will remove the Nix Store volume and everything that was added to the
store.

If the output indicates that the command couldn't remove the volume, you should
make sure you don't have an _unmounted_ Nix Store volume. Look for a
"Nix Store" volume in the output of the following command:

```console
diskutil list
```

If you _do_ see a "Nix Store" volume, delete it by re-running the diskutil
deleteVolume command, but replace `/nix` with the store volume's `diskXsY`
identifier.

> **Note**
>
> After you complete the steps here, you will still have an empty `/nix`
> directory. This is an expected sign of a successful uninstall. The empty
> `/nix` directory will disappear the next time you reboot.
>
> You do not have to reboot to finish uninstalling Nix. The uninstall is
> complete. macOS (Catalina+) directly controls root directories and its
> read-only root will prevent you from manually deleting the empty `/nix`
> mountpoint.

@@ -208,12 +208,26 @@ Derivations can declare some infrequently used optional attributes.
about converting to and from base-32 notation.)

- [`__contentAddressed`]{#adv-attr-__contentAddressed}
If this **experimental** attribute is set to true, then the derivation
> **Warning**
> This attribute is part of an [experimental feature](@docroot@/contributing/experimental-features.md).
>
> To use this attribute, you must enable the
> [`ca-derivations`](@docroot@/contributing/experimental-features.md#xp-feature-ca-derivations) experimental feature.
> For example, in [nix.conf](../command-ref/conf-file.md) you could add:
>
> ```
> extra-experimental-features = ca-derivations
> ```

If this attribute is set to `true`, then the derivation
outputs will be stored in a content-addressed location rather than the
traditional input-addressed one.
This only has an effect if the `ca-derivations` experimental feature is enabled.

Setting this attribute also requires setting `outputHashMode` and `outputHashAlgo` like for *fixed-output derivations* (see above).
Setting this attribute also requires setting
[`outputHashMode`](#adv-attr-outputHashMode)
and
[`outputHashAlgo`](#adv-attr-outputHashAlgo)
like for *fixed-output derivations* (see above).

- [`passAsFile`]{#adv-attr-passAsFile}\
A list of names of attributes that should be passed via files rather
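A minimal sketch of a derivation combining these attributes — assuming the `ca-derivations` feature is enabled; the name, builder, and hash settings here are illustrative only, not taken from the sources above:

```nix
derivation {
  name = "example";
  system = builtins.currentSystem;
  builder = "/bin/sh";
  args = [ "-c" "echo hello > $out" ];
  # Store the output content-addressed instead of input-addressed.
  __contentAddressed = true;
  # Required alongside __contentAddressed, as for fixed-output derivations.
  outputHashMode = "recursive";
  outputHashAlgo = "sha256";
}
```
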
@@ -307,9 +321,11 @@ Derivations can declare some infrequently used optional attributes.

- [`unsafeDiscardReferences`]{#adv-attr-unsafeDiscardReferences}\
> **Warning**
> This is an experimental feature.
> This attribute is part of an [experimental feature](@docroot@/contributing/experimental-features.md).
>
> To enable it, add the following to [nix.conf](../command-ref/conf-file.md):
> To use this attribute, you must enable the
> [`discard-references`](@docroot@/contributing/experimental-features.md#xp-feature-discard-references) experimental feature.
> For example, in [nix.conf](../command-ref/conf-file.md) you could add:
>
> ```
> extra-experimental-features = discard-references

@@ -19,7 +19,7 @@ to subsequent chapters.
channel:

```console
$ nix-env -qaP
$ nix-env --query --available --attr-path
nixpkgs.docbook_xml_dtd_43 docbook-xml-4.3
nixpkgs.docbook_xml_dtd_45 docbook-xml-4.5
nixpkgs.firefox firefox-33.0.2

@@ -31,7 +31,7 @@ to subsequent chapters.
1. Install some packages from the channel:

```console
$ nix-env -iA nixpkgs.hello
$ nix-env --install --attr nixpkgs.hello
```

This should download pre-built packages; it should not build them

@@ -49,13 +49,13 @@ to subsequent chapters.
1. Uninstall a package:

```console
$ nix-env -e hello
$ nix-env --uninstall hello
```

1. You can also test a package without installing it:

```console
$ nix-shell -p hello
$ nix-shell --packages hello
```

This builds or downloads GNU Hello and its dependencies, then drops

@@ -76,7 +76,7 @@ to subsequent chapters.

```console
$ nix-channel --update nixpkgs
$ nix-env -u '*'
$ nix-env --upgrade '*'
```

The latter command will upgrade each installed package for which

@@ -95,5 +95,5 @@ to subsequent chapters.
them:

```console
$ nix-collect-garbage -d
$ nix-collect-garbage --delete-old
```

doc/manual/src/release-notes/rl-2.15.md (new file) | 58

@@ -0,0 +1,58 @@
# Release 2.15 (2023-04-11)

* Commands which take installables on the command line can now read them from the standard input if
passed the `--stdin` flag. This is primarily useful when you have a large amount of paths which
exceed the OS argument limit.

* The `nix-hash` command now supports Base64 and SRI. Use the flags `--base64`
or `--sri` to specify the format of output hash as Base64 or SRI, and `--to-base64`
or `--to-sri` to convert a hash to Base64 or SRI format, respectively.

As the choice of hash formats is no longer binary, the `--base16` flag is also added
to explicitly specify the Base16 format, which is still the default.

* The special handling of an [installable](../command-ref/new-cli/nix.md#installables) with `.drv` suffix being interpreted as all of the given [store derivation](../glossary.md#gloss-store-derivation)'s output paths is removed, and instead taken as the literal store path that it represents.

The new `^` syntax for store paths introduced in Nix 2.13 allows explicitly referencing output paths of a derivation.
Using this is better and more clear than relying on the now-removed `.drv` special handling.

For example,
```shell-session
$ nix path-info /nix/store/gzaflydcr6sb3567hap9q6srzx8ggdgg-glibc-2.33-78.drv
```

now gives info about the derivation itself, while

```shell-session
$ nix path-info /nix/store/gzaflydcr6sb3567hap9q6srzx8ggdgg-glibc-2.33-78.drv^*
```
provides information about each of its outputs.

* The experimental command `nix describe-stores` has been removed.

* Nix stores and their settings are now documented in [`nix help-stores`](@docroot@/command-ref/new-cli/nix3-help-stores.md).

* Documentation for operations of `nix-store` and `nix-env` are now available on separate pages of the manual.
They include all common options that can be specified and common environment variables that affect these commands.

These pages can be viewed offline with `man` using

* `man nix-store-<operation>` and `man nix-env-<operation>`
* `nix-store --help --<operation>` and `nix-env --help --<operation>`.

* Nix when used as a client now checks whether the store (the server) trusts the client.
(The store always had to check whether it trusts the client, but now the client is informed of the store's decision.)
This is useful for scripting interactions with (non-legacy-ssh) remote Nix stores.

`nix store ping` and `nix doctor` now display this information.

* The new command `nix derivation add` allows adding derivations to the store without involving the Nix language.
It exists to round out our collection of basic utility/plumbing commands, and allow for a low barrier-to-entry way of experimenting with alternative front-ends to the Nix Store.
It uses the same JSON layout as `nix derivation show`, and is its inverse.

* `nix show-derivation` has been renamed to `nix derivation show`.
This matches `nix derivation add`, and avoids bloating the top-level namespace.
The old name is still kept as an alias for compatibility, however.

* The `nix derivation {add,show}` JSON format now includes the derivation name as a top-level field.
This is useful in general, but especially necessary for the `add` direction, as otherwise we would need to pass in the name out of band for certain cases.
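As a quick, hedged illustration of the new `nix-hash` flags described in the release notes above (the directory path and hash value below are placeholders, not real outputs):

```console
$ nix-hash --type sha256 --base64 ./some-directory
$ nix-hash --to-sri --type sha256 1b8m03r63zqhnjf7l5wnldhh7c134ap5vpj0850ymkq1iyzicy5s
```
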
@@ -1,58 +1,2 @@
# Release X.Y (202?-??-??)

* Commands which take installables on the command line can now read them from the standard input if
passed the `--stdin` flag. This is primarily useful when you have a large amount of paths which
exceed the OS arg limit.

* The `nix-hash` command now supports Base64 and SRI. Use the flags `--base64`
or `--sri` to specify the format of output hash as Base64 or SRI, and `--to-base64`
or `--to-sri` to convert a hash to Base64 or SRI format, respectively.

As the choice of hash formats is no longer binary, the `--base16` flag is also added
to explicitly specify the Base16 format, which is still the default.

* The special handling of an [installable](../command-ref/new-cli/nix.md#installables) with `.drv` suffix being interpreted as all of the given [store derivation](../glossary.md#gloss-store-derivation)'s output paths is removed, and instead taken as the literal store path that it represents.

The new `^` syntax for store paths introduced in Nix 2.13 allows explicitly referencing output paths of a derivation.
Using this is better and more clear than relying on the now-removed `.drv` special handling.

For example,
```shell-session
$ nix path-info /nix/store/gzaflydcr6sb3567hap9q6srzx8ggdgg-glibc-2.33-78.drv
```

now gives info about the derivation itself, while

```shell-session
$ nix path-info /nix/store/gzaflydcr6sb3567hap9q6srzx8ggdgg-glibc-2.33-78.drv^*
```
provides information about each of its outputs.

* The experimental command `nix describe-stores` has been removed.

* Nix stores and their settings are now documented in [`nix help-stores`](@docroot@/command-ref/new-cli/nix3-help-stores.md).

* Documentation for operations of `nix-store` and `nix-env` are now available on separate pages of the manual.
They include all common options that can be specified and common environment variables that affect these commands.

These pages can be viewed offline with `man` using

* `man nix-store-<operation>` and `man nix-env-<operation>`
* `nix-store --help --<operation>` and `nix-env --help --<operation>`.

* Nix when used as a client now checks whether the store (the server) trusts the client.
(The store always had to check whether it trusts the client, but now the client is informed of the store's decision.)
This is useful for scripting interactions with (non-legacy-ssh) remote Nix stores.

`nix store ping` and `nix doctor` now display this information.

* A new command `nix derivation add` is created, to allow adding derivations to the store without involving the Nix language.
It exists to round out our collection of basic utility/plumbing commands, and allow for a low barrier-to-entry way of experimenting with alternative front-ends to the Nix Store.
It uses the same JSON layout as `nix show-derivation`, and is its inverse.

* `nix show-derivation` has been renamed to `nix derivation show`.
This matches `nix derivation add`, and avoids bloating the top-level namespace.
The old name is still kept as an alias for compatibility, however.

* The `nix derivation {add,show}` JSON format now includes the derivation name as a top-level field.
This is useful in general, but especially necessary for the `add` direction, as otherwise we would need to pass in the name out of band for certain cases.

@@ -5,6 +5,9 @@ rec {

concatStrings = concatStringsSep "";

attrsToList = a:
map (name: { inherit name; value = a.${name}; }) (builtins.attrNames a);

replaceStringsRec = from: to: string:
# recursively replace occurrences of `from` with `to` within `string`
# example:

@@ -39,7 +42,9 @@ rec {
filterAttrs = pred: set:
listToAttrs (concatMap (name: let v = set.${name}; in if pred name v then [(nameValuePair name v)] else []) (attrNames set));

showSetting = { useAnchors }: name: { description, documentDefault, defaultValue, aliases, value }:
optionalString = cond: string: if cond then string else "";

showSetting = { useAnchors }: name: { description, documentDefault, defaultValue, aliases, value, experimentalFeature }:
let
result = squash ''
- ${if useAnchors

@@ -49,10 +54,28 @@ rec {
${indent " " body}
'';

experimentalFeatureNote = optionalString (experimentalFeature != null) ''
> **Warning**
> This setting is part of an
> [experimental feature](@docroot@/contributing/experimental-features.md).

To change this setting, you need to make sure the corresponding experimental feature,
[`${experimentalFeature}`](@docroot@/contributing/experimental-features.md#xp-feature-${experimentalFeature}),
is enabled.
For example, include the following in [`nix.conf`](#):

```
extra-experimental-features = ${experimentalFeature}
${name} = ...
```
'';

# separate body to cleanly handle indentation
body = ''
${description}

${experimentalFeatureNote}

**Default:** ${showDefault documentDefault defaultValue}

${showAliases aliases}

@@ -71,13 +94,13 @@ rec {
else "*machine-specific*";

showAliases = aliases:
if aliases == [] then "" else
optionalString (aliases != [])
"**Deprecated alias:** ${(concatStringsSep ", " (map (s: "`${s}`") aliases))}";

indent = prefix: s:
concatStringsSep "\n" (map (x: if x == "" then x else "${prefix}${x}") (splitLines s));

in result;

indent = prefix: s:
concatStringsSep "\n" (map (x: if x == "" then x else "${prefix}${x}") (splitLines s));

showSettings = args: settingsInfo: concatStrings (attrValues (mapAttrs (showSetting args) settingsInfo));
}
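The `optionalString` helper added above replaces the repeated `if cond then s else ""` pattern used throughout these documentation generators. A self-contained sketch of its behaviour:

```nix
# optionalString yields the string only when the condition holds, else "".
let
  optionalString = cond: string: if cond then string else "";
in
{
  shown  = optionalString true "**experimental**";   # => "**experimental**"
  hidden = optionalString false "**experimental**";  # => ""
}
```
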
@@ -42,12 +42,12 @@ The team meets twice a week:

- Discussion meeting: [Fridays 13:00-14:00 CET](https://calendar.google.com/calendar/event?eid=MHNtOGVuNWtrZXNpZHR2bW1sM3QyN2ZjaGNfMjAyMjExMjVUMTIwMDAwWiBiOW81MmZvYnFqYWs4b3E4bGZraGczdDBxZ0Bn)

1. Triage issues and pull requests from the _No Status_ column (30 min)
2. Discuss issues and pull requests from the _To discuss_ column (30 min)
1. Triage issues and pull requests from the [No Status](#no-status) column (30 min)
2. Discuss issues and pull requests from the [To discuss](#to-discuss) column (30 min)

- Work meeting: [Mondays 13:00-15:00 CET](https://calendar.google.com/calendar/event?eid=NTM1MG1wNGJnOGpmOTZhYms3bTB1bnY5cWxfMjAyMjExMjFUMTIwMDAwWiBiOW81MmZvYnFqYWs4b3E4bGZraGczdDBxZ0Bn)

1. Code review on pull requests from _In review_.
1. Code review on pull requests from [In review](#in-review).
2. Other chores and tasks.

Meeting notes are collected on a [collaborative scratchpad](https://pad.lassul.us/Cv7FpYx-Ri-4VjUykQOLAw), and published on Discourse under the [Nix category](https://discourse.nixos.org/c/dev/nix/50).
@@ -58,64 +58,74 @@ The team uses a [GitHub project board](https://github.com/orgs/NixOS/projects/19

Items on the board progress through the following states:

- No Status
### No Status

During the discussion meeting, the team triages new items.
To be considered, issues and pull requests must have a high-level description to provide the whole team with the necessary context at a glance.
During the discussion meeting, the team triages new items.
To be considered, issues and pull requests must have a high-level description to provide the whole team with the necessary context at a glance.

On every meeting, at least one item from each of the following categories is inspected:
On every meeting, at least one item from each of the following categories is inspected:

1. [critical](https://github.com/NixOS/nix/labels/critical)
2. [security](https://github.com/NixOS/nix/labels/security)
3. [regression](https://github.com/NixOS/nix/labels/regression)
4. [bug](https://github.com/NixOS/nix/issues?q=is%3Aopen+label%3Abug+sort%3Areactions-%2B1-desc)
5. [tests of existing functionality](https://github.com/NixOS/nix/issues?q=is%3Aopen+label%3Atests+-label%3Afeature+sort%3Areactions-%2B1-desc)
1. [critical](https://github.com/NixOS/nix/labels/critical)
2. [security](https://github.com/NixOS/nix/labels/security)
3. [regression](https://github.com/NixOS/nix/labels/regression)
4. [bug](https://github.com/NixOS/nix/issues?q=is%3Aopen+label%3Abug+sort%3Areactions-%2B1-desc)
5. [tests of existing functionality](https://github.com/NixOS/nix/issues?q=is%3Aopen+label%3Atests+-label%3Afeature+sort%3Areactions-%2B1-desc)

- [oldest pull requests](https://github.com/NixOS/nix/pulls?q=is%3Apr+is%3Aopen+sort%3Acreated-asc)
- [most popular pull requests](https://github.com/NixOS/nix/pulls?q=is%3Apr+is%3Aopen+sort%3Areactions-%2B1-desc)
- [oldest issues](https://github.com/NixOS/nix/issues?q=is%3Aissue+is%3Aopen+sort%3Acreated-asc)
- [most popular issues](https://github.com/NixOS/nix/issues?q=is%3Aissue+is%3Aopen+sort%3Areactions-%2B1-desc)
- [oldest pull requests](https://github.com/NixOS/nix/pulls?q=is%3Apr+is%3Aopen+sort%3Acreated-asc)
- [most popular pull requests](https://github.com/NixOS/nix/pulls?q=is%3Apr+is%3Aopen+sort%3Areactions-%2B1-desc)
- [oldest issues](https://github.com/NixOS/nix/issues?q=is%3Aissue+is%3Aopen+sort%3Acreated-asc)
- [most popular issues](https://github.com/NixOS/nix/issues?q=is%3Aissue+is%3Aopen+sort%3Areactions-%2B1-desc)

Team members can also add pull requests or issues they would like the whole team to consider.
Team members can also add pull requests or issues they would like the whole team to consider.
To ensure process quality and reliability, all non-trivial pull requests must be triaged before merging.

If there is disagreement on the general idea behind an issue or pull request, it is moved to _To discuss_, otherwise to _In review_.
If there is disagreement on the general idea behind an issue or pull request, it is moved to [To discuss](#to-discuss).
Otherwise, the issue or pull request in questions get the label [`idea approved`](https://github.com/NixOS/nix/labels/idea%20approved).
For issues this means that an implementation is welcome and will be prioritised for review.
For pull requests this means that:
- Unfinished work is encouraged to be continued.
- A reviewer is assigned to take responsibility for getting the pull request merged.
The item is moved to the [Assigned](#assigned) column.
- If needed, the team can decide to do a collarorative review.
Then the item is moved to the [In review](#in-review) column, and review session is scheduled.

To ensure process quality and reliability, all non-trivial pull requests must be triaged before merging.
What constitutes a trivial pull request is up to maintainers' judgement.
What constitutes a trivial pull request is up to maintainers' judgement.

- To discuss
### To discuss

Pull requests and issues that are deemed important and controversial are discussed by the team during discussion meetings.
Pull requests and issues that are deemed important and controversial are discussed by the team during discussion meetings.

This may be where the merit of the change itself or the implementation strategy is contested by a team member.
This may be where the merit of the change itself or the implementation strategy is contested by a team member.

As a general guideline, the order of items is determined as follows:
As a general guideline, the order of items is determined as follows:

- Prioritise pull requests over issues
- Prioritise pull requests over issues

Contributors who took the time to implement concrete change proposals should not wait indefinitely.
Contributors who took the time to implement concrete change proposals should not wait indefinitely.

- Prioritise fixing bugs and testing over documentation, improvements or new features
- Prioritise fixing bugs and testing over documentation, improvements or new features

The team values stability and accessibility higher than raw functionality.
The team values stability and accessibility higher than raw functionality.

- Interleave issues and PRs
- Interleave issues and PRs

This way issues without attempts at a solution get a chance to get addressed.
This way issues without attempts at a solution get a chance to get addressed.

- In review
### In review

Pull requests in this column are reviewed together during work meetings.
This is both for spreading implementation knowledge and for establishing common values in code reviews.
Pull requests in this column are reviewed together during work meetings.
This is both for spreading implementation knowledge and for establishing common values in code reviews.

When the overall direction is agreed upon, even when further changes are required, the pull request is assigned to one team member.
When the overall direction is agreed upon, even when further changes are required, the pull request is assigned to one team member.

- Assigned for merging
### Assigned

One team member is assigned to each of these pull requests.
They will communicate with the authors, and make the final approval once all remaining issues are addressed.
One team member is assigned to each of these pull requests.
They will communicate with the authors, and make the final approval once all remaining issues are addressed.

If more substantive issues arise, the assignee can move the pull request back to _To discuss_ to involve the team again.
If more substantive issues arise, the assignee can move the pull request back to [To discuss](#to-discuss) or [In review](#in-review) to involve the team again.

### Flowchart

The process is illustrated in the following diagram:

@@ -27,8 +27,6 @@ static ref<Store> store()
if (!_store) {
try {
initLibStore();
loadConfFile();
settings.lockCPU = false;
_store = openStore();
} catch (Error & e) {
croak("%s", e.what());

@@ -295,7 +293,13 @@ SV * makeFixedOutputPath(int recursive, char * algo, char * hash, char * name)
try {
auto h = Hash::parseAny(hash, parseHashType(algo));
auto method = recursive ? FileIngestionMethod::Recursive : FileIngestionMethod::Flat;
auto path = store()->makeFixedOutputPath(method, h, name);
auto path = store()->makeFixedOutputPath(name, FixedOutputInfo {
.hash = {
.method = method,
.hash = h,
},
.references = {},
});
XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(path).c_str(), 0)));
} catch (Error & e) {
croak("%s", e.what());

@@ -92,7 +92,7 @@ poly_configure_nix_daemon_service() {
task "Setting up the nix-daemon systemd service"

_sudo "to create the nix-daemon tmpfiles config" \
ln -sfn /nix/var/nix/profiles/default/$TMPFILES_SRC $TMPFILES_DEST
ln -sfn "/nix/var/nix/profiles/default$TMPFILES_SRC" "$TMPFILES_DEST"

_sudo "to run systemd-tmpfiles once to pick that path up" \
systemd-tmpfiles --create --prefix=/nix/var/nix

@@ -311,8 +311,9 @@ connected:
auto thisOutputId = DrvOutput{ thisOutputHash, outputName };
if (!store->queryRealisation(thisOutputId)) {
debug("missing output %s", outputName);
assert(result.builtOutputs.count(thisOutputId));
auto newRealisation = result.builtOutputs.at(thisOutputId);
auto i = result.builtOutputs.find(outputName);
assert(i != result.builtOutputs.end());
auto & newRealisation = i->second;
missingRealisations.insert(newRealisation);
missingPaths.insert(newRealisation.outPath);
}

@@ -121,6 +121,8 @@ ref<EvalState> EvalCommand::getEvalState()
#endif
;

evalState->repair = repair;

if (startReplOnEvalErrors) {
evalState->debugRepl = &AbstractNixRepl::runSimple;
};

@@ -153,7 +153,7 @@ Bindings * MixEvalArgs::getAutoArgs(EvalState & state)
for (auto & i : autoArgs) {
auto v = state.allocValue();
if (i.second[0] == 'E')
state.mkThunk_(*v, state.parseExprFromString(i.second.substr(1), absPath(".")));
state.mkThunk_(*v, state.parseExprFromString(i.second.substr(1), state.rootPath(CanonPath::fromCwd())));
else
v->mkString(((std::string_view) i.second).substr(1));
res.insert(state.symbols.create(i.first), v);

@@ -161,19 +161,19 @@ Bindings * MixEvalArgs::getAutoArgs(EvalState & state)
return res.finish();
}

Path lookupFileArg(EvalState & state, std::string_view s)
SourcePath lookupFileArg(EvalState & state, std::string_view s)
{
if (EvalSettings::isPseudoUrl(s)) {
auto storePath = fetchers::downloadTarball(
state.store, EvalSettings::resolvePseudoUrl(s), "source", false).first.storePath;
return state.store->toRealPath(storePath);
return state.rootPath(CanonPath(state.store->toRealPath(storePath)));
}

else if (hasPrefix(s, "flake:")) {
experimentalFeatureSettings.require(Xp::Flakes);
auto flakeRef = parseFlakeRef(std::string(s.substr(6)), {}, true, false);
auto storePath = flakeRef.resolve(state.store).fetchTree(state.store).first.storePath;
return state.store->toRealPath(storePath);
return state.rootPath(CanonPath(state.store->toRealPath(storePath)));
}

else if (s.size() > 2 && s.at(0) == '<' && s.at(s.size() - 1) == '>') {

@@ -182,7 +182,7 @@ Path lookupFileArg(EvalState & state, std::string_view s)
}

else
return absPath(std::string(s));
return state.rootPath(CanonPath::fromCwd(s));
}

}

@@ -2,14 +2,16 @@
///@file

#include "args.hh"
#include "common-args.hh"

namespace nix {

class Store;
class EvalState;
class Bindings;
struct SourcePath;

struct MixEvalArgs : virtual Args
struct MixEvalArgs : virtual Args, virtual MixRepair
{
static constexpr auto category = "Common evaluation options";

@@ -25,6 +27,6 @@ private:
std::map<std::string, std::string> autoArgs;
};

Path lookupFileArg(EvalState & state, std::string_view s);
SourcePath lookupFileArg(EvalState & state, std::string_view s);

}

@@ -3,8 +3,11 @@

namespace nix {

Strings editorFor(const Path & file, uint32_t line)
Strings editorFor(const SourcePath & file, uint32_t line)
{
auto path = file.getPhysicalPath();
if (!path)
throw Error("cannot open '%s' in an editor because it has no physical path", file);
auto editor = getEnv("EDITOR").value_or("cat");
auto args = tokenizeString<Strings>(editor);
if (line > 0 && (

@@ -13,7 +16,7 @@ Strings editorFor(const Path & file, uint32_t line)
editor.find("vim") != std::string::npos ||
editor.find("kak") != std::string::npos))
args.push_back(fmt("+%d", line));
args.push_back(file);
args.push_back(path->abs());
return args;
}

@@ -2,6 +2,7 @@
///@file

#include "types.hh"
#include "input-accessor.hh"

namespace nix {

@@ -9,6 +10,6 @@ namespace nix {
* Helper function to generate args that invoke $EDITOR on
* filename:lineno.
*/
Strings editorFor(const Path & file, uint32_t line);
Strings editorFor(const SourcePath & file, uint32_t line);

}

@@ -96,8 +96,7 @@ DerivedPathsWithInfo InstallableFlake::toDerivedPaths()
auto v = attr->forceValue();

if (v.type() == nPath) {
PathSet context;
auto storePath = state->copyPathToStore(context, Path(v.path));
auto storePath = v.path().fetchToStore(state->store);
return {{
.path = DerivedPath::Opaque {
.path = std::move(storePath),

@@ -107,10 +106,10 @@ DerivedPathsWithInfo InstallableFlake::toDerivedPaths()
}

else if (v.type() == nString) {
PathSet context;
NixStringContext context;
auto s = state->forceString(v, context, noPos, fmt("while evaluating the flake output attribute '%s'", attrPath));
auto storePath = state->store->maybeParseStorePath(s);
if (storePath && context.count(std::string(s))) {
if (storePath && context.count(NixStringContextElem::Opaque { .path = *storePath })) {
return {{
.path = DerivedPath::Opaque {
.path = std::move(*storePath),

@ -449,7 +449,7 @@ Installables SourceExprCommand::parseInstallables(
else if (file)
state->evalFile(lookupFileArg(*state, *file), *vFile);
else {
auto e = state->parseExprFromString(*expr, absPath("."));
auto e = state->parseExprFromString(*expr, state->rootPath(CanonPath::fromCwd()));
state->eval(e, *vFile);
}

@ -593,8 +593,8 @@ std::vector<std::pair<ref<Installable>, BuiltPathWithResult>> Installable::build
std::visit(overloaded {
[&](const DerivedPath::Built & bfd) {
std::map<std::string, StorePath> outputs;
for (auto & path : buildResult.builtOutputs)
outputs.emplace(path.first.outputName, path.second.outPath);
for (auto & [outputName, realisation] : buildResult.builtOutputs)
outputs.emplace(outputName, realisation.outPath);
res.push_back({aux.installable, {
.path = BuiltPath::Built { bfd.drvPath, outputs },
.info = aux.info,
@ -40,6 +40,7 @@ extern "C" {
|
||||
#include "markdown.hh"
|
||||
#include "local-fs-store.hh"
|
||||
#include "progress-bar.hh"
|
||||
#include "print.hh"
|
||||
|
||||
#if HAVE_BOEHMGC
|
||||
#define GC_INCLUDE_NEW
|
||||
@ -54,8 +55,6 @@ struct NixRepl
|
||||
, gc
|
||||
#endif
|
||||
{
|
||||
std::string curDir;
|
||||
|
||||
size_t debugTraceIndex;
|
||||
|
||||
Strings loadedFiles;
|
||||
@ -113,7 +112,6 @@ NixRepl::NixRepl(const Strings & searchPath, nix::ref<Store> store, ref<EvalStat
|
||||
, staticEnv(new StaticEnv(false, state->staticBaseEnv.get()))
|
||||
, historyFile(getDataDir() + "/nix/repl-history")
|
||||
{
|
||||
curDir = absPath(".");
|
||||
}
|
||||
|
||||
|
||||
@ -425,6 +423,7 @@ StringSet NixRepl::completePrefix(const std::string & prefix)
|
||||
}
|
||||
|
||||
|
||||
// FIXME: DRY and match or use the parser
|
||||
static bool isVarName(std::string_view s)
|
||||
{
|
||||
if (s.size() == 0) return false;
|
||||
@ -592,14 +591,14 @@ bool NixRepl::processLine(std::string line)
|
||||
Value v;
|
||||
evalString(arg, v);
|
||||
|
||||
const auto [path, line] = [&] () -> std::pair<Path, uint32_t> {
|
||||
const auto [path, line] = [&] () -> std::pair<SourcePath, uint32_t> {
|
||||
if (v.type() == nPath || v.type() == nString) {
|
||||
PathSet context;
|
||||
NixStringContext context;
|
||||
auto path = state->coerceToPath(noPos, v, context, "while evaluating the filename to edit");
|
||||
return {path, 0};
|
||||
} else if (v.isLambda()) {
|
||||
auto pos = state->positions[v.lambda.fun->pos];
|
||||
if (auto path = std::get_if<Path>(&pos.origin))
|
||||
if (auto path = std::get_if<SourcePath>(&pos.origin))
|
||||
return {*path, pos.line};
|
||||
else
|
||||
throw Error("'%s' cannot be shown in an editor", pos);
|
||||
@ -874,8 +873,7 @@ void NixRepl::addVarToScope(const Symbol name, Value & v)
|
||||
|
||||
Expr * NixRepl::parseString(std::string s)
|
||||
{
|
||||
Expr * e = state->parseExprFromString(std::move(s), curDir, staticEnv);
|
||||
return e;
|
||||
return state->parseExprFromString(std::move(s), state->rootPath(CanonPath::fromCwd()), staticEnv);
|
||||
}
|
||||
|
||||
|
||||
@ -894,17 +892,6 @@ std::ostream & NixRepl::printValue(std::ostream & str, Value & v, unsigned int m
|
||||
}
|
||||
|
||||
|
||||
std::ostream & printStringValue(std::ostream & str, const char * string) {
|
||||
str << "\"";
|
||||
for (const char * i = string; *i; i++)
|
||||
if (*i == '\"' || *i == '\\') str << "\\" << *i;
|
||||
else if (*i == '\n') str << "\\n";
|
||||
else if (*i == '\r') str << "\\r";
|
||||
else if (*i == '\t') str << "\\t";
|
||||
else str << *i;
|
||||
str << "\"";
|
||||
return str;
|
||||
}
|
||||
|
||||
|
||||
// FIXME: lot of cut&paste from Nix's eval.cc.
|
||||
@ -922,17 +909,19 @@ std::ostream & NixRepl::printValue(std::ostream & str, Value & v, unsigned int m
|
||||
break;
|
||||
|
||||
case nBool:
|
||||
str << ANSI_CYAN << (v.boolean ? "true" : "false") << ANSI_NORMAL;
|
||||
str << ANSI_CYAN;
|
||||
printLiteralBool(str, v.boolean);
|
||||
str << ANSI_NORMAL;
|
||||
break;
|
||||
|
||||
case nString:
|
||||
str << ANSI_WARNING;
|
||||
printStringValue(str, v.string.s);
|
||||
printLiteralString(str, v.string.s);
|
||||
str << ANSI_NORMAL;
|
||||
break;
|
||||
|
||||
case nPath:
|
||||
str << ANSI_GREEN << v.path << ANSI_NORMAL; // !!! escaping?
|
||||
str << ANSI_GREEN << v.path().to_string() << ANSI_NORMAL; // !!! escaping?
|
||||
break;
|
||||
|
||||
case nNull:
|
||||
@ -947,7 +936,7 @@ std::ostream & NixRepl::printValue(std::ostream & str, Value & v, unsigned int m
|
||||
if (isDrv) {
|
||||
str << "«derivation ";
|
||||
Bindings::iterator i = v.attrs->find(state->sDrvPath);
|
||||
PathSet context;
|
||||
NixStringContext context;
|
||||
if (i != v.attrs->end())
|
||||
str << state->store->printStorePath(state->coerceToStorePath(i->pos, *i->value, context, "while evaluating the drvPath of a derivation"));
|
||||
else
|
||||
@ -964,10 +953,7 @@ std::ostream & NixRepl::printValue(std::ostream & str, Value & v, unsigned int m
|
||||
sorted.emplace(state->symbols[i.name], i.value);
|
||||
|
||||
for (auto & i : sorted) {
|
||||
if (isVarName(i.first))
|
||||
str << i.first;
|
||||
else
|
||||
printStringValue(str, i.first.c_str());
|
||||
printAttributeName(str, i.first);
|
||||
str << " = ";
|
||||
if (seen.count(i.second))
|
||||
str << "«repeated»";
|
||||
|
@ -106,7 +106,7 @@ std::pair<Value *, PosIdx> findAlongAttrPath(EvalState & state, const std::strin
}


std::pair<std::string, uint32_t> findPackageFilename(EvalState & state, Value & v, std::string what)
std::pair<SourcePath, uint32_t> findPackageFilename(EvalState & state, Value & v, std::string what)
{
Value * v2;
try {
@ -118,21 +118,25 @@ std::pair<std::string, uint32_t> findPackageFilename(EvalState & state, Value &

// FIXME: is it possible to extract the Pos object instead of doing this
// toString + parsing?
auto pos = state.forceString(*v2, noPos, "while evaluating the 'meta.position' attribute of a derivation");
NixStringContext context;
auto path = state.coerceToPath(noPos, *v2, context, "while evaluating the 'meta.position' attribute of a derivation");

auto colon = pos.rfind(':');
if (colon == std::string::npos)
throw ParseError("cannot parse meta.position attribute '%s'", pos);
auto fn = path.path.abs();

auto fail = [fn]() {
throw ParseError("cannot parse 'meta.position' attribute '%s'", fn);
};

std::string filename(pos, 0, colon);
unsigned int lineno;
try {
lineno = std::stoi(std::string(pos, colon + 1, std::string::npos));
auto colon = fn.rfind(':');
if (colon == std::string::npos) fail();
std::string filename(fn, 0, colon);
auto lineno = std::stoi(std::string(fn, colon + 1, std::string::npos));
return {CanonPath(fn.substr(0, colon)), lineno};
} catch (std::invalid_argument & e) {
throw ParseError("cannot parse line number '%s'", pos);
fail();
abort();
}

return { std::move(filename), lineno };
}

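(Editorial aside, not part of the diff: the string this function parses is the `meta.position` attribute that nixpkgs attaches to derivations, in the form `<file>:<line>`. A hedged illustration of such a value; the store path and line number are invented.)

```nix
# Illustrative only: the kind of value findPackageFilename expects to parse.
{
  meta.position = "/nix/store/abc123-nixpkgs/pkgs/misc/hello/default.nix:42";
}
```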
@ -20,7 +20,7 @@ std::pair<Value *, PosIdx> findAlongAttrPath(
|
||||
/**
|
||||
* Heuristic to find the filename and lineno or a nix value.
|
||||
*/
|
||||
std::pair<std::string, uint32_t> findPackageFilename(EvalState & state, Value & v, std::string what);
|
||||
std::pair<SourcePath, uint32_t> findPackageFilename(EvalState & state, Value & v, std::string what);
|
||||
|
||||
std::vector<Symbol> parseAttrPath(EvalState & state, std::string_view s);
|
||||
|
||||
|
@ -47,7 +47,7 @@ struct AttrDb
|
||||
{
|
||||
auto state(_state->lock());
|
||||
|
||||
Path cacheDir = getCacheDir() + "/nix/eval-cache-v4";
|
||||
Path cacheDir = getCacheDir() + "/nix/eval-cache-v5";
|
||||
createDirs(cacheDir);
|
||||
|
||||
Path dbPath = cacheDir + "/" + fingerprint.to_string(Base16, false) + ".sqlite";
|
||||
@ -300,7 +300,7 @@ struct AttrDb
|
||||
NixStringContext context;
|
||||
if (!queryAttribute.isNull(3))
|
||||
for (auto & s : tokenizeString<std::vector<std::string>>(queryAttribute.getStr(3), ";"))
|
||||
context.push_back(NixStringContextElem::parse(cfg, s));
|
||||
context.insert(NixStringContextElem::parse(s));
|
||||
return {{rowId, string_t{queryAttribute.getStr(2), context}}};
|
||||
}
|
||||
case AttrType::Bool:
|
||||
@ -442,8 +442,10 @@ Value & AttrCursor::forceValue()
|
||||
if (v.type() == nString)
|
||||
cachedValue = {root->db->setString(getKey(), v.string.s, v.string.context),
|
||||
string_t{v.string.s, {}}};
|
||||
else if (v.type() == nPath)
|
||||
cachedValue = {root->db->setString(getKey(), v.path), string_t{v.path, {}}};
|
||||
else if (v.type() == nPath) {
|
||||
auto path = v.path().path;
|
||||
cachedValue = {root->db->setString(getKey(), path.abs()), string_t{path.abs(), {}}};
|
||||
}
|
||||
else if (v.type() == nBool)
|
||||
cachedValue = {root->db->setBool(getKey(), v.boolean), v.boolean};
|
||||
else if (v.type() == nInt)
|
||||
@ -580,7 +582,7 @@ std::string AttrCursor::getString()
|
||||
if (v.type() != nString && v.type() != nPath)
|
||||
root->state.error("'%s' is not a string but %s", getAttrPathStr()).debugThrow<TypeError>();
|
||||
|
||||
return v.type() == nString ? v.string.s : v.path;
|
||||
return v.type() == nString ? v.string.s : v.path().to_string();
|
||||
}
|
||||
|
||||
string_t AttrCursor::getStringWithContext()
|
||||
@ -619,10 +621,13 @@ string_t AttrCursor::getStringWithContext()
|
||||
|
||||
auto & v = forceValue();
|
||||
|
||||
if (v.type() == nString)
|
||||
return {v.string.s, v.getContext(*root->state.store)};
|
||||
if (v.type() == nString) {
|
||||
NixStringContext context;
|
||||
copyContext(v, context);
|
||||
return {v.string.s, std::move(context)};
|
||||
}
|
||||
else if (v.type() == nPath)
|
||||
return {v.path, {}};
|
||||
return {v.path().to_string(), {}};
|
||||
else
|
||||
root->state.error("'%s' is not a string but %s", getAttrPathStr()).debugThrow<TypeError>();
|
||||
}
|
||||
|
@ -9,6 +9,7 @@
|
||||
#include "filetransfer.hh"
|
||||
#include "function-trace.hh"
|
||||
#include "profiles.hh"
|
||||
#include "print.hh"
|
||||
|
||||
#include <algorithm>
|
||||
#include <chrono>
|
||||
@ -104,21 +105,13 @@ void Value::print(const SymbolTable & symbols, std::ostream & str,
|
||||
str << integer;
|
||||
break;
|
||||
case tBool:
|
||||
str << (boolean ? "true" : "false");
|
||||
printLiteralBool(str, boolean);
|
||||
break;
|
||||
case tString:
|
||||
str << "\"";
|
||||
for (const char * i = string.s; *i; i++)
|
||||
if (*i == '\"' || *i == '\\') str << "\\" << *i;
|
||||
else if (*i == '\n') str << "\\n";
|
||||
else if (*i == '\r') str << "\\r";
|
||||
else if (*i == '\t') str << "\\t";
|
||||
else if (*i == '$' && *(i+1) == '{') str << "\\" << *i;
|
||||
else str << *i;
|
||||
str << "\"";
|
||||
printLiteralString(str, string.s);
|
||||
break;
|
||||
case tPath:
|
||||
str << path; // !!! escaping?
|
||||
str << path().to_string(); // !!! escaping?
|
||||
break;
|
||||
case tNull:
|
||||
str << "null";
|
||||
@ -542,6 +535,7 @@ EvalState::EvalState(
|
||||
, sOutputSpecified(symbols.create("outputSpecified"))
|
||||
, repair(NoRepair)
|
||||
, emptyBindings(0)
|
||||
, derivationInternal(rootPath(CanonPath("/builtin/derivation.nix")))
|
||||
, store(store)
|
||||
, buildStore(buildStore ? buildStore : store)
|
||||
, debugRepl(nullptr)
|
||||
@ -616,15 +610,14 @@ void EvalState::allowAndSetStorePathString(const StorePath & storePath, Value &
|
||||
{
|
||||
allowPath(storePath);
|
||||
|
||||
auto path = store->printStorePath(storePath);
|
||||
v.mkString(path, PathSet({path}));
|
||||
mkStorePathString(storePath, v);
|
||||
}
|
||||
|
||||
Path EvalState::checkSourcePath(const Path & path_)
|
||||
SourcePath EvalState::checkSourcePath(const SourcePath & path_)
|
||||
{
|
||||
if (!allowedPaths) return path_;
|
||||
|
||||
auto i = resolvedPaths.find(path_);
|
||||
auto i = resolvedPaths.find(path_.path.abs());
|
||||
if (i != resolvedPaths.end())
|
||||
return i->second;
|
||||
|
||||
@ -634,9 +627,9 @@ Path EvalState::checkSourcePath(const Path & path_)
|
||||
* attacker can't append ../../... to a path that would be in allowedPaths
|
||||
* and thus leak symlink targets.
|
||||
*/
|
||||
Path abspath = canonPath(path_);
|
||||
Path abspath = canonPath(path_.path.abs());
|
||||
|
||||
if (hasPrefix(abspath, corepkgsPrefix)) return abspath;
|
||||
if (hasPrefix(abspath, corepkgsPrefix)) return CanonPath(abspath);
|
||||
|
||||
for (auto & i : *allowedPaths) {
|
||||
if (isDirOrInDir(abspath, i)) {
|
||||
@ -654,11 +647,11 @@ Path EvalState::checkSourcePath(const Path & path_)
|
||||
|
||||
/* Resolve symlinks. */
|
||||
debug("checking access to '%s'", abspath);
|
||||
Path path = canonPath(abspath, true);
|
||||
SourcePath path = CanonPath(canonPath(abspath, true));
|
||||
|
||||
for (auto & i : *allowedPaths) {
|
||||
if (isDirOrInDir(path, i)) {
|
||||
resolvedPaths[path_] = path;
|
||||
if (isDirOrInDir(path.path.abs(), i)) {
|
||||
resolvedPaths.insert_or_assign(path_.path.abs(), path);
|
||||
return path;
|
||||
}
|
||||
}
|
||||
@ -686,12 +679,12 @@ void EvalState::checkURI(const std::string & uri)
|
||||
/* If the URI is a path, then check it against allowedPaths as
|
||||
well. */
|
||||
if (hasPrefix(uri, "/")) {
|
||||
checkSourcePath(uri);
|
||||
checkSourcePath(CanonPath(uri));
|
||||
return;
|
||||
}
|
||||
|
||||
if (hasPrefix(uri, "file://")) {
|
||||
checkSourcePath(std::string(uri, 7));
|
||||
checkSourcePath(CanonPath(std::string(uri, 7)));
|
||||
return;
|
||||
}
|
||||
|
||||
@ -699,7 +692,7 @@ void EvalState::checkURI(const std::string & uri)
|
||||
}
|
||||
|
||||
|
||||
Path EvalState::toRealPath(const Path & path, const PathSet & context)
|
||||
Path EvalState::toRealPath(const Path & path, const NixStringContext & context)
|
||||
{
|
||||
// FIXME: check whether 'path' is in 'context'.
|
||||
return
|
||||
@ -951,34 +944,34 @@ void Value::mkString(std::string_view s)
|
||||
}
|
||||
|
||||
|
||||
static void copyContextToValue(Value & v, const PathSet & context)
|
||||
static void copyContextToValue(Value & v, const NixStringContext & context)
|
||||
{
|
||||
if (!context.empty()) {
|
||||
size_t n = 0;
|
||||
v.string.context = (const char * *)
|
||||
allocBytes((context.size() + 1) * sizeof(char *));
|
||||
for (auto & i : context)
|
||||
v.string.context[n++] = dupString(i.c_str());
|
||||
v.string.context[n++] = dupString(i.to_string().c_str());
|
||||
v.string.context[n] = 0;
|
||||
}
|
||||
}
|
||||
|
||||
void Value::mkString(std::string_view s, const PathSet & context)
|
||||
void Value::mkString(std::string_view s, const NixStringContext & context)
|
||||
{
|
||||
mkString(s);
|
||||
copyContextToValue(*this, context);
|
||||
}
|
||||
|
||||
void Value::mkStringMove(const char * s, const PathSet & context)
|
||||
void Value::mkStringMove(const char * s, const NixStringContext & context)
|
||||
{
|
||||
mkString(s);
|
||||
copyContextToValue(*this, context);
|
||||
}
|
||||
|
||||
|
||||
void Value::mkPath(std::string_view s)
|
||||
void Value::mkPath(const SourcePath & path)
|
||||
{
|
||||
mkPath(makeImmutableString(s));
|
||||
mkPath(makeImmutableString(path.path.abs()));
|
||||
}
|
||||
|
||||
|
||||
@ -1034,9 +1027,9 @@ void EvalState::mkThunk_(Value & v, Expr * expr)
|
||||
void EvalState::mkPos(Value & v, PosIdx p)
|
||||
{
|
||||
auto pos = positions[p];
|
||||
if (auto path = std::get_if<Path>(&pos.origin)) {
|
||||
if (auto path = std::get_if<SourcePath>(&pos.origin)) {
|
||||
auto attrs = buildBindings(3);
|
||||
attrs.alloc(sFile).mkString(*path);
|
||||
attrs.alloc(sFile).mkString(path->path.abs());
|
||||
attrs.alloc(sLine).mkInt(pos.line);
|
||||
attrs.alloc(sColumn).mkInt(pos.column);
|
||||
v.mkAttrs(attrs);
|
||||
@ -1045,6 +1038,16 @@ void EvalState::mkPos(Value & v, PosIdx p)
|
||||
}
|
||||
|
||||
|
||||
void EvalState::mkStorePathString(const StorePath & p, Value & v)
|
||||
{
|
||||
v.mkString(
|
||||
store->printStorePath(p),
|
||||
NixStringContext {
|
||||
NixStringContextElem::Opaque { .path = p },
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
/* Create a thunk for the delayed computation of the given expression
|
||||
in the given environment. But if the expression is a variable,
|
||||
then look it up right away. This significantly reduces the number
|
||||
@ -1092,7 +1095,7 @@ Value * ExprPath::maybeThunk(EvalState & state, Env & env)
|
||||
}
|
||||
|
||||
|
||||
void EvalState::evalFile(const Path & path_, Value & v, bool mustBeTrivial)
|
||||
void EvalState::evalFile(const SourcePath & path_, Value & v, bool mustBeTrivial)
|
||||
{
|
||||
auto path = checkSourcePath(path_);
|
||||
|
||||
@ -1102,7 +1105,7 @@ void EvalState::evalFile(const Path & path_, Value & v, bool mustBeTrivial)
|
||||
return;
|
||||
}
|
||||
|
||||
Path resolvedPath = resolveExprPath(path);
|
||||
auto resolvedPath = resolveExprPath(path);
|
||||
if ((i = fileEvalCache.find(resolvedPath)) != fileEvalCache.end()) {
|
||||
v = i->second;
|
||||
return;
|
||||
@ -1130,8 +1133,8 @@ void EvalState::resetFileCache()
|
||||
|
||||
|
||||
void EvalState::cacheFile(
|
||||
const Path & path,
|
||||
const Path & resolvedPath,
|
||||
const SourcePath & path,
|
||||
const SourcePath & resolvedPath,
|
||||
Expr * e,
|
||||
Value & v,
|
||||
bool mustBeTrivial)
|
||||
@ -1145,7 +1148,7 @@ void EvalState::cacheFile(
|
||||
*e,
|
||||
this->baseEnv,
|
||||
e->getPos() ? static_cast<std::shared_ptr<AbstractPos>>(positions[e->getPos()]) : nullptr,
|
||||
"while evaluating the file '%1%':", resolvedPath)
|
||||
"while evaluating the file '%1%':", resolvedPath.to_string())
|
||||
: nullptr;
|
||||
|
||||
// Enforce that 'flake.nix' is a direct attrset, not a
|
||||
@ -1155,7 +1158,7 @@ void EvalState::cacheFile(
|
||||
error("file '%s' must be an attribute set", path).debugThrow<EvalError>();
|
||||
eval(e, v);
|
||||
} catch (Error & e) {
|
||||
addErrorTrace(e, "while evaluating the file '%1%':", resolvedPath);
|
||||
addErrorTrace(e, "while evaluating the file '%1%':", resolvedPath.to_string());
|
||||
throw;
|
||||
}
|
||||
|
||||
@ -1416,8 +1419,8 @@ void ExprSelect::eval(EvalState & state, Env & env, Value & v)
|
||||
} catch (Error & e) {
|
||||
if (pos2) {
|
||||
auto pos2r = state.positions[pos2];
|
||||
auto origin = std::get_if<Path>(&pos2r.origin);
|
||||
if (!(origin && *origin == state.derivationNixPath))
|
||||
auto origin = std::get_if<SourcePath>(&pos2r.origin);
|
||||
if (!(origin && *origin == state.derivationInternal))
|
||||
state.addErrorTrace(e, pos2, "while evaluating the attribute '%1%'",
|
||||
showAttrPath(state, env, attrPath));
|
||||
}
|
||||
@ -1907,7 +1910,7 @@ void EvalState::concatLists(Value & v, size_t nrLists, Value * * lists, const Po
|
||||
|
||||
void ExprConcatStrings::eval(EvalState & state, Env & env, Value & v)
|
||||
{
|
||||
PathSet context;
|
||||
NixStringContext context;
|
||||
std::vector<BackedStringView> s;
|
||||
size_t sSize = 0;
|
||||
NixInt n = 0;
|
||||
@ -1990,7 +1993,7 @@ void ExprConcatStrings::eval(EvalState & state, Env & env, Value & v)
|
||||
else if (firstType == nPath) {
|
||||
if (!context.empty())
|
||||
state.error("a string that refers to a store path cannot be appended to a path").atPos(pos).withFrame(env, *this).debugThrow<EvalError>();
|
||||
v.mkPath(canonPath(str()));
|
||||
v.mkPath(CanonPath(canonPath(str())));
|
||||
} else
|
||||
v.mkStringMove(c_str(), context);
|
||||
}
|
||||
@ -2116,26 +2119,15 @@ std::string_view EvalState::forceString(Value & v, const PosIdx pos, std::string
|
||||
}
|
||||
|
||||
|
||||
void copyContext(const Value & v, PathSet & context)
|
||||
void copyContext(const Value & v, NixStringContext & context)
|
||||
{
|
||||
if (v.string.context)
|
||||
for (const char * * p = v.string.context; *p; ++p)
|
||||
context.insert(*p);
|
||||
context.insert(NixStringContextElem::parse(*p));
|
||||
}
|
||||
|
||||
|
||||
NixStringContext Value::getContext(const Store & store)
|
||||
{
|
||||
NixStringContext res;
|
||||
assert(internalType == tString);
|
||||
if (string.context)
|
||||
for (const char * * p = string.context; *p; ++p)
|
||||
res.push_back(NixStringContextElem::parse(store, *p));
|
||||
return res;
|
||||
}
|
||||
|
||||
|
||||
std::string_view EvalState::forceString(Value & v, PathSet & context, const PosIdx pos, std::string_view errorCtx)
|
||||
std::string_view EvalState::forceString(Value & v, NixStringContext & context, const PosIdx pos, std::string_view errorCtx)
|
||||
{
|
||||
auto s = forceString(v, pos, errorCtx);
|
||||
copyContext(v, context);
|
||||
@ -2165,7 +2157,7 @@ bool EvalState::isDerivation(Value & v)
|
||||
|
||||
|
||||
std::optional<std::string> EvalState::tryAttrsToString(const PosIdx pos, Value & v,
|
||||
PathSet & context, bool coerceMore, bool copyToStore)
|
||||
NixStringContext & context, bool coerceMore, bool copyToStore)
|
||||
{
|
||||
auto i = v.attrs->find(sToString);
|
||||
if (i != v.attrs->end()) {
|
||||
@ -2179,8 +2171,14 @@ std::optional<std::string> EvalState::tryAttrsToString(const PosIdx pos, Value &
|
||||
return {};
|
||||
}
|
||||
|
||||
BackedStringView EvalState::coerceToString(const PosIdx pos, Value &v, PathSet &context,
|
||||
std::string_view errorCtx, bool coerceMore, bool copyToStore, bool canonicalizePath)
|
||||
BackedStringView EvalState::coerceToString(
|
||||
const PosIdx pos,
|
||||
Value & v,
|
||||
NixStringContext & context,
|
||||
std::string_view errorCtx,
|
||||
bool coerceMore,
|
||||
bool copyToStore,
|
||||
bool canonicalizePath)
|
||||
{
|
||||
forceValue(v, pos);
|
||||
|
||||
@ -2190,12 +2188,14 @@ BackedStringView EvalState::coerceToString(const PosIdx pos, Value &v, PathSet &
|
||||
}
|
||||
|
||||
if (v.type() == nPath) {
|
||||
BackedStringView path(PathView(v.path));
|
||||
if (canonicalizePath)
|
||||
path = canonPath(*path);
|
||||
if (copyToStore)
|
||||
path = store->printStorePath(copyPathToStore(context, std::move(path).toOwned()));
|
||||
return path;
|
||||
return
|
||||
!canonicalizePath && !copyToStore
|
||||
? // FIXME: hack to preserve path literals that end in a
|
||||
// slash, as in /foo/${x}.
|
||||
v._path
|
||||
: copyToStore
|
||||
? store->printStorePath(copyPathToStore(context, v.path()))
|
||||
: std::string(v.path().path.abs());
|
||||
}
|
||||
|
||||
if (v.type() == nAttrs) {
|
||||
@ -2256,40 +2256,40 @@ BackedStringView EvalState::coerceToString(const PosIdx pos, Value &v, PathSet &
|
||||
}
|
||||
|
||||
|
||||
StorePath EvalState::copyPathToStore(PathSet & context, const Path & path)
|
||||
StorePath EvalState::copyPathToStore(NixStringContext & context, const SourcePath & path)
|
||||
{
|
||||
if (nix::isDerivation(path))
|
||||
if (nix::isDerivation(path.path.abs()))
|
||||
error("file names are not allowed to end in '%1%'", drvExtension).debugThrow<EvalError>();
|
||||
|
||||
auto dstPath = [&]() -> StorePath
|
||||
{
|
||||
auto i = srcToStore.find(path);
|
||||
if (i != srcToStore.end()) return i->second;
|
||||
auto i = srcToStore.find(path);
|
||||
|
||||
auto dstPath = settings.readOnlyMode
|
||||
? store->computeStorePathForPath(std::string(baseNameOf(path)), checkSourcePath(path)).first
|
||||
: store->addToStore(std::string(baseNameOf(path)), checkSourcePath(path), FileIngestionMethod::Recursive, htSHA256, defaultPathFilter, repair);
|
||||
allowPath(dstPath);
|
||||
srcToStore.insert_or_assign(path, dstPath);
|
||||
printMsg(lvlChatty, "copied source '%1%' -> '%2%'", path, store->printStorePath(dstPath));
|
||||
return dstPath;
|
||||
}();
|
||||
auto dstPath = i != srcToStore.end()
|
||||
? i->second
|
||||
: [&]() {
|
||||
auto dstPath = path.fetchToStore(store, path.baseName(), nullptr, repair);
|
||||
allowPath(dstPath);
|
||||
srcToStore.insert_or_assign(path, dstPath);
|
||||
printMsg(lvlChatty, "copied source '%1%' -> '%2%'", path, store->printStorePath(dstPath));
|
||||
return dstPath;
|
||||
}();
|
||||
|
||||
context.insert(store->printStorePath(dstPath));
|
||||
context.insert(NixStringContextElem::Opaque {
|
||||
.path = dstPath
|
||||
});
|
||||
return dstPath;
|
||||
}
|
||||
|
||||
|
||||
Path EvalState::coerceToPath(const PosIdx pos, Value & v, PathSet & context, std::string_view errorCtx)
|
||||
SourcePath EvalState::coerceToPath(const PosIdx pos, Value & v, NixStringContext & context, std::string_view errorCtx)
|
||||
{
|
||||
auto path = coerceToString(pos, v, context, errorCtx, false, false, true).toOwned();
|
||||
if (path == "" || path[0] != '/')
|
||||
error("string '%1%' doesn't represent an absolute path", path).withTrace(pos, errorCtx).debugThrow<EvalError>();
|
||||
return path;
|
||||
return CanonPath(path);
|
||||
}
|
||||
|
||||
|
||||
StorePath EvalState::coerceToStorePath(const PosIdx pos, Value & v, PathSet & context, std::string_view errorCtx)
|
||||
StorePath EvalState::coerceToStorePath(const PosIdx pos, Value & v, NixStringContext & context, std::string_view errorCtx)
|
||||
{
|
||||
auto path = coerceToString(pos, v, context, errorCtx, false, false, true).toOwned();
|
||||
if (auto storePath = store->maybeParseStorePath(path))
|
||||
@ -2328,7 +2328,7 @@ bool EvalState::eqValues(Value & v1, Value & v2, const PosIdx pos, std::string_v
|
||||
return strcmp(v1.string.s, v2.string.s) == 0;
|
||||
|
||||
case nPath:
|
||||
return strcmp(v1.path, v2.path) == 0;
|
||||
return strcmp(v1._path, v2._path) == 0;
|
||||
|
||||
case nNull:
|
||||
return true;
|
||||
@ -2455,8 +2455,8 @@ void EvalState::printStats()
|
||||
else
|
||||
obj["name"] = nullptr;
|
||||
if (auto pos = positions[fun->pos]) {
|
||||
if (auto path = std::get_if<Path>(&pos.origin))
|
||||
obj["file"] = *path;
|
||||
if (auto path = std::get_if<SourcePath>(&pos.origin))
|
||||
obj["file"] = path->to_string();
|
||||
obj["line"] = pos.line;
|
||||
obj["column"] = pos.column;
|
||||
}
|
||||
@ -2470,8 +2470,8 @@ void EvalState::printStats()
|
||||
for (auto & i : attrSelects) {
|
||||
json obj = json::object();
|
||||
if (auto pos = positions[i.first]) {
|
||||
if (auto path = std::get_if<Path>(&pos.origin))
|
||||
obj["file"] = *path;
|
||||
if (auto path = std::get_if<SourcePath>(&pos.origin))
|
||||
obj["file"] = path->to_string();
|
||||
obj["line"] = pos.line;
|
||||
obj["column"] = pos.column;
|
||||
}
|
||||
@ -2496,7 +2496,7 @@ void EvalState::printStats()
|
||||
}
|
||||
|
||||
|
||||
std::string ExternalValueBase::coerceToString(const Pos & pos, PathSet & context, bool copyMore, bool copyToStore) const
|
||||
std::string ExternalValueBase::coerceToString(const Pos & pos, NixStringContext & context, bool copyMore, bool copyToStore) const
|
||||
{
|
||||
throw TypeError({
|
||||
.msg = hintfmt("cannot coerce %1% to a string", showType())
|
||||
|
@ -8,6 +8,7 @@
|
||||
#include "symbol-table.hh"
|
||||
#include "config.hh"
|
||||
#include "experimental-features.hh"
|
||||
#include "input-accessor.hh"
|
||||
|
||||
#include <map>
|
||||
#include <optional>
|
||||
@ -56,20 +57,14 @@ void printEnvBindings(const SymbolTable & st, const StaticEnv & se, const Env &
|
||||
|
||||
std::unique_ptr<ValMap> mapStaticEnvBindings(const SymbolTable & st, const StaticEnv & se, const Env & env);
|
||||
|
||||
void copyContext(const Value & v, PathSet & context);
|
||||
|
||||
|
||||
/**
|
||||
* Cache for calls to addToStore(); maps source paths to the store
|
||||
* paths.
|
||||
*/
|
||||
typedef std::map<Path, StorePath> SrcToStore;
|
||||
void copyContext(const Value & v, NixStringContext & context);
|
||||
|
||||
|
||||
std::string printValue(const EvalState & state, const Value & v);
|
||||
std::ostream & operator << (std::ostream & os, const ValueType t);
|
||||
|
||||
|
||||
// FIXME: maybe change this to an std::variant<SourcePath, URL>.
|
||||
typedef std::pair<std::string, std::string> SearchPathElem;
|
||||
typedef std::list<SearchPathElem> SearchPath;
|
||||
|
||||
@ -137,8 +132,6 @@ public:
|
||||
SymbolTable symbols;
|
||||
PosTable positions;
|
||||
|
||||
static inline std::string derivationNixPath = "//builtin/derivation.nix";
|
||||
|
||||
const Symbol sWith, sOutPath, sDrvPath, sType, sMeta, sName, sValue,
|
||||
sSystem, sOverrides, sOutputs, sOutputName, sIgnoreNulls,
|
||||
sFile, sLine, sColumn, sFunctor, sToString,
|
||||
@ -149,7 +142,6 @@ public:
|
||||
sDescription, sSelf, sEpsilon, sStartSet, sOperator, sKey, sPath,
|
||||
sPrefix,
|
||||
sOutputSpecified;
|
||||
Symbol sDerivationNix;
|
||||
|
||||
/**
|
||||
* If set, force copying files to the Nix store even if they
|
||||
@ -165,6 +157,8 @@ public:
|
||||
|
||||
Bindings emptyBindings;
|
||||
|
||||
const SourcePath derivationInternal;
|
||||
|
||||
/**
|
||||
* Store used to materialise .drv files.
|
||||
*/
|
||||
@ -234,15 +228,18 @@ public:
|
||||
}
|
||||
|
||||
private:
|
||||
SrcToStore srcToStore;
|
||||
|
||||
/* Cache for calls to addToStore(); maps source paths to the store
|
||||
paths. */
|
||||
std::map<SourcePath, StorePath> srcToStore;
|
||||
|
||||
/**
|
||||
* A cache from path names to parse trees.
|
||||
*/
|
||||
#if HAVE_BOEHMGC
|
||||
typedef std::map<Path, Expr *, std::less<Path>, traceable_allocator<std::pair<const Path, Expr *>>> FileParseCache;
|
||||
typedef std::map<SourcePath, Expr *, std::less<SourcePath>, traceable_allocator<std::pair<const SourcePath, Expr *>>> FileParseCache;
|
||||
#else
|
||||
typedef std::map<Path, Expr *> FileParseCache;
|
||||
typedef std::map<SourcePath, Expr *> FileParseCache;
|
||||
#endif
|
||||
FileParseCache fileParseCache;
|
||||
|
||||
@ -250,9 +247,9 @@ private:
|
||||
* A cache from path names to values.
|
||||
*/
|
||||
#if HAVE_BOEHMGC
|
||||
typedef std::map<Path, Value, std::less<Path>, traceable_allocator<std::pair<const Path, Value>>> FileEvalCache;
|
||||
typedef std::map<SourcePath, Value, std::less<SourcePath>, traceable_allocator<std::pair<const SourcePath, Value>>> FileEvalCache;
|
||||
#else
|
||||
typedef std::map<Path, Value> FileEvalCache;
|
||||
typedef std::map<SourcePath, Value> FileEvalCache;
|
||||
#endif
|
||||
FileEvalCache fileEvalCache;
|
||||
|
||||
@ -263,7 +260,7 @@ private:
|
||||
/**
|
||||
* Cache used by checkSourcePath().
|
||||
*/
|
||||
std::unordered_map<Path, Path> resolvedPaths;
|
||||
std::unordered_map<Path, SourcePath> resolvedPaths;
|
||||
|
||||
/**
|
||||
* Cache used by prim_match().
|
||||
@ -294,6 +291,12 @@ public:
|
||||
|
||||
SearchPath getSearchPath() { return searchPath; }
|
||||
|
||||
/**
|
||||
* Return a `SourcePath` that refers to `path` in the root
|
||||
* filesystem.
|
||||
*/
|
||||
SourcePath rootPath(CanonPath path);
|
||||
|
||||
/**
|
||||
* Allow access to a path.
|
||||
*/
|
||||
@ -314,7 +317,7 @@ public:
|
||||
* Check whether access to a path is allowed and throw an error if
|
||||
* not. Otherwise return the canonicalised path.
|
||||
*/
|
||||
Path checkSourcePath(const Path & path);
|
||||
SourcePath checkSourcePath(const SourcePath & path);
|
||||
|
||||
void checkURI(const std::string & uri);
|
||||
|
||||
@ -327,19 +330,19 @@ public:
|
||||
* intended to distinguish between import-from-derivation and
|
||||
* sources stored in the actual /nix/store.
|
||||
*/
|
||||
Path toRealPath(const Path & path, const PathSet & context);
|
||||
Path toRealPath(const Path & path, const NixStringContext & context);
|
||||
|
||||
/**
|
||||
* Parse a Nix expression from the specified file.
|
||||
*/
|
||||
Expr * parseExprFromFile(const Path & path);
|
||||
Expr * parseExprFromFile(const Path & path, std::shared_ptr<StaticEnv> & staticEnv);
|
||||
Expr * parseExprFromFile(const SourcePath & path);
|
||||
Expr * parseExprFromFile(const SourcePath & path, std::shared_ptr<StaticEnv> & staticEnv);
|
||||
|
||||
/**
|
||||
* Parse a Nix expression from the specified string.
|
||||
*/
|
||||
Expr * parseExprFromString(std::string s, const Path & basePath, std::shared_ptr<StaticEnv> & staticEnv);
|
||||
Expr * parseExprFromString(std::string s, const Path & basePath);
|
||||
Expr * parseExprFromString(std::string s, const SourcePath & basePath, std::shared_ptr<StaticEnv> & staticEnv);
|
||||
Expr * parseExprFromString(std::string s, const SourcePath & basePath);
|
||||
|
||||
Expr * parseStdin();
|
||||
|
||||
@ -348,14 +351,14 @@ public:
|
||||
* form. Optionally enforce that the top-level expression is
|
||||
* trivial (i.e. doesn't require arbitrary computation).
|
||||
*/
|
||||
void evalFile(const Path & path, Value & v, bool mustBeTrivial = false);
|
||||
void evalFile(const SourcePath & path, Value & v, bool mustBeTrivial = false);
|
||||
|
||||
/**
|
||||
* Like `evalFile`, but with an already parsed expression.
|
||||
*/
|
||||
void cacheFile(
|
||||
const Path & path,
|
||||
const Path & resolvedPath,
|
||||
const SourcePath & path,
|
||||
const SourcePath & resolvedPath,
|
||||
Expr * e,
|
||||
Value & v,
|
||||
bool mustBeTrivial = false);
|
||||
@ -365,8 +368,8 @@ public:
|
||||
/**
|
||||
* Look up a file in the search path.
|
||||
*/
|
||||
Path findFile(const std::string_view path);
|
||||
Path findFile(SearchPath & searchPath, const std::string_view path, const PosIdx pos = noPos);
|
||||
SourcePath findFile(const std::string_view path);
|
||||
SourcePath findFile(SearchPath & searchPath, const std::string_view path, const PosIdx pos = noPos);
|
||||
|
||||
/**
|
||||
* If the specified search path element is a URI, download it.
|
||||
@ -423,7 +426,7 @@ public:
|
||||
*/
|
||||
void forceFunction(Value & v, const PosIdx pos, std::string_view errorCtx);
|
||||
std::string_view forceString(Value & v, const PosIdx pos, std::string_view errorCtx);
|
||||
std::string_view forceString(Value & v, PathSet & context, const PosIdx pos, std::string_view errorCtx);
|
||||
std::string_view forceString(Value & v, NixStringContext & context, const PosIdx pos, std::string_view errorCtx);
|
||||
std::string_view forceStringNoCtx(Value & v, const PosIdx pos, std::string_view errorCtx);
|
||||
|
||||
[[gnu::noinline]]
|
||||
@ -439,7 +442,7 @@ public:
|
||||
bool isDerivation(Value & v);
|
||||
|
||||
std::optional<std::string> tryAttrsToString(const PosIdx pos, Value & v,
|
||||
PathSet & context, bool coerceMore = false, bool copyToStore = true);
|
||||
NixStringContext & context, bool coerceMore = false, bool copyToStore = true);
|
||||
|
||||
/**
|
||||
* String coercion.
|
||||
@ -449,12 +452,12 @@ public:
|
||||
* booleans and lists to a string. If `copyToStore` is set,
|
||||
* referenced paths are copied to the Nix store as a side effect.
|
||||
*/
|
||||
BackedStringView coerceToString(const PosIdx pos, Value & v, PathSet & context,
|
||||
BackedStringView coerceToString(const PosIdx pos, Value & v, NixStringContext & context,
|
||||
std::string_view errorCtx,
|
||||
bool coerceMore = false, bool copyToStore = true,
|
||||
bool canonicalizePath = true);
|
||||
|
||||
StorePath copyPathToStore(PathSet & context, const Path & path);
|
||||
StorePath copyPathToStore(NixStringContext & context, const SourcePath & path);
|
||||
|
||||
/**
|
||||
* Path coercion.
|
||||
@ -463,12 +466,12 @@ public:
|
||||
* path. The result is guaranteed to be a canonicalised, absolute
|
||||
* path. Nothing is copied to the store.
|
||||
*/
|
||||
Path coerceToPath(const PosIdx pos, Value & v, PathSet & context, std::string_view errorCtx);
|
||||
SourcePath coerceToPath(const PosIdx pos, Value & v, NixStringContext & context, std::string_view errorCtx);
|
||||
|
||||
/**
|
||||
* Like coerceToPath, but the result must be a store path.
|
||||
*/
|
||||
StorePath coerceToStorePath(const PosIdx pos, Value & v, PathSet & context, std::string_view errorCtx);
|
||||
StorePath coerceToStorePath(const PosIdx pos, Value & v, NixStringContext & context, std::string_view errorCtx);
|
||||
|
||||
public:
|
||||
|
||||
@ -525,7 +528,7 @@ private:
|
||||
char * text,
|
||||
size_t length,
|
||||
Pos::Origin origin,
|
||||
Path basePath,
|
||||
const SourcePath & basePath,
|
||||
std::shared_ptr<StaticEnv> & staticEnv);
|
||||
|
||||
public:
|
||||
@ -573,6 +576,12 @@ public:
|
||||
void mkThunk_(Value & v, Expr * expr);
|
||||
void mkPos(Value & v, PosIdx pos);
|
||||
|
||||
/* Create a string representing a store path.
|
||||
|
||||
The string is the printed store path with a context containing a single
|
||||
`Opaque` element of that store path. */
|
||||
void mkStorePathString(const StorePath & storePath, Value & v);
|
||||
|
||||
void concatLists(Value & v, size_t nrLists, Value * * lists, const PosIdx pos, std::string_view errorCtx);
|
||||
|
||||
/**
|
||||
@ -584,7 +593,7 @@ public:
|
||||
* Realise the given context, and return a mapping from the placeholders
|
||||
* used to construct the associated value to their final store path
|
||||
*/
|
||||
[[nodiscard]] StringMap realiseContext(const PathSet & context);
|
||||
[[nodiscard]] StringMap realiseContext(const NixStringContext & context);
|
||||
|
||||
private:
|
||||
|
||||
@ -650,7 +659,7 @@ std::string showType(const Value & v);
|
||||
/**
|
||||
* If `path` refers to a directory, then append "/default.nix".
|
||||
*/
|
||||
Path resolveExprPath(Path path);
|
||||
SourcePath resolveExprPath(const SourcePath & path);
|
||||
|
||||
struct InvalidPathError : EvalError
|
||||
{
|
||||
|
@ -31,7 +31,7 @@ static void writeTrustedList(const TrustedList & trustedList)

void ConfigFile::apply()
{
std::set<std::string> whitelist{"bash-prompt", "bash-prompt-prefix", "bash-prompt-suffix", "flake-registry"};
std::set<std::string> whitelist{"bash-prompt", "bash-prompt-prefix", "bash-prompt-suffix", "flake-registry", "commit-lockfile-summary"};

for (auto & [name, value] : settings) {

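(Editorial aside, not part of the diff: `commit-lockfile-summary` is the setting a flake can declare through `nixConfig`; the hunk above only adds it to the list of settings applied without a trust prompt. A hedged sketch of how a flake might set it; the message text is invented.)

```nix
{
  description = "example flake";

  nixConfig = {
    # Used as the commit summary when the lock file is committed,
    # e.g. by `nix flake update --commit-lock-file`.
    commit-lockfile-summary = "chore: update flake.lock";
  };

  outputs = { self }: { };
}
```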
@ -222,9 +222,9 @@ static Flake getFlake(
|
||||
throw Error("source tree referenced by '%s' does not contain a '%s/flake.nix' file", lockedRef, lockedRef.subdir);
|
||||
|
||||
Value vInfo;
|
||||
state.evalFile(flakeFile, vInfo, true); // FIXME: symlink attack
|
||||
state.evalFile(CanonPath(flakeFile), vInfo, true); // FIXME: symlink attack
|
||||
|
||||
expectType(state, nAttrs, vInfo, state.positions.add({flakeFile}, 1, 1));
|
||||
expectType(state, nAttrs, vInfo, state.positions.add({CanonPath(flakeFile)}, 1, 1));
|
||||
|
||||
if (auto description = vInfo.attrs->get(state.sDescription)) {
|
||||
expectType(state, nString, *description->value, description->pos);
|
||||
@ -265,7 +265,7 @@ static Flake getFlake(
|
||||
state.symbols[setting.name],
|
||||
std::string(state.forceStringNoCtx(*setting.value, setting.pos, "")));
|
||||
else if (setting.value->type() == nPath) {
|
||||
PathSet emptyContext = {};
|
||||
NixStringContext emptyContext = {};
|
||||
flake.config.settings.emplace(
|
||||
state.symbols[setting.name],
|
||||
state.coerceToString(setting.pos, *setting.value, emptyContext, "", false, true, true) .toOwned());
|
||||
@ -745,7 +745,7 @@ void callFlake(EvalState & state,
|
||||
state.vCallFlake = allocRootValue(state.allocValue());
|
||||
state.eval(state.parseExprFromString(
|
||||
#include "call-flake.nix.gen.hh"
|
||||
, "/"), **state.vCallFlake);
|
||||
, CanonPath::root), **state.vCallFlake);
|
||||
}
|
||||
|
||||
state.callFunction(**state.vCallFlake, *vLocks, *vTmp1, noPos);
|
||||
|
@ -234,6 +234,11 @@ bool LockFile::operator ==(const LockFile & other) const
|
||||
return toJSON() == other.toJSON();
|
||||
}
|
||||
|
||||
bool LockFile::operator !=(const LockFile & other) const
|
||||
{
|
||||
return !(*this == other);
|
||||
}
|
||||
|
||||
InputPath parseInputPath(std::string_view s)
|
||||
{
|
||||
InputPath path;
|
||||
|
@ -73,6 +73,9 @@ struct LockFile
|
||||
std::optional<FlakeRef> isUnlocked() const;
|
||||
|
||||
bool operator ==(const LockFile & other) const;
|
||||
// Needed for old gcc versions that don't synthesize it (like gcc 8.2.2
|
||||
// that is still the default on aarch64-linux)
|
||||
bool operator !=(const LockFile & other) const;
|
||||
|
||||
std::shared_ptr<Node> findInput(const InputPath & path);
|
||||
|
||||
|
@ -71,7 +71,7 @@ std::optional<StorePath> DrvInfo::queryDrvPath() const
|
||||
{
|
||||
if (!drvPath && attrs) {
|
||||
Bindings::iterator i = attrs->find(state->sDrvPath);
|
||||
PathSet context;
|
||||
NixStringContext context;
|
||||
if (i == attrs->end())
|
||||
drvPath = {std::nullopt};
|
||||
else
|
||||
@ -93,7 +93,7 @@ StorePath DrvInfo::queryOutPath() const
|
||||
{
|
||||
if (!outPath && attrs) {
|
||||
Bindings::iterator i = attrs->find(state->sOutPath);
|
||||
PathSet context;
|
||||
NixStringContext context;
|
||||
if (i != attrs->end())
|
||||
outPath = state->coerceToStorePath(i->pos, *i->value, context, "while evaluating the output path of a derivation");
|
||||
}
|
||||
@ -124,7 +124,7 @@ DrvInfo::Outputs DrvInfo::queryOutputs(bool withPaths, bool onlyOutputsToInstall
|
||||
/* And evaluate its ‘outPath’ attribute. */
|
||||
Bindings::iterator outPath = out->value->attrs->find(state->sOutPath);
|
||||
if (outPath == out->value->attrs->end()) continue; // FIXME: throw error?
|
||||
PathSet context;
|
||||
NixStringContext context;
|
||||
outputs.emplace(output, state->coerceToStorePath(outPath->pos, *outPath->value, context, "while evaluating an output path of a derivation"));
|
||||
} else
|
||||
outputs.emplace(output, std::nullopt);
|
||||
|
@ -3,6 +3,7 @@
|
||||
#include "eval.hh"
|
||||
#include "symbol-table.hh"
|
||||
#include "util.hh"
|
||||
#include "print.hh"
|
||||
|
||||
#include <cstdlib>
|
||||
|
||||
@ -31,9 +32,9 @@ struct PosAdapter : AbstractPos
|
||||
// Get rid of the null terminators added by the parser.
|
||||
return std::string(s.source->c_str());
|
||||
},
|
||||
[](const Path & path) -> std::optional<std::string> {
|
||||
[](const SourcePath & path) -> std::optional<std::string> {
|
||||
try {
|
||||
return readFile(path);
|
||||
return path.readFile();
|
||||
} catch (Error &) {
|
||||
return std::nullopt;
|
||||
}
|
||||
@ -47,7 +48,7 @@ struct PosAdapter : AbstractPos
|
||||
[&](const Pos::none_tag &) { out << "«none»"; },
|
||||
[&](const Pos::Stdin &) { out << "«stdin»"; },
|
||||
[&](const Pos::String & s) { out << "«string»"; },
|
||||
[&](const Path & path) { out << path; }
|
||||
[&](const SourcePath & path) { out << path; }
|
||||
}, origin);
|
||||
}
|
||||
};
|
||||
@ -60,45 +61,12 @@ Pos::operator std::shared_ptr<AbstractPos>() const
|
||||
return pos;
|
||||
}
|
||||
|
||||
/* Displaying abstract syntax trees. */
|
||||
|
||||
static void showString(std::ostream & str, std::string_view s)
|
||||
{
|
||||
str << '"';
|
||||
for (auto c : s)
|
||||
if (c == '"' || c == '\\' || c == '$') str << "\\" << c;
|
||||
else if (c == '\n') str << "\\n";
|
||||
else if (c == '\r') str << "\\r";
|
||||
else if (c == '\t') str << "\\t";
|
||||
else str << c;
|
||||
str << '"';
|
||||
}
|
||||
|
||||
// FIXME: remove, because *symbols* are abstract and do not have a single
|
||||
// textual representation; see printIdentifier()
|
||||
std::ostream & operator <<(std::ostream & str, const SymbolStr & symbol)
|
||||
{
|
||||
std::string_view s = symbol;
|
||||
|
||||
if (s.empty())
|
||||
str << "\"\"";
|
||||
else if (s == "if") // FIXME: handle other keywords
|
||||
str << '"' << s << '"';
|
||||
else {
|
||||
char c = s[0];
|
||||
if (!((c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || c == '_')) {
|
||||
showString(str, s);
|
||||
return str;
|
||||
}
|
||||
for (auto c : s)
|
||||
if (!((c >= 'a' && c <= 'z') ||
|
||||
(c >= 'A' && c <= 'Z') ||
|
||||
(c >= '0' && c <= '9') ||
|
||||
c == '_' || c == '\'' || c == '-')) {
|
||||
showString(str, s);
|
||||
return str;
|
||||
}
|
||||
str << s;
|
||||
}
|
||||
return str;
|
||||
return printIdentifier(str, s);
|
||||
}
|
||||
|
||||
void Expr::show(const SymbolTable & symbols, std::ostream & str) const
|
||||
@ -118,7 +86,7 @@ void ExprFloat::show(const SymbolTable & symbols, std::ostream & str) const
|
||||
|
||||
void ExprString::show(const SymbolTable & symbols, std::ostream & str) const
|
||||
{
|
||||
showString(str, s);
|
||||
printLiteralString(str, s);
|
||||
}
|
||||
|
||||
void ExprPath::show(const SymbolTable & symbols, std::ostream & str) const
|
||||
|
@ -34,7 +34,7 @@ struct Pos
|
||||
struct Stdin { ref<std::string> source; };
|
||||
struct String { ref<std::string> source; };
|
||||
|
||||
typedef std::variant<none_tag, Stdin, String, Path> Origin;
|
||||
typedef std::variant<none_tag, Stdin, String, SourcePath> Origin;
|
||||
|
||||
Origin origin;
|
||||
|
||||
|
@ -31,7 +31,7 @@ namespace nix {
|
||||
EvalState & state;
|
||||
SymbolTable & symbols;
|
||||
Expr * result;
|
||||
Path basePath;
|
||||
SourcePath basePath;
|
||||
PosTable::Origin origin;
|
||||
std::optional<ErrorInfo> error;
|
||||
};
|
||||
@ -509,7 +509,7 @@ string_parts_interpolated
|
||||
|
||||
path_start
|
||||
: PATH {
|
||||
Path path(absPath({$1.p, $1.l}, data->basePath));
|
||||
Path path(absPath({$1.p, $1.l}, data->basePath.path.abs()));
|
||||
/* add back in the trailing '/' to the first segment */
|
||||
if ($1.p[$1.l-1] == '/' && $1.l > 1)
|
||||
path += "/";
|
||||
@ -651,7 +651,7 @@ Expr * EvalState::parse(
|
||||
char * text,
|
||||
size_t length,
|
||||
Pos::Origin origin,
|
||||
Path basePath,
|
||||
const SourcePath & basePath,
|
||||
std::shared_ptr<StaticEnv> & staticEnv)
|
||||
{
|
||||
yyscan_t scanner;
|
||||
@ -675,48 +675,36 @@ Expr * EvalState::parse(
|
||||
}
|
||||
|
||||
|
||||
Path resolveExprPath(Path path)
|
||||
SourcePath resolveExprPath(const SourcePath & path)
|
||||
{
|
||||
assert(path[0] == '/');
|
||||
|
||||
unsigned int followCount = 0, maxFollow = 1024;
|
||||
|
||||
/* If `path' is a symlink, follow it. This is so that relative
|
||||
path references work. */
|
||||
struct stat st;
|
||||
while (true) {
|
||||
// Basic cycle/depth limit to avoid infinite loops.
|
||||
if (++followCount >= maxFollow)
|
||||
throw Error("too many symbolic links encountered while traversing the path '%s'", path);
|
||||
st = lstat(path);
|
||||
if (!S_ISLNK(st.st_mode)) break;
|
||||
path = absPath(readLink(path), dirOf(path));
|
||||
}
|
||||
auto path2 = path.resolveSymlinks();
|
||||
|
||||
/* If `path' refers to a directory, append `/default.nix'. */
|
||||
if (S_ISDIR(st.st_mode))
|
||||
path = canonPath(path + "/default.nix");
|
||||
if (path2.lstat().type == InputAccessor::tDirectory)
|
||||
return path2 + "default.nix";
|
||||
|
||||
return path;
|
||||
return path2;
|
||||
}
|
||||
|
||||
|
||||
Expr * EvalState::parseExprFromFile(const Path & path)
|
||||
Expr * EvalState::parseExprFromFile(const SourcePath & path)
|
||||
{
|
||||
return parseExprFromFile(path, staticBaseEnv);
|
||||
}
|
||||
|
||||
|
||||
Expr * EvalState::parseExprFromFile(const Path & path, std::shared_ptr<StaticEnv> & staticEnv)
|
||||
Expr * EvalState::parseExprFromFile(const SourcePath & path, std::shared_ptr<StaticEnv> & staticEnv)
|
||||
{
|
||||
auto buffer = readFile(path);
|
||||
// readFile should have left some extra space for terminators
|
||||
auto buffer = path.readFile();
|
||||
// readFile hopefully have left some extra space for terminators
|
||||
buffer.append("\0\0", 2);
|
||||
return parse(buffer.data(), buffer.size(), path, dirOf(path), staticEnv);
|
||||
return parse(buffer.data(), buffer.size(), Pos::Origin(path), path.parent(), staticEnv);
|
||||
}
|
||||
|
||||
|
||||
Expr * EvalState::parseExprFromString(std::string s_, const Path & basePath, std::shared_ptr<StaticEnv> & staticEnv)
|
||||
Expr * EvalState::parseExprFromString(std::string s_, const SourcePath & basePath, std::shared_ptr<StaticEnv> & staticEnv)
|
||||
{
|
||||
auto s = make_ref<std::string>(std::move(s_));
|
||||
s->append("\0\0", 2);
|
||||
@ -724,7 +712,7 @@ Expr * EvalState::parseExprFromString(std::string s_, const Path & basePath, std
|
||||
}
|
||||
|
||||
|
||||
Expr * EvalState::parseExprFromString(std::string s, const Path & basePath)
|
||||
Expr * EvalState::parseExprFromString(std::string s, const SourcePath & basePath)
|
||||
{
|
||||
return parseExprFromString(std::move(s), basePath, staticBaseEnv);
|
||||
}
|
||||
@ -737,7 +725,7 @@ Expr * EvalState::parseStdin()
|
||||
// drainFD should have left some extra space for terminators
|
||||
buffer.append("\0\0", 2);
|
||||
auto s = make_ref<std::string>(std::move(buffer));
|
||||
return parse(s->data(), s->size(), Pos::Stdin{.source = s}, absPath("."), staticBaseEnv);
|
||||
return parse(s->data(), s->size(), Pos::Stdin{.source = s}, rootPath(CanonPath::fromCwd()), staticBaseEnv);
|
||||
}
|
||||
|
||||
|
||||
@ -757,13 +745,13 @@ void EvalState::addToSearchPath(const std::string & s)
|
||||
}
|
||||
|
||||
|
||||
Path EvalState::findFile(const std::string_view path)
|
||||
SourcePath EvalState::findFile(const std::string_view path)
|
||||
{
|
||||
return findFile(searchPath, path);
|
||||
}
|
||||
|
||||
|
||||
Path EvalState::findFile(SearchPath & searchPath, const std::string_view path, const PosIdx pos)
|
||||
SourcePath EvalState::findFile(SearchPath & searchPath, const std::string_view path, const PosIdx pos)
|
||||
{
|
||||
for (auto & i : searchPath) {
|
||||
std::string suffix;
|
||||
@ -779,11 +767,11 @@ Path EvalState::findFile(SearchPath & searchPath, const std::string_view path, c
|
||||
auto r = resolveSearchPathElem(i);
|
||||
if (!r.first) continue;
|
||||
Path res = r.second + suffix;
|
||||
if (pathExists(res)) return canonPath(res);
|
||||
if (pathExists(res)) return CanonPath(canonPath(res));
|
||||
}
|
||||
|
||||
if (hasPrefix(path, "nix/"))
|
||||
return concatStrings(corepkgsPrefix, path.substr(4));
|
||||
return CanonPath(concatStrings(corepkgsPrefix, path.substr(4)));
|
||||
|
||||
debugThrow(ThrownError({
|
||||
.msg = hintfmt(evalSettings.pureEval
|
||||
10
src/libexpr/paths.cc
Normal file
@ -0,0 +1,10 @@
#include "eval.hh"

namespace nix {

SourcePath EvalState::rootPath(CanonPath path)
{
return std::move(path);
}

}
@ -38,17 +38,16 @@ namespace nix {
|
||||
InvalidPathError::InvalidPathError(const Path & path) :
|
||||
EvalError("path '%s' is not valid", path), path(path) {}
|
||||
|
||||
StringMap EvalState::realiseContext(const PathSet & context)
|
||||
StringMap EvalState::realiseContext(const NixStringContext & context)
|
||||
{
|
||||
std::vector<DerivedPath::Built> drvs;
|
||||
StringMap res;
|
||||
|
||||
for (auto & c_ : context) {
|
||||
for (auto & c : context) {
|
||||
auto ensureValid = [&](const StorePath & p) {
|
||||
if (!store->isValidPath(p))
|
||||
debugThrowLastTrace(InvalidPathError(store->printStorePath(p)));
|
||||
};
|
||||
auto c = NixStringContextElem::parse(*store, c_);
|
||||
std::visit(overloaded {
|
||||
[&](const NixStringContextElem::Built & b) {
|
||||
drvs.push_back(DerivedPath::Built {
|
||||
@ -110,16 +109,16 @@ struct RealisePathFlags {
|
||||
bool checkForPureEval = true;
|
||||
};
|
||||
|
||||
static Path realisePath(EvalState & state, const PosIdx pos, Value & v, const RealisePathFlags flags = {})
|
||||
static SourcePath realisePath(EvalState & state, const PosIdx pos, Value & v, const RealisePathFlags flags = {})
|
||||
{
|
||||
PathSet context;
|
||||
NixStringContext context;
|
||||
|
||||
auto path = state.coerceToPath(noPos, v, context, "while realising the context of a path");
|
||||
|
||||
try {
|
||||
StringMap rewrites = state.realiseContext(context);
|
||||
|
||||
auto realPath = state.toRealPath(rewriteStrings(path, rewrites), context);
|
||||
auto realPath = state.rootPath(CanonPath(state.toRealPath(rewriteStrings(path.path.abs(), rewrites), context)));
|
||||
|
||||
return flags.checkForPureEval
|
||||
? state.checkSourcePath(realPath)
|
||||
@ -158,7 +157,12 @@ static void mkOutputString(
|
||||
/* FIXME: we need to depend on the basic derivation, not
|
||||
derivation */
|
||||
: downstreamPlaceholder(*state.store, drvPath, o.first),
|
||||
{"!" + o.first + "!" + state.store->printStorePath(drvPath)});
|
||||
NixStringContext {
|
||||
NixStringContextElem::Built {
|
||||
.drvPath = drvPath,
|
||||
.output = o.first,
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/* Load and evaluate an expression from path specified by the
|
||||
@ -166,28 +170,30 @@ static void mkOutputString(
|
||||
static void import(EvalState & state, const PosIdx pos, Value & vPath, Value * vScope, Value & v)
|
||||
{
|
||||
auto path = realisePath(state, pos, vPath);
|
||||
auto path2 = path.path.abs();
|
||||
|
||||
// FIXME
|
||||
auto isValidDerivationInStore = [&]() -> std::optional<StorePath> {
|
||||
if (!state.store->isStorePath(path))
|
||||
if (!state.store->isStorePath(path2))
|
||||
return std::nullopt;
|
||||
auto storePath = state.store->parseStorePath(path);
|
||||
if (!(state.store->isValidPath(storePath) && isDerivation(path)))
|
||||
auto storePath = state.store->parseStorePath(path2);
|
||||
if (!(state.store->isValidPath(storePath) && isDerivation(path2)))
|
||||
return std::nullopt;
|
||||
return storePath;
|
||||
};
|
||||
|
||||
if (auto optStorePath = isValidDerivationInStore()) {
|
||||
auto storePath = *optStorePath;
|
||||
Derivation drv = state.store->readDerivation(storePath);
|
||||
if (auto storePath = isValidDerivationInStore()) {
|
||||
Derivation drv = state.store->readDerivation(*storePath);
|
||||
auto attrs = state.buildBindings(3 + drv.outputs.size());
|
||||
attrs.alloc(state.sDrvPath).mkString(path, {"=" + path});
|
||||
attrs.alloc(state.sDrvPath).mkString(path2, {
|
||||
NixStringContextElem::DrvDeep { .drvPath = *storePath },
|
||||
});
|
||||
attrs.alloc(state.sName).mkString(drv.env["name"]);
|
||||
auto & outputsVal = attrs.alloc(state.sOutputs);
|
||||
state.mkList(outputsVal, drv.outputs.size());
|
||||
|
||||
for (const auto & [i, o] : enumerate(drv.outputs)) {
|
||||
mkOutputString(state, attrs, storePath, drv, o);
|
||||
mkOutputString(state, attrs, *storePath, drv, o);
|
||||
(outputsVal.listElems()[i] = state.allocValue())->mkString(o.first);
|
||||
}
|
||||
|
||||
@ -198,7 +204,7 @@ static void import(EvalState & state, const PosIdx pos, Value & vPath, Value * v
|
||||
state.vImportedDrvToDerivation = allocRootValue(state.allocValue());
|
||||
state.eval(state.parseExprFromString(
|
||||
#include "imported-drv-to-derivation.nix.gen.hh"
|
||||
, "/"), **state.vImportedDrvToDerivation);
|
||||
, CanonPath::root), **state.vImportedDrvToDerivation);
|
||||
}
|
||||
|
||||
state.forceFunction(**state.vImportedDrvToDerivation, pos, "while evaluating imported-drv-to-derivation.nix.gen.hh");
|
||||
@ -206,10 +212,10 @@ static void import(EvalState & state, const PosIdx pos, Value & vPath, Value * v
|
||||
state.forceAttrs(v, pos, "while calling imported-drv-to-derivation.nix.gen.hh");
|
||||
}
|
||||
|
||||
else if (path == corepkgsPrefix + "fetchurl.nix") {
|
||||
else if (path2 == corepkgsPrefix + "fetchurl.nix") {
|
||||
state.eval(state.parseExprFromString(
|
||||
#include "fetchurl.nix.gen.hh"
|
||||
, "/"), v);
|
||||
, CanonPath::root), v);
|
||||
}
|
||||
|
||||
else {
|
||||
@ -330,7 +336,7 @@ void prim_importNative(EvalState & state, const PosIdx pos, Value * * args, Valu
|
||||
|
||||
std::string sym(state.forceStringNoCtx(*args[1], pos, "while evaluating the second argument passed to builtins.importNative"));
|
||||
|
||||
void *handle = dlopen(path.c_str(), RTLD_LAZY | RTLD_LOCAL);
|
||||
void *handle = dlopen(path.path.c_str(), RTLD_LAZY | RTLD_LOCAL);
|
||||
if (!handle)
|
||||
state.debugThrowLastTrace(EvalError("could not open '%1%': %2%", path, dlerror()));
|
||||
|
||||
@ -358,7 +364,7 @@ void prim_exec(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||
auto count = args[0]->listSize();
|
||||
if (count == 0)
|
||||
state.error("at least one argument to 'exec' required").atPos(pos).debugThrow<EvalError>();
|
||||
PathSet context;
|
||||
NixStringContext context;
|
||||
auto program = state.coerceToString(pos, *elems[0], context,
|
||||
"while evaluating the first element of the argument passed to builtins.exec",
|
||||
false, false).toOwned();
|
||||
@ -378,7 +384,7 @@ void prim_exec(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||
auto output = runProgram(program, true, commandArgs);
|
||||
Expr * parsed;
|
||||
try {
|
||||
parsed = state.parseExprFromString(std::move(output), "/");
|
||||
parsed = state.parseExprFromString(std::move(output), state.rootPath(CanonPath::root));
|
||||
} catch (Error & e) {
|
||||
e.addTrace(state.positions[pos], "while parsing the output from '%1%'", program);
|
||||
throw;
|
||||
@ -588,7 +594,7 @@ struct CompareValues
|
||||
case nString:
|
||||
return strcmp(v1->string.s, v2->string.s) < 0;
|
||||
case nPath:
|
||||
return strcmp(v1->path, v2->path) < 0;
|
||||
return strcmp(v1->_path, v2->_path) < 0;
|
||||
case nList:
|
||||
// Lexicographic comparison
|
||||
for (size_t i = 0;; i++) {
|
||||
@ -700,12 +706,14 @@ static RegisterPrimOp primop_genericClosure(RegisterPrimOp::Info {
|
||||
.arity = 1,
|
||||
.doc = R"(
|
||||
Take an *attrset* with values named `startSet` and `operator` in order to
|
||||
return a *list of attrsets* by starting with the `startSet`, recursively
|
||||
applying the `operator` function to each element. The *attrsets* in the
|
||||
`startSet` and produced by the `operator` must each contain value named
|
||||
`key` which are comparable to each other. The result is produced by
|
||||
repeatedly calling the operator for each element encountered with a
|
||||
unique key, terminating when no new elements are produced. For example,
|
||||
return a *list of attrsets* by starting with the `startSet` and recursively
|
||||
applying the `operator` function to each `item`. The *attrsets* in the
|
||||
`startSet` and the *attrsets* produced by `operator` must contain a value
|
||||
named `key` which is comparable. The result is produced by calling `operator`
|
||||
for each `item` with a value for `key` that has not been called yet including
|
||||
newly produced `item`s. The function terminates when no new `item`s are
|
||||
produced. The resulting *list of attrsets* contains only *attrsets* with a
|
||||
unique key. For example,
|
||||
|
||||
```
|
||||
builtins.genericClosure {
|
||||
@ -768,7 +776,7 @@ static RegisterPrimOp primop_abort({
|
||||
)",
|
||||
.fun = [](EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||
{
|
||||
PathSet context;
|
||||
NixStringContext context;
|
||||
auto s = state.coerceToString(pos, *args[0], context,
|
||||
"while evaluating the error message passed to builtins.abort").toOwned();
|
||||
state.debugThrowLastTrace(Abort("evaluation aborted with the following error message: '%1%'", s));
|
||||
@ -787,7 +795,7 @@ static RegisterPrimOp primop_throw({
|
||||
)",
|
||||
.fun = [](EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||
{
|
||||
PathSet context;
|
||||
NixStringContext context;
|
||||
auto s = state.coerceToString(pos, *args[0], context,
|
||||
"while evaluating the error message passed to builtin.throw").toOwned();
|
||||
state.debugThrowLastTrace(ThrownError(s));
|
||||
@ -800,7 +808,7 @@ static void prim_addErrorContext(EvalState & state, const PosIdx pos, Value * *
|
||||
state.forceValue(*args[1], pos);
|
||||
v = *args[1];
|
||||
} catch (Error & e) {
|
||||
PathSet context;
|
||||
NixStringContext context;
|
||||
auto message = state.coerceToString(pos, *args[0], context,
|
||||
"while evaluating the error message passed to builtins.addErrorContext",
|
||||
false, false).toOwned();
|
||||
@ -1086,7 +1094,7 @@ drvName, Bindings * attrs, Value & v)
|
||||
Derivation drv;
|
||||
drv.name = drvName;
|
||||
|
||||
PathSet context;
|
||||
NixStringContext context;
|
||||
|
||||
bool contentAddressed = false;
|
||||
bool isImpure = false;
|
||||
@ -1232,8 +1240,7 @@ drvName, Bindings * attrs, Value & v)
|
||||
/* Everything in the context of the strings in the derivation
|
||||
attributes should be added as dependencies of the resulting
|
||||
derivation. */
|
||||
for (auto & c_ : context) {
|
||||
auto c = NixStringContextElem::parse(*state.store, c_);
|
||||
for (auto & c : context) {
|
||||
std::visit(overloaded {
|
||||
/* Since this allows the builder to gain access to every
|
||||
path in the dependency graph of the derivation (including
|
||||
@ -1293,7 +1300,13 @@ drvName, Bindings * attrs, Value & v)
|
||||
auto h = newHashAllowEmpty(*outputHash, parseHashTypeOpt(outputHashAlgo));
|
||||
|
||||
auto method = ingestionMethod.value_or(FileIngestionMethod::Flat);
|
||||
auto outPath = state.store->makeFixedOutputPath(method, h, drvName);
|
||||
auto outPath = state.store->makeFixedOutputPath(drvName, FixedOutputInfo {
|
||||
.hash = {
|
||||
.method = method,
|
||||
.hash = h,
|
||||
},
|
||||
.references = {},
|
||||
});
|
||||
drv.env["out"] = state.store->printStorePath(outPath);
|
||||
drv.outputs.insert_or_assign("out",
|
||||
DerivationOutput::CAFixed {
|
||||
@ -1386,7 +1399,9 @@ drvName, Bindings * attrs, Value & v)
|
||||
}
|
||||
|
||||
auto result = state.buildBindings(1 + drv.outputs.size());
|
||||
result.alloc(state.sDrvPath).mkString(drvPathS, {"=" + drvPathS});
|
||||
result.alloc(state.sDrvPath).mkString(drvPathS, {
|
||||
NixStringContextElem::DrvDeep { .drvPath = drvPath },
|
||||
});
|
||||
for (auto & i : drv.outputs)
|
||||
mkOutputString(state, result, drvPath, drv, i);
|
||||
|
||||
@ -1431,9 +1446,9 @@ static RegisterPrimOp primop_placeholder({
|
||||
/* Convert the argument to a path. !!! obsolete? */
|
||||
static void prim_toPath(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||
{
|
||||
PathSet context;
|
||||
Path path = state.coerceToPath(pos, *args[0], context, "while evaluating the first argument passed to builtins.toPath");
|
||||
v.mkString(canonPath(path), context);
|
||||
NixStringContext context;
|
||||
auto path = state.coerceToPath(pos, *args[0], context, "while evaluating the first argument passed to builtins.toPath");
|
||||
v.mkString(path.path.abs(), context);
|
||||
}
|
||||
|
||||
static RegisterPrimOp primop_toPath({
|
||||
@ -1462,22 +1477,23 @@ static void prim_storePath(EvalState & state, const PosIdx pos, Value * * args,
|
||||
.errPos = state.positions[pos]
|
||||
}));
|
||||
|
||||
PathSet context;
|
||||
Path path = state.checkSourcePath(state.coerceToPath(pos, *args[0], context, "while evaluating the first argument passed to builtins.storePath"));
|
||||
NixStringContext context;
|
||||
auto path = state.checkSourcePath(state.coerceToPath(pos, *args[0], context, "while evaluating the first argument passed to builtins.storePath")).path;
|
||||
/* Resolve symlinks in ‘path’, unless ‘path’ itself is a symlink
|
||||
directly in the store. The latter condition is necessary so
|
||||
e.g. nix-push does the right thing. */
|
||||
if (!state.store->isStorePath(path)) path = canonPath(path, true);
|
||||
if (!state.store->isInStore(path))
|
||||
if (!state.store->isStorePath(path.abs()))
|
||||
path = CanonPath(canonPath(path.abs(), true));
|
||||
if (!state.store->isInStore(path.abs()))
|
||||
state.debugThrowLastTrace(EvalError({
|
||||
.msg = hintfmt("path '%1%' is not in the Nix store", path),
|
||||
.errPos = state.positions[pos]
|
||||
}));
|
||||
auto path2 = state.store->toStorePath(path).first;
|
||||
auto path2 = state.store->toStorePath(path.abs()).first;
|
||||
if (!settings.readOnlyMode)
|
||||
state.store->ensurePath(path2);
|
||||
context.insert(state.store->printStorePath(path2));
|
||||
v.mkString(path, context);
|
||||
context.insert(NixStringContextElem::Opaque { .path = path2 });
|
||||
v.mkString(path.abs(), context);
|
||||
}
|
||||
|
||||
static RegisterPrimOp primop_storePath({
|
||||
@ -1508,7 +1524,7 @@ static void prim_pathExists(EvalState & state, const PosIdx pos, Value * * args,
|
||||
auto path = realisePath(state, pos, *args[0], { .checkForPureEval = false });
|
||||
|
||||
try {
|
||||
v.mkBool(pathExists(state.checkSourcePath(path)));
|
||||
v.mkBool(state.checkSourcePath(path).pathExists());
|
||||
} catch (SysError & e) {
|
||||
/* Don't give away info from errors while canonicalising
|
||||
‘path’ in restricted mode. */
|
||||
@ -1532,7 +1548,7 @@ static RegisterPrimOp primop_pathExists({
|
||||
following the last slash. */
|
||||
static void prim_baseNameOf(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||
{
|
||||
PathSet context;
|
||||
NixStringContext context;
|
||||
v.mkString(baseNameOf(*state.coerceToString(pos, *args[0], context,
|
||||
"while evaluating the first argument passed to builtins.baseNameOf",
|
||||
false, false)), context);
|
||||
@ -1554,12 +1570,18 @@ static RegisterPrimOp primop_baseNameOf({
|
||||
of the argument. */
|
||||
static void prim_dirOf(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||
{
|
||||
PathSet context;
|
||||
auto path = state.coerceToString(pos, *args[0], context,
|
||||
"while evaluating the first argument passed to builtins.dirOf",
|
||||
state.forceValue(*args[0], pos);
|
||||
if (args[0]->type() == nPath) {
|
||||
auto path = args[0]->path();
|
||||
v.mkPath(path.path.isRoot() ? path : path.parent());
|
||||
} else {
|
||||
NixStringContext context;
|
||||
auto path = state.coerceToString(pos, *args[0], context,
|
||||
"while evaluating the first argument passed to 'builtins.dirOf'",
|
||||
false, false);
|
||||
auto dir = dirOf(*path);
|
||||
if (args[0]->type() == nPath) v.mkPath(dir); else v.mkString(dir, context);
|
||||
auto dir = dirOf(*path);
|
||||
v.mkString(dir, context);
|
||||
}
|
||||
}
|
||||
|
||||
static RegisterPrimOp primop_dirOf({
|
||||
@ -1577,13 +1599,13 @@ static RegisterPrimOp primop_dirOf({
|
||||
static void prim_readFile(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||
{
|
||||
auto path = realisePath(state, pos, *args[0]);
|
||||
auto s = readFile(path);
|
||||
auto s = path.readFile();
|
||||
if (s.find((char) 0) != std::string::npos)
|
||||
state.debugThrowLastTrace(Error("the contents of the file '%1%' cannot be represented as a Nix string", path));
|
||||
StorePathSet refs;
|
||||
if (state.store->isInStore(path)) {
|
||||
if (state.store->isInStore(path.path.abs())) {
|
||||
try {
|
||||
refs = state.store->queryPathInfo(state.store->toStorePath(path).first)->references;
|
||||
refs = state.store->queryPathInfo(state.store->toStorePath(path.path.abs()).first)->references;
|
||||
} catch (Error &) { // FIXME: should be InvalidPathError
|
||||
}
|
||||
// Re-scan references to filter down to just the ones that actually occur in the file.
|
||||
@ -1591,7 +1613,12 @@ static void prim_readFile(EvalState & state, const PosIdx pos, Value * * args, V
|
||||
refsSink << s;
|
||||
refs = refsSink.getResultPaths();
|
||||
}
|
||||
auto context = state.store->printStorePathSet(refs);
|
||||
NixStringContext context;
|
||||
for (auto && p : std::move(refs)) {
|
||||
context.insert(NixStringContextElem::Opaque {
|
||||
.path = std::move((StorePath &&)p),
|
||||
});
|
||||
}
|
||||
v.mkString(s, context);
|
||||
}
|
||||
|
||||
@ -1622,7 +1649,7 @@ static void prim_findFile(EvalState & state, const PosIdx pos, Value * * args, V
|
||||
|
||||
i = getAttr(state, state.sPath, v2->attrs, "in an element of the __nixPath");
|
||||
|
||||
PathSet context;
|
||||
NixStringContext context;
|
||||
auto path = state.coerceToString(pos, *i->value, context,
|
||||
"while evaluating the `path` attribute of an element of the list passed to builtins.findFile",
|
||||
false, false).toOwned();
|
||||
@ -1664,7 +1691,7 @@ static void prim_hashFile(EvalState & state, const PosIdx pos, Value * * args, V
|
||||
|
||||
auto path = realisePath(state, pos, *args[1]);
|
||||
|
||||
v.mkString(hashFile(*ht, path).to_string(Base16, false));
|
||||
v.mkString(hashString(*ht, path.readFile()).to_string(Base16, false));
|
||||
}
|
||||
|
||||
static RegisterPrimOp primop_hashFile({
|
||||
@ -1678,26 +1705,20 @@ static RegisterPrimOp primop_hashFile({
|
||||
.fun = prim_hashFile,
|
||||
});
|
||||
|
||||
|
||||
/* Stringize a directory entry enum. Used by `readFileType' and `readDir'. */
|
||||
static const char * dirEntTypeToString(unsigned char dtType)
|
||||
static std::string_view fileTypeToString(InputAccessor::Type type)
|
||||
{
|
||||
/* Enum DT_(DIR|LNK|REG|UNKNOWN) */
|
||||
switch(dtType) {
|
||||
case DT_REG: return "regular"; break;
|
||||
case DT_DIR: return "directory"; break;
|
||||
case DT_LNK: return "symlink"; break;
|
||||
default: return "unknown"; break;
|
||||
}
|
||||
return "unknown"; /* Unreachable */
|
||||
return
|
||||
type == InputAccessor::Type::tRegular ? "regular" :
|
||||
type == InputAccessor::Type::tDirectory ? "directory" :
|
||||
type == InputAccessor::Type::tSymlink ? "symlink" :
|
||||
"unknown";
|
||||
}
|
||||
|
||||
|
||||
static void prim_readFileType(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||
{
|
||||
auto path = realisePath(state, pos, *args[0]);
|
||||
/* Retrieve the directory entry type and stringize it. */
|
||||
v.mkString(dirEntTypeToString(getFileType(path)));
|
||||
v.mkString(fileTypeToString(path.lstat().type));
|
||||
}
|
||||
|
||||
static RegisterPrimOp primop_readFileType({
|
||||
@ -1718,8 +1739,7 @@ static void prim_readDir(EvalState & state, const PosIdx pos, Value * * args, Va
|
||||
// Retrieve directory entries for all nodes in a directory.
|
||||
// This is similar to `getFileType` but is optimized to reduce system calls
|
||||
// on many systems.
|
||||
DirEntries entries = readDirectory(path);
|
||||
|
||||
auto entries = path.readDirectory();
|
||||
auto attrs = state.buildBindings(entries.size());
|
||||
|
||||
// If we hit unknown directory entry types we may need to fallback to
|
||||
@ -1728,22 +1748,21 @@ static void prim_readDir(EvalState & state, const PosIdx pos, Value * * args, Va
|
||||
// `builtins.readFileType` application.
|
||||
Value * readFileType = nullptr;
|
||||
|
||||
for (auto & ent : entries) {
|
||||
auto & attr = attrs.alloc(ent.name);
|
||||
if (ent.type == DT_UNKNOWN) {
|
||||
for (auto & [name, type] : entries) {
|
||||
auto & attr = attrs.alloc(name);
|
||||
if (!type) {
|
||||
// Some filesystems or operating systems may not be able to return
|
||||
// detailed node info quickly in this case we produce a thunk to
|
||||
// query the file type lazily.
|
||||
auto epath = state.allocValue();
|
||||
Path path2 = path + "/" + ent.name;
|
||||
epath->mkString(path2);
|
||||
epath->mkPath(path + name);
|
||||
if (!readFileType)
|
||||
readFileType = &state.getBuiltin("readFileType");
|
||||
attr.mkApp(readFileType, epath);
|
||||
} else {
|
||||
// This branch of the conditional is much more likely.
|
||||
// Here we just stringize the directory entry type.
|
||||
attr.mkString(dirEntTypeToString(ent.type));
|
||||
attr.mkString(fileTypeToString(*type));
|
||||
}
|
||||
}
|
||||
|
||||
@ -1781,7 +1800,7 @@ static RegisterPrimOp primop_readDir({
|
||||
static void prim_toXML(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||
{
|
||||
std::ostringstream out;
|
||||
PathSet context;
|
||||
NixStringContext context;
|
||||
printValueAsXML(state, true, false, *args[0], out, context, pos);
|
||||
v.mkString(out.str(), context);
|
||||
}
|
||||
@ -1889,7 +1908,7 @@ static RegisterPrimOp primop_toXML({
|
||||
static void prim_toJSON(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||
{
|
||||
std::ostringstream out;
|
||||
PathSet context;
|
||||
NixStringContext context;
|
||||
printValueAsJSON(state, true, *args[0], pos, out, context);
|
||||
v.mkString(out.str(), context);
|
||||
}
|
||||
@ -1939,22 +1958,23 @@ static RegisterPrimOp primop_fromJSON({
|
||||
as an input by derivations. */
|
||||
static void prim_toFile(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||
{
|
||||
PathSet context;
|
||||
NixStringContext context;
|
||||
std::string name(state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.toFile"));
|
||||
std::string contents(state.forceString(*args[1], context, pos, "while evaluating the second argument passed to builtins.toFile"));
|
||||
|
||||
StorePathSet refs;
|
||||
|
||||
for (auto path : context) {
|
||||
if (path.at(0) != '/')
|
||||
for (auto c : context) {
|
||||
if (auto p = std::get_if<NixStringContextElem::Opaque>(&c))
|
||||
refs.insert(p->path);
|
||||
else
|
||||
state.debugThrowLastTrace(EvalError({
|
||||
.msg = hintfmt(
|
||||
"in 'toFile': the file named '%1%' must not contain a reference "
|
||||
"to a derivation but contains (%2%)",
|
||||
name, path),
|
||||
name, c.to_string()),
|
||||
.errPos = state.positions[pos]
|
||||
}));
|
||||
refs.insert(state.store->parseStorePath(path));
|
||||
}
|
||||
|
||||
auto storePath = settings.readOnlyMode
|
||||
@ -2049,13 +2069,13 @@ static RegisterPrimOp primop_toFile({
|
||||
static void addPath(
|
||||
EvalState & state,
|
||||
const PosIdx pos,
|
||||
const std::string & name,
|
||||
std::string_view name,
|
||||
Path path,
|
||||
Value * filterFun,
|
||||
FileIngestionMethod method,
|
||||
const std::optional<Hash> expectedHash,
|
||||
Value & v,
|
||||
const PathSet & context)
|
||||
const NixStringContext & context)
|
||||
{
|
||||
try {
|
||||
// FIXME: handle CA derivation outputs (where path needs to
|
||||
@ -2077,7 +2097,7 @@ static void addPath(
|
||||
|
||||
path = evalSettings.pureEval && expectedHash
|
||||
? path
|
||||
: state.checkSourcePath(path);
|
||||
: state.checkSourcePath(CanonPath(path)).path.abs();
|
||||
|
||||
PathFilter filter = filterFun ? ([&](const Path & path) {
|
||||
auto st = lstat(path);
|
||||
@ -2103,7 +2123,13 @@ static void addPath(
|
||||
|
||||
std::optional<StorePath> expectedStorePath;
|
||||
if (expectedHash)
|
||||
expectedStorePath = state.store->makeFixedOutputPath(method, *expectedHash, name);
|
||||
expectedStorePath = state.store->makeFixedOutputPath(name, FixedOutputInfo {
|
||||
.hash = {
|
||||
.method = method,
|
||||
.hash = *expectedHash,
|
||||
},
|
||||
.references = {},
|
||||
});
|
||||
|
||||
if (!expectedHash || !state.store->isValidPath(*expectedStorePath)) {
|
||||
StorePath dstPath = settings.readOnlyMode
|
||||
@ -2123,10 +2149,11 @@ static void addPath(
|
||||
|
||||
static void prim_filterSource(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||
{
|
||||
PathSet context;
|
||||
Path path = state.coerceToPath(pos, *args[1], context, "while evaluating the second argument (the path to filter) passed to builtins.filterSource");
|
||||
NixStringContext context;
|
||||
auto path = state.coerceToPath(pos, *args[1], context,
|
||||
"while evaluating the second argument (the path to filter) passed to builtins.filterSource");
|
||||
state.forceFunction(*args[0], pos, "while evaluating the first argument passed to builtins.filterSource");
|
||||
addPath(state, pos, std::string(baseNameOf(path)), path, args[0], FileIngestionMethod::Recursive, std::nullopt, v, context);
|
||||
addPath(state, pos, path.baseName(), path.path.abs(), args[0], FileIngestionMethod::Recursive, std::nullopt, v, context);
|
||||
}
|
||||
|
||||
static RegisterPrimOp primop_filterSource({
|
||||
@ -2186,18 +2213,19 @@ static RegisterPrimOp primop_filterSource({
|
||||
|
||||
static void prim_path(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||
{
|
||||
state.forceAttrs(*args[0], pos, "while evaluating the argument passed to builtins.path");
|
||||
Path path;
|
||||
std::optional<SourcePath> path;
|
||||
std::string name;
|
||||
Value * filterFun = nullptr;
|
||||
auto method = FileIngestionMethod::Recursive;
|
||||
std::optional<Hash> expectedHash;
|
||||
PathSet context;
|
||||
NixStringContext context;
|
||||
|
||||
state.forceAttrs(*args[0], pos, "while evaluating the argument passed to 'builtins.path'");
|
||||
|
||||
for (auto & attr : *args[0]->attrs) {
|
||||
auto n = state.symbols[attr.name];
|
||||
if (n == "path")
|
||||
path = state.coerceToPath(attr.pos, *attr.value, context, "while evaluating the `path` attribute passed to builtins.path");
|
||||
path.emplace(state.coerceToPath(attr.pos, *attr.value, context, "while evaluating the 'path' attribute passed to 'builtins.path'"));
|
||||
else if (attr.name == state.sName)
|
||||
name = state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the `name` attribute passed to builtins.path");
|
||||
else if (n == "filter")
|
||||
@ -2212,15 +2240,15 @@ static void prim_path(EvalState & state, const PosIdx pos, Value * * args, Value
|
||||
.errPos = state.positions[attr.pos]
|
||||
}));
|
||||
}
|
||||
if (path.empty())
|
||||
if (!path)
|
||||
state.debugThrowLastTrace(EvalError({
|
||||
.msg = hintfmt("missing required 'path' attribute in the first argument to builtins.path"),
|
||||
.errPos = state.positions[pos]
|
||||
}));
|
||||
if (name.empty())
|
||||
name = baseNameOf(path);
|
||||
name = path->baseName();
|
||||
|
||||
addPath(state, pos, name, path, filterFun, method, expectedHash, v, context);
|
||||
addPath(state, pos, name, path->path.abs(), filterFun, method, expectedHash, v, context);
|
||||
}
|
||||
|
||||
static RegisterPrimOp primop_path({
|
||||
@ -3526,7 +3554,7 @@ static RegisterPrimOp primop_lessThan({
|
||||
`"/nix/store/whatever..."'. */
|
||||
static void prim_toString(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||
{
|
||||
PathSet context;
|
||||
NixStringContext context;
|
||||
auto s = state.coerceToString(pos, *args[0], context,
|
||||
"while evaluating the first argument passed to builtins.toString",
|
||||
true, false);
|
||||
@ -3565,7 +3593,7 @@ static void prim_substring(EvalState & state, const PosIdx pos, Value * * args,
|
||||
{
|
||||
int start = state.forceInt(*args[0], pos, "while evaluating the first argument (the start offset) passed to builtins.substring");
|
||||
int len = state.forceInt(*args[1], pos, "while evaluating the second argument (the substring length) passed to builtins.substring");
|
||||
PathSet context;
|
||||
NixStringContext context;
|
||||
auto s = state.coerceToString(pos, *args[2], context, "while evaluating the third argument (the string) passed to builtins.substring");
|
||||
|
||||
if (start < 0)
|
||||
@ -3599,7 +3627,7 @@ static RegisterPrimOp primop_substring({
|
||||
|
||||
static void prim_stringLength(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||
{
|
||||
PathSet context;
|
||||
NixStringContext context;
|
||||
auto s = state.coerceToString(pos, *args[0], context, "while evaluating the argument passed to builtins.stringLength");
|
||||
v.mkInt(s->size());
|
||||
}
|
||||
@ -3625,7 +3653,7 @@ static void prim_hashString(EvalState & state, const PosIdx pos, Value * * args,
|
||||
.errPos = state.positions[pos]
|
||||
}));
|
||||
|
||||
PathSet context; // discarded
|
||||
NixStringContext context; // discarded
|
||||
auto s = state.forceString(*args[1], context, pos, "while evaluating the second argument passed to builtins.hashString");
|
||||
|
||||
v.mkString(hashString(*ht, s).to_string(Base16, false));
|
||||
@ -3671,7 +3699,7 @@ void prim_match(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||
|
||||
auto regex = state.regexCache->get(re);
|
||||
|
||||
PathSet context;
|
||||
NixStringContext context;
|
||||
const auto str = state.forceString(*args[1], context, pos, "while evaluating the second argument passed to builtins.match");
|
||||
|
||||
std::cmatch match;
|
||||
@ -3751,7 +3779,7 @@ void prim_split(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||
|
||||
auto regex = state.regexCache->get(re);
|
||||
|
||||
PathSet context;
|
||||
NixStringContext context;
|
||||
const auto str = state.forceString(*args[1], context, pos, "while evaluating the second argument passed to builtins.split");
|
||||
|
||||
auto begin = std::cregex_iterator(str.begin(), str.end(), regex);
|
||||
@ -3848,7 +3876,7 @@ static RegisterPrimOp primop_split({
|
||||
|
||||
static void prim_concatStringsSep(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||
{
|
||||
PathSet context;
|
||||
NixStringContext context;
|
||||
|
||||
auto sep = state.forceString(*args[0], context, pos, "while evaluating the first argument (the separator string) passed to builtins.concatStringsSep");
|
||||
state.forceList(*args[1], pos, "while evaluating the second argument (the list of strings to concat) passed to builtins.concatStringsSep");
|
||||
@ -3888,15 +3916,15 @@ static void prim_replaceStrings(EvalState & state, const PosIdx pos, Value * * a
|
||||
for (auto elem : args[0]->listItems())
|
||||
from.emplace_back(state.forceString(*elem, pos, "while evaluating one of the strings to replace passed to builtins.replaceStrings"));
|
||||
|
||||
std::vector<std::pair<std::string, PathSet>> to;
|
||||
std::vector<std::pair<std::string, NixStringContext>> to;
|
||||
to.reserve(args[1]->listSize());
|
||||
for (auto elem : args[1]->listItems()) {
|
||||
PathSet ctx;
|
||||
NixStringContext ctx;
|
||||
auto s = state.forceString(*elem, ctx, pos, "while evaluating one of the replacement strings passed to builtins.replaceStrings");
|
||||
to.emplace_back(s, std::move(ctx));
|
||||
}
|
||||
|
||||
PathSet context;
|
||||
NixStringContext context;
|
||||
auto s = state.forceString(*args[2], context, pos, "while evaluating the third argument passed to builtins.replaceStrings");
|
||||
|
||||
std::string res;
|
||||
@ -4138,7 +4166,6 @@ void EvalState::createBaseEnv()
|
||||
|
||||
/* Add a wrapper around the derivation primop that computes the
|
||||
`drvPath' and `outPath' attributes lazily. */
|
||||
sDerivationNix = symbols.create(derivationNixPath);
|
||||
auto vDerivation = allocValue();
|
||||
addConstant("derivation", vDerivation);
|
||||
|
||||
@ -4155,7 +4182,7 @@ void EvalState::createBaseEnv()
|
||||
// the parser needs two NUL bytes as terminators; one of them
|
||||
// is implied by being a C string.
|
||||
"\0";
|
||||
eval(parse(code, sizeof(code), derivationNixPath, "/", staticBaseEnv), *vDerivation);
|
||||
eval(parse(code, sizeof(code), derivationInternal, {CanonPath::root}, staticBaseEnv), *vDerivation);
|
||||
}
|
||||
|
||||
|
||||
|
@ -7,7 +7,7 @@ namespace nix {
|
||||
|
||||
static void prim_unsafeDiscardStringContext(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||
{
|
||||
PathSet context;
|
||||
NixStringContext context;
|
||||
auto s = state.coerceToString(pos, *args[0], context, "while evaluating the argument passed to builtins.unsafeDiscardStringContext");
|
||||
v.mkString(*s);
|
||||
}
|
||||
@ -17,7 +17,7 @@ static RegisterPrimOp primop_unsafeDiscardStringContext("__unsafeDiscardStringCo
|
||||
|
||||
static void prim_hasContext(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||
{
|
||||
PathSet context;
|
||||
NixStringContext context;
|
||||
state.forceString(*args[0], context, pos, "while evaluating the argument passed to builtins.hasContext");
|
||||
v.mkBool(!context.empty());
|
||||
}
|
||||
@ -33,17 +33,18 @@ static RegisterPrimOp primop_hasContext("__hasContext", 1, prim_hasContext);
|
||||
drv.inputDrvs. */
|
||||
static void prim_unsafeDiscardOutputDependency(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||
{
|
||||
PathSet context;
|
||||
NixStringContext context;
|
||||
auto s = state.coerceToString(pos, *args[0], context, "while evaluating the argument passed to builtins.unsafeDiscardOutputDependency");
|
||||
|
||||
PathSet context2;
|
||||
for (auto && p : context) {
|
||||
auto c = NixStringContextElem::parse(*state.store, p);
|
||||
NixStringContext context2;
|
||||
for (auto && c : context) {
|
||||
if (auto * ptr = std::get_if<NixStringContextElem::DrvDeep>(&c)) {
|
||||
context2.emplace(state.store->printStorePath(ptr->drvPath));
|
||||
context2.emplace(NixStringContextElem::Opaque {
|
||||
.path = ptr->drvPath
|
||||
});
|
||||
} else {
|
||||
/* Can reuse original item */
|
||||
context2.emplace(std::move(p));
|
||||
context2.emplace(std::move(c));
|
||||
}
|
||||
}
|
||||
|
||||
@ -79,22 +80,21 @@ static void prim_getContext(EvalState & state, const PosIdx pos, Value * * args,
|
||||
bool allOutputs = false;
|
||||
Strings outputs;
|
||||
};
|
||||
PathSet context;
|
||||
NixStringContext context;
|
||||
state.forceString(*args[0], context, pos, "while evaluating the argument passed to builtins.getContext");
|
||||
auto contextInfos = std::map<StorePath, ContextInfo>();
|
||||
for (const auto & p : context) {
|
||||
NixStringContextElem ctx = NixStringContextElem::parse(*state.store, p);
|
||||
for (auto && i : context) {
|
||||
std::visit(overloaded {
|
||||
[&](NixStringContextElem::DrvDeep & d) {
|
||||
contextInfos[d.drvPath].allOutputs = true;
|
||||
[&](NixStringContextElem::DrvDeep && d) {
|
||||
contextInfos[std::move(d.drvPath)].allOutputs = true;
|
||||
},
|
||||
[&](NixStringContextElem::Built & b) {
|
||||
contextInfos[b.drvPath].outputs.emplace_back(std::move(b.output));
|
||||
[&](NixStringContextElem::Built && b) {
|
||||
contextInfos[std::move(b.drvPath)].outputs.emplace_back(std::move(b.output));
|
||||
},
|
||||
[&](NixStringContextElem::Opaque & o) {
|
||||
contextInfos[o.path].path = true;
|
||||
[&](NixStringContextElem::Opaque && o) {
|
||||
contextInfos[std::move(o.path)].path = true;
|
||||
},
|
||||
}, ctx.raw());
|
||||
}, ((NixStringContextElem &&) i).raw());
|
||||
}
|
||||
|
||||
auto attrs = state.buildBindings(contextInfos.size());
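The getContext hunk above dispatches on the context-element variant with the `overloaded` visitor helper. A self-contained sketch of that idiom in plain C++17, using stand-in types rather than Nix's own (the helper's defining header is not part of this diff, so the two-line definition below is an assumption about its shape):

    #include <iostream>
    #include <string>
    #include <variant>

    // Assumed to match Nix's own helper: inherit call operators from all lambdas.
    template<class... Ts> struct overloaded : Ts... { using Ts::operator()...; };
    template<class... Ts> overloaded(Ts...) -> overloaded<Ts...>;

    // Stand-ins for NixStringContextElem::{Opaque,DrvDeep,Built}.
    struct Opaque  { std::string path; };
    struct DrvDeep { std::string drvPath; };
    struct Built   { std::string drvPath; std::string output; };
    using Elem = std::variant<Opaque, DrvDeep, Built>;

    int main()
    {
        Elem e = Built { "example.drv", "out" };
        std::visit(overloaded {
            [](const Opaque & o)  { std::cout << "opaque: "   << o.path << "\n"; },
            [](const DrvDeep & d) { std::cout << "drv-deep: " << d.drvPath << "\n"; },
            [](const Built & b)   { std::cout << "built: "    << b.output << "!" << b.drvPath << "\n"; },
        }, e);
    }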
|
||||
@ -129,7 +129,7 @@ static RegisterPrimOp primop_getContext("__getContext", 1, prim_getContext);
|
||||
*/
|
||||
static void prim_appendContext(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||
{
|
||||
PathSet context;
|
||||
NixStringContext context;
|
||||
auto orig = state.forceString(*args[0], context, noPos, "while evaluating the first argument passed to builtins.appendContext");
|
||||
|
||||
state.forceAttrs(*args[1], pos, "while evaluating the second argument passed to builtins.appendContext");
|
||||
@ -143,13 +143,16 @@ static void prim_appendContext(EvalState & state, const PosIdx pos, Value * * ar
|
||||
.msg = hintfmt("context key '%s' is not a store path", name),
|
||||
.errPos = state.positions[i.pos]
|
||||
});
|
||||
auto namePath = state.store->parseStorePath(name);
|
||||
if (!settings.readOnlyMode)
|
||||
state.store->ensurePath(state.store->parseStorePath(name));
|
||||
state.store->ensurePath(namePath);
|
||||
state.forceAttrs(*i.value, i.pos, "while evaluating the value of a string context");
|
||||
auto iter = i.value->attrs->find(sPath);
|
||||
if (iter != i.value->attrs->end()) {
|
||||
if (state.forceBool(*iter->value, iter->pos, "while evaluating the `path` attribute of a string context"))
|
||||
context.emplace(name);
|
||||
context.emplace(NixStringContextElem::Opaque {
|
||||
.path = namePath,
|
||||
});
|
||||
}
|
||||
|
||||
iter = i.value->attrs->find(sAllOutputs);
|
||||
@ -161,7 +164,9 @@ static void prim_appendContext(EvalState & state, const PosIdx pos, Value * * ar
|
||||
.errPos = state.positions[i.pos]
|
||||
});
|
||||
}
|
||||
context.insert(concatStrings("=", name));
|
||||
context.emplace(NixStringContextElem::DrvDeep {
|
||||
.drvPath = namePath,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@ -176,7 +181,10 @@ static void prim_appendContext(EvalState & state, const PosIdx pos, Value * * ar
|
||||
}
|
||||
for (auto elem : iter->value->listItems()) {
|
||||
auto outputName = state.forceStringNoCtx(*elem, iter->pos, "while evaluating an output name within a string context");
|
||||
context.insert(concatStrings("!", outputName, "!", name));
|
||||
context.emplace(NixStringContextElem::Built {
|
||||
.drvPath = namePath,
|
||||
.output = std::string { outputName },
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -18,7 +18,7 @@ static void prim_fetchClosure(EvalState & state, const PosIdx pos, Value * * arg
|
||||
const auto & attrName = state.symbols[attr.name];
|
||||
|
||||
if (attrName == "fromPath") {
|
||||
PathSet context;
|
||||
NixStringContext context;
|
||||
fromPath = state.coerceToStorePath(attr.pos, *attr.value, context,
|
||||
"while evaluating the 'fromPath' attribute passed to builtins.fetchClosure");
|
||||
}
|
||||
@ -27,7 +27,7 @@ static void prim_fetchClosure(EvalState & state, const PosIdx pos, Value * * arg
|
||||
state.forceValue(*attr.value, attr.pos);
|
||||
toCA = true;
|
||||
if (attr.value->type() != nString || attr.value->string.s != std::string("")) {
|
||||
PathSet context;
|
||||
NixStringContext context;
|
||||
toPath = state.coerceToStorePath(attr.pos, *attr.value, context,
|
||||
"while evaluating the 'toPath' attribute passed to builtins.fetchClosure");
|
||||
}
|
||||
@ -114,8 +114,7 @@ static void prim_fetchClosure(EvalState & state, const PosIdx pos, Value * * arg
|
||||
});
|
||||
}
|
||||
|
||||
auto toPathS = state.store->printStorePath(*toPath);
|
||||
v.mkString(toPathS, {toPathS});
|
||||
state.mkStorePathString(*toPath, v);
|
||||
}
|
||||
|
||||
static RegisterPrimOp primop_fetchClosure({
|
||||
|
@ -13,7 +13,7 @@ static void prim_fetchMercurial(EvalState & state, const PosIdx pos, Value * * a
|
||||
std::optional<Hash> rev;
|
||||
std::optional<std::string> ref;
|
||||
std::string_view name = "source";
|
||||
PathSet context;
|
||||
NixStringContext context;
|
||||
|
||||
state.forceValue(*args[0], pos);
|
||||
|
||||
@ -73,8 +73,7 @@ static void prim_fetchMercurial(EvalState & state, const PosIdx pos, Value * * a
|
||||
auto [tree, input2] = input.fetch(state.store);
|
||||
|
||||
auto attrs2 = state.buildBindings(8);
|
||||
auto storePath = state.store->printStorePath(tree.storePath);
|
||||
attrs2.alloc(state.sOutPath).mkString(storePath, {storePath});
|
||||
state.mkStorePathString(tree.storePath, attrs2.alloc(state.sOutPath));
|
||||
if (input2.getRef())
|
||||
attrs2.alloc("branch").mkString(*input2.getRef());
|
||||
// Backward compatibility: set 'rev' to
|
||||
|
@ -24,9 +24,8 @@ void emitTreeAttrs(
|
||||
|
||||
auto attrs = state.buildBindings(8);
|
||||
|
||||
auto storePath = state.store->printStorePath(tree.storePath);
|
||||
|
||||
attrs.alloc(state.sOutPath).mkString(storePath, {storePath});
|
||||
state.mkStorePathString(tree.storePath, attrs.alloc(state.sOutPath));
|
||||
|
||||
// FIXME: support arbitrary input attributes.
|
||||
|
||||
@ -107,7 +106,7 @@ static void fetchTree(
|
||||
const FetchTreeParams & params = FetchTreeParams{}
|
||||
) {
|
||||
fetchers::Input input;
|
||||
PathSet context;
|
||||
NixStringContext context;
|
||||
|
||||
state.forceValue(*args[0], pos);
|
||||
|
||||
@ -243,10 +242,15 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v
|
||||
|
||||
// early exit if pinned and already in the store
|
||||
if (expectedHash && expectedHash->type == htSHA256) {
|
||||
auto expectedPath =
|
||||
unpack
|
||||
? state.store->makeFixedOutputPath(FileIngestionMethod::Recursive, *expectedHash, name, {})
|
||||
: state.store->makeFixedOutputPath(FileIngestionMethod::Flat, *expectedHash, name, {});
|
||||
auto expectedPath = state.store->makeFixedOutputPath(
|
||||
name,
|
||||
FixedOutputInfo {
|
||||
.hash = {
|
||||
.method = unpack ? FileIngestionMethod::Recursive : FileIngestionMethod::Flat,
|
||||
.hash = *expectedHash,
|
||||
},
|
||||
.references = {}
|
||||
});
|
||||
|
||||
if (state.store->isValidPath(expectedPath)) {
|
||||
state.allowAndSetStorePathString(expectedPath, v);
|
||||
|
78
src/libexpr/print.cc
Normal file
@ -0,0 +1,78 @@
|
||||
#include "print.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
std::ostream &
|
||||
printLiteralString(std::ostream & str, const std::string_view string)
|
||||
{
|
||||
str << "\"";
|
||||
for (auto i = string.begin(); i != string.end(); ++i) {
|
||||
if (*i == '\"' || *i == '\\') str << "\\" << *i;
|
||||
else if (*i == '\n') str << "\\n";
|
||||
else if (*i == '\r') str << "\\r";
|
||||
else if (*i == '\t') str << "\\t";
|
||||
else if (*i == '$' && *(i+1) == '{') str << "\\" << *i;
|
||||
else str << *i;
|
||||
}
|
||||
str << "\"";
|
||||
return str;
|
||||
}
|
||||
|
||||
std::ostream &
|
||||
printLiteralBool(std::ostream & str, bool boolean)
|
||||
{
|
||||
str << (boolean ? "true" : "false");
|
||||
return str;
|
||||
}
|
||||
|
||||
std::ostream &
|
||||
printIdentifier(std::ostream & str, std::string_view s) {
|
||||
if (s.empty())
|
||||
str << "\"\"";
|
||||
else if (s == "if") // FIXME: handle other keywords
|
||||
str << '"' << s << '"';
|
||||
else {
|
||||
char c = s[0];
|
||||
if (!((c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || c == '_')) {
|
||||
printLiteralString(str, s);
|
||||
return str;
|
||||
}
|
||||
for (auto c : s)
|
||||
if (!((c >= 'a' && c <= 'z') ||
|
||||
(c >= 'A' && c <= 'Z') ||
|
||||
(c >= '0' && c <= '9') ||
|
||||
c == '_' || c == '\'' || c == '-')) {
|
||||
printLiteralString(str, s);
|
||||
return str;
|
||||
}
|
||||
str << s;
|
||||
}
|
||||
return str;
|
||||
}
|
||||
|
||||
// FIXME: keywords
|
||||
static bool isVarName(std::string_view s)
|
||||
{
|
||||
if (s.size() == 0) return false;
|
||||
char c = s[0];
|
||||
if ((c >= '0' && c <= '9') || c == '-' || c == '\'') return false;
|
||||
for (auto & i : s)
|
||||
if (!((i >= 'a' && i <= 'z') ||
|
||||
(i >= 'A' && i <= 'Z') ||
|
||||
(i >= '0' && i <= '9') ||
|
||||
i == '_' || i == '-' || i == '\''))
|
||||
return false;
|
||||
return true;
|
||||
}
|
||||
|
||||
std::ostream &
|
||||
printAttributeName(std::ostream & str, std::string_view name) {
|
||||
if (isVarName(name))
|
||||
str << name;
|
||||
else
|
||||
printLiteralString(str, name);
|
||||
return str;
|
||||
}
|
||||
|
||||
|
||||
}
|
48
src/libexpr/print.hh
Normal file
@ -0,0 +1,48 @@
|
||||
#pragma once
|
||||
/**
|
||||
* @file
|
||||
* @brief Common printing functions for the Nix language
|
||||
*
|
||||
* While most types come with their own methods for printing, they share some
|
||||
* functions that are placed here.
|
||||
*/
|
||||
|
||||
#include <iostream>
|
||||
|
||||
namespace nix {
|
||||
/**
|
||||
* Print a string as a Nix string literal.
|
||||
*
|
||||
* Quotes and fairly minimal escaping are added.
|
||||
*
|
||||
* @param s The logical string
|
||||
*/
|
||||
std::ostream & printLiteralString(std::ostream & o, std::string_view s);
|
||||
inline std::ostream & printLiteralString(std::ostream & o, const char * s) {
|
||||
return printLiteralString(o, std::string_view(s));
|
||||
}
|
||||
inline std::ostream & printLiteralString(std::ostream & o, const std::string & s) {
|
||||
return printLiteralString(o, std::string_view(s));
|
||||
}
|
||||
|
||||
/** Print `true` or `false`. */
|
||||
std::ostream & printLiteralBool(std::ostream & o, bool b);
|
||||
|
||||
/**
|
||||
* Print a string as an attribute name in the Nix expression language syntax.
|
||||
*
|
||||
* Prints a quoted string if necessary.
|
||||
*/
|
||||
std::ostream & printAttributeName(std::ostream & o, std::string_view s);
|
||||
|
||||
/**
|
||||
* Print a string as an identifier in the Nix expression language syntax.
|
||||
*
|
||||
* FIXME: "identifier" is ambiguous. Identifiers do not have a single
|
||||
* textual representation. They can be used in variable references,
|
||||
* let bindings, left-hand sides or attribute names in a select
|
||||
* expression, or something else entirely, like JSON. Use one of the
|
||||
* `print*` functions instead.
|
||||
*/
|
||||
std::ostream & printIdentifier(std::ostream & o, std::string_view s);
|
||||
}
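Taken together, the new print.cc and print.hh are small, free-standing stream helpers. A usage sketch, assuming only this new header and the standard library are needed:

    #include <iostream>
    #include <sstream>
    #include "print.hh"   // the header added above

    int main()
    {
        std::ostringstream out;
        nix::printLiteralString(out, "say \"hi\" ${name}"); // quotes and ${ are escaped
        out << ' ';
        nix::printLiteralBool(out, true);
        out << ' ';
        nix::printAttributeName(out, "foo-bar");    // valid identifier: emitted bare
        out << ' ';
        nix::printAttributeName(out, "two words");  // not an identifier: quoted
        std::cout << out.str() << "\n";
        // prints: "say \"hi\" \${name}" true foo-bar "two words"
    }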
|
@ -8,7 +8,7 @@ namespace nix {
|
||||
protected:
|
||||
std::string getJSONValue(Value& value) {
|
||||
std::stringstream ss;
|
||||
PathSet ps;
|
||||
NixStringContext ps;
|
||||
printValueAsJSON(state, true, value, noPos, ss, ps);
|
||||
return ss.str();
|
||||
}
|
||||
|
@ -28,7 +28,7 @@ namespace nix {
|
||||
}
|
||||
Value eval(std::string input, bool forceValue = true) {
|
||||
Value v;
|
||||
Expr * e = state.parseExprFromString(input, "");
|
||||
Expr * e = state.parseExprFromString(input, state.rootPath(CanonPath::root));
|
||||
assert(e);
|
||||
state.eval(e, v);
|
||||
if (forceValue)
|
||||
|
@ -12,7 +12,7 @@ libexpr-tests_SOURCES := \
|
||||
$(wildcard $(d)/*.cc) \
|
||||
$(wildcard $(d)/value/*.cc)
|
||||
|
||||
libexpr-tests_CXXFLAGS += -I src/libexpr -I src/libutil -I src/libstore -I src/libexpr/tests
|
||||
libexpr-tests_CXXFLAGS += -I src/libexpr -I src/libutil -I src/libstore -I src/libexpr/tests -I src/libfetchers
|
||||
|
||||
libexpr-tests_LIBS = libstore-tests libutils-tests libexpr libutil libstore libfetchers
|
||||
|
||||
|
@ -8,69 +8,62 @@
|
||||
|
||||
namespace nix {
|
||||
|
||||
// Testing of trivial expressions
|
||||
struct NixStringContextElemTest : public LibExprTest {
|
||||
const Store & store() const {
|
||||
return *LibExprTest::store;
|
||||
}
|
||||
};
|
||||
|
||||
TEST_F(NixStringContextElemTest, empty_invalid) {
|
||||
TEST(NixStringContextElemTest, empty_invalid) {
|
||||
EXPECT_THROW(
|
||||
NixStringContextElem::parse(store(), ""),
|
||||
NixStringContextElem::parse(""),
|
||||
BadNixStringContextElem);
|
||||
}
|
||||
|
||||
TEST_F(NixStringContextElemTest, single_bang_invalid) {
|
||||
TEST(NixStringContextElemTest, single_bang_invalid) {
|
||||
EXPECT_THROW(
|
||||
NixStringContextElem::parse(store(), "!"),
|
||||
NixStringContextElem::parse("!"),
|
||||
BadNixStringContextElem);
|
||||
}
|
||||
|
||||
TEST_F(NixStringContextElemTest, double_bang_invalid) {
|
||||
TEST(NixStringContextElemTest, double_bang_invalid) {
|
||||
EXPECT_THROW(
|
||||
NixStringContextElem::parse(store(), "!!/"),
|
||||
NixStringContextElem::parse("!!/"),
|
||||
BadStorePath);
|
||||
}
|
||||
|
||||
TEST_F(NixStringContextElemTest, eq_slash_invalid) {
|
||||
TEST(NixStringContextElemTest, eq_slash_invalid) {
|
||||
EXPECT_THROW(
|
||||
NixStringContextElem::parse(store(), "=/"),
|
||||
NixStringContextElem::parse("=/"),
|
||||
BadStorePath);
|
||||
}
|
||||
|
||||
TEST_F(NixStringContextElemTest, slash_invalid) {
|
||||
TEST(NixStringContextElemTest, slash_invalid) {
|
||||
EXPECT_THROW(
|
||||
NixStringContextElem::parse(store(), "/"),
|
||||
NixStringContextElem::parse("/"),
|
||||
BadStorePath);
|
||||
}
|
||||
|
||||
TEST_F(NixStringContextElemTest, opaque) {
|
||||
std::string_view opaque = "/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x";
|
||||
auto elem = NixStringContextElem::parse(store(), opaque);
|
||||
TEST(NixStringContextElemTest, opaque) {
|
||||
std::string_view opaque = "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x";
|
||||
auto elem = NixStringContextElem::parse(opaque);
|
||||
auto * p = std::get_if<NixStringContextElem::Opaque>(&elem);
|
||||
ASSERT_TRUE(p);
|
||||
ASSERT_EQ(p->path, store().parseStorePath(opaque));
|
||||
ASSERT_EQ(elem.to_string(store()), opaque);
|
||||
ASSERT_EQ(p->path, StorePath { opaque });
|
||||
ASSERT_EQ(elem.to_string(), opaque);
|
||||
}
|
||||
|
||||
TEST_F(NixStringContextElemTest, drvDeep) {
|
||||
std::string_view drvDeep = "=/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x.drv";
|
||||
auto elem = NixStringContextElem::parse(store(), drvDeep);
|
||||
TEST(NixStringContextElemTest, drvDeep) {
|
||||
std::string_view drvDeep = "=g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x.drv";
|
||||
auto elem = NixStringContextElem::parse(drvDeep);
|
||||
auto * p = std::get_if<NixStringContextElem::DrvDeep>(&elem);
|
||||
ASSERT_TRUE(p);
|
||||
ASSERT_EQ(p->drvPath, store().parseStorePath(drvDeep.substr(1)));
|
||||
ASSERT_EQ(elem.to_string(store()), drvDeep);
|
||||
ASSERT_EQ(p->drvPath, StorePath { drvDeep.substr(1) });
|
||||
ASSERT_EQ(elem.to_string(), drvDeep);
|
||||
}
|
||||
|
||||
TEST_F(NixStringContextElemTest, built) {
|
||||
std::string_view built = "!foo!/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x.drv";
|
||||
auto elem = NixStringContextElem::parse(store(), built);
|
||||
TEST(NixStringContextElemTest, built) {
|
||||
std::string_view built = "!foo!g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x.drv";
|
||||
auto elem = NixStringContextElem::parse(built);
|
||||
auto * p = std::get_if<NixStringContextElem::Built>(&elem);
|
||||
ASSERT_TRUE(p);
|
||||
ASSERT_EQ(p->output, "foo");
|
||||
ASSERT_EQ(p->drvPath, store().parseStorePath(built.substr(5)));
|
||||
ASSERT_EQ(elem.to_string(store()), built);
|
||||
ASSERT_EQ(p->drvPath, StorePath { built.substr(5) });
|
||||
ASSERT_EQ(elem.to_string(), built);
|
||||
}
|
||||
|
||||
}
|
||||
@ -116,12 +109,12 @@ Gen<NixStringContextElem> Arbitrary<NixStringContextElem>::arbitrary()
|
||||
|
||||
namespace nix {
|
||||
|
||||
RC_GTEST_FIXTURE_PROP(
|
||||
RC_GTEST_PROP(
|
||||
NixStringContextElemTest,
|
||||
prop_round_rip,
|
||||
(const NixStringContextElem & o))
|
||||
{
|
||||
RC_ASSERT(o == NixStringContextElem::parse(store(), o.to_string(store())));
|
||||
RC_ASSERT(o == NixStringContextElem::parse(o.to_string()));
|
||||
}
|
||||
|
||||
}
|
||||
|
@ -11,7 +11,7 @@
|
||||
namespace nix {
|
||||
using json = nlohmann::json;
|
||||
json printValueAsJSON(EvalState & state, bool strict,
|
||||
Value & v, const PosIdx pos, PathSet & context, bool copyToStore)
|
||||
Value & v, const PosIdx pos, NixStringContext & context, bool copyToStore)
|
||||
{
|
||||
checkInterrupt();
|
||||
|
||||
@ -36,9 +36,10 @@ json printValueAsJSON(EvalState & state, bool strict,
|
||||
|
||||
case nPath:
|
||||
if (copyToStore)
|
||||
out = state.store->printStorePath(state.copyPathToStore(context, v.path));
|
||||
out = state.store->printStorePath(
|
||||
state.copyPathToStore(context, v.path()));
|
||||
else
|
||||
out = v.path;
|
||||
out = v.path().path.abs();
|
||||
break;
|
||||
|
||||
case nNull:
|
||||
@ -94,13 +95,13 @@ json printValueAsJSON(EvalState & state, bool strict,
|
||||
}
|
||||
|
||||
void printValueAsJSON(EvalState & state, bool strict,
|
||||
Value & v, const PosIdx pos, std::ostream & str, PathSet & context, bool copyToStore)
|
||||
Value & v, const PosIdx pos, std::ostream & str, NixStringContext & context, bool copyToStore)
|
||||
{
|
||||
str << printValueAsJSON(state, strict, v, pos, context, copyToStore);
|
||||
}
|
||||
|
||||
json ExternalValueBase::printValueAsJSON(EvalState & state, bool strict,
|
||||
PathSet & context, bool copyToStore) const
|
||||
NixStringContext & context, bool copyToStore) const
|
||||
{
|
||||
state.debugThrowLastTrace(TypeError("cannot convert %1% to JSON", showType()));
|
||||
}
|
||||
|
@ -11,9 +11,9 @@
|
||||
namespace nix {
|
||||
|
||||
nlohmann::json printValueAsJSON(EvalState & state, bool strict,
|
||||
Value & v, const PosIdx pos, PathSet & context, bool copyToStore = true);
|
||||
Value & v, const PosIdx pos, NixStringContext & context, bool copyToStore = true);
|
||||
|
||||
void printValueAsJSON(EvalState & state, bool strict,
|
||||
Value & v, const PosIdx pos, std::ostream & str, PathSet & context, bool copyToStore = true);
|
||||
Value & v, const PosIdx pos, std::ostream & str, NixStringContext & context, bool copyToStore = true);
|
||||
|
||||
}
|
||||
|
@ -18,21 +18,21 @@ static XMLAttrs singletonAttrs(const std::string & name, const std::string & val
|
||||
|
||||
|
||||
static void printValueAsXML(EvalState & state, bool strict, bool location,
|
||||
Value & v, XMLWriter & doc, PathSet & context, PathSet & drvsSeen,
|
||||
Value & v, XMLWriter & doc, NixStringContext & context, PathSet & drvsSeen,
|
||||
const PosIdx pos);
|
||||
|
||||
|
||||
static void posToXML(EvalState & state, XMLAttrs & xmlAttrs, const Pos & pos)
|
||||
{
|
||||
if (auto path = std::get_if<Path>(&pos.origin))
|
||||
xmlAttrs["path"] = *path;
|
||||
if (auto path = std::get_if<SourcePath>(&pos.origin))
|
||||
xmlAttrs["path"] = path->path.abs();
|
||||
xmlAttrs["line"] = fmt("%1%", pos.line);
|
||||
xmlAttrs["column"] = fmt("%1%", pos.column);
|
||||
}
|
||||
|
||||
|
||||
static void showAttrs(EvalState & state, bool strict, bool location,
|
||||
Bindings & attrs, XMLWriter & doc, PathSet & context, PathSet & drvsSeen)
|
||||
Bindings & attrs, XMLWriter & doc, NixStringContext & context, PathSet & drvsSeen)
|
||||
{
|
||||
StringSet names;
|
||||
|
||||
@ -54,7 +54,7 @@ static void showAttrs(EvalState & state, bool strict, bool location,
|
||||
|
||||
|
||||
static void printValueAsXML(EvalState & state, bool strict, bool location,
|
||||
Value & v, XMLWriter & doc, PathSet & context, PathSet & drvsSeen,
|
||||
Value & v, XMLWriter & doc, NixStringContext & context, PathSet & drvsSeen,
|
||||
const PosIdx pos)
|
||||
{
|
||||
checkInterrupt();
|
||||
@ -78,7 +78,7 @@ static void printValueAsXML(EvalState & state, bool strict, bool location,
|
||||
break;
|
||||
|
||||
case nPath:
|
||||
doc.writeEmptyElement("path", singletonAttrs("value", v.path));
|
||||
doc.writeEmptyElement("path", singletonAttrs("value", v.path().to_string()));
|
||||
break;
|
||||
|
||||
case nNull:
|
||||
@ -166,7 +166,7 @@ static void printValueAsXML(EvalState & state, bool strict, bool location,
|
||||
|
||||
|
||||
void ExternalValueBase::printValueAsXML(EvalState & state, bool strict,
|
||||
bool location, XMLWriter & doc, PathSet & context, PathSet & drvsSeen,
|
||||
bool location, XMLWriter & doc, NixStringContext & context, PathSet & drvsSeen,
|
||||
const PosIdx pos) const
|
||||
{
|
||||
doc.writeEmptyElement("unevaluated");
|
||||
@ -174,7 +174,7 @@ void ExternalValueBase::printValueAsXML(EvalState & state, bool strict,
|
||||
|
||||
|
||||
void printValueAsXML(EvalState & state, bool strict, bool location,
|
||||
Value & v, std::ostream & out, PathSet & context, const PosIdx pos)
|
||||
Value & v, std::ostream & out, NixStringContext & context, const PosIdx pos)
|
||||
{
|
||||
XMLWriter doc(true, out);
|
||||
XMLOpenElement root(doc, "expr");
|
||||
|
@ -10,6 +10,6 @@
|
||||
namespace nix {
|
||||
|
||||
void printValueAsXML(EvalState & state, bool strict, bool location,
|
||||
Value & v, std::ostream & out, PathSet & context, const PosIdx pos);
|
||||
Value & v, std::ostream & out, NixStringContext & context, const PosIdx pos);
|
||||
|
||||
}
|
||||
|
@ -5,6 +5,7 @@
|
||||
|
||||
#include "symbol-table.hh"
|
||||
#include "value/context.hh"
|
||||
#include "input-accessor.hh"
|
||||
|
||||
#if HAVE_BOEHMGC
|
||||
#include <gc/gc_allocator.h>
|
||||
@ -100,7 +101,7 @@ class ExternalValueBase
|
||||
* Coerce the value to a string. Defaults to uncoercable, i.e. throws an
|
||||
* error.
|
||||
*/
|
||||
virtual std::string coerceToString(const Pos & pos, PathSet & context, bool copyMore, bool copyToStore) const;
|
||||
virtual std::string coerceToString(const Pos & pos, NixStringContext & context, bool copyMore, bool copyToStore) const;
|
||||
|
||||
/**
|
||||
* Compare to another value of the same type. Defaults to uncomparable,
|
||||
@ -112,13 +113,13 @@ class ExternalValueBase
|
||||
* Print the value as JSON. Defaults to unconvertable, i.e. throws an error
|
||||
*/
|
||||
virtual nlohmann::json printValueAsJSON(EvalState & state, bool strict,
|
||||
PathSet & context, bool copyToStore = true) const;
|
||||
NixStringContext & context, bool copyToStore = true) const;
|
||||
|
||||
/**
|
||||
* Print the value as XML. Defaults to unevaluated
|
||||
*/
|
||||
virtual void printValueAsXML(EvalState & state, bool strict, bool location,
|
||||
XMLWriter & doc, PathSet & context, PathSet & drvsSeen,
|
||||
XMLWriter & doc, NixStringContext & context, PathSet & drvsSeen,
|
||||
const PosIdx pos) const;
|
||||
|
||||
virtual ~ExternalValueBase()
|
||||
@ -188,7 +189,7 @@ public:
|
||||
const char * * context; // must be in sorted order
|
||||
} string;
|
||||
|
||||
const char * path;
|
||||
const char * _path;
|
||||
Bindings * attrs;
|
||||
struct {
|
||||
size_t size;
|
||||
@ -268,19 +269,24 @@ public:
|
||||
|
||||
void mkString(std::string_view s);
|
||||
|
||||
void mkString(std::string_view s, const PathSet & context);
|
||||
void mkString(std::string_view s, const NixStringContext & context);
|
||||
|
||||
void mkStringMove(const char * s, const PathSet & context);
|
||||
void mkStringMove(const char * s, const NixStringContext & context);
|
||||
|
||||
inline void mkPath(const char * s)
|
||||
inline void mkString(const Symbol & s)
|
||||
{
|
||||
mkString(((const std::string &) s).c_str());
|
||||
}
|
||||
|
||||
void mkPath(const SourcePath & path);
|
||||
|
||||
inline void mkPath(const char * path)
|
||||
{
|
||||
clearValue();
|
||||
internalType = tPath;
|
||||
path = s;
|
||||
_path = path;
|
||||
}
|
||||
|
||||
void mkPath(std::string_view s);
|
||||
|
||||
inline void mkNull()
|
||||
{
|
||||
clearValue();
|
||||
@ -394,8 +400,6 @@ public:
|
||||
*/
|
||||
bool isTrivial() const;
|
||||
|
||||
NixStringContext getContext(const Store &);
|
||||
|
||||
auto listItems()
|
||||
{
|
||||
struct ListIterable
|
||||
@ -423,6 +427,18 @@ public:
|
||||
auto begin = listElems();
|
||||
return ConstListIterable { begin, begin + listSize() };
|
||||
}
|
||||
|
||||
SourcePath path() const
|
||||
{
|
||||
assert(internalType == tPath);
|
||||
return SourcePath{CanonPath(_path)};
|
||||
}
|
||||
|
||||
std::string_view str() const
|
||||
{
|
||||
assert(internalType == tString);
|
||||
return std::string_view(string.s);
|
||||
}
|
||||
};
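The Value changes above replace the raw `const char * path` member with a typed accessor. A minimal fragment of how the new surface reads, as an illustration only, assuming the modified header is value.hh, that `mkPath(const SourcePath &)` is defined as declared, and that the new input-accessor.hh include supplies the path types:

    #include <cassert>
    #include "value.hh"   // Value, nPath; SourcePath via input-accessor.hh

    int main()
    {
        using namespace nix;
        Value v;
        v.mkPath(SourcePath { CanonPath("/etc/hosts") }); // typed path constructor
        assert(v.type() == nPath);
        SourcePath p = v.path();        // accessor replaces the old v.path field
        assert(p.path.abs() == "/etc/hosts");
    }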
|
||||
|
||||
|
||||
|
@ -1,11 +1,10 @@
|
||||
#include "value/context.hh"
|
||||
#include "store-api.hh"
|
||||
|
||||
#include <optional>
|
||||
|
||||
namespace nix {
|
||||
|
||||
NixStringContextElem NixStringContextElem::parse(const Store & store, std::string_view s0)
|
||||
NixStringContextElem NixStringContextElem::parse(std::string_view s0)
|
||||
{
|
||||
std::string_view s = s0;
|
||||
|
||||
@ -25,41 +24,41 @@ NixStringContextElem NixStringContextElem::parse(const Store & store, std::strin
|
||||
"String content element beginning with '!' should have a second '!'");
|
||||
}
|
||||
return NixStringContextElem::Built {
|
||||
.drvPath = store.parseStorePath(s.substr(index + 1)),
|
||||
.drvPath = StorePath { s.substr(index + 1) },
|
||||
.output = std::string(s.substr(0, index)),
|
||||
};
|
||||
}
|
||||
case '=': {
|
||||
return NixStringContextElem::DrvDeep {
|
||||
.drvPath = store.parseStorePath(s.substr(1)),
|
||||
.drvPath = StorePath { s.substr(1) },
|
||||
};
|
||||
}
|
||||
default: {
|
||||
return NixStringContextElem::Opaque {
|
||||
.path = store.parseStorePath(s),
|
||||
.path = StorePath { s },
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
std::string NixStringContextElem::to_string(const Store & store) const {
|
||||
std::string NixStringContextElem::to_string() const {
|
||||
return std::visit(overloaded {
|
||||
[&](const NixStringContextElem::Built & b) {
|
||||
std::string res;
|
||||
res += '!';
|
||||
res += b.output;
|
||||
res += '!';
|
||||
res += store.printStorePath(b.drvPath);
|
||||
res += b.drvPath.to_string();
|
||||
return res;
|
||||
},
|
||||
[&](const NixStringContextElem::DrvDeep & d) {
|
||||
std::string res;
|
||||
res += '=';
|
||||
res += store.printStorePath(d.drvPath);
|
||||
res += d.drvPath.to_string();
|
||||
return res;
|
||||
},
|
||||
[&](const NixStringContextElem::Opaque & o) {
|
||||
return store.printStorePath(o.path);
|
||||
return std::string { o.path.to_string() };
|
||||
},
|
||||
}, raw());
|
||||
}
|
||||
|
@ -26,8 +26,6 @@ public:
|
||||
}
|
||||
};
|
||||
|
||||
class Store;
|
||||
|
||||
/**
|
||||
* Plain opaque path to some store object.
|
||||
*
|
||||
@ -80,12 +78,15 @@ struct NixStringContextElem : _NixStringContextElem_Raw {
|
||||
using DrvDeep = NixStringContextElem_DrvDeep;
|
||||
using Built = NixStringContextElem_Built;
|
||||
|
||||
inline const Raw & raw() const {
|
||||
inline const Raw & raw() const & {
|
||||
return static_cast<const Raw &>(*this);
|
||||
}
|
||||
inline Raw & raw() {
|
||||
inline Raw & raw() & {
|
||||
return static_cast<Raw &>(*this);
|
||||
}
|
||||
inline Raw && raw() && {
|
||||
return static_cast<Raw &&>(*this);
|
||||
}
|
||||
|
||||
/**
|
||||
* Decode a context string, one of:
|
||||
@ -93,10 +94,10 @@ struct NixStringContextElem : _NixStringContextElem_Raw {
|
||||
* - ‘=<path>’
|
||||
* - ‘!<name>!<path>’
|
||||
*/
|
||||
static NixStringContextElem parse(const Store & store, std::string_view s);
|
||||
std::string to_string(const Store & store) const;
|
||||
static NixStringContextElem parse(std::string_view s);
|
||||
std::string to_string() const;
|
||||
};
|
||||
|
||||
typedef std::vector<NixStringContextElem> NixStringContext;
|
||||
typedef std::set<NixStringContextElem> NixStringContext;
|
||||
|
||||
}
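Since parse() and to_string() no longer take a Store, a context element round-trips through a bare string whose store path is written without the /nix/store prefix, as the unit tests earlier in this diff show. A small sketch assuming the value/context.hh header above:

    #include <cassert>
    #include <string>
    #include <string_view>
    #include <variant>
    #include "value/context.hh"   // NixStringContextElem, as changed above

    int main()
    {
        using namespace nix;
        // '=' marks a "drv deep" element; the plain and '!<output>!' forms exist too.
        std::string_view s = "=g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x.drv";
        auto elem = NixStringContextElem::parse(s);
        assert(std::get_if<NixStringContextElem::DrvDeep>(&elem));
        assert(elem.to_string() == std::string(s));
    }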
|
||||
|
@ -210,7 +210,13 @@ StorePath Input::computeStorePath(Store & store) const
|
||||
auto narHash = getNarHash();
|
||||
if (!narHash)
|
||||
throw Error("cannot compute store path for unlocked input '%s'", to_string());
|
||||
return store.makeFixedOutputPath(FileIngestionMethod::Recursive, *narHash, getName());
|
||||
return store.makeFixedOutputPath(getName(), FixedOutputInfo {
|
||||
.hash = {
|
||||
.method = FileIngestionMethod::Recursive,
|
||||
.hash = *narHash,
|
||||
},
|
||||
.references = {},
|
||||
});
|
||||
}
|
||||
|
||||
std::string Input::getType() const
|
||||
|
@ -21,7 +21,7 @@ struct DownloadUrl
|
||||
};
|
||||
|
||||
// A github, gitlab, or sourcehut host
|
||||
const static std::string hostRegexS = "[a-zA-Z0-9.]*"; // FIXME: check
|
||||
const static std::string hostRegexS = "[a-zA-Z0-9.-]*"; // FIXME: check
|
||||
std::regex hostRegex(hostRegexS, std::regex::ECMAScript);
|
||||
|
||||
struct GitArchiveInputScheme : InputScheme
|
||||
|
100
src/libfetchers/input-accessor.cc
Normal file
@ -0,0 +1,100 @@
|
||||
#include "input-accessor.hh"
|
||||
#include "store-api.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
std::ostream & operator << (std::ostream & str, const SourcePath & path)
|
||||
{
|
||||
str << path.to_string();
|
||||
return str;
|
||||
}
|
||||
|
||||
std::string_view SourcePath::baseName() const
|
||||
{
|
||||
return path.baseName().value_or("source");
|
||||
}
|
||||
|
||||
SourcePath SourcePath::parent() const
|
||||
{
|
||||
auto p = path.parent();
|
||||
assert(p);
|
||||
return std::move(*p);
|
||||
}
|
||||
|
||||
InputAccessor::Stat SourcePath::lstat() const
|
||||
{
|
||||
auto st = nix::lstat(path.abs());
|
||||
return InputAccessor::Stat {
|
||||
.type =
|
||||
S_ISREG(st.st_mode) ? InputAccessor::tRegular :
|
||||
S_ISDIR(st.st_mode) ? InputAccessor::tDirectory :
|
||||
S_ISLNK(st.st_mode) ? InputAccessor::tSymlink :
|
||||
InputAccessor::tMisc,
|
||||
.isExecutable = S_ISREG(st.st_mode) && st.st_mode & S_IXUSR
|
||||
};
|
||||
}
|
||||
|
||||
std::optional<InputAccessor::Stat> SourcePath::maybeLstat() const
|
||||
{
|
||||
// FIXME: merge these into one operation.
|
||||
if (!pathExists())
|
||||
return {};
|
||||
return lstat();
|
||||
}
|
||||
|
||||
InputAccessor::DirEntries SourcePath::readDirectory() const
|
||||
{
|
||||
InputAccessor::DirEntries res;
|
||||
for (auto & entry : nix::readDirectory(path.abs())) {
|
||||
std::optional<InputAccessor::Type> type;
|
||||
switch (entry.type) {
|
||||
case DT_REG: type = InputAccessor::Type::tRegular; break;
|
||||
case DT_LNK: type = InputAccessor::Type::tSymlink; break;
|
||||
case DT_DIR: type = InputAccessor::Type::tDirectory; break;
|
||||
}
|
||||
res.emplace(entry.name, type);
|
||||
}
|
||||
return res;
|
||||
}
|
||||
|
||||
StorePath SourcePath::fetchToStore(
|
||||
ref<Store> store,
|
||||
std::string_view name,
|
||||
PathFilter * filter,
|
||||
RepairFlag repair) const
|
||||
{
|
||||
return
|
||||
settings.readOnlyMode
|
||||
? store->computeStorePathForPath(name, path.abs(), FileIngestionMethod::Recursive, htSHA256, filter ? *filter : defaultPathFilter).first
|
||||
: store->addToStore(name, path.abs(), FileIngestionMethod::Recursive, htSHA256, filter ? *filter : defaultPathFilter, repair);
|
||||
}
|
||||
|
||||
SourcePath SourcePath::resolveSymlinks() const
|
||||
{
|
||||
SourcePath res(CanonPath::root);
|
||||
|
||||
int linksAllowed = 1024;
|
||||
|
||||
for (auto & component : path) {
|
||||
res.path.push(component);
|
||||
while (true) {
|
||||
if (auto st = res.maybeLstat()) {
|
||||
if (!linksAllowed--)
|
||||
throw Error("infinite symlink recursion in path '%s'", path);
|
||||
if (st->type != InputAccessor::tSymlink) break;
|
||||
auto target = res.readLink();
|
||||
if (hasPrefix(target, "/"))
|
||||
res = CanonPath(target);
|
||||
else {
|
||||
res.path.pop();
|
||||
res.path.extend(CanonPath(target));
|
||||
}
|
||||
} else
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
return res;
|
||||
}
|
||||
|
||||
}
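The new `SourcePath` helpers above add up to a small path API over `CanonPath`. A hedged sketch of caller-side use; the function name and the idea of resolving before importing are illustrative, not part of the diff:

```cpp
#include "input-accessor.hh"
#include "store-api.hh"

// Resolve any symlinks, then copy the result into the store as a NAR,
// relying on the readOnlyMode-aware branch inside fetchToStore() above.
nix::StorePath importSource(nix::ref<nix::Store> store, const nix::CanonPath & p)
{
    nix::SourcePath sp { p };
    auto resolved = sp.resolveSymlinks(); // no element of the result is a symlink
    return resolved.fetchToStore(store, resolved.baseName());
}
```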
|
167
src/libfetchers/input-accessor.hh
Normal file
@ -0,0 +1,167 @@
|
||||
#pragma once
|
||||
|
||||
#include "ref.hh"
|
||||
#include "types.hh"
|
||||
#include "archive.hh"
|
||||
#include "canon-path.hh"
|
||||
#include "repair-flag.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
class StorePath;
|
||||
class Store;
|
||||
|
||||
struct InputAccessor
|
||||
{
|
||||
enum Type {
|
||||
tRegular, tSymlink, tDirectory,
|
||||
/**
|
||||
Any other node types that may be encountered on the file system, such as device nodes, sockets, named pipes, and possibly even more exotic things.
|
||||
|
||||
Responsible for `"unknown"` from `builtins.readFileType "/dev/null"`.
|
||||
|
||||
Unlike `DT_UNKNOWN`, this must not be used for deferring the lookup of types.
|
||||
*/
|
||||
tMisc
|
||||
};
|
||||
|
||||
struct Stat
|
||||
{
|
||||
Type type = tMisc;
|
||||
//uint64_t fileSize = 0; // regular files only
|
||||
bool isExecutable = false; // regular files only
|
||||
};
|
||||
|
||||
typedef std::optional<Type> DirEntry;
|
||||
|
||||
typedef std::map<std::string, DirEntry> DirEntries;
|
||||
};
|
||||
|
||||
/**
|
||||
* An abstraction for accessing source files during
|
||||
* evaluation. Currently, it's just a wrapper around `CanonPath` that
|
||||
* accesses files in the regular filesystem, but in the future it will
|
||||
* support fetching files in other ways.
|
||||
*/
|
||||
struct SourcePath
|
||||
{
|
||||
CanonPath path;
|
||||
|
||||
SourcePath(CanonPath path)
|
||||
: path(std::move(path))
|
||||
{ }
|
||||
|
||||
std::string_view baseName() const;
|
||||
|
||||
/**
|
||||
* Construct the parent of this `SourcePath`. Aborts if `this`
|
||||
* denotes the root.
|
||||
*/
|
||||
SourcePath parent() const;
|
||||
|
||||
/**
|
||||
* If this `SourcePath` denotes a regular file (not a symlink),
|
||||
* return its contents; otherwise throw an error.
|
||||
*/
|
||||
std::string readFile() const
|
||||
{ return nix::readFile(path.abs()); }
|
||||
|
||||
/**
|
||||
* Return whether this `SourcePath` denotes a file (of any type)
|
||||
* that exists
|
||||
*/
|
||||
bool pathExists() const
|
||||
{ return nix::pathExists(path.abs()); }
|
||||
|
||||
/**
|
||||
* Return stats about this `SourcePath`, or throw an exception if
|
||||
* it doesn't exist.
|
||||
*/
|
||||
InputAccessor::Stat lstat() const;
|
||||
|
||||
/**
|
||||
* Return stats about this `SourcePath`, or std::nullopt if it
|
||||
* doesn't exist.
|
||||
*/
|
||||
std::optional<InputAccessor::Stat> maybeLstat() const;
|
||||
|
||||
/**
|
||||
* If this `SourcePath` denotes a directory (not a symlink),
|
||||
* return its directory entries; otherwise throw an error.
|
||||
*/
|
||||
InputAccessor::DirEntries readDirectory() const;
|
||||
|
||||
/**
|
||||
* If this `SourcePath` denotes a symlink, return its target;
|
||||
* otherwise throw an error.
|
||||
*/
|
||||
std::string readLink() const
|
||||
{ return nix::readLink(path.abs()); }
|
||||
|
||||
/**
|
||||
* Dump this `SourcePath` to `sink` as a NAR archive.
|
||||
*/
|
||||
void dumpPath(
|
||||
Sink & sink,
|
||||
PathFilter & filter = defaultPathFilter) const
|
||||
{ return nix::dumpPath(path.abs(), sink, filter); }
|
||||
|
||||
/**
|
||||
* Copy this `SourcePath` to the Nix store.
|
||||
*/
|
||||
StorePath fetchToStore(
|
||||
ref<Store> store,
|
||||
std::string_view name = "source",
|
||||
PathFilter * filter = nullptr,
|
||||
RepairFlag repair = NoRepair) const;
|
||||
|
||||
/**
|
||||
* Return the location of this path in the "real" filesystem, if
|
||||
* it has a physical location.
|
||||
*/
|
||||
std::optional<CanonPath> getPhysicalPath() const
|
||||
{ return path; }
|
||||
|
||||
std::string to_string() const
|
||||
{ return path.abs(); }
|
||||
|
||||
/**
|
||||
* Append a `CanonPath` to this path.
|
||||
*/
|
||||
SourcePath operator + (const CanonPath & x) const
|
||||
{ return {path + x}; }
|
||||
|
||||
/**
|
||||
* Append a single component `c` to this path. `c` must not
|
||||
* contain a slash. A slash is implicitly added between this path
|
||||
* and `c`.
|
||||
*/
|
||||
SourcePath operator + (std::string_view c) const
|
||||
{ return {path + c}; }
|
||||
|
||||
bool operator == (const SourcePath & x) const
|
||||
{
|
||||
return path == x.path;
|
||||
}
|
||||
|
||||
bool operator != (const SourcePath & x) const
|
||||
{
|
||||
return path != x.path;
|
||||
}
|
||||
|
||||
bool operator < (const SourcePath & x) const
|
||||
{
|
||||
return path < x.path;
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolve any symlinks in this `SourcePath` (including its
|
||||
* parents). The result is a `SourcePath` in which no element is a
|
||||
* symlink.
|
||||
*/
|
||||
SourcePath resolveSymlinks() const;
|
||||
};
|
||||
|
||||
std::ostream & operator << (std::ostream & str, const SourcePath & path);
|
||||
|
||||
}
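A second sketch for the read-side helpers declared above (`readDirectory`, `lstat`, `operator+`); the traversal itself is illustrative only:

```cpp
#include <cstddef>
#include "input-accessor.hh"

// Count regular files directly inside `dir`, falling back to lstat()
// when readDirectory() could not determine the entry type.
size_t countRegularFiles(const nix::SourcePath & dir)
{
    size_t n = 0;
    for (auto & [name, type] : dir.readDirectory()) {
        auto t = type ? *type : (dir + name).lstat().type;
        if (t == nix::InputAccessor::tRegular) n++;
    }
    return n;
}
```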
|
@ -71,15 +71,19 @@ DownloadFileResult downloadFile(
|
||||
dumpString(res.data, sink);
|
||||
auto hash = hashString(htSHA256, res.data);
|
||||
ValidPathInfo info {
|
||||
store->makeFixedOutputPath(FileIngestionMethod::Flat, hash, name),
|
||||
*store,
|
||||
name,
|
||||
FixedOutputInfo {
|
||||
.hash = {
|
||||
.method = FileIngestionMethod::Flat,
|
||||
.hash = hash,
|
||||
},
|
||||
.references = {},
|
||||
},
|
||||
hashString(htSHA256, sink.s),
|
||||
};
|
||||
info.narSize = sink.s.size();
|
||||
info.ca = FixedOutputHash {
|
||||
.method = FileIngestionMethod::Flat,
|
||||
.hash = hash,
|
||||
};
|
||||
auto source = StringSource(sink.s);
|
||||
auto source = StringSource { sink.s };
|
||||
store->addToStore(info, source, NoRepair, NoCheckSigs);
|
||||
storePath = std::move(info.path);
|
||||
}
|
||||
|
@ -2,6 +2,7 @@
|
||||
///@file
|
||||
|
||||
#include "args.hh"
|
||||
#include "repair-flag.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
@ -49,4 +50,21 @@ struct MixJSON : virtual Args
|
||||
}
|
||||
};
|
||||
|
||||
struct MixRepair : virtual Args
|
||||
{
|
||||
RepairFlag repair = NoRepair;
|
||||
|
||||
MixRepair()
|
||||
{
|
||||
addFlag({
|
||||
.longName = "repair",
|
||||
.description =
|
||||
"During evaluation, rewrite missing or corrupted files in the Nix store. "
|
||||
"During building, rebuild missing or corrupted store paths.",
|
||||
.category = miscCategory,
|
||||
.handler = {&repair, Repair},
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
}
|
||||
|
@ -10,7 +10,6 @@
|
||||
#include <cctype>
|
||||
#include <exception>
|
||||
#include <iostream>
|
||||
#include <mutex>
|
||||
|
||||
#include <cstdlib>
|
||||
#include <sys/time.h>
|
||||
@ -20,16 +19,9 @@
|
||||
#ifdef __linux__
|
||||
#include <features.h>
|
||||
#endif
|
||||
#ifdef __GLIBC__
|
||||
#include <gnu/lib-names.h>
|
||||
#include <nss.h>
|
||||
#include <dlfcn.h>
|
||||
#endif
|
||||
|
||||
#include <openssl/crypto.h>
|
||||
|
||||
#include <sodium.h>
|
||||
|
||||
|
||||
namespace nix {
|
||||
|
||||
@ -115,57 +107,6 @@ std::string getArg(const std::string & opt,
|
||||
return *i;
|
||||
}
|
||||
|
||||
|
||||
#if OPENSSL_VERSION_NUMBER < 0x10101000L
|
||||
/* OpenSSL is not thread-safe by default - it will randomly crash
|
||||
unless the user supplies a mutex locking function. So let's do
|
||||
that. */
|
||||
static std::vector<std::mutex> opensslLocks;
|
||||
|
||||
static void opensslLockCallback(int mode, int type, const char * file, int line)
|
||||
{
|
||||
if (mode & CRYPTO_LOCK)
|
||||
opensslLocks[type].lock();
|
||||
else
|
||||
opensslLocks[type].unlock();
|
||||
}
|
||||
#endif
|
||||
|
||||
static std::once_flag dns_resolve_flag;
|
||||
|
||||
static void preloadNSS() {
|
||||
/* builtin:fetchurl can trigger a DNS lookup, which with glibc can trigger a dynamic library load of
|
||||
one of the glibc NSS libraries in a sandboxed child, which will fail unless the library's already
|
||||
been loaded in the parent. So we force a lookup of an invalid domain to force the NSS machinery to
|
||||
load its lookup libraries in the parent before any child gets a chance to. */
|
||||
std::call_once(dns_resolve_flag, []() {
|
||||
#ifdef __GLIBC__
|
||||
/* On linux, glibc will run every lookup through the nss layer.
|
||||
* That means every lookup goes, by default, through nscd, which acts as a local
|
||||
* cache.
|
||||
* Because we run builds in a sandbox, we also remove access to nscd otherwise
|
||||
* lookups would leak into the sandbox.
|
||||
*
|
||||
* But now we have a new problem, we need to make sure the nss_dns backend that
|
||||
* does the dns lookups when nscd is not available is loaded or available.
|
||||
*
|
||||
* We can't make it available without leaking nix's environment, so instead we'll
|
||||
* load the backend, and configure nss so it does not try to run dns lookups
|
||||
* through nscd.
|
||||
*
|
||||
* This is technically only used for builtins:fetch* functions so we only care
|
||||
* about dns.
|
||||
*
|
||||
* All other platforms are unaffected.
|
||||
*/
|
||||
if (!dlopen(LIBNSS_DNS_SO, RTLD_NOW))
|
||||
warn("unable to load nss_dns backend");
|
||||
// FIXME: get hosts entry from nsswitch.conf.
|
||||
__nss_configure_lookup("hosts", "files dns");
|
||||
#endif
|
||||
});
|
||||
}
|
||||
|
||||
static void sigHandler(int signo) { }
|
||||
|
||||
|
||||
@ -177,16 +118,7 @@ void initNix()
|
||||
std::cerr.rdbuf()->pubsetbuf(buf, sizeof(buf));
|
||||
#endif
|
||||
|
||||
#if OPENSSL_VERSION_NUMBER < 0x10101000L
|
||||
/* Initialise OpenSSL locking. */
|
||||
opensslLocks = std::vector<std::mutex>(CRYPTO_num_locks());
|
||||
CRYPTO_set_locking_callback(opensslLockCallback);
|
||||
#endif
|
||||
|
||||
if (sodium_init() == -1)
|
||||
throw Error("could not initialise libsodium");
|
||||
|
||||
loadConfFile();
|
||||
initLibStore();
|
||||
|
||||
startSignalHandlerThread();
|
||||
|
||||
@ -223,7 +155,10 @@ void initNix()
|
||||
if (sigaction(SIGTRAP, &act, 0)) throw SysError("handling SIGTRAP");
|
||||
#endif
|
||||
|
||||
/* Register a SIGSEGV handler to detect stack overflows. */
|
||||
/* Register a SIGSEGV handler to detect stack overflows.
|
||||
Why not initLibExpr()? initGC() is essentially that, but
|
||||
detectStackOverflow is not an instance of the init function concept, as
|
||||
it may have to be invoked more than once per process. */
|
||||
detectStackOverflow();
|
||||
|
||||
/* There is no privacy in the Nix system ;-) At least not for
|
||||
@ -236,16 +171,6 @@ void initNix()
|
||||
gettimeofday(&tv, 0);
|
||||
srandom(tv.tv_usec);
|
||||
|
||||
/* On macOS, don't use the per-session TMPDIR (as set e.g. by
|
||||
sshd). This breaks build users because they don't have access
|
||||
to the TMPDIR, in particular in ‘nix-store --serve’. */
|
||||
#if __APPLE__
|
||||
if (hasPrefix(getEnv("TMPDIR").value_or("/tmp"), "/var/folders/"))
|
||||
unsetenv("TMPDIR");
|
||||
#endif
|
||||
|
||||
preloadNSS();
|
||||
initLibStore();
|
||||
}
|
||||
|
||||
|
||||
|
@ -306,11 +306,22 @@ StorePath BinaryCacheStore::addToStoreFromDump(Source & dump, std::string_view n
|
||||
unsupported("addToStoreFromDump");
|
||||
return addToStoreCommon(dump, repair, CheckSigs, [&](HashResult nar) {
|
||||
ValidPathInfo info {
|
||||
makeFixedOutputPath(method, nar.first, name, references),
|
||||
*this,
|
||||
name,
|
||||
FixedOutputInfo {
|
||||
.hash = {
|
||||
.method = method,
|
||||
.hash = nar.first,
|
||||
},
|
||||
.references = {
|
||||
.others = references,
|
||||
// caller is not capable of creating a self-reference, because this is content-addressed without modulus
|
||||
.self = false,
|
||||
},
|
||||
},
|
||||
nar.first,
|
||||
};
|
||||
info.narSize = nar.second;
|
||||
info.references = references;
|
||||
return info;
|
||||
})->path;
|
||||
}
|
||||
@ -414,15 +425,22 @@ StorePath BinaryCacheStore::addToStore(
|
||||
});
|
||||
return addToStoreCommon(*source, repair, CheckSigs, [&](HashResult nar) {
|
||||
ValidPathInfo info {
|
||||
makeFixedOutputPath(method, h, name, references),
|
||||
*this,
|
||||
name,
|
||||
FixedOutputInfo {
|
||||
.hash = {
|
||||
.method = method,
|
||||
.hash = h,
|
||||
},
|
||||
.references = {
|
||||
.others = references,
|
||||
// caller is not capable of creating a self-reference, because this is content-addressed without modulus
|
||||
.self = false,
|
||||
},
|
||||
},
|
||||
nar.first,
|
||||
};
|
||||
info.narSize = nar.second;
|
||||
info.references = references;
|
||||
info.ca = FixedOutputHash {
|
||||
.method = method,
|
||||
.hash = h,
|
||||
};
|
||||
return info;
|
||||
})->path;
|
||||
}
|
||||
@ -434,7 +452,7 @@ StorePath BinaryCacheStore::addTextToStore(
|
||||
RepairFlag repair)
|
||||
{
|
||||
auto textHash = hashString(htSHA256, s);
|
||||
auto path = makeTextPath(name, textHash, references);
|
||||
auto path = makeTextPath(name, TextInfo { { textHash }, references });
|
||||
|
||||
if (!repair && isValidPath(path))
|
||||
return path;
|
||||
@ -443,10 +461,16 @@ StorePath BinaryCacheStore::addTextToStore(
|
||||
dumpString(s, sink);
|
||||
StringSource source(sink.s);
|
||||
return addToStoreCommon(source, repair, CheckSigs, [&](HashResult nar) {
|
||||
ValidPathInfo info { path, nar.first };
|
||||
ValidPathInfo info {
|
||||
*this,
|
||||
std::string { name },
|
||||
TextInfo {
|
||||
{ .hash = textHash },
|
||||
references,
|
||||
},
|
||||
nar.first,
|
||||
};
|
||||
info.narSize = nar.second;
|
||||
info.ca = TextHash { textHash };
|
||||
info.references = references;
|
||||
return info;
|
||||
})->path;
|
||||
}
|
||||
|
@ -83,16 +83,11 @@ struct BuildResult
|
||||
*/
|
||||
bool isNonDeterministic = false;
|
||||
|
||||
/**
|
||||
* The derivation we built or the store path we substituted.
|
||||
*/
|
||||
DerivedPath path;
|
||||
|
||||
/**
|
||||
* For derivations, a mapping from the names of the wanted outputs
|
||||
* to actual paths.
|
||||
*/
|
||||
DrvOutputs builtOutputs;
|
||||
SingleDrvOutputs builtOutputs;
|
||||
|
||||
/**
|
||||
* The start/stop times of the build (or one of the rounds, if it
|
||||
@ -116,4 +111,15 @@ struct BuildResult
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* A `BuildResult` together with its "primary key".
|
||||
*/
|
||||
struct KeyedBuildResult : BuildResult
|
||||
{
|
||||
/**
|
||||
* The derivation we built or the store path we substituted.
|
||||
*/
|
||||
DerivedPath path;
|
||||
};
|
||||
|
||||
}
|
||||
|
@ -145,8 +145,20 @@ void DerivationGoal::work()
|
||||
void DerivationGoal::addWantedOutputs(const OutputsSpec & outputs)
|
||||
{
|
||||
auto newWanted = wantedOutputs.union_(outputs);
|
||||
if (!newWanted.isSubsetOf(wantedOutputs))
|
||||
needRestart = true;
|
||||
switch (needRestart) {
|
||||
case NeedRestartForMoreOutputs::OutputsUnmodifedDontNeed:
|
||||
if (!newWanted.isSubsetOf(wantedOutputs))
|
||||
needRestart = NeedRestartForMoreOutputs::OutputsAddedDoNeed;
|
||||
break;
|
||||
case NeedRestartForMoreOutputs::OutputsAddedDoNeed:
|
||||
/* No need to check whether we added more outputs, because a
|
||||
restart is already queued up. */
|
||||
break;
|
||||
case NeedRestartForMoreOutputs::BuildInProgressWillNotNeed:
|
||||
/* We are already building all outputs, so it doesn't matter if
|
||||
we now want more. */
|
||||
break;
|
||||
};
|
||||
wantedOutputs = newWanted;
|
||||
}
|
||||
|
||||
@ -297,12 +309,29 @@ void DerivationGoal::outputsSubstitutionTried()
|
||||
In particular, it may be the case that the hole in the closure is
|
||||
an output of the current derivation, which causes a loop if retried.
|
||||
*/
|
||||
if (nrIncompleteClosure > 0 && nrIncompleteClosure == nrFailed) retrySubstitution = true;
|
||||
{
|
||||
bool substitutionFailed =
|
||||
nrIncompleteClosure > 0 &&
|
||||
nrIncompleteClosure == nrFailed;
|
||||
switch (retrySubstitution) {
|
||||
case RetrySubstitution::NoNeed:
|
||||
if (substitutionFailed)
|
||||
retrySubstitution = RetrySubstitution::YesNeed;
|
||||
break;
|
||||
case RetrySubstitution::YesNeed:
|
||||
// Should not be able to reach this state from here.
|
||||
assert(false);
|
||||
break;
|
||||
case RetrySubstitution::AlreadyRetried:
|
||||
debug("substitution failed again, but we already retried once. Not retrying again.");
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
nrFailed = nrNoSubstituters = nrIncompleteClosure = 0;
|
||||
|
||||
if (needRestart) {
|
||||
needRestart = false;
|
||||
if (needRestart == NeedRestartForMoreOutputs::OutputsAddedDoNeed) {
|
||||
needRestart = NeedRestartForMoreOutputs::OutputsUnmodifedDontNeed;
|
||||
haveDerivation();
|
||||
return;
|
||||
}
|
||||
@ -330,6 +359,10 @@ void DerivationGoal::outputsSubstitutionTried()
|
||||
produced using a substitute. So we have to build instead. */
|
||||
void DerivationGoal::gaveUpOnSubstitution()
|
||||
{
|
||||
/* At this point we are building all outputs, so if more are wanted there
|
||||
is no need to restart. */
|
||||
needRestart = NeedRestartForMoreOutputs::BuildInProgressWillNotNeed;
|
||||
|
||||
/* The inputs must be built before we can build this goal. */
|
||||
inputDrvOutputs.clear();
|
||||
if (useDerivation)
|
||||
@ -451,8 +484,8 @@ void DerivationGoal::inputsRealised()
|
||||
return;
|
||||
}
|
||||
|
||||
if (retrySubstitution && !retriedSubstitution) {
|
||||
retriedSubstitution = true;
|
||||
if (retrySubstitution == RetrySubstitution::YesNeed) {
|
||||
retrySubstitution = RetrySubstitution::AlreadyRetried;
|
||||
haveDerivation();
|
||||
return;
|
||||
}
|
||||
@ -570,8 +603,6 @@ void DerivationGoal::inputsRealised()
|
||||
build hook. */
|
||||
state = &DerivationGoal::tryToBuild;
|
||||
worker.wakeUp(shared_from_this());
|
||||
|
||||
buildResult = BuildResult { .path = buildResult.path };
|
||||
}
|
||||
|
||||
void DerivationGoal::started()
|
||||
@ -982,7 +1013,7 @@ void DerivationGoal::resolvedFinished()
|
||||
auto resolvedDrv = *resolvedDrvGoal->drv;
|
||||
auto & resolvedResult = resolvedDrvGoal->buildResult;
|
||||
|
||||
DrvOutputs builtOutputs;
|
||||
SingleDrvOutputs builtOutputs;
|
||||
|
||||
if (resolvedResult.success()) {
|
||||
auto resolvedHashes = staticOutputHashes(worker.store, resolvedDrv);
|
||||
@ -1008,7 +1039,7 @@ void DerivationGoal::resolvedFinished()
|
||||
worker.store.printStorePath(drvPath), wantedOutput);
|
||||
|
||||
auto realisation = [&]{
|
||||
auto take1 = get(resolvedResult.builtOutputs, DrvOutput { *resolvedHash, wantedOutput });
|
||||
auto take1 = get(resolvedResult.builtOutputs, wantedOutput);
|
||||
if (take1) return *take1;
|
||||
|
||||
/* The above `get` should work. But stateful tracking of
|
||||
@ -1033,7 +1064,7 @@ void DerivationGoal::resolvedFinished()
|
||||
worker.store.registerDrvOutput(newRealisation);
|
||||
}
|
||||
outputPaths.insert(realisation.outPath);
|
||||
builtOutputs.emplace(realisation.id, realisation);
|
||||
builtOutputs.emplace(wantedOutput, realisation);
|
||||
}
|
||||
|
||||
runPostBuildHook(
|
||||
@ -1158,7 +1189,7 @@ HookReply DerivationGoal::tryBuildHook()
|
||||
}
|
||||
|
||||
|
||||
DrvOutputs DerivationGoal::registerOutputs()
|
||||
SingleDrvOutputs DerivationGoal::registerOutputs()
|
||||
{
|
||||
/* When using a build hook, the build hook can register the output
|
||||
as valid (by doing `nix-store --import'). If so we don't have
|
||||
@ -1320,7 +1351,7 @@ OutputPathMap DerivationGoal::queryDerivationOutputMap()
|
||||
}
|
||||
|
||||
|
||||
std::pair<bool, DrvOutputs> DerivationGoal::checkPathValidity()
|
||||
std::pair<bool, SingleDrvOutputs> DerivationGoal::checkPathValidity()
|
||||
{
|
||||
if (!drv->type().isPure()) return { false, {} };
|
||||
|
||||
@ -1333,7 +1364,7 @@ std::pair<bool, DrvOutputs> DerivationGoal::checkPathValidity()
|
||||
return static_cast<StringSet>(names);
|
||||
},
|
||||
}, wantedOutputs.raw());
|
||||
DrvOutputs validOutputs;
|
||||
SingleDrvOutputs validOutputs;
|
||||
|
||||
for (auto & i : queryPartialDerivationOutputMap()) {
|
||||
auto initialOutput = get(initialOutputs, i.first);
|
||||
@ -1376,7 +1407,7 @@ std::pair<bool, DrvOutputs> DerivationGoal::checkPathValidity()
|
||||
}
|
||||
}
|
||||
if (info.wanted && info.known && info.known->isValid())
|
||||
validOutputs.emplace(drvOutput, Realisation { drvOutput, info.known->path });
|
||||
validOutputs.emplace(i.first, Realisation { drvOutput, info.known->path });
|
||||
}
|
||||
|
||||
// If we requested all the outputs, we are always fine.
|
||||
@ -1400,7 +1431,7 @@ std::pair<bool, DrvOutputs> DerivationGoal::checkPathValidity()
|
||||
}
|
||||
|
||||
|
||||
DrvOutputs DerivationGoal::assertPathValidity()
|
||||
SingleDrvOutputs DerivationGoal::assertPathValidity()
|
||||
{
|
||||
auto [allValid, validOutputs] = checkPathValidity();
|
||||
if (!allValid)
|
||||
@ -1411,7 +1442,7 @@ DrvOutputs DerivationGoal::assertPathValidity()
|
||||
|
||||
void DerivationGoal::done(
|
||||
BuildResult::Status status,
|
||||
DrvOutputs builtOutputs,
|
||||
SingleDrvOutputs builtOutputs,
|
||||
std::optional<Error> ex)
|
||||
{
|
||||
buildResult.status = status;
|
||||
@ -1452,12 +1483,28 @@ void DerivationGoal::waiteeDone(GoalPtr waitee, ExitCode result)
|
||||
{
|
||||
Goal::waiteeDone(waitee, result);
|
||||
|
||||
if (waitee->buildResult.success())
|
||||
if (auto bfd = std::get_if<DerivedPath::Built>(&waitee->buildResult.path))
|
||||
for (auto & [output, realisation] : waitee->buildResult.builtOutputs)
|
||||
if (!useDerivation) return;
|
||||
auto & fullDrv = *dynamic_cast<Derivation *>(drv.get());
|
||||
|
||||
auto * dg = dynamic_cast<DerivationGoal *>(&*waitee);
|
||||
if (!dg) return;
|
||||
|
||||
auto outputs = fullDrv.inputDrvs.find(dg->drvPath);
|
||||
if (outputs == fullDrv.inputDrvs.end()) return;
|
||||
|
||||
for (auto & outputName : outputs->second) {
|
||||
auto buildResult = dg->getBuildResult(DerivedPath::Built {
|
||||
.drvPath = dg->drvPath,
|
||||
.outputs = OutputsSpec::Names { outputName },
|
||||
});
|
||||
if (buildResult.success()) {
|
||||
auto i = buildResult.builtOutputs.find(outputName);
|
||||
if (i != buildResult.builtOutputs.end())
|
||||
inputDrvOutputs.insert_or_assign(
|
||||
{ bfd->drvPath, output.outputName },
|
||||
realisation.outPath);
|
||||
{ dg->drvPath, outputName },
|
||||
i->second.outPath);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
@ -78,22 +78,58 @@ struct DerivationGoal : public Goal
|
||||
*/
|
||||
std::map<std::pair<StorePath, std::string>, StorePath> inputDrvOutputs;
|
||||
|
||||
/**
|
||||
* See `needRestart`; just for that field.
|
||||
*/
|
||||
enum struct NeedRestartForMoreOutputs {
|
||||
/**
|
||||
* The goal state machine is progressing based on the current value of
|
||||
* `wantedOutputs`. No actions are needed.
|
||||
*/
|
||||
OutputsUnmodifedDontNeed,
|
||||
/**
|
||||
* `wantedOutputs` has been extended, but the state machine is
|
||||
* proceeding according to its old value, so we need to restart.
|
||||
*/
|
||||
OutputsAddedDoNeed,
|
||||
/**
|
||||
* The goal state machine has progressed to the point of doing a build,
|
||||
* in which case all outputs will be produced, so extensions to
|
||||
* `wantedOutputs` no longer require a restart.
|
||||
*/
|
||||
BuildInProgressWillNotNeed,
|
||||
};
|
||||
|
||||
/**
|
||||
* Whether additional wanted outputs have been added.
|
||||
*/
|
||||
bool needRestart = false;
|
||||
NeedRestartForMoreOutputs needRestart = NeedRestartForMoreOutputs::OutputsUnmodifedDontNeed;
|
||||
|
||||
/**
|
||||
* See `retrySubstitution`; just for that field.
|
||||
*/
|
||||
enum RetrySubstitution {
|
||||
/**
|
||||
* No issues have yet arisen, no need to restart.
|
||||
*/
|
||||
NoNeed,
|
||||
/**
|
||||
* Something failed and there is an incomplete closure. Let's retry
|
||||
* substituting.
|
||||
*/
|
||||
YesNeed,
|
||||
/**
|
||||
* We are currently retrying or have already retried substitution, and whether or
|
||||
* not something goes wrong we will not retry again.
|
||||
*/
|
||||
AlreadyRetried,
|
||||
};
|
||||
|
||||
/**
|
||||
* Whether to retry substituting the outputs after building the
|
||||
* inputs. This is done in case of an incomplete closure.
|
||||
*/
|
||||
bool retrySubstitution = false;
|
||||
|
||||
/**
|
||||
* Whether we've retried substitution, in which case we won't try
|
||||
* again.
|
||||
*/
|
||||
bool retriedSubstitution = false;
|
||||
RetrySubstitution retrySubstitution = RetrySubstitution::NoNeed;
|
||||
|
||||
/**
|
||||
* The derivation stored at drvPath.
|
||||
@ -217,7 +253,7 @@ struct DerivationGoal : public Goal
|
||||
* Check that the derivation outputs all exist and register them
|
||||
* as valid.
|
||||
*/
|
||||
virtual DrvOutputs registerOutputs();
|
||||
virtual SingleDrvOutputs registerOutputs();
|
||||
|
||||
/**
|
||||
* Open a log file and a pipe to it.
|
||||
@ -270,17 +306,17 @@ struct DerivationGoal : public Goal
|
||||
* Update 'initialOutputs' to determine the current status of the
|
||||
* outputs of the derivation. Also returns a Boolean denoting
|
||||
* whether all outputs are valid and non-corrupt, and a
|
||||
* 'DrvOutputs' structure containing the valid and wanted
|
||||
* 'SingleDrvOutputs' structure containing the valid and wanted
|
||||
* outputs.
|
||||
*/
|
||||
std::pair<bool, DrvOutputs> checkPathValidity();
|
||||
std::pair<bool, SingleDrvOutputs> checkPathValidity();
|
||||
|
||||
/**
|
||||
* Aborts if any output is not valid or corrupt, and otherwise
|
||||
* returns a 'DrvOutputs' structure containing the wanted
|
||||
* returns a 'SingleDrvOutputs' structure containing the wanted
|
||||
* outputs.
|
||||
*/
|
||||
DrvOutputs assertPathValidity();
|
||||
SingleDrvOutputs assertPathValidity();
|
||||
|
||||
/**
|
||||
* Forcibly kill the child process, if any.
|
||||
@ -293,7 +329,7 @@ struct DerivationGoal : public Goal
|
||||
|
||||
void done(
|
||||
BuildResult::Status status,
|
||||
DrvOutputs builtOutputs = {},
|
||||
SingleDrvOutputs builtOutputs = {},
|
||||
std::optional<Error> ex = {});
|
||||
|
||||
void waiteeDone(GoalPtr waitee, ExitCode result) override;
|
||||
|
@ -10,16 +10,8 @@ void Store::buildPaths(const std::vector<DerivedPath> & reqs, BuildMode buildMod
|
||||
Worker worker(*this, evalStore ? *evalStore : *this);
|
||||
|
||||
Goals goals;
|
||||
for (const auto & br : reqs) {
|
||||
std::visit(overloaded {
|
||||
[&](const DerivedPath::Built & bfd) {
|
||||
goals.insert(worker.makeDerivationGoal(bfd.drvPath, bfd.outputs, buildMode));
|
||||
},
|
||||
[&](const DerivedPath::Opaque & bo) {
|
||||
goals.insert(worker.makePathSubstitutionGoal(bo.path, buildMode == bmRepair ? Repair : NoRepair));
|
||||
},
|
||||
}, br.raw());
|
||||
}
|
||||
for (auto & br : reqs)
|
||||
goals.insert(worker.makeGoal(br, buildMode));
|
||||
|
||||
worker.run(goals);
|
||||
|
||||
@ -47,7 +39,7 @@ void Store::buildPaths(const std::vector<DerivedPath> & reqs, BuildMode buildMod
|
||||
}
|
||||
}
|
||||
|
||||
std::vector<BuildResult> Store::buildPathsWithResults(
|
||||
std::vector<KeyedBuildResult> Store::buildPathsWithResults(
|
||||
const std::vector<DerivedPath> & reqs,
|
||||
BuildMode buildMode,
|
||||
std::shared_ptr<Store> evalStore)
|
||||
@ -55,23 +47,23 @@ std::vector<BuildResult> Store::buildPathsWithResults(
|
||||
Worker worker(*this, evalStore ? *evalStore : *this);
|
||||
|
||||
Goals goals;
|
||||
for (const auto & br : reqs) {
|
||||
std::visit(overloaded {
|
||||
[&](const DerivedPath::Built & bfd) {
|
||||
goals.insert(worker.makeDerivationGoal(bfd.drvPath, bfd.outputs, buildMode));
|
||||
},
|
||||
[&](const DerivedPath::Opaque & bo) {
|
||||
goals.insert(worker.makePathSubstitutionGoal(bo.path, buildMode == bmRepair ? Repair : NoRepair));
|
||||
},
|
||||
}, br.raw());
|
||||
std::vector<std::pair<const DerivedPath &, GoalPtr>> state;
|
||||
|
||||
for (const auto & req : reqs) {
|
||||
auto goal = worker.makeGoal(req, buildMode);
|
||||
goals.insert(goal);
|
||||
state.push_back({req, goal});
|
||||
}
|
||||
|
||||
worker.run(goals);
|
||||
|
||||
std::vector<BuildResult> results;
|
||||
std::vector<KeyedBuildResult> results;
|
||||
|
||||
for (auto & i : goals)
|
||||
results.push_back(i->buildResult);
|
||||
for (auto & [req, goalPtr] : state)
|
||||
results.emplace_back(KeyedBuildResult {
|
||||
goalPtr->getBuildResult(req),
|
||||
/* .path = */ req,
|
||||
});
|
||||
|
||||
return results;
|
||||
}
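With `buildPathsWithResults` now returning `KeyedBuildResult`, every result carries the `DerivedPath` it answers, and `getBuildResult` has already filtered `builtOutputs` down to the outputs that request named. A hedged caller-side sketch; the reporting logic is a placeholder:

```cpp
#include <iostream>
#include "store-api.hh"
#include "build-result.hh" // assumed include path for KeyedBuildResult

// Build one derived path and report the outputs produced for that request.
void buildAndReport(nix::Store & store, const nix::DerivedPath & req)
{
    for (auto & res : store.buildPathsWithResults({ req })) {
        if (!res.success()) continue;
        // res.path is the request this result belongs to; res.builtOutputs
        // only contains realisations for outputs named by that request.
        for (auto & [outputName, realisation] : res.builtOutputs)
            std::cout << "built output '" << outputName << "' -> "
                      << store.printStorePath(realisation.outPath) << "\n";
    }
}
```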
|
||||
@ -84,15 +76,14 @@ BuildResult Store::buildDerivation(const StorePath & drvPath, const BasicDerivat
|
||||
|
||||
try {
|
||||
worker.run(Goals{goal});
|
||||
return goal->buildResult;
|
||||
return goal->getBuildResult(DerivedPath::Built {
|
||||
.drvPath = drvPath,
|
||||
.outputs = OutputsSpec::All {},
|
||||
});
|
||||
} catch (Error & e) {
|
||||
return BuildResult {
|
||||
.status = BuildResult::MiscFailure,
|
||||
.errorMsg = e.msg(),
|
||||
.path = DerivedPath::Built {
|
||||
.drvPath = drvPath,
|
||||
.outputs = OutputsSpec::All { },
|
||||
},
|
||||
};
|
||||
};
|
||||
}
|
||||
|
@ -11,6 +11,29 @@ bool CompareGoalPtrs::operator() (const GoalPtr & a, const GoalPtr & b) const {
|
||||
}
|
||||
|
||||
|
||||
BuildResult Goal::getBuildResult(const DerivedPath & req) {
|
||||
BuildResult res { buildResult };
|
||||
|
||||
if (auto pbp = std::get_if<DerivedPath::Built>(&req)) {
|
||||
auto & bp = *pbp;
|
||||
|
||||
/* Because goals are in general shared between derived paths
|
||||
that share the same derivation, we need to filter their
|
||||
results to get back just the results we care about.
|
||||
*/
|
||||
|
||||
for (auto it = res.builtOutputs.begin(); it != res.builtOutputs.end();) {
|
||||
if (bp.outputs.contains(it->first))
|
||||
++it;
|
||||
else
|
||||
it = res.builtOutputs.erase(it);
|
||||
}
|
||||
}
|
||||
|
||||
return res;
|
||||
}
|
||||
|
||||
|
||||
void addToWeakGoals(WeakGoals & goals, GoalPtr p)
|
||||
{
|
||||
if (goals.find(p) != goals.end())
|
||||
|
@ -81,11 +81,26 @@ struct Goal : public std::enable_shared_from_this<Goal>
|
||||
*/
|
||||
ExitCode exitCode = ecBusy;
|
||||
|
||||
protected:
|
||||
/**
|
||||
* Build result.
|
||||
*/
|
||||
BuildResult buildResult;
|
||||
|
||||
public:
|
||||
|
||||
/**
|
||||
* Project a `BuildResult` with just the information that pertains
|
||||
* to the given request.
|
||||
*
|
||||
* In general, goals may be aliased between multiple requests, and
|
||||
* the stored `BuildResult` has information for the union of all
|
||||
* requests. We don't want to leak what the other request are for
|
||||
* sake of both privacy and determinism, and this "safe accessor"
|
||||
* ensures we don't.
|
||||
*/
|
||||
BuildResult getBuildResult(const DerivedPath &);
|
||||
|
||||
/**
|
||||
* Exception containing an error message, if any.
|
||||
*/
|
||||
@ -93,7 +108,6 @@ struct Goal : public std::enable_shared_from_this<Goal>
|
||||
|
||||
Goal(Worker & worker, DerivedPath path)
|
||||
: worker(worker)
|
||||
, buildResult { .path = std::move(path) }
|
||||
{ }
|
||||
|
||||
virtual ~Goal()
|
||||
|
@ -1335,7 +1335,7 @@ struct RestrictedStore : public virtual RestrictedStoreConfig, public virtual Lo
|
||||
result.rethrow();
|
||||
}
|
||||
|
||||
std::vector<BuildResult> buildPathsWithResults(
|
||||
std::vector<KeyedBuildResult> buildPathsWithResults(
|
||||
const std::vector<DerivedPath> & paths,
|
||||
BuildMode buildMode = bmNormal,
|
||||
std::shared_ptr<Store> evalStore = nullptr) override
|
||||
@ -2174,7 +2174,7 @@ void LocalDerivationGoal::runChild()
|
||||
}
|
||||
|
||||
|
||||
DrvOutputs LocalDerivationGoal::registerOutputs()
|
||||
SingleDrvOutputs LocalDerivationGoal::registerOutputs()
|
||||
{
|
||||
/* When using a build hook, the build hook can register the output
|
||||
as valid (by doing `nix-store --import'). If so we don't have
|
||||
@ -2395,27 +2395,26 @@ DrvOutputs LocalDerivationGoal::registerOutputs()
|
||||
}
|
||||
};
|
||||
|
||||
auto rewriteRefs = [&]() -> std::pair<bool, StorePathSet> {
|
||||
auto rewriteRefs = [&]() -> StoreReferences {
|
||||
/* In the CA case, we need the rewritten refs to calculate the
|
||||
final path, therefore we look for a *non-rewritten
|
||||
self-reference, and use a bool rather than try to solve the
|
||||
computationally intractable fixed point. */
|
||||
std::pair<bool, StorePathSet> res {
|
||||
false,
|
||||
{},
|
||||
StoreReferences res {
|
||||
.self = false,
|
||||
};
|
||||
for (auto & r : references) {
|
||||
auto name = r.name();
|
||||
auto origHash = std::string { r.hashPart() };
|
||||
if (r == *scratchPath) {
|
||||
res.first = true;
|
||||
res.self = true;
|
||||
} else if (auto outputRewrite = get(outputRewrites, origHash)) {
|
||||
std::string newRef = *outputRewrite;
|
||||
newRef += '-';
|
||||
newRef += name;
|
||||
res.second.insert(StorePath { newRef });
|
||||
res.others.insert(StorePath { newRef });
|
||||
} else {
|
||||
res.second.insert(r);
|
||||
res.others.insert(r);
|
||||
}
|
||||
}
|
||||
return res;
|
||||
@ -2448,18 +2447,22 @@ DrvOutputs LocalDerivationGoal::registerOutputs()
|
||||
break;
|
||||
}
|
||||
auto got = caSink.finish().first;
|
||||
auto refs = rewriteRefs();
|
||||
|
||||
auto finalPath = worker.store.makeFixedOutputPath(
|
||||
outputHash.method,
|
||||
got,
|
||||
outputPathName(drv->name, outputName),
|
||||
refs.second,
|
||||
refs.first);
|
||||
if (*scratchPath != finalPath) {
|
||||
ValidPathInfo newInfo0 {
|
||||
worker.store,
|
||||
outputPathName(drv->name, outputName),
|
||||
FixedOutputInfo {
|
||||
.hash = {
|
||||
.method = outputHash.method,
|
||||
.hash = got,
|
||||
},
|
||||
.references = rewriteRefs(),
|
||||
},
|
||||
Hash::dummy,
|
||||
};
|
||||
if (*scratchPath != newInfo0.path) {
|
||||
// Also rewrite the output path
|
||||
auto source = sinkToSource([&](Sink & nextSink) {
|
||||
RewritingSink rsink2(oldHashPart, std::string(finalPath.hashPart()), nextSink);
|
||||
RewritingSink rsink2(oldHashPart, std::string(newInfo0.path.hashPart()), nextSink);
|
||||
dumpPath(actualPath, rsink2);
|
||||
rsink2.flush();
|
||||
});
|
||||
@ -2470,19 +2473,8 @@ DrvOutputs LocalDerivationGoal::registerOutputs()
|
||||
}
|
||||
|
||||
HashResult narHashAndSize = hashPath(htSHA256, actualPath);
|
||||
ValidPathInfo newInfo0 {
|
||||
finalPath,
|
||||
narHashAndSize.first,
|
||||
};
|
||||
|
||||
newInfo0.narHash = narHashAndSize.first;
|
||||
newInfo0.narSize = narHashAndSize.second;
|
||||
newInfo0.ca = FixedOutputHash {
|
||||
.method = outputHash.method,
|
||||
.hash = got,
|
||||
};
|
||||
newInfo0.references = refs.second;
|
||||
if (refs.first)
|
||||
newInfo0.references.insert(newInfo0.path);
|
||||
|
||||
assert(newInfo0.ca);
|
||||
return newInfo0;
|
||||
@ -2504,8 +2496,8 @@ DrvOutputs LocalDerivationGoal::registerOutputs()
|
||||
ValidPathInfo newInfo0 { requiredFinalPath, narHashAndSize.first };
|
||||
newInfo0.narSize = narHashAndSize.second;
|
||||
auto refs = rewriteRefs();
|
||||
newInfo0.references = refs.second;
|
||||
if (refs.first)
|
||||
newInfo0.references = std::move(refs.others);
|
||||
if (refs.self)
|
||||
newInfo0.references.insert(newInfo0.path);
|
||||
return newInfo0;
|
||||
},
|
||||
@ -2519,7 +2511,7 @@ DrvOutputs LocalDerivationGoal::registerOutputs()
|
||||
/* Check wanted hash */
|
||||
const Hash & wanted = dof.hash.hash;
|
||||
assert(newInfo0.ca);
|
||||
auto got = getContentAddressHash(*newInfo0.ca);
|
||||
auto got = newInfo0.ca->getHash();
|
||||
if (wanted != got) {
|
||||
/* Throw an error after registering the path as
|
||||
valid. */
|
||||
@ -2691,7 +2683,7 @@ DrvOutputs LocalDerivationGoal::registerOutputs()
|
||||
means it's safe to link the derivation to the output hash. We must do
|
||||
that for floating CA derivations, which otherwise couldn't be cached,
|
||||
but it's fine to do in all cases. */
|
||||
DrvOutputs builtOutputs;
|
||||
SingleDrvOutputs builtOutputs;
|
||||
|
||||
for (auto & [outputName, newInfo] : infos) {
|
||||
auto oldinfo = get(initialOutputs, outputName);
|
||||
@ -2710,7 +2702,7 @@ DrvOutputs LocalDerivationGoal::registerOutputs()
|
||||
worker.store.registerDrvOutput(thisRealisation);
|
||||
}
|
||||
if (wantedOutputs.contains(outputName))
|
||||
builtOutputs.emplace(thisRealisation.id, thisRealisation);
|
||||
builtOutputs.emplace(outputName, thisRealisation);
|
||||
}
|
||||
|
||||
return builtOutputs;
|
||||
|
@ -237,7 +237,7 @@ struct LocalDerivationGoal : public DerivationGoal
|
||||
* Check that the derivation outputs all exist and register them
|
||||
* as valid.
|
||||
*/
|
||||
DrvOutputs registerOutputs() override;
|
||||
SingleDrvOutputs registerOutputs() override;
|
||||
|
||||
void signRealisation(Realisation &) override;
|
||||
|
||||
|
@ -95,7 +95,9 @@ void PathSubstitutionGoal::tryNext()
|
||||
subs.pop_front();
|
||||
|
||||
if (ca) {
|
||||
subPath = sub->makeFixedOutputPathFromCA(storePath.name(), *ca);
|
||||
subPath = sub->makeFixedOutputPathFromCA(
|
||||
std::string { storePath.name() },
|
||||
ContentAddressWithReferences::withoutRefs(*ca));
|
||||
if (sub->storeDir == worker.store.storeDir)
|
||||
assert(subPath == storePath);
|
||||
} else if (sub->storeDir != worker.store.storeDir) {
|
||||
|
@ -92,6 +92,7 @@ std::shared_ptr<PathSubstitutionGoal> Worker::makePathSubstitutionGoal(const Sto
|
||||
return goal;
|
||||
}
|
||||
|
||||
|
||||
std::shared_ptr<DrvOutputSubstitutionGoal> Worker::makeDrvOutputSubstitutionGoal(const DrvOutput& id, RepairFlag repair, std::optional<ContentAddress> ca)
|
||||
{
|
||||
std::weak_ptr<DrvOutputSubstitutionGoal> & goal_weak = drvOutputSubstitutionGoals[id];
|
||||
@ -104,6 +105,20 @@ std::shared_ptr<DrvOutputSubstitutionGoal> Worker::makeDrvOutputSubstitutionGoal
|
||||
return goal;
|
||||
}
|
||||
|
||||
|
||||
GoalPtr Worker::makeGoal(const DerivedPath & req, BuildMode buildMode)
|
||||
{
|
||||
return std::visit(overloaded {
|
||||
[&](const DerivedPath::Built & bfd) -> GoalPtr {
|
||||
return makeDerivationGoal(bfd.drvPath, bfd.outputs, buildMode);
|
||||
},
|
||||
[&](const DerivedPath::Opaque & bo) -> GoalPtr {
|
||||
return makePathSubstitutionGoal(bo.path, buildMode == bmRepair ? Repair : NoRepair);
|
||||
},
|
||||
}, req.raw());
|
||||
}
|
||||
|
||||
|
||||
template<typename K, typename G>
|
||||
static void removeGoal(std::shared_ptr<G> goal, std::map<K, std::weak_ptr<G>> & goalMap)
|
||||
{
|
||||
|
@ -181,7 +181,7 @@ public:
|
||||
*/
|
||||
|
||||
/**
|
||||
* derivation goal
|
||||
* @ref DerivationGoal "derivation goal"
|
||||
*/
|
||||
private:
|
||||
std::shared_ptr<DerivationGoal> makeDerivationGoalCommon(
|
||||
@ -196,11 +196,19 @@ public:
|
||||
const OutputsSpec & wantedOutputs, BuildMode buildMode = bmNormal);
|
||||
|
||||
/**
|
||||
* substitution goal
|
||||
* @ref SubstitutionGoal "substitution goal"
|
||||
*/
|
||||
std::shared_ptr<PathSubstitutionGoal> makePathSubstitutionGoal(const StorePath & storePath, RepairFlag repair = NoRepair, std::optional<ContentAddress> ca = std::nullopt);
|
||||
std::shared_ptr<DrvOutputSubstitutionGoal> makeDrvOutputSubstitutionGoal(const DrvOutput & id, RepairFlag repair = NoRepair, std::optional<ContentAddress> ca = std::nullopt);
|
||||
|
||||
/**
|
||||
* Make a goal corresponding to the `DerivedPath`.
|
||||
*
|
||||
* It will be a `DerivationGoal` for a `DerivedPath::Built` or
|
||||
* a `SubstitutionGoal` for a `DerivedPath::Opaque`.
|
||||
*/
|
||||
GoalPtr makeGoal(const DerivedPath & req, BuildMode buildMode = bmNormal);
|
||||
|
||||
/**
|
||||
* Remove a dead goal.
|
||||
*/
|
||||
|
@ -9,7 +9,7 @@ std::string FixedOutputHash::printMethodAlgo() const
|
||||
return makeFileIngestionPrefix(method) + printHashType(hash.type);
|
||||
}
|
||||
|
||||
std::string makeFileIngestionPrefix(const FileIngestionMethod m)
|
||||
std::string makeFileIngestionPrefix(FileIngestionMethod m)
|
||||
{
|
||||
switch (m) {
|
||||
case FileIngestionMethod::Flat:
|
||||
@ -21,39 +21,35 @@ std::string makeFileIngestionPrefix(const FileIngestionMethod m)
|
||||
}
|
||||
}
|
||||
|
||||
std::string makeFixedOutputCA(FileIngestionMethod method, const Hash & hash)
|
||||
{
|
||||
return "fixed:"
|
||||
+ makeFileIngestionPrefix(method)
|
||||
+ hash.to_string(Base32, true);
|
||||
}
|
||||
|
||||
std::string renderContentAddress(ContentAddress ca)
|
||||
std::string ContentAddress::render() const
|
||||
{
|
||||
return std::visit(overloaded {
|
||||
[](TextHash & th) {
|
||||
return "text:" + th.hash.to_string(Base32, true);
|
||||
[](const TextHash & th) {
|
||||
return "text:"
|
||||
+ th.hash.to_string(Base32, true);
|
||||
},
|
||||
[](FixedOutputHash & fsh) {
|
||||
return makeFixedOutputCA(fsh.method, fsh.hash);
|
||||
[](const FixedOutputHash & fsh) {
|
||||
return "fixed:"
|
||||
+ makeFileIngestionPrefix(fsh.method)
|
||||
+ fsh.hash.to_string(Base32, true);
|
||||
}
|
||||
}, ca);
|
||||
}, raw);
|
||||
}
|
||||
|
||||
std::string renderContentAddressMethod(ContentAddressMethod cam)
|
||||
std::string ContentAddressMethod::render() const
|
||||
{
|
||||
return std::visit(overloaded {
|
||||
[](TextHashMethod & th) {
|
||||
[](const TextHashMethod & th) {
|
||||
return std::string{"text:"} + printHashType(htSHA256);
|
||||
},
|
||||
[](FixedOutputHashMethod & fshm) {
|
||||
[](const FixedOutputHashMethod & fshm) {
|
||||
return "fixed:" + makeFileIngestionPrefix(fshm.fileIngestionMethod) + printHashType(fshm.hashType);
|
||||
}
|
||||
}, cam);
|
||||
}, raw);
|
||||
}
|
||||
|
||||
/*
|
||||
Parses content address strings up to the hash.
|
||||
/**
|
||||
* Parses content address strings up to the hash.
|
||||
*/
|
||||
static ContentAddressMethod parseContentAddressMethodPrefix(std::string_view & rest)
|
||||
{
|
||||
@ -97,7 +93,7 @@ static ContentAddressMethod parseContentAddressMethodPrefix(std::string_view & r
|
||||
throw UsageError("content address prefix '%s' is unrecognized. Recognized prefixes are 'text' or 'fixed'", prefix);
|
||||
}
|
||||
|
||||
ContentAddress parseContentAddress(std::string_view rawCa) {
|
||||
ContentAddress ContentAddress::parse(std::string_view rawCa) {
|
||||
auto rest = rawCa;
|
||||
|
||||
ContentAddressMethod caMethod = parseContentAddressMethodPrefix(rest);
|
||||
@ -115,10 +111,10 @@ ContentAddress parseContentAddress(std::string_view rawCa) {
|
||||
.hash = Hash::parseNonSRIUnprefixed(rest, std::move(fohMethod.hashType)),
|
||||
});
|
||||
},
|
||||
}, caMethod);
|
||||
}, caMethod.raw);
|
||||
}
|
||||
|
||||
ContentAddressMethod parseContentAddressMethod(std::string_view caMethod)
|
||||
ContentAddressMethod ContentAddressMethod::parse(std::string_view caMethod)
|
||||
{
|
||||
std::string asPrefix = std::string{caMethod} + ":";
|
||||
// parseContentAddressMethodPrefix takes its argument by reference
|
||||
@ -126,26 +122,55 @@ ContentAddressMethod parseContentAddressMethod(std::string_view caMethod)
|
||||
return parseContentAddressMethodPrefix(asPrefixView);
|
||||
}
|
||||
|
||||
std::optional<ContentAddress> parseContentAddressOpt(std::string_view rawCaOpt)
|
||||
std::optional<ContentAddress> ContentAddress::parseOpt(std::string_view rawCaOpt)
|
||||
{
|
||||
return rawCaOpt == "" ? std::optional<ContentAddress>() : parseContentAddress(rawCaOpt);
|
||||
return rawCaOpt == ""
|
||||
? std::nullopt
|
||||
: std::optional { ContentAddress::parse(rawCaOpt) };
|
||||
};
|
||||
|
||||
std::string renderContentAddress(std::optional<ContentAddress> ca)
|
||||
{
|
||||
return ca ? renderContentAddress(*ca) : "";
|
||||
return ca ? ca->render() : "";
|
||||
}
|
||||
|
||||
Hash getContentAddressHash(const ContentAddress & ca)
|
||||
const Hash & ContentAddress::getHash() const
|
||||
{
|
||||
return std::visit(overloaded {
|
||||
[](const TextHash & th) {
|
||||
[](const TextHash & th) -> auto & {
|
||||
return th.hash;
|
||||
},
|
||||
[](const FixedOutputHash & fsh) {
|
||||
[](const FixedOutputHash & fsh) -> auto & {
|
||||
return fsh.hash;
|
||||
}
|
||||
}, ca);
|
||||
},
|
||||
}, raw);
|
||||
}
|
||||
|
||||
bool StoreReferences::empty() const
|
||||
{
|
||||
return !self && others.empty();
|
||||
}
|
||||
|
||||
size_t StoreReferences::size() const
|
||||
{
|
||||
return (self ? 1 : 0) + others.size();
|
||||
}
|
||||
|
||||
ContentAddressWithReferences ContentAddressWithReferences::withoutRefs(const ContentAddress & ca) {
|
||||
return std::visit(overloaded {
|
||||
[&](const TextHash & h) -> ContentAddressWithReferences {
|
||||
return TextInfo {
|
||||
.hash = h,
|
||||
.references = {},
|
||||
};
|
||||
},
|
||||
[&](const FixedOutputHash & h) -> ContentAddressWithReferences {
|
||||
return FixedOutputInfo {
|
||||
.hash = h,
|
||||
.references = {},
|
||||
};
|
||||
},
|
||||
}, ca.raw);
|
||||
}
|
||||
|
||||
}
|
||||
|
@ -3,12 +3,30 @@
|
||||
|
||||
#include <variant>
|
||||
#include "hash.hh"
|
||||
#include "path.hh"
|
||||
#include "comparator.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
/*
|
||||
* Content addressing method
|
||||
*/
|
||||
|
||||
/* We only have one way to hash text with references, so this is a single-value
|
||||
type, mainly useful with std::variant.
|
||||
*/
|
||||
|
||||
/**
|
||||
* An enumeration of the ways we can serialize file system objects.
|
||||
* The single way we can serialize "text" file system objects.
|
||||
*
|
||||
* Somewhat obscure, used by \ref Derivation derivations and
|
||||
* `builtins.toFile` currently.
|
||||
*/
|
||||
struct TextHashMethod : std::monostate { };
|
||||
|
||||
/**
|
||||
* An enumeration of the main ways we can serialize file system
|
||||
* objects.
|
||||
*/
|
||||
enum struct FileIngestionMethod : uint8_t {
|
||||
/**
|
||||
@ -22,6 +40,53 @@ enum struct FileIngestionMethod : uint8_t {
|
||||
Recursive = true
|
||||
};
|
||||
|
||||
/**
|
||||
* Compute the prefix to the hash algorithm which indicates how the
|
||||
* files were ingested.
|
||||
*/
|
||||
std::string makeFileIngestionPrefix(FileIngestionMethod m);
|
||||
|
||||
struct FixedOutputHashMethod {
|
||||
FileIngestionMethod fileIngestionMethod;
|
||||
HashType hashType;
|
||||
|
||||
GENERATE_CMP(FixedOutputHashMethod, me->fileIngestionMethod, me->hashType);
|
||||
};
|
||||
|
||||
/**
|
||||
* An enumeration of all the ways we can serialize file system objects.
|
||||
*
|
||||
* Just the type of a content address. Combine with the hash itself, and
|
||||
* we have a `ContentAddress` as defined below. Combine that, in turn,
|
||||
* with info on references, and we have `ContentAddressWithReferences`,
|
||||
* as defined further below.
|
||||
*/
|
||||
struct ContentAddressMethod
|
||||
{
|
||||
typedef std::variant<
|
||||
TextHashMethod,
|
||||
FixedOutputHashMethod
|
||||
> Raw;
|
||||
|
||||
Raw raw;
|
||||
|
||||
GENERATE_CMP(ContentAddressMethod, me->raw);
|
||||
|
||||
/* The moral equivalent of `using Raw::Raw;` */
|
||||
ContentAddressMethod(auto &&... arg)
|
||||
: raw(std::forward<decltype(arg)>(arg)...)
|
||||
{ }
|
||||
|
||||
static ContentAddressMethod parse(std::string_view rawCaMethod);
|
||||
|
||||
std::string render() const;
|
||||
};
|
||||
|
||||
|
||||
/*
|
||||
* Mini content address
|
||||
*/
|
||||
|
||||
/**
|
||||
* Somewhat obscure, used by \ref Derivation derivations and
|
||||
* `builtins.toFile` currently.
|
||||
@ -36,7 +101,7 @@ struct TextHash {
|
||||
};
|
||||
|
||||
/**
|
||||
* For path computed by makeFixedOutputPath.
|
||||
* Used by most store objects that are content-addressed.
|
||||
*/
|
||||
struct FixedOutputHash {
|
||||
/**
|
||||
@ -65,41 +130,96 @@ struct FixedOutputHash {
|
||||
* - ‘fixed:<r?>:<ht>:<h>’: For paths computed by
|
||||
* Store::makeFixedOutputPath() / Store::addToStore().
|
||||
*/
|
||||
typedef std::variant<
|
||||
TextHash,
|
||||
FixedOutputHash
|
||||
> ContentAddress;
|
||||
struct ContentAddress
|
||||
{
|
||||
typedef std::variant<
|
||||
TextHash,
|
||||
FixedOutputHash
|
||||
> Raw;
|
||||
|
||||
/**
|
||||
* Compute the prefix to the hash algorithm which indicates how the
|
||||
* files were ingested.
|
||||
*/
|
||||
std::string makeFileIngestionPrefix(const FileIngestionMethod m);
|
||||
Raw raw;
|
||||
|
||||
/**
|
||||
* Compute the content-addressability assertion (ValidPathInfo::ca) for
|
||||
* paths created by Store::makeFixedOutputPath() / Store::addToStore().
|
||||
*/
|
||||
std::string makeFixedOutputCA(FileIngestionMethod method, const Hash & hash);
|
||||
GENERATE_CMP(ContentAddress, me->raw);
|
||||
|
||||
std::string renderContentAddress(ContentAddress ca);
|
||||
/* The moral equivalent of `using Raw::Raw;` */
|
||||
ContentAddress(auto &&... arg)
|
||||
: raw(std::forward<decltype(arg)>(arg)...)
|
||||
{ }
|
||||
|
||||
/**
|
||||
* Compute the content-addressability assertion (ValidPathInfo::ca) for
|
||||
* paths created by Store::makeFixedOutputPath() / Store::addToStore().
|
||||
*/
|
||||
std::string render() const;
|
||||
|
||||
static ContentAddress parse(std::string_view rawCa);
|
||||
|
||||
static std::optional<ContentAddress> parseOpt(std::string_view rawCaOpt);
|
||||
|
||||
const Hash & getHash() const;
|
||||
};
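`renderContentAddress`/`parseContentAddress` become `ContentAddress::render`/`ContentAddress::parse`. A small round-trip sketch, assuming a string in the documented `fixed:<r?>:<ht>:<h>` form with a valid base-32 hash (the example string is not from this change):

```cpp
#include <cassert>
#include <string>
#include "content-address.hh"

// Parse a rendered content address and render it back unchanged.
void roundTrip(const std::string & renderedCa /* e.g. "fixed:r:sha256:<base32>" */)
{
    auto ca = nix::ContentAddress::parse(renderedCa);
    assert(ca.render() == renderedCa);
    (void) ca.getHash(); // the hash is reachable regardless of the variant
}
```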
|
||||
|
||||
std::string renderContentAddress(std::optional<ContentAddress> ca);
|
||||
|
||||
ContentAddress parseContentAddress(std::string_view rawCa);
|
||||
|
||||
std::optional<ContentAddress> parseContentAddressOpt(std::string_view rawCaOpt);
|
||||
|
||||
Hash getContentAddressHash(const ContentAddress & ca);
|
||||
|
||||
/*
|
||||
We only have one way to hash text with references, so this is single-value
|
||||
type is only useful in std::variant.
|
||||
*/
|
||||
struct TextHashMethod { };
|
||||
struct FixedOutputHashMethod {
|
||||
FileIngestionMethod fileIngestionMethod;
|
||||
HashType hashType;
|
||||
* Full content address
|
||||
*
|
||||
* See the schema for store paths in store-api.cc
|
||||
*/
|
||||
|
||||
/**
|
||||
* A set of references to other store objects.
|
||||
*
|
||||
* References to other store objects are tracked with store paths, self
|
||||
* references, however, are tracked with a boolean.
|
||||
*/
|
||||
struct StoreReferences {
|
||||
/**
|
||||
* References to other store objects
|
||||
*/
|
||||
StorePathSet others;
|
||||
|
||||
/**
|
||||
* Reference to this store object
|
||||
*/
|
||||
bool self = false;
|
||||
|
||||
/**
|
||||
* @return true iff no references, i.e. others is empty and self is
|
||||
* false.
|
||||
*/
|
||||
bool empty() const;
|
||||
|
||||
/**
|
||||
* Returns the numbers of references, i.e. the size of others + 1
|
||||
* iff self is true.
|
||||
*/
|
||||
size_t size() const;
|
||||
|
||||
GENERATE_CMP(StoreReferences, me->self, me->others);
|
||||
};
|
||||
|
||||
// This matches the additional info that we need for makeTextPath
|
||||
struct TextInfo {
|
||||
TextHash hash;
|
||||
/**
|
||||
* References to other store objects only; self references
|
||||
* disallowed
|
||||
*/
|
||||
StorePathSet references;
|
||||
|
||||
GENERATE_CMP(TextInfo, me->hash, me->references);
|
||||
};
|
||||
|
||||
struct FixedOutputInfo {
|
||||
FixedOutputHash hash;
|
||||
/**
|
||||
* References to other store objects or this one.
|
||||
*/
|
||||
StoreReferences references;
|
||||
|
||||
GENERATE_CMP(FixedOutputInfo, me->hash, me->references);
|
||||
};
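`FixedOutputInfo` pairs a `FixedOutputHash` with `StoreReferences`, which is what `Store::makeFixedOutputPath` takes throughout this change. A hedged construction sketch for the common reference-free case, mirroring `Input::computeStorePath` and `downloadFile` above:

```cpp
#include <string_view>
#include "content-address.hh"
#include "store-api.hh"

// Compute the store path of a recursively-hashed object with no references.
nix::StorePath fixedOutputPath(
    nix::Store & store, std::string_view name, const nix::Hash & narHash)
{
    nix::FixedOutputInfo info {
        .hash = {
            .method = nix::FileIngestionMethod::Recursive,
            .hash = narHash,
        },
        .references = {}, // no other refs; .self defaults to false
    };
    return store.makeFixedOutputPath(name, info);
}
```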
|
||||
|
||||
/**
|
||||
@ -107,13 +227,27 @@ struct FixedOutputHashMethod {
|
||||
*
|
||||
* A ContentAddress without a Hash.
|
||||
*/
|
||||
typedef std::variant<
|
||||
TextHashMethod,
|
||||
FixedOutputHashMethod
|
||||
> ContentAddressMethod;
|
||||
struct ContentAddressWithReferences
|
||||
{
|
||||
typedef std::variant<
|
||||
TextInfo,
|
||||
FixedOutputInfo
|
||||
> Raw;
|
||||
|
||||
ContentAddressMethod parseContentAddressMethod(std::string_view rawCaMethod);
|
||||
Raw raw;
|
||||
|
||||
std::string renderContentAddressMethod(ContentAddressMethod caMethod);
|
||||
GENERATE_CMP(ContentAddressWithReferences, me->raw);
|
||||
|
||||
/* The moral equivalent of `using Raw::Raw;` */
|
||||
ContentAddressWithReferences(auto &&... arg)
|
||||
: raw(std::forward<decltype(arg)>(arg)...)
|
||||
{ }
|
||||
|
||||
/**
|
||||
* Create a ContentAddressWithReferences from a mere ContentAddress, by
|
||||
* assuming no references in all cases.
|
||||
*/
|
||||
static ContentAddressWithReferences withoutRefs(const ContentAddress &);
|
||||
};
|
||||
|
||||
}
|
||||
|
@ -401,21 +401,21 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
logger->startWork();
auto pathInfo = [&]() {
// NB: FramedSource must be out of scope before logger->stopWork();
ContentAddressMethod contentAddressMethod = parseContentAddressMethod(camStr);
ContentAddressMethod contentAddressMethod = ContentAddressMethod::parse(camStr);
FramedSource source(from);
// TODO this is essentially RemoteStore::addCAToStore. Move it up to Store.
return std::visit(overloaded {
[&](TextHashMethod &) {
[&](const TextHashMethod &) {
// We could stream this by changing Store
std::string contents = source.drain();
auto path = store->addTextToStore(name, contents, refs, repair);
return store->queryPathInfo(path);
},
[&](FixedOutputHashMethod & fohm) {
[&](const FixedOutputHashMethod & fohm) {
auto path = store->addToStoreFromDump(source, name, fohm.fileIngestionMethod, fohm.hashType, repair, refs);
return store->queryPathInfo(path);
},
}, contentAddressMethod);
}, contentAddressMethod.raw);
}();
logger->stopWork();

@ -637,7 +637,10 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
to << res.timesBuilt << res.isNonDeterministic << res.startTime << res.stopTime;
}
if (GET_PROTOCOL_MINOR(clientVersion) >= 28) {
worker_proto::write(*store, to, res.builtOutputs);
DrvOutputs builtOutputs;
for (auto & [output, realisation] : res.builtOutputs)
builtOutputs.insert_or_assign(realisation.id, realisation);
worker_proto::write(*store, to, builtOutputs);
}
break;
}
@ -880,7 +883,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
info.references = worker_proto::read(*store, from, Phantom<StorePathSet> {});
from >> info.registrationTime >> info.narSize >> info.ultimate;
info.sigs = readStrings<StringSet>(from);
info.ca = parseContentAddressOpt(readString(from));
info.ca = ContentAddress::parseOpt(readString(from));
from >> repair >> dontCheckSigs;
if (!trusted && dontCheckSigs)
dontCheckSigs = false;
@ -1064,6 +1067,8 @@ void processConnection(

opCount++;

debug("performing daemon worker op: %d", op);

try {
performOp(tunnelLogger, store, trusted, recursive, clientVersion, from, to, op);
} catch (Error & e) {

@ -36,8 +36,8 @@ std::optional<StorePath> DerivationOutput::path(const Store & store, std::string
StorePath DerivationOutput::CAFixed::path(const Store & store, std::string_view drvName, std::string_view outputName) const
{
return store.makeFixedOutputPath(
hash.method, hash.hash,
outputPathName(drvName, outputName));
outputPathName(drvName, outputName),
{ hash, {} });
}


@ -313,6 +313,15 @@ Derivation parseDerivation(const Store & store, std::string && s, std::string_vi
}


/**
* Print a derivation string literal to an `std::string`.
*
* This syntax does not generalize to the expression language, which needs to
* escape `$`.
*
* @param res Where to print to
* @param s Which logical string to print
*/
static void printString(std::string & res, std::string_view s)
{
boost::container::small_vector<char, 64 * 1024> buffer;
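The doc comment above describes the ATerm-style string literal used in `.drv` files. As a hedged illustration only — the exact escape set is not shown in this hunk, the rules below are an assumption about the derivation format, and the helper name is hypothetical — the point about `$` can be seen in a toy escaper:

```cpp
// Sketch, not the real printString(): quotes, backslashes and newlines are
// escaped, but '$' is emitted verbatim, which is why this syntax cannot be
// reused for Nix expressions (where "${" would start an interpolation).
#include <string>
#include <string_view>

static std::string printStringSketch(std::string_view s)
{
    std::string res;
    res += '"';
    for (char c : s) {
        if (c == '"' || c == '\\') { res += '\\'; res += c; }
        else if (c == '\n') res += "\\n";
        else res += c;   // '$' included: no escaping needed in .drv files
    }
    res += '"';
    return res;
}
```
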
@ -933,7 +942,7 @@ void Derivation::checkInvariants(Store & store, const StorePath & drvPath) const
envHasRightPath(doia.path, i.first);
},
[&](const DerivationOutput::CAFixed & dof) {
StorePath path = store.makeFixedOutputPath(dof.hash.method, dof.hash.hash, drvName);
StorePath path = store.makeFixedOutputPath(drvName, { dof.hash, {} });
envHasRightPath(path, i.first);
},
[&](const DerivationOutput::CAFloating &) {
@ -980,7 +989,8 @@ nlohmann::json DerivationOutput::toJSON(

DerivationOutput DerivationOutput::fromJSON(
const Store & store, std::string_view drvName, std::string_view outputName,
const nlohmann::json & _json)
const nlohmann::json & _json,
const ExperimentalFeatureSettings & xpSettings)
{
std::set<std::string_view> keys;
auto json = (std::map<std::string, nlohmann::json>) _json;
@ -1019,6 +1029,7 @@ DerivationOutput DerivationOutput::fromJSON(
}

else if (keys == (std::set<std::string_view> { "hashAlgo" })) {
xpSettings.require(Xp::CaDerivations);
auto [method, hashType] = methodAlgo();
return DerivationOutput::CAFloating {
.method = method,
@ -1031,6 +1042,7 @@ DerivationOutput DerivationOutput::fromJSON(
}

else if (keys == (std::set<std::string_view> { "hashAlgo", "impure" })) {
xpSettings.require(Xp::ImpureDerivations);
auto [method, hashType] = methodAlgo();
return DerivationOutput::Impure {
.method = method,

@ -136,11 +136,15 @@ struct DerivationOutput : _DerivationOutputRaw
const Store & store,
std::string_view drvName,
std::string_view outputName) const;
/**
* @param xpSettings Stop-gap to avoid globals during unit tests.
*/
static DerivationOutput fromJSON(
const Store & store,
std::string_view drvName,
std::string_view outputName,
const nlohmann::json & json);
const nlohmann::json & json,
const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);
};

typedef std::map<std::string, DerivationOutput> DerivationOutputs;

@ -62,15 +62,31 @@ std::string DerivedPath::Opaque::to_string(const Store & store) const
std::string DerivedPath::Built::to_string(const Store & store) const
{
return store.printStorePath(drvPath)
+ "!"
+ '^'
+ outputs.to_string();
}

std::string DerivedPath::Built::to_string_legacy(const Store & store) const
{
return store.printStorePath(drvPath)
+ '!'
+ outputs.to_string();
}

std::string DerivedPath::to_string(const Store & store) const
{
return std::visit(
[&](const auto & req) { return req.to_string(store); },
this->raw());
return std::visit(overloaded {
[&](const DerivedPath::Built & req) { return req.to_string(store); },
[&](const DerivedPath::Opaque & req) { return req.to_string(store); },
}, this->raw());
}

std::string DerivedPath::to_string_legacy(const Store & store) const
{
return std::visit(overloaded {
[&](const DerivedPath::Built & req) { return req.to_string_legacy(store); },
[&](const DerivedPath::Opaque & req) { return req.to_string(store); },
}, this->raw());
}


@ -87,14 +103,24 @@ DerivedPath::Built DerivedPath::Built::parse(const Store & store, std::string_vi
};
}

DerivedPath DerivedPath::parse(const Store & store, std::string_view s)
static inline DerivedPath parseWith(const Store & store, std::string_view s, std::string_view separator)
{
size_t n = s.find("!");
size_t n = s.find(separator);
return n == s.npos
? (DerivedPath) DerivedPath::Opaque::parse(store, s)
: (DerivedPath) DerivedPath::Built::parse(store, s.substr(0, n), s.substr(n + 1));
}

DerivedPath DerivedPath::parse(const Store & store, std::string_view s)
{
return parseWith(store, s, "^");
}

DerivedPath DerivedPath::parseLegacy(const Store & store, std::string_view s)
{
return parseWith(store, s, "!");
}

RealisedPath::Set BuiltPath::toRealisedPaths(Store & store) const
{
RealisedPath::Set res;

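The hunks above switch the derived-path syntax from `!` to `^` while keeping `*_legacy` variants for the old form. A minimal, self-contained sketch of the split that `parseWith` performs — the path below is made up and no `Store` is involved, so this is illustration only, not the commit's code:

```cpp
#include <iostream>
#include <string_view>

int main()
{
    // Hypothetical input in the new syntax; the legacy form uses '!' instead.
    std::string_view s = "/nix/store/aaaa-hello.drv^out,dev";

    size_t n = s.find('^');
    if (n == std::string_view::npos)
        std::cout << "opaque store path: " << s << "\n";
    else
        std::cout << "derivation: " << s.substr(0, n)
                  << ", outputs: " << s.substr(n + 1) << "\n";
    return 0;
}
```
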
@ -48,8 +48,18 @@ struct DerivedPathBuilt {
StorePath drvPath;
OutputsSpec outputs;

/**
* Uses `^` as the separator
*/
std::string to_string(const Store & store) const;
static DerivedPathBuilt parse(const Store & store, std::string_view, std::string_view);
/**
* Uses `!` as the separator
*/
std::string to_string_legacy(const Store & store) const;
/**
* The caller splits on the separator, so it works for both variants.
*/
static DerivedPathBuilt parse(const Store & store, std::string_view drvPath, std::string_view outputs);
nlohmann::json toJSON(ref<Store> store) const;

GENERATE_CMP(DerivedPathBuilt, me->drvPath, me->outputs);
@ -81,8 +91,22 @@ struct DerivedPath : _DerivedPathRaw {
return static_cast<const Raw &>(*this);
}

/**
* Uses `^` as the separator
*/
std::string to_string(const Store & store) const;
/**
* Uses `!` as the separator
*/
std::string to_string_legacy(const Store & store) const;
/**
* Uses `^` as the separator
*/
static DerivedPath parse(const Store & store, std::string_view);
/**
* Uses `!` as the separator
*/
static DerivedPath parseLegacy(const Store & store, std::string_view);
};

/**

@ -71,6 +71,9 @@ struct DummyStore : public virtual DummyStoreConfig, public virtual Store
void queryRealisationUncached(const DrvOutput &,
Callback<std::shared_ptr<const Realisation>> callback) noexcept override
{ callback(nullptr); }

virtual ref<FSAccessor> getFSAccessor() override
{ unsupported("getFSAccessor"); }
};

static RegisterStoreImplementation<DummyStore, DummyStoreConfig> regDummyStore;

@ -7,12 +7,23 @@

#include <algorithm>
#include <map>
#include <mutex>
#include <thread>
#include <dlfcn.h>
#include <sys/utsname.h>

#include <nlohmann/json.hpp>

#include <sodium/core.h>

#ifdef __GLIBC__
#include <gnu/lib-names.h>
#include <nss.h>
#include <dlfcn.h>
#endif

#include "config-impl.hh"


namespace nix {

@ -41,7 +52,6 @@ Settings::Settings()
, nixDaemonSocketFile(canonPath(getEnvNonEmpty("NIX_DAEMON_SOCKET_PATH").value_or(nixStateDir + DEFAULT_SOCKET_PATH)))
{
buildUsersGroup = getuid() == 0 ? "nixbld" : "";
lockCPU = getEnv("NIX_AFFINITY_HACK") == "1";
allowSymlinkedStore = getEnv("NIX_IGNORE_SYMLINK_STORE") == "1";

auto sslOverride = getEnv("NIX_SSL_CERT_FILE").value_or(getEnv("SSL_CERT_FILE").value_or(""));
@ -185,18 +195,18 @@ NLOHMANN_JSON_SERIALIZE_ENUM(SandboxMode, {
{SandboxMode::smDisabled, false},
});

template<> void BaseSetting<SandboxMode>::set(const std::string & str, bool append)
template<> SandboxMode BaseSetting<SandboxMode>::parse(const std::string & str) const
{
if (str == "true") value = smEnabled;
else if (str == "relaxed") value = smRelaxed;
else if (str == "false") value = smDisabled;
if (str == "true") return smEnabled;
else if (str == "relaxed") return smRelaxed;
else if (str == "false") return smDisabled;
else throw UsageError("option '%s' has invalid value '%s'", name, str);
}

template<> bool BaseSetting<SandboxMode>::isAppendable()
template<> struct BaseSetting<SandboxMode>::trait
{
return false;
}
static constexpr bool appendable = false;
};

template<> std::string BaseSetting<SandboxMode>::to_string() const
{
@ -228,23 +238,23 @@ template<> void BaseSetting<SandboxMode>::convertToArg(Args & args, const std::s
});
}

void MaxBuildJobsSetting::set(const std::string & str, bool append)
unsigned int MaxBuildJobsSetting::parse(const std::string & str) const
{
if (str == "auto") value = std::max(1U, std::thread::hardware_concurrency());
if (str == "auto") return std::max(1U, std::thread::hardware_concurrency());
else {
if (auto n = string2Int<decltype(value)>(str))
value = *n;
return *n;
else
throw UsageError("configuration setting '%s' should be 'auto' or an integer", name);
}
}


void PluginFilesSetting::set(const std::string & str, bool append)
Paths PluginFilesSetting::parse(const std::string & str) const
{
if (pluginsLoaded)
throw UsageError("plugin-files set after plugins were loaded, you may need to move the flag before the subcommand");
BaseSetting<Paths>::set(str, append);
return BaseSetting<Paths>::parse(str);
}


@ -281,6 +291,42 @@ void initPlugins()
settings.pluginFiles.pluginsLoaded = true;
}

static void preloadNSS()
{
/* builtin:fetchurl can trigger a DNS lookup, which with glibc can trigger a dynamic library load of
one of the glibc NSS libraries in a sandboxed child, which will fail unless the library's already
been loaded in the parent. So we force a lookup of an invalid domain to force the NSS machinery to
load its lookup libraries in the parent before any child gets a chance to. */
static std::once_flag dns_resolve_flag;

std::call_once(dns_resolve_flag, []() {
#ifdef __GLIBC__
/* On linux, glibc will run every lookup through the nss layer.
* That means every lookup goes, by default, through nscd, which acts as a local
* cache.
* Because we run builds in a sandbox, we also remove access to nscd otherwise
* lookups would leak into the sandbox.
*
* But now we have a new problem, we need to make sure the nss_dns backend that
* does the dns lookups when nscd is not available is loaded or available.
*
* We can't make it available without leaking nix's environment, so instead we'll
* load the backend, and configure nss so it does not try to run dns lookups
* through nscd.
*
* This is technically only used for builtins:fetch* functions so we only care
* about dns.
*
* All other platforms are unaffected.
*/
if (!dlopen(LIBNSS_DNS_SO, RTLD_NOW))
warn("unable to load nss_dns backend");
// FIXME: get hosts entry from nsswitch.conf.
__nss_configure_lookup("hosts", "files dns");
#endif
});
}

static bool initLibStoreDone = false;

void assertLibStoreInitialized() {
@ -291,6 +337,24 @@ void assertLibStoreInitialized() {
}

void initLibStore() {

initLibUtil();

if (sodium_init() == -1)
throw Error("could not initialise libsodium");

loadConfFile();

preloadNSS();

/* On macOS, don't use the per-session TMPDIR (as set e.g. by
sshd). This breaks build users because they don't have access
to the TMPDIR, in particular in ‘nix-store --serve’. */
#if __APPLE__
if (hasPrefix(getEnv("TMPDIR").value_or("/tmp"), "/var/folders/"))
unsetenv("TMPDIR");
#endif

initLibStoreDone = true;
}

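The new `initLibStore()` above centralises libstore start-up: libutil initialisation, libsodium, loading `nix.conf`, NSS preloading, and the macOS `TMPDIR` workaround. As a hedged sketch of how an embedding program might use it — the surrounding calls (`openStore()`, `getUri()`) are assumed from the existing store API, not confirmed by this diff:

```cpp
#include <iostream>

#include "globals.hh"
#include "store-api.hh"

int main()
{
    using namespace nix;

    // Must run before any store operation; store code may now call
    // assertLibStoreInitialized() and abort if this was forgotten.
    initLibStore();

    auto store = openStore();                 // open the default store from settings
    std::cout << store->getUri() << std::endl;
    return 0;
}
```
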
@ -26,7 +26,7 @@ struct MaxBuildJobsSetting : public BaseSetting<unsigned int>
options->addSetting(this);
}

void set(const std::string & str, bool append = false) override;
unsigned int parse(const std::string & str) const override;
};

struct PluginFilesSetting : public BaseSetting<Paths>
@ -43,7 +43,7 @@ struct PluginFilesSetting : public BaseSetting<Paths>
options->addSetting(this);
}

void set(const std::string & str, bool append = false) override;
Paths parse(const std::string & str) const override;
};

const uint32_t maxIdsPerBuild =
@ -328,16 +328,6 @@ public:
users in `build-users-group`.

UIDs are allocated starting at 872415232 (0x34000000) on Linux and 56930 on macOS.

> **Warning**
> This is an experimental feature.

To enable it, add the following to [`nix.conf`](#):

```
extra-experimental-features = auto-allocate-uids
auto-allocate-uids = true
```
)"};

Setting<uint32_t> startId{this,
@ -367,16 +357,6 @@ public:

Cgroups are required and enabled automatically for derivations
that require the `uid-range` system feature.

> **Warning**
> This is an experimental feature.

To enable it, add the following to [`nix.conf`](#):

```
extra-experimental-features = cgroups
use-cgroups = true
```
)"};
#endif

@ -478,11 +458,6 @@ public:
)",
{"env-keep-derivations"}};

/**
* Whether to lock the Nix client and worker to the same CPU.
*/
bool lockCPU;

Setting<SandboxMode> sandboxMode{
this,
#if __linux__

@ -156,7 +156,7 @@ struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Stor
throw Error("NAR hash is now mandatory");
info->narHash = Hash::parseAnyPrefixed(s);
}
info->ca = parseContentAddressOpt(readString(conn->from));
info->ca = ContentAddress::parseOpt(readString(conn->from));
info->sigs = readStrings<StringSet>(conn->from);

auto s = readString(conn->from);
@ -287,19 +287,18 @@ public:

conn->to.flush();

BuildResult status {
.path = DerivedPath::Built {
.drvPath = drvPath,
.outputs = OutputsSpec::All { },
},
};
BuildResult status;
status.status = (BuildResult::Status) readInt(conn->from);
conn->from >> status.errorMsg;

if (GET_PROTOCOL_MINOR(conn->remoteVersion) >= 3)
conn->from >> status.timesBuilt >> status.isNonDeterministic >> status.startTime >> status.stopTime;
if (GET_PROTOCOL_MINOR(conn->remoteVersion) >= 6) {
status.builtOutputs = worker_proto::read(*this, conn->from, Phantom<DrvOutputs> {});
auto builtOutputs = worker_proto::read(*this, conn->from, Phantom<DrvOutputs> {});
for (auto && [output, realisation] : builtOutputs)
status.builtOutputs.insert_or_assign(
std::move(output.outputName),
std::move(realisation));
}
return status;
}
@ -330,7 +329,7 @@ public:

conn->to.flush();

BuildResult result { .path = DerivedPath::Opaque { StorePath::dummy } };
BuildResult result;
result.status = (BuildResult::Status) readInt(conn->from);

if (!result.success()) {
@ -342,6 +341,9 @@ public:
void ensurePath(const StorePath & path) override
{ unsupported("ensurePath"); }

virtual ref<FSAccessor> getFSAccessor() override
{ unsupported("getFSAccessor"); }

void computeFSClosure(const StorePathSet & paths,
StorePathSet & out, bool flipDirection = false,
bool includeOutputs = false, bool includeDerivers = false) override

@ -710,6 +710,7 @@ void canonicalisePathMetaData(const Path & path,
canonicalisePathMetaData(path, uidRange, inodesSeen);
}


void LocalStore::registerDrvOutput(const Realisation & info, CheckSigsFlag checkSigs)
{
experimentalFeatureSettings.require(Xp::CaDerivations);
@ -888,7 +889,7 @@ std::shared_ptr<const ValidPathInfo> LocalStore::queryPathInfoInternal(State & s
if (s) info->sigs = tokenizeString<StringSet>(s, " ");

s = (const char *) sqlite3_column_text(state.stmts->QueryPathInfo, 7);
if (s) info->ca = parseContentAddressOpt(s);
if (s) info->ca = ContentAddress::parseOpt(s);

/* Get the references. */
auto useQueryReferences(state.stmts->QueryReferences.use()(info->id));
@ -1221,7 +1222,7 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
printStorePath(info.path), info.narSize, hashResult.second);

if (info.ca) {
if (auto foHash = std::get_if<FixedOutputHash>(&*info.ca)) {
if (auto foHash = std::get_if<FixedOutputHash>(&info.ca->raw)) {
auto actualFoHash = hashCAPath(
foHash->method,
foHash->hash.type,
@ -1234,7 +1235,7 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
actualFoHash.hash.to_string(Base32, true));
}
}
if (auto textHash = std::get_if<TextHash>(&*info.ca)) {
if (auto textHash = std::get_if<TextHash>(&info.ca->raw)) {
auto actualTextHash = hashString(htSHA256, readFile(realPath));
if (textHash->hash != actualTextHash) {
throw Error("ca hash mismatch importing path '%s';\n specified: %s\n got: %s",
@ -1320,7 +1321,19 @@ StorePath LocalStore::addToStoreFromDump(Source & source0, std::string_view name

auto [hash, size] = hashSink->finish();

auto dstPath = makeFixedOutputPath(method, hash, name, references);
ContentAddressWithReferences desc = FixedOutputInfo {
.hash = {
.method = method,
.hash = hash,
},
.references = {
.others = references,
// caller is not capable of creating a self-reference, because this is content-addressed without modulus
.self = false,
},
};

auto dstPath = makeFixedOutputPathFromCA(name, desc);

addTempRoot(dstPath);

@ -1340,7 +1353,7 @@ StorePath LocalStore::addToStoreFromDump(Source & source0, std::string_view name
autoGC();

if (inMemory) {
StringSource dumpSource { dump };
StringSource dumpSource { dump };
/* Restore from the NAR in memory. */
if (method == FileIngestionMethod::Recursive)
restorePath(realPath, dumpSource);
@ -1364,10 +1377,13 @@ StorePath LocalStore::addToStoreFromDump(Source & source0, std::string_view name

optimisePath(realPath, repair);

ValidPathInfo info { dstPath, narHash.first };
ValidPathInfo info {
*this,
name,
std::move(desc),
narHash.first
};
info.narSize = narHash.second;
info.references = references;
info.ca = FixedOutputHash { .method = method, .hash = hash };
registerValidPath(info);
}

@ -1384,7 +1400,10 @@ StorePath LocalStore::addTextToStore(
const StorePathSet & references, RepairFlag repair)
{
auto hash = hashString(htSHA256, s);
auto dstPath = makeTextPath(name, hash, references);
auto dstPath = makeTextPath(name, TextInfo {
{ .hash = hash },
references,
});

addTempRoot(dstPath);

@ -27,18 +27,17 @@ std::map<StorePath, StorePath> makeContentAddressed(

StringMap rewrites;

StorePathSet references;
bool hasSelfReference = false;
StoreReferences refs;
for (auto & ref : oldInfo->references) {
if (ref == path)
hasSelfReference = true;
refs.self = true;
else {
auto i = remappings.find(ref);
auto replacement = i != remappings.end() ? i->second : ref;
// FIXME: warn about unremapped paths?
if (replacement != ref)
rewrites.insert_or_assign(srcStore.printStorePath(ref), srcStore.printStorePath(replacement));
references.insert(std::move(replacement));
refs.others.insert(std::move(replacement));
}
}

@ -49,24 +48,28 @@ std::map<StorePath, StorePath> makeContentAddressed(

auto narModuloHash = hashModuloSink.finish().first;

auto dstPath = dstStore.makeFixedOutputPath(
FileIngestionMethod::Recursive, narModuloHash, path.name(), references, hasSelfReference);
ValidPathInfo info {
dstStore,
path.name(),
FixedOutputInfo {
.hash = {
.method = FileIngestionMethod::Recursive,
.hash = narModuloHash,
},
.references = std::move(refs),
},
Hash::dummy,
};

printInfo("rewriting '%s' to '%s'", pathS, srcStore.printStorePath(dstPath));
printInfo("rewriting '%s' to '%s'", pathS, dstStore.printStorePath(info.path));

StringSink sink2;
RewritingSink rsink2(oldHashPart, std::string(dstPath.hashPart()), sink2);
RewritingSink rsink2(oldHashPart, std::string(info.path.hashPart()), sink2);
rsink2(sink.s);
rsink2.flush();

ValidPathInfo info { dstPath, hashString(htSHA256, sink2.s) };
info.references = std::move(references);
if (hasSelfReference) info.references.insert(info.path);
info.narHash = hashString(htSHA256, sink2.s);
info.narSize = sink.s.size();
info.ca = FixedOutputHash {
.method = FileIngestionMethod::Recursive,
.hash = narModuloHash,
};

StringSource source(sink2.s);
dstStore.addToStore(info, source);

@ -273,7 +273,7 @@ public:
narInfo->deriver = StorePath(queryNAR.getStr(9));
for (auto & sig : tokenizeString<Strings>(queryNAR.getStr(10), " "))
narInfo->sigs.insert(sig);
narInfo->ca = parseContentAddressOpt(queryNAR.getStr(11));
narInfo->ca = ContentAddress::parseOpt(queryNAR.getStr(11));

return {oValid, narInfo};
});

@ -7,15 +7,18 @@ namespace nix {
NarInfo::NarInfo(const Store & store, const std::string & s, const std::string & whence)
: ValidPathInfo(StorePath(StorePath::dummy), Hash(Hash::dummy)) // FIXME: hack
{
auto corrupt = [&]() {
return Error("NAR info file '%1%' is corrupt", whence);
unsigned line = 1;

auto corrupt = [&](const char * reason) {
return Error("NAR info file '%1%' is corrupt: %2%", whence,
std::string(reason) + (line > 0 ? " at line " + std::to_string(line) : ""));
};

auto parseHashField = [&](const std::string & s) {
try {
return Hash::parseAnyPrefixed(s);
} catch (BadHash &) {
throw corrupt();
throw corrupt("bad hash");
}
};

@ -26,12 +29,12 @@ NarInfo::NarInfo(const Store & store, const std::string & s, const std::string &
while (pos < s.size()) {

size_t colon = s.find(':', pos);
if (colon == std::string::npos) throw corrupt();
if (colon == std::string::npos) throw corrupt("expecting ':'");

std::string name(s, pos, colon - pos);

size_t eol = s.find('\n', colon + 2);
if (eol == std::string::npos) throw corrupt();
if (eol == std::string::npos) throw corrupt("expecting '\\n'");

std::string value(s, colon + 2, eol - colon - 2);

@ -47,7 +50,7 @@ NarInfo::NarInfo(const Store & store, const std::string & s, const std::string &
fileHash = parseHashField(value);
else if (name == "FileSize") {
auto n = string2Int<decltype(fileSize)>(value);
if (!n) throw corrupt();
if (!n) throw corrupt("invalid FileSize");
fileSize = *n;
}
else if (name == "NarHash") {
@ -56,12 +59,12 @@ NarInfo::NarInfo(const Store & store, const std::string & s, const std::string &
}
else if (name == "NarSize") {
auto n = string2Int<decltype(narSize)>(value);
if (!n) throw corrupt();
if (!n) throw corrupt("invalid NarSize");
narSize = *n;
}
else if (name == "References") {
auto refs = tokenizeString<Strings>(value, " ");
if (!references.empty()) throw corrupt();
if (!references.empty()) throw corrupt("extra References");
for (auto & r : refs)
references.insert(StorePath(r));
}
@ -72,17 +75,26 @@ NarInfo::NarInfo(const Store & store, const std::string & s, const std::string &
else if (name == "Sig")
sigs.insert(value);
else if (name == "CA") {
if (ca) throw corrupt();
if (ca) throw corrupt("extra CA");
// FIXME: allow blank ca or require skipping field?
ca = parseContentAddressOpt(value);
ca = ContentAddress::parseOpt(value);
}

pos = eol + 1;
line += 1;
}

if (compression == "") compression = "bzip2";

if (!havePath || !haveNarHash || url.empty() || narSize == 0) throw corrupt();
if (!havePath || !haveNarHash || url.empty() || narSize == 0) {
line = 0; // don't include line information in the error
throw corrupt(
!havePath ? "StorePath missing" :
!haveNarHash ? "NarHash missing" :
url.empty() ? "URL missing" :
narSize == 0 ? "NarSize missing or zero"
: "?");
}
}

std::string NarInfo::to_string(const Store & store) const

Some files were not shown because too many files have changed in this diff