Merge remote-tracking branch 'upstream/master' into package-nix
commit 2220a4a22c
.github/labeler.yml (vendored): 31 changed lines

@@ -1,23 +1,30 @@
 "documentation":
-  - doc/manual/*
-  - src/nix/**/*.md
+  - changed-files:
+    - any-glob-to-any-file: "doc/manual/*"
+    - any-glob-to-any-file: "src/nix/**/*.md"

 "store":
-  - src/libstore/store-api.*
-  - src/libstore/*-store.*
+  - changed-files:
+    - any-glob-to-any-file: "src/libstore/store-api.*"
+    - any-glob-to-any-file: "src/libstore/*-store.*"

 "fetching":
-  - src/libfetchers/**/*
+  - changed-files:
+    - any-glob-to-any-file: "src/libfetchers/**/*"

 "repl":
-  - src/libcmd/repl.*
-  - src/nix/repl.*
+  - changed-files:
+    - any-glob-to-any-file: "src/libcmd/repl.*"
+    - any-glob-to-any-file: "src/nix/repl.*"

 "new-cli":
-  - src/nix/**/*
+  - changed-files:
+    - any-glob-to-any-file: "src/nix/**/*"

 "with-tests":
-  # Unit tests
-  - src/*/tests/**/*
-  # Functional and integration tests
-  - tests/functional/**/*
+  - changed-files:
+    # Unit tests
+    - any-glob-to-any-file: "src/*/tests/**/*"
+    # Functional and integration tests
+    - any-glob-to-any-file: "tests/functional/**/*"
.github/workflows/backport.yml (vendored): 2 changed lines

@@ -21,7 +21,7 @@ jobs:
         fetch-depth: 0
     - name: Create backport PRs
       # should be kept in sync with `version`
-      uses: zeebe-io/backport-action@v2.1.1
+      uses: zeebe-io/backport-action@v2.2.0
       with:
         # Config README: https://github.com/zeebe-io/backport-action#backport-action
        github_token: ${{ secrets.GITHUB_TOKEN }}
.github/workflows/ci.yml (vendored): 14 changed lines

@@ -20,12 +20,12 @@ jobs:
     - uses: actions/checkout@v4
       with:
         fetch-depth: 0
-    - uses: cachix/install-nix-action@v23
+    - uses: cachix/install-nix-action@v24
       with:
         # The sandbox would otherwise be disabled by default on Darwin
         extra_nix_config: "sandbox = true"
     - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
-    - uses: cachix/cachix-action@v12
+    - uses: cachix/cachix-action@v13
       if: needs.check_secrets.outputs.cachix == 'true'
       with:
         name: '${{ env.CACHIX_NAME }}'
@@ -62,10 +62,10 @@ jobs:
       with:
         fetch-depth: 0
     - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
-    - uses: cachix/install-nix-action@v23
+    - uses: cachix/install-nix-action@v24
       with:
         install_url: https://releases.nixos.org/nix/nix-2.13.3/install
-    - uses: cachix/cachix-action@v12
+    - uses: cachix/cachix-action@v13
       with:
         name: '${{ env.CACHIX_NAME }}'
         signingKey: '${{ secrets.CACHIX_SIGNING_KEY }}'
@@ -84,7 +84,7 @@ jobs:
     steps:
     - uses: actions/checkout@v4
     - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
-    - uses: cachix/install-nix-action@v23
+    - uses: cachix/install-nix-action@v24
       with:
         install_url: '${{needs.installer.outputs.installerURL}}'
         install_options: "--tarball-url-prefix https://${{ env.CACHIX_NAME }}.cachix.org/serve"
@@ -114,12 +114,12 @@ jobs:
     - uses: actions/checkout@v4
       with:
         fetch-depth: 0
-    - uses: cachix/install-nix-action@v23
+    - uses: cachix/install-nix-action@v24
       with:
         install_url: https://releases.nixos.org/nix/nix-2.13.3/install
     - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
     - run: echo NIX_VERSION="$(nix --experimental-features 'nix-command flakes' eval .\#default.version | tr -d \")" >> $GITHUB_ENV
-    - uses: cachix/cachix-action@v12
+    - uses: cachix/cachix-action@v13
       if: needs.check_secrets.outputs.cachix == 'true'
       with:
         name: '${{ env.CACHIX_NAME }}'
.github/workflows/labels.yml (vendored): 2 changed lines

@@ -18,7 +18,7 @@ jobs:
     runs-on: ubuntu-latest
     if: github.repository_owner == 'NixOS'
     steps:
-    - uses: actions/labeler@v4
+    - uses: actions/labeler@v5
      with:
        repo-token: ${{ secrets.GITHUB_TOKEN }}
        sync-labels: false
Makefile: 2 changed lines

@@ -64,4 +64,4 @@ $(eval $(call include-sub-makefile, doc/manual/local.mk))
 endif
 $(eval $(call include-sub-makefile, doc/internal-api/local.mk))

-GLOBAL_CXXFLAGS += -g -Wall -include config.h -std=c++2a -I src
+GLOBAL_CXXFLAGS += -g -Wall -include $(buildprefix)config.h -std=c++2a -I src
@@ -31,9 +31,9 @@
 /installation/installation /installation 301!

 /package-management/basic-package-mgmt /command-ref/nix-env 301!
-/package-management/channels* /command-ref/nix-channel 301!
+/package-management/channels /command-ref/nix-channel 301!
 /package-management/package-management /package-management 301!
-/package-management/s3-substituter* /command-ref/new-cli/nix3-help-stores#s3-binary-cache-store 301!
+/package-management/s3-substituter /store/types/s3-binary-cache-store 301!

 /protocols/protocols /protocols 301!
@@ -8,7 +8,15 @@ let
   showBuiltin = name: { doc, args, arity, experimental-feature }:
     let
      experimentalNotice = optionalString (experimental-feature != null) ''
-        This function is only available if the [${experimental-feature}](@docroot@/contributing/experimental-features.md#xp-feature-${experimental-feature}) experimental feature is enabled.
+        > **Note**
+        >
+        > This function is only available if the [`${experimental-feature}` experimental feature](@docroot@/contributing/experimental-features.md#xp-feature-${experimental-feature}) is enabled.
+        >
+        > For example, include the following in [`nix.conf`](@docroot@/command-ref/conf-file.md):
+        >
+        > ```
+        > extra-experimental-features = ${experimental-feature}
+        > ```
      '';
    in
    squash ''
@@ -17,10 +25,9 @@ let
       </dt>
       <dd>

-      ${doc}
-
       ${experimentalNotice}
+
+      ${doc}
     </dd>
   '';
   listArgs = args: concatStringsSep " " (map (s: "<var>${s}</var>") args);
@@ -103,7 +103,8 @@ let
       ${allStores}
     '';
   index = replaceStrings
-    [ "@store-types@" ] [ storesOverview ]
+    [ "@store-types@" "./local-store.md" "./local-daemon-store.md" ]
+    [ storesOverview "#local-store" "#local-daemon-store" ]
     details.doc;
   storesOverview =
     let
@@ -20,10 +20,10 @@ let
     else "`${setting}`";
   # separate body to cleanly handle indentation
   body = ''
-    ${description}
-
     ${experimentalFeatureNote}
+
+    ${description}

     **Default:** ${showDefault documentDefault defaultValue}

     ${showAliases aliases}
@@ -19,10 +19,10 @@ let
   result = squash ''
     # ${name}

-    ${doc}
-
     ${experimentalFeatureNote}
+
+    ${doc}

     ## Settings

     ${showSettings { prefix = "store-${slug}"; inherit inlineHTML; } settings}
doc/manual/rl-next/hash-format-nix32.md (new file): 23 lines

@@ -0,0 +1,23 @@
+---
+synopsis: Rename hash format `base32` to `nix32`
+prs: 9452
+---
+
+Hash format `base32` was renamed to `nix32`, because it uses a Nix-specific character set for
+[Base32](https://en.wikipedia.org/wiki/Base32).
+
+## Deprecation: Use `nix32` instead of `base32` as `toHashFormat`
+
+For the builtin `convertHash`, the `toHashFormat` parameter now accepts the same hash formats as the `--to`/`--from`
+parameters of the `nix hash convert` command: `"base16"`, `"nix32"`, `"base64"`, and `"sri"`. The former `"base32"` value
+remains as a deprecated alias for `"nix32"`. Please convert your code from:
+
+```nix
+builtins.convertHash { inherit hash hashAlgo; toHashFormat = "base32";}
+```
+
+to
+
+```nix
+builtins.convertHash { inherit hash hashAlgo; toHashFormat = "nix32";}
+```
@@ -1,9 +1,8 @@
 ---
 synopsis: Mounted SSH Store
-issues: #7890
-prs: #7912
-description: {
+issues: 7890
+prs: 7912
+---

 Introduced the store [`mounted-ssh-ng://`](@docroot@/command-ref/new-cli/nix3-help-stores.md).
 This store allows full access to a Nix store on a remote machine and additionally requires that the store be mounted in the local filesystem.
-
-}
@@ -1,8 +1,7 @@
-synopsis: `nix config show`
-issues: #7672
-prs: #9477
-description: {
+---
+synopsis: Rename to `nix config show`
+issues: 7672
+prs: 9477
+---

 `nix show-config` was renamed to `nix config show`, and `nix doctor` was renamed to `nix config check`, to be more consistent with the rest of the command-line interface.
-
-}
@@ -1,9 +1,6 @@
 ---
 synopsis: Fix `nix-env --query --drv-path --json`
-prs: #9257
-description: {
+prs: 9257
+---

 Fixed a bug where `nix-env --query` ignored `--drv-path` when `--json` was set.
-
-}
-
doc/manual/rl-next/nix-hash-convert.md (new file): 47 lines

@@ -0,0 +1,47 @@
+---
+synopsis: Add `nix hash convert`
+prs: 9452
+---
+
+New [`nix hash convert`](https://github.com/NixOS/nix/issues/8876) sub-command with a fast track
+to stabilization! Examples:
+
+- Convert the hash to `nix32`:
+
+  ```bash
+  $ nix hash convert --algo "sha1" --to nix32 "800d59cfcd3c05e900cb4e214be48f6b886a08df"
+  vw46m23bizj4n8afrc0fj19wrp7mj3c0
+  ```
+
+  `nix32` is a base32 encoding with a Nix-specific character set.
+  The hashing algorithm must be specified explicitly (it is optional with SRI hashes), while the hash format is detected from the length of the input
+  hash.
+
+- Convert the hash to the `sri` format, which includes an algorithm specification:
+
+  ```bash
+  nix hash convert --algo "sha1" "800d59cfcd3c05e900cb4e214be48f6b886a08df"
+  sha1-gA1Zz808BekAy04hS+SPa4hqCN8=
+  ```
+
+  or with an explicit `--to` format:
+
+  ```bash
+  nix hash convert --algo "sha1" --to sri "800d59cfcd3c05e900cb4e214be48f6b886a08df"
+  sha1-gA1Zz808BekAy04hS+SPa4hqCN8=
+  ```
+
+- Assert the input format of the hash:
+
+  ```bash
+  nix hash convert --algo "sha256" --from nix32 "ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0="
+  error: input hash 'ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0=' does not have the expected format '--from nix32'
+  nix hash convert --algo "sha256" --from nix32 "1b8m03r63zqhnjf7l5wnldhh7c134ap5vpj0850ymkq1iyzicy5s"
+  sha256-ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0=
+  ```
+
+The `--to`/`--from`/`--algo` parameters have context-sensitive auto-completion.
+
+## Related Deprecations
+
+The following commands are still available but will emit a deprecation warning. Please convert your code to
+`nix hash convert`:
+
+- `nix hash to-base16 $hash1 $hash2`: Use `nix hash convert --to base16 $hash1 $hash2` instead.
+- `nix hash to-base32 $hash1 $hash2`: Use `nix hash convert --to nix32 $hash1 $hash2` instead.
+- `nix hash to-base64 $hash1 $hash2`: Use `nix hash convert --to base64 $hash1 $hash2` instead.
+- `nix hash to-sri $hash1 $hash2`: Use `nix hash convert --to sri $hash1 $hash2`
+  or even just `nix hash convert $hash1 $hash2` instead.
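At the C++ level, the conversion that `nix hash convert` performs boils down to parsing the hash with an explicit algorithm and re-rendering it in the target format. The sketch below is an illustration of the API shape only, not an excerpt from the Nix sources; it uses the `Hash::parseAny`, `parseHashAlgo`, and `HashFormat::Nix32` identifiers visible in the library hunks later in this diff, the example hash/`nix32` pair from the note above, and assumes libutil's `hash.hh` is on the include path.

```cpp
// Minimal sketch of the conversion behind `nix hash convert` (illustrative).
#include <cassert>
#include "hash.hh"  // libutil's Hash class — assumed available in the build

using namespace nix;

int main()
{
    // Parse a base-16 SHA-1 hash; the algorithm must be given explicitly,
    // since a bare base-16 string does not encode which algorithm was used.
    auto h = Hash::parseAny("800d59cfcd3c05e900cb4e214be48f6b886a08df",
                            parseHashAlgo("sha1"));

    // Re-render it in the Nix-specific base-32 alphabet, now called "nix32"
    // (formerly HashFormat::Base32). The second argument controls whether
    // the "sha1-" algorithm prefix is included in the output.
    assert(h.to_string(HashFormat::Nix32, false)
           == "vw46m23bizj4n8afrc0fj19wrp7mj3c0");
    return 0;
}
```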
doc/manual/rl-next/source-positions-in-errors.md (new file): 42 lines

@@ -0,0 +1,42 @@
+---
+synopsis: Source locations are printed more consistently in errors
+issues: 561
+prs: 9555
+---
+
+Source location information is now included in error messages more
+consistently. Given this code:
+
+```nix
+let
+  attr = {foo = "bar";};
+  key = {};
+in
+  attr.${key}
+```
+
+Previously, Nix would show this unhelpful message when attempting to evaluate
+it:
+
+```
+error:
+       … while evaluating an attribute name
+
+       error: value is a set while a string was expected
+```
+
+Now, the error message displays where the problematic value was found:
+
+```
+error:
+       … while evaluating an attribute name
+
+         at bad.nix:4:11:
+
+            3|   key = {};
+            4| in attr.${key}
+             |           ^
+            5|
+
+       error: value is a set while a string was expected
+```
@@ -257,17 +257,16 @@ User-visible changes should come with a release note.

 Here's what a complete entry looks like. The file name is not incorporated in the document.

 ```
 ---
 synopsis: Basically a title
-issues: #1234
-prs: #1238
-description: {
+issues: 1234
+prs: 1238
+---

 Here's one or more paragraphs that describe the change.

 - It's markdown
 - Add references to the manual using @docroot@
-
-}
 ```

 Significant changes should add the following header, which moves them to the top.
@@ -283,3 +282,45 @@ See also the [format documentation](https://github.com/haskell/cabal/blob/master

 Releases have a precomputed `rl-MAJOR.MINOR.md`, and no `rl-next.md`.
 Set `buildUnreleasedNotes = true;` in `flake.nix` to build the release notes on the fly.
+
+## Branches
+
+- [`master`](https://github.com/NixOS/nix/commits/master)
+
+  The main development branch. All changes are approved and merged here.
+  When developing a change, create a branch based on the latest `master`.
+
+  Maintainers try to [keep it in a release-worthy state](#reverting).
+
+- [`maintenance-*.*`](https://github.com/NixOS/nix/branches/all?query=maintenance)
+
+  These branches are the subject of backports only, and are
+  also [kept](#reverting) in a release-worthy state.
+
+  See [`maintainers/backporting.md`](https://github.com/NixOS/nix/blob/master/maintainers/backporting.md)
+
+- [`latest-release`](https://github.com/NixOS/nix/tree/latest-release)
+
+  The latest patch release of the latest minor version.
+
+  See [`maintainers/release-process.md`](https://github.com/NixOS/nix/blob/master/maintainers/release-process.md)
+
+- [`backport-*-to-*`](https://github.com/NixOS/nix/branches/all?query=backport)
+
+  Generally branches created by the backport action.
+
+  See [`maintainers/backporting.md`](https://github.com/NixOS/nix/blob/master/maintainers/backporting.md)
+
+- [_other_](https://github.com/NixOS/nix/branches/all)
+
+  Branches that do not conform to the above patterns should be feature branches.
+
+## Reverting
+
+If a change turns out to be merged by mistake, or to contain a regression, it may be reverted.
+A revert is not a rejection of the contribution, but merely part of an effective development process.
+It makes sure that development keeps running smoothly, with minimal uncertainty and less overhead.
+If maintainers had to worry too much about avoiding reverts, they would not be able to merge as much.
+By embracing reverts as a good part of the development process, everyone wins.
+
+However, taking a step back may be frustrating, so maintainers will be extra supportive on the next try.
@@ -1,10 +1,9 @@
 # Quick Start

-This chapter is for impatient people who don't like reading
-documentation. For more in-depth information you are kindly referred
-to subsequent chapters.
+This chapter is for impatient people who don't like reading documentation.
+For more in-depth information you are kindly referred to subsequent chapters.

-1. Install Nix by running the following:
+1. Install Nix:

    ```console
    $ curl -L https://nixos.org/nix/install | sh
@@ -13,87 +12,33 @@ to subsequent chapters.
    The install script will use `sudo`, so make sure you have sufficient rights.
    On Linux, `--daemon` can be omitted for a single-user install.

-   For other installation methods, see [here](installation/index.md).
+   For other installation methods, see the detailed [installation instructions](installation/index.md).

-1. See what installable packages are currently available in the
-   channel:
+1. Run software without installing it permanently:

    ```console
-   $ nix-env --query --available --attr-path
-   nixpkgs.docbook_xml_dtd_43 docbook-xml-4.3
-   nixpkgs.docbook_xml_dtd_45 docbook-xml-4.5
-   nixpkgs.firefox firefox-33.0.2
-   nixpkgs.hello hello-2.9
-   nixpkgs.libxslt libxslt-1.1.28
-   …
+   $ nix-shell --packages cowsay lolcat
    ```

-1. Install some packages from the channel:
+   This downloads the specified packages with all their dependencies, and drops you into a Bash shell where the commands provided by those packages are present.
+   This will not affect your normal environment:

    ```console
-   $ nix-env --install --attr nixpkgs.hello
+   [nix-shell:~]$ cowsay Hello, Nix! | lolcat
    ```

-   This should download pre-built packages; it should not build them
-   locally (if it does, something went wrong).
-
-1. Test that they work:
+   Exiting the shell will make the programs disappear again:

    ```console
-   $ which hello
-   /home/eelco/.nix-profile/bin/hello
-   $ hello
-   Hello, world!
-   ```
-
-1. Uninstall a package:
-
-   ```console
-   $ nix-env --uninstall hello
-   ```
-
-1. You can also test a package without installing it:
-
-   ```console
-   $ nix-shell --packages hello
-   ```
-
-   This builds or downloads GNU Hello and its dependencies, then drops
-   you into a Bash shell where the `hello` command is present, all
-   without affecting your normal environment:
-
-   ```console
-   [nix-shell:~]$ hello
-   Hello, world!
-
    [nix-shell:~]$ exit
-
-   $ hello
-   hello: command not found
+   $ lolcat
+   lolcat: command not found
    ```

-1. To keep up-to-date with the channel, do:
+1. Search for more packages on <search.nixos.org> to try them out.
+
+1. Free up storage space:

    ```console
-   $ nix-channel --update nixpkgs
-   $ nix-env --upgrade '*'
-   ```
-
-   The latter command will upgrade each installed package for which
-   there is a “newer” version (as determined by comparing the version
-   numbers).
-
-1. If you're unhappy with the result of a `nix-env` action (e.g., an
-   upgraded package turned out not to work properly), you can go back:
-
-   ```console
-   $ nix-env --rollback
-   ```
-
-1. You should periodically run the Nix garbage collector to get rid of
-   unused packages, since uninstalls or upgrades don't actually delete
-   them:
-
-   ```console
-   $ nix-collect-garbage --delete-old
+   $ nix-collect-garbage
    ```
@@ -18,7 +18,7 @@
 - `nix-shell` shebang lines now support single-quoted arguments.

 - `builtins.fetchTree` is now its own experimental feature, [`fetch-tree`](@docroot@/contributing/experimental-features.md#xp-fetch-tree).
-  As described in the documentation for that feature, this is because we anticipate polishing it and then stabilizing it before the rest of flakes.
+  This allows stabilising it independently of the rest of what is encompassed by [`flakes`](@docroot@/contributing/experimental-features.md#xp-fetch-tree).

 - The interface for creating and updating lock files has been overhauled:
@@ -29,15 +29,15 @@ supported settings for each store type are documented below.
 The special store URL `auto` causes Nix to automatically select a
 store as follows:

-* Use the [local store](#local-store) `/nix/store` if `/nix/var/nix`
+* Use the [local store](./local-store.md) `/nix/store` if `/nix/var/nix`
   is writable by the current user.

 * Otherwise, if `/nix/var/nix/daemon-socket/socket` exists, [connect
-  to the Nix daemon listening on that socket](#local-daemon-store).
+  to the Nix daemon listening on that socket](./local-daemon-store.md).

-* Otherwise, on Linux only, use the [local chroot store](#local-store)
+* Otherwise, on Linux only, use the [local chroot store](./local-store.md)
   `~/.local/share/nix/root`, which will be created automatically if it
   does not exist.

-* Otherwise, use the [local store](#local-store) `/nix/store`.
+* Otherwise, use the [local store](./local-store.md) `/nix/store`.
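For readers who want the `auto` selection rule at a glance, here is the same fallback ladder restated as a sketch. The helper name and boolean parameters are hypothetical, invented only to mirror the prose above; none of them correspond to functions in the Nix sources.

```cpp
// Hypothetical restatement of the `auto` store selection order (illustrative).
#include <string>

std::string selectAutoStore(bool nixVarWritable, bool daemonSocketExists, bool isLinux)
{
    if (nixVarWritable)     return "local";         // use /nix/store directly
    if (daemonSocketExists) return "daemon";        // talk to nix-daemon on its socket
    if (isLinux)            return "local chroot";  // ~/.local/share/nix/root
    return "local";                                 // plain /nix/store as last resort
}
```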
@@ -50,16 +50,16 @@
     },
     "nixpkgs": {
       "locked": {
-        "lastModified": 1700748986,
-        "narHash": "sha256-/nqLrNU297h3PCw4QyDpZKZEUHmialJdZW2ceYFobds=",
+        "lastModified": 1701355166,
+        "narHash": "sha256-4V7XMI0Gd+y0zsi++cEHd99u3GNL0xSTGRmiWKzGnUQ=",
         "owner": "NixOS",
         "repo": "nixpkgs",
-        "rev": "9ba29e2346bc542e9909d1021e8fd7d4b3f64db0",
+        "rev": "36c4ac09e9bebcec1fa7b7539cddb0c9e837409c",
         "type": "github"
       },
       "original": {
         "owner": "NixOS",
-        "ref": "nixos-23.05-small",
+        "ref": "staging-23.05",
         "repo": "nixpkgs",
         "type": "github"
       }
flake.nix: 18 changed lines

@@ -1,7 +1,13 @@
 {
   description = "The purely functional package manager";

-  inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-23.05-small";
+  # TODO Go back to nixos-23.05-small once
+  # https://github.com/NixOS/nixpkgs/pull/271202 is merged.
+  #
+  # Also, do not grab arbitrary further staging commits. This PR was
+  # carefully made to be based on release-23.05 and just contain
+  # rebuild-causing changes to packages that Nix actually uses.
+  inputs.nixpkgs.url = "github:NixOS/nixpkgs/staging-23.05";
   inputs.nixpkgs-regression.url = "github:NixOS/nixpkgs/215d4d0fd80ca5163643b03a33fde804a29cc1e2";
   inputs.lowdown-src = { url = "github:kristapsdz/lowdown"; flake = false; };
   inputs.flake-compat = { url = "github:edolstra/flake-compat"; flake = false; };
@@ -178,6 +184,8 @@
         ];
       });

+      changelog-d-nix = final.buildPackages.callPackage ./misc/changelog-d.nix { };
+
       nix =
         let
           officialRelease = false;
@@ -197,6 +205,7 @@
           libgit2 = final.libgit2-nix;
           lowdown = final.lowdown-nix;
           busybox-sandbox-shell = final.busybox-sandbox-shell or final.default-busybox-sandbox-shell;
+          changelog-d = final.changelog-d-nix;
         } // {
           # this is a proper separate downstream package, but put
           # here also for back compat reasons.
@@ -219,6 +228,8 @@
       # Binary package for various platforms.
       build = forAllSystems (system: self.packages.${system}.nix);

+      shellInputs = forAllSystems (system: self.devShells.${system}.default.inputDerivation);
+
       buildStatic = lib.genAttrs linux64BitSystems (system: self.packages.${system}.nix-static);

       buildCross = forAllCrossSystems (crossSystem:
@@ -342,6 +353,11 @@
         perlBindings = self.hydraJobs.perlBindings.${system};
         installTests = self.hydraJobs.installTests.${system};
         nixpkgsLibTests = self.hydraJobs.tests.nixpkgsLibTests.${system};
+        rl-next =
+          let pkgs = nixpkgsFor.${system}.native;
+          in pkgs.buildPackages.runCommand "test-rl-next-release-notes" { } ''
+            LANG=C.UTF-8 ${pkgs.changelog-d-nix}/bin/changelog-d ${./doc/manual/rl-next} >$out
+          '';
       } // (lib.optionalAttrs (builtins.elem system linux64BitSystems)) {
         dockerImage = self.hydraJobs.dockerImage.${system};
       });
misc/changelog-d.cabal.nix (new file): 31 lines

@@ -0,0 +1,31 @@
+{ mkDerivation, aeson, base, bytestring, cabal-install-parsers
+, Cabal-syntax, containers, directory, filepath, frontmatter
+, generic-lens-lite, lib, mtl, optparse-applicative, parsec, pretty
+, regex-applicative, text, pkgs
+}:
+let rev = "f30f6969e9cd8b56242309639d58acea21c99d06";
+in
+mkDerivation {
+  pname = "changelog-d";
+  version = "0.1";
+  src = pkgs.fetchurl {
+    name = "changelog-d-${rev}.tar.gz";
+    url = "https://codeberg.org/roberth/changelog-d/archive/${rev}.tar.gz";
+    hash = "sha256-8a2+i5u7YoszAgd5OIEW0eYUcP8yfhtoOIhLJkylYJ4=";
+  } // { inherit rev; };
+  isLibrary = false;
+  isExecutable = true;
+  libraryHaskellDepends = [
+    aeson base bytestring cabal-install-parsers Cabal-syntax containers
+    directory filepath frontmatter generic-lens-lite mtl parsec pretty
+    regex-applicative text
+  ];
+  executableHaskellDepends = [
+    base bytestring Cabal-syntax directory filepath
+    optparse-applicative
+  ];
+  doHaddock = false;
+  description = "Concatenate changelog entries into a single one";
+  license = lib.licenses.gpl3Plus;
+  mainProgram = "changelog-d";
+}
misc/changelog-d.nix (new file): 31 lines

@@ -0,0 +1,31 @@
+# Taken temporarily from <nixpkgs/pkgs/by-name/ch/changelog-d/package.nix>
+{
+  callPackage,
+  lib,
+  haskell,
+  haskellPackages,
+}:
+
+let
+  hsPkg = haskellPackages.callPackage ./changelog-d.cabal.nix { };
+
+  addCompletions = haskellPackages.generateOptparseApplicativeCompletions ["changelog-d"];
+
+  haskellModifications =
+    lib.flip lib.pipe [
+      addCompletions
+      haskell.lib.justStaticExecutables
+    ];
+
+  mkDerivationOverrides = finalAttrs: oldAttrs: {
+
+    version = oldAttrs.version + "-git-${lib.strings.substring 0 7 oldAttrs.src.rev}";
+
+    meta = oldAttrs.meta // {
+      homepage = "https://codeberg.org/roberth/changelog-d";
+      maintainers = [ lib.maintainers.roberth ];
+    };
+
+  };
+in
+  (haskellModifications hsPkg).overrideAttrs mkDerivationOverrides
@@ -78,7 +78,7 @@ SV * queryReferences(char * path)
 SV * queryPathHash(char * path)
     PPCODE:
         try {
-            auto s = store()->queryPathInfo(store()->parseStorePath(path))->narHash.to_string(HashFormat::Base32, true);
+            auto s = store()->queryPathInfo(store()->parseStorePath(path))->narHash.to_string(HashFormat::Nix32, true);
             XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
         } catch (Error & e) {
             croak("%s", e.what());
@@ -104,7 +104,7 @@ SV * queryPathInfo(char * path, int base32)
             XPUSHs(&PL_sv_undef);
         else
             XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(*info->deriver).c_str(), 0)));
-        auto s = info->narHash.to_string(base32 ? HashFormat::Base32 : HashFormat::Base16, true);
+        auto s = info->narHash.to_string(base32 ? HashFormat::Nix32 : HashFormat::Base16, true);
         XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
         mXPUSHi(info->registrationTime);
         mXPUSHi(info->narSize);
@@ -205,8 +205,8 @@ void importPaths(int fd, int dontCheckSigs)
 SV * hashPath(char * algo, int base32, char * path)
     PPCODE:
         try {
-            Hash h = hashPath(parseHashType(algo), path).first;
-            auto s = h.to_string(base32 ? HashFormat::Base32 : HashFormat::Base16, false);
+            Hash h = hashPath(parseHashAlgo(algo), path).first;
+            auto s = h.to_string(base32 ? HashFormat::Nix32 : HashFormat::Base16, false);
             XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
         } catch (Error & e) {
             croak("%s", e.what());
@@ -216,8 +216,8 @@ SV * hashPath(char * algo, int base32, char * path)
 SV * hashFile(char * algo, int base32, char * path)
     PPCODE:
         try {
-            Hash h = hashFile(parseHashType(algo), path);
-            auto s = h.to_string(base32 ? HashFormat::Base32 : HashFormat::Base16, false);
+            Hash h = hashFile(parseHashAlgo(algo), path);
+            auto s = h.to_string(base32 ? HashFormat::Nix32 : HashFormat::Base16, false);
             XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
         } catch (Error & e) {
             croak("%s", e.what());
@@ -227,8 +227,8 @@ SV * hashFile(char * algo, int base32, char * path)
 SV * hashString(char * algo, int base32, char * s)
     PPCODE:
         try {
-            Hash h = hashString(parseHashType(algo), s);
-            auto s = h.to_string(base32 ? HashFormat::Base32 : HashFormat::Base16, false);
+            Hash h = hashString(parseHashAlgo(algo), s);
+            auto s = h.to_string(base32 ? HashFormat::Nix32 : HashFormat::Base16, false);
             XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
         } catch (Error & e) {
             croak("%s", e.what());
@@ -238,8 +238,8 @@ SV * hashString(char * algo, int base32, char * s)
 SV * convertHash(char * algo, char * s, int toBase32)
     PPCODE:
         try {
-            auto h = Hash::parseAny(s, parseHashType(algo));
-            auto s = h.to_string(toBase32 ? HashFormat::Base32 : HashFormat::Base16, false);
+            auto h = Hash::parseAny(s, parseHashAlgo(algo));
+            auto s = h.to_string(toBase32 ? HashFormat::Nix32 : HashFormat::Base16, false);
             XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
         } catch (Error & e) {
             croak("%s", e.what());
@@ -281,7 +281,7 @@ SV * addToStore(char * srcPath, int recursive, char * algo)
     PPCODE:
         try {
             auto method = recursive ? FileIngestionMethod::Recursive : FileIngestionMethod::Flat;
-            auto path = store()->addToStore(std::string(baseNameOf(srcPath)), srcPath, method, parseHashType(algo));
+            auto path = store()->addToStore(std::string(baseNameOf(srcPath)), srcPath, method, parseHashAlgo(algo));
             XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(path).c_str(), 0)));
         } catch (Error & e) {
             croak("%s", e.what());
@@ -291,7 +291,7 @@ SV * addToStore(char * srcPath, int recursive, char * algo)
 SV * makeFixedOutputPath(int recursive, char * algo, char * hash, char * name)
     PPCODE:
         try {
-            auto h = Hash::parseAny(hash, parseHashType(algo));
+            auto h = Hash::parseAny(hash, parseHashAlgo(algo));
             auto method = recursive ? FileIngestionMethod::Recursive : FileIngestionMethod::Flat;
             auto path = store()->makeFixedOutputPath(name, FixedOutputInfo {
                 .method = method,
@@ -1,4 +1,5 @@
 #include "command.hh"
+#include "markdown.hh"
 #include "store-api.hh"
 #include "local-fs-store.hh"
 #include "derivations.hh"
@@ -34,6 +35,19 @@ nlohmann::json NixMultiCommand::toJSON()
     return MultiCommand::toJSON();
 }

+void NixMultiCommand::run()
+{
+    if (!command) {
+        std::set<std::string> subCommandTextLines;
+        for (auto & [name, _] : commands)
+            subCommandTextLines.insert(fmt("- `%s`", name));
+        std::string markdownError = fmt("`nix %s` requires a sub-command. Available sub-commands:\n\n%s\n",
+            commandName, concatStringsSep("\n", subCommandTextLines));
+        throw UsageError(renderMarkdownToTerminal(markdownError));
+    }
+    command->second->run();
+}
+
 StoreCommand::StoreCommand()
 {
 }
@@ -26,9 +26,9 @@ static constexpr Command::Category catNixInstallation = 102;

 static constexpr auto installablesCategory = "Options that change the interpretation of [installables](@docroot@/command-ref/new-cli/nix.md#installables)";

-struct NixMultiCommand : virtual MultiCommand, virtual Command
+struct NixMultiCommand : MultiCommand, virtual Command
 {
     nlohmann::json toJSON() override;
+
+    using MultiCommand::MultiCommand;
+
+    virtual void run() override;
 };

 // For the overloaded run methods
@@ -260,9 +260,10 @@ void SourceExprCommand::completeInstallable(AddCompletions & completions, std::s

     evalSettings.pureEval = false;
     auto state = getEvalState();
-    Expr *e = state->parseExprFromFile(
-        resolveExprPath(state->checkSourcePath(lookupFileArg(*state, *file)))
-        );
+    auto e =
+        state->parseExprFromFile(
+            resolveExprPath(
+                lookupFileArg(*state, *file)));

     Value root;
     state->eval(e, root);
@@ -103,8 +103,10 @@ void EvalState::forceValue(Value & v, Callable getPos)
             throw;
         }
     }
-    else if (v.isApp())
-        callFunction(*v.app.left, *v.app.right, v, noPos);
+    else if (v.isApp()) {
+        PosIdx pos = getPos();
+        callFunction(*v.app.left, *v.app.right, v, pos);
+    }
     else if (v.isBlackhole())
         error("infinite recursion encountered").atPos(getPos()).template debugThrow<EvalError>();
 }
@@ -121,9 +123,9 @@ template <typename Callable>
 [[gnu::always_inline]]
 inline void EvalState::forceAttrs(Value & v, Callable getPos, std::string_view errorCtx)
 {
-    forceValue(v, noPos);
+    PosIdx pos = getPos();
+    forceValue(v, pos);
     if (v.type() != nAttrs) {
-        PosIdx pos = getPos();
         error("value is %1% while a set was expected", showType(v)).withTrace(pos, errorCtx).debugThrow<TypeError>();
     }
 }
@@ -132,7 +134,7 @@ inline void EvalState::forceAttrs(Value & v, Callable getPos, std::string_view e
 [[gnu::always_inline]]
 inline void EvalState::forceList(Value & v, const PosIdx pos, std::string_view errorCtx)
 {
-    forceValue(v, noPos);
+    forceValue(v, pos);
     if (!v.isList()) {
         error("value is %1% while a list was expected", showType(v)).withTrace(pos, errorCtx).debugThrow<TypeError>();
     }
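The hunks above all replace `noPos` with a real position, threaded through the `Callable getPos` parameter. The point of passing a callable rather than a `PosIdx` is that the position is only computed when something actually needs it, so the hot success path stays cheap. The following stand-alone sketch illustrates that pattern with simplified stand-in types; it is not Nix's actual `forceValue`, and `PosIdx`/`noPos` here are local fakes.

```cpp
// Illustration of the lazy-position pattern used by forceValue/forceAttrs:
// the caller passes a callable producing a position, evaluated only when
// an error (or, after this change, a function call) actually requires it.
#include <cstdint>
#include <stdexcept>
#include <string>

using PosIdx = uint32_t;          // stand-in for nix::PosIdx
constexpr PosIdx noPos = 0;       // stand-in sentinel, unused on the fast path

template <typename Callable>
void forceValueSketch(bool needsWork, bool fails, Callable getPos)
{
    if (!needsWork)
        return;                    // fast path: position never computed
    PosIdx pos = getPos();         // computed once, only when required
    if (fails)
        throw std::runtime_error("error at position " + std::to_string(pos));
    // ... otherwise continue evaluating, passing `pos` down so nested
    // errors can point at the right source location ...
}

int main()
{
    // The lambda defers a potentially costly position lookup.
    forceValueSketch(true, false, [] { return PosIdx{42}; });
}
```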
@@ -14,6 +14,7 @@
 #include "profiles.hh"
 #include "print.hh"
 #include "fs-input-accessor.hh"
+#include "filtering-input-accessor.hh"
 #include "memory-input-accessor.hh"
 #include "signals.hh"
 #include "gc-small-vector.hh"
@@ -344,7 +345,7 @@ static Symbol getName(const AttrName & name, EvalState & state, Env & env)
     } else {
         Value nameValue;
         name.expr->eval(state, env, nameValue);
-        state.forceStringNoCtx(nameValue, noPos, "while evaluating an attribute name");
+        state.forceStringNoCtx(nameValue, name.expr->getPos(), "while evaluating an attribute name");
         return state.symbols.create(nameValue.string_view());
     }
 }
@@ -509,7 +510,16 @@ EvalState::EvalState(
     , sOutputSpecified(symbols.create("outputSpecified"))
     , repair(NoRepair)
     , emptyBindings(0)
-    , rootFS(makeFSInputAccessor(CanonPath::root))
+    , rootFS(
+        evalSettings.restrictEval || evalSettings.pureEval
+        ? ref<InputAccessor>(AllowListInputAccessor::create(makeFSInputAccessor(CanonPath::root), {},
+            [](const CanonPath & path) -> RestrictedPathError {
+                auto modeInformation = evalSettings.pureEval
+                    ? "in pure evaluation mode (use '--impure' to override)"
+                    : "in restricted mode";
+                throw RestrictedPathError("access to absolute path '%1%' is forbidden %2%", path, modeInformation);
+            }))
+        : makeFSInputAccessor(CanonPath::root))
     , corepkgsFS(makeMemoryInputAccessor())
     , internalFS(makeMemoryInputAccessor())
     , derivationInternal{corepkgsFS->addFile(
@@ -551,28 +561,10 @@ EvalState::EvalState(
         searchPath.elements.emplace_back(SearchPath::Elem::parse(i));
     }

-    if (evalSettings.restrictEval || evalSettings.pureEval) {
-        allowedPaths = PathSet();
-
-        for (auto & i : searchPath.elements) {
-            auto r = resolveSearchPathPath(i.path);
-            if (!r) continue;
-
-            auto path = std::move(*r);
-
-            if (store->isInStore(path)) {
-                try {
-                    StorePathSet closure;
-                    store->computeFSClosure(store->toStorePath(path).first, closure);
-                    for (auto & path : closure)
-                        allowPath(path);
-                } catch (InvalidPath &) {
-                    allowPath(path);
-                }
-            } else
-                allowPath(path);
-        }
-    }
+    /* Allow access to all paths in the search path. */
+    if (rootFS.dynamic_pointer_cast<AllowListInputAccessor>())
+        for (auto & i : searchPath.elements)
+            resolveSearchPathPath(i.path, true);

     corepkgsFS->addFile(
         CanonPath("fetchurl.nix"),
@@ -590,14 +582,14 @@ EvalState::~EvalState()

 void EvalState::allowPath(const Path & path)
 {
-    if (allowedPaths)
-        allowedPaths->insert(path);
+    if (auto rootFS2 = rootFS.dynamic_pointer_cast<AllowListInputAccessor>())
+        rootFS2->allowPath(CanonPath(path));
 }

 void EvalState::allowPath(const StorePath & storePath)
 {
-    if (allowedPaths)
-        allowedPaths->insert(store->toRealPath(storePath));
+    if (auto rootFS2 = rootFS.dynamic_pointer_cast<AllowListInputAccessor>())
+        rootFS2->allowPath(CanonPath(store->toRealPath(storePath)));
 }

 void EvalState::allowAndSetStorePathString(const StorePath & storePath, Value & v)
@@ -607,54 +599,6 @@ void EvalState::allowAndSetStorePathString(const StorePath & storePath, Value &
     mkStorePathString(storePath, v);
 }

-SourcePath EvalState::checkSourcePath(const SourcePath & path_)
-{
-    // Don't check non-rootFS accessors, they're in a different namespace.
-    if (path_.accessor != ref<InputAccessor>(rootFS)) return path_;
-
-    if (!allowedPaths) return path_;
-
-    auto i = resolvedPaths.find(path_.path.abs());
-    if (i != resolvedPaths.end())
-        return i->second;
-
-    bool found = false;
-
-    /* First canonicalize the path without symlinks, so we make sure an
-     * attacker can't append ../../... to a path that would be in allowedPaths
-     * and thus leak symlink targets.
-     */
-    Path abspath = canonPath(path_.path.abs());
-
-    for (auto & i : *allowedPaths) {
-        if (isDirOrInDir(abspath, i)) {
-            found = true;
-            break;
-        }
-    }
-
-    if (!found) {
-        auto modeInformation = evalSettings.pureEval
-            ? "in pure eval mode (use '--impure' to override)"
-            : "in restricted mode";
-        throw RestrictedPathError("access to absolute path '%1%' is forbidden %2%", abspath, modeInformation);
-    }
-
-    /* Resolve symlinks. */
-    debug("checking access to '%s'", abspath);
-    SourcePath path = rootPath(CanonPath(canonPath(abspath, true)));
-
-    for (auto & i : *allowedPaths) {
-        if (isDirOrInDir(path.path.abs(), i)) {
-            resolvedPaths.insert_or_assign(path_.path.abs(), path);
-            return path;
-        }
-    }
-
-    throw RestrictedPathError("access to canonical path '%1%' is forbidden in restricted mode", path);
-}
-
-
 void EvalState::checkURI(const std::string & uri)
 {
     if (!evalSettings.restrictEval) return;
@@ -674,12 +618,14 @@ void EvalState::checkURI(const std::string & uri)
     /* If the URI is a path, then check it against allowedPaths as
        well. */
     if (hasPrefix(uri, "/")) {
-        checkSourcePath(rootPath(CanonPath(uri)));
+        if (auto rootFS2 = rootFS.dynamic_pointer_cast<AllowListInputAccessor>())
+            rootFS2->checkAccess(CanonPath(uri));
         return;
     }

     if (hasPrefix(uri, "file://")) {
-        checkSourcePath(rootPath(CanonPath(std::string(uri, 7))));
+        if (auto rootFS2 = rootFS.dynamic_pointer_cast<AllowListInputAccessor>())
+            rootFS2->checkAccess(CanonPath(uri.substr(7)));
         return;
     }
@@ -1181,10 +1127,8 @@ Value * ExprPath::maybeThunk(EvalState & state, Env & env)
 }


-void EvalState::evalFile(const SourcePath & path_, Value & v, bool mustBeTrivial)
+void EvalState::evalFile(const SourcePath & path, Value & v, bool mustBeTrivial)
 {
-    auto path = checkSourcePath(path_);
-
     FileEvalCache::iterator i;
     if ((i = fileEvalCache.find(path)) != fileEvalCache.end()) {
         v = i->second;
@@ -1205,7 +1149,7 @@ void EvalState::evalFile(const SourcePath & path, Value & v, bool mustBeTrivial
         e = j->second;

     if (!e)
-        e = parseExprFromFile(checkSourcePath(resolvedPath));
+        e = parseExprFromFile(resolvedPath);

     fileParseCache[resolvedPath] = e;

@@ -1514,7 +1458,7 @@ void ExprOpHasAttr::eval(EvalState & state, Env & env, Value & v)
     e->eval(state, env, vTmp);

     for (auto & i : attrPath) {
-        state.forceValue(*vAttrs, noPos);
+        state.forceValue(*vAttrs, getPos());
         Bindings::iterator j;
         auto name = getName(i, state, env);
         if (vAttrs->type() != nAttrs ||
@@ -1683,7 +1627,7 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value &
             if (countCalls) primOpCalls[name]++;

             try {
-                vCur.primOp->fun(*this, noPos, args, vCur);
+                vCur.primOp->fun(*this, vCur.determinePos(noPos), args, vCur);
             } catch (Error & e) {
                 addErrorTrace(e, pos, "while calling the '%1%' builtin", name);
                 throw;
@@ -1731,7 +1675,7 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value &
                 // 1. Unify this and above code. Heavily redundant.
                 // 2. Create a fake env (arg1, arg2, etc.) and a fake expr (arg1: arg2: etc: builtins.name arg1 arg2 etc)
                 // so the debugger allows to inspect the wrong parameters passed to the builtin.
-                primOp->primOp->fun(*this, noPos, vArgs, vCur);
+                primOp->primOp->fun(*this, vCur.determinePos(noPos), vArgs, vCur);
             } catch (Error & e) {
                 addErrorTrace(e, pos, "while calling the '%1%' builtin", name);
                 throw;
@@ -1839,7 +1783,7 @@ https://nixos.org/manual/nix/stable/language/constructs.html#functions.)", symbo
         }
     }

-    callFunction(fun, allocValue()->mkAttrs(attrs), res, noPos);
+    callFunction(fun, allocValue()->mkAttrs(attrs), res, pos);
 }


@@ -1875,7 +1819,7 @@ void ExprAssert::eval(EvalState & state, Env & env, Value & v)

 void ExprOpNot::eval(EvalState & state, Env & env, Value & v)
 {
-    v.mkBool(!state.evalBool(env, e, noPos, "in the argument of the not operator")); // XXX: FIXME: !
+    v.mkBool(!state.evalBool(env, e, getPos(), "in the argument of the not operator")); // XXX: FIXME: !
 }


@@ -2316,7 +2260,7 @@ BackedStringView EvalState::coerceToString(
             std::string result;
             for (auto [n, v2] : enumerate(v.listItems())) {
                 try {
-                    result += *coerceToString(noPos, *v2, context,
+                    result += *coerceToString(pos, *v2, context,
                         "while evaluating one element of the list",
                         coerceMore, copyToStore, canonicalizePath);
                 } catch (Error & e) {
@@ -2463,8 +2407,8 @@ SingleDerivedPath EvalState::coerceToSingleDerivedPath(const PosIdx pos, Value &

 bool EvalState::eqValues(Value & v1, Value & v2, const PosIdx pos, std::string_view errorCtx)
 {
-    forceValue(v1, noPos);
-    forceValue(v2, noPos);
+    forceValue(v1, pos);
+    forceValue(v2, pos);

     /* !!! Hack to support some old broken code that relies on pointer
        equality tests between sets. (Specifically, builderDefs calls
@@ -30,7 +30,6 @@ class EvalState;
 class StorePath;
 struct SingleDerivedPath;
 enum RepairFlag : bool;
-struct FSInputAccessor;
 struct MemoryInputAccessor;


@@ -217,18 +216,12 @@ public:
      */
     RepairFlag repair;

-    /**
-     * The allowed filesystem paths in restricted or pure evaluation
-     * mode.
-     */
-    std::optional<PathSet> allowedPaths;
-
     Bindings emptyBindings;

     /**
      * The accessor for the root filesystem.
      */
-    const ref<FSInputAccessor> rootFS;
+    const ref<InputAccessor> rootFS;

     /**
      * The in-memory filesystem for <nix/...> paths.
@@ -396,12 +389,6 @@ public:
      */
     void allowAndSetStorePathString(const StorePath & storePath, Value & v);

-    /**
-     * Check whether access to a path is allowed and throw an error if
-     * not. Otherwise return the canonicalised path.
-     */
-    SourcePath checkSourcePath(const SourcePath & path);
-
     void checkURI(const std::string & uri);

     /**
@@ -445,13 +432,15 @@ public:
     SourcePath findFile(const SearchPath & searchPath, const std::string_view path, const PosIdx pos = noPos);

     /**
-     * Try to resolve a search path value (not the optional key part)
+     * Try to resolve a search path value (not the optional key part).
+     *
+     * If the specified search path element is a URI, download it.
      *
      * If it is not found, return `std::nullopt`
      */
-    std::optional<std::string> resolveSearchPathPath(const SearchPath::Path & path);
+    std::optional<std::string> resolveSearchPathPath(
+        const SearchPath::Path & elem,
+        bool initAccessControl = false);

     /**
      * Evaluate an expression to normal form
@@ -756,6 +745,13 @@ public:
      */
     [[nodiscard]] StringMap realiseContext(const NixStringContext & context);

+    /* Call the binary path filter predicate used builtins.path etc. */
+    bool callPathFilter(
+        Value * filterFun,
+        const SourcePath & path,
+        std::string_view pathArg,
+        PosIdx pos);
+
 private:

     /**
@@ -904,7 +904,7 @@ Fingerprint LockedFlake::getFingerprint() const
     // FIXME: as an optimization, if the flake contains a lock file
     // and we haven't changed it, then it's sufficient to use
     // flake.sourceInfo.storePath for the fingerprint.
-    return hashString(htSHA256,
+    return hashString(HashAlgorithm::SHA256,
         fmt("%s;%s;%d;%d;%s",
             flake.storePath.to_string(),
             flake.lockedRef.subdir,
@@ -405,6 +405,7 @@ struct ExprOpNot : Expr
 {
     Expr * e;
     ExprOpNot(Expr * e) : e(e) { };
+    PosIdx getPos() const override { return e->getPos(); }
     COMMON_METHODS
 };
@@ -692,16 +692,17 @@ SourcePath resolveExprPath(SourcePath path)

     /* If `path' is a symlink, follow it. This is so that relative
        path references work. */
-    while (true) {
+    while (!path.path.isRoot()) {
         // Basic cycle/depth limit to avoid infinite loops.
         if (++followCount >= maxFollow)
             throw Error("too many symbolic links encountered while traversing the path '%s'", path);
-        if (path.lstat().type != InputAccessor::tSymlink) break;
-        path = {path.accessor, CanonPath(path.readLink(), path.path.parent().value_or(CanonPath::root))};
+        auto p = path.parent().resolveSymlinks() + path.baseName();
+        if (p.lstat().type != InputAccessor::tSymlink) break;
+        path = {path.accessor, CanonPath(p.readLink(), path.path.parent().value_or(CanonPath::root))};
     }

     /* If `path' refers to a directory, append `/default.nix'. */
-    if (path.lstat().type == InputAccessor::tDirectory)
+    if (path.resolveSymlinks().lstat().type == InputAccessor::tDirectory)
         return path + "default.nix";

     return path;
|
||||
|
||||
Expr * EvalState::parseExprFromFile(const SourcePath & path, std::shared_ptr<StaticEnv> & staticEnv)
|
||||
{
|
||||
auto buffer = path.readFile();
|
||||
auto buffer = path.resolveSymlinks().readFile();
|
||||
// readFile hopefully have left some extra space for terminators
|
||||
buffer.append("\0\0", 2);
|
||||
return parse(buffer.data(), buffer.size(), Pos::Origin(path), path.parent(), staticEnv);
|
||||
@@ -783,7 +784,7 @@ SourcePath EvalState::findFile(const SearchPath & searchPath, const std::string_
 }


-std::optional<std::string> EvalState::resolveSearchPathPath(const SearchPath::Path & value0)
+std::optional<std::string> EvalState::resolveSearchPathPath(const SearchPath::Path & value0, bool initAccessControl)
 {
     auto & value = value0.s;
     auto i = searchPathResolved.find(value);
@@ -800,7 +801,6 @@ std::optional<std::string> EvalState::resolveSearchPathPath(const SearchPath::Pa
             logWarning({
                 .msg = hintfmt("Nix search path entry '%1%' cannot be downloaded, ignoring", value)
             });
-            res = std::nullopt;
         }
     }

@@ -814,6 +814,20 @@ std::optional<std::string> EvalState::resolveSearchPathPath(const SearchPath::Pa

     else {
         auto path = absPath(value);
+
+        /* Allow access to paths in the search path. */
+        if (initAccessControl) {
+            allowPath(path);
+            if (store->isInStore(path)) {
+                try {
+                    StorePathSet closure;
+                    store->computeFSClosure(store->toStorePath(path).first, closure);
+                    for (auto & p : closure)
+                        allowPath(p);
+                } catch (InvalidPath &) { }
+            }
+        }
+
         if (pathExists(path))
             res = { path };
         else {
@@ -829,7 +843,7 @@ std::optional<std::string> EvalState::resolveSearchPathPath(const SearchPath::Pa
     else
         debug("failed to resolve search path element '%s'", value);

-    searchPathResolved[value] = res;
+    searchPathResolved.emplace(value, res);
     return res;
 }
@@ -15,6 +15,7 @@
 #include "value-to-json.hh"
 #include "value-to-xml.hh"
 #include "primops.hh"
+#include "fs-input-accessor.hh"

 #include <boost/container/small_vector.hpp>
 #include <nlohmann/json.hpp>
@@ -90,9 +91,8 @@ StringMap EvalState::realiseContext(const NixStringContext & context)
     for (auto & [outputName, outputPath] : outputs) {
         /* Add the output of this derivations to the allowed
            paths. */
-        if (allowedPaths) {
-            allowPath(outputPath);
-        }
+        allowPath(store->toRealPath(outputPath));

         /* Get all the output paths corresponding to the placeholders we had */
         if (experimentalFeatureSettings.isEnabled(Xp::CaDerivations)) {
             res.insert_or_assign(
@@ -110,27 +110,19 @@ StringMap EvalState::realiseContext(const NixStringContext & context)
     return res;
 }

-struct RealisePathFlags {
-    // Whether to check that the path is allowed in pure eval mode
-    bool checkForPureEval = true;
-};
-
-static SourcePath realisePath(EvalState & state, const PosIdx pos, Value & v, const RealisePathFlags flags = {})
+static SourcePath realisePath(EvalState & state, const PosIdx pos, Value & v, bool resolveSymlinks = true)
 {
     NixStringContext context;

     auto path = state.coerceToPath(noPos, v, context, "while realising the context of a path");

     try {
-        if (!context.empty()) {
+        if (!context.empty() && path.accessor == state.rootFS) {
             auto rewrites = state.realiseContext(context);
             auto realPath = state.toRealPath(rewriteStrings(path.path.abs(), rewrites), context);
-            return {path.accessor, CanonPath(realPath)};
+            path = {path.accessor, CanonPath(realPath)};
         }
-
-        return flags.checkForPureEval
-            ? state.checkSourcePath(path)
-            : path;
+        return resolveSymlinks ? path.resolveSymlinks() : path;
     } catch (Error & e) {
         e.addTrace(state.positions[pos], "while realising the context of path '%s'", path);
         throw;
@@ -170,7 +162,7 @@ static void mkOutputString(
    argument. */
 static void import(EvalState & state, const PosIdx pos, Value & vPath, Value * vScope, Value & v)
 {
-    auto path = realisePath(state, pos, vPath);
+    auto path = realisePath(state, pos, vPath, false);
     auto path2 = path.path.abs();

     // FIXME
@@ -1317,7 +1309,7 @@ drvName, Bindings * attrs, Value & v)
                 .errPos = state.positions[noPos]
             }));

-        auto h = newHashAllowEmpty(*outputHash, parseHashTypeOpt(outputHashAlgo));
+        auto h = newHashAllowEmpty(*outputHash, parseHashAlgoOpt(outputHashAlgo));

         auto method = ingestionMethod.value_or(FileIngestionMethod::Flat);

@@ -1339,7 +1331,7 @@ drvName, Bindings * attrs, Value & v)
             .errPos = state.positions[noPos]
         });

-        auto ht = parseHashTypeOpt(outputHashAlgo).value_or(htSHA256);
+        auto ha = parseHashAlgoOpt(outputHashAlgo).value_or(HashAlgorithm::SHA256);
         auto method = ingestionMethod.value_or(FileIngestionMethod::Recursive);

         for (auto & i : outputs) {
@@ -1348,13 +1340,13 @@ drvName, Bindings * attrs, Value & v)
             drv.outputs.insert_or_assign(i,
                 DerivationOutput::Impure {
                     .method = method,
-                    .hashType = ht,
+                    .hashAlgo = ha,
                 });
             else
                 drv.outputs.insert_or_assign(i,
                     DerivationOutput::CAFloating {
                         .method = method,
-                        .hashType = ht,
+                        .hashAlgo = ha,
                     });
         }
     }
@@ -1493,7 +1485,7 @@ static void prim_storePath(EvalState & state, const PosIdx pos, Value * * args,
         }));

     NixStringContext context;
-    auto path = state.checkSourcePath(state.coerceToPath(pos, *args[0], context, "while evaluating the first argument passed to 'builtins.storePath'")).path;
+    auto path = state.coerceToPath(pos, *args[0], context, "while evaluating the first argument passed to 'builtins.storePath'").path;
     /* Resolve symlinks in ‘path’, unless ‘path’ itself is a symlink
        directly in the store. The latter condition is necessary so
        e.g. nix-push does the right thing. */
@@ -1533,29 +1525,19 @@ static RegisterPrimOp primop_storePath({

 static void prim_pathExists(EvalState & state, const PosIdx pos, Value * * args, Value & v)
 {
-    auto & arg = *args[0];
-
-    /* We don’t check the path right now, because we don’t want to
-       throw if the path isn’t allowed, but just return false (and we
-       can’t just catch the exception here because we still want to
-       throw if something in the evaluation of `arg` tries to
-       access an unauthorized path). */
-    auto path = realisePath(state, pos, arg, { .checkForPureEval = false });
-
-    /* SourcePath doesn't know about trailing slash. */
-    auto mustBeDir = arg.type() == nString
-        && (arg.string_view().ends_with("/")
-            || arg.string_view().ends_with("/."));
-
     try {
-        auto checked = state.checkSourcePath(path);
-        auto st = checked.maybeLstat();
+        auto & arg = *args[0];
+
+        auto path = realisePath(state, pos, arg);
+
+        /* SourcePath doesn't know about trailing slash. */
+        auto mustBeDir = arg.type() == nString
+            && (arg.string_view().ends_with("/")
+                || arg.string_view().ends_with("/."));
+
+        auto st = path.maybeLstat();
         auto exists = st && (!mustBeDir || st->type == SourceAccessor::tDirectory);
         v.mkBool(exists);
     } catch (SysError & e) {
-        /* Don't give away info from errors while canonicalising
-           ‘path’ in restricted mode. */
-        v.mkBool(false);
-    } catch (RestrictedPathError & e) {
         v.mkBool(false);
     }
@@ -1699,7 +1681,7 @@ static void prim_findFile(EvalState & state, const PosIdx pos, Value * * args, V

     auto path = state.forceStringNoCtx(*args[1], pos, "while evaluating the second argument passed to builtins.findFile");

-    v.mkPath(state.checkSourcePath(state.findFile(searchPath, path, pos)));
+    v.mkPath(state.findFile(searchPath, path, pos));
 }

 static RegisterPrimOp primop_findFile(PrimOp {
@@ -1754,17 +1736,17 @@ static RegisterPrimOp primop_findFile(PrimOp {
 /* Return the cryptographic hash of a file in base-16. */
 static void prim_hashFile(EvalState & state, const PosIdx pos, Value * * args, Value & v)
 {
-    auto type = state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.hashFile");
-    std::optional<HashType> ht = parseHashType(type);
-    if (!ht)
+    auto algo = state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.hashFile");
+    std::optional<HashAlgorithm> ha = parseHashAlgo(algo);
+    if (!ha)
         state.debugThrowLastTrace(Error({
-            .msg = hintfmt("unknown hash type '%1%'", type),
+            .msg = hintfmt("unknown hash algo '%1%'", algo),
             .errPos = state.positions[pos]
         }));

     auto path = realisePath(state, pos, *args[1]);

-    v.mkString(hashString(*ht, path.readFile()).to_string(HashFormat::Base16, false));
+    v.mkString(hashString(*ha, path.readFile()).to_string(HashFormat::Base16, false));
 }

 static RegisterPrimOp primop_hashFile({
@@ -1789,7 +1771,7 @@ static std::string_view fileTypeToString(InputAccessor::Type type)

 static void prim_readFileType(EvalState & state, const PosIdx pos, Value * * args, Value & v)
 {
-    auto path = realisePath(state, pos, *args[0]);
+    auto path = realisePath(state, pos, *args[0], false);
     /* Retrieve the directory entry type and stringize it. */
     v.mkString(fileTypeToString(path.lstat().type));
 }
@ -2178,11 +2160,35 @@ static RegisterPrimOp primop_toFile({
    .fun = prim_toFile,
});

bool EvalState::callPathFilter(
    Value * filterFun,
    const SourcePath & path,
    std::string_view pathArg,
    PosIdx pos)
{
    auto st = path.lstat();

    /* Call the filter function. The first argument is the path, the
       second is a string indicating the type of the file. */
    Value arg1;
    arg1.mkString(pathArg);

    Value arg2;
    // assert that type is not "unknown"
    arg2.mkString(fileTypeToString(st.type));

    Value * args []{&arg1, &arg2};
    Value res;
    callFunction(*filterFun, 2, args, res, pos);

    return forceBool(res, pos, "while evaluating the return value of the path filter function");
}

static void addPath(
    EvalState & state,
    const PosIdx pos,
    std::string_view name,
    Path path,
    SourcePath path,
    Value * filterFun,
    FileIngestionMethod method,
    const std::optional<Hash> expectedHash,
@ -2190,48 +2196,29 @@ static void addPath(
    const NixStringContext & context)
{
    try {
        // FIXME: handle CA derivation outputs (where path needs to
        // be rewritten to the actual output).
        auto rewrites = state.realiseContext(context);
        path = state.toRealPath(rewriteStrings(path, rewrites), context);

        StorePathSet refs;

        if (state.store->isInStore(path)) {
        if (path.accessor == state.rootFS && state.store->isInStore(path.path.abs())) {
            // FIXME: handle CA derivation outputs (where path needs to
            // be rewritten to the actual output).
            auto rewrites = state.realiseContext(context);
            path = {state.rootFS, CanonPath(state.toRealPath(rewriteStrings(path.path.abs(), rewrites), context))};

            try {
                auto [storePath, subPath] = state.store->toStorePath(path);
                auto [storePath, subPath] = state.store->toStorePath(path.path.abs());
                // FIXME: we should scanForReferences on the path before adding it
                refs = state.store->queryPathInfo(storePath)->references;
                path = state.store->toRealPath(storePath) + subPath;
                path = {state.rootFS, CanonPath(state.store->toRealPath(storePath) + subPath)};
            } catch (Error &) { // FIXME: should be InvalidPathError
            }
        }

        path = evalSettings.pureEval && expectedHash
            ? path
            : state.checkSourcePath(state.rootPath(CanonPath(path))).path.abs();

        PathFilter filter = filterFun ? ([&](const Path & path) {
            auto st = lstat(path);

            /* Call the filter function. The first argument is the path,
               the second is a string indicating the type of the file. */
            Value arg1;
            arg1.mkString(path);

            Value arg2;
            arg2.mkString(
                S_ISREG(st.st_mode) ? "regular" :
                S_ISDIR(st.st_mode) ? "directory" :
                S_ISLNK(st.st_mode) ? "symlink" :
                "unknown" /* not supported, will fail! */);

            Value * args []{&arg1, &arg2};
            Value res;
            state.callFunction(*filterFun, 2, args, res, pos);

            return state.forceBool(res, pos, "while evaluating the return value of the path filter function");
        }) : defaultPathFilter;
        std::unique_ptr<PathFilter> filter;
        if (filterFun)
            filter = std::make_unique<PathFilter>([&](const Path & p) {
                auto p2 = CanonPath(p);
                return state.callPathFilter(filterFun, {path.accessor, p2}, p2.abs(), pos);
            });

        std::optional<StorePath> expectedStorePath;
        if (expectedHash)
@ -2242,7 +2229,7 @@ static void addPath(
            });

        if (!expectedHash || !state.store->isValidPath(*expectedStorePath)) {
            auto dstPath = state.rootPath(CanonPath(path)).fetchToStore(state.store, name, method, &filter, state.repair);
            auto dstPath = path.fetchToStore(state.store, name, method, filter.get(), state.repair);
            if (expectedHash && expectedStorePath != dstPath)
                state.debugThrowLastTrace(Error("store path mismatch in (possibly filtered) path added from '%s'", path));
            state.allowAndSetStorePathString(dstPath, v);
@ -2261,7 +2248,8 @@ static void prim_filterSource(EvalState & state, const PosIdx pos, Value * * arg
    auto path = state.coerceToPath(pos, *args[1], context,
        "while evaluating the second argument (the path to filter) passed to 'builtins.filterSource'");
    state.forceFunction(*args[0], pos, "while evaluating the first argument passed to builtins.filterSource");
    addPath(state, pos, path.baseName(), path.path.abs(), args[0], FileIngestionMethod::Recursive, std::nullopt, v, context);

    addPath(state, pos, path.baseName(), path, args[0], FileIngestionMethod::Recursive, std::nullopt, v, context);
}

static RegisterPrimOp primop_filterSource({
@ -2341,7 +2329,7 @@ static void prim_path(EvalState & state, const PosIdx pos, Value * * args, Value
        else if (n == "recursive")
            method = FileIngestionMethod { state.forceBool(*attr.value, attr.pos, "while evaluating the `recursive` attribute passed to builtins.path") };
        else if (n == "sha256")
            expectedHash = newHashAllowEmpty(state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the `sha256` attribute passed to builtins.path"), htSHA256);
            expectedHash = newHashAllowEmpty(state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the `sha256` attribute passed to builtins.path"), HashAlgorithm::SHA256);
        else
            state.debugThrowLastTrace(EvalError({
                .msg = hintfmt("unsupported argument '%1%' to 'addPath'", state.symbols[attr.name]),
@ -2356,7 +2344,7 @@ static void prim_path(EvalState & state, const PosIdx pos, Value * * args, Value
    if (name.empty())
        name = path->baseName();

    addPath(state, pos, name, path->path.abs(), filterFun, method, expectedHash, v, context);
    addPath(state, pos, name, *path, filterFun, method, expectedHash, v, context);
}

static RegisterPrimOp primop_path({
@ -3766,18 +3754,18 @@ static RegisterPrimOp primop_stringLength({
/* Return the cryptographic hash of a string in base-16. */
static void prim_hashString(EvalState & state, const PosIdx pos, Value * * args, Value & v)
{
    auto type = state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.hashString");
    std::optional<HashType> ht = parseHashType(type);
    if (!ht)
    auto algo = state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.hashString");
    std::optional<HashAlgorithm> ha = parseHashAlgo(algo);
    if (!ha)
        state.debugThrowLastTrace(Error({
            .msg = hintfmt("unknown hash type '%1%'", type),
            .msg = hintfmt("unknown hash algo '%1%'", algo),
            .errPos = state.positions[pos]
        }));

    NixStringContext context; // discarded
    auto s = state.forceString(*args[1], context, pos, "while evaluating the second argument passed to builtins.hashString");

    v.mkString(hashString(*ht, s).to_string(HashFormat::Base16, false));
    v.mkString(hashString(*ha, s).to_string(HashFormat::Base16, false));
}

static RegisterPrimOp primop_hashString({
@ -3800,15 +3788,15 @@ static void prim_convertHash(EvalState & state, const PosIdx pos, Value * * args
    auto hash = state.forceStringNoCtx(*iteratorHash->value, pos, "while evaluating the attribute 'hash'");

    Bindings::iterator iteratorHashAlgo = inputAttrs->find(state.symbols.create("hashAlgo"));
    std::optional<HashType> ht = std::nullopt;
    std::optional<HashAlgorithm> ha = std::nullopt;
    if (iteratorHashAlgo != inputAttrs->end()) {
        ht = parseHashType(state.forceStringNoCtx(*iteratorHashAlgo->value, pos, "while evaluating the attribute 'hashAlgo'"));
        ha = parseHashAlgo(state.forceStringNoCtx(*iteratorHashAlgo->value, pos, "while evaluating the attribute 'hashAlgo'"));
    }

    Bindings::iterator iteratorToHashFormat = getAttr(state, state.symbols.create("toHashFormat"), args[0]->attrs, "while locating the attribute 'toHashFormat'");
    HashFormat hf = parseHashFormat(state.forceStringNoCtx(*iteratorToHashFormat->value, pos, "while evaluating the attribute 'toHashFormat'"));

    v.mkString(Hash::parseAny(hash, ht).to_string(hf, hf == HashFormat::SRI));
    v.mkString(Hash::parseAny(hash, ha).to_string(hf, hf == HashFormat::SRI));
}

static RegisterPrimOp primop_convertHash({
@ -3837,7 +3825,8 @@ static RegisterPrimOp primop_convertHash({

      The format of the resulting hash. Must be one of
      - `"base16"`
      - `"base32"`
      - `"nix32"`
      - `"base32"` (deprecated alias for `"nix32"`)
      - `"base64"`
      - `"sri"`
@ -31,7 +31,7 @@ static void prim_fetchMercurial(EvalState & state, const PosIdx pos, Value * * a
            // be both a revision or a branch/tag name.
            auto value = state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the `rev` attribute passed to builtins.fetchMercurial");
            if (std::regex_match(value.begin(), value.end(), revRegex))
                rev = Hash::parseAny(value, htSHA1);
                rev = Hash::parseAny(value, HashAlgorithm::SHA1);
            else
                ref = value;
        }
@ -79,7 +79,7 @@ static void prim_fetchMercurial(EvalState & state, const PosIdx pos, Value * * a
        attrs2.alloc("branch").mkString(*input2.getRef());
        // Backward compatibility: set 'rev' to
        // 0000000000000000000000000000000000000000 for a dirty tree.
        auto rev2 = input2.getRev().value_or(Hash(htSHA1));
        auto rev2 = input2.getRev().value_or(Hash(HashAlgorithm::SHA1));
        attrs2.alloc("rev").mkString(rev2.gitRev());
        attrs2.alloc("shortRev").mkString(rev2.gitRev().substr(0, 12));
        if (auto revCount = input2.getRevCount())
@ -46,7 +46,7 @@ void emitTreeAttrs(
        attrs.alloc("shortRev").mkString(rev->gitShortRev());
    } else if (emptyRevFallback) {
        // Backwards compat for `builtins.fetchGit`: dirty repos return an empty sha1 as rev
        auto emptyHash = Hash(htSHA1);
        auto emptyHash = Hash(HashAlgorithm::SHA1);
        attrs.alloc("rev").mkString(emptyHash.gitRev());
        attrs.alloc("shortRev").mkString(emptyHash.gitShortRev());
    }
@ -187,45 +187,215 @@ static RegisterPrimOp primop_fetchTree({
    .name = "fetchTree",
    .args = {"input"},
    .doc = R"(
Fetch a source tree or a plain file using one of the supported backends.
*input* must be a [flake reference](@docroot@/command-ref/new-cli/nix3-flake.md#flake-references), either in attribute set representation or in the URL-like syntax.
The input should be "locked", that is, it should contain a commit hash or content hash unless impure evaluation (`--impure`) is enabled.
Fetch a file system tree or a plain file using one of the supported backends and return an attribute set with:

> **Note**
- the resulting fixed-output [store path](@docroot@/glossary.md#gloss-store-path)
- the corresponding [NAR](@docroot@/glossary.md#gloss-nar) hash
- backend-specific metadata (currently not documented). <!-- TODO: document output attributes -->

*input* must be an attribute set with the following attributes:

- `type` (String, required)

  One of the [supported source types](#source-types).
  This determines other required and allowed input attributes.

- `narHash` (String, optional)

  The `narHash` parameter can be used to substitute the source of the tree.
  It also allows for verification of tree contents that may not be provided by the underlying transfer mechanism.
  If `narHash` is set, the source is first looked up in the Nix store and [substituters](@docroot@/command-ref/conf-file.md#conf-substituters), and only fetched if not available.

A subset of the output attributes of `fetchTree` can be re-used for subsequent calls to `fetchTree` to produce the same result again.
That is, `fetchTree` is idempotent.

Downloads are cached in `$XDG_CACHE_HOME/nix`.
The remote source will be fetched from the network if both are true:
- A NAR hash is supplied and the corresponding store path is not [valid](@docroot@/glossary.md#gloss-validity), that is, not available in the store

  > **Note**
  >
  > [Substituters](@docroot@/command-ref/conf-file.md#conf-substituters) are not used in fetching.

- There is no cache entry or the cache entry is older than [`tarball-ttl`](@docroot@/command-ref/conf-file.md#conf-tarball-ttl)
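A locked call pinned by `narHash` might be sketched as follows; the URL and hash here are placeholders, not real values:

> **Example**
>
> ```nix
> builtins.fetchTree {
>   type = "tarball";
>   url = "https://example.com/source.tar.gz";
>   narHash = "sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=";
> }
> ```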
## Source types

The following source types and associated input attributes are supported.

<!-- TODO: It would be soooo much more predictable to work with (and
document) if `fetchTree` was a curried call with the first parameter for
`type` or an attribute like `builtins.fetchTree.git`! -->

- `"file"`

  Place a plain file into the Nix store.
  This is similar to [`builtins.fetchurl`](@docroot@/language/builtins.md#builtins-fetchurl)

  - `url` (String, required)

    Supported protocols:

    - `https`

      > **Example**
      >
      > ```nix
      > fetchTree {
      >   type = "file";
      >   url = "https://example.com/index.html";
      > }
      > ```

    - `http`

      Insecure HTTP transfer for legacy sources.

      > **Warning**
      >
      > HTTP performs no encryption or authentication.
      > Use a `narHash` known in advance to ensure the output has expected contents; see the sketch after this list.

    - `file`

      A file on the local file system.

      > **Example**
      >
      > ```nix
      > fetchTree {
      >   type = "file";
      >   url = "file:///home/eelco/nix/README.md";
      > }
      > ```
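
    Following the warning above, an insecure `http` source might be pinned by a `narHash` along these lines (the URL and hash are placeholders):

    > **Example**
    >
    > ```nix
    > fetchTree {
    >   type = "file";
    >   url = "http://example.com/legacy.patch";
    >   narHash = "sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=";
    > }
    > ```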
- `"tarball"`

  Download a tar archive and extract it into the Nix store.
  This has the same underlying implementation as [`builtins.fetchTarball`](@docroot@/language/builtins.md#builtins-fetchTarball)

  - `url` (String, required)

    > **Example**
    >
    > ```nix
    > fetchTree {
    >   type = "tarball";
    >   url = "https://github.com/NixOS/nixpkgs/tarball/nixpkgs-23.11";
    > }
    > ```

- `"git"`

  Fetch a Git tree and copy it to the Nix store.
  This is similar to [`builtins.fetchGit`](@docroot@/language/builtins.md#builtins-fetchGit).

  - `url` (String, required)

    The URL formats supported are the same as for Git itself.

    > **Example**
    >
    > ```nix
    > fetchTree {
    >   type = "git";
    >   url = "git@github.com:NixOS/nixpkgs.git";
    > }
    > ```

    > **Note**
    >
    > If the URL points to a local directory, and no `ref` or `rev` is given, Nix will only consider files added to the Git index, as listed by `git ls-files` but use the *current file contents* of the Git working directory.

  - `ref` (String, optional)

    A [Git reference](https://git-scm.com/book/en/v2/Git-Internals-Git-References), such as a branch or tag name.

    Default: `"HEAD"`

  - `rev` (String, optional)

    A Git revision; a commit hash.

    Default: the tip of `ref`

  - `shallow` (Bool, optional)

    Make a shallow clone when fetching the Git tree.

    Default: `false`

  - `submodules` (Bool, optional)

    Also fetch submodules if available.

    Default: `false`

  - `allRefs` (Bool, optional)

    If set to `true`, always fetch the entire repository, even if the latest commit is still in the cache.
    Otherwise, only the latest commit is fetched if it is not already cached.

    Default: `false`

  - `lastModified` (Integer, optional)

    Unix timestamp of the fetched commit.

    If set, pass through the value to the output attribute set.
    Otherwise, generated from the fetched Git tree.

  - `revCount` (Integer, optional)

    Number of revisions in the history of the Git repository before the fetched commit.

    If set, pass through the value to the output attribute set.
    Otherwise, generated from the fetched Git tree.
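
  The Git-specific attributes above can be combined; a sketch with illustrative values:

  > **Example**
  >
  > ```nix
  > fetchTree {
  >   type = "git";
  >   url = "https://github.com/NixOS/nix.git";
  >   ref = "master";
  >   shallow = true;
  > }
  > ```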
The following input types are still subject to change:

- `"path"`
- `"github"`
- `"gitlab"`
- `"sourcehut"`
- `"mercurial"`

*input* can also be a [URL-like reference](@docroot@/command-ref/new-cli/nix3-flake.md#flake-references).
The additional input types and the URL-like syntax require the [`flakes` experimental feature](@docroot@/contributing/experimental-features.md#xp-feature-flakes) to be enabled.

> **Example**
>
> The URL-like syntax requires the [`flakes` experimental feature](@docroot@/contributing/experimental-features.md#xp-feature-flakes) to be enabled.
> Fetch a GitHub repository using the attribute set representation:
>
> ```nix
> builtins.fetchTree {
>   type = "github";
>   owner = "NixOS";
>   repo = "nixpkgs";
>   rev = "ae2e6b3958682513d28f7d633734571fb18285dd";
> }
> ```
>
> This evaluates to the following attribute set:
>
> ```nix
> {
>   lastModified = 1686503798;
>   lastModifiedDate = "20230611171638";
>   narHash = "sha256-rA9RqKP9OlBrgGCPvfd5HVAXDOy8k2SmPtB/ijShNXc=";
>   outPath = "/nix/store/l5m6qlvfs9sdw14ja3qbzpglcjlb6j1x-source";
>   rev = "ae2e6b3958682513d28f7d633734571fb18285dd";
>   shortRev = "ae2e6b3";
> }
> ```

Here are some examples of how to use `fetchTree`:

- Fetch a GitHub repository using the attribute set representation:

  ```nix
  builtins.fetchTree {
    type = "github";
    owner = "NixOS";
    repo = "nixpkgs";
    rev = "ae2e6b3958682513d28f7d633734571fb18285dd";
  }
  ```

  This evaluates to the following attribute set:

  ```
  {
    lastModified = 1686503798;
    lastModifiedDate = "20230611171638";
    narHash = "sha256-rA9RqKP9OlBrgGCPvfd5HVAXDOy8k2SmPtB/ijShNXc=";
    outPath = "/nix/store/l5m6qlvfs9sdw14ja3qbzpglcjlb6j1x-source";
    rev = "ae2e6b3958682513d28f7d633734571fb18285dd";
    shortRev = "ae2e6b3";
  }
  ```

- Fetch the same GitHub repository using the URL-like syntax:

  ```
  builtins.fetchTree "github:NixOS/nixpkgs/ae2e6b3958682513d28f7d633734571fb18285dd"
  ```

> **Example**
>
> Fetch the same GitHub repository using the URL-like syntax:
>
> ```nix
> builtins.fetchTree "github:NixOS/nixpkgs/ae2e6b3958682513d28f7d633734571fb18285dd"
> ```
)",
    .fun = prim_fetchTree,
    .experimentalFeature = Xp::FetchTree,
@ -246,7 +416,7 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v
        if (n == "url")
            url = state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the url we should fetch");
        else if (n == "sha256")
            expectedHash = newHashAllowEmpty(state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the sha256 of the content we should fetch"), htSHA256);
            expectedHash = newHashAllowEmpty(state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the sha256 of the content we should fetch"), HashAlgorithm::SHA256);
        else if (n == "name")
            name = state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the name of the content we should fetch");
        else
@ -276,7 +446,7 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v
        state.debugThrowLastTrace(EvalError("in pure evaluation mode, '%s' requires a 'sha256' argument", who));

    // early exit if pinned and already in the store
    if (expectedHash && expectedHash->type == htSHA256) {
    if (expectedHash && expectedHash->algo == HashAlgorithm::SHA256) {
        auto expectedPath = state.store->makeFixedOutputPath(
            name,
            FixedOutputInfo {
@ -301,10 +471,10 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v
    if (expectedHash) {
        auto hash = unpack
            ? state.store->queryPathInfo(storePath)->narHash
            : hashFile(htSHA256, state.store->toRealPath(storePath));
            : hashFile(HashAlgorithm::SHA256, state.store->toRealPath(storePath));
        if (hash != *expectedHash)
            state.debugThrowLastTrace(EvalError((unsigned int) 102, "hash mismatch in file downloaded from '%s':\n specified: %s\n got: %s",
                *url, expectedHash->to_string(HashFormat::Base32, true), hash.to_string(HashFormat::Base32, true)));
                *url, expectedHash->to_string(HashFormat::Nix32, true), hash.to_string(HashFormat::Nix32, true)));
    }

    state.allowAndSetStorePathString(storePath, v);
@ -423,10 +423,9 @@ public:
    SourcePath path() const
    {
        assert(internalType == tPath);
        return SourcePath {
            .accessor = ref(_path.accessor->shared_from_this()),
            .path = CanonPath(CanonPath::unchecked_t(), _path.path)
        };
        return SourcePath(
            ref(_path.accessor->shared_from_this()),
            CanonPath(CanonPath::unchecked_t(), _path.path));
    }

    std::string_view string_view() const
@ -289,8 +289,8 @@ std::string Input::getType() const
std::optional<Hash> Input::getNarHash() const
{
    if (auto s = maybeGetStrAttr(attrs, "narHash")) {
        auto hash = s->empty() ? Hash(htSHA256) : Hash::parseSRI(*s);
        if (hash.type != htSHA256)
        auto hash = s->empty() ? Hash(HashAlgorithm::SHA256) : Hash::parseSRI(*s);
        if (hash.algo != HashAlgorithm::SHA256)
            throw UsageError("narHash must use SHA-256");
        return hash;
    }
@ -314,7 +314,7 @@ std::optional<Hash> Input::getRev() const
        } catch (BadHash &e) {
            // Default to sha1 for backwards compatibility with existing
            // usages (e.g. `builtins.fetchTree` calls or flake inputs).
            hash = Hash::parseAny(*s, htSHA1);
            hash = Hash::parseAny(*s, HashAlgorithm::SHA1);
        }
    }

@ -374,7 +374,7 @@ void InputScheme::clone(const Input & input, const Path & destDir) const
std::pair<StorePath, Input> InputScheme::fetch(ref<Store> store, const Input & input)
{
    auto [accessor, input2] = getAccessor(store, input);
    auto storePath = accessor->root().fetchToStore(store, input2.getName());
    auto storePath = SourcePath(accessor).fetchToStore(store, input2.getName());
    return {storePath, input2};
}
83
src/libfetchers/filtering-input-accessor.cc
Normal file
@ -0,0 +1,83 @@
#include "filtering-input-accessor.hh"

namespace nix {

std::string FilteringInputAccessor::readFile(const CanonPath & path)
{
    checkAccess(path);
    return next->readFile(prefix + path);
}

bool FilteringInputAccessor::pathExists(const CanonPath & path)
{
    return isAllowed(path) && next->pathExists(prefix + path);
}

std::optional<InputAccessor::Stat> FilteringInputAccessor::maybeLstat(const CanonPath & path)
{
    checkAccess(path);
    return next->maybeLstat(prefix + path);
}

InputAccessor::DirEntries FilteringInputAccessor::readDirectory(const CanonPath & path)
{
    checkAccess(path);
    DirEntries entries;
    for (auto & entry : next->readDirectory(prefix + path)) {
        if (isAllowed(path + entry.first))
            entries.insert(std::move(entry));
    }
    return entries;
}

std::string FilteringInputAccessor::readLink(const CanonPath & path)
{
    checkAccess(path);
    return next->readLink(prefix + path);
}

std::string FilteringInputAccessor::showPath(const CanonPath & path)
{
    return next->showPath(prefix + path);
}

void FilteringInputAccessor::checkAccess(const CanonPath & path)
{
    if (!isAllowed(path))
        throw makeNotAllowedError
            ? makeNotAllowedError(path)
            : RestrictedPathError("access to path '%s' is forbidden", showPath(path));
}

struct AllowListInputAccessorImpl : AllowListInputAccessor
{
    std::set<CanonPath> allowedPaths;

    AllowListInputAccessorImpl(
        ref<InputAccessor> next,
        std::set<CanonPath> && allowedPaths,
        MakeNotAllowedError && makeNotAllowedError)
        : AllowListInputAccessor(SourcePath(next), std::move(makeNotAllowedError))
        , allowedPaths(std::move(allowedPaths))
    { }

    bool isAllowed(const CanonPath & path) override
    {
        return path.isAllowed(allowedPaths);
    }

    void allowPath(CanonPath path) override
    {
        allowedPaths.insert(std::move(path));
    }
};

ref<AllowListInputAccessor> AllowListInputAccessor::create(
    ref<InputAccessor> next,
    std::set<CanonPath> && allowedPaths,
    MakeNotAllowedError && makeNotAllowedError)
{
    return make_ref<AllowListInputAccessorImpl>(next, std::move(allowedPaths), std::move(makeNotAllowedError));
}

}
73
src/libfetchers/filtering-input-accessor.hh
Normal file
@ -0,0 +1,73 @@
#pragma once

#include "input-accessor.hh"

namespace nix {

/**
 * A function that should throw an exception of type
 * `RestrictedPathError` explaining that access to `path` is
 * forbidden.
 */
typedef std::function<RestrictedPathError(const CanonPath & path)> MakeNotAllowedError;

/**
 * An abstract wrapping `InputAccessor` that performs access
 * control. Subclasses should override `isAllowed()` to implement an
 * access control policy. The error message is customized at construction.
 */
struct FilteringInputAccessor : InputAccessor
{
    ref<InputAccessor> next;
    CanonPath prefix;
    MakeNotAllowedError makeNotAllowedError;

    FilteringInputAccessor(const SourcePath & src, MakeNotAllowedError && makeNotAllowedError)
        : next(src.accessor)
        , prefix(src.path)
        , makeNotAllowedError(std::move(makeNotAllowedError))
    { }

    std::string readFile(const CanonPath & path) override;

    bool pathExists(const CanonPath & path) override;

    std::optional<Stat> maybeLstat(const CanonPath & path) override;

    DirEntries readDirectory(const CanonPath & path) override;

    std::string readLink(const CanonPath & path) override;

    std::string showPath(const CanonPath & path) override;

    /**
     * Call `makeNotAllowedError` to throw a `RestrictedPathError`
     * exception if `isAllowed()` returns `false` for `path`.
     */
    void checkAccess(const CanonPath & path);

    /**
     * Return `true` iff access to path is allowed.
     */
    virtual bool isAllowed(const CanonPath & path) = 0;
};

/**
 * A wrapping `InputAccessor` that checks paths against an allow-list.
 */
struct AllowListInputAccessor : public FilteringInputAccessor
{
    /**
     * Grant access to the specified path.
     */
    virtual void allowPath(CanonPath path) = 0;

    static ref<AllowListInputAccessor> create(
        ref<InputAccessor> next,
        std::set<CanonPath> && allowedPaths,
        MakeNotAllowedError && makeNotAllowedError);

    using FilteringInputAccessor::FilteringInputAccessor;
};

}
@ -4,19 +4,12 @@

namespace nix {

struct FSInputAccessorImpl : FSInputAccessor, PosixSourceAccessor
struct FSInputAccessor : InputAccessor, PosixSourceAccessor
{
    CanonPath root;
    std::optional<std::set<CanonPath>> allowedPaths;
    MakeNotAllowedError makeNotAllowedError;

    FSInputAccessorImpl(
        const CanonPath & root,
        std::optional<std::set<CanonPath>> && allowedPaths,
        MakeNotAllowedError && makeNotAllowedError)
    FSInputAccessor(const CanonPath & root)
        : root(root)
        , allowedPaths(std::move(allowedPaths))
        , makeNotAllowedError(std::move(makeNotAllowedError))
    {
        displayPrefix = root.isRoot() ? "" : root.abs();
    }
@ -27,39 +20,30 @@ struct FSInputAccessorImpl : FSInputAccessor, PosixSourceAccessor
        std::function<void(uint64_t)> sizeCallback) override
    {
        auto absPath = makeAbsPath(path);
        checkAllowed(absPath);
        PosixSourceAccessor::readFile(absPath, sink, sizeCallback);
    }

    bool pathExists(const CanonPath & path) override
    {
        auto absPath = makeAbsPath(path);
        return isAllowed(absPath) && PosixSourceAccessor::pathExists(absPath);
        return PosixSourceAccessor::pathExists(makeAbsPath(path));
    }

    std::optional<Stat> maybeLstat(const CanonPath & path) override
    {
        auto absPath = makeAbsPath(path);
        checkAllowed(absPath);
        return PosixSourceAccessor::maybeLstat(absPath);
        return PosixSourceAccessor::maybeLstat(makeAbsPath(path));
    }

    DirEntries readDirectory(const CanonPath & path) override
    {
        auto absPath = makeAbsPath(path);
        checkAllowed(absPath);
        DirEntries res;
        for (auto & entry : PosixSourceAccessor::readDirectory(absPath))
            if (isAllowed(absPath + entry.first))
                res.emplace(entry);
        for (auto & entry : PosixSourceAccessor::readDirectory(makeAbsPath(path)))
            res.emplace(entry);
        return res;
    }

    std::string readLink(const CanonPath & path) override
    {
        auto absPath = makeAbsPath(path);
        checkAllowed(absPath);
        return PosixSourceAccessor::readLink(absPath);
        return PosixSourceAccessor::readLink(makeAbsPath(path));
    }

    CanonPath makeAbsPath(const CanonPath & path)
@ -67,59 +51,22 @@ struct FSInputAccessorImpl : FSInputAccessor, PosixSourceAccessor
        return root + path;
    }

    void checkAllowed(const CanonPath & absPath) override
    {
        if (!isAllowed(absPath))
            throw makeNotAllowedError
                ? makeNotAllowedError(absPath)
                : RestrictedPathError("access to path '%s' is forbidden", absPath);
    }

    bool isAllowed(const CanonPath & absPath)
    {
        if (!absPath.isWithin(root))
            return false;

        if (allowedPaths) {
            auto p = absPath.removePrefix(root);
            if (!p.isAllowed(*allowedPaths))
                return false;
        }

        return true;
    }

    void allowPath(CanonPath path) override
    {
        if (allowedPaths)
            allowedPaths->insert(std::move(path));
    }

    bool hasAccessControl() override
    {
        return (bool) allowedPaths;
    }

    std::optional<CanonPath> getPhysicalPath(const CanonPath & path) override
    {
        return makeAbsPath(path);
    }
};

ref<FSInputAccessor> makeFSInputAccessor(
    const CanonPath & root,
    std::optional<std::set<CanonPath>> && allowedPaths,
    MakeNotAllowedError && makeNotAllowedError)
ref<InputAccessor> makeFSInputAccessor(const CanonPath & root)
{
    return make_ref<FSInputAccessorImpl>(root, std::move(allowedPaths), std::move(makeNotAllowedError));
    return make_ref<FSInputAccessor>(root);
}

ref<FSInputAccessor> makeStorePathAccessor(
ref<InputAccessor> makeStorePathAccessor(
    ref<Store> store,
    const StorePath & storePath,
    MakeNotAllowedError && makeNotAllowedError)
    const StorePath & storePath)
{
    return makeFSInputAccessor(CanonPath(store->toRealPath(storePath)), {}, std::move(makeNotAllowedError));
    return makeFSInputAccessor(CanonPath(store->toRealPath(storePath)));
}

SourcePath getUnfilteredRootPath(CanonPath path)
@ -7,26 +7,12 @@ namespace nix {
class StorePath;
class Store;

struct FSInputAccessor : InputAccessor
{
    virtual void checkAllowed(const CanonPath & absPath) = 0;
ref<InputAccessor> makeFSInputAccessor(
    const CanonPath & root);

    virtual void allowPath(CanonPath path) = 0;

    virtual bool hasAccessControl() = 0;
};

typedef std::function<RestrictedPathError(const CanonPath & path)> MakeNotAllowedError;

ref<FSInputAccessor> makeFSInputAccessor(
    const CanonPath & root,
    std::optional<std::set<CanonPath>> && allowedPaths = {},
    MakeNotAllowedError && makeNotAllowedError = {});

ref<FSInputAccessor> makeStorePathAccessor(
ref<InputAccessor> makeStorePathAccessor(
    ref<Store> store,
    const StorePath & storePath,
    MakeNotAllowedError && makeNotAllowedError = {});
    const StorePath & storePath);

SourcePath getUnfilteredRootPath(CanonPath path);
@ -91,7 +91,7 @@ Hash toHash(const git_oid & oid)
#ifdef GIT_EXPERIMENTAL_SHA256
    assert(oid.type == GIT_OID_SHA1);
#endif
    Hash hash(htSHA1);
    Hash hash(HashAlgorithm::SHA1);
    memcpy(hash.hash, oid.id, hash.hashSize);
    return hash;
}
@ -439,7 +439,7 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this<GitRepoImpl>
        std::string re = R"(Good "git" signature for \* with .* key SHA256:[)";
        for (const fetchers::PublicKey & k : publicKeys){
            // Calculate sha256 fingerprint from public key and escape the regex symbol '+' to match the key literally
            auto fingerprint = trim(hashString(htSHA256, base64Decode(k.key)).to_string(nix::HashFormat::Base64, false), "=");
            auto fingerprint = trim(hashString(HashAlgorithm::SHA256, base64Decode(k.key)).to_string(nix::HashFormat::Base64, false), "=");
            auto escaped_fingerprint = std::regex_replace(fingerprint, std::regex("\\+"), "\\+" );
            re += "(" + escaped_fingerprint + ")";
        }
@ -554,7 +554,7 @@ struct GitInputAccessor : InputAccessor
        return toHash(*git_tree_entry_id(entry));
    }

    std::map<CanonPath, TreeEntry> lookupCache;
    std::unordered_map<CanonPath, TreeEntry> lookupCache;

    /* Recursively look up 'path' relative to the root. */
    git_tree_entry * lookup(const CanonPath & path)
@ -9,6 +9,7 @@
#include "processes.hh"
#include "git.hh"
#include "fs-input-accessor.hh"
#include "filtering-input-accessor.hh"
#include "mounted-input-accessor.hh"
#include "git-utils.hh"
#include "logging.hh"
@ -52,7 +53,7 @@ bool touchCacheFile(const Path & path, time_t touch_time)
Path getCachePath(std::string_view key)
{
    return getCacheDir() + "/nix/gitv3/" +
        hashString(htSHA256, key).to_string(HashFormat::Base32, false);
        hashString(HashAlgorithm::SHA256, key).to_string(HashFormat::Nix32, false);
}

// Returns the name of the HEAD branch.
@ -369,7 +370,7 @@ struct GitInputScheme : InputScheme
    {
        auto checkHashType = [&](const std::optional<Hash> & hash)
        {
            if (hash.has_value() && !(hash->type == htSHA1 || hash->type == htSHA256))
            if (hash.has_value() && !(hash->algo == HashAlgorithm::SHA1 || hash->algo == HashAlgorithm::SHA256))
                throw Error("Hash '%s' is not supported by Git. Supported types are sha1 and sha256.", hash->to_string(HashFormat::Base16, true));
        };

@ -559,7 +560,7 @@ struct GitInputScheme : InputScheme
                        repoInfo.url
                        );
                } else
                    input.attrs.insert_or_assign("rev", Hash::parseAny(chomp(readFile(localRefFile)), htSHA1).gitRev());
                    input.attrs.insert_or_assign("rev", Hash::parseAny(chomp(readFile(localRefFile)), HashAlgorithm::SHA1).gitRev());

                // cache dir lock is removed at scope end; we will only use read-only operations on specific revisions in the remainder
            }
@ -639,7 +640,10 @@ struct GitInputScheme : InputScheme
                repoInfo.workdirInfo.files.insert(submodule.path);

        ref<InputAccessor> accessor =
            makeFSInputAccessor(CanonPath(repoInfo.url), repoInfo.workdirInfo.files, makeNotAllowedError(repoInfo.url));
            AllowListInputAccessor::create(
                makeFSInputAccessor(CanonPath(repoInfo.url)),
                std::move(repoInfo.workdirInfo.files),
                makeNotAllowedError(repoInfo.url));

        /* If the repo has submodules, return a mounted input accessor
           consisting of the accessor for the top-level repo and the
@ -42,7 +42,7 @@ struct GitArchiveInputScheme : InputScheme
        auto size = path.size();
        if (size == 3) {
            if (std::regex_match(path[2], revRegex))
                rev = Hash::parseAny(path[2], htSHA1);
                rev = Hash::parseAny(path[2], HashAlgorithm::SHA1);
            else if (std::regex_match(path[2], refRegex))
                ref = path[2];
            else
@ -68,7 +68,7 @@ struct GitArchiveInputScheme : InputScheme
            if (name == "rev") {
                if (rev)
                    throw BadURL("URL '%s' contains multiple commit hashes", url.url);
                rev = Hash::parseAny(value, htSHA1);
                rev = Hash::parseAny(value, HashAlgorithm::SHA1);
            }
            else if (name == "ref") {
                if (!std::regex_match(value, refRegex))
@ -284,7 +284,7 @@ struct GitHubInputScheme : GitArchiveInputScheme
            readFile(
                store->toRealPath(
                    downloadFile(store, url, "source", false, headers).storePath)));
        auto rev = Hash::parseAny(std::string { json["sha"] }, htSHA1);
        auto rev = Hash::parseAny(std::string { json["sha"] }, HashAlgorithm::SHA1);
        debug("HEAD revision for '%s' is %s", url, rev.gitRev());
        return rev;
    }
@ -356,7 +356,7 @@ struct GitLabInputScheme : GitArchiveInputScheme
            readFile(
                store->toRealPath(
                    downloadFile(store, url, "source", false, headers).storePath)));
        auto rev = Hash::parseAny(std::string(json[0]["id"]), htSHA1);
        auto rev = Hash::parseAny(std::string(json[0]["id"]), HashAlgorithm::SHA1);
        debug("HEAD revision for '%s' is %s", url, rev.gitRev());
        return rev;
    }
@ -448,7 +448,7 @@ struct SourceHutInputScheme : GitArchiveInputScheme
        if(!id)
            throw BadURL("in '%d', couldn't find ref '%d'", input.to_string(), ref);

        auto rev = Hash::parseAny(*id, htSHA1);
        auto rev = Hash::parseAny(*id, HashAlgorithm::SHA1);
        debug("HEAD revision for '%s' is %s", fmt("%s/%s", base_url, ref), rev.gitRev());
        return rev;
    }
@ -20,7 +20,7 @@ struct IndirectInputScheme : InputScheme
        if (path.size() == 1) {
        } else if (path.size() == 2) {
            if (std::regex_match(path[1], revRegex))
                rev = Hash::parseAny(path[1], htSHA1);
                rev = Hash::parseAny(path[1], HashAlgorithm::SHA1);
            else if (std::regex_match(path[1], refRegex))
                ref = path[1];
            else
@ -31,7 +31,7 @@ struct IndirectInputScheme : InputScheme
            ref = path[1];
            if (!std::regex_match(path[2], revRegex))
                throw BadURL("in flake URL '%s', '%s' is not a commit hash", url.url, path[2]);
            rev = Hash::parseAny(path[2], htSHA1);
            rev = Hash::parseAny(path[2], HashAlgorithm::SHA1);
        } else
            throw BadURL("GitHub URL '%s' is invalid", url.url);
@ -44,8 +44,8 @@ StorePath InputAccessor::fetchToStore(

    auto storePath =
        settings.readOnlyMode
        ? store->computeStorePathFromDump(*source, name, method, htSHA256).first
        : store->addToStoreFromDump(*source, name, method, htSHA256, repair);
        ? store->computeStorePathFromDump(*source, name, method, HashAlgorithm::SHA256).first
        : store->addToStoreFromDump(*source, name, method, HashAlgorithm::SHA256, repair);

    if (cacheKey)
        fetchers::getCache()->add(store, *cacheKey, {}, storePath, true);
@ -53,11 +53,6 @@ StorePath InputAccessor::fetchToStore(
    return storePath;
}

SourcePath InputAccessor::root()
{
    return {ref(shared_from_this()), CanonPath::root};
}

std::ostream & operator << (std::ostream & str, const SourcePath & path)
{
    str << path.to_string();
@ -88,7 +83,7 @@ SourcePath SourcePath::parent() const

SourcePath SourcePath::resolveSymlinks() const
{
    auto res = accessor->root();
    auto res = SourcePath(accessor);

    int linksAllowed = 1024;
@ -36,8 +36,6 @@ struct InputAccessor : virtual SourceAccessor, std::enable_shared_from_this<Inpu
        FileIngestionMethod method = FileIngestionMethod::Recursive,
        PathFilter * filter = nullptr,
        RepairFlag repair = NoRepair);

    SourcePath root();
};

/**
@ -51,6 +49,11 @@ struct SourcePath
    ref<InputAccessor> accessor;
    CanonPath path;

    SourcePath(ref<InputAccessor> accessor, CanonPath path = CanonPath::root)
        : accessor(std::move(accessor))
        , path(std::move(path))
    { }

    std::string_view baseName() const;

    /**
@ -210,7 +210,7 @@ struct MercurialInputScheme : InputScheme
                return files.count(file);
            };

            auto storePath = store->addToStore(input.getName(), actualPath, FileIngestionMethod::Recursive, htSHA256, filter);
            auto storePath = store->addToStore(input.getName(), actualPath, FileIngestionMethod::Recursive, HashAlgorithm::SHA256, filter);

            return {std::move(storePath), input};
        }
@ -220,7 +220,7 @@ struct MercurialInputScheme : InputScheme

        auto checkHashType = [&](const std::optional<Hash> & hash)
        {
            if (hash.has_value() && hash->type != htSHA1)
            if (hash.has_value() && hash->algo != HashAlgorithm::SHA1)
                throw Error("Hash '%s' is not supported by Mercurial. Only sha1 is supported.", hash->to_string(HashFormat::Base16, true));
        };

@ -260,14 +260,14 @@ struct MercurialInputScheme : InputScheme
        });

        if (auto res = getCache()->lookup(store, unlockedAttrs)) {
            auto rev2 = Hash::parseAny(getStrAttr(res->first, "rev"), htSHA1);
            auto rev2 = Hash::parseAny(getStrAttr(res->first, "rev"), HashAlgorithm::SHA1);
            if (!input.getRev() || input.getRev() == rev2) {
                input.attrs.insert_or_assign("rev", rev2.gitRev());
                return makeResult(res->first, std::move(res->second));
            }
        }

        Path cacheDir = fmt("%s/nix/hg/%s", getCacheDir(), hashString(htSHA256, actualUrl).to_string(HashFormat::Base32, false));
        Path cacheDir = fmt("%s/nix/hg/%s", getCacheDir(), hashString(HashAlgorithm::SHA256, actualUrl).to_string(HashFormat::Nix32, false));

        /* If this is a commit hash that we already have, we don't
           have to pull again. */
@ -301,7 +301,7 @@ struct MercurialInputScheme : InputScheme
            runHg({ "log", "-R", cacheDir, "-r", revOrRef, "--template", "{node} {rev} {branch}" }));
        assert(tokens.size() == 3);

        input.attrs.insert_or_assign("rev", Hash::parseAny(tokens[0], htSHA1).gitRev());
        input.attrs.insert_or_assign("rev", Hash::parseAny(tokens[0], HashAlgorithm::SHA1).gitRev());
        auto revCount = std::stoull(tokens[1]);
        input.attrs.insert_or_assign("ref", tokens[2]);
@ -73,7 +73,7 @@ DownloadFileResult downloadFile(
    } else {
        StringSink sink;
        dumpString(res.data, sink);
        auto hash = hashString(htSHA256, res.data);
        auto hash = hashString(HashAlgorithm::SHA256, res.data);
        ValidPathInfo info {
            *store,
            name,
@ -82,7 +82,7 @@ DownloadFileResult downloadFile(
                .hash = hash,
                .references = {},
            },
            hashString(htSHA256, sink.s),
            hashString(HashAlgorithm::SHA256, sink.s),
        };
        info.narSize = sink.s.size();
        auto source = StringSource { sink.s };
@ -156,7 +156,7 @@ DownloadTarballResult downloadTarball(
            throw nix::Error("tarball '%s' contains an unexpected number of top-level files", url);
        auto topDir = tmpDir + "/" + members.begin()->name;
        lastModified = lstat(topDir).st_mtime;
        unpackedStorePath = store->addToStore(name, topDir, FileIngestionMethod::Recursive, htSHA256, defaultPathFilter, NoRepair);
        unpackedStorePath = store->addToStore(name, topDir, FileIngestionMethod::Recursive, HashAlgorithm::SHA256, defaultPathFilter, NoRepair);
    }

    Attrs infoAttrs({
@ -143,9 +143,9 @@ ref<const ValidPathInfo> BinaryCacheStore::addToStoreCommon(
    /* Read the NAR simultaneously into a CompressionSink+FileSink (to
       write the compressed NAR to disk), into a HashSink (to get the
       NAR hash), and into a NarAccessor (to get the NAR listing). */
    HashSink fileHashSink { htSHA256 };
    HashSink fileHashSink { HashAlgorithm::SHA256 };
    std::shared_ptr<SourceAccessor> narAccessor;
    HashSink narHashSink { htSHA256 };
    HashSink narHashSink { HashAlgorithm::SHA256 };
    {
        FdSink fileSink(fdTemp.get());
        TeeSink teeSinkCompressed { fileSink, fileHashSink };
@ -165,8 +165,8 @@ ref<const ValidPathInfo> BinaryCacheStore::addToStoreCommon(
    auto [fileHash, fileSize] = fileHashSink.finish();
    narInfo->fileHash = fileHash;
    narInfo->fileSize = fileSize;
    narInfo->url = "nar/" + narInfo->fileHash->to_string(HashFormat::Base32, false) + ".nar"
        + (compression == "xz" ? ".xz" :
    narInfo->url = "nar/" + narInfo->fileHash->to_string(HashFormat::Nix32, false) + ".nar"
        + (compression == "xz" ? ".xz" :
           compression == "bzip2" ? ".bz2" :
           compression == "zstd" ? ".zst" :
           compression == "lzip" ? ".lzip" :
@ -301,9 +301,9 @@ void BinaryCacheStore::addToStore(const ValidPathInfo & info, Source & narSource
}

StorePath BinaryCacheStore::addToStoreFromDump(Source & dump, std::string_view name,
    FileIngestionMethod method, HashType hashAlgo, RepairFlag repair, const StorePathSet & references)
    FileIngestionMethod method, HashAlgorithm hashAlgo, RepairFlag repair, const StorePathSet & references)
{
    if (method != FileIngestionMethod::Recursive || hashAlgo != htSHA256)
    if (method != FileIngestionMethod::Recursive || hashAlgo != HashAlgorithm::SHA256)
        unsupported("addToStoreFromDump");
    return addToStoreCommon(dump, repair, CheckSigs, [&](HashResult nar) {
        ValidPathInfo info {
@ -399,13 +399,13 @@ void BinaryCacheStore::queryPathInfoUncached(const StorePath & storePath,
}

StorePath BinaryCacheStore::addToStore(
    std::string_view name,
    const Path & srcPath,
    FileIngestionMethod method,
    HashType hashAlgo,
    PathFilter & filter,
    RepairFlag repair,
    const StorePathSet & references)
    std::string_view name,
    const Path & srcPath,
    FileIngestionMethod method,
    HashAlgorithm hashAlgo,
    PathFilter & filter,
    RepairFlag repair,
    const StorePathSet & references)
{
    /* FIXME: Make BinaryCacheStore::addToStoreCommon support
       non-recursive+sha256 so we can just use the default
@ -448,7 +448,7 @@ StorePath BinaryCacheStore::addTextToStore(
    const StorePathSet & references,
    RepairFlag repair)
{
    auto textHash = hashString(htSHA256, s);
    auto textHash = hashString(HashAlgorithm::SHA256, s);
    auto path = makeTextPath(name, TextInfo { { textHash }, references });

    if (!repair && isValidPath(path))
@ -124,16 +124,16 @@ public:
        RepairFlag repair, CheckSigsFlag checkSigs) override;

    StorePath addToStoreFromDump(Source & dump, std::string_view name,
        FileIngestionMethod method, HashType hashAlgo, RepairFlag repair, const StorePathSet & references) override;
        FileIngestionMethod method, HashAlgorithm hashAlgo, RepairFlag repair, const StorePathSet & references) override;

    StorePath addToStore(
        std::string_view name,
        const Path & srcPath,
        FileIngestionMethod method,
        HashType hashAlgo,
        PathFilter & filter,
        RepairFlag repair,
        const StorePathSet & references) override;
        std::string_view name,
        const Path & srcPath,
        FileIngestionMethod method,
        HashAlgorithm hashAlgo,
        PathFilter & filter,
        RepairFlag repair,
        const StorePathSet & references) override;

    StorePath addTextToStore(
        std::string_view name,
@ -558,7 +558,7 @@ void DerivationGoal::inputsRealised()
               inputDrvOutputs statefully, sometimes it gets out of sync with
               the real source of truth (store). So we query the store
               directly if there's a problem. */
            attempt = fullDrv.tryResolve(worker.store);
            attempt = fullDrv.tryResolve(worker.store, &worker.evalStore);
        }
        assert(attempt);
        Derivation drvResolved { std::move(*attempt) };
@ -1066,8 +1066,8 @@ void LocalDerivationGoal::initTmpDir() {
|
||||
if (passAsFile.find(i.first) == passAsFile.end()) {
|
||||
env[i.first] = i.second;
|
||||
} else {
|
||||
auto hash = hashString(htSHA256, i.first);
|
||||
std::string fn = ".attr-" + hash.to_string(HashFormat::Base32, false);
|
||||
auto hash = hashString(HashAlgorithm::SHA256, i.first);
|
||||
std::string fn = ".attr-" + hash.to_string(HashFormat::Nix32, false);
|
||||
Path p = tmpDir + "/" + fn;
|
||||
writeFile(p, rewriteStrings(i.second, inputRewrites));
|
||||
chownToBuilder(p);
|
||||
@ -1290,13 +1290,13 @@ struct RestrictedStore : public virtual RestrictedStoreConfig, public virtual In
|
||||
{ throw Error("queryPathFromHashPart"); }
|
||||
|
||||
StorePath addToStore(
|
||||
std::string_view name,
|
||||
const Path & srcPath,
|
||||
FileIngestionMethod method,
|
||||
HashType hashAlgo,
PathFilter & filter,
RepairFlag repair,
const StorePathSet & references) override
std::string_view name,
const Path & srcPath,
FileIngestionMethod method,
HashAlgorithm hashAlgo,
PathFilter & filter,
RepairFlag repair,
const StorePathSet & references) override
{ throw Error("addToStore"); }

void addToStore(const ValidPathInfo & info, Source & narSource,
@ -1318,12 +1318,12 @@ struct RestrictedStore : public virtual RestrictedStoreConfig, public virtual In
}

StorePath addToStoreFromDump(
Source & dump,
std::string_view name,
FileIngestionMethod method,
HashType hashAlgo,
RepairFlag repair,
const StorePathSet & references) override
Source & dump,
std::string_view name,
FileIngestionMethod method,
HashAlgorithm hashAlgo,
RepairFlag repair,
const StorePathSet & references) override
{
auto path = next->addToStoreFromDump(dump, name, method, hashAlgo, repair, references);
goal.addDependency(path);
@ -2466,7 +2466,7 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
rewriteOutput(outputRewrites);
/* FIXME optimize and deduplicate with addToStore */
std::string oldHashPart { scratchPath->hashPart() };
HashModuloSink caSink { outputHash.hashType, oldHashPart };
HashModuloSink caSink {outputHash.hashAlgo, oldHashPart };
std::visit(overloaded {
[&](const TextIngestionMethod &) {
readFile(actualPath, caSink);
@ -2511,7 +2511,7 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
std::string(newInfo0.path.hashPart())}});
}

HashResult narHashAndSize = hashPath(htSHA256, actualPath);
HashResult narHashAndSize = hashPath(HashAlgorithm::SHA256, actualPath);
newInfo0.narHash = narHashAndSize.first;
newInfo0.narSize = narHashAndSize.second;

@ -2531,7 +2531,7 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
std::string { scratchPath->hashPart() },
std::string { requiredFinalPath.hashPart() });
rewriteOutput(outputRewrites);
auto narHashAndSize = hashPath(htSHA256, actualPath);
auto narHashAndSize = hashPath(HashAlgorithm::SHA256, actualPath);
ValidPathInfo newInfo0 { requiredFinalPath, narHashAndSize.first };
newInfo0.narSize = narHashAndSize.second;
auto refs = rewriteRefs();
@ -2546,7 +2546,7 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()

auto newInfo0 = newInfoFromCA(DerivationOutput::CAFloating {
.method = dof.ca.method,
.hashType = wanted.type,
.hashAlgo = wanted.algo,
});

/* Check wanted hash */
@ -2583,7 +2583,7 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
[&](const DerivationOutput::Impure & doi) {
return newInfoFromCA(DerivationOutput::CAFloating {
.method = doi.method,
.hashType = doi.hashType,
.hashAlgo = doi.hashAlgo,
});
},

@ -2945,7 +2945,7 @@ StorePath LocalDerivationGoal::makeFallbackPath(OutputNameView outputName)
{
return worker.store.makeStorePath(
"rewrite:" + std::string(drvPath.to_string()) + ":name:" + std::string(outputName),
Hash(htSHA256), outputPathName(drv->name, outputName));
Hash(HashAlgorithm::SHA256), outputPathName(drv->name, outputName));
}

@ -2953,7 +2953,7 @@ StorePath LocalDerivationGoal::makeFallbackPath(const StorePath & path)
{
return worker.store.makeStorePath(
"rewrite:" + std::string(drvPath.to_string()) + ":" + std::string(path.to_string()),
Hash(htSHA256), path.name());
Hash(HashAlgorithm::SHA256), path.name());
}
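The recurring change in the hunks above (and throughout this commit) is a rename of the hashing API: the `HashType` enum becomes `HashAlgorithm`, `htSHA256` becomes `HashAlgorithm::SHA256`, and base-32 rendering is renamed from `HashFormat::Base32` to `HashFormat::Nix32`. A minimal before/after sketch of a call site, assuming only the renamed declarations from `hash.hh`:

    // Before the rename:
    //   HashResult r = hashPath(htSHA256, actualPath);
    // After the rename, as used in the hunks above:
    HashResult narHashAndSize = hashPath(HashAlgorithm::SHA256, actualPath);
    Hash narHash = narHashAndSize.first;       // the NAR hash itself
    uint64_t narSize = narHashAndSize.second;  // the NAR size in bytes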
@ -519,8 +519,8 @@ bool Worker::pathContentsGood(const StorePath & path)
if (!pathExists(store.printStorePath(path)))
res = false;
else {
HashResult current = hashPath(info->narHash.type, store.printStorePath(path));
Hash nullHash(htSHA256);
HashResult current = hashPath(info->narHash.algo, store.printStorePath(path));
Hash nullHash(HashAlgorithm::SHA256);
res = info->narHash == nullHash || info->narHash == current.first;
}
pathContentsGoodCache.insert_or_assign(path, res);
@ -63,9 +63,9 @@ void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData)
for (auto hashedMirror : settings.hashedMirrors.get())
try {
if (!hasSuffix(hashedMirror, "/")) hashedMirror += '/';
std::optional<HashType> ht = parseHashTypeOpt(getAttr("outputHashAlgo"));
std::optional<HashAlgorithm> ht = parseHashAlgoOpt(getAttr("outputHashAlgo"));
Hash h = newHashAllowEmpty(getAttr("outputHash"), ht);
fetch(hashedMirror + printHashType(h.type) + "/" + h.to_string(HashFormat::Base16, false));
fetch(hashedMirror + printHashAlgo(h.algo) + "/" + h.to_string(HashFormat::Base16, false));
return;
} catch (Error & e) {
debug(e.what());
@ -38,14 +38,14 @@ ContentAddressMethod ContentAddressMethod::parsePrefix(std::string_view & m)
return FileIngestionMethod::Flat;
}

std::string ContentAddressMethod::render(HashType ht) const
std::string ContentAddressMethod::render(HashAlgorithm ha) const
{
return std::visit(overloaded {
[&](const TextIngestionMethod & th) {
return std::string{"text:"} + printHashType(ht);
return std::string{"text:"} + printHashAlgo(ha);
},
[&](const FileIngestionMethod & fim) {
return "fixed:" + makeFileIngestionPrefix(fim) + printHashType(ht);
return "fixed:" + makeFileIngestionPrefix(fim) + printHashAlgo(ha);
}
}, raw);
}
@ -61,13 +61,13 @@ std::string ContentAddress::render() const
+ makeFileIngestionPrefix(method);
},
}, method.raw)
+ this->hash.to_string(HashFormat::Base32, true);
+ this->hash.to_string(HashFormat::Nix32, true);
}

/**
* Parses content address strings up to the hash.
*/
static std::pair<ContentAddressMethod, HashType> parseContentAddressMethodPrefix(std::string_view & rest)
static std::pair<ContentAddressMethod, HashAlgorithm> parseContentAddressMethodPrefix(std::string_view & rest)
{
std::string_view wholeInput { rest };

@ -83,27 +83,27 @@ static std::pair<ContentAddressMethod, HashType> parseContentAddressMethodPrefix
auto hashTypeRaw = splitPrefixTo(rest, ':');
if (!hashTypeRaw)
throw UsageError("content address hash must be in form '<algo>:<hash>', but found: %s", wholeInput);
HashType hashType = parseHashType(*hashTypeRaw);
return hashType;
HashAlgorithm hashAlgo = parseHashAlgo(*hashTypeRaw);
return hashAlgo;
};

// Switch on prefix
if (prefix == "text") {
// No parsing of the ingestion method, "text" only supports flat.
HashType hashType = parseHashType_();
HashAlgorithm hashAlgo = parseHashType_();
return {
TextIngestionMethod {},
std::move(hashType),
std::move(hashAlgo),
};
} else if (prefix == "fixed") {
// Parse method
auto method = FileIngestionMethod::Flat;
if (splitPrefix(rest, "r:"))
method = FileIngestionMethod::Recursive;
HashType hashType = parseHashType_();
HashAlgorithm hashAlgo = parseHashType_();
return {
std::move(method),
std::move(hashType),
std::move(hashAlgo),
};
} else
throw UsageError("content address prefix '%s' is unrecognized. Recognized prefixes are 'text' or 'fixed'", prefix);
@ -113,15 +113,15 @@ ContentAddress ContentAddress::parse(std::string_view rawCa)
{
auto rest = rawCa;

auto [caMethod, hashType] = parseContentAddressMethodPrefix(rest);
auto [caMethod, hashAlgo] = parseContentAddressMethodPrefix(rest);

return ContentAddress {
.method = std::move(caMethod),
.hash = Hash::parseNonSRIUnprefixed(rest, hashType),
.hash = Hash::parseNonSRIUnprefixed(rest, hashAlgo),
};
}

std::pair<ContentAddressMethod, HashType> ContentAddressMethod::parse(std::string_view caMethod)
std::pair<ContentAddressMethod, HashAlgorithm> ContentAddressMethod::parse(std::string_view caMethod)
{
std::string asPrefix = std::string{caMethod} + ":";
// parseContentAddressMethodPrefix takes its argument by reference
@ -144,7 +144,7 @@ std::string renderContentAddress(std::optional<ContentAddress> ca)
std::string ContentAddress::printMethodAlgo() const
{
return method.renderPrefix()
+ printHashType(hash.type);
+ printHashAlgo(hash.algo);
}

bool StoreReferences::empty() const
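Taken together, `parse()` and `render()` above are rough inverses: `parse` splits a string like `fixed:r:sha256` into an ingestion method plus a hash algorithm, and `render` reassembles the same prefix. A hedged round-trip sketch, using only the signatures shown above:

    // "fixed:r:sha256" = recursive (NAR) file ingestion with SHA-256;
    // "text:sha256" would name flat text ingestion instead.
    auto [method, algo] = ContentAddressMethod::parse("fixed:r:sha256");
    assert(method.render(algo) == "fixed:r:sha256");  // rough inverse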
@ -94,7 +94,7 @@ struct ContentAddressMethod
/**
* Parse a content addressing method and hash type.
*/
static std::pair<ContentAddressMethod, HashType> parse(std::string_view rawCaMethod);
static std::pair<ContentAddressMethod, HashAlgorithm> parse(std::string_view rawCaMethod);

/**
* Render a content addressing method and hash type in a
@ -102,7 +102,7 @@ struct ContentAddressMethod
*
* The rough inverse of `parse()`.
*/
std::string render(HashType ht) const;
std::string render(HashAlgorithm ha) const;
};
@ -400,22 +400,22 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
logger->startWork();
auto pathInfo = [&]() {
// NB: FramedSource must be out of scope before logger->stopWork();
auto [contentAddressMethod, hashType_] = ContentAddressMethod::parse(camStr);
auto hashType = hashType_; // work around clang bug
auto [contentAddressMethod, hashAlgo_] = ContentAddressMethod::parse(camStr);
auto hashAlgo = hashAlgo_; // work around clang bug
FramedSource source(from);
// TODO this is essentially RemoteStore::addCAToStore. Move it up to Store.
return std::visit(overloaded {
[&](const TextIngestionMethod &) {
if (hashType != htSHA256)
if (hashAlgo != HashAlgorithm::SHA256)
throw UnimplementedError("When adding text-hashed data called '%s', only SHA-256 is supported but '%s' was given",
name, printHashType(hashType));
name, printHashAlgo(hashAlgo));
// We could stream this by changing Store
std::string contents = source.drain();
auto path = store->addTextToStore(name, contents, refs, repair);
return store->queryPathInfo(path);
},
[&](const FileIngestionMethod & fim) {
auto path = store->addToStoreFromDump(source, name, fim, hashType, repair, refs);
auto path = store->addToStoreFromDump(source, name, fim, hashAlgo, repair, refs);
return store->queryPathInfo(path);
},
}, contentAddressMethod.raw);
@ -424,7 +424,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,

WorkerProto::Serialise<ValidPathInfo>::write(*store, wconn, *pathInfo);
} else {
HashType hashAlgo;
HashAlgorithm hashAlgo;
std::string baseName;
FileIngestionMethod method;
{
@ -440,7 +440,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
hashAlgoRaw = "sha256";
method = FileIngestionMethod::Recursive;
}
hashAlgo = parseHashType(hashAlgoRaw);
hashAlgo = parseHashAlgo(hashAlgoRaw);
}

auto dumpSource = sinkToSource([&](Sink & saved) {
@ -574,6 +574,15 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
case WorkerProto::Op::BuildDerivation: {
auto drvPath = store->parseStorePath(readString(from));
BasicDerivation drv;
/*
* Note: unlike wopEnsurePath, this operation reads a
* derivation-to-be-realized from the client with
* readDerivation(Source,Store) rather than reading it from
* the local store with Store::readDerivation(). Since the
* derivation-to-be-realized is not registered in the store
* it cannot be trusted that its outPath was calculated
* correctly.
*/
readDerivation(from, *store, drv, Derivation::nameFromPath(drvPath));
BuildMode buildMode = (BuildMode) readInt(from);
logger->startWork();
@ -883,7 +892,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
bool repair, dontCheckSigs;
auto path = store->parseStorePath(readString(from));
auto deriver = readString(from);
auto narHash = Hash::parseAny(readString(from), htSHA256);
auto narHash = Hash::parseAny(readString(from), HashAlgorithm::SHA256);
ValidPathInfo info { path, narHash };
if (deriver != "")
info.deriver = store->parseStorePath(deriver);
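The daemon-side `AddToStore` handler above dispatches on the parsed content-addressing method: text ingestion is pinned to SHA-256, while file ingestion forwards whatever algorithm the client requested. A condensed sketch of that dispatch (names as in the hunk; error text abbreviated):

    auto [contentAddressMethod, hashAlgo] = ContentAddressMethod::parse(camStr);
    std::visit(overloaded {
        [&](const TextIngestionMethod &) {
            // only SHA-256 is accepted for text-hashed data
            if (hashAlgo != HashAlgorithm::SHA256)
                throw UnimplementedError("only SHA-256 is supported");
        },
        [&](const FileIngestionMethod & fim) {
            // any parsed HashAlgorithm is forwarded unchanged
            store->addToStoreFromDump(source, name, fim, hashAlgo, repair, refs);
        },
    }, contentAddressMethod.raw);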
@ -215,25 +215,25 @@ static StringSet parseStrings(std::istream & str, bool arePaths)

static DerivationOutput parseDerivationOutput(
const StoreDirConfig & store,
std::string_view pathS, std::string_view hashAlgo, std::string_view hashS,
std::string_view pathS, std::string_view hashAlgoStr, std::string_view hashS,
const ExperimentalFeatureSettings & xpSettings)
{
if (hashAlgo != "") {
ContentAddressMethod method = ContentAddressMethod::parsePrefix(hashAlgo);
if (hashAlgoStr != "") {
ContentAddressMethod method = ContentAddressMethod::parsePrefix(hashAlgoStr);
if (method == TextIngestionMethod {})
xpSettings.require(Xp::DynamicDerivations);
const auto hashType = parseHashType(hashAlgo);
const auto hashAlgo = parseHashAlgo(hashAlgoStr);
if (hashS == "impure") {
xpSettings.require(Xp::ImpureDerivations);
if (pathS != "")
throw FormatError("impure derivation output should not specify output path");
return DerivationOutput::Impure {
.method = std::move(method),
.hashType = std::move(hashType),
.hashAlgo = std::move(hashAlgo),
};
} else if (hashS != "") {
validatePath(pathS);
auto hash = Hash::parseNonSRIUnprefixed(hashS, hashType);
auto hash = Hash::parseNonSRIUnprefixed(hashS, hashAlgo);
return DerivationOutput::CAFixed {
.ca = ContentAddress {
.method = std::move(method),
@ -246,7 +246,7 @@ static DerivationOutput parseDerivationOutput(
throw FormatError("content-addressed derivation output should not specify output path");
return DerivationOutput::CAFloating {
.method = std::move(method),
.hashType = std::move(hashType),
.hashAlgo = std::move(hashAlgo),
};
}
} else {
@ -547,7 +547,7 @@ std::string Derivation::unparse(const StoreDirConfig & store, bool maskOutputs,
},
[&](const DerivationOutput::CAFloating & dof) {
s += ','; printUnquotedString(s, "");
s += ','; printUnquotedString(s, dof.method.renderPrefix() + printHashType(dof.hashType));
s += ','; printUnquotedString(s, dof.method.renderPrefix() + printHashAlgo(dof.hashAlgo));
s += ','; printUnquotedString(s, "");
},
[&](const DerivationOutput::Deferred &) {
@ -558,7 +558,7 @@ std::string Derivation::unparse(const StoreDirConfig & store, bool maskOutputs,
[&](const DerivationOutput::Impure & doi) {
// FIXME
s += ','; printUnquotedString(s, "");
s += ','; printUnquotedString(s, doi.method.renderPrefix() + printHashType(doi.hashType));
s += ','; printUnquotedString(s, doi.method.renderPrefix() + printHashAlgo(doi.hashAlgo));
s += ','; printUnquotedString(s, "impure");
}
}, i.second.raw);
@ -631,7 +631,7 @@ DerivationType BasicDerivation::type() const
floatingCAOutputs,
deferredIAOutputs,
impureOutputs;
std::optional<HashType> floatingHashType;
std::optional<HashAlgorithm> floatingHashAlgo;

for (auto & i : outputs) {
std::visit(overloaded {
@ -643,10 +643,10 @@ DerivationType BasicDerivation::type() const
},
[&](const DerivationOutput::CAFloating & dof) {
floatingCAOutputs.insert(i.first);
if (!floatingHashType) {
floatingHashType = dof.hashType;
if (!floatingHashAlgo) {
floatingHashAlgo = dof.hashAlgo;
} else {
if (*floatingHashType != dof.hashType)
if (*floatingHashAlgo != dof.hashAlgo)
throw Error("all floating outputs must use the same hash type");
}
},
@ -774,7 +774,7 @@ DrvHash hashDerivationModulo(Store & store, const Derivation & drv, bool maskOut
std::map<std::string, Hash> outputHashes;
for (const auto & i : drv.outputs) {
auto & dof = std::get<DerivationOutput::CAFixed>(i.second.raw);
auto hash = hashString(htSHA256, "fixed:out:"
auto hash = hashString(HashAlgorithm::SHA256, "fixed:out:"
+ dof.ca.printMethodAlgo() + ":"
+ dof.ca.hash.to_string(HashFormat::Base16, false) + ":"
+ store.printStorePath(dof.path(store, drv.name, i.first)));
@ -825,7 +825,7 @@ DrvHash hashDerivationModulo(Store & store, const Derivation & drv, bool maskOut
}
}

auto hash = hashString(htSHA256, drv.unparse(store, maskOutputs, &inputs2));
auto hash = hashString(HashAlgorithm::SHA256, drv.unparse(store, maskOutputs, &inputs2));

std::map<std::string, Hash> outputHashes;
for (const auto & [outputName, _] : drv.outputs) {
@ -930,7 +930,7 @@ void writeDerivation(Sink & out, const StoreDirConfig & store, const BasicDeriva
},
[&](const DerivationOutput::CAFloating & dof) {
out << ""
<< (dof.method.renderPrefix() + printHashType(dof.hashType))
<< (dof.method.renderPrefix() + printHashAlgo(dof.hashAlgo))
<< "";
},
[&](const DerivationOutput::Deferred &) {
@ -940,7 +940,7 @@ void writeDerivation(Sink & out, const StoreDirConfig & store, const BasicDeriva
},
[&](const DerivationOutput::Impure & doi) {
out << ""
<< (doi.method.renderPrefix() + printHashType(doi.hashType))
<< (doi.method.renderPrefix() + printHashAlgo(doi.hashAlgo))
<< "impure";
},
}, i.second.raw);
@ -958,7 +958,7 @@ void writeDerivation(Sink & out, const StoreDirConfig & store, const BasicDeriva
std::string hashPlaceholder(const OutputNameView outputName)
{
// FIXME: memoize?
return "/" + hashString(htSHA256, concatStrings("nix-output:", outputName)).to_string(HashFormat::Base32, false);
return "/" + hashString(HashAlgorithm::SHA256, concatStrings("nix-output:", outputName)).to_string(HashFormat::Nix32, false);
}

@ -1002,13 +1002,13 @@ static void rewriteDerivation(Store & store, BasicDerivation & drv, const String

}

std::optional<BasicDerivation> Derivation::tryResolve(Store & store) const
std::optional<BasicDerivation> Derivation::tryResolve(Store & store, Store * evalStore) const
{
std::map<std::pair<StorePath, std::string>, StorePath> inputDrvOutputs;

std::function<void(const StorePath &, const DerivedPathMap<StringSet>::ChildNode &)> accum;
accum = [&](auto & inputDrv, auto & node) {
for (auto & [outputName, outputPath] : store.queryPartialDerivationOutputMap(inputDrv)) {
for (auto & [outputName, outputPath] : store.queryPartialDerivationOutputMap(inputDrv, evalStore)) {
if (outputPath) {
inputDrvOutputs.insert_or_assign({inputDrv, outputName}, *outputPath);
if (auto p = get(node.childMap, outputName))
@ -1150,7 +1150,7 @@ void Derivation::checkInvariants(Store & store, const StorePath & drvPath) const
}

const Hash impureOutputHash = hashString(htSHA256, "impure");
const Hash impureOutputHash = hashString(HashAlgorithm::SHA256, "impure");

nlohmann::json DerivationOutput::toJSON(
const StoreDirConfig & store, std::string_view drvName, OutputNameView outputName) const
@ -1167,11 +1167,11 @@ nlohmann::json DerivationOutput::toJSON(
// FIXME print refs?
},
[&](const DerivationOutput::CAFloating & dof) {
res["hashAlgo"] = dof.method.renderPrefix() + printHashType(dof.hashType);
res["hashAlgo"] = dof.method.renderPrefix() + printHashAlgo(dof.hashAlgo);
},
[&](const DerivationOutput::Deferred &) {},
[&](const DerivationOutput::Impure & doi) {
res["hashAlgo"] = doi.method.renderPrefix() + printHashType(doi.hashType);
res["hashAlgo"] = doi.method.renderPrefix() + printHashAlgo(doi.hashAlgo);
res["impure"] = true;
},
}, raw);
@ -1191,15 +1191,15 @@ DerivationOutput DerivationOutput::fromJSON(
for (const auto & [key, _] : json)
keys.insert(key);

auto methodAlgo = [&]() -> std::pair<ContentAddressMethod, HashType> {
std::string hashAlgo = json["hashAlgo"];
auto methodAlgo = [&]() -> std::pair<ContentAddressMethod, HashAlgorithm> {
std::string hashAlgoStr = json["hashAlgo"];
// remaining to parse, will be mutated by parsers
std::string_view s = hashAlgo;
std::string_view s = hashAlgoStr;
ContentAddressMethod method = ContentAddressMethod::parsePrefix(s);
if (method == TextIngestionMethod {})
xpSettings.require(Xp::DynamicDerivations);
auto hashType = parseHashType(s);
return { std::move(method), std::move(hashType) };
auto hashAlgo = parseHashAlgo(s);
return { std::move(method), std::move(hashAlgo) };
};

if (keys == (std::set<std::string_view> { "path" })) {
@ -1209,11 +1209,11 @@ DerivationOutput DerivationOutput::fromJSON(
}

else if (keys == (std::set<std::string_view> { "path", "hashAlgo", "hash" })) {
auto [method, hashType] = methodAlgo();
auto [method, hashAlgo] = methodAlgo();
auto dof = DerivationOutput::CAFixed {
.ca = ContentAddress {
.method = std::move(method),
.hash = Hash::parseNonSRIUnprefixed((std::string) json["hash"], hashType),
.hash = Hash::parseNonSRIUnprefixed((std::string) json["hash"], hashAlgo),
},
};
if (dof.path(store, drvName, outputName) != store.parseStorePath((std::string) json["path"]))
@ -1223,10 +1223,10 @@ DerivationOutput DerivationOutput::fromJSON(

else if (keys == (std::set<std::string_view> { "hashAlgo" })) {
xpSettings.require(Xp::CaDerivations);
auto [method, hashType] = methodAlgo();
auto [method, hashAlgo] = methodAlgo();
return DerivationOutput::CAFloating {
.method = std::move(method),
.hashType = std::move(hashType),
.hashAlgo = std::move(hashAlgo),
};
}

@ -1236,10 +1236,10 @@ DerivationOutput DerivationOutput::fromJSON(

else if (keys == (std::set<std::string_view> { "hashAlgo", "impure" })) {
xpSettings.require(Xp::ImpureDerivations);
auto [method, hashType] = methodAlgo();
auto [method, hashAlgo] = methodAlgo();
return DerivationOutput::Impure {
.method = std::move(method),
.hashType = hashType,
.hashAlgo = hashAlgo,
};
}
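For fixed-output derivations, `hashDerivationModulo` above hashes a colon-separated preimage built from the method/algorithm prefix, the base-16 content hash, and the printed output path. A sketch of the shape of that preimage, with a hypothetical output path:

    // Preimage shape for the CAFixed case above (store path hypothetical):
    //   "fixed:out:r:sha256:<base16 hash>:/nix/store/<hash>-example"
    auto hash = hashString(HashAlgorithm::SHA256, "fixed:out:"
        + dof.ca.printMethodAlgo() + ":"                         // e.g. "r:sha256"
        + dof.ca.hash.to_string(HashFormat::Base16, false) + ":"
        + store.printStorePath(dof.path(store, drv.name, i.first)));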
@ -75,9 +75,9 @@ struct DerivationOutput
/**
* How the serialization will be hashed
*/
HashType hashType;
HashAlgorithm hashAlgo;

GENERATE_CMP(CAFloating, me->method, me->hashType);
GENERATE_CMP(CAFloating, me->method, me->hashAlgo);
};

/**
@ -102,9 +102,9 @@ struct DerivationOutput
/**
* How the serialization will be hashed
*/
HashType hashType;
HashAlgorithm hashAlgo;

GENERATE_CMP(Impure, me->method, me->hashType);
GENERATE_CMP(Impure, me->method, me->hashAlgo);
};

typedef std::variant<
@ -342,7 +342,7 @@ struct Derivation : BasicDerivation
* 2. Input placeholders are replaced with realized input store
* paths.
*/
std::optional<BasicDerivation> tryResolve(Store & store) const;
std::optional<BasicDerivation> tryResolve(Store & store, Store * evalStore = nullptr) const;

/**
* Like the above, but instead of querying the Nix database for
@ -5,7 +5,7 @@ namespace nix {

std::string DownstreamPlaceholder::render() const
{
return "/" + hash.to_string(HashFormat::Base32, false);
return "/" + hash.to_string(HashFormat::Nix32, false);
}

@ -19,7 +19,7 @@ DownstreamPlaceholder DownstreamPlaceholder::unknownCaOutput(
auto drvName = drvNameWithExtension.substr(0, drvNameWithExtension.size() - 4);
auto clearText = "nix-upstream-output:" + std::string { drvPath.hashPart() } + ":" + outputPathName(drvName, outputName);
return DownstreamPlaceholder {
hashString(htSHA256, clearText)
hashString(HashAlgorithm::SHA256, clearText)
};
}

@ -31,10 +31,10 @@ DownstreamPlaceholder DownstreamPlaceholder::unknownDerivation(
xpSettings.require(Xp::DynamicDerivations);
auto compressed = compressHash(placeholder.hash, 20);
auto clearText = "nix-computed-output:"
+ compressed.to_string(HashFormat::Base32, false)
+ compressed.to_string(HashFormat::Nix32, false)
+ ":" + std::string { outputName };
return DownstreamPlaceholder {
hashString(htSHA256, clearText)
hashString(HashAlgorithm::SHA256, clearText)
};
}
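Both placeholder kinds above follow the same pattern: build a `nix-...:` clear-text tag, hash it with SHA-256, and render the result in the nix32 alphabet behind a leading slash. A sketch of the full pipeline for the `unknownCaOutput` case, using only the calls shown above:

    // End-to-end placeholder computation (names as in the hunk):
    auto clearText = "nix-upstream-output:" + std::string { drvPath.hashPart() }
        + ":" + outputPathName(drvName, outputName);
    auto rendered = "/" + hashString(HashAlgorithm::SHA256, clearText)
        .to_string(HashFormat::Nix32, false);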
@ -30,7 +30,7 @@ void Store::exportPath(const StorePath & path, Sink & sink)
{
auto info = queryPathInfo(path);

HashSink hashSink(htSHA256);
HashSink hashSink(HashAlgorithm::SHA256);
TeeSink teeSink(sink, hashSink);

narFromPath(path, teeSink);
@ -39,9 +39,9 @@ void Store::exportPath(const StorePath & path, Sink & sink)
filesystem corruption from spreading to other machines.
Don't complain if the stored hash is zero (unknown). */
Hash hash = hashSink.currentHash().first;
if (hash != info->narHash && info->narHash != Hash(info->narHash.type))
if (hash != info->narHash && info->narHash != Hash(info->narHash.algo))
throw Error("hash of path '%s' has changed from '%s' to '%s'!",
printStorePath(path), info->narHash.to_string(HashFormat::Base32, true), hash.to_string(HashFormat::Base32, true));
printStorePath(path), info->narHash.to_string(HashFormat::Nix32, true), hash.to_string(HashFormat::Nix32, true));

teeSink
<< exportMagic
@ -79,7 +79,7 @@ StorePaths Store::importPaths(Source & source, CheckSigsFlag checkSigs)
auto references = CommonProto::Serialise<StorePathSet>::read(*this,
CommonProto::ReadConn { .from = source });
auto deriver = readString(source);
auto narHash = hashString(htSHA256, saved.s);
auto narHash = hashString(HashAlgorithm::SHA256, saved.s);

ValidPathInfo info { path, narHash };
if (deriver != "")
@ -50,7 +50,7 @@ static void makeSymlink(const Path & link, const Path & target)

void LocalStore::addIndirectRoot(const Path & path)
{
std::string hash = hashString(htSHA1, path).to_string(HashFormat::Base32, false);
std::string hash = hashString(HashAlgorithm::SHA1, path).to_string(HashFormat::Nix32, false);
Path realRoot = canonPath(fmt("%1%/%2%/auto/%3%", stateDir, gcRootsDir, hash));
makeSymlink(realRoot, path);
}
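`addIndirectRoot` names each root symlink after the nix32-rendered SHA-1 of the target path, giving a flat layout under the GC roots directory. Schematically (the concrete directory names shown in the comment are assumptions based on common defaults):

    // Resulting layout, e.g. assuming stateDir = /nix/var/nix:
    //   /nix/var/nix/gcroots/auto/<nix32(sha1(path))>  ->  <path>
    std::string hash = hashString(HashAlgorithm::SHA1, path)
        .to_string(HashFormat::Nix32, false);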
@ -1,3 +1,4 @@
#include "legacy-ssh-store.hh"
#include "ssh-store-config.hh"
#include "archive.hh"
#include "pool.hh"
@ -13,432 +14,355 @@

namespace nix {

struct LegacySSHStoreConfig : virtual CommonSSHStoreConfig
std::string LegacySSHStoreConfig::doc()
{
using CommonSSHStoreConfig::CommonSSHStoreConfig;
return
#include "legacy-ssh-store.md"
;
}

const Setting<Path> remoteProgram{this, "nix-store", "remote-program",
"Path to the `nix-store` executable on the remote machine."};

const Setting<int> maxConnections{this, 1, "max-connections",
"Maximum number of concurrent SSH connections."};

const std::string name() override { return "SSH Store"; }

std::string doc() override
{
return
#include "legacy-ssh-store.md"
;
}
};

struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Store
struct LegacySSHStore::Connection
{
// Hack for getting remote build log output.
// Intentionally not in `LegacySSHStoreConfig` so that it doesn't appear in
// the documentation
const Setting<int> logFD{this, -1, "log-fd", "file descriptor to which SSH's stderr is connected"};

struct Connection
{
std::unique_ptr<SSHMaster::Connection> sshConn;
FdSink to;
FdSource from;
ServeProto::Version remoteVersion;
bool good = true;

/**
* Coercion to `ServeProto::ReadConn`. This makes it easy to use the
* factored out serve protocol serialisers with a
* `LegacySSHStore::Connection`.
*
* The serve protocol connection types are unidirectional, unlike
* this type.
*/
operator ServeProto::ReadConn ()
{
return ServeProto::ReadConn {
.from = from,
.version = remoteVersion,
};
}

/*
* Coercion to `ServeProto::WriteConn`. This makes it easy to use the
* factored out serve protocol serialisers with a
* `LegacySSHStore::Connection`.
*
* The serve protocol connection types are unidirectional, unlike
* this type.
*/
operator ServeProto::WriteConn ()
{
return ServeProto::WriteConn {
.to = to,
.version = remoteVersion,
};
}
};
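The two coercion operators let a single bidirectional `Connection` be handed to the factored-out serve-protocol serialisers, which only know the unidirectional `ReadConn`/`WriteConn` types. A hedged usage sketch, built from calls that appear later in this file:

    // *conn converts implicitly to whichever side the serialiser needs:
    auto paths = ServeProto::Serialise<StorePathSet>::read(*this, *conn); // ReadConn
    ServeProto::write(*this, *conn, paths);                               // WriteConn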

std::string host;

ref<Pool<Connection>> connections;

SSHMaster master;

static std::set<std::string> uriSchemes() { return {"ssh"}; }

LegacySSHStore(const std::string & scheme, const std::string & host, const Params & params)
: StoreConfig(params)
, CommonSSHStoreConfig(params)
, LegacySSHStoreConfig(params)
, Store(params)
, host(host)
, connections(make_ref<Pool<Connection>>(
std::max(1, (int) maxConnections),
[this]() { return openConnection(); },
[](const ref<Connection> & r) { return r->good; }
))
, master(
host,
sshKey,
sshPublicHostKey,
// Use SSH master only if using more than 1 connection.
connections->capacity() > 1,
compress,
logFD)
{
}

ref<Connection> openConnection()
{
auto conn = make_ref<Connection>();
conn->sshConn = master.startCommand(
fmt("%s --serve --write", remoteProgram)
+ (remoteStore.get() == "" ? "" : " --store " + shellEscape(remoteStore.get())));
conn->to = FdSink(conn->sshConn->in.get());
conn->from = FdSource(conn->sshConn->out.get());

try {
conn->to << SERVE_MAGIC_1 << SERVE_PROTOCOL_VERSION;
conn->to.flush();

StringSink saved;
try {
TeeSource tee(conn->from, saved);
unsigned int magic = readInt(tee);
if (magic != SERVE_MAGIC_2)
throw Error("'nix-store --serve' protocol mismatch from '%s'", host);
} catch (SerialisationError & e) {
/* In case the other side is waiting for our input,
close it. */
conn->sshConn->in.close();
auto msg = conn->from.drain();
throw Error("'nix-store --serve' protocol mismatch from '%s', got '%s'",
host, chomp(saved.s + msg));
}
conn->remoteVersion = readInt(conn->from);
if (GET_PROTOCOL_MAJOR(conn->remoteVersion) != 0x200)
throw Error("unsupported 'nix-store --serve' protocol version on '%s'", host);

} catch (EndOfFile & e) {
throw Error("cannot connect to '%1%'", host);
}

return conn;
};

std::string getUri() override
{
return *uriSchemes().begin() + "://" + host;
}

void queryPathInfoUncached(const StorePath & path,
Callback<std::shared_ptr<const ValidPathInfo>> callback) noexcept override
{
try {
auto conn(connections->get());

/* No longer support missing NAR hash */
assert(GET_PROTOCOL_MINOR(conn->remoteVersion) >= 4);

debug("querying remote host '%s' for info on '%s'", host, printStorePath(path));

conn->to << ServeProto::Command::QueryPathInfos << PathSet{printStorePath(path)};
conn->to.flush();

auto p = readString(conn->from);
if (p.empty()) return callback(nullptr);
auto path2 = parseStorePath(p);
assert(path == path2);
/* Hash will be set below. FIXME construct ValidPathInfo at end. */
auto info = std::make_shared<ValidPathInfo>(path, Hash::dummy);

auto deriver = readString(conn->from);
if (deriver != "")
info->deriver = parseStorePath(deriver);
info->references = ServeProto::Serialise<StorePathSet>::read(*this, *conn);
readLongLong(conn->from); // download size
info->narSize = readLongLong(conn->from);

{
auto s = readString(conn->from);
if (s == "")
throw Error("NAR hash is now mandatory");
info->narHash = Hash::parseAnyPrefixed(s);
}
info->ca = ContentAddress::parseOpt(readString(conn->from));
info->sigs = readStrings<StringSet>(conn->from);

auto s = readString(conn->from);
assert(s == "");

callback(std::move(info));
} catch (...) { callback.rethrow(); }
}

void addToStore(const ValidPathInfo & info, Source & source,
RepairFlag repair, CheckSigsFlag checkSigs) override
{
debug("adding path '%s' to remote host '%s'", printStorePath(info.path), host);

auto conn(connections->get());

if (GET_PROTOCOL_MINOR(conn->remoteVersion) >= 5) {

conn->to
<< ServeProto::Command::AddToStoreNar
<< printStorePath(info.path)
<< (info.deriver ? printStorePath(*info.deriver) : "")
<< info.narHash.to_string(HashFormat::Base16, false);
ServeProto::write(*this, *conn, info.references);
conn->to
<< info.registrationTime
<< info.narSize
<< info.ultimate
<< info.sigs
<< renderContentAddress(info.ca);
try {
copyNAR(source, conn->to);
} catch (...) {
conn->good = false;
throw;
}
conn->to.flush();

} else {

conn->to
<< ServeProto::Command::ImportPaths
<< 1;
try {
copyNAR(source, conn->to);
} catch (...) {
conn->good = false;
throw;
}
conn->to
<< exportMagic
<< printStorePath(info.path);
ServeProto::write(*this, *conn, info.references);
conn->to
<< (info.deriver ? printStorePath(*info.deriver) : "")
<< 0
<< 0;
conn->to.flush();

}

if (readInt(conn->from) != 1)
throw Error("failed to add path '%s' to remote host '%s'", printStorePath(info.path), host);
}

void narFromPath(const StorePath & path, Sink & sink) override
{
auto conn(connections->get());

conn->to << ServeProto::Command::DumpStorePath << printStorePath(path);
conn->to.flush();
copyNAR(conn->from, sink);
}

std::optional<StorePath> queryPathFromHashPart(const std::string & hashPart) override
{ unsupported("queryPathFromHashPart"); }

StorePath addToStore(
std::string_view name,
const Path & srcPath,
FileIngestionMethod method,
HashType hashAlgo,
PathFilter & filter,
RepairFlag repair,
const StorePathSet & references) override
{ unsupported("addToStore"); }

StorePath addTextToStore(
std::string_view name,
std::string_view s,
const StorePathSet & references,
RepairFlag repair) override
{ unsupported("addTextToStore"); }
private:

void putBuildSettings(Connection & conn)
{
conn.to
<< settings.maxSilentTime
<< settings.buildTimeout;
if (GET_PROTOCOL_MINOR(conn.remoteVersion) >= 2)
conn.to
<< settings.maxLogSize;
if (GET_PROTOCOL_MINOR(conn.remoteVersion) >= 3)
conn.to
<< 0 // buildRepeat hasn't worked for ages anyway
<< 0;

if (GET_PROTOCOL_MINOR(conn.remoteVersion) >= 7) {
conn.to << ((int) settings.keepFailed);
}
}

public:

BuildResult buildDerivation(const StorePath & drvPath, const BasicDerivation & drv,
BuildMode buildMode) override
{
auto conn(connections->get());

conn->to
<< ServeProto::Command::BuildDerivation
<< printStorePath(drvPath);
writeDerivation(conn->to, *this, drv);

putBuildSettings(*conn);

conn->to.flush();

return ServeProto::Serialise<BuildResult>::read(*this, *conn);
}

void buildPaths(const std::vector<DerivedPath> & drvPaths, BuildMode buildMode, std::shared_ptr<Store> evalStore) override
{
if (evalStore && evalStore.get() != this)
throw Error("building on an SSH store is incompatible with '--eval-store'");

auto conn(connections->get());

conn->to << ServeProto::Command::BuildPaths;
Strings ss;
for (auto & p : drvPaths) {
auto sOrDrvPath = StorePathWithOutputs::tryFromDerivedPath(p);
std::visit(overloaded {
[&](const StorePathWithOutputs & s) {
ss.push_back(s.to_string(*this));
},
[&](const StorePath & drvPath) {
throw Error("wanted to fetch '%s' but the legacy ssh protocol doesn't support merely substituting drv files via the build paths command. It would build them instead. Try using ssh-ng://", printStorePath(drvPath));
},
[&](std::monostate) {
throw Error("wanted build derivation that is itself a build product, but the legacy ssh protocol doesn't support that. Try using ssh-ng://");
},
}, sOrDrvPath);
}
conn->to << ss;

putBuildSettings(*conn);

conn->to.flush();

BuildResult result;
result.status = (BuildResult::Status) readInt(conn->from);

if (!result.success()) {
conn->from >> result.errorMsg;
throw Error(result.status, result.errorMsg);
}
}
void ensurePath(const StorePath & path) override
{ unsupported("ensurePath"); }

virtual ref<SourceAccessor> getFSAccessor(bool requireValidPath) override
{ unsupported("getFSAccessor"); }
std::unique_ptr<SSHMaster::Connection> sshConn;
FdSink to;
FdSource from;
ServeProto::Version remoteVersion;
bool good = true;

/**
* The default instance would schedule the work on the client side, but
* for consistency with `buildPaths` and `buildDerivation` it should happen
* on the remote side.
* Coercion to `ServeProto::ReadConn`. This makes it easy to use the
* factored out serve protocol serialisers with a
* `LegacySSHStore::Connection`.
*
* We make this fail for now so we can implement this properly later
* without it being a breaking change.
* The serve protocol connection types are unidirectional, unlike
* this type.
*/
void repairPath(const StorePath & path) override
{ unsupported("repairPath"); }

void computeFSClosure(const StorePathSet & paths,
StorePathSet & out, bool flipDirection = false,
bool includeOutputs = false, bool includeDerivers = false) override
operator ServeProto::ReadConn ()
{
if (flipDirection || includeDerivers) {
Store::computeFSClosure(paths, out, flipDirection, includeOutputs, includeDerivers);
return;
}

auto conn(connections->get());

conn->to
<< ServeProto::Command::QueryClosure
<< includeOutputs;
ServeProto::write(*this, *conn, paths);
conn->to.flush();

for (auto & i : ServeProto::Serialise<StorePathSet>::read(*this, *conn))
out.insert(i);
return ServeProto::ReadConn {
.from = from,
.version = remoteVersion,
};
}

StorePathSet queryValidPaths(const StorePathSet & paths,
SubstituteFlag maybeSubstitute = NoSubstitute) override
{
auto conn(connections->get());

conn->to
<< ServeProto::Command::QueryValidPaths
<< false // lock
<< maybeSubstitute;
ServeProto::write(*this, *conn, paths);
conn->to.flush();

return ServeProto::Serialise<StorePathSet>::read(*this, *conn);
}

void connect() override
{
auto conn(connections->get());
}

unsigned int getProtocol() override
{
auto conn(connections->get());
return conn->remoteVersion;
}

/**
* The legacy ssh protocol doesn't support checking for trusted-user.
* Try using ssh-ng:// instead if you want to know.
/*
* Coercion to `ServeProto::WriteConn`. This makes it easy to use the
* factored out serve protocol serialisers with a
* `LegacySSHStore::Connection`.
*
* The serve protocol connection types are unidirectional, unlike
* this type.
*/
std::optional<TrustedFlag> isTrustedClient() override
operator ServeProto::WriteConn ()
{
return std::nullopt;
return ServeProto::WriteConn {
.to = to,
.version = remoteVersion,
};
}

void queryRealisationUncached(const DrvOutput &,
Callback<std::shared_ptr<const Realisation>> callback) noexcept override
// TODO: Implement
{ unsupported("queryRealisation"); }
};

LegacySSHStore::LegacySSHStore(const std::string & scheme, const std::string & host, const Params & params)
: StoreConfig(params)
, CommonSSHStoreConfig(params)
, LegacySSHStoreConfig(params)
, Store(params)
, host(host)
, connections(make_ref<Pool<Connection>>(
std::max(1, (int) maxConnections),
[this]() { return openConnection(); },
[](const ref<Connection> & r) { return r->good; }
))
, master(
host,
sshKey,
sshPublicHostKey,
// Use SSH master only if using more than 1 connection.
connections->capacity() > 1,
compress,
logFD)
{
}

ref<LegacySSHStore::Connection> LegacySSHStore::openConnection()
{
auto conn = make_ref<Connection>();
conn->sshConn = master.startCommand(
fmt("%s --serve --write", remoteProgram)
+ (remoteStore.get() == "" ? "" : " --store " + shellEscape(remoteStore.get())));
conn->to = FdSink(conn->sshConn->in.get());
conn->from = FdSource(conn->sshConn->out.get());

try {
conn->to << SERVE_MAGIC_1 << SERVE_PROTOCOL_VERSION;
conn->to.flush();

StringSink saved;
try {
TeeSource tee(conn->from, saved);
unsigned int magic = readInt(tee);
if (magic != SERVE_MAGIC_2)
throw Error("'nix-store --serve' protocol mismatch from '%s'", host);
} catch (SerialisationError & e) {
/* In case the other side is waiting for our input,
close it. */
conn->sshConn->in.close();
auto msg = conn->from.drain();
throw Error("'nix-store --serve' protocol mismatch from '%s', got '%s'",
host, chomp(saved.s + msg));
}
conn->remoteVersion = readInt(conn->from);
if (GET_PROTOCOL_MAJOR(conn->remoteVersion) != 0x200)
throw Error("unsupported 'nix-store --serve' protocol version on '%s'", host);

} catch (EndOfFile & e) {
throw Error("cannot connect to '%1%'", host);
}

return conn;
};

std::string LegacySSHStore::getUri()
{
return *uriSchemes().begin() + "://" + host;
}

void LegacySSHStore::queryPathInfoUncached(const StorePath & path,
Callback<std::shared_ptr<const ValidPathInfo>> callback) noexcept
{
try {
auto conn(connections->get());

/* No longer support missing NAR hash */
assert(GET_PROTOCOL_MINOR(conn->remoteVersion) >= 4);

debug("querying remote host '%s' for info on '%s'", host, printStorePath(path));

conn->to << ServeProto::Command::QueryPathInfos << PathSet{printStorePath(path)};
conn->to.flush();

auto p = readString(conn->from);
if (p.empty()) return callback(nullptr);
auto path2 = parseStorePath(p);
assert(path == path2);
auto info = std::make_shared<ValidPathInfo>(
path,
ServeProto::Serialise<UnkeyedValidPathInfo>::read(*this, *conn));

if (info->narHash == Hash::dummy)
throw Error("NAR hash is now mandatory");

auto s = readString(conn->from);
assert(s == "");

callback(std::move(info));
} catch (...) { callback.rethrow(); }
}

void LegacySSHStore::addToStore(const ValidPathInfo & info, Source & source,
RepairFlag repair, CheckSigsFlag checkSigs)
{
debug("adding path '%s' to remote host '%s'", printStorePath(info.path), host);

auto conn(connections->get());

if (GET_PROTOCOL_MINOR(conn->remoteVersion) >= 5) {

conn->to
<< ServeProto::Command::AddToStoreNar
<< printStorePath(info.path)
<< (info.deriver ? printStorePath(*info.deriver) : "")
<< info.narHash.to_string(HashFormat::Base16, false);
ServeProto::write(*this, *conn, info.references);
conn->to
<< info.registrationTime
<< info.narSize
<< info.ultimate
<< info.sigs
<< renderContentAddress(info.ca);
try {
copyNAR(source, conn->to);
} catch (...) {
conn->good = false;
throw;
}
conn->to.flush();

} else {

conn->to
<< ServeProto::Command::ImportPaths
<< 1;
try {
copyNAR(source, conn->to);
} catch (...) {
conn->good = false;
throw;
}
conn->to
<< exportMagic
<< printStorePath(info.path);
ServeProto::write(*this, *conn, info.references);
conn->to
<< (info.deriver ? printStorePath(*info.deriver) : "")
<< 0
<< 0;
conn->to.flush();

}

if (readInt(conn->from) != 1)
throw Error("failed to add path '%s' to remote host '%s'", printStorePath(info.path), host);
}

void LegacySSHStore::narFromPath(const StorePath & path, Sink & sink)
{
auto conn(connections->get());

conn->to << ServeProto::Command::DumpStorePath << printStorePath(path);
conn->to.flush();
copyNAR(conn->from, sink);
}

void LegacySSHStore::putBuildSettings(Connection & conn)
{
ServeProto::write(*this, conn, ServeProto::BuildOptions {
.maxSilentTime = settings.maxSilentTime,
.buildTimeout = settings.buildTimeout,
.maxLogSize = settings.maxLogSize,
.nrRepeats = 0, // buildRepeat hasn't worked for ages anyway
.enforceDeterminism = 0,
.keepFailed = settings.keepFailed,
});
}
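Compared with the old member function shown earlier, the new `putBuildSettings` sends a single `ServeProto::BuildOptions` aggregate instead of hand-writing each field; the per-version gating that used to live here is presumably handled inside the `BuildOptions` serialiser. A hypothetical sketch of where that gating would move:

    // Hypothetical serialiser body; field names follow the aggregate above.
    // if (GET_PROTOCOL_MINOR(conn.version) >= 2) conn.to << options.maxLogSize;
    // if (GET_PROTOCOL_MINOR(conn.version) >= 7) conn.to << ((int) options.keepFailed);
    ServeProto::write(*this, conn, options);  // callers just send the whole struct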

BuildResult LegacySSHStore::buildDerivation(const StorePath & drvPath, const BasicDerivation & drv,
BuildMode buildMode)
{
auto conn(connections->get());

conn->to
<< ServeProto::Command::BuildDerivation
<< printStorePath(drvPath);
writeDerivation(conn->to, *this, drv);

putBuildSettings(*conn);

conn->to.flush();

return ServeProto::Serialise<BuildResult>::read(*this, *conn);
}

void LegacySSHStore::buildPaths(const std::vector<DerivedPath> & drvPaths, BuildMode buildMode, std::shared_ptr<Store> evalStore)
{
if (evalStore && evalStore.get() != this)
throw Error("building on an SSH store is incompatible with '--eval-store'");

auto conn(connections->get());

conn->to << ServeProto::Command::BuildPaths;
Strings ss;
for (auto & p : drvPaths) {
auto sOrDrvPath = StorePathWithOutputs::tryFromDerivedPath(p);
std::visit(overloaded {
[&](const StorePathWithOutputs & s) {
ss.push_back(s.to_string(*this));
},
[&](const StorePath & drvPath) {
throw Error("wanted to fetch '%s' but the legacy ssh protocol doesn't support merely substituting drv files via the build paths command. It would build them instead. Try using ssh-ng://", printStorePath(drvPath));
},
[&](std::monostate) {
throw Error("wanted build derivation that is itself a build product, but the legacy ssh protocol doesn't support that. Try using ssh-ng://");
},
}, sOrDrvPath);
}
conn->to << ss;

putBuildSettings(*conn);

conn->to.flush();

BuildResult result;
result.status = (BuildResult::Status) readInt(conn->from);

if (!result.success()) {
conn->from >> result.errorMsg;
throw Error(result.status, result.errorMsg);
}
}

void LegacySSHStore::computeFSClosure(const StorePathSet & paths,
StorePathSet & out, bool flipDirection,
bool includeOutputs, bool includeDerivers)
{
if (flipDirection || includeDerivers) {
Store::computeFSClosure(paths, out, flipDirection, includeOutputs, includeDerivers);
return;
}

auto conn(connections->get());

conn->to
<< ServeProto::Command::QueryClosure
<< includeOutputs;
ServeProto::write(*this, *conn, paths);
conn->to.flush();

for (auto & i : ServeProto::Serialise<StorePathSet>::read(*this, *conn))
out.insert(i);
}

StorePathSet LegacySSHStore::queryValidPaths(const StorePathSet & paths,
SubstituteFlag maybeSubstitute)
{
auto conn(connections->get());

conn->to
<< ServeProto::Command::QueryValidPaths
<< false // lock
<< maybeSubstitute;
ServeProto::write(*this, *conn, paths);
conn->to.flush();

return ServeProto::Serialise<StorePathSet>::read(*this, *conn);
}

void LegacySSHStore::connect()
{
auto conn(connections->get());
}

unsigned int LegacySSHStore::getProtocol()
{
auto conn(connections->get());
return conn->remoteVersion;
}

/**
* The legacy ssh protocol doesn't support checking for trusted-user.
* Try using ssh-ng:// instead if you want to know.
*/
std::optional<TrustedFlag> isTrustedClient()
{
return std::nullopt;
}

static RegisterStoreImplementation<LegacySSHStore, LegacySSHStoreConfig> regLegacySSHStore;

}
132
src/libstore/legacy-ssh-store.hh
Normal file
@ -0,0 +1,132 @@
#pragma once
///@file

#include "ssh-store-config.hh"
#include "store-api.hh"
#include "ssh.hh"
#include "callback.hh"
#include "pool.hh"

namespace nix {

struct LegacySSHStoreConfig : virtual CommonSSHStoreConfig
{
using CommonSSHStoreConfig::CommonSSHStoreConfig;

const Setting<Path> remoteProgram{this, "nix-store", "remote-program",
"Path to the `nix-store` executable on the remote machine."};

const Setting<int> maxConnections{this, 1, "max-connections",
"Maximum number of concurrent SSH connections."};

const std::string name() override { return "SSH Store"; }

std::string doc() override;
};

struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Store
{
// Hack for getting remote build log output.
// Intentionally not in `LegacySSHStoreConfig` so that it doesn't appear in
// the documentation
const Setting<int> logFD{this, -1, "log-fd", "file descriptor to which SSH's stderr is connected"};

struct Connection;

std::string host;

ref<Pool<Connection>> connections;

SSHMaster master;

static std::set<std::string> uriSchemes() { return {"ssh"}; }

LegacySSHStore(const std::string & scheme, const std::string & host, const Params & params);

ref<Connection> openConnection();

std::string getUri() override;

void queryPathInfoUncached(const StorePath & path,
Callback<std::shared_ptr<const ValidPathInfo>> callback) noexcept override;

void addToStore(const ValidPathInfo & info, Source & source,
RepairFlag repair, CheckSigsFlag checkSigs) override;

void narFromPath(const StorePath & path, Sink & sink) override;

std::optional<StorePath> queryPathFromHashPart(const std::string & hashPart) override
{ unsupported("queryPathFromHashPart"); }

StorePath addToStore(
std::string_view name,
const Path & srcPath,
FileIngestionMethod method,
HashAlgorithm hashAlgo,
PathFilter & filter,
RepairFlag repair,
const StorePathSet & references) override
{ unsupported("addToStore"); }

StorePath addTextToStore(
std::string_view name,
std::string_view s,
const StorePathSet & references,
RepairFlag repair) override
{ unsupported("addTextToStore"); }

private:

void putBuildSettings(Connection & conn);

public:

BuildResult buildDerivation(const StorePath & drvPath, const BasicDerivation & drv,
BuildMode buildMode) override;

void buildPaths(const std::vector<DerivedPath> & drvPaths, BuildMode buildMode, std::shared_ptr<Store> evalStore) override;

void ensurePath(const StorePath & path) override
{ unsupported("ensurePath"); }

virtual ref<SourceAccessor> getFSAccessor(bool requireValidPath) override
{ unsupported("getFSAccessor"); }

/**
* The default instance would schedule the work on the client side, but
* for consistency with `buildPaths` and `buildDerivation` it should happen
* on the remote side.
*
* We make this fail for now so we can implement this properly later
* without it being a breaking change.
*/
void repairPath(const StorePath & path) override
{ unsupported("repairPath"); }

void computeFSClosure(const StorePathSet & paths,
StorePathSet & out, bool flipDirection = false,
bool includeOutputs = false, bool includeDerivers = false) override;

StorePathSet queryValidPaths(const StorePathSet & paths,
SubstituteFlag maybeSubstitute = NoSubstitute) override;

void connect() override;

unsigned int getProtocol() override;

/**
* The legacy ssh protocol doesn't support checking for trusted-user.
* Try using ssh-ng:// instead if you want to know.
*/
std::optional<TrustedFlag> isTrustedClient() override
{
return std::nullopt;
}

void queryRealisationUncached(const DrvOutput &,
Callback<std::shared_ptr<const Realisation>> callback) noexcept override
// TODO: Implement
{ unsupported("queryRealisation"); }
};

}
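Since `uriSchemes()` registers the `ssh` scheme, client code reaches this store through the generic store factory. A hedged usage sketch (assuming the `openStore` entry point from `store-api.hh`; the host name is a placeholder):

    #include "store-api.hh"

    // Opens a pooled legacy-SSH connection to a hypothetical builder.
    auto store = nix::openStore("ssh://builder.example.org");
    store->connect();  // forces one connection open, verifying the handshake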

@ -955,7 +955,7 @@ void LocalStore::registerValidPaths(const ValidPathInfos & infos)
StorePathSet paths;

for (auto & [_, i] : infos) {
assert(i.narHash.type == htSHA256);
assert(i.narHash.algo == HashAlgorithm::SHA256);
if (isValidPath_(*state, i.path))
updatePathInfo(*state, i);
else
@ -1069,7 +1069,7 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,

/* While restoring the path from the NAR, compute the hash
of the NAR. */
HashSink hashSink(htSHA256);
HashSink hashSink(HashAlgorithm::SHA256);

TeeSource wrapperSource { source, hashSink };

@ -1080,7 +1080,7 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,

if (hashResult.first != info.narHash)
throw Error("hash mismatch importing path '%s';\n specified: %s\n got: %s",
printStorePath(info.path), info.narHash.to_string(HashFormat::Base32, true), hashResult.first.to_string(HashFormat::Base32, true));
printStorePath(info.path), info.narHash.to_string(HashFormat::Nix32, true), hashResult.first.to_string(HashFormat::Nix32, true));

if (hashResult.second != info.narSize)
throw Error("size mismatch importing path '%s';\n specified: %s\n got: %s",
@ -1090,14 +1090,14 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
auto & specified = *info.ca;
auto actualHash = hashCAPath(
specified.method,
specified.hash.type,
specified.hash.algo,
info.path
);
if (specified.hash != actualHash.hash) {
throw Error("ca hash mismatch importing path '%s';\n specified: %s\n got: %s",
printStorePath(info.path),
specified.hash.to_string(HashFormat::Base32, true),
actualHash.hash.to_string(HashFormat::Base32, true));
specified.hash.to_string(HashFormat::Nix32, true),
actualHash.hash.to_string(HashFormat::Nix32, true));
}
}

@ -1116,7 +1116,7 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,

StorePath LocalStore::addToStoreFromDump(Source & source0, std::string_view name,
FileIngestionMethod method, HashType hashAlgo, RepairFlag repair, const StorePathSet & references)
FileIngestionMethod method, HashAlgorithm hashAlgo, RepairFlag repair, const StorePathSet & references)
{
/* For computing the store path. */
auto hashSink = std::make_unique<HashSink>(hashAlgo);
@ -1220,8 +1220,8 @@ StorePath LocalStore::addToStoreFromDump(Source & source0, std::string_view name
/* For computing the nar hash. In recursive SHA-256 mode, this
is the same as the store hash, so no need to do it again. */
auto narHash = std::pair { hash, size };
if (method != FileIngestionMethod::Recursive || hashAlgo != htSHA256) {
HashSink narSink { htSHA256 };
if (method != FileIngestionMethod::Recursive || hashAlgo != HashAlgorithm::SHA256) {
HashSink narSink { HashAlgorithm::SHA256 };
dumpPath(realPath, narSink);
narHash = narSink.finish();
}
|
||||
@ -1252,7 +1252,7 @@ StorePath LocalStore::addTextToStore(
|
||||
std::string_view s,
|
||||
const StorePathSet & references, RepairFlag repair)
|
||||
{
|
||||
auto hash = hashString(htSHA256, s);
|
||||
auto hash = hashString(HashAlgorithm::SHA256, s);
|
||||
auto dstPath = makeTextPath(name, TextInfo {
|
||||
.hash = hash,
|
||||
.references = references,
|
||||
@ -1278,7 +1278,7 @@ StorePath LocalStore::addTextToStore(
|
||||
|
||||
StringSink sink;
|
||||
dumpString(s, sink);
|
||||
auto narHash = hashString(htSHA256, sink.s);
|
||||
auto narHash = hashString(HashAlgorithm::SHA256, sink.s);
|
||||
|
||||
optimisePath(realPath, repair);
|
||||
|
||||
@ -1389,7 +1389,7 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
|
||||
for (auto & link : readDirectory(linksDir)) {
|
||||
printMsg(lvlTalkative, "checking contents of '%s'", link.name);
|
||||
Path linkPath = linksDir + "/" + link.name;
|
||||
std::string hash = hashPath(htSHA256, linkPath).first.to_string(HashFormat::Base32, false);
|
||||
std::string hash = hashPath(HashAlgorithm::SHA256, linkPath).first.to_string(HashFormat::Nix32, false);
|
||||
if (hash != link.name) {
|
||||
printError("link '%s' was modified! expected hash '%s', got '%s'",
|
||||
linkPath, link.name, hash);
|
||||
@ -1406,7 +1406,7 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
|
||||
|
||||
printInfo("checking store hashes...");
|
||||
|
||||
Hash nullHash(htSHA256);
|
||||
Hash nullHash(HashAlgorithm::SHA256);
|
||||
|
||||
for (auto & i : validPaths) {
|
||||
try {
|
||||
@ -1415,14 +1415,14 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
|
||||
/* Check the content hash (optionally - slow). */
|
||||
printMsg(lvlTalkative, "checking contents of '%s'", printStorePath(i));
|
||||
|
||||
auto hashSink = HashSink(info->narHash.type);
|
||||
auto hashSink = HashSink(info->narHash.algo);
|
||||
|
||||
dumpPath(Store::toRealPath(i), hashSink);
|
||||
auto current = hashSink.finish();
|
||||
|
||||
if (info->narHash != nullHash && info->narHash != current.first) {
|
||||
printError("path '%s' was modified! expected hash '%s', got '%s'",
|
||||
printStorePath(i), info->narHash.to_string(HashFormat::Base32, true), current.first.to_string(HashFormat::Base32, true));
|
||||
printStorePath(i), info->narHash.to_string(HashFormat::Nix32, true), current.first.to_string(HashFormat::Nix32, true));
|
||||
if (repair) repairPath(i); else errors = true;
|
||||
} else {
|
||||
|
||||
@ -1697,20 +1697,20 @@ void LocalStore::queryRealisationUncached(const DrvOutput & id,
|
||||
}
|
||||
|
||||
ContentAddress LocalStore::hashCAPath(
|
||||
const ContentAddressMethod & method, const HashType & hashType,
|
||||
const ContentAddressMethod & method, const HashAlgorithm & hashAlgo,
|
||||
const StorePath & path)
|
||||
{
|
||||
return hashCAPath(method, hashType, Store::toRealPath(path), path.hashPart());
|
||||
return hashCAPath(method, hashAlgo, Store::toRealPath(path), path.hashPart());
|
||||
}
|
||||
|
||||
ContentAddress LocalStore::hashCAPath(
|
||||
const ContentAddressMethod & method,
|
||||
const HashType & hashType,
|
||||
const HashAlgorithm & hashAlgo,
|
||||
const Path & path,
|
||||
const std::string_view pathHash
|
||||
)
|
||||
{
|
||||
HashModuloSink caSink ( hashType, std::string(pathHash) );
|
||||
HashModuloSink caSink ( hashAlgo, std::string(pathHash) );
|
||||
std::visit(overloaded {
|
||||
[&](const TextIngestionMethod &) {
|
||||
readFile(path, caSink);
|
||||
|
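The bulk of the churn above is two mechanical renames: the unscoped `HashType` enumerators (`htSHA256`, ...) become members of the scoped `enum struct HashAlgorithm`, and the Nix-specific base-32 output format is renamed from `Base32` to `Nix32`. A before/after sketch of a typical call site (variable names hypothetical):

    // Before the rename:
    Hash h1 = hashString(htSHA256, data);
    std::string s1 = h1.to_string(HashFormat::Base32, true);

    // After the rename; the textual output is identical, only the
    // identifiers changed:
    Hash h2 = hashString(HashAlgorithm::SHA256, data);
    std::string s2 = h2.to_string(HashFormat::Nix32, true);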
@@ -178,7 +178,7 @@ public:
     RepairFlag repair, CheckSigsFlag checkSigs) override;

     StorePath addToStoreFromDump(Source & dump, std::string_view name,
-        FileIngestionMethod method, HashType hashAlgo, RepairFlag repair, const StorePathSet & references) override;
+        FileIngestionMethod method, HashAlgorithm hashAlgo, RepairFlag repair, const StorePathSet & references) override;

     StorePath addTextToStore(
         std::string_view name,
@@ -353,12 +353,12 @@ private:
     // XXX: Make a generic `Store` method
     ContentAddress hashCAPath(
         const ContentAddressMethod & method,
-        const HashType & hashType,
+        const HashAlgorithm & hashAlgo,
         const StorePath & path);

     ContentAddress hashCAPath(
         const ContentAddressMethod & method,
-        const HashType & hashType,
+        const HashAlgorithm & hashAlgo,
         const Path & path,
         const std::string_view pathHash
     );

@@ -43,7 +43,7 @@ std::map<StorePath, StorePath> makeContentAddressed(

         sink.s = rewriteStrings(sink.s, rewrites);

-        HashModuloSink hashModuloSink(htSHA256, oldHashPart);
+        HashModuloSink hashModuloSink(HashAlgorithm::SHA256, oldHashPart);
         hashModuloSink(sink.s);

         auto narModuloHash = hashModuloSink.finish().first;
@@ -66,7 +66,7 @@ std::map<StorePath, StorePath> makeContentAddressed(
         rsink2(sink.s);
         rsink2.flush();

-        info.narHash = hashString(htSHA256, sink2.s);
+        info.narHash = hashString(HashAlgorithm::SHA256, sink2.s);
         info.narSize = sink.s.size();

         StringSource source(sink2.s);

@@ -333,9 +333,9 @@ public:
             (std::string(info->path.name()))
             (narInfo ? narInfo->url : "", narInfo != 0)
             (narInfo ? narInfo->compression : "", narInfo != 0)
-            (narInfo && narInfo->fileHash ? narInfo->fileHash->to_string(HashFormat::Base32, true) : "", narInfo && narInfo->fileHash)
+            (narInfo && narInfo->fileHash ? narInfo->fileHash->to_string(HashFormat::Nix32, true) : "", narInfo && narInfo->fileHash)
             (narInfo ? narInfo->fileSize : 0, narInfo != 0 && narInfo->fileSize)
-            (info->narHash.to_string(HashFormat::Base32, true))
+            (info->narHash.to_string(HashFormat::Nix32, true))
             (info->narSize)
             (concatStringsSep(" ", info->shortRefs()))
             (info->deriver ? std::string(info->deriver->to_string()) : "", (bool) info->deriver)

@@ -113,11 +113,11 @@ std::string NarInfo::to_string(const Store & store) const
     res += "URL: " + url + "\n";
     assert(compression != "");
     res += "Compression: " + compression + "\n";
-    assert(fileHash && fileHash->type == htSHA256);
-    res += "FileHash: " + fileHash->to_string(HashFormat::Base32, true) + "\n";
+    assert(fileHash && fileHash->algo == HashAlgorithm::SHA256);
+    res += "FileHash: " + fileHash->to_string(HashFormat::Nix32, true) + "\n";
     res += "FileSize: " + std::to_string(fileSize) + "\n";
-    assert(narHash.type == htSHA256);
-    res += "NarHash: " + narHash.to_string(HashFormat::Base32, true) + "\n";
+    assert(narHash.algo == HashAlgorithm::SHA256);
+    res += "NarHash: " + narHash.to_string(HashFormat::Nix32, true) + "\n";
     res += "NarSize: " + std::to_string(narSize) + "\n";

     res += "References: " + concatStringsSep(" ", shortRefs()) + "\n";
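`NarInfo::to_string` renders the textual `.narinfo` format consumed by binary caches; the asserts above pin both hashes to SHA-256, and the rename leaves the emitted text unchanged. Roughly, a serialized record looks like this (placeholder values, not real hashes):

    URL: nar/<file-hash>.nar.xz
    Compression: xz
    FileHash: sha256:<52-character nix32 digest>
    FileSize: 12345
    NarHash: sha256:<52-character nix32 digest>
    NarSize: 67890
    References: <nix32-hash>-dep-2.1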
@@ -146,17 +146,17 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats,
        Also note that if `path' is a symlink, then we're hashing the
        contents of the symlink (i.e. the result of readlink()), not
        the contents of the target (which may not even exist). */
-    Hash hash = hashPath(htSHA256, path).first;
-    debug("'%1%' has hash '%2%'", path, hash.to_string(HashFormat::Base32, true));
+    Hash hash = hashPath(HashAlgorithm::SHA256, path).first;
+    debug("'%1%' has hash '%2%'", path, hash.to_string(HashFormat::Nix32, true));

     /* Check if this is a known hash. */
-    Path linkPath = linksDir + "/" + hash.to_string(HashFormat::Base32, false);
+    Path linkPath = linksDir + "/" + hash.to_string(HashFormat::Nix32, false);

     /* Maybe delete the link, if it has been corrupted. */
     if (pathExists(linkPath)) {
         auto stLink = lstat(linkPath);
         if (st.st_size != stLink.st_size
-            || (repair && hash != hashPath(htSHA256, linkPath).first))
+            || (repair && hash != hashPath(HashAlgorithm::SHA256, linkPath).first))
         {
             // XXX: Consider overwriting linkPath with our valid version.
             warn("removing corrupted link '%s'", linkPath);

@@ -146,7 +146,7 @@ static nlohmann::json pathInfoToJSON(
             auto info = store.queryPathInfo(storePath);

             auto & jsonPath = jsonList.emplace_back(
-                info->toJSON(store, false, HashFormat::Base32));
+                info->toJSON(store, false, HashFormat::Nix32));

             // Add the path to the object whose metadata we are including.
             jsonPath["path"] = store.printStorePath(storePath);

@@ -31,9 +31,9 @@ std::string ValidPathInfo::fingerprint(const Store & store) const
         throw Error("cannot calculate fingerprint of path '%s' because its size is not known",
             store.printStorePath(path));
     return
-        "1;" + store.printStorePath(path) + ";"
-        + narHash.to_string(HashFormat::Base32, true) + ";"
-        + std::to_string(narSize) + ";"
+        "1;" + store.printStorePath(path) + ";"
+        + narHash.to_string(HashFormat::Nix32, true) + ";"
+        + std::to_string(narSize) + ";"
         + concatStringsSep(",", store.printStorePathSet(references));
 }

@@ -49,7 +49,7 @@ std::pair<StorePathSet, HashResult> scanForReferences(
     const std::string & path,
     const StorePathSet & refs)
 {
-    HashSink hashSink { htSHA256 };
+    HashSink hashSink { HashAlgorithm::SHA256 };
     auto found = scanForReferences(hashSink, path, refs);
     auto hash = hashSink.finish();
     return std::pair<StorePathSet, HashResult>(found, hash);

@@ -35,7 +35,7 @@ StorePath::StorePath(std::string_view _baseName)
 }

 StorePath::StorePath(const Hash & hash, std::string_view _name)
-    : baseName((hash.to_string(HashFormat::Base32, false) + "-").append(std::string(_name)))
+    : baseName((hash.to_string(HashFormat::Nix32, false) + "-").append(std::string(_name)))
 {
     checkName(baseName, name());
 }
@@ -49,7 +49,7 @@ StorePath StorePath::dummy("ffffffffffffffffffffffffffffffff-x");

 StorePath StorePath::random(std::string_view name)
 {
-    Hash hash(htSHA1);
+    Hash hash(HashAlgorithm::SHA1);
     randombytes_buf(hash.hash, hash.hashSize);
     return StorePath(hash, name);
 }

@@ -417,12 +417,12 @@ std::optional<StorePath> RemoteStore::queryPathFromHashPart(const std::string &


 ref<const ValidPathInfo> RemoteStore::addCAToStore(
-        Source & dump,
-        std::string_view name,
-        ContentAddressMethod caMethod,
-        HashType hashType,
-        const StorePathSet & references,
-        RepairFlag repair)
+    Source & dump,
+    std::string_view name,
+    ContentAddressMethod caMethod,
+    HashAlgorithm hashAlgo,
+    const StorePathSet & references,
+    RepairFlag repair)
 {
     std::optional<ConnectionHandle> conn_(getConnection());
     auto & conn = *conn_;
@@ -432,7 +432,7 @@ ref<const ValidPathInfo> RemoteStore::addCAToStore(
         conn->to
             << WorkerProto::Op::AddToStore
             << name
-            << caMethod.render(hashType);
+            << caMethod.render(hashAlgo);
         WorkerProto::write(*this, *conn, references);
         conn->to << repair;

@@ -453,9 +453,9 @@ ref<const ValidPathInfo> RemoteStore::addCAToStore(

         std::visit(overloaded {
             [&](const TextIngestionMethod & thm) -> void {
-                if (hashType != htSHA256)
+                if (hashAlgo != HashAlgorithm::SHA256)
                     throw UnimplementedError("When adding text-hashed data called '%s', only SHA-256 is supported but '%s' was given",
-                        name, printHashType(hashType));
+                        name, printHashAlgo(hashAlgo));
                 std::string s = dump.drain();
                 conn->to << WorkerProto::Op::AddTextToStore << name << s;
                 WorkerProto::write(*this, *conn, references);
@@ -465,9 +465,9 @@ ref<const ValidPathInfo> RemoteStore::addCAToStore(
                 conn->to
                     << WorkerProto::Op::AddToStore
                     << name
-                    << ((hashType == htSHA256 && fim == FileIngestionMethod::Recursive) ? 0 : 1) /* backwards compatibility hack */
+                    << ((hashAlgo == HashAlgorithm::SHA256 && fim == FileIngestionMethod::Recursive) ? 0 : 1) /* backwards compatibility hack */
                     << (fim == FileIngestionMethod::Recursive ? 1 : 0)
-                    << printHashType(hashType);
+                    << printHashAlgo(hashAlgo);

                 try {
                     conn->to.written = 0;
@@ -503,9 +503,9 @@ ref<const ValidPathInfo> RemoteStore::addCAToStore(


 StorePath RemoteStore::addToStoreFromDump(Source & dump, std::string_view name,
-    FileIngestionMethod method, HashType hashType, RepairFlag repair, const StorePathSet & references)
+    FileIngestionMethod method, HashAlgorithm hashAlgo, RepairFlag repair, const StorePathSet & references)
 {
-    return addCAToStore(dump, name, method, hashType, references, repair)->path;
+    return addCAToStore(dump, name, method, hashAlgo, references, repair)->path;
 }


@@ -610,7 +610,7 @@ StorePath RemoteStore::addTextToStore(
     RepairFlag repair)
 {
     StringSource source(s);
-    return addCAToStore(source, name, TextIngestionMethod {}, htSHA256, references, repair)->path;
+    return addCAToStore(source, name, TextIngestionMethod {}, HashAlgorithm::SHA256, references, repair)->path;
 }

 void RemoteStore::registerDrvOutput(const Realisation & info)

@@ -74,18 +74,18 @@ public:
      * Add a content-addressable store path. `dump` will be drained.
      */
     ref<const ValidPathInfo> addCAToStore(
-        Source & dump,
-        std::string_view name,
-        ContentAddressMethod caMethod,
-        HashType hashType,
-        const StorePathSet & references,
-        RepairFlag repair);
+        Source & dump,
+        std::string_view name,
+        ContentAddressMethod caMethod,
+        HashAlgorithm hashAlgo,
+        const StorePathSet & references,
+        RepairFlag repair);

     /**
      * Add a content-addressable store path. Does not support references. `dump` will be drained.
      */
     StorePath addToStoreFromDump(Source & dump, std::string_view name,
-        FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256, RepairFlag repair = NoRepair, const StorePathSet & references = StorePathSet()) override;
+        FileIngestionMethod method = FileIngestionMethod::Recursive, HashAlgorithm hashAlgo = HashAlgorithm::SHA256, RepairFlag repair = NoRepair, const StorePathSet & references = StorePathSet()) override;

     void addToStore(const ValidPathInfo & info, Source & nar,
         RepairFlag repair, CheckSigsFlag checkSigs) override;
@@ -5,6 +5,7 @@
 #include "serve-protocol.hh"
 #include "serve-protocol-impl.hh"
 #include "archive.hh"
+#include "path-info.hh"

 #include <nlohmann/json.hpp>

@@ -54,4 +55,83 @@ void ServeProto::Serialise<BuildResult>::write(const StoreDirConfig & store, Ser
 }
 }

+UnkeyedValidPathInfo ServeProto::Serialise<UnkeyedValidPathInfo>::read(const StoreDirConfig & store, ReadConn conn)
+{
+    /* Hash should be set below unless very old `nix-store --serve`.
+       Caller should assert that it did set it. */
+    UnkeyedValidPathInfo info { Hash::dummy };
+
+    auto deriver = readString(conn.from);
+    if (deriver != "")
+        info.deriver = store.parseStorePath(deriver);
+    info.references = ServeProto::Serialise<StorePathSet>::read(store, conn);
+
+    readLongLong(conn.from); // download size, unused
+    info.narSize = readLongLong(conn.from);
+
+    if (GET_PROTOCOL_MINOR(conn.version) >= 4) {
+        auto s = readString(conn.from);
+        if (!s.empty())
+            info.narHash = Hash::parseAnyPrefixed(s);
+        info.ca = ContentAddress::parseOpt(readString(conn.from));
+        info.sigs = readStrings<StringSet>(conn.from);
+    }
+
+    return info;
+}
+
+void ServeProto::Serialise<UnkeyedValidPathInfo>::write(const StoreDirConfig & store, WriteConn conn, const UnkeyedValidPathInfo & info)
+{
+    conn.to
+        << (info.deriver ? store.printStorePath(*info.deriver) : "");
+
+    ServeProto::write(store, conn, info.references);
+    // !!! Maybe we want compression?
+    conn.to
+        << info.narSize // downloadSize, lie a little
+        << info.narSize;
+    if (GET_PROTOCOL_MINOR(conn.version) >= 4)
+        conn.to
+            << info.narHash.to_string(HashFormat::Nix32, true)
+            << renderContentAddress(info.ca)
+            << info.sigs;
+}
+
+
+ServeProto::BuildOptions ServeProto::Serialise<ServeProto::BuildOptions>::read(const StoreDirConfig & store, ReadConn conn)
+{
+    BuildOptions options;
+    options.maxSilentTime = readInt(conn.from);
+    options.buildTimeout = readInt(conn.from);
+    if (GET_PROTOCOL_MINOR(conn.version) >= 2)
+        options.maxLogSize = readNum<unsigned long>(conn.from);
+    if (GET_PROTOCOL_MINOR(conn.version) >= 3) {
+        options.nrRepeats = readInt(conn.from);
+        options.enforceDeterminism = readInt(conn.from);
+    }
+    if (GET_PROTOCOL_MINOR(conn.version) >= 7) {
+        options.keepFailed = (bool) readInt(conn.from);
+    }
+    return options;
+}
+
+void ServeProto::Serialise<ServeProto::BuildOptions>::write(const StoreDirConfig & store, WriteConn conn, const ServeProto::BuildOptions & options)
+{
+    conn.to
+        << options.maxSilentTime
+        << options.buildTimeout;
+    if (GET_PROTOCOL_MINOR(conn.version) >= 2)
+        conn.to
+            << options.maxLogSize;
+    if (GET_PROTOCOL_MINOR(conn.version) >= 3)
+        conn.to
+            << options.nrRepeats
+            << options.enforceDeterminism;
+
+    if (GET_PROTOCOL_MINOR(conn.version) >= 7) {
+        conn.to << ((int) options.keepFailed);
+    }
+}
+
 }
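Every field in the serialisers above is gated on `GET_PROTOCOL_MINOR(conn.version)`, and reader and writer must apply the same conditions in the same order or the stream desynchronises. A stripped-down sketch of the pattern (types simplified, not the real Nix interfaces):

    #include <cstdint>

    struct Options { int64_t buildTimeout = -1; bool keepFailed = false; };

    // The same gating must appear on both sides of the wire: a field added
    // in minor version 7 is only (de)serialised when the negotiated version
    // is at least 7; older peers simply keep the default value.
    template<typename SerialiseField>
    void serialiseOptions(SerialiseField && field, unsigned minor, Options & o)
    {
        field(o.buildTimeout);      // present in all versions
        if (minor >= 7)
            field(o.keepFailed);    // added in minor version 7
    }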
@@ -18,6 +18,7 @@ struct Source;

 // items being serialised
 struct BuildResult;
+struct UnkeyedValidPathInfo;


 /**
@@ -86,6 +87,13 @@ struct ServeProto
     {
         ServeProto::Serialise<T>::write(store, conn, t);
     }
+
+    /**
+     * Options for building shared between
+     * `ServeProto::Command::BuildPaths` and
+     * `ServeProto::Command::BuildDerivation`.
+     */
+    struct BuildOptions;
 };

 enum struct ServeProto::Command : uint64_t
@@ -101,6 +109,22 @@ enum struct ServeProto::Command : uint64_t
     AddToStoreNar = 9,
 };


+struct ServeProto::BuildOptions {
+    /**
+     * Default value in this and every other field is so tests pass when
+     * testing older deserialisers which do not set all the fields.
+     */
+    time_t maxSilentTime = -1;
+    time_t buildTimeout = -1;
+    size_t maxLogSize = -1;
+    size_t nrRepeats = -1;
+    bool enforceDeterminism = -1;
+    bool keepFailed = -1;
+
+    bool operator == (const ServeProto::BuildOptions &) const = default;
+};
+
 /**
  * Convenience for sending operation codes.
  *
@@ -141,6 +165,10 @@ inline std::ostream & operator << (std::ostream & s, ServeProto::Command op)

 template<>
 DECLARE_SERVE_SERIALISER(BuildResult);
+template<>
+DECLARE_SERVE_SERIALISER(UnkeyedValidPathInfo);
+template<>
+DECLARE_SERVE_SERIALISER(ServeProto::BuildOptions);

 template<typename T>
 DECLARE_SERVE_SERIALISER(std::vector<T>);
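Because `BuildOptions` defaults every member and derives `operator ==`, round-trip tests can compare whole structs: an older deserialiser that never reads the newer fields still yields an object equal to one built from the same old fields. A hedged test sketch:

    ServeProto::BuildOptions a;
    a.maxSilentTime = 3600;
    a.buildTimeout = 7200;

    // Imagine: 'a' written at an old protocol version, then read back.
    ServeProto::BuildOptions b = a;
    assert(a == b); // defaulted newer fields compare equal too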
@@ -153,7 +153,7 @@ StorePath StoreDirConfig::makeStorePath(std::string_view type,
     /* e.g., "source:sha256:1abc...:/nix/store:foo.tar.gz" */
     auto s = std::string(type) + ":" + std::string(hash)
         + ":" + storeDir + ":" + std::string(name);
-    auto h = compressHash(hashString(htSHA256, s), 20);
+    auto h = compressHash(hashString(HashAlgorithm::SHA256, s), 20);
     return StorePath(h, name);
 }

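For orientation, the digest in a store path name is derived exactly as above: SHA-256 over a textual fingerprint, folded down to 20 bytes, then printed in nix32. A sketch using only functions visible in this diff (the fingerprint value is illustrative):

    std::string fingerprint =
        "source:sha256:<nix32 digest>:/nix/store:foo.tar.gz"; // placeholder
    Hash h = compressHash(hashString(HashAlgorithm::SHA256, fingerprint), 20);
    // 20 bytes = 160 bits; (160 - 1) / 5 + 1 = 32 nix32 characters,
    // the familiar hash part of a /nix/store path.
    std::string hashPart = h.to_string(HashFormat::Nix32, false);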
@@ -191,12 +191,12 @@ static std::string makeType(

 StorePath StoreDirConfig::makeFixedOutputPath(std::string_view name, const FixedOutputInfo & info) const
 {
-    if (info.hash.type == htSHA256 && info.method == FileIngestionMethod::Recursive) {
+    if (info.hash.algo == HashAlgorithm::SHA256 && info.method == FileIngestionMethod::Recursive) {
         return makeStorePath(makeType(*this, "source", info.references), info.hash, name);
     } else {
         assert(info.references.size() == 0);
         return makeStorePath("output:out",
-            hashString(htSHA256,
+            hashString(HashAlgorithm::SHA256,
                 "fixed:out:"
                 + makeFileIngestionPrefix(info.method)
                 + info.hash.to_string(HashFormat::Base16, true) + ":"),
@@ -207,7 +207,7 @@ StorePath StoreDirConfig::makeFixedOutputPath(std::string_view name, const Fixed

 StorePath StoreDirConfig::makeTextPath(std::string_view name, const TextInfo & info) const
 {
-    assert(info.hash.type == htSHA256);
+    assert(info.hash.algo == HashAlgorithm::SHA256);
     return makeStorePath(
         makeType(*this, "text", StoreReferences {
             .others = info.references,
@@ -233,11 +233,11 @@ StorePath StoreDirConfig::makeFixedOutputPathFromCA(std::string_view name, const


 std::pair<StorePath, Hash> StoreDirConfig::computeStorePathFromDump(
-        Source & dump,
-        std::string_view name,
-        FileIngestionMethod method,
-        HashType hashAlgo,
-        const StorePathSet & references) const
+    Source & dump,
+    std::string_view name,
+    FileIngestionMethod method,
+    HashAlgorithm hashAlgo,
+    const StorePathSet & references) const
 {
     HashSink sink(hashAlgo);
     dump.drainInto(sink);
@@ -257,20 +257,20 @@ StorePath StoreDirConfig::computeStorePathForText(
     const StorePathSet & references) const
 {
     return makeTextPath(name, TextInfo {
-        .hash = hashString(htSHA256, s),
+        .hash = hashString(HashAlgorithm::SHA256, s),
         .references = references,
     });
 }


 StorePath Store::addToStore(
-        std::string_view name,
-        const Path & _srcPath,
-        FileIngestionMethod method,
-        HashType hashAlgo,
-        PathFilter & filter,
-        RepairFlag repair,
-        const StorePathSet & references)
+    std::string_view name,
+    const Path & _srcPath,
+    FileIngestionMethod method,
+    HashAlgorithm hashAlgo,
+    PathFilter & filter,
+    RepairFlag repair,
+    const StorePathSet & references)
 {
     Path srcPath(absPath(_srcPath));
     auto source = sinkToSource([&](Sink & sink) {
@@ -405,10 +405,10 @@ digraph graphname {
 }
 */
 ValidPathInfo Store::addToStoreSlow(std::string_view name, const Path & srcPath,
-    FileIngestionMethod method, HashType hashAlgo,
-    std::optional<Hash> expectedCAHash)
+    FileIngestionMethod method, HashAlgorithm hashAlgo,
+    std::optional<Hash> expectedCAHash)
 {
-    HashSink narHashSink { htSHA256 };
+    HashSink narHashSink { HashAlgorithm::SHA256 };
     HashSink caHashSink { hashAlgo };

     /* Note that fileSink and unusualHashTee must be mutually exclusive, since
@@ -417,7 +417,7 @@ ValidPathInfo Store::addToStoreSlow(std::string_view name, const Path & srcPath,
     RegularFileSink fileSink { caHashSink };
     TeeSink unusualHashTee { narHashSink, caHashSink };

-    auto & narSink = method == FileIngestionMethod::Recursive && hashAlgo != htSHA256
+    auto & narSink = method == FileIngestionMethod::Recursive && hashAlgo != HashAlgorithm::SHA256
         ? static_cast<Sink &>(unusualHashTee)
         : narHashSink;

@@ -445,7 +445,7 @@ ValidPathInfo Store::addToStoreSlow(std::string_view name, const Path & srcPath,
        finish. */
     auto [narHash, narSize] = narHashSink.finish();

-    auto hash = method == FileIngestionMethod::Recursive && hashAlgo == htSHA256
+    auto hash = method == FileIngestionMethod::Recursive && hashAlgo == HashAlgorithm::SHA256
         ? narHash
         : caHashSink.finish().first;

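The sink plumbing in `addToStoreSlow` exists to avoid hashing the input twice: in recursive (NAR) SHA-256 mode the content-address hash is the NAR hash itself, so a single `HashSink` suffices. A rough decision table (a summary, not from the commit):

    method      hashAlgo    sinks fed
    Recursive   SHA-256     narHashSink only; its result is reused as the CA hash
    Recursive   other       narHashSink and caHashSink together via unusualHashTee
    Flat        any         narHashSink for the NAR; caHashSink via fileSink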
@@ -1205,7 +1205,7 @@ std::optional<ValidPathInfo> decodeValidPathInfo(const Store & store, std::istre
     if (!hashGiven) {
         std::string s;
         getline(str, s);
-        auto narHash = Hash::parseAny(s, htSHA256);
+        auto narHash = Hash::parseAny(s, HashAlgorithm::SHA256);
         getline(str, s);
         auto narSize = string2Int<uint64_t>(s);
         if (!narSize) throw Error("number expected");

@@ -427,13 +427,13 @@ public:
      * libutil/archive.hh).
      */
     virtual StorePath addToStore(
-        std::string_view name,
-        const Path & srcPath,
-        FileIngestionMethod method = FileIngestionMethod::Recursive,
-        HashType hashAlgo = htSHA256,
-        PathFilter & filter = defaultPathFilter,
-        RepairFlag repair = NoRepair,
-        const StorePathSet & references = StorePathSet());
+        std::string_view name,
+        const Path & srcPath,
+        FileIngestionMethod method = FileIngestionMethod::Recursive,
+        HashAlgorithm hashAlgo = HashAlgorithm::SHA256,
+        PathFilter & filter = defaultPathFilter,
+        RepairFlag repair = NoRepair,
+        const StorePathSet & references = StorePathSet());

     /**
      * Copy the contents of a path to the store and register the
@@ -441,8 +441,8 @@ public:
      * memory.
      */
     ValidPathInfo addToStoreSlow(std::string_view name, const Path & srcPath,
-        FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256,
-        std::optional<Hash> expectedCAHash = {});
+        FileIngestionMethod method = FileIngestionMethod::Recursive, HashAlgorithm hashAlgo = HashAlgorithm::SHA256,
+        std::optional<Hash> expectedCAHash = {});

     /**
      * Like addToStore(), but the contents of the path are contained
@@ -454,8 +454,8 @@ public:
      * \todo remove?
      */
     virtual StorePath addToStoreFromDump(Source & dump, std::string_view name,
-        FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256, RepairFlag repair = NoRepair,
-        const StorePathSet & references = StorePathSet())
+        FileIngestionMethod method = FileIngestionMethod::Recursive, HashAlgorithm hashAlgo = HashAlgorithm::SHA256, RepairFlag repair = NoRepair,
+        const StorePathSet & references = StorePathSet())
     { unsupported("addToStoreFromDump"); }

     /**

@@ -98,7 +98,7 @@ struct StoreDirConfig : public Config
         Source & dump,
         std::string_view name,
         FileIngestionMethod method = FileIngestionMethod::Recursive,
-        HashType hashAlgo = htSHA256,
+        HashAlgorithm hashAlgo = HashAlgorithm::SHA256,
         const StorePathSet & references = {}) const;

     /**

@@ -160,7 +160,7 @@ void WorkerProto::Serialise<ValidPathInfo>::write(const StoreDirConfig & store,
 UnkeyedValidPathInfo WorkerProto::Serialise<UnkeyedValidPathInfo>::read(const StoreDirConfig & store, ReadConn conn)
 {
     auto deriver = readString(conn.from);
-    auto narHash = Hash::parseAny(readString(conn.from), htSHA256);
+    auto narHash = Hash::parseAny(readString(conn.from), HashAlgorithm::SHA256);
     UnkeyedValidPathInfo info(narHash);
     if (deriver != "") info.deriver = store.parseStorePath(deriver);
     info.references = WorkerProto::Serialise<StorePathSet>::read(store, conn);

@@ -483,7 +483,7 @@ bool Args::processArgs(const Strings & args, bool finish)
         if (!anyCompleted)
             exp.handler.fun(ss);

-        /* Move the list element to the processedArgs. This is almost the same as
+        /* Move the list element to the processedArgs. This is almost the same as
            `processedArgs.push_back(expectedArgs.front()); expectedArgs.pop_front()`,
            except that it will only adjust the next and prev pointers of the list
            elements, meaning the actual contents don't move in memory. This is
@@ -544,36 +544,70 @@ nlohmann::json Args::toJSON()
     return res;
 }

-static void hashTypeCompleter(AddCompletions & completions, size_t index, std::string_view prefix)
+static void hashFormatCompleter(AddCompletions & completions, size_t index, std::string_view prefix)
 {
-    for (auto & type : hashTypes)
-        if (hasPrefix(type, prefix))
-            completions.add(type);
+    for (auto & format : hashFormats) {
+        if (hasPrefix(format, prefix)) {
+            completions.add(format);
+        }
+    }
 }

-Args::Flag Args::Flag::mkHashTypeFlag(std::string && longName, HashType * ht)
-{
-    return Flag {
-        .longName = std::move(longName),
-        .description = "hash algorithm ('md5', 'sha1', 'sha256', or 'sha512')",
-        .labels = {"hash-algo"},
-        .handler = {[ht](std::string s) {
-            *ht = parseHashType(s);
-        }},
-        .completer = hashTypeCompleter,
+Args::Flag Args::Flag::mkHashFormatFlagWithDefault(std::string &&longName, HashFormat * hf) {
+    assert(*hf == nix::HashFormat::SRI);
+    return Flag{
+        .longName = std::move(longName),
+        .description = "hash format ('base16', 'nix32', 'base64', 'sri'). Default: 'sri'",
+        .labels = {"hash-format"},
+        .handler = {[hf](std::string s) {
+            *hf = parseHashFormat(s);
+        }},
+        .completer = hashFormatCompleter,
     };
 }

-Args::Flag Args::Flag::mkHashTypeOptFlag(std::string && longName, std::optional<HashType> * oht)
+Args::Flag Args::Flag::mkHashFormatOptFlag(std::string && longName, std::optional<HashFormat> * ohf) {
+    return Flag{
+        .longName = std::move(longName),
+        .description = "hash format ('base16', 'nix32', 'base64', 'sri').",
+        .labels = {"hash-format"},
+        .handler = {[ohf](std::string s) {
+            *ohf = std::optional<HashFormat>{parseHashFormat(s)};
+        }},
+        .completer = hashFormatCompleter,
+    };
+}
+
+static void hashAlgoCompleter(AddCompletions & completions, size_t index, std::string_view prefix)
 {
-    return Flag {
-        .longName = std::move(longName),
-        .description = "hash algorithm ('md5', 'sha1', 'sha256', or 'sha512'). Optional as can also be gotten from SRI hash itself.",
-        .labels = {"hash-algo"},
-        .handler = {[oht](std::string s) {
-            *oht = std::optional<HashType> { parseHashType(s) };
-        }},
-        .completer = hashTypeCompleter,
+    for (auto & algo : hashAlgorithms)
+        if (hasPrefix(algo, prefix))
+            completions.add(algo);
+}
+
+Args::Flag Args::Flag::mkHashAlgoFlag(std::string && longName, HashAlgorithm * ha)
+{
+    return Flag{
+        .longName = std::move(longName),
+        .description = "hash algorithm ('md5', 'sha1', 'sha256', or 'sha512')",
+        .labels = {"hash-algo"},
+        .handler = {[ha](std::string s) {
+            *ha = parseHashAlgo(s);
+        }},
+        .completer = hashAlgoCompleter,
+    };
+}
+
+Args::Flag Args::Flag::mkHashAlgoOptFlag(std::string && longName, std::optional<HashAlgorithm> * oha)
+{
+    return Flag{
+        .longName = std::move(longName),
+        .description = "hash algorithm ('md5', 'sha1', 'sha256', or 'sha512'). Optional as can also be gotten from SRI hash itself.",
+        .labels = {"hash-algo"},
+        .handler = {[oha](std::string s) {
+            *oha = std::optional<HashAlgorithm>{parseHashAlgo(s)};
+        }},
+        .completer = hashAlgoCompleter,
     };
 }

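A hedged sketch of how a command might wire up the new flag factories (the command type and flag names here are hypothetical, not from this commit):

    struct CmdHashConvert : Command
    {
        std::optional<HashFormat> from;
        HashFormat to = HashFormat::SRI; // mkHashFormatFlagWithDefault asserts this default
        std::optional<HashAlgorithm> algo;

        CmdHashConvert()
        {
            addFlag(Args::Flag::mkHashFormatOptFlag("from", &from));
            addFlag(Args::Flag::mkHashFormatFlagWithDefault("to", &to));
            addFlag(Args::Flag::mkHashAlgoOptFlag("algo", &algo));
        }
    };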
@@ -622,8 +656,9 @@ std::optional<ExperimentalFeature> Command::experimentalFeature ()
     return { Xp::NixCommand };
 }

-MultiCommand::MultiCommand(const Commands & commands_)
+MultiCommand::MultiCommand(std::string_view commandName, const Commands & commands_)
     : commands(commands_)
+    , commandName(commandName)
 {
     expectArgs({
         .label = "subcommand",

@@ -14,7 +14,8 @@

 namespace nix {

-enum HashType : char;
+enum struct HashAlgorithm : char;
+enum struct HashFormat : int;

 class MultiCommand;

@@ -175,8 +176,10 @@ protected:

     std::optional<ExperimentalFeature> experimentalFeature;

-    static Flag mkHashTypeFlag(std::string && longName, HashType * ht);
-    static Flag mkHashTypeOptFlag(std::string && longName, std::optional<HashType> * oht);
+    static Flag mkHashAlgoFlag(std::string && longName, HashAlgorithm * ha);
+    static Flag mkHashAlgoOptFlag(std::string && longName, std::optional<HashAlgorithm> * oha);
+    static Flag mkHashFormatFlagWithDefault(std::string && longName, HashFormat * hf);
+    static Flag mkHashFormatOptFlag(std::string && longName, std::optional<HashFormat> * ohf);
 };

 /**
@@ -223,11 +226,11 @@ protected:
     std::list<ExpectedArg> expectedArgs;
     /**
      * List of processed positional argument forms.
-     *
+     *
      * All items removed from `expectedArgs` are added here. After all
      * arguments were processed, this list should be exactly the same as
      * `expectedArgs` was before.
-     *
+     *
      * This list is used to extend the lifetime of the argument forms.
      * If this is not done, some closures that reference the command
      * itself will segfault.
@@ -356,13 +359,16 @@ public:
      */
     std::optional<std::pair<std::string, ref<Command>>> command;

-    MultiCommand(const Commands & commands);
+    MultiCommand(std::string_view commandName, const Commands & commands);

     bool processFlag(Strings::iterator & pos, Strings::iterator end) override;

     bool processArgs(const Strings & args, bool finish) override;

     nlohmann::json toJSON() override;

+protected:
+    std::string commandName = "";
 };

 Strings argvToStrings(int argc, char * * argv);

@@ -205,8 +205,19 @@ public:
      * `CanonPath(this.makeRelative(x), this) == path`.
      */
     std::string makeRelative(const CanonPath & path) const;
+
+    friend class std::hash<CanonPath>;
 };

 std::ostream & operator << (std::ostream & stream, const CanonPath & path);

 }
+
+template<>
+struct std::hash<nix::CanonPath>
+{
+    std::size_t operator ()(const nix::CanonPath & s) const noexcept
+    {
+        return std::hash<std::string>{}(s.path);
+    }
+};
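With the `std::hash<nix::CanonPath>` specialization above, `CanonPath` can key the standard unordered containers directly. A small usage sketch:

    #include <unordered_set>

    std::unordered_set<nix::CanonPath> seen;
    seen.insert(nix::CanonPath("/nix/store"));
    bool known = seen.count(nix::CanonPath("/nix/store")) > 0; // true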
@@ -80,12 +80,11 @@ constexpr std::array<ExperimentalFeatureDetails, numXpFeatures> xpFeatureDetails
         .description = R"(
           Enable the use of the [`fetchTree`](@docroot@/language/builtins.md#builtins-fetchTree) built-in function in the Nix language.

-          `fetchTree` exposes a large suite of fetching functionality in a more systematic way.
+          `fetchTree` exposes a generic interface for fetching remote file system trees from different types of remote sources.
+          The [`flakes`](#xp-feature-flakes) feature flag always enables `fetch-tree`.
+          This built-in was previously guarded by the `flakes` experimental feature because of that overlap.

-          This built-in was previously guarded by the `flakes` experimental feature because of that overlap,
-          but since the plan is to work on stabilizing this first (due 2024 Q1), we are putting it underneath a separate feature.
-          Once we've made the changes we want to make, enabling just this feature will serve as a "release candidate" --- allowing users to try out the functionality we want to stabilize and not any other functionality we don't yet want to, in isolation.
+          Enabling just this feature serves as a "release candidate", allowing users to try it out in isolation.
         )",
     },
     {

@@ -106,7 +106,7 @@ void parse(
     std::string hashs = getString(source, 20);
     left -= 20;

-    Hash hash(htSHA1);
+    Hash hash(HashAlgorithm::SHA1);
     std::copy(hashs.begin(), hashs.end(), hash.hash);

     hook(name, TreeEntry {
@@ -241,12 +241,12 @@ Mode dump(


 TreeEntry dumpHash(
-    HashType ht,
-    SourceAccessor & accessor, const CanonPath & path, PathFilter & filter)
+    HashAlgorithm ha,
+    SourceAccessor & accessor, const CanonPath & path, PathFilter & filter)
 {
     std::function<DumpHook> hook;
     hook = [&](const CanonPath & path) -> TreeEntry {
-        auto hashSink = HashSink(ht);
+        auto hashSink = HashSink(ha);
         auto mode = dump(accessor, path, hashSink, hook, filter);
         auto hash = hashSink.finish().first;
         return {

@@ -123,9 +123,9 @@ Mode dump(
  * A smaller wrapper around `dump`.
  */
 TreeEntry dumpHash(
-    HashType ht,
-    SourceAccessor & accessor, const CanonPath & path,
-    PathFilter & filter = defaultPathFilter);
+    HashAlgorithm ha,
+    SourceAccessor & accessor, const CanonPath & path,
+    PathFilter & filter = defaultPathFilter);

 /**
  * A line from the output of `git ls-remote --symref`.

@@ -16,23 +16,24 @@

 namespace nix {

-static size_t regularHashSize(HashType type) {
+static size_t regularHashSize(HashAlgorithm type) {
     switch (type) {
-    case htMD5: return md5HashSize;
-    case htSHA1: return sha1HashSize;
-    case htSHA256: return sha256HashSize;
-    case htSHA512: return sha512HashSize;
+    case HashAlgorithm::MD5: return md5HashSize;
+    case HashAlgorithm::SHA1: return sha1HashSize;
+    case HashAlgorithm::SHA256: return sha256HashSize;
+    case HashAlgorithm::SHA512: return sha512HashSize;
     }
     abort();
 }


-std::set<std::string> hashTypes = { "md5", "sha1", "sha256", "sha512" };
+const std::set<std::string> hashAlgorithms = {"md5", "sha1", "sha256", "sha512" };

+const std::set<std::string> hashFormats = {"base64", "nix32", "base16", "sri" };

-Hash::Hash(HashType type) : type(type)
+Hash::Hash(HashAlgorithm algo) : algo(algo)
 {
-    hashSize = regularHashSize(type);
+    hashSize = regularHashSize(algo);
     assert(hashSize <= maxHashSize);
     memset(hash, 0, maxHashSize);
 }
@@ -81,7 +82,7 @@ static std::string printHash16(const Hash & hash)


 // omitted: E O U T
-const std::string base32Chars = "0123456789abcdfghijklmnpqrsvwxyz";
+const std::string nix32Chars = "0123456789abcdfghijklmnpqrsvwxyz";


 static std::string printHash32(const Hash & hash)
@@ -100,7 +101,7 @@ static std::string printHash32(const Hash & hash)
         unsigned char c =
             (hash.hash[i] >> j)
             | (i >= hash.hashSize - 1 ? 0 : hash.hash[i + 1] << (8 - j));
-        s.push_back(base32Chars[c & 0x1f]);
+        s.push_back(nix32Chars[c & 0x1f]);
     }

     return s;
@@ -109,23 +110,23 @@ static std::string printHash32(const Hash & hash)

 std::string printHash16or32(const Hash & hash)
 {
-    assert(hash.type);
-    return hash.to_string(hash.type == htMD5 ? HashFormat::Base16 : HashFormat::Base32, false);
+    assert(static_cast<char>(hash.algo));
+    return hash.to_string(hash.algo == HashAlgorithm::MD5 ? HashFormat::Base16 : HashFormat::Nix32, false);
 }


-std::string Hash::to_string(HashFormat hashFormat, bool includeType) const
+std::string Hash::to_string(HashFormat hashFormat, bool includeAlgo) const
 {
     std::string s;
-    if (hashFormat == HashFormat::SRI || includeType) {
-        s += printHashType(type);
+    if (hashFormat == HashFormat::SRI || includeAlgo) {
+        s += printHashAlgo(algo);
         s += hashFormat == HashFormat::SRI ? '-' : ':';
     }
     switch (hashFormat) {
     case HashFormat::Base16:
         s += printHash16(*this);
         break;
-    case HashFormat::Base32:
+    case HashFormat::Nix32:
         s += printHash32(*this);
         break;
     case HashFormat::Base64:
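The nix32 alphabet deliberately omits `e`, `o`, `u`, `t` (see the comment above) to make accidental words unlikely, and digits are emitted from the most significant 5-bit group down. A standalone sketch of the same loop as `printHash32`:

    #include <cstdint>
    #include <string>

    static const std::string nix32Chars = "0123456789abcdfghijklmnpqrsvwxyz";

    std::string toNix32(const uint8_t * hash, size_t hashSize)
    {
        size_t len = (hashSize * 8 - 1) / 5 + 1; // ceil(bits / 5)
        std::string s;
        for (int n = int(len) - 1; n >= 0; n--) {
            unsigned int b = n * 5, i = b / 8, j = b % 8; // bit offset of digit n
            unsigned char c =
                (hash[i] >> j)
                | (i >= hashSize - 1 ? 0 : hash[i + 1] << (8 - j));
            s.push_back(nix32Chars[c & 0x1f]);
        }
        return s;
    }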
@@ -136,7 +137,7 @@ std::string Hash::to_string(HashFormat hashFormat, bool includeType) const
     return s;
 }

-Hash Hash::dummy(htSHA256);
+Hash Hash::dummy(HashAlgorithm::SHA256);

 Hash Hash::parseSRI(std::string_view original) {
     auto rest = original;
@@ -145,18 +146,18 @@ Hash Hash::parseSRI(std::string_view original) {
     auto hashRaw = splitPrefixTo(rest, '-');
     if (!hashRaw)
         throw BadHash("hash '%s' is not SRI", original);
-    HashType parsedType = parseHashType(*hashRaw);
+    HashAlgorithm parsedType = parseHashAlgo(*hashRaw);

     return Hash(rest, parsedType, true);
 }

 // Mutates the string to eliminate the prefixes when found
-static std::pair<std::optional<HashType>, bool> getParsedTypeAndSRI(std::string_view & rest)
+static std::pair<std::optional<HashAlgorithm>, bool> getParsedTypeAndSRI(std::string_view & rest)
 {
     bool isSRI = false;

     // Parse the hash type before the separator, if there was one.
-    std::optional<HashType> optParsedType;
+    std::optional<HashAlgorithm> optParsedType;
     {
         auto hashRaw = splitPrefixTo(rest, ':');

@@ -166,7 +167,7 @@ static std::pair<std::optional<HashType>, bool> getParsedTypeAndSRI(std::string_
             isSRI = true;
         }
         if (hashRaw)
-            optParsedType = parseHashType(*hashRaw);
+            optParsedType = parseHashAlgo(*hashRaw);
     }

     return {optParsedType, isSRI};
@@ -185,29 +186,29 @@ Hash Hash::parseAnyPrefixed(std::string_view original)
     return Hash(rest, *optParsedType, isSRI);
 }

-Hash Hash::parseAny(std::string_view original, std::optional<HashType> optType)
+Hash Hash::parseAny(std::string_view original, std::optional<HashAlgorithm> optAlgo)
 {
     auto rest = original;
     auto [optParsedType, isSRI] = getParsedTypeAndSRI(rest);

     // Either the string or user must provide the type, if they both do they
     // must agree.
-    if (!optParsedType && !optType)
+    if (!optParsedType && !optAlgo)
         throw BadHash("hash '%s' does not include a type, nor is the type otherwise known from context", rest);
-    else if (optParsedType && optType && *optParsedType != *optType)
-        throw BadHash("hash '%s' should have type '%s'", original, printHashType(*optType));
+    else if (optParsedType && optAlgo && *optParsedType != *optAlgo)
+        throw BadHash("hash '%s' should have type '%s'", original, printHashAlgo(*optAlgo));

-    HashType hashType = optParsedType ? *optParsedType : *optType;
-    return Hash(rest, hashType, isSRI);
+    HashAlgorithm hashAlgo = optParsedType ? *optParsedType : *optAlgo;
+    return Hash(rest, hashAlgo, isSRI);
 }

-Hash Hash::parseNonSRIUnprefixed(std::string_view s, HashType type)
+Hash Hash::parseNonSRIUnprefixed(std::string_view s, HashAlgorithm algo)
 {
-    return Hash(s, type, false);
+    return Hash(s, algo, false);
 }

-Hash::Hash(std::string_view rest, HashType type, bool isSRI)
-    : Hash(type)
+Hash::Hash(std::string_view rest, HashAlgorithm algo, bool isSRI)
+    : Hash(algo)
 {
     if (!isSRI && rest.size() == base16Len()) {

@@ -230,8 +231,8 @@ Hash::Hash(std::string_view rest, HashType type, bool isSRI)
         for (unsigned int n = 0; n < rest.size(); ++n) {
             char c = rest[rest.size() - n - 1];
             unsigned char digit;
-            for (digit = 0; digit < base32Chars.size(); ++digit) /* !!! slow */
-                if (base32Chars[digit] == c) break;
+            for (digit = 0; digit < nix32Chars.size(); ++digit) /* !!! slow */
+                if (nix32Chars[digit] == c) break;
             if (digit >= 32)
                 throw BadHash("invalid base-32 hash '%s'", rest);
             unsigned int b = n * 5;
@@ -257,19 +258,19 @@ Hash::Hash(std::string_view rest, HashType type, bool isSRI)
     }

     else
-        throw BadHash("hash '%s' has wrong length for hash type '%s'", rest, printHashType(this->type));
+        throw BadHash("hash '%s' has wrong length for hash algorithm '%s'", rest, printHashAlgo(this->algo));
 }

-Hash newHashAllowEmpty(std::string_view hashStr, std::optional<HashType> ht)
+Hash newHashAllowEmpty(std::string_view hashStr, std::optional<HashAlgorithm> ha)
 {
     if (hashStr.empty()) {
-        if (!ht)
+        if (!ha)
             throw BadHash("empty hash requires explicit hash type");
-        Hash h(*ht);
+        Hash h(*ha);
         warn("found empty hash, assuming '%s'", h.to_string(HashFormat::SRI, true));
         return h;
     } else
-        return Hash::parseAny(hashStr, ht);
+        return Hash::parseAny(hashStr, ha);
 }


@@ -282,58 +283,58 @@ union Ctx
 };


-static void start(HashType ht, Ctx & ctx)
+static void start(HashAlgorithm ha, Ctx & ctx)
 {
-    if (ht == htMD5) MD5_Init(&ctx.md5);
-    else if (ht == htSHA1) SHA1_Init(&ctx.sha1);
-    else if (ht == htSHA256) SHA256_Init(&ctx.sha256);
-    else if (ht == htSHA512) SHA512_Init(&ctx.sha512);
+    if (ha == HashAlgorithm::MD5) MD5_Init(&ctx.md5);
+    else if (ha == HashAlgorithm::SHA1) SHA1_Init(&ctx.sha1);
+    else if (ha == HashAlgorithm::SHA256) SHA256_Init(&ctx.sha256);
+    else if (ha == HashAlgorithm::SHA512) SHA512_Init(&ctx.sha512);
 }


-static void update(HashType ht, Ctx & ctx,
-    std::string_view data)
+static void update(HashAlgorithm ha, Ctx & ctx,
+    std::string_view data)
 {
-    if (ht == htMD5) MD5_Update(&ctx.md5, data.data(), data.size());
-    else if (ht == htSHA1) SHA1_Update(&ctx.sha1, data.data(), data.size());
-    else if (ht == htSHA256) SHA256_Update(&ctx.sha256, data.data(), data.size());
-    else if (ht == htSHA512) SHA512_Update(&ctx.sha512, data.data(), data.size());
+    if (ha == HashAlgorithm::MD5) MD5_Update(&ctx.md5, data.data(), data.size());
+    else if (ha == HashAlgorithm::SHA1) SHA1_Update(&ctx.sha1, data.data(), data.size());
+    else if (ha == HashAlgorithm::SHA256) SHA256_Update(&ctx.sha256, data.data(), data.size());
+    else if (ha == HashAlgorithm::SHA512) SHA512_Update(&ctx.sha512, data.data(), data.size());
 }


-static void finish(HashType ht, Ctx & ctx, unsigned char * hash)
+static void finish(HashAlgorithm ha, Ctx & ctx, unsigned char * hash)
 {
-    if (ht == htMD5) MD5_Final(hash, &ctx.md5);
-    else if (ht == htSHA1) SHA1_Final(hash, &ctx.sha1);
-    else if (ht == htSHA256) SHA256_Final(hash, &ctx.sha256);
-    else if (ht == htSHA512) SHA512_Final(hash, &ctx.sha512);
+    if (ha == HashAlgorithm::MD5) MD5_Final(hash, &ctx.md5);
+    else if (ha == HashAlgorithm::SHA1) SHA1_Final(hash, &ctx.sha1);
+    else if (ha == HashAlgorithm::SHA256) SHA256_Final(hash, &ctx.sha256);
+    else if (ha == HashAlgorithm::SHA512) SHA512_Final(hash, &ctx.sha512);
 }


-Hash hashString(HashType ht, std::string_view s)
+Hash hashString(HashAlgorithm ha, std::string_view s)
 {
     Ctx ctx;
-    Hash hash(ht);
-    start(ht, ctx);
-    update(ht, ctx, s);
-    finish(ht, ctx, hash.hash);
+    Hash hash(ha);
+    start(ha, ctx);
+    update(ha, ctx, s);
+    finish(ha, ctx, hash.hash);
     return hash;
 }


-Hash hashFile(HashType ht, const Path & path)
+Hash hashFile(HashAlgorithm ha, const Path & path)
 {
-    HashSink sink(ht);
+    HashSink sink(ha);
     readFile(path, sink);
     return sink.finish().first;
 }


-HashSink::HashSink(HashType ht) : ht(ht)
+HashSink::HashSink(HashAlgorithm ha) : ha(ha)
 {
     ctx = new Ctx;
     bytes = 0;
-    start(ht, *ctx);
+    start(ha, *ctx);
 }

 HashSink::~HashSink()
@@ -345,14 +346,14 @@ HashSink::~HashSink()
 void HashSink::writeUnbuffered(std::string_view data)
 {
     bytes += data.size();
-    update(ht, *ctx, data);
+    update(ha, *ctx, data);
 }

 HashResult HashSink::finish()
 {
     flush();
-    Hash hash(ht);
-    nix::finish(ht, *ctx, hash.hash);
+    Hash hash(ha);
+    nix::finish(ha, *ctx, hash.hash);
     return HashResult(hash, bytes);
 }

@@ -360,16 +361,16 @@ HashResult HashSink::currentHash()
 {
     flush();
     Ctx ctx2 = *ctx;
-    Hash hash(ht);
-    nix::finish(ht, ctx2, hash.hash);
+    Hash hash(ha);
+    nix::finish(ha, ctx2, hash.hash);
     return HashResult(hash, bytes);
 }


 HashResult hashPath(
-    HashType ht, const Path & path, PathFilter & filter)
+    HashAlgorithm ha, const Path & path, PathFilter & filter)
 {
-    HashSink sink(ht);
+    HashSink sink(ha);
     dumpPath(path, sink, filter);
     return sink.finish();
 }
@@ -377,7 +378,7 @@ HashResult hashPath(

 Hash compressHash(const Hash & hash, unsigned int newSize)
 {
-    Hash h(hash.type);
+    Hash h(hash.algo);
     h.hashSize = newSize;
     for (unsigned int i = 0; i < hash.hashSize; ++i)
         h.hash[i % newSize] ^= hash.hash[i];
@@ -388,7 +389,11 @@ Hash compressHash(const Hash & hash, unsigned int newSize)
 std::optional<HashFormat> parseHashFormatOpt(std::string_view hashFormatName)
 {
     if (hashFormatName == "base16") return HashFormat::Base16;
-    if (hashFormatName == "base32") return HashFormat::Base32;
+    if (hashFormatName == "nix32") return HashFormat::Nix32;
+    if (hashFormatName == "base32") {
+        warn(R"("base32" is a deprecated alias for hash format "nix32".)");
+        return HashFormat::Nix32;
+    }
     if (hashFormatName == "base64") return HashFormat::Base64;
     if (hashFormatName == "sri") return HashFormat::SRI;
     return std::nullopt;
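The deprecation shim above keeps old spellings working while steering users to the new name; a quick round-trip sketch:

    auto a = parseHashFormatOpt("nix32");  // HashFormat::Nix32, silent
    auto b = parseHashFormatOpt("base32"); // HashFormat::Nix32, but warns
    assert(a && b && *a == *b);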
@@ -407,8 +412,8 @@ std::string_view printHashFormat(HashFormat HashFormat)
     switch (HashFormat) {
     case HashFormat::Base64:
         return "base64";
-    case HashFormat::Base32:
-        return "base32";
+    case HashFormat::Nix32:
+        return "nix32";
     case HashFormat::Base16:
         return "base16";
     case HashFormat::SRI:
@@ -420,31 +425,31 @@ std::string_view printHashFormat(HashFormat HashFormat)
     }
 }

-std::optional<HashType> parseHashTypeOpt(std::string_view s)
+std::optional<HashAlgorithm> parseHashAlgoOpt(std::string_view s)
 {
-    if (s == "md5") return htMD5;
-    if (s == "sha1") return htSHA1;
-    if (s == "sha256") return htSHA256;
-    if (s == "sha512") return htSHA512;
+    if (s == "md5") return HashAlgorithm::MD5;
+    if (s == "sha1") return HashAlgorithm::SHA1;
+    if (s == "sha256") return HashAlgorithm::SHA256;
+    if (s == "sha512") return HashAlgorithm::SHA512;
     return std::nullopt;
 }

-HashType parseHashType(std::string_view s)
+HashAlgorithm parseHashAlgo(std::string_view s)
 {
-    auto opt_h = parseHashTypeOpt(s);
+    auto opt_h = parseHashAlgoOpt(s);
     if (opt_h)
         return *opt_h;
     else
         throw UsageError("unknown hash algorithm '%1%', expect 'md5', 'sha1', 'sha256', or 'sha512'", s);
 }

-std::string_view printHashType(HashType ht)
+std::string_view printHashAlgo(HashAlgorithm ha)
 {
-    switch (ht) {
-    case htMD5: return "md5";
-    case htSHA1: return "sha1";
-    case htSHA256: return "sha256";
-    case htSHA512: return "sha512";
+    switch (ha) {
+    case HashAlgorithm::MD5: return "md5";
+    case HashAlgorithm::SHA1: return "sha1";
+    case HashAlgorithm::SHA256: return "sha256";
+    case HashAlgorithm::SHA512: return "sha512";
     default:
         // illegal hash type enum value internally, as opposed to external input
         // which should be validated with nice error message.

@@ -12,7 +12,7 @@ namespace nix {
 MakeError(BadHash, Error);


-enum HashType : char { htMD5 = 42, htSHA1, htSHA256, htSHA512 };
+enum struct HashAlgorithm : char { MD5 = 42, SHA1, SHA256, SHA512 };


 const int md5HashSize = 16;
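The scoped enum keeps the curious `= 42` base value, presumably so the underlying `char` values stay stable wherever they leak into serialised state; only the spelling of the names changes. A tiny round-trip sketch:

    HashAlgorithm ha = parseHashAlgo("sha256");
    assert(printHashAlgo(ha) == "sha256");
    static_assert(static_cast<char>(HashAlgorithm::MD5) == 42,
        "enumerator values unchanged by the rename");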
@ -20,9 +20,9 @@ const int sha1HashSize = 20;
|
||||
const int sha256HashSize = 32;
|
||||
const int sha512HashSize = 64;
|
||||
|
||||
extern std::set<std::string> hashTypes;
|
||||
extern const std::set<std::string> hashAlgorithms;
|
||||
|
||||
extern const std::string base32Chars;
|
||||
extern const std::string nix32Chars;
|
||||
|
||||
/**
|
||||
* @brief Enumeration representing the hash formats.
|
||||
@ -31,8 +31,8 @@ enum struct HashFormat : int {
|
||||
/// @brief Base 64 encoding.
|
||||
/// @see [IETF RFC 4648, section 4](https://datatracker.ietf.org/doc/html/rfc4648#section-4).
|
||||
Base64,
|
||||
/// @brief Nix-specific base-32 encoding. @see base32Chars
|
||||
Base32,
|
||||
/// @brief Nix-specific base-32 encoding. @see nix32Chars
|
||||
Nix32,
|
||||
/// @brief Lowercase hexadecimal encoding. @see base16Chars
|
||||
Base16,
|
||||
/// @brief "<hash algo>:<Base 64 hash>", format of the SRI integrity attribute.
|
||||
@ -40,6 +40,7 @@ enum struct HashFormat : int {
|
||||
SRI
|
||||
};
|
||||
|
||||
extern const std::set<std::string> hashFormats;
|
||||
|
||||
struct Hash
|
||||
{
|
||||
@ -47,12 +48,12 @@ struct Hash
|
||||
size_t hashSize = 0;
|
||||
uint8_t hash[maxHashSize] = {};
|
||||
|
||||
HashType type;
|
||||
HashAlgorithm algo;
|
||||
|
||||
/**
|
||||
* Create a zero-filled hash object.
|
||||
*/
|
||||
Hash(HashType type);
|
||||
explicit Hash(HashAlgorithm algo);
|
||||
|
||||
/**
|
||||
* Parse the hash from a string representation in the format
|
||||
@ -61,7 +62,7 @@ struct Hash
|
||||
* is not present, then the hash type must be specified in the
|
||||
* string.
|
||||
*/
|
||||
static Hash parseAny(std::string_view s, std::optional<HashType> type);
|
||||
static Hash parseAny(std::string_view s, std::optional<HashAlgorithm> optAlgo);
|
||||
|
||||
/**
|
||||
* Parse a hash from a string representation like the above, except the
|
||||
@ -73,7 +74,7 @@ struct Hash
|
||||
* Parse a plain hash that musst not have any prefix indicating the type.
|
||||
* The type is passed in to disambiguate.
|
||||
*/
|
||||
static Hash parseNonSRIUnprefixed(std::string_view s, HashType type);
|
||||
static Hash parseNonSRIUnprefixed(std::string_view s, HashAlgorithm algo);
|
||||
|
||||
static Hash parseSRI(std::string_view original);
|
||||
|
||||
@ -82,7 +83,7 @@ private:
|
||||
* The type must be provided, the string view must not include <type>
|
||||
* prefix. `isSRI` helps disambigate the various base-* encodings.
|
||||
*/
|
||||
Hash(std::string_view s, HashType type, bool isSRI);
|
||||
Hash(std::string_view s, HashAlgorithm algo, bool isSRI);
|
||||
|
||||
public:
|
||||
/**
|
||||
@ -103,7 +104,7 @@ public:
|
||||
/**
|
||||
* Returns the length of a base-16 representation of this hash.
|
||||
*/
|
||||
size_t base16Len() const
|
||||
[[nodiscard]] size_t base16Len() const
|
||||
{
|
||||
return hashSize * 2;
|
||||
}
|
||||
@ -111,7 +112,7 @@ public:
|
||||
/**
|
||||
* Returns the length of a base-32 representation of this hash.
|
||||
*/
|
||||
size_t base32Len() const
|
||||
[[nodiscard]] size_t base32Len() const
|
||||
{
|
||||
return (hashSize * 8 - 1) / 5 + 1;
|
||||
}
|
||||
@ -119,24 +120,24 @@ public:
|
||||
/**
|
||||
* Returns the length of a base-64 representation of this hash.
|
||||
*/
|
||||
size_t base64Len() const
|
||||
[[nodiscard]] size_t base64Len() const
|
||||
{
|
||||
return ((4 * hashSize / 3) + 3) & ~3;
|
||||
}

    /**
     * Return a string representation of the hash, in base-16, base-32
     * or base-64. By default, this is prefixed by the hash type
     * or base-64. By default, this is prefixed by the hash algo
     * (e.g. "sha256:").
     */
    std::string to_string(HashFormat hashFormat, bool includeType) const;
    [[nodiscard]] std::string to_string(HashFormat hashFormat, bool includeAlgo) const;

    std::string gitRev() const
    [[nodiscard]] std::string gitRev() const
    {
        return to_string(HashFormat::Base16, false);
    }

    std::string gitShortRev() const
    [[nodiscard]] std::string gitShortRev() const
    {
        return std::string(to_string(HashFormat::Base16, false), 0, 7);
    }
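To make the renamed API concrete, a minimal usage sketch (editor's addition; it assumes the post-change names from this diff, and the hashString() declared further down in this header):

// Editor's sketch, assuming the renamed API (HashAlgorithm, includeAlgo).
#include "hash.hh"

using namespace nix;

void example()
{
    Hash h = hashString(HashAlgorithm::SHA256, "hello");
    std::string prefixed = h.to_string(HashFormat::Base32, true);  // "sha256:..." (algo prefix included)
    std::string sri      = h.to_string(HashFormat::SRI, true);     // "sha256-..." (SRI integrity form)
    std::string shortRev = h.gitShortRev();                        // first 7 hex digits, git-style
}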
@ -147,7 +148,7 @@ public:
/**
 * Helper that defaults empty hashes to the 0 hash.
 */
Hash newHashAllowEmpty(std::string_view hashStr, std::optional<HashType> ht);
Hash newHashAllowEmpty(std::string_view hashStr, std::optional<HashAlgorithm> ha);

/**
 * Print a hash in base-16 if it's MD5, or base-32 otherwise.
@ -157,14 +158,14 @@ std::string printHash16or32(const Hash & hash);
/**
 * Compute the hash of the given string.
 */
Hash hashString(HashType ht, std::string_view s);
Hash hashString(HashAlgorithm ha, std::string_view s);

/**
 * Compute the hash of the given file, hashing its contents directly.
 *
 * (Metadata, such as the executable permission bit, is ignored.)
 */
Hash hashFile(HashType ht, const Path & path);
Hash hashFile(HashAlgorithm ha, const Path & path);

/**
 * Compute the hash of the given path, serializing as a Nix Archive and
@ -173,8 +174,8 @@ Hash hashFile(HashType ht, const Path & path);
 * The hash is defined as (essentially) hashString(ht, dumpPath(path)).
 */
typedef std::pair<Hash, uint64_t> HashResult;
HashResult hashPath(HashType ht, const Path & path,
    PathFilter & filter = defaultPathFilter);
HashResult hashPath(HashAlgorithm ha, const Path & path,
    PathFilter & filter = defaultPathFilter);
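The three entry points differ only in what they consume; a short sketch (editor's addition, using the renamed signatures above):

// Editor's sketch of the three hashing entry points after the rename.
#include "hash.hh"

using namespace nix;

void example()
{
    // Hash an in-memory string.
    Hash h1 = hashString(HashAlgorithm::SHA256, "contents");

    // Hash a file's bytes directly (metadata such as the executable bit is ignored).
    Hash h2 = hashFile(HashAlgorithm::SHA256, "/tmp/some-file");

    // Hash a path serialized as a Nix Archive; also yields the NAR size.
    auto [h3, narSize] = hashPath(HashAlgorithm::SHA256, "/tmp/some-dir");
}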

/**
 * Compress a hash to the specified number of bytes by cyclically
@ -200,17 +201,17 @@ std::string_view printHashFormat(HashFormat hashFormat);
/**
 * Parse a string representing a hash type.
 */
HashType parseHashType(std::string_view s);
HashAlgorithm parseHashAlgo(std::string_view s);

/**
 * Will return nothing on parse error.
 */
std::optional<HashType> parseHashTypeOpt(std::string_view s);
std::optional<HashAlgorithm> parseHashAlgoOpt(std::string_view s);

/**
 * And the reverse.
 */
std::string_view printHashType(HashType ht);
std::string_view printHashAlgo(HashAlgorithm ha);
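The parse and print functions are inverses of each other; a round-trip sketch (editor's addition):

// Editor's sketch: round-tripping algorithm names through the renamed API.
#include "hash.hh"
#include <cassert>

using namespace nix;

void example()
{
    HashAlgorithm ha = parseHashAlgo("sha256");  // throws on an unknown name
    assert(printHashAlgo(ha) == "sha256");       // the reverse mapping

    // The *Opt variant signals failure via std::optional instead of throwing.
    assert(!parseHashAlgoOpt("no-such-algo").has_value());
}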


union Ctx;

@ -223,12 +224,12 @@ struct AbstractHashSink : virtual Sink
class HashSink : public BufferedSink, public AbstractHashSink
{
private:
    HashType ht;
    HashAlgorithm ha;
    Ctx * ctx;
    uint64_t bytes;

public:
    HashSink(HashType ht);
    HashSink(HashAlgorithm ha);
    HashSink(const HashSink & h);
    ~HashSink();
    void writeUnbuffered(std::string_view data) override;
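HashSink lets callers hash data incrementally instead of materializing it first. A minimal sketch (editor's addition; it assumes a finish() member returning HashResult, i.e. digest plus byte count, which this hunk does not show):

// Editor's sketch: incremental hashing via HashSink; finish() returning
// HashResult (digest + byte count) is assumed, not shown in this hunk.
#include "hash.hh"

using namespace nix;

void example()
{
    HashSink sink(HashAlgorithm::SHA256);
    sink("chunk one, ");             // Sink's operator() consumes data...
    sink("chunk two");               // ...in as many pieces as the caller likes
    auto [hash, n] = sink.finish();  // digest plus total bytes consumed
}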

@ -1,5 +1,8 @@
#include "posix-source-accessor.hh"
#include "signals.hh"
#include "sync.hh"

#include <unordered_map>

namespace nix {

@ -8,9 +11,9 @@ void PosixSourceAccessor::readFile(
    Sink & sink,
    std::function<void(uint64_t)> sizeCallback)
{
    // FIXME: add O_NOFOLLOW since symlinks should be resolved by the
    // caller?
    AutoCloseFD fd = open(path.c_str(), O_RDONLY | O_CLOEXEC);
    assertNoSymlinks(path);

    AutoCloseFD fd = open(path.c_str(), O_RDONLY | O_CLOEXEC | O_NOFOLLOW);
    if (!fd)
        throw SysError("opening file '%1%'", path);
@ -42,30 +45,55 @@ void PosixSourceAccessor::readFile(

bool PosixSourceAccessor::pathExists(const CanonPath & path)
{
    if (auto parent = path.parent()) assertNoSymlinks(*parent);
    return nix::pathExists(path.abs());
}

std::optional<struct stat> PosixSourceAccessor::cachedLstat(const CanonPath & path)
{
    static Sync<std::unordered_map<CanonPath, std::optional<struct stat>>> _cache;

    {
        auto cache(_cache.lock());
        auto i = cache->find(path);
        if (i != cache->end()) return i->second;
    }

    std::optional<struct stat> st{std::in_place};
    if (::lstat(path.c_str(), &*st)) {
        if (errno == ENOENT || errno == ENOTDIR)
            st.reset();
        else
            throw SysError("getting status of '%s'", showPath(path));
    }

    auto cache(_cache.lock());
    if (cache->size() >= 16384) cache->clear();
    cache->emplace(path, st);

    return st;
}
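Two details of cachedLstat are worth spelling out: the lock is released while the lstat() syscall runs, so a slow stat does not serialize other threads, and the cache is simply cleared once it reaches 16384 entries rather than evicting individual items. The Sync wrapper it relies on pairs a value with a mutex; a minimal usage sketch of that pattern (editor's addition; semantics assumed from sync.hh):

// Editor's sketch of the Sync<T> pattern used by cachedLstat above
// (assumed semantics: lock() returns a guard that holds the mutex and
// dereferences to the wrapped value).
#include "sync.hh"
#include <string>
#include <unordered_map>

static nix::Sync<std::unordered_map<std::string, int>> counters;

void bump(const std::string & key)
{
    auto c(counters.lock());  // mutex acquired
    (*c)[key] += 1;           // safe shared-state mutation
}                             // mutex released when the guard goes out of scope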

std::optional<SourceAccessor::Stat> PosixSourceAccessor::maybeLstat(const CanonPath & path)
{
    struct stat st;
    if (::lstat(path.c_str(), &st)) {
        if (errno == ENOENT) return std::nullopt;
        throw SysError("getting status of '%s'", showPath(path));
    }
    mtime = std::max(mtime, st.st_mtime);
    if (auto parent = path.parent()) assertNoSymlinks(*parent);
    auto st = cachedLstat(path);
    if (!st) return std::nullopt;
    mtime = std::max(mtime, st->st_mtime);
    return Stat {
        .type =
            S_ISREG(st.st_mode) ? tRegular :
            S_ISDIR(st.st_mode) ? tDirectory :
            S_ISLNK(st.st_mode) ? tSymlink :
            S_ISREG(st->st_mode) ? tRegular :
            S_ISDIR(st->st_mode) ? tDirectory :
            S_ISLNK(st->st_mode) ? tSymlink :
            tMisc,
        .fileSize = S_ISREG(st.st_mode) ? std::optional<uint64_t>(st.st_size) : std::nullopt,
        .isExecutable = S_ISREG(st.st_mode) && st.st_mode & S_IXUSR,
        .fileSize = S_ISREG(st->st_mode) ? std::optional<uint64_t>(st->st_size) : std::nullopt,
        .isExecutable = S_ISREG(st->st_mode) && st->st_mode & S_IXUSR,
    };
}

SourceAccessor::DirEntries PosixSourceAccessor::readDirectory(const CanonPath & path)
{
    assertNoSymlinks(path);
    DirEntries res;
    for (auto & entry : nix::readDirectory(path.abs())) {
        std::optional<Type> type;
@ -81,6 +109,7 @@ SourceAccessor::DirEntries PosixSourceAccessor::readDirectory(const CanonPath &

std::string PosixSourceAccessor::readLink(const CanonPath & path)
{
    if (auto parent = path.parent()) assertNoSymlinks(*parent);
    return nix::readLink(path.abs());
}

@ -89,4 +118,14 @@ std::optional<CanonPath> PosixSourceAccessor::getPhysicalPath(const CanonPath &
    return path;
}

void PosixSourceAccessor::assertNoSymlinks(CanonPath path)
{
    while (!path.isRoot()) {
        auto st = cachedLstat(path);
        if (st && S_ISLNK(st->st_mode))
            throw Error("path '%s' is a symlink", showPath(path));
        path.pop();
    }
}
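The loop checks the path itself and each ancestor in turn, popping one component per iteration, so for /a/b/c it inspects /a/b/c, /a/b, and /a; each lookup goes through cachedLstat, keeping repeated traversals cheap. Since assertNoSymlinks is private, callers hit it indirectly, e.g. (editor's sketch):

// Editor's sketch: assertNoSymlinks is exercised indirectly through the
// public accessor methods shown in this diff.
#include "posix-source-accessor.hh"

using namespace nix;

void example(PosixSourceAccessor & accessor)
{
    // Throws Error("path '...' is a symlink") if /a/b/c or any of its
    // ancestors is a symlink; otherwise returns the file contents.
    std::string contents = accessor.readFile(CanonPath("/a/b/c"));
}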

}

@ -29,6 +29,15 @@ struct PosixSourceAccessor : virtual SourceAccessor
    std::string readLink(const CanonPath & path) override;

    std::optional<CanonPath> getPhysicalPath(const CanonPath & path) override;

private:

    /**
     * Throw an error if `path` or any of its ancestors are symlinks.
     */
    void assertNoSymlinks(CanonPath path);

    std::optional<struct stat> cachedLstat(const CanonPath & path);
};

}

@ -23,8 +23,8 @@ static void search(
    static bool isBase32[256];
    std::call_once(initialised, [](){
        for (unsigned int i = 0; i < 256; ++i) isBase32[i] = false;
        for (unsigned int i = 0; i < base32Chars.size(); ++i)
            isBase32[(unsigned char) base32Chars[i]] = true;
        for (unsigned int i = 0; i < nix32Chars.size(); ++i)
            isBase32[(unsigned char) nix32Chars[i]] = true;
    });

    for (size_t i = 0; i + refLength <= s.size(); ) {
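For context: search() scans a byte string for candidate hash parts of store-path references, and the 256-entry table makes the per-character membership test O(1). A simplified sketch of the scanning idea (editor's addition; the real function also records which references it found and skips ahead more cleverly):

// Editor's simplified sketch of the table-driven scan (the real search()
// additionally records which of the sought references occur).
#include <cstddef>
#include <string_view>

static bool isNix32Char[256];  // filled once, as in the hunk above

bool containsCandidate(std::string_view s, std::size_t refLength)
{
    for (std::size_t i = 0; i + refLength <= s.size(); ++i) {
        bool ok = true;
        for (std::size_t j = 0; j < refLength; ++j)
            if (!isNix32Char[(unsigned char) s[i + j]]) { ok = false; break; }
        if (ok) return true;
    }
    return false;
}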
@ -110,8 +110,8 @@ void RewritingSink::flush()
    prev.clear();
}

HashModuloSink::HashModuloSink(HashType ht, const std::string & modulus)
    : hashSink(ht)
HashModuloSink::HashModuloSink(HashAlgorithm ha, const std::string & modulus)
    : hashSink(ha)
    , rewritingSink(modulus, std::string(modulus.size(), 0), hashSink)
{
}
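For context: a "hash modulo" is the hash of a stream in which every occurrence of `modulus` (typically a store path's hash part) has been replaced by the same number of zero bytes, via the RewritingSink it is constructed with; this is how Nix hashes contents that refer to their own store path. A usage sketch (editor's addition; assumes HashModuloSink exposes a finish() like HashSink's):

// Editor's sketch: hashing modulo a self-reference (constructor as renamed
// above; finish() assumed to mirror HashSink's).
#include "references.hh"
#include <string>

using namespace nix;

void example()
{
    std::string selfHashPart = "0000000000000000000000000000000000000000";  // hypothetical placeholder
    HashModuloSink sink(HashAlgorithm::SHA256, selfHashPart);
    sink("data that embeds " + selfHashPart + " somewhere");  // occurrence is zeroed before hashing
    auto [hash, size] = sink.finish();
}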

@ -46,7 +46,7 @@ struct HashModuloSink : AbstractHashSink
    HashSink hashSink;
    RewritingSink rewritingSink;

    HashModuloSink(HashType ht, const std::string & modulus);
    HashModuloSink(HashAlgorithm ha, const std::string & modulus);

    void operator () (std::string_view data) override;

@ -39,11 +39,11 @@ void SourceAccessor::readFile(
}

Hash SourceAccessor::hashPath(
    const CanonPath & path,
    PathFilter & filter,
    HashType ht)
    const CanonPath & path,
    PathFilter & filter,
    HashAlgorithm ha)
{
    HashSink sink(ht);
    HashSink sink(ha);
    dumpPath(path, sink, filter);
    return sink.finish().first;
}
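This ties the two diffs together: SourceAccessor::hashPath dumps the tree as a NAR into a HashSink, so the digest covers file contents, structure, and executable bits. A usage sketch (editor's addition; header name assumed):

// Editor's sketch: NAR-hashing a tree through a SourceAccessor
// (header name "source-accessor.hh" assumed).
#include "source-accessor.hh"

using namespace nix;

void example(SourceAccessor & accessor)
{
    Hash narHash = accessor.hashPath(
        CanonPath("/some/dir"),
        defaultPathFilter,
        HashAlgorithm::SHA256);
}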

@ -26,6 +26,13 @@ struct SourceAccessor

    /**
     * Return the contents of a file as a string.
     *
     * @note Unlike Unix, this method should *not* follow symlinks. Nix
     * by default wants to manipulate symlinks explicitly, and not
     * implicitly follow them, as they are frequently untrusted user data
     * and thus may point to arbitrary locations. Acting on the
     * targets of symlinks should only occasionally be done, and only
     * with care.
     */
    virtual std::string readFile(const CanonPath & path);

@ -34,7 +41,10 @@ struct SourceAccessor
     * called with the size of the file before any data is written to
     * the sink.
     *
     * Note: subclasses of `SourceAccessor` need to implement at least
     * @note Like the other `readFile`, this method should *not* follow
     * symlinks.
     *
     * @note subclasses of `SourceAccessor` need to implement at least
     * one of the `readFile()` variants.
     */
    virtual void readFile(
@ -87,6 +97,9 @@ struct SourceAccessor

    typedef std::map<std::string, DirEntry> DirEntries;

    /**
     * @note Like `readFile`, this method should *not* follow symlinks.
     */
    virtual DirEntries readDirectory(const CanonPath & path) = 0;

    virtual std::string readLink(const CanonPath & path) = 0;

@ -97,9 +110,9 @@ struct SourceAccessor
        PathFilter & filter = defaultPathFilter);

    Hash hashPath(
        const CanonPath & path,
        PathFilter & filter = defaultPathFilter,
        HashType ht = htSHA256);
        const CanonPath & path,
        PathFilter & filter = defaultPathFilter,
        HashAlgorithm ha = HashAlgorithm::SHA256);

    /**
     * Return a corresponding path in the root filesystem, if
@ -53,6 +53,7 @@ TarArchive::TarArchive(Source & source, bool raw) : buffer(65536)
        archive_read_support_format_raw(archive);
        archive_read_support_format_empty(archive);
    }
    archive_read_set_option(archive, NULL, "mac-ext", NULL);
    check(archive_read_open(archive, (void *)this, callback_open, callback_read, callback_close), "Failed to open archive (%s)");
}

@ -63,6 +64,7 @@ TarArchive::TarArchive(const Path & path)

    archive_read_support_filter_all(archive);
    archive_read_support_format_all(archive);
    archive_read_set_option(archive, NULL, "mac-ext", NULL);
    check(archive_read_open_filename(archive, path.c_str(), 16384), "failed to open archive: %s");
}
Some files were not shown because too many files have changed in this diff.