From b36857ac8d60cbf9a78c3c69f6370d38a14facbc Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 29 Nov 2023 12:35:08 +0100 Subject: [PATCH 001/164] Add a Git-based content-addressed tarball cache GitArchiveInputScheme now streams tarballs into a Git repository. This deduplicates data a lot, e.g. when you're fetching different revisions of the Nixpkgs repo. It also warns if the tree hash returned by GitHub doesn't match the tree hash of the imported tarball. --- src/libfetchers/attrs.cc | 5 + src/libfetchers/attrs.hh | 2 + src/libfetchers/git-utils.cc | 178 +++++++++++++++++++++++++++++++++++ src/libfetchers/git-utils.hh | 10 ++ src/libfetchers/github.cc | 114 ++++++++++++++-------- 5 files changed, 272 insertions(+), 37 deletions(-) diff --git a/src/libfetchers/attrs.cc b/src/libfetchers/attrs.cc index a565d19d4..e3fa1d26a 100644 --- a/src/libfetchers/attrs.cc +++ b/src/libfetchers/attrs.cc @@ -104,4 +104,9 @@ std::map attrsToQuery(const Attrs & attrs) return query; } +Hash getRevAttr(const Attrs & attrs, const std::string & name) +{ + return Hash::parseAny(getStrAttr(attrs, name), htSHA1); +} + } diff --git a/src/libfetchers/attrs.hh b/src/libfetchers/attrs.hh index b9a2c824e..97a74bce0 100644 --- a/src/libfetchers/attrs.hh +++ b/src/libfetchers/attrs.hh @@ -39,4 +39,6 @@ bool getBoolAttr(const Attrs & attrs, const std::string & name); std::map attrsToQuery(const Attrs & attrs); +Hash getRevAttr(const Attrs & attrs, const std::string & name); + } diff --git a/src/libfetchers/git-utils.cc b/src/libfetchers/git-utils.cc index 19eae0e1d..abad42c29 100644 --- a/src/libfetchers/git-utils.cc +++ b/src/libfetchers/git-utils.cc @@ -4,6 +4,7 @@ #include "finally.hh" #include "processes.hh" #include "signals.hh" +#include "users.hh" #include @@ -21,6 +22,9 @@ #include #include +#include "tarfile.hh" +#include + #include #include #include @@ -307,6 +311,158 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this return std::nullopt; } + TarballInfo 
importTarball(Source & source) override + { + TarArchive archive(source); + + struct PendingDir + { + std::string name; + TreeBuilder builder; + }; + + std::vector pendingDirs; + + auto pushBuilder = [&](std::string name) + { + git_treebuilder * b; + if (git_treebuilder_new(&b, *this, nullptr)) + throw Error("creating a tree builder: %s", git_error_last()->message); + pendingDirs.push_back({ .name = std::move(name), .builder = TreeBuilder(b) }); + }; + + auto popBuilder = [&]() -> std::pair + { + assert(!pendingDirs.empty()); + auto pending = std::move(pendingDirs.back()); + git_oid oid; + if (git_treebuilder_write(&oid, pending.builder.get())) + throw Error("creating a tree object: %s", git_error_last()->message); + pendingDirs.pop_back(); + return {oid, pending.name}; + }; + + auto addToTree = [&](const std::string & name, const git_oid & oid, git_filemode_t mode) + { + assert(!pendingDirs.empty()); + auto & pending = pendingDirs.back(); + if (git_treebuilder_insert(nullptr, pending.builder.get(), name.c_str(), &oid, mode)) + throw Error("adding a file to a tree builder: %s", git_error_last()->message); + }; + + auto updateBuilders = [&](boost::span names) + { + // Find the common prefix of pendingDirs and names. + size_t prefixLen = 0; + for (; prefixLen < names.size() && prefixLen + 1 < pendingDirs.size(); ++prefixLen) + if (names[prefixLen] != pendingDirs[prefixLen + 1].name) + break; + + // Finish the builders that are not part of the common prefix. + for (auto n = pendingDirs.size(); n > prefixLen + 1; --n) { + auto [oid, name] = popBuilder(); + addToTree(name, oid, GIT_FILEMODE_TREE); + } + + // Create builders for the new directories. 
+ for (auto n = prefixLen; n < names.size(); ++n) + pushBuilder(names[n]); + }; + + pushBuilder(""); + + size_t componentsToStrip = 1; + + time_t lastModified = 0; + + for (;;) { + // FIXME: merge with extract_archive + struct archive_entry * entry; + int r = archive_read_next_header(archive.archive, &entry); + if (r == ARCHIVE_EOF) break; + auto path = archive_entry_pathname(entry); + if (!path) + throw Error("cannot get archive member name: %s", archive_error_string(archive.archive)); + if (r == ARCHIVE_WARN) + warn(archive_error_string(archive.archive)); + else + archive.check(r); + + lastModified = std::max(lastModified, archive_entry_mtime(entry)); + + auto pathComponents = tokenizeString>(path, "/"); + + boost::span pathComponents2{pathComponents}; + + if (pathComponents2.size() <= componentsToStrip) continue; + pathComponents2 = pathComponents2.subspan(componentsToStrip); + + updateBuilders( + archive_entry_filetype(entry) == AE_IFDIR + ? pathComponents2 + : pathComponents2.first(pathComponents2.size() - 1)); + + switch (archive_entry_filetype(entry)) { + + case AE_IFDIR: + // Nothing to do right now. + break; + + case AE_IFREG: { + + git_writestream * stream = nullptr; + if (git_blob_create_from_stream(&stream, *this, nullptr)) + throw Error("creating a blob stream object: %s", git_error_last()->message); + + while (true) { + std::vector buf(128 * 1024); + auto n = archive_read_data(archive.archive, buf.data(), buf.size()); + if (n < 0) + throw Error("cannot read file '%s' from tarball", path); + if (n == 0) break; + if (stream->write(stream, (const char *) buf.data(), n)) + throw Error("writing a blob for tarball member '%s': %s", path, git_error_last()->message); + } + + git_oid oid; + if (git_blob_create_from_stream_commit(&oid, stream)) + throw Error("creating a blob object for tarball member '%s': %s", path, git_error_last()->message); + + addToTree(*pathComponents.rbegin(), oid, + archive_entry_mode(entry) & S_IXUSR + ? 
GIT_FILEMODE_BLOB_EXECUTABLE + : GIT_FILEMODE_BLOB); + + break; + } + + case AE_IFLNK: { + auto target = archive_entry_symlink(entry); + + git_oid oid; + if (git_blob_create_from_buffer(&oid, *this, target, strlen(target))) + throw Error("creating a blob object for tarball symlink member '%s': %s", path, git_error_last()->message); + + addToTree(*pathComponents.rbegin(), oid, GIT_FILEMODE_LINK); + + break; + } + + default: + throw Error("file '%s' in tarball has unsupported file type", path); + } + } + + updateBuilders({}); + + auto [oid, _name] = popBuilder(); + + return TarballInfo { + .treeHash = toHash(oid), + .lastModified = lastModified + }; + } + std::vector> getSubmodules(const Hash & rev) override; std::string resolveSubmoduleUrl( @@ -449,6 +605,22 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this else throw Error("Commit signature verification on commit %s failed: %s", rev.gitRev(), output); } + + Hash treeHashToNarHash(const Hash & treeHash) override + { + auto accessor = getAccessor(treeHash); + + fetchers::Attrs cacheKey({{"_what", "treeHashToNarHash"}, {"treeHash", treeHash.gitRev()}}); + + if (auto res = fetchers::getCache()->lookup(cacheKey)) + return Hash::parseAny(fetchers::getStrAttr(*res, "narHash"), htSHA256); + + auto narHash = accessor->hashPath(CanonPath::root); + + fetchers::getCache()->upsert(cacheKey, fetchers::Attrs({{"narHash", narHash.to_string(HashFormat::SRI, true)}})); + + return narHash; + } }; ref GitRepo::openRepo(const CanonPath & path, bool create, bool bare) @@ -673,5 +845,11 @@ std::vector> GitRepoImpl::getSubmodules return result; } +ref getTarballCache() +{ + static CanonPath repoDir(getCacheDir() + "/nix/tarball-cache"); + + return make_ref(repoDir, true, true); +} } diff --git a/src/libfetchers/git-utils.hh b/src/libfetchers/git-utils.hh index 1def82071..b8b31530a 100644 --- a/src/libfetchers/git-utils.hh +++ b/src/libfetchers/git-utils.hh @@ -69,6 +69,8 @@ struct GitRepo time_t lastModified; }; + virtual 
TarballInfo importTarball(Source & source) = 0; + virtual bool hasObject(const Hash & oid) = 0; virtual ref getAccessor(const Hash & rev) = 0; @@ -85,6 +87,14 @@ struct GitRepo virtual void verifyCommit( const Hash & rev, const std::vector & publicKeys) = 0; + + /** + * Given a Git tree hash, compute the hash of its NAR + * serialisation. This is memoised on-disk. + */ + virtual Hash treeHashToNarHash(const Hash & treeHash) = 0; }; +ref getTarballCache(); + } diff --git a/src/libfetchers/github.cc b/src/libfetchers/github.cc index 661ad4884..877f6378b 100644 --- a/src/libfetchers/github.cc +++ b/src/libfetchers/github.cc @@ -8,6 +8,7 @@ #include "fetchers.hh" #include "fetch-settings.hh" #include "tarball.hh" +#include "git-utils.hh" #include #include @@ -180,49 +181,87 @@ struct GitArchiveInputScheme : InputScheme return headers; } - virtual Hash getRevFromRef(nix::ref store, const Input & input) const = 0; + struct RefInfo + { + Hash rev; + std::optional treeHash; + }; + + virtual RefInfo getRevFromRef(nix::ref store, const Input & input) const = 0; virtual DownloadUrl getDownloadUrl(const Input & input) const = 0; - std::pair fetch(ref store, const Input & _input) override + std::pair downloadArchive(ref store, Input input) const { - Input input(_input); - if (!maybeGetStrAttr(input.attrs, "ref")) input.attrs.insert_or_assign("ref", "HEAD"); + std::optional upstreamTreeHash; + auto rev = input.getRev(); - if (!rev) rev = getRevFromRef(store, input); + if (!rev) { + auto refInfo = getRevFromRef(store, input); + rev = refInfo.rev; + upstreamTreeHash = refInfo.treeHash; + debug("HEAD revision for '%s' is %s", input.to_string(), refInfo.rev.gitRev()); + } input.attrs.erase("ref"); input.attrs.insert_or_assign("rev", rev->gitRev()); - Attrs lockedAttrs({ - {"type", "git-tarball"}, - {"rev", rev->gitRev()}, - }); + auto cache = getCache(); - if (auto res = getCache()->lookup(store, lockedAttrs)) { - input.attrs.insert_or_assign("lastModified", getIntAttr(res->first, 
"lastModified")); - return {std::move(res->second), input}; + Attrs treeHashKey{{"_what", "gitRevToTreeHash"}, {"rev", rev->gitRev()}}; + Attrs lastModifiedKey{{"_what", "gitRevToLastModified"}, {"rev", rev->gitRev()}}; + + if (auto treeHashAttrs = cache->lookup(treeHashKey)) { + if (auto lastModifiedAttrs = cache->lookup(lastModifiedKey)) { + auto treeHash = getRevAttr(*treeHashAttrs, "treeHash"); + auto lastModified = getIntAttr(*lastModifiedAttrs, "lastModified"); + if (getTarballCache()->hasObject(treeHash)) + return {std::move(input), GitRepo::TarballInfo { .treeHash = treeHash, .lastModified = (time_t) lastModified }}; + else + debug("Git tree with hash '%s' has disappeared from the cache, refetching...", treeHash.gitRev()); + } } + /* Stream the tarball into the tarball cache. */ auto url = getDownloadUrl(input); - auto result = downloadTarball(store, url.url, input.getName(), true, url.headers); + auto source = sinkToSource([&](Sink & sink) { + FileTransferRequest req(url.url); + req.headers = url.headers; + getFileTransfer()->download(std::move(req), sink); + }); - input.attrs.insert_or_assign("lastModified", uint64_t(result.lastModified)); + auto tarballInfo = getTarballCache()->importTarball(*source); - getCache()->add( - store, - lockedAttrs, - { - {"rev", rev->gitRev()}, - {"lastModified", uint64_t(result.lastModified)} - }, - result.storePath, - true); + cache->upsert(treeHashKey, Attrs{{"treeHash", tarballInfo.treeHash.gitRev()}}); + cache->upsert(lastModifiedKey, Attrs{{"lastModified", (uint64_t) tarballInfo.lastModified}}); - return {result.storePath, input}; + if (upstreamTreeHash != tarballInfo.treeHash) + warn( + "Git tree hash mismatch for revision '%s' of '%s': " + "expected '%s', got '%s'. 
" + "This can happen if the Git repository uses submodules.", + rev->gitRev(), input.to_string(), upstreamTreeHash->gitRev(), tarballInfo.treeHash.gitRev()); + + return {std::move(input), tarballInfo}; + } + + std::pair, Input> getAccessor(ref store, const Input & _input) const override + { + auto [input, tarballInfo] = downloadArchive(store, _input); + + input.attrs.insert_or_assign("treeHash", tarballInfo.treeHash.gitRev()); + input.attrs.insert_or_assign("lastModified", uint64_t(tarballInfo.lastModified)); + + auto accessor = getTarballCache()->getAccessor(tarballInfo.treeHash); + + accessor->setPathDisplay("«" + input.to_string() + "»"); + + accessor->fingerprint = input.getFingerprint(store); + + return {accessor, input}; } std::optional experimentalFeature() const override @@ -269,7 +308,7 @@ struct GitHubInputScheme : GitArchiveInputScheme return getStrAttr(input.attrs, "repo"); } - Hash getRevFromRef(nix::ref store, const Input & input) const override + RefInfo getRevFromRef(nix::ref store, const Input & input) const override { auto host = getHost(input); auto url = fmt( @@ -284,9 +323,10 @@ struct GitHubInputScheme : GitArchiveInputScheme readFile( store->toRealPath( downloadFile(store, url, "source", false, headers).storePath))); - auto rev = Hash::parseAny(std::string { json["sha"] }, htSHA1); - debug("HEAD revision for '%s' is %s", url, rev.gitRev()); - return rev; + return RefInfo { + .rev = Hash::parseAny(std::string { json["sha"] }, htSHA1), + .treeHash = Hash::parseAny(std::string { json["commit"]["tree"]["sha"] }, htSHA1) + }; } DownloadUrl getDownloadUrl(const Input & input) const override @@ -343,7 +383,7 @@ struct GitLabInputScheme : GitArchiveInputScheme return std::make_pair(token.substr(0,fldsplit), token.substr(fldsplit+1)); } - Hash getRevFromRef(nix::ref store, const Input & input) const override + RefInfo getRevFromRef(nix::ref store, const Input & input) const override { auto host = maybeGetStrAttr(input.attrs, 
"host").value_or("gitlab.com"); // See rate limiting note below @@ -356,9 +396,9 @@ struct GitLabInputScheme : GitArchiveInputScheme readFile( store->toRealPath( downloadFile(store, url, "source", false, headers).storePath))); - auto rev = Hash::parseAny(std::string(json[0]["id"]), htSHA1); - debug("HEAD revision for '%s' is %s", url, rev.gitRev()); - return rev; + return RefInfo { + .rev = Hash::parseAny(std::string(json[0]["id"]), htSHA1) + }; } DownloadUrl getDownloadUrl(const Input & input) const override @@ -402,7 +442,7 @@ struct SourceHutInputScheme : GitArchiveInputScheme // Once it is implemented, however, should work as expected. } - Hash getRevFromRef(nix::ref store, const Input & input) const override + RefInfo getRevFromRef(nix::ref store, const Input & input) const override { // TODO: In the future, when the sourcehut graphql API is implemented for mercurial // and with anonymous access, this method should use it instead. @@ -445,12 +485,12 @@ struct SourceHutInputScheme : GitArchiveInputScheme id = parsedLine->target; } - if(!id) + if (!id) throw BadURL("in '%d', couldn't find ref '%d'", input.to_string(), ref); - auto rev = Hash::parseAny(*id, htSHA1); - debug("HEAD revision for '%s' is %s", fmt("%s/%s", base_url, ref), rev.gitRev()); - return rev; + return RefInfo { + .rev = Hash::parseAny(*id, htSHA1) + }; } DownloadUrl getDownloadUrl(const Input & input) const override From 043413bb597760eefb983395a10141643db9ee8c Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 29 Nov 2023 12:38:46 +0100 Subject: [PATCH 002/164] boost::span -> std::span --- src/libfetchers/git-utils.cc | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/src/libfetchers/git-utils.cc b/src/libfetchers/git-utils.cc index abad42c29..2324fd9ee 100644 --- a/src/libfetchers/git-utils.cc +++ b/src/libfetchers/git-utils.cc @@ -6,8 +6,6 @@ #include "signals.hh" #include "users.hh" -#include - #include #include #include @@ -28,6 +26,7 @@ #include #include 
#include +#include namespace std { @@ -350,7 +349,7 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this throw Error("adding a file to a tree builder: %s", git_error_last()->message); }; - auto updateBuilders = [&](boost::span names) + auto updateBuilders = [&](std::span names) { // Find the common prefix of pendingDirs and names. size_t prefixLen = 0; @@ -392,7 +391,7 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this auto pathComponents = tokenizeString>(path, "/"); - boost::span pathComponents2{pathComponents}; + std::span pathComponents2{pathComponents}; if (pathComponents2.size() <= componentsToStrip) continue; pathComponents2 = pathComponents2.subspan(componentsToStrip); From 06e106beff4fe9922d1e5debe7a16daec26c398d Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 14 Dec 2023 13:38:10 +0100 Subject: [PATCH 003/164] Disable GitHub tree hash mismatch warning --- src/libfetchers/github.cc | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/libfetchers/github.cc b/src/libfetchers/github.cc index d07aa3cea..0f30723cf 100644 --- a/src/libfetchers/github.cc +++ b/src/libfetchers/github.cc @@ -238,12 +238,14 @@ struct GitArchiveInputScheme : InputScheme cache->upsert(treeHashKey, Attrs{{"treeHash", tarballInfo.treeHash.gitRev()}}); cache->upsert(lastModifiedKey, Attrs{{"lastModified", (uint64_t) tarballInfo.lastModified}}); + #if 0 if (upstreamTreeHash != tarballInfo.treeHash) warn( "Git tree hash mismatch for revision '%s' of '%s': " "expected '%s', got '%s'. 
" "This can happen if the Git repository uses submodules.", rev->gitRev(), input.to_string(), upstreamTreeHash->gitRev(), tarballInfo.treeHash.gitRev()); + #endif return {std::move(input), tarballInfo}; } From a34ec0bd123619277e5682b7f6f8da41166e3eab Mon Sep 17 00:00:00 2001 From: John Ericson Date: Sat, 4 Nov 2023 20:10:55 -0400 Subject: [PATCH 004/164] Include store path exact spec in the docs This is niche, but deserves to be in the manual because it is describing behavior visible to the outside world, not mere implementation details. --- doc/manual/src/SUMMARY.md.in | 1 + doc/manual/src/protocols/store-path.md | 104 +++++++++++++++++++++++++ src/libstore/store-api.cc | 84 ++------------------ 3 files changed, 111 insertions(+), 78 deletions(-) create mode 100644 doc/manual/src/protocols/store-path.md diff --git a/doc/manual/src/SUMMARY.md.in b/doc/manual/src/SUMMARY.md.in index c67ddc6cb..e6390c60a 100644 --- a/doc/manual/src/SUMMARY.md.in +++ b/doc/manual/src/SUMMARY.md.in @@ -106,6 +106,7 @@ - [Architecture and Design](architecture/architecture.md) - [Protocols](protocols/index.md) - [Serving Tarball Flakes](protocols/tarball-fetcher.md) + - [Exact Store Path Specification](protocols/store-path.md) - [Derivation "ATerm" file format](protocols/derivation-aterm.md) - [Glossary](glossary.md) - [Contributing](contributing/index.md) diff --git a/doc/manual/src/protocols/store-path.md b/doc/manual/src/protocols/store-path.md new file mode 100644 index 000000000..d1c35b05e --- /dev/null +++ b/doc/manual/src/protocols/store-path.md @@ -0,0 +1,104 @@ +# Complete Store Path Calculation + +This is the complete specification for how store paths are calculated. + +Regular users do *not* need to know this information --- store paths can be treated as black boxes computed from the properties of the store objects they refer to. +But for those interested in exactly how Nix works, e.g. if they are reimplementing it, this information can be useful. 
+ +```bnf + ::= /- +``` +where + +- `` = base-32 representation of the first 160 bits of a [SHA-256] hash of `
`
+
+  This is the hash part of the store name
+
+- `
` = the string `:sha256:::`;
+
+  Note that it includes the location of the store as well as the name to make sure that changes to either of those are reflected in the hash
+  (e.g. you won't get `/nix/store/-name1` and `/nix/store/-name2`, or `/gnu/store/-name1`, with equal hash parts).
+
+- `` = the name of the store object.
+
+- `` = the [store directory](@docroot@/store/store-path.md#store-directory)
+
+- `` = one of:
+
+  - ```bnf
+    text:::...
+    ```
+
+    for encoded derivations written to the store.
+    ` ... ` are the store paths referenced by this path.
+    Those are encoded in the form described by ``.
+
+  - ```bnf
+    source:::...::self
+    ```
+
+    For paths copied to the store and hashed via a [Nix Archive (NAR)] and [SHA-256][sha-256].
+    Just like in the text case, we can have the store objects referenced by their paths.
+    Additionally, we can have an optional `:self` label to denote self reference.
+
+  - ```bnf
+    output:
+    ```
+
+    For either the outputs built from derivations,
+    paths copied to the store that are single files hashed directly, or hashed via a hash algorithm other than [SHA-256][sha-256]
+    (in that case "source" is used; it's silly, but it's done that way for compatibility).
+
+    `` is the name of the output (usually, "out").
+    For content-addressed store objects, ``, is always "out".
+
+- `` = base-16 representation of a SHA-256 hash of ``
+
+- `` = one of the following based on ``:
+
+  - if `` = `text:...`:
+
+    the string written to the resulting store path.
+
+  - if `` = `source:...`:
+
+    the hash of the [Nix Archive (NAR)] serialization of the [file system object](@docroot@/store/file-system-object.md) of the store object.
+
+  - if `` = `output:`:
+
+    - For input-addressed derivation outputs:
+
+      the [ATerm](@docroot@/protocols/derivation-aterm.md) serialization of the derivation modulo fixed output derivations.
+
+    - For content-addressed store paths:
+
+      the string `fixed:out:::`, where
+
+      - `` = one of:
+
+        - `r:` for hashes of the [Nix Archive (NAR)] (arbitrary file system object) serialization
+
+        - `` (empty string) for hashes of the flat (single file) serialization
+
+      - `` = `md5`, `sha1` or `sha256`
+
+      - `` = base-16 representation of the path or flat hash of the contents of the path (or expected contents of the path for fixed-output derivations).
+
+      Note that `` = `out`, regardless of the name part of the store path.
+      Also note that NAR + SHA-256 must not use this case, and instead must use the `` = `source:...` case.
+
+[Nix Archive (NAR)]: @docroot@/glossary.md#gloss-NAR
+[sha-256]: https://en.m.wikipedia.org/wiki/SHA-256
+
+## Historical Note
+
+The `` = `source:...` and `` = `output:out` grammars technically overlap, in that both can represent data hashed by its SHA-256 NAR serialization.
+
+The original reason for this way of computing names was to prevent name collisions (for security).
+For instance, the thinking was that it shouldn't be feasible to come up with a derivation whose output path collides with the path for a copied source.
+The former would have an `` starting with `output:out:`, while the latter would have an `` starting with `source:`.
+
+Since `64519cfd657d024ae6e2bb74cb21ad21b886fd2a` (2008), however, it was decided that separating derivation-produced vs manually-hashed content-addressed data like this was not useful.
+Now, data that is to be SHA-256 + NAR-serialization content-addressed always uses the `source:...` construction, regardless of how it was produced (manually or by derivation).
+This allows freely switching between using [fixed-output derivations](@docroot@/glossary.md#gloss-fixed-output-derivation) for fetching, and fetching out-of-band and then manually adding.
+It also removes the ambiguity from the grammar.
diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc
index 0c37ecd30..dcfe5991d 100644
--- a/src/libstore/store-api.cc
+++ b/src/libstore/store-api.cc
@@ -65,85 +65,13 @@ StorePath Store::followLinksToStorePath(std::string_view path) const
 }
 
 
-/* Store paths have the following form:
+/*
+The exact specification of store paths is in `protocols/store-path.md`
+in the Nix manual. These few functions implement that specification.
 
-    = /-
-
-   where
-
-    = the location of the Nix store, usually /nix/store
-
-    = a human readable name for the path, typically obtained
-     from the name attribute of the derivation, or the name of the
-     source file from which the store path is created.  For derivation
-     outputs other than the default "out" output, the string "-"
-     is suffixed to .
-
-    = base-32 representation of the first 160 bits of a SHA-256
-     hash of ; the hash part of the store name
-
-    = the string ":sha256:

::"; - note that it includes the location of the store as well as the - name to make sure that changes to either of those are reflected - in the hash (e.g. you won't get /nix/store/-name1 and - /nix/store/-name2 with equal hash parts). - - = one of: - "text:::..." - for plain text files written to the store using - addTextToStore(); ... are the store paths referenced - by this path, in the form described by - "source:::...::self" - for paths copied to the store using addToStore() when recursive - = true and hashAlgo = "sha256". Just like in the text case, we - can have the store paths referenced by the path. - Additionally, we can have an optional :self label to denote self - reference. - "output:" - for either the outputs created by derivations, OR paths copied - to the store using addToStore() with recursive != true or - hashAlgo != "sha256" (in that case "source" is used; it's - silly, but it's done that way for compatibility). is the - name of the output (usually, "out"). - -

= base-16 representation of a SHA-256 hash of - - = - if = "text:...": - the string written to the resulting store path - if = "source:...": - the serialisation of the path from which this store path is - copied, as returned by hashPath() - if = "output:": - for non-fixed derivation outputs: - the derivation (see hashDerivationModulo() in - primops.cc) - for paths copied by addToStore() or produced by fixed-output - derivations: - the string "fixed:out:::", where - = "r:" for recursive (path) hashes, or "" for flat - (file) hashes - = "md5", "sha1" or "sha256" - = base-16 representation of the path or flat hash of - the contents of the path (or expected contents of the - path for fixed-output derivations) - - Note that since an output derivation has always type output, while - something added by addToStore can have type output or source depending - on the hash, this means that the same input can be hashed differently - if added to the store via addToStore or via a derivation, in the sha256 - recursive case. - - It would have been nicer to handle fixed-output derivations under - "source", e.g. have something like "source:", but we're - stuck with this for now... - - The main reason for this way of computing names is to prevent name - collisions (for security). For instance, it shouldn't be feasible - to come up with a derivation whose output path collides with the - path for a copied source. The former would have a starting with - "output:out:", while the latter would have a starting with - "source:". +If changes to these functions go beyond mere implementation changes and +also update the user-visible behavior, please update the specification +to match.
*/ From 1ee42c5b88eb0533ebcf8b2579ec82f2be80e4b2 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 1 Feb 2024 21:46:01 +0100 Subject: [PATCH 005/164] builtin:fetchurl: Ensure a fixed-output derivation Previously we didn't check that the derivation was fixed-output, so you could use builtin:fetchurl to impurely fetch a file. --- src/libstore/builtins/fetchurl.cc | 3 +++ tests/functional/fetchurl.sh | 3 +++ 2 files changed, 6 insertions(+) diff --git a/src/libstore/builtins/fetchurl.cc b/src/libstore/builtins/fetchurl.cc index 2086bd0b9..cf7b2770f 100644 --- a/src/libstore/builtins/fetchurl.cc +++ b/src/libstore/builtins/fetchurl.cc @@ -16,6 +16,9 @@ void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData) writeFile(settings.netrcFile, netrcData, 0600); } + if (!drv.type().isFixed()) + throw Error("'builtin:fetchurl' must be a fixed-output derivation"); + auto getAttr = [&](const std::string & name) { auto i = drv.env.find(name); if (i == drv.env.end()) throw Error("attribute '%s' missing", name); diff --git a/tests/functional/fetchurl.sh b/tests/functional/fetchurl.sh index 8cd40c09f..578f5a34c 100644 --- a/tests/functional/fetchurl.sh +++ b/tests/functional/fetchurl.sh @@ -78,3 +78,6 @@ outPath=$(nix-build -vvvvv --expr 'import ' --argstr url file: test -x $outPath/fetchurl.sh test -L $outPath/symlink + +# Make sure that *not* passing a outputHash fails. 
+expectStderr 100 nix-build --expr '{ url }: builtins.derivation { name = "nix-cache-info"; system = "x86_64-linux"; builder = "builtin:fetchurl"; inherit url; outputHashMode = "flat"; }' --argstr url file://$narxz 2>&1 | grep 'must be a fixed-output derivation' From b8b739e484078863c10c48d031fa8459081ba8b3 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 1 Feb 2024 22:01:02 +0100 Subject: [PATCH 006/164] builtin:fetchurl: Get output hash info from the drv --- src/libstore/builtins/fetchurl.cc | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/src/libstore/builtins/fetchurl.cc b/src/libstore/builtins/fetchurl.cc index cf7b2770f..a9f2e748e 100644 --- a/src/libstore/builtins/fetchurl.cc +++ b/src/libstore/builtins/fetchurl.cc @@ -16,7 +16,12 @@ void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData) writeFile(settings.netrcFile, netrcData, 0600); } - if (!drv.type().isFixed()) + auto out = get(drv.outputs, "out"); + if (!out) + throw Error("'builtin:fetchurl' requires an 'out' output"); + + auto dof = std::get_if(&out->raw); + if (!dof) throw Error("'builtin:fetchurl' must be a fixed-output derivation"); auto getAttr = [&](const std::string & name) { @@ -62,13 +67,11 @@ void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData) }; /* Try the hashed mirrors first. 
*/ - if (getAttr("outputHashMode") == "flat") + if (dof->ca.method.getFileIngestionMethod() == FileIngestionMethod::Flat) for (auto hashedMirror : settings.hashedMirrors.get()) try { if (!hasSuffix(hashedMirror, "/")) hashedMirror += '/'; - std::optional ht = parseHashAlgoOpt(getAttr("outputHashAlgo")); - Hash h = newHashAllowEmpty(getAttr("outputHash"), ht); - fetch(hashedMirror + printHashAlgo(h.algo) + "/" + h.to_string(HashFormat::Base16, false)); + fetch(hashedMirror + printHashAlgo(dof->ca.hash.algo) + "/" + dof->ca.hash.to_string(HashFormat::Base16, false)); return; } catch (Error & e) { debug(e.what()); From c62c21e29af20f1c14a59ab37d7a25dd0b70f69e Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Thu, 1 Feb 2024 13:07:45 -0800 Subject: [PATCH 007/164] Move `PodIdx` to `pos-idx.hh` and `PosTable` to `pos-table.hh` --- src/libexpr/nixexpr.hh | 86 +--------------------------------------- src/libexpr/pos-idx.hh | 48 ++++++++++++++++++++++ src/libexpr/pos-table.hh | 83 ++++++++++++++++++++++++++++++++++++++ 3 files changed, 133 insertions(+), 84 deletions(-) create mode 100644 src/libexpr/pos-idx.hh create mode 100644 src/libexpr/pos-table.hh diff --git a/src/libexpr/nixexpr.hh b/src/libexpr/nixexpr.hh index b6189c2a9..da0ec6e9d 100644 --- a/src/libexpr/nixexpr.hh +++ b/src/libexpr/nixexpr.hh @@ -9,6 +9,8 @@ #include "error.hh" #include "chunked-vector.hh" #include "position.hh" +#include "pos-idx.hh" +#include "pos-table.hh" namespace nix { @@ -29,90 +31,6 @@ public: using EvalError::EvalError; }; -class PosIdx { - friend class PosTable; - -private: - uint32_t id; - - explicit PosIdx(uint32_t id): id(id) {} - -public: - PosIdx() : id(0) {} - - explicit operator bool() const { return id > 0; } - - bool operator <(const PosIdx other) const { return id < other.id; } - - bool operator ==(const PosIdx other) const { return id == other.id; } - - bool operator !=(const PosIdx other) const { return id != other.id; } -}; - -class PosTable -{ -public: - class Origin { 
- friend PosTable; - private: - // must always be invalid by default, add() replaces this with the actual value. - // subsequent add() calls use this index as a token to quickly check whether the - // current origins.back() can be reused or not. - mutable uint32_t idx = std::numeric_limits::max(); - - // Used for searching in PosTable::[]. - explicit Origin(uint32_t idx): idx(idx), origin{std::monostate()} {} - - public: - const Pos::Origin origin; - - Origin(Pos::Origin origin): origin(origin) {} - }; - - struct Offset { - uint32_t line, column; - }; - -private: - std::vector origins; - ChunkedVector offsets; - -public: - PosTable(): offsets(1024) - { - origins.reserve(1024); - } - - PosIdx add(const Origin & origin, uint32_t line, uint32_t column) - { - const auto idx = offsets.add({line, column}).second; - if (origins.empty() || origins.back().idx != origin.idx) { - origin.idx = idx; - origins.push_back(origin); - } - return PosIdx(idx + 1); - } - - Pos operator[](PosIdx p) const - { - if (p.id == 0 || p.id > offsets.size()) - return {}; - const auto idx = p.id - 1; - /* we want the last key <= idx, so we'll take prev(first key > idx). - this is guaranteed to never rewind origin.begin because the first - key is always 0. 
*/ - const auto pastOrigin = std::upper_bound( - origins.begin(), origins.end(), Origin(idx), - [] (const auto & a, const auto & b) { return a.idx < b.idx; }); - const auto origin = *std::prev(pastOrigin); - const auto offset = offsets[idx]; - return {offset.line, offset.column, origin.origin}; - } -}; - -inline PosIdx noPos = {}; - - struct Env; struct Value; class EvalState; diff --git a/src/libexpr/pos-idx.hh b/src/libexpr/pos-idx.hh new file mode 100644 index 000000000..9949f1dc5 --- /dev/null +++ b/src/libexpr/pos-idx.hh @@ -0,0 +1,48 @@ +#pragma once + +#include + +namespace nix { + +class PosIdx +{ + friend class PosTable; + +private: + uint32_t id; + + explicit PosIdx(uint32_t id) + : id(id) + { + } + +public: + PosIdx() + : id(0) + { + } + + explicit operator bool() const + { + return id > 0; + } + + bool operator<(const PosIdx other) const + { + return id < other.id; + } + + bool operator==(const PosIdx other) const + { + return id == other.id; + } + + bool operator!=(const PosIdx other) const + { + return id != other.id; + } +}; + +inline PosIdx noPos = {}; + +} diff --git a/src/libexpr/pos-table.hh b/src/libexpr/pos-table.hh new file mode 100644 index 000000000..1decf3c85 --- /dev/null +++ b/src/libexpr/pos-table.hh @@ -0,0 +1,83 @@ +#pragma once + +#include +#include +#include + +#include "chunked-vector.hh" +#include "pos-idx.hh" +#include "position.hh" + +namespace nix { + +class PosTable +{ +public: + class Origin + { + friend PosTable; + private: + // must always be invalid by default, add() replaces this with the actual value. + // subsequent add() calls use this index as a token to quickly check whether the + // current origins.back() can be reused or not. + mutable uint32_t idx = std::numeric_limits::max(); + + // Used for searching in PosTable::[]. 
+ explicit Origin(uint32_t idx) + : idx(idx) + , origin{std::monostate()} + { + } + + public: + const Pos::Origin origin; + + Origin(Pos::Origin origin) + : origin(origin) + { + } + }; + + struct Offset + { + uint32_t line, column; + }; + +private: + std::vector origins; + ChunkedVector offsets; + +public: + PosTable() + : offsets(1024) + { + origins.reserve(1024); + } + + PosIdx add(const Origin & origin, uint32_t line, uint32_t column) + { + const auto idx = offsets.add({line, column}).second; + if (origins.empty() || origins.back().idx != origin.idx) { + origin.idx = idx; + origins.push_back(origin); + } + return PosIdx(idx + 1); + } + + Pos operator[](PosIdx p) const + { + if (p.id == 0 || p.id > offsets.size()) + return {}; + const auto idx = p.id - 1; + /* we want the last key <= idx, so we'll take prev(first key > idx). + this is guaranteed to never rewind origin.begin because the first + key is always 0. */ + const auto pastOrigin = std::upper_bound( + origins.begin(), origins.end(), Origin(idx), [](const auto & a, const auto & b) { return a.idx < b.idx; }); + const auto origin = *std::prev(pastOrigin); + const auto offset = offsets[idx]; + return {offset.line, offset.column, origin.origin}; + } +}; + +} From c6a89c1a1659b31694c0fbcd21d78a6dd521c732 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Mon, 22 Jan 2024 17:08:29 -0800 Subject: [PATCH 008/164] libexpr: Support structured error classes While preparing PRs like #9753, I've had to change error messages in dozens of code paths. It would be nice if instead of EvalError("expected 'boolean' but found '%1%'", showType(v)) we could write TypeError(v, "boolean") or similar. Then, changing the error message could be a mechanical refactor with the compiler pointing out places the constructor needs to be changed, rather than the error-prone process of grepping through the codebase. 
Structured errors would also help prevent the "same" error from having multiple slightly different messages, and could be a first step towards error codes / an error index. This PR reworks the exception infrastructure in `libexpr` to support exception types with different constructor signatures than `BaseError`. Actually refactoring the exceptions to use structured data will come in a future PR (this one is big enough already, as it has to touch every exception in `libexpr`). The core design is in `eval-error.hh`. Generally, errors like this: state.error("'%s' is not a string", getAttrPathStr()) .debugThrow() are transformed like this: state.error("'%s' is not a string", getAttrPathStr()) .debugThrow() The type annotation has moved from `ErrorBuilder::debugThrow` to `EvalState::error`. --- src/libcmd/repl.cc | 2 - src/libexpr/attr-path.cc | 8 +- src/libexpr/eval-cache.cc | 30 +-- src/libexpr/eval-error.cc | 113 ++++++++ src/libexpr/eval-error.hh | 118 +++++++++ src/libexpr/eval-inline.hh | 19 +- src/libexpr/eval.cc | 217 +++++++--------- src/libexpr/eval.hh | 91 +------ src/libexpr/flake/flake.cc | 16 +- src/libexpr/get-drvs.cc | 5 +- src/libexpr/json-to-value.cc | 4 +- src/libexpr/json-to-value.hh | 7 +- src/libexpr/lexer.l | 12 +- src/libexpr/nixexpr.cc | 8 +- src/libexpr/nixexpr.hh | 17 +- src/libexpr/parser-state.hh | 8 +- src/libexpr/parser.y | 8 +- src/libexpr/primops.cc | 244 ++++++++---------- src/libexpr/primops/context.cc | 50 ++-- src/libexpr/primops/fetchClosure.cc | 22 +- src/libexpr/primops/fetchMercurial.cc | 10 +- src/libexpr/primops/fetchTree.cc | 68 ++--- src/libexpr/primops/fromTOML.cc | 5 +- src/libexpr/value-to-json.cc | 18 +- src/libexpr/value.hh | 2 +- src/libmain/shared.cc | 2 +- src/libstore/build/entry-points.cc | 4 +- src/libstore/daemon.cc | 2 +- src/libutil/error.cc | 6 +- src/libutil/error.hh | 27 +- src/libutil/logging.cc | 2 +- src/nix-store/nix-store.cc | 4 +- src/nix/eval.cc | 2 +- src/nix/flake.cc | 6 +- 
tests/functional/fetchGit.sh | 4 +- .../lang/eval-fail-attr-name-type.err.exp | 5 + .../eval-fail-fromTOML-timestamps.err.exp | 2 +- .../functional/lang/eval-fail-toJSON.err.exp | 5 + .../eval-fail-using-set-as-attr-name.err.exp | 5 + tests/unit/libexpr/error_traces.cc | 20 +- 40 files changed, 653 insertions(+), 545 deletions(-) create mode 100644 src/libexpr/eval-error.cc create mode 100644 src/libexpr/eval-error.hh diff --git a/src/libcmd/repl.cc b/src/libcmd/repl.cc index d7d8f9819..714d3adb5 100644 --- a/src/libcmd/repl.cc +++ b/src/libcmd/repl.cc @@ -422,8 +422,6 @@ StringSet NixRepl::completePrefix(const std::string & prefix) // Quietly ignore parse errors. } catch (EvalError & e) { // Quietly ignore evaluation errors. - } catch (UndefinedVarError & e) { - // Quietly ignore undefined variable errors. } catch (BadURL & e) { // Quietly ignore BadURL flake-related errors. } diff --git a/src/libexpr/attr-path.cc b/src/libexpr/attr-path.cc index 7481a2232..d6befd362 100644 --- a/src/libexpr/attr-path.cc +++ b/src/libexpr/attr-path.cc @@ -65,10 +65,10 @@ std::pair findAlongAttrPath(EvalState & state, const std::strin if (!attrIndex) { if (v->type() != nAttrs) - throw TypeError( + state.error( "the expression selected by the selection path '%1%' should be a set but is %2%", attrPath, - showType(*v)); + showType(*v)).debugThrow(); if (attr.empty()) throw Error("empty attribute name in selection path '%1%'", attrPath); @@ -88,10 +88,10 @@ std::pair findAlongAttrPath(EvalState & state, const std::strin else { if (!v->isList()) - throw TypeError( + state.error( "the expression selected by the selection path '%1%' should be a list but is %2%", attrPath, - showType(*v)); + showType(*v)).debugThrow(); if (*attrIndex >= v->listSize()) throw AttrPathNotFound("list index %1% in selection path '%2%' is out of range", *attrIndex, attrPath); diff --git a/src/libexpr/eval-cache.cc b/src/libexpr/eval-cache.cc index 5808d58b6..2fc69e796 100644 --- a/src/libexpr/eval-cache.cc +++ 
b/src/libexpr/eval-cache.cc @@ -491,7 +491,7 @@ std::shared_ptr AttrCursor::maybeGetAttr(Symbol name, bool forceErro if (forceErrors) debug("reevaluating failed cached attribute '%s'", getAttrPathStr(name)); else - throw CachedEvalError("cached failure of attribute '%s'", getAttrPathStr(name)); + throw CachedEvalError(root->state, "cached failure of attribute '%s'", getAttrPathStr(name)); } else return std::make_shared(root, std::make_pair(shared_from_this(), name), nullptr, std::move(attr)); @@ -500,7 +500,7 @@ std::shared_ptr AttrCursor::maybeGetAttr(Symbol name, bool forceErro // evaluate to see whether 'name' exists } else return nullptr; - //throw TypeError("'%s' is not an attribute set", getAttrPathStr()); + //error("'%s' is not an attribute set", getAttrPathStr()).debugThrow(); } } @@ -508,7 +508,7 @@ std::shared_ptr AttrCursor::maybeGetAttr(Symbol name, bool forceErro if (v.type() != nAttrs) return nullptr; - //throw TypeError("'%s' is not an attribute set", getAttrPathStr()); + //error("'%s' is not an attribute set", getAttrPathStr()).debugThrow(); auto attr = v.attrs->get(name); @@ -574,14 +574,14 @@ std::string AttrCursor::getString() debug("using cached string attribute '%s'", getAttrPathStr()); return s->first; } else - root->state.error("'%s' is not a string", getAttrPathStr()).debugThrow(); + root->state.error("'%s' is not a string", getAttrPathStr()).debugThrow(); } } auto & v = forceValue(); if (v.type() != nString && v.type() != nPath) - root->state.error("'%s' is not a string but %s", getAttrPathStr()).debugThrow(); + root->state.error("'%s' is not a string but %s", getAttrPathStr()).debugThrow(); return v.type() == nString ? 
v.c_str() : v.path().to_string(); } @@ -616,7 +616,7 @@ string_t AttrCursor::getStringWithContext() return *s; } } else - root->state.error("'%s' is not a string", getAttrPathStr()).debugThrow(); + root->state.error("'%s' is not a string", getAttrPathStr()).debugThrow(); } } @@ -630,7 +630,7 @@ string_t AttrCursor::getStringWithContext() else if (v.type() == nPath) return {v.path().to_string(), {}}; else - root->state.error("'%s' is not a string but %s", getAttrPathStr()).debugThrow(); + root->state.error("'%s' is not a string but %s", getAttrPathStr()).debugThrow(); } bool AttrCursor::getBool() @@ -643,14 +643,14 @@ bool AttrCursor::getBool() debug("using cached Boolean attribute '%s'", getAttrPathStr()); return *b; } else - root->state.error("'%s' is not a Boolean", getAttrPathStr()).debugThrow(); + root->state.error("'%s' is not a Boolean", getAttrPathStr()).debugThrow(); } } auto & v = forceValue(); if (v.type() != nBool) - root->state.error("'%s' is not a Boolean", getAttrPathStr()).debugThrow(); + root->state.error("'%s' is not a Boolean", getAttrPathStr()).debugThrow(); return v.boolean; } @@ -665,14 +665,14 @@ NixInt AttrCursor::getInt() debug("using cached integer attribute '%s'", getAttrPathStr()); return i->x; } else - throw TypeError("'%s' is not an integer", getAttrPathStr()); + root->state.error("'%s' is not an integer", getAttrPathStr()).debugThrow(); } } auto & v = forceValue(); if (v.type() != nInt) - throw TypeError("'%s' is not an integer", getAttrPathStr()); + root->state.error("'%s' is not an integer", getAttrPathStr()).debugThrow(); return v.integer; } @@ -687,7 +687,7 @@ std::vector AttrCursor::getListOfStrings() debug("using cached list of strings attribute '%s'", getAttrPathStr()); return *l; } else - throw TypeError("'%s' is not a list of strings", getAttrPathStr()); + root->state.error("'%s' is not a list of strings", getAttrPathStr()).debugThrow(); } } @@ -697,7 +697,7 @@ std::vector AttrCursor::getListOfStrings() 
root->state.forceValue(v, noPos); if (v.type() != nList) - throw TypeError("'%s' is not a list", getAttrPathStr()); + root->state.error("'%s' is not a list", getAttrPathStr()).debugThrow(); std::vector res; @@ -720,14 +720,14 @@ std::vector AttrCursor::getAttrs() debug("using cached attrset attribute '%s'", getAttrPathStr()); return *attrs; } else - root->state.error("'%s' is not an attribute set", getAttrPathStr()).debugThrow(); + root->state.error("'%s' is not an attribute set", getAttrPathStr()).debugThrow(); } } auto & v = forceValue(); if (v.type() != nAttrs) - root->state.error("'%s' is not an attribute set", getAttrPathStr()).debugThrow(); + root->state.error("'%s' is not an attribute set", getAttrPathStr()).debugThrow(); std::vector attrs; for (auto & attr : *getValue().attrs) diff --git a/src/libexpr/eval-error.cc b/src/libexpr/eval-error.cc new file mode 100644 index 000000000..b9411cbf4 --- /dev/null +++ b/src/libexpr/eval-error.cc @@ -0,0 +1,113 @@ +#include "eval-error.hh" +#include "eval.hh" +#include "value.hh" + +namespace nix { + +template +EvalErrorBuilder & EvalErrorBuilder::withExitStatus(unsigned int exitStatus) +{ + error.withExitStatus(exitStatus); + return *this; +} + +template +EvalErrorBuilder & EvalErrorBuilder::atPos(PosIdx pos) +{ + error.err.pos = error.state.positions[pos]; + return *this; +} + +template +EvalErrorBuilder & EvalErrorBuilder::atPos(Value & value, PosIdx fallback) +{ + return atPos(value.determinePos(fallback)); +} + +template +EvalErrorBuilder & EvalErrorBuilder::withTrace(PosIdx pos, const std::string_view text) +{ + error.err.traces.push_front( + Trace{.pos = error.state.positions[pos], .hint = hintfmt(std::string(text)), .frame = false}); + return *this; +} + +template +EvalErrorBuilder & EvalErrorBuilder::withFrameTrace(PosIdx pos, const std::string_view text) +{ + error.err.traces.push_front( + Trace{.pos = error.state.positions[pos], .hint = hintformat(std::string(text)), .frame = true}); + return *this; +} + 
+template +EvalErrorBuilder & EvalErrorBuilder::withSuggestions(Suggestions & s) +{ + error.err.suggestions = s; + return *this; +} + +template +EvalErrorBuilder & EvalErrorBuilder::withFrame(const Env & env, const Expr & expr) +{ + // NOTE: This is abusing side-effects. + // TODO: check compatibility with nested debugger calls. + // TODO: What side-effects?? + error.state.debugTraces.push_front(DebugTrace{ + .pos = error.state.positions[expr.getPos()], + .expr = expr, + .env = env, + .hint = hintformat("Fake frame for debugging purposes"), + .isError = true}); + return *this; +} + +template +EvalErrorBuilder & EvalErrorBuilder::addTrace(PosIdx pos, hintformat hint, bool frame) +{ + error.addTrace(error.state.positions[pos], hint, frame); + return *this; +} + +template +template +EvalErrorBuilder & +EvalErrorBuilder::addTrace(PosIdx pos, std::string_view formatString, const Args &... formatArgs) +{ + + addTrace(error.state.positions[pos], hintfmt(std::string(formatString), formatArgs...)); + return *this; +} + +template +void EvalErrorBuilder::debugThrow() +{ + if (error.state.debugRepl && !error.state.debugTraces.empty()) { + const DebugTrace & last = error.state.debugTraces.front(); + const Env * env = &last.env; + const Expr * expr = &last.expr; + error.state.runDebugRepl(&error, *env, *expr); + } + + // `EvalState` is the only class that can construct an `EvalErrorBuilder`, + // and it does so in dynamic storage. This is the final method called on + // any such instancve and must delete itself before throwing the underlying + // error. 
+ auto error = std::move(this->error); + delete this; + + throw error; +} + +template class EvalErrorBuilder; +template class EvalErrorBuilder; +template class EvalErrorBuilder; +template class EvalErrorBuilder; +template class EvalErrorBuilder; +template class EvalErrorBuilder; +template class EvalErrorBuilder; +template class EvalErrorBuilder; +template class EvalErrorBuilder; +template class EvalErrorBuilder; + +} diff --git a/src/libexpr/eval-error.hh b/src/libexpr/eval-error.hh new file mode 100644 index 000000000..ee69dce64 --- /dev/null +++ b/src/libexpr/eval-error.hh @@ -0,0 +1,118 @@ +#pragma once + +#include + +#include "error.hh" +#include "pos-idx.hh" + +namespace nix { + +struct Env; +struct Expr; +struct Value; + +class EvalState; +template +class EvalErrorBuilder; + +class EvalError : public Error +{ + template + friend class EvalErrorBuilder; +public: + EvalState & state; + + EvalError(EvalState & state, ErrorInfo && errorInfo) + : Error(errorInfo) + , state(state) + { + } + + template + explicit EvalError(EvalState & state, const std::string & formatString, const Args &... formatArgs) + : Error(formatString, formatArgs...) + , state(state) + { + } +}; + +MakeError(ParseError, Error); +MakeError(AssertionError, EvalError); +MakeError(ThrownError, AssertionError); +MakeError(Abort, EvalError); +MakeError(TypeError, EvalError); +MakeError(UndefinedVarError, EvalError); +MakeError(MissingArgumentError, EvalError); +MakeError(CachedEvalError, EvalError); +MakeError(InfiniteRecursionError, EvalError); + +struct InvalidPathError : public EvalError +{ +public: + Path path; + InvalidPathError(EvalState & state, const Path & path) + : EvalError(state, "path '%s' is not valid", path) + { + } +}; + +template +class EvalErrorBuilder final +{ + friend class EvalState; + + template + explicit EvalErrorBuilder(EvalState & state, const Args &... 
args) + : error(T(state, args...)) + { + } + +public: + T error; + + [[nodiscard, gnu::noinline]] EvalErrorBuilder & withExitStatus(unsigned int exitStatus); + + [[nodiscard, gnu::noinline]] EvalErrorBuilder & atPos(PosIdx pos); + + [[nodiscard, gnu::noinline]] EvalErrorBuilder & atPos(Value & value, PosIdx fallback = noPos); + + [[nodiscard, gnu::noinline]] EvalErrorBuilder & withTrace(PosIdx pos, const std::string_view text); + + [[nodiscard, gnu::noinline]] EvalErrorBuilder & withFrameTrace(PosIdx pos, const std::string_view text); + + [[nodiscard, gnu::noinline]] EvalErrorBuilder & withSuggestions(Suggestions & s); + + [[nodiscard, gnu::noinline]] EvalErrorBuilder & withFrame(const Env & e, const Expr & ex); + + [[nodiscard, gnu::noinline]] EvalErrorBuilder & addTrace(PosIdx pos, hintformat hint, bool frame = false); + + template + [[nodiscard, gnu::noinline]] EvalErrorBuilder & + addTrace(PosIdx pos, std::string_view formatString, const Args &... formatArgs); + + [[gnu::noinline, gnu::noreturn]] void debugThrow(); +}; + +/** + * The size needed to allocate any `EvalErrorBuilder`. + * + * The list of classes here needs to be kept in sync with the list of `template + * class` declarations in `eval-error.cc`. + * + * This is used by `EvalState` to preallocate a buffer of sufficient size for + * any `EvalErrorBuilder` to avoid allocating while evaluating Nix code. 
+ */ +constexpr size_t EVAL_ERROR_BUILDER_SIZE = std::max({ + sizeof(EvalErrorBuilder), + sizeof(EvalErrorBuilder), + sizeof(EvalErrorBuilder), + sizeof(EvalErrorBuilder), + sizeof(EvalErrorBuilder), + sizeof(EvalErrorBuilder), + sizeof(EvalErrorBuilder), + sizeof(EvalErrorBuilder), + sizeof(EvalErrorBuilder), + sizeof(EvalErrorBuilder), +}); + +} diff --git a/src/libexpr/eval-inline.hh b/src/libexpr/eval-inline.hh index 42cb68bbe..03320c7c9 100644 --- a/src/libexpr/eval-inline.hh +++ b/src/libexpr/eval-inline.hh @@ -3,6 +3,7 @@ #include "print.hh" #include "eval.hh" +#include "eval-error.hh" namespace nix { @@ -115,10 +116,11 @@ inline void EvalState::forceAttrs(Value & v, Callable getPos, std::string_view e PosIdx pos = getPos(); forceValue(v, pos); if (v.type() != nAttrs) { - error("expected a set but found %1%: %2%", - showType(v), - ValuePrinter(*this, v, errorPrintOptions)) - .withTrace(pos, errorCtx).debugThrow(); + error( + "expected a set but found %1%: %2%", + showType(v), + ValuePrinter(*this, v, errorPrintOptions) + ).withTrace(pos, errorCtx).debugThrow(); } } @@ -128,10 +130,11 @@ inline void EvalState::forceList(Value & v, const PosIdx pos, std::string_view e { forceValue(v, pos); if (!v.isList()) { - error("expected a list but found %1%: %2%", - showType(v), - ValuePrinter(*this, v, errorPrintOptions)) - .withTrace(pos, errorCtx).debugThrow(); + error( + "expected a list but found %1%: %2%", + showType(v), + ValuePrinter(*this, v, errorPrintOptions) + ).withTrace(pos, errorCtx).debugThrow(); } } diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 91fd3ddf8..ded4415cc 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -339,46 +339,6 @@ void initGC() gcInitialised = true; } - -ErrorBuilder & ErrorBuilder::atPos(PosIdx pos) -{ - info.errPos = state.positions[pos]; - return *this; -} - -ErrorBuilder & ErrorBuilder::withTrace(PosIdx pos, const std::string_view text) -{ - info.traces.push_front(Trace{ .pos = state.positions[pos], .hint 
= hintformat(std::string(text)), .frame = false }); - return *this; -} - -ErrorBuilder & ErrorBuilder::withFrameTrace(PosIdx pos, const std::string_view text) -{ - info.traces.push_front(Trace{ .pos = state.positions[pos], .hint = hintformat(std::string(text)), .frame = true }); - return *this; -} - -ErrorBuilder & ErrorBuilder::withSuggestions(Suggestions & s) -{ - info.suggestions = s; - return *this; -} - -ErrorBuilder & ErrorBuilder::withFrame(const Env & env, const Expr & expr) -{ - // NOTE: This is abusing side-effects. - // TODO: check compatibility with nested debugger calls. - state.debugTraces.push_front(DebugTrace { - .pos = nullptr, - .expr = expr, - .env = env, - .hint = hintformat("Fake frame for debugging purposes"), - .isError = true - }); - return *this; -} - - EvalState::EvalState( const SearchPath & _searchPath, ref store, @@ -811,7 +771,7 @@ void EvalState::runDebugRepl(const Error * error, const Env & env, const Expr & ? std::make_unique( *this, DebugTrace { - .pos = error->info().errPos ? error->info().errPos : positions[expr.getPos()], + .pos = error->info().pos ? error->info().pos : positions[expr.getPos()], .expr = expr, .env = env, .hint = error->info().msg, @@ -930,7 +890,7 @@ inline Value * EvalState::lookupVar(Env * env, const ExprVar & var, bool noEval) return j->value; } if (!fromWith->parentWith) - error("undefined variable '%1%'", symbols[var.name]).atPos(var.pos).withFrame(*env, var).debugThrow(); + error("undefined variable '%1%'", symbols[var.name]).atPos(var.pos).withFrame(*env, var).debugThrow(); for (size_t l = fromWith->prevWith; l; --l, env = env->up) ; fromWith = fromWith->parentWith; } @@ -1136,7 +1096,7 @@ void EvalState::evalFile(const SourcePath & path, Value & v, bool mustBeTrivial) // computation. 
if (mustBeTrivial && !(dynamic_cast(e))) - error("file '%s' must be an attribute set", path).debugThrow(); + error("file '%s' must be an attribute set", path).debugThrow(); eval(e, v); } catch (Error & e) { addErrorTrace(e, "while evaluating the file '%1%':", resolvedPath.to_string()); @@ -1167,10 +1127,11 @@ inline bool EvalState::evalBool(Env & env, Expr * e, const PosIdx pos, std::stri Value v; e->eval(*this, env, v); if (v.type() != nBool) - error("expected a Boolean but found %1%: %2%", - showType(v), - ValuePrinter(*this, v, errorPrintOptions)) - .withFrame(env, *e).debugThrow(); + error( + "expected a Boolean but found %1%: %2%", + showType(v), + ValuePrinter(*this, v, errorPrintOptions) + ).atPos(pos).withFrame(env, *e).debugThrow(); return v.boolean; } catch (Error & e) { e.addTrace(positions[pos], errorCtx); @@ -1184,10 +1145,11 @@ inline void EvalState::evalAttrs(Env & env, Expr * e, Value & v, const PosIdx po try { e->eval(*this, env, v); if (v.type() != nAttrs) - error("expected a set but found %1%: %2%", - showType(v), - ValuePrinter(*this, v, errorPrintOptions)) - .withFrame(env, *e).debugThrow(); + error( + "expected a set but found %1%: %2%", + showType(v), + ValuePrinter(*this, v, errorPrintOptions) + ).withFrame(env, *e).debugThrow(); } catch (Error & e) { e.addTrace(positions[pos], errorCtx); throw; @@ -1296,7 +1258,7 @@ void ExprAttrs::eval(EvalState & state, Env & env, Value & v) auto nameSym = state.symbols.create(nameVal.string_view()); Bindings::iterator j = v.attrs->find(nameSym); if (j != v.attrs->end()) - state.error("dynamic attribute '%1%' already defined at %2%", state.symbols[nameSym], state.positions[j->pos]).atPos(i.pos).withFrame(env, *this).debugThrow(); + state.error("dynamic attribute '%1%' already defined at %2%", state.symbols[nameSym], state.positions[j->pos]).atPos(i.pos).withFrame(env, *this).debugThrow(); i.valueExpr->setName(nameSym); /* Keep sorted order so find can catch duplicates */ @@ -1408,8 +1370,8 @@ void 
ExprSelect::eval(EvalState & state, Env & env, Value & v) for (auto & attr : *vAttrs->attrs) allAttrNames.insert(state.symbols[attr.name]); auto suggestions = Suggestions::bestMatches(allAttrNames, state.symbols[name]); - state.error("attribute '%1%' missing", state.symbols[name]) - .atPos(pos).withSuggestions(suggestions).withFrame(env, *this).debugThrow(); + state.error("attribute '%1%' missing", state.symbols[name]) + .atPos(pos).withSuggestions(suggestions).withFrame(env, *this).debugThrow(); } } vAttrs = j->value; @@ -1482,7 +1444,7 @@ public: void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value & vRes, const PosIdx pos) { if (callDepth > evalSettings.maxCallDepth) - error("stack overflow; max-call-depth exceeded").atPos(pos).template debugThrow(); + error("stack overflow; max-call-depth exceeded").atPos(pos).debugThrow(); CallDepth _level(callDepth); auto trace = evalSettings.traceFunctionCalls @@ -1540,13 +1502,13 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value & auto j = args[0]->attrs->get(i.name); if (!j) { if (!i.def) { - error("function '%1%' called without required argument '%2%'", + error("function '%1%' called without required argument '%2%'", (lambda.name ? std::string(symbols[lambda.name]) : "anonymous lambda"), symbols[i.name]) .atPos(lambda.pos) .withTrace(pos, "from call site") .withFrame(*fun.lambda.env, lambda) - .debugThrow(); + .debugThrow(); } env2.values[displ++] = i.def->maybeThunk(*this, env2); } else { @@ -1566,14 +1528,14 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value & for (auto & formal : lambda.formals->formals) formalNames.insert(symbols[formal.name]); auto suggestions = Suggestions::bestMatches(formalNames, symbols[i.name]); - error("function '%1%' called with unexpected argument '%2%'", + error("function '%1%' called with unexpected argument '%2%'", (lambda.name ? 
std::string(symbols[lambda.name]) : "anonymous lambda"), symbols[i.name]) .atPos(lambda.pos) .withTrace(pos, "from call site") .withSuggestions(suggestions) .withFrame(*fun.lambda.env, lambda) - .debugThrow(); + .debugThrow(); } abort(); // can't happen } @@ -1705,11 +1667,12 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value & } else - error("attempt to call something which is not a function but %1%: %2%", + error( + "attempt to call something which is not a function but %1%: %2%", showType(vCur), ValuePrinter(*this, vCur, errorPrintOptions)) .atPos(pos) - .debugThrow(); + .debugThrow(); } vRes = vCur; @@ -1779,12 +1742,12 @@ void EvalState::autoCallFunction(Bindings & args, Value & fun, Value & res) if (j != args.end()) { attrs.insert(*j); } else if (!i.def) { - error(R"(cannot evaluate a function that has an argument without a value ('%1%') + error(R"(cannot evaluate a function that has an argument without a value ('%1%') Nix attempted to evaluate a function as a top level expression; in this case it must have its arguments supplied either by default values, or passed explicitly with '--arg' or '--argstr'. 
See https://nixos.org/manual/nix/stable/language/constructs.html#functions.)", symbols[i.name]) - .atPos(i.pos).withFrame(*fun.lambda.env, *fun.lambda.fun).debugThrow(); + .atPos(i.pos).withFrame(*fun.lambda.env, *fun.lambda.fun).debugThrow(); } } } @@ -1815,7 +1778,7 @@ void ExprAssert::eval(EvalState & state, Env & env, Value & v) if (!state.evalBool(env, cond, pos, "in the condition of the assert statement")) { std::ostringstream out; cond->show(state.symbols, out); - state.error("assertion '%1%' failed", out.str()).atPos(pos).withFrame(env, *this).debugThrow(); + state.error("assertion '%1%' failed", out.str()).atPos(pos).withFrame(env, *this).debugThrow(); } body->eval(state, env, v); } @@ -1993,14 +1956,14 @@ void ExprConcatStrings::eval(EvalState & state, Env & env, Value & v) nf = n; nf += vTmp.fpoint; } else - state.error("cannot add %1% to an integer", showType(vTmp)).atPos(i_pos).withFrame(env, *this).debugThrow(); + state.error("cannot add %1% to an integer", showType(vTmp)).atPos(i_pos).withFrame(env, *this).debugThrow(); } else if (firstType == nFloat) { if (vTmp.type() == nInt) { nf += vTmp.integer; } else if (vTmp.type() == nFloat) { nf += vTmp.fpoint; } else - state.error("cannot add %1% to a float", showType(vTmp)).atPos(i_pos).withFrame(env, *this).debugThrow(); + state.error("cannot add %1% to a float", showType(vTmp)).atPos(i_pos).withFrame(env, *this).debugThrow(); } else { if (s.empty()) s.reserve(es->size()); /* skip canonization of first path, which would only be not @@ -2022,7 +1985,7 @@ void ExprConcatStrings::eval(EvalState & state, Env & env, Value & v) v.mkFloat(nf); else if (firstType == nPath) { if (!context.empty()) - state.error("a string that refers to a store path cannot be appended to a path").atPos(pos).withFrame(env, *this).debugThrow(); + state.error("a string that refers to a store path cannot be appended to a path").atPos(pos).withFrame(env, *this).debugThrow(); v.mkPath(state.rootPath(CanonPath(canonPath(str())))); } else 
v.mkStringMove(c_str(), context); @@ -2037,8 +2000,9 @@ void ExprPos::eval(EvalState & state, Env & env, Value & v) void ExprBlackHole::eval(EvalState & state, Env & env, Value & v) { - state.error("infinite recursion encountered") - .debugThrow(); + state.error("infinite recursion encountered") + .atPos(v.determinePos(noPos)) + .debugThrow(); } // always force this to be separate, otherwise forceValue may inline it and take @@ -2052,7 +2016,7 @@ void EvalState::tryFixupBlackHolePos(Value & v, PosIdx pos) try { std::rethrow_exception(e); } catch (InfiniteRecursionError & e) { - e.err.errPos = positions[pos]; + e.atPos(positions[pos]); } catch (...) { } } @@ -2100,15 +2064,18 @@ NixInt EvalState::forceInt(Value & v, const PosIdx pos, std::string_view errorCt try { forceValue(v, pos); if (v.type() != nInt) - error("expected an integer but found %1%: %2%", - showType(v), - ValuePrinter(*this, v, errorPrintOptions)) - .debugThrow(); + error( + "expected an integer but found %1%: %2%", + showType(v), + ValuePrinter(*this, v, errorPrintOptions) + ).atPos(pos).debugThrow(); return v.integer; } catch (Error & e) { e.addTrace(positions[pos], errorCtx); throw; } + + return v.integer; } @@ -2119,10 +2086,11 @@ NixFloat EvalState::forceFloat(Value & v, const PosIdx pos, std::string_view err if (v.type() == nInt) return v.integer; else if (v.type() != nFloat) - error("expected a float but found %1%: %2%", - showType(v), - ValuePrinter(*this, v, errorPrintOptions)) - .debugThrow(); + error( + "expected a float but found %1%: %2%", + showType(v), + ValuePrinter(*this, v, errorPrintOptions) + ).atPos(pos).debugThrow(); return v.fpoint; } catch (Error & e) { e.addTrace(positions[pos], errorCtx); @@ -2136,15 +2104,18 @@ bool EvalState::forceBool(Value & v, const PosIdx pos, std::string_view errorCtx try { forceValue(v, pos); if (v.type() != nBool) - error("expected a Boolean but found %1%: %2%", - showType(v), - ValuePrinter(*this, v, errorPrintOptions)) - .debugThrow(); + error( + 
"expected a Boolean but found %1%: %2%", + showType(v), + ValuePrinter(*this, v, errorPrintOptions) + ).atPos(pos).debugThrow(); return v.boolean; } catch (Error & e) { e.addTrace(positions[pos], errorCtx); throw; } + + return v.boolean; } @@ -2159,10 +2130,11 @@ void EvalState::forceFunction(Value & v, const PosIdx pos, std::string_view erro try { forceValue(v, pos); if (v.type() != nFunction && !isFunctor(v)) - error("expected a function but found %1%: %2%", - showType(v), - ValuePrinter(*this, v, errorPrintOptions)) - .debugThrow(); + error( + "expected a function but found %1%: %2%", + showType(v), + ValuePrinter(*this, v, errorPrintOptions) + ).atPos(pos).debugThrow(); } catch (Error & e) { e.addTrace(positions[pos], errorCtx); throw; @@ -2175,10 +2147,11 @@ std::string_view EvalState::forceString(Value & v, const PosIdx pos, std::string try { forceValue(v, pos); if (v.type() != nString) - error("expected a string but found %1%: %2%", - showType(v), - ValuePrinter(*this, v, errorPrintOptions)) - .debugThrow(); + error( + "expected a string but found %1%: %2%", + showType(v), + ValuePrinter(*this, v, errorPrintOptions) + ).atPos(pos).debugThrow(); return v.string_view(); } catch (Error & e) { e.addTrace(positions[pos], errorCtx); @@ -2207,7 +2180,7 @@ std::string_view EvalState::forceStringNoCtx(Value & v, const PosIdx pos, std::s { auto s = forceString(v, pos, errorCtx); if (v.context()) { - error("the string '%1%' is not allowed to refer to a store path (such as '%2%')", v.string_view(), v.context()[0]).withTrace(pos, errorCtx).debugThrow(); + error("the string '%1%' is not allowed to refer to a store path (such as '%2%')", v.string_view(), v.context()[0]).withTrace(pos, errorCtx).debugThrow(); } return s; } @@ -2272,11 +2245,13 @@ BackedStringView EvalState::coerceToString( return std::move(*maybeString); auto i = v.attrs->find(sOutPath); if (i == v.attrs->end()) { - error("cannot coerce %1% to a string: %2%", - showType(v), - ValuePrinter(*this, v, 
errorPrintOptions)) + error( + "cannot coerce %1% to a string: %2%", + showType(v), + ValuePrinter(*this, v, errorPrintOptions) + ) .withTrace(pos, errorCtx) - .debugThrow(); + .debugThrow(); } return coerceToString(pos, *i->value, context, errorCtx, coerceMore, copyToStore, canonicalizePath); @@ -2284,7 +2259,7 @@ BackedStringView EvalState::coerceToString( if (v.type() == nExternal) { try { - return v.external->coerceToString(positions[pos], context, coerceMore, copyToStore); + return v.external->coerceToString(*this, pos, context, coerceMore, copyToStore); } catch (Error & e) { e.addTrace(nullptr, errorCtx); throw; @@ -2320,18 +2295,19 @@ BackedStringView EvalState::coerceToString( } } - error("cannot coerce %1% to a string: %2%", - showType(v), - ValuePrinter(*this, v, errorPrintOptions)) + error("cannot coerce %1% to a string: %2%", + showType(v), + ValuePrinter(*this, v, errorPrintOptions) + ) .withTrace(pos, errorCtx) - .debugThrow(); + .debugThrow(); } StorePath EvalState::copyPathToStore(NixStringContext & context, const SourcePath & path) { if (nix::isDerivation(path.path.abs())) - error("file names are not allowed to end in '%1%'", drvExtension).debugThrow(); + error("file names are not allowed to end in '%1%'", drvExtension).debugThrow(); auto i = srcToStore.find(path); @@ -2380,7 +2356,7 @@ SourcePath EvalState::coerceToPath(const PosIdx pos, Value & v, NixStringContext relative to the root filesystem. 
*/ auto path = coerceToString(pos, v, context, errorCtx, false, false, true).toOwned(); if (path == "" || path[0] != '/') - error("string '%1%' doesn't represent an absolute path", path).withTrace(pos, errorCtx).debugThrow(); + error("string '%1%' doesn't represent an absolute path", path).withTrace(pos, errorCtx).debugThrow(); return rootPath(CanonPath(path)); } @@ -2390,7 +2366,7 @@ StorePath EvalState::coerceToStorePath(const PosIdx pos, Value & v, NixStringCon auto path = coerceToString(pos, v, context, errorCtx, false, false, true).toOwned(); if (auto storePath = store->maybeParseStorePath(path)) return *storePath; - error("path '%1%' is not in the Nix store", path).withTrace(pos, errorCtx).debugThrow(); + error("path '%1%' is not in the Nix store", path).withTrace(pos, errorCtx).debugThrow(); } @@ -2400,18 +2376,18 @@ std::pair EvalState::coerceToSingleDerivedP auto s = forceString(v, context, pos, errorCtx); auto csize = context.size(); if (csize != 1) - error( + error( "string '%s' has %d entries in its context. It should only have exactly one entry", s, csize) - .withTrace(pos, errorCtx).debugThrow(); + .withTrace(pos, errorCtx).debugThrow(); auto derivedPath = std::visit(overloaded { [&](NixStringContextElem::Opaque && o) -> SingleDerivedPath { return std::move(o); }, [&](NixStringContextElem::DrvDeep &&) -> SingleDerivedPath { - error( + error( "string '%s' has a context which refers to a complete source and binary closure. This is not supported at this time", - s).withTrace(pos, errorCtx).debugThrow(); + s).withTrace(pos, errorCtx).debugThrow(); }, [&](NixStringContextElem::Built && b) -> SingleDerivedPath { return std::move(b); @@ -2434,16 +2410,16 @@ SingleDerivedPath EvalState::coerceToSingleDerivedPath(const PosIdx pos, Value & error message. 
*/ std::visit(overloaded { [&](const SingleDerivedPath::Opaque & o) { - error( + error( "path string '%s' has context with the different path '%s'", s, sExpected) - .withTrace(pos, errorCtx).debugThrow(); + .withTrace(pos, errorCtx).debugThrow(); }, [&](const SingleDerivedPath::Built & b) { - error( + error( "string '%s' has context with the output '%s' from derivation '%s', but the string is not the right placeholder for this derivation output. It should be '%s'", s, b.output, b.drvPath->to_string(*store), sExpected) - .withTrace(pos, errorCtx).debugThrow(); + .withTrace(pos, errorCtx).debugThrow(); } }, derivedPath.raw()); } @@ -2528,7 +2504,7 @@ bool EvalState::eqValues(Value & v1, Value & v2, const PosIdx pos, std::string_v case nThunk: // Must not be left by forceValue default: - error("cannot compare %1% with %2%", showType(v1), showType(v2)).withTrace(pos, errorCtx).debugThrow(); + error("cannot compare %1% with %2%", showType(v1), showType(v2)).withTrace(pos, errorCtx).debugThrow(); } } @@ -2767,13 +2743,12 @@ SourcePath EvalState::findFile(const SearchPath & searchPath, const std::string_ if (hasPrefix(path, "nix/")) return {corepkgsFS, CanonPath(path.substr(3))}; - debugThrow(ThrownError({ - .msg = hintfmt(evalSettings.pureEval + error( + evalSettings.pureEval ? 
"cannot look up '<%s>' in pure evaluation mode (use '--impure' to override)" : "file '%s' was not found in the Nix search path (add it using $NIX_PATH or -I)", - path), - .errPos = positions[pos] - }), 0, 0); + path + ).atPos(pos).debugThrow(); } @@ -2856,11 +2831,11 @@ Expr * EvalState::parse( } -std::string ExternalValueBase::coerceToString(const Pos & pos, NixStringContext & context, bool copyMore, bool copyToStore) const +std::string ExternalValueBase::coerceToString(EvalState & state, const PosIdx & pos, NixStringContext & context, bool copyMore, bool copyToStore) const { - throw TypeError({ - .msg = hintfmt("cannot coerce %1% to a string: %2%", showType(), *this) - }); + state.error( + "cannot coerce %1% to a string: %2%", showType(), *this + ).atPos(pos).debugThrow(); } diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh index 2368187b1..afe89cd30 100644 --- a/src/libexpr/eval.hh +++ b/src/libexpr/eval.hh @@ -2,6 +2,7 @@ ///@file #include "attr-set.hh" +#include "eval-error.hh" #include "types.hh" #include "value.hh" #include "nixexpr.hh" @@ -151,45 +152,6 @@ struct DebugTrace { bool isError; }; -void debugError(Error * e, Env & env, Expr & expr); - -class ErrorBuilder -{ - private: - EvalState & state; - ErrorInfo info; - - ErrorBuilder(EvalState & s, ErrorInfo && i): state(s), info(i) { } - - public: - template - [[nodiscard, gnu::noinline]] - static ErrorBuilder * create(EvalState & s, const Args & ... args) - { - return new ErrorBuilder(s, ErrorInfo { .msg = hintfmt(args...) 
}); - } - - [[nodiscard, gnu::noinline]] - ErrorBuilder & atPos(PosIdx pos); - - [[nodiscard, gnu::noinline]] - ErrorBuilder & withTrace(PosIdx pos, const std::string_view text); - - [[nodiscard, gnu::noinline]] - ErrorBuilder & withFrameTrace(PosIdx pos, const std::string_view text); - - [[nodiscard, gnu::noinline]] - ErrorBuilder & withSuggestions(Suggestions & s); - - [[nodiscard, gnu::noinline]] - ErrorBuilder & withFrame(const Env & e, const Expr & ex); - - template - [[gnu::noinline, gnu::noreturn]] - void debugThrow(); -}; - - class EvalState : public std::enable_shared_from_this { public: @@ -274,39 +236,10 @@ public: void runDebugRepl(const Error * error, const Env & env, const Expr & expr); - template - [[gnu::noinline, gnu::noreturn]] - void debugThrowLastTrace(E && error) - { - debugThrow(error, nullptr, nullptr); - } - - template - [[gnu::noinline, gnu::noreturn]] - void debugThrow(E && error, const Env * env, const Expr * expr) - { - if (debugRepl && ((env && expr) || !debugTraces.empty())) { - if (!env || !expr) { - const DebugTrace & last = debugTraces.front(); - env = &last.env; - expr = &last.expr; - } - runDebugRepl(&error, *env, *expr); - } - - throw std::move(error); - } - - // This is dangerous, but gets in line with the idea that error creation and - // throwing should not allocate on the stack of hot functions. - // as long as errors are immediately thrown, it works. - ErrorBuilder * errorBuilder; - - template + template [[nodiscard, gnu::noinline]] - ErrorBuilder & error(const Args & ... args) { - errorBuilder = ErrorBuilder::create(*this, args...); - return *errorBuilder; + EvalErrorBuilder & error(const Args & ... 
args) { + return *new EvalErrorBuilder(*this, args...); } private: @@ -845,22 +778,6 @@ SourcePath resolveExprPath(SourcePath path); */ bool isAllowedURI(std::string_view uri, const Strings & allowedPaths); -struct InvalidPathError : EvalError -{ - Path path; - InvalidPathError(const Path & path); -#ifdef EXCEPTION_NEEDS_THROW_SPEC - ~InvalidPathError() throw () { }; -#endif -}; - -template -void ErrorBuilder::debugThrow() -{ - // NOTE: We always use the -LastTrace version as we push the new trace in withFrame() - state.debugThrowLastTrace(ErrorType(info)); -} - } #include "eval-inline.hh" diff --git a/src/libexpr/flake/flake.cc b/src/libexpr/flake/flake.cc index fee58792b..3396b0219 100644 --- a/src/libexpr/flake/flake.cc +++ b/src/libexpr/flake/flake.cc @@ -147,8 +147,8 @@ static FlakeInput parseFlakeInput(EvalState & state, NixStringContext emptyContext = {}; attrs.emplace(state.symbols[attr.name], printValueAsJSON(state, true, *attr.value, pos, emptyContext).dump()); } else - throw TypeError("flake input attribute '%s' is %s while a string, Boolean, or integer is expected", - state.symbols[attr.name], showType(*attr.value)); + state.error("flake input attribute '%s' is %s while a string, Boolean, or integer is expected", + state.symbols[attr.name], showType(*attr.value)).debugThrow(); } #pragma GCC diagnostic pop } @@ -295,15 +295,15 @@ static Flake getFlake( std::vector ss; for (auto elem : setting.value->listItems()) { if (elem->type() != nString) - throw TypeError("list element in flake configuration setting '%s' is %s while a string is expected", - state.symbols[setting.name], showType(*setting.value)); + state.error("list element in flake configuration setting '%s' is %s while a string is expected", + state.symbols[setting.name], showType(*setting.value)).debugThrow(); ss.emplace_back(state.forceStringNoCtx(*elem, setting.pos, "")); } flake.config.settings.emplace(state.symbols[setting.name], ss); } else - throw TypeError("flake configuration setting '%s' 
is %s", - state.symbols[setting.name], showType(*setting.value)); + state.error("flake configuration setting '%s' is %s", + state.symbols[setting.name], showType(*setting.value)).debugThrow(); } } @@ -865,11 +865,11 @@ static void prim_flakeRefToString( attrs.emplace(state.symbols[attr.name], std::string(attr.value->string_view())); } else { - state.error( + state.error( "flake reference attribute sets may only contain integers, Booleans, " "and strings, but attribute '%s' is %s", state.symbols[attr.name], - showType(*attr.value)).debugThrow(); + showType(*attr.value)).debugThrow(); } } auto flakeRef = FlakeRef::fromAttrs(attrs); diff --git a/src/libexpr/get-drvs.cc b/src/libexpr/get-drvs.cc index 51449ccb3..e9ed1ef08 100644 --- a/src/libexpr/get-drvs.cc +++ b/src/libexpr/get-drvs.cc @@ -49,7 +49,7 @@ std::string PackageInfo::queryName() const { if (name == "" && attrs) { auto i = attrs->find(state->sName); - if (i == attrs->end()) throw TypeError("derivation name missing"); + if (i == attrs->end()) state->error("derivation name missing").debugThrow(); name = state->forceStringNoCtx(*i->value, noPos, "while evaluating the 'name' attribute of a derivation"); } return name; @@ -396,7 +396,8 @@ static void getDerivations(EvalState & state, Value & vIn, } } - else throw TypeError("expression does not evaluate to a derivation (or a set or list of those)"); + else + state.error("expression does not evaluate to a derivation (or a set or list of those)").debugThrow(); } diff --git a/src/libexpr/json-to-value.cc b/src/libexpr/json-to-value.cc index 99a475ff9..2d12c47c5 100644 --- a/src/libexpr/json-to-value.cc +++ b/src/libexpr/json-to-value.cc @@ -1,4 +1,6 @@ #include "json-to-value.hh" +#include "value.hh" +#include "eval.hh" #include #include @@ -159,7 +161,7 @@ public: } bool parse_error(std::size_t, const std::string&, const nlohmann::detail::exception& ex) { - throw JSONParseError(ex.what()); + throw JSONParseError("%s", ex.what()); } }; diff --git 
a/src/libexpr/json-to-value.hh b/src/libexpr/json-to-value.hh index 3b8ec000f..3c8fa5cc0 100644 --- a/src/libexpr/json-to-value.hh +++ b/src/libexpr/json-to-value.hh @@ -1,13 +1,16 @@ #pragma once ///@file -#include "eval.hh" +#include "error.hh" #include namespace nix { -MakeError(JSONParseError, EvalError); +class EvalState; +struct Value; + +MakeError(JSONParseError, Error); void parseJSON(EvalState & state, const std::string_view & s, Value & v); diff --git a/src/libexpr/lexer.l b/src/libexpr/lexer.l index d7a0b5048..af67e847d 100644 --- a/src/libexpr/lexer.l +++ b/src/libexpr/lexer.l @@ -146,9 +146,9 @@ or { return OR_KW; } try { yylval->n = boost::lexical_cast(yytext); } catch (const boost::bad_lexical_cast &) { - throw ParseError({ + throw ParseError(ErrorInfo{ .msg = hintfmt("invalid integer '%1%'", yytext), - .errPos = state->positions[CUR_POS], + .pos = state->positions[CUR_POS], }); } return INT_LIT; @@ -156,9 +156,9 @@ or { return OR_KW; } {FLOAT} { errno = 0; yylval->nf = strtod(yytext, 0); if (errno != 0) - throw ParseError({ + throw ParseError(ErrorInfo{ .msg = hintfmt("invalid float '%1%'", yytext), - .errPos = state->positions[CUR_POS], + .pos = state->positions[CUR_POS], }); return FLOAT_LIT; } @@ -285,9 +285,9 @@ or { return OR_KW; } {ANY} | <> { - throw ParseError({ + throw ParseError(ErrorInfo{ .msg = hintfmt("path has a trailing slash"), - .errPos = state->positions[CUR_POS], + .pos = state->positions[CUR_POS], }); } diff --git a/src/libexpr/nixexpr.cc b/src/libexpr/nixexpr.cc index 6fe4ba81b..6b8f33c42 100644 --- a/src/libexpr/nixexpr.cc +++ b/src/libexpr/nixexpr.cc @@ -296,10 +296,10 @@ void ExprVar::bindVars(EvalState & es, const std::shared_ptr & enclosing `with'. If there is no `with', then we can issue an "undefined variable" error now. 
*/ if (withLevel == -1) - throw UndefinedVarError({ - .msg = hintfmt("undefined variable '%1%'", es.symbols[name]), - .errPos = es.positions[pos] - }); + es.error( + "undefined variable '%1%'", + es.symbols[name] + ).atPos(pos).debugThrow(); for (auto * e = env.get(); e && !fromWith; e = e->up) fromWith = e->isWith; this->level = withLevel; diff --git a/src/libexpr/nixexpr.hh b/src/libexpr/nixexpr.hh index da0ec6e9d..1f944f10b 100644 --- a/src/libexpr/nixexpr.hh +++ b/src/libexpr/nixexpr.hh @@ -9,28 +9,13 @@ #include "error.hh" #include "chunked-vector.hh" #include "position.hh" +#include "eval-error.hh" #include "pos-idx.hh" #include "pos-table.hh" namespace nix { -MakeError(EvalError, Error); -MakeError(ParseError, Error); -MakeError(AssertionError, EvalError); -MakeError(ThrownError, AssertionError); -MakeError(Abort, EvalError); -MakeError(TypeError, EvalError); -MakeError(UndefinedVarError, Error); -MakeError(MissingArgumentError, EvalError); - -class InfiniteRecursionError : public EvalError -{ - friend class EvalState; -public: - using EvalError::EvalError; -}; - struct Env; struct Value; class EvalState; diff --git a/src/libexpr/parser-state.hh b/src/libexpr/parser-state.hh index 0a9f076dc..bdd5bbabe 100644 --- a/src/libexpr/parser-state.hh +++ b/src/libexpr/parser-state.hh @@ -66,7 +66,7 @@ inline void ParserState::dupAttr(const AttrPath & attrPath, const PosIdx pos, co throw ParseError({ .msg = hintfmt("attribute '%1%' already defined at %2%", showAttrPath(symbols, attrPath), positions[prevPos]), - .errPos = positions[pos] + .pos = positions[pos] }); } @@ -74,7 +74,7 @@ inline void ParserState::dupAttr(Symbol attr, const PosIdx pos, const PosIdx pre { throw ParseError({ .msg = hintfmt("attribute '%1%' already defined at %2%", symbols[attr], positions[prevPos]), - .errPos = positions[pos] + .pos = positions[pos] }); } @@ -155,13 +155,13 @@ inline Formals * ParserState::validateFormals(Formals * formals, PosIdx pos, Sym if (duplicate) throw ParseError({ 
.msg = hintfmt("duplicate formal function argument '%1%'", symbols[duplicate->first]), - .errPos = positions[duplicate->second] + .pos = positions[duplicate->second] }); if (arg && formals->has(arg)) throw ParseError({ .msg = hintfmt("duplicate formal function argument '%1%'", symbols[arg]), - .errPos = positions[pos] + .pos = positions[pos] }); return formals; diff --git a/src/libexpr/parser.y b/src/libexpr/parser.y index e95da37f7..95f45c80a 100644 --- a/src/libexpr/parser.y +++ b/src/libexpr/parser.y @@ -66,7 +66,7 @@ void yyerror(YYLTYPE * loc, yyscan_t scanner, ParserState * state, const char * { throw ParseError({ .msg = hintfmt(error), - .errPos = state->positions[state->at(*loc)] + .pos = state->positions[state->at(*loc)] }); } @@ -155,7 +155,7 @@ expr_function { if (!$2->dynamicAttrs.empty()) throw ParseError({ .msg = hintfmt("dynamic attributes not allowed in let"), - .errPos = state->positions[CUR_POS] + .pos = state->positions[CUR_POS] }); $$ = new ExprLet($2, $4); } @@ -245,7 +245,7 @@ expr_simple if (noURLLiterals) throw ParseError({ .msg = hintfmt("URL literals are disabled"), - .errPos = state->positions[CUR_POS] + .pos = state->positions[CUR_POS] }); $$ = new ExprString(std::string($1)); } @@ -341,7 +341,7 @@ attrs } else throw ParseError({ .msg = hintfmt("dynamic attributes not allowed in inherit"), - .errPos = state->positions[state->at(@2)] + .pos = state->positions[state->at(@2)] }); } | { $$ = new AttrPath; } diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index 1197b6e13..1eec6f961 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -39,10 +39,6 @@ namespace nix { * Miscellaneous *************************************************************/ - -InvalidPathError::InvalidPathError(const Path & path) : - EvalError("path '%s' is not valid", path), path(path) {} - StringMap EvalState::realiseContext(const NixStringContext & context) { std::vector drvs; @@ -51,7 +47,7 @@ StringMap EvalState::realiseContext(const 
NixStringContext & context) for (auto & c : context) { auto ensureValid = [&](const StorePath & p) { if (!store->isValidPath(p)) - debugThrowLastTrace(InvalidPathError(store->printStorePath(p))); + error(store->printStorePath(p)).debugThrow(); }; std::visit(overloaded { [&](const NixStringContextElem::Built & b) { @@ -78,9 +74,10 @@ StringMap EvalState::realiseContext(const NixStringContext & context) if (drvs.empty()) return {}; if (!evalSettings.enableImportFromDerivation) - debugThrowLastTrace(Error( + error( "cannot build '%1%' during evaluation because the option 'allow-import-from-derivation' is disabled", - drvs.begin()->to_string(*store))); + drvs.begin()->to_string(*store) + ).debugThrow(); /* Build/substitute the context. */ std::vector buildReqs; @@ -340,16 +337,16 @@ void prim_importNative(EvalState & state, const PosIdx pos, Value * * args, Valu void *handle = dlopen(path.path.c_str(), RTLD_LAZY | RTLD_LOCAL); if (!handle) - state.debugThrowLastTrace(EvalError("could not open '%1%': %2%", path, dlerror())); + state.error("could not open '%1%': %2%", path, dlerror()).debugThrow(); dlerror(); ValueInitializer func = (ValueInitializer) dlsym(handle, sym.c_str()); if(!func) { char *message = dlerror(); if (message) - state.debugThrowLastTrace(EvalError("could not load symbol '%1%' from '%2%': %3%", sym, path, message)); + state.error("could not load symbol '%1%' from '%2%': %3%", sym, path, message).debugThrow(); else - state.debugThrowLastTrace(EvalError("symbol '%1%' from '%2%' resolved to NULL when a function pointer was expected", sym, path)); + state.error("symbol '%1%' from '%2%' resolved to NULL when a function pointer was expected", sym, path).debugThrow(); } (func)(state, v); @@ -365,7 +362,7 @@ void prim_exec(EvalState & state, const PosIdx pos, Value * * args, Value & v) auto elems = args[0]->listElems(); auto count = args[0]->listSize(); if (count == 0) - state.error("at least one argument to 'exec' required").atPos(pos).debugThrow(); + 
state.error("at least one argument to 'exec' required").atPos(pos).debugThrow(); NixStringContext context; auto program = state.coerceToString(pos, *elems[0], context, "while evaluating the first element of the argument passed to builtins.exec", @@ -380,7 +377,7 @@ void prim_exec(EvalState & state, const PosIdx pos, Value * * args, Value & v) try { auto _ = state.realiseContext(context); // FIXME: Handle CA derivations } catch (InvalidPathError & e) { - state.error("cannot execute '%1%', since path '%2%' is not valid", program, e.path).atPos(pos).debugThrow(); + state.error("cannot execute '%1%', since path '%2%' is not valid", program, e.path).atPos(pos).debugThrow(); } auto output = runProgram(program, true, commandArgs); @@ -582,7 +579,7 @@ struct CompareValues if (v1->type() == nInt && v2->type() == nFloat) return v1->integer < v2->fpoint; if (v1->type() != v2->type()) - state.error("cannot compare %s with %s", showType(*v1), showType(*v2)).debugThrow(); + state.error("cannot compare %s with %s", showType(*v1), showType(*v2)).debugThrow(); // Allow selecting a subset of enum values #pragma GCC diagnostic push #pragma GCC diagnostic ignored "-Wswitch-enum" @@ -610,7 +607,7 @@ struct CompareValues } } default: - state.error("cannot compare %s with %s; values of that type are incomparable", showType(*v1), showType(*v2)).debugThrow(); + state.error("cannot compare %s with %s; values of that type are incomparable", showType(*v1), showType(*v2)).debugThrow(); #pragma GCC diagnostic pop } } catch (Error & e) { @@ -637,7 +634,7 @@ static Bindings::iterator getAttr( { Bindings::iterator value = attrSet->find(attrSym); if (value == attrSet->end()) { - state.error("attribute '%s' missing", state.symbols[attrSym]).withTrace(noPos, errorCtx).debugThrow(); + state.error("attribute '%s' missing", state.symbols[attrSym]).withTrace(noPos, errorCtx).debugThrow(); } return value; } @@ -758,7 +755,7 @@ static RegisterPrimOp primop_break({ auto error = Error(ErrorInfo { .level = 
lvlInfo, .msg = hintfmt("breakpoint reached"), - .errPos = state.positions[pos], + .pos = state.positions[pos], }); auto & dt = state.debugTraces.front(); @@ -769,7 +766,7 @@ static RegisterPrimOp primop_break({ throw Error(ErrorInfo{ .level = lvlInfo, .msg = hintfmt("quit the debugger"), - .errPos = nullptr, + .pos = nullptr, }); } } @@ -790,7 +787,7 @@ static RegisterPrimOp primop_abort({ NixStringContext context; auto s = state.coerceToString(pos, *args[0], context, "while evaluating the error message passed to builtins.abort").toOwned(); - state.debugThrowLastTrace(Abort("evaluation aborted with the following error message: '%1%'", s)); + state.error("evaluation aborted with the following error message: '%1%'", s).debugThrow(); } }); @@ -809,7 +806,7 @@ static RegisterPrimOp primop_throw({ NixStringContext context; auto s = state.coerceToString(pos, *args[0], context, "while evaluating the error message passed to builtin.throw").toOwned(); - state.debugThrowLastTrace(ThrownError(s)); + state.error(s).debugThrow(); } }); @@ -1128,37 +1125,33 @@ drvName, Bindings * attrs, Value & v) experimentalFeatureSettings.require(Xp::DynamicDerivations); ingestionMethod = TextIngestionMethod {}; } else - state.debugThrowLastTrace(EvalError({ - .msg = hintfmt("invalid value '%s' for 'outputHashMode' attribute", s), - .errPos = state.positions[noPos] - })); + state.error( + "invalid value '%s' for 'outputHashMode' attribute", s + ).atPos(v).debugThrow(); }; auto handleOutputs = [&](const Strings & ss) { outputs.clear(); for (auto & j : ss) { if (outputs.find(j) != outputs.end()) - state.debugThrowLastTrace(EvalError({ - .msg = hintfmt("duplicate derivation output '%1%'", j), - .errPos = state.positions[noPos] - })); + state.error("duplicate derivation output '%1%'", j) + .atPos(v) + .debugThrow(); /* !!! Check whether j is a valid attribute name. */ /* Derivations cannot be named ‘drv’, because then we'd have an attribute ‘drvPath’ in the resulting set. 
*/ if (j == "drv") - state.debugThrowLastTrace(EvalError({ - .msg = hintfmt("invalid derivation output name 'drv'" ), - .errPos = state.positions[noPos] - })); + state.error("invalid derivation output name 'drv'") + .atPos(v) + .debugThrow(); outputs.insert(j); } if (outputs.empty()) - state.debugThrowLastTrace(EvalError({ - .msg = hintfmt("derivation cannot have an empty set of outputs"), - .errPos = state.positions[noPos] - })); + state.error("derivation cannot have an empty set of outputs") + .atPos(v) + .debugThrow(); }; try { @@ -1281,16 +1274,14 @@ drvName, Bindings * attrs, Value & v) /* Do we have all required attributes? */ if (drv.builder == "") - state.debugThrowLastTrace(EvalError({ - .msg = hintfmt("required attribute 'builder' missing"), - .errPos = state.positions[noPos] - })); + state.error("required attribute 'builder' missing") + .atPos(v) + .debugThrow(); if (drv.platform == "") - state.debugThrowLastTrace(EvalError({ - .msg = hintfmt("required attribute 'system' missing"), - .errPos = state.positions[noPos] - })); + state.error("required attribute 'system' missing") + .atPos(v) + .debugThrow(); /* Check whether the derivation name is valid. */ if (isDerivation(drvName) && @@ -1298,10 +1289,10 @@ drvName, Bindings * attrs, Value & v) outputs.size() == 1 && *(outputs.begin()) == "out")) { - state.debugThrowLastTrace(EvalError({ - .msg = hintfmt("derivation names are allowed to end in '%s' only if they produce a single derivation file", drvExtension), - .errPos = state.positions[noPos] - })); + state.error( + "derivation names are allowed to end in '%s' only if they produce a single derivation file", + drvExtension + ).atPos(v).debugThrow(); } if (outputHash) { @@ -1310,10 +1301,9 @@ drvName, Bindings * attrs, Value & v) Ignore `__contentAddressed` because fixed output derivations are already content addressed. 
*/ if (outputs.size() != 1 || *(outputs.begin()) != "out") - state.debugThrowLastTrace(Error({ - .msg = hintfmt("multiple outputs are not supported in fixed-output derivations"), - .errPos = state.positions[noPos] - })); + state.error( + "multiple outputs are not supported in fixed-output derivations" + ).atPos(v).debugThrow(); auto h = newHashAllowEmpty(*outputHash, parseHashAlgoOpt(outputHashAlgo)); @@ -1332,10 +1322,8 @@ drvName, Bindings * attrs, Value & v) else if (contentAddressed || isImpure) { if (contentAddressed && isImpure) - throw EvalError({ - .msg = hintfmt("derivation cannot be both content-addressed and impure"), - .errPos = state.positions[noPos] - }); + state.error("derivation cannot be both content-addressed and impure") + .atPos(v).debugThrow(); auto ha = parseHashAlgoOpt(outputHashAlgo).value_or(HashAlgorithm::SHA256); auto method = ingestionMethod.value_or(FileIngestionMethod::Recursive); @@ -1376,10 +1364,10 @@ drvName, Bindings * attrs, Value & v) for (auto & i : outputs) { auto h = get(hashModulo.hashes, i); if (!h) - throw AssertionError({ - .msg = hintfmt("derivation produced no hash for output '%s'", i), - .errPos = state.positions[noPos], - }); + state.error( + "derivation produced no hash for output '%s'", + i + ).atPos(v).debugThrow(); auto outPath = state.store->makeOutputPath(i, *h, drvName); drv.env[i] = state.store->printStorePath(outPath); drv.outputs.insert_or_assign( @@ -1485,10 +1473,10 @@ static RegisterPrimOp primop_toPath({ static void prim_storePath(EvalState & state, const PosIdx pos, Value * * args, Value & v) { if (evalSettings.pureEval) - state.debugThrowLastTrace(EvalError({ - .msg = hintfmt("'%s' is not allowed in pure evaluation mode", "builtins.storePath"), - .errPos = state.positions[pos] - })); + state.error( + "'%s' is not allowed in pure evaluation mode", + "builtins.storePath" + ).atPos(pos).debugThrow(); NixStringContext context; auto path = state.coerceToPath(pos, *args[0], context, "while evaluating the 
first argument passed to 'builtins.storePath'").path; @@ -1498,10 +1486,8 @@ static void prim_storePath(EvalState & state, const PosIdx pos, Value * * args, if (!state.store->isStorePath(path.abs())) path = CanonPath(canonPath(path.abs(), true)); if (!state.store->isInStore(path.abs())) - state.debugThrowLastTrace(EvalError({ - .msg = hintfmt("path '%1%' is not in the Nix store", path), - .errPos = state.positions[pos] - })); + state.error("path '%1%' is not in the Nix store", path) + .atPos(pos).debugThrow(); auto path2 = state.store->toStorePath(path.abs()).first; if (!settings.readOnlyMode) state.store->ensurePath(path2); @@ -1616,7 +1602,10 @@ static void prim_readFile(EvalState & state, const PosIdx pos, Value * * args, V auto path = realisePath(state, pos, *args[0]); auto s = path.readFile(); if (s.find((char) 0) != std::string::npos) - state.debugThrowLastTrace(Error("the contents of the file '%1%' cannot be represented as a Nix string", path)); + state.error( + "the contents of the file '%1%' cannot be represented as a Nix string", + path + ).atPos(pos).debugThrow(); StorePathSet refs; if (state.store->isInStore(path.path.abs())) { try { @@ -1673,10 +1662,11 @@ static void prim_findFile(EvalState & state, const PosIdx pos, Value * * args, V auto rewrites = state.realiseContext(context); path = rewriteStrings(path, rewrites); } catch (InvalidPathError & e) { - state.debugThrowLastTrace(EvalError({ - .msg = hintfmt("cannot find '%1%', since path '%2%' is not valid", path, e.path), - .errPos = state.positions[pos] - })); + state.error( + "cannot find '%1%', since path '%2%' is not valid", + path, + e.path + ).atPos(pos).debugThrow(); } searchPath.elements.emplace_back(SearchPath::Elem { @@ -1745,10 +1735,7 @@ static void prim_hashFile(EvalState & state, const PosIdx pos, Value * * args, V auto algo = state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.hashFile"); std::optional ha = parseHashAlgo(algo); if (!ha) - 
state.debugThrowLastTrace(Error({ - .msg = hintfmt("unknown hash algo '%1%'", algo), - .errPos = state.positions[pos] - })); + state.error("unknown hash algorithm '%1%'", algo).atPos(pos).debugThrow(); auto path = realisePath(state, pos, *args[1]); @@ -2068,13 +2055,12 @@ static void prim_toFile(EvalState & state, const PosIdx pos, Value * * args, Val if (auto p = std::get_if(&c.raw)) refs.insert(p->path); else - state.debugThrowLastTrace(EvalError({ - .msg = hintfmt( - "in 'toFile': the file named '%1%' must not contain a reference " - "to a derivation but contains (%2%)", - name, c.to_string()), - .errPos = state.positions[pos] - })); + state.error( + "files created by %1% may not reference derivations, but %2% references %3%", + "builtins.toFile", + name, + c.to_string() + ).atPos(pos).debugThrow(); } auto storePath = settings.readOnlyMode @@ -2243,7 +2229,10 @@ static void addPath( if (!expectedHash || !state.store->isValidPath(*expectedStorePath)) { auto dstPath = fetchToStore(*state.store, path.resolveSymlinks(), name, method, filter.get(), state.repair); if (expectedHash && expectedStorePath != dstPath) - state.debugThrowLastTrace(Error("store path mismatch in (possibly filtered) path added from '%s'", path)); + state.error( + "store path mismatch in (possibly filtered) path added from '%s'", + path + ).atPos(pos).debugThrow(); state.allowAndSetStorePathString(dstPath, v); } else state.allowAndSetStorePathString(*expectedStorePath, v); @@ -2343,16 +2332,15 @@ static void prim_path(EvalState & state, const PosIdx pos, Value * * args, Value else if (n == "sha256") expectedHash = newHashAllowEmpty(state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the `sha256` attribute passed to builtins.path"), HashAlgorithm::SHA256); else - state.debugThrowLastTrace(EvalError({ - .msg = hintfmt("unsupported argument '%1%' to 'addPath'", state.symbols[attr.name]), - .errPos = state.positions[attr.pos] - })); + state.error( + "unsupported argument '%1%' to 
'addPath'", + state.symbols[attr.name] + ).atPos(attr.pos).debugThrow(); } if (!path) - state.debugThrowLastTrace(EvalError({ - .msg = hintfmt("missing required 'path' attribute in the first argument to builtins.path"), - .errPos = state.positions[pos] - })); + state.error( + "missing required 'path' attribute in the first argument to builtins.path" + ).atPos(pos).debugThrow(); if (name.empty()) name = path->baseName(); @@ -2770,10 +2758,7 @@ static void prim_functionArgs(EvalState & state, const PosIdx pos, Value * * arg return; } if (!args[0]->isLambda()) - state.debugThrowLastTrace(TypeError({ - .msg = hintfmt("'functionArgs' requires a function"), - .errPos = state.positions[pos] - })); + state.error("'functionArgs' requires a function").atPos(pos).debugThrow(); if (!args[0]->lambda.fun->hasFormals()) { v.mkAttrs(&state.emptyBindings); @@ -2943,10 +2928,10 @@ static void elemAt(EvalState & state, const PosIdx pos, Value & list, int n, Val { state.forceList(list, pos, "while evaluating the first argument passed to builtins.elemAt"); if (n < 0 || (unsigned int) n >= list.listSize()) - state.debugThrowLastTrace(Error({ - .msg = hintfmt("list index %1% is out of bounds", n), - .errPos = state.positions[pos] - })); + state.error( + "list index %1% is out of bounds", + n + ).atPos(pos).debugThrow(); state.forceValue(*list.listElems()[n], pos); v = *list.listElems()[n]; } @@ -2991,10 +2976,7 @@ static void prim_tail(EvalState & state, const PosIdx pos, Value * * args, Value { state.forceList(*args[0], pos, "while evaluating the first argument passed to builtins.tail"); if (args[0]->listSize() == 0) - state.debugThrowLastTrace(Error({ - .msg = hintfmt("'tail' called on an empty list"), - .errPos = state.positions[pos] - })); + state.error("'tail' called on an empty list").atPos(pos).debugThrow(); state.mkList(v, args[0]->listSize() - 1); for (unsigned int n = 0; n < v.listSize(); ++n) @@ -3251,7 +3233,7 @@ static void prim_genList(EvalState & state, const PosIdx pos, 
Value * * args, Va auto len = state.forceInt(*args[1], pos, "while evaluating the second argument passed to builtins.genList"); if (len < 0) - state.error("cannot create list of size %1%", len).debugThrow(); + state.error("cannot create list of size %1%", len).atPos(pos).debugThrow(); // More strict than striclty (!) necessary, but acceptable // as evaluating map without accessing any values makes little sense. @@ -3568,10 +3550,7 @@ static void prim_div(EvalState & state, const PosIdx pos, Value * * args, Value NixFloat f2 = state.forceFloat(*args[1], pos, "while evaluating the second operand of the division"); if (f2 == 0) - state.debugThrowLastTrace(EvalError({ - .msg = hintfmt("division by zero"), - .errPos = state.positions[pos] - })); + state.error("division by zero").atPos(pos).debugThrow(); if (args[0]->type() == nFloat || args[1]->type() == nFloat) { v.mkFloat(state.forceFloat(*args[0], pos, "while evaluating the first operand of the division") / f2); @@ -3580,10 +3559,7 @@ static void prim_div(EvalState & state, const PosIdx pos, Value * * args, Value NixInt i2 = state.forceInt(*args[1], pos, "while evaluating the second operand of the division"); /* Avoid division overflow as it might raise SIGFPE. 
*/ if (i1 == std::numeric_limits::min() && i2 == -1) - state.debugThrowLastTrace(EvalError({ - .msg = hintfmt("overflow in integer division"), - .errPos = state.positions[pos] - })); + state.error("overflow in integer division").atPos(pos).debugThrow(); v.mkInt(i1 / i2); } @@ -3714,10 +3690,7 @@ static void prim_substring(EvalState & state, const PosIdx pos, Value * * args, int start = state.forceInt(*args[0], pos, "while evaluating the first argument (the start offset) passed to builtins.substring"); if (start < 0) - state.debugThrowLastTrace(EvalError({ - .msg = hintfmt("negative start position in 'substring'"), - .errPos = state.positions[pos] - })); + state.error("negative start position in 'substring'").atPos(pos).debugThrow(); int len = state.forceInt(*args[1], pos, "while evaluating the second argument (the substring length) passed to builtins.substring"); @@ -3782,10 +3755,7 @@ static void prim_hashString(EvalState & state, const PosIdx pos, Value * * args, auto algo = state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.hashString"); std::optional ha = parseHashAlgo(algo); if (!ha) - state.debugThrowLastTrace(Error({ - .msg = hintfmt("unknown hash algo '%1%'", algo), - .errPos = state.positions[pos] - })); + state.error("unknown hash algorithm '%1%'", algo).atPos(pos).debugThrow(); NixStringContext context; // discarded auto s = state.forceString(*args[1], context, pos, "while evaluating the second argument passed to builtins.hashString"); @@ -3951,15 +3921,13 @@ void prim_match(EvalState & state, const PosIdx pos, Value * * args, Value & v) } catch (std::regex_error & e) { if (e.code() == std::regex_constants::error_space) { // limit is _GLIBCXX_REGEX_STATE_LIMIT for libstdc++ - state.debugThrowLastTrace(EvalError({ - .msg = hintfmt("memory limit exceeded by regular expression '%s'", re), - .errPos = state.positions[pos] - })); + state.error("memory limit exceeded by regular expression '%s'", re) + .atPos(pos) + 
.debugThrow(); } else - state.debugThrowLastTrace(EvalError({ - .msg = hintfmt("invalid regular expression '%s'", re), - .errPos = state.positions[pos] - })); + state.error("invalid regular expression '%s'", re) + .atPos(pos) + .debugThrow(); } } @@ -4055,15 +4023,13 @@ void prim_split(EvalState & state, const PosIdx pos, Value * * args, Value & v) } catch (std::regex_error & e) { if (e.code() == std::regex_constants::error_space) { // limit is _GLIBCXX_REGEX_STATE_LIMIT for libstdc++ - state.debugThrowLastTrace(EvalError({ - .msg = hintfmt("memory limit exceeded by regular expression '%s'", re), - .errPos = state.positions[pos] - })); + state.error("memory limit exceeded by regular expression '%s'", re) + .atPos(pos) + .debugThrow(); } else - state.debugThrowLastTrace(EvalError({ - .msg = hintfmt("invalid regular expression '%s'", re), - .errPos = state.positions[pos] - })); + state.error("invalid regular expression '%s'", re) + .atPos(pos) + .debugThrow(); } } @@ -4139,7 +4105,9 @@ static void prim_replaceStrings(EvalState & state, const PosIdx pos, Value * * a state.forceList(*args[0], pos, "while evaluating the first argument passed to builtins.replaceStrings"); state.forceList(*args[1], pos, "while evaluating the second argument passed to builtins.replaceStrings"); if (args[0]->listSize() != args[1]->listSize()) - state.error("'from' and 'to' arguments passed to builtins.replaceStrings have different lengths").atPos(pos).debugThrow(); + state.error( + "'from' and 'to' arguments passed to builtins.replaceStrings have different lengths" + ).atPos(pos).debugThrow(); std::vector from; from.reserve(args[0]->listSize()); diff --git a/src/libexpr/primops/context.cc b/src/libexpr/primops/context.cc index db940f277..1eec8b316 100644 --- a/src/libexpr/primops/context.cc +++ b/src/libexpr/primops/context.cc @@ -98,30 +98,30 @@ static void prim_addDrvOutputDependencies(EvalState & state, const PosIdx pos, V auto contextSize = context.size(); if (contextSize != 1) { - 
throw EvalError({ - .msg = hintfmt("context of string '%s' must have exactly one element, but has %d", *s, contextSize), - .errPos = state.positions[pos] - }); + state.error( + "context of string '%s' must have exactly one element, but has %d", + *s, + contextSize + ).atPos(pos).debugThrow(); } NixStringContext context2 { (NixStringContextElem { std::visit(overloaded { [&](const NixStringContextElem::Opaque & c) -> NixStringContextElem::DrvDeep { if (!c.path.isDerivation()) { - throw EvalError({ - .msg = hintfmt("path '%s' is not a derivation", - state.store->printStorePath(c.path)), - .errPos = state.positions[pos], - }); + state.error( + "path '%s' is not a derivation", + state.store->printStorePath(c.path) + ).atPos(pos).debugThrow(); } return NixStringContextElem::DrvDeep { .drvPath = c.path, }; }, [&](const NixStringContextElem::Built & c) -> NixStringContextElem::DrvDeep { - throw EvalError({ - .msg = hintfmt("`addDrvOutputDependencies` can only act on derivations, not on a derivation output such as '%1%'", c.output), - .errPos = state.positions[pos], - }); + state.error( + "`addDrvOutputDependencies` can only act on derivations, not on a derivation output such as '%1%'", + c.output + ).atPos(pos).debugThrow(); }, [&](const NixStringContextElem::DrvDeep & c) -> NixStringContextElem::DrvDeep { /* Reuse original item because we want this to be idempotent. 
*/ @@ -261,10 +261,10 @@ static void prim_appendContext(EvalState & state, const PosIdx pos, Value * * ar for (auto & i : *args[1]->attrs) { const auto & name = state.symbols[i.name]; if (!state.store->isStorePath(name)) - throw EvalError({ - .msg = hintfmt("context key '%s' is not a store path", name), - .errPos = state.positions[i.pos] - }); + state.error( + "context key '%s' is not a store path", + name + ).atPos(i.pos).debugThrow(); auto namePath = state.store->parseStorePath(name); if (!settings.readOnlyMode) state.store->ensurePath(namePath); @@ -281,10 +281,10 @@ static void prim_appendContext(EvalState & state, const PosIdx pos, Value * * ar if (iter != i.value->attrs->end()) { if (state.forceBool(*iter->value, iter->pos, "while evaluating the `allOutputs` attribute of a string context")) { if (!isDerivation(name)) { - throw EvalError({ - .msg = hintfmt("tried to add all-outputs context of %s, which is not a derivation, to a string", name), - .errPos = state.positions[i.pos] - }); + state.error( + "tried to add all-outputs context of %s, which is not a derivation, to a string", + name + ).atPos(i.pos).debugThrow(); } context.emplace(NixStringContextElem::DrvDeep { .drvPath = namePath, @@ -296,10 +296,10 @@ static void prim_appendContext(EvalState & state, const PosIdx pos, Value * * ar if (iter != i.value->attrs->end()) { state.forceList(*iter->value, iter->pos, "while evaluating the `outputs` attribute of a string context"); if (iter->value->listSize() && !isDerivation(name)) { - throw EvalError({ - .msg = hintfmt("tried to add derivation output context of %s, which is not a derivation, to a string", name), - .errPos = state.positions[i.pos] - }); + state.error( + "tried to add derivation output context of %s, which is not a derivation, to a string", + name + ).atPos(i.pos).debugThrow(); } for (auto elem : iter->value->listItems()) { auto outputName = state.forceStringNoCtx(*elem, iter->pos, "while evaluating an output name within a string context"); diff 
--git a/src/libexpr/primops/fetchClosure.cc b/src/libexpr/primops/fetchClosure.cc index 27147a5d1..5806b3ff9 100644 --- a/src/libexpr/primops/fetchClosure.cc +++ b/src/libexpr/primops/fetchClosure.cc @@ -27,7 +27,7 @@ static void runFetchClosureWithRewrite(EvalState & state, const PosIdx pos, Stor state.store->printStorePath(fromPath), state.store->printStorePath(rewrittenPath), state.store->printStorePath(*toPathMaybe)), - .errPos = state.positions[pos] + .pos = state.positions[pos] }); if (!toPathMaybe) throw Error({ @@ -36,7 +36,7 @@ static void runFetchClosureWithRewrite(EvalState & state, const PosIdx pos, Stor "Use this value for the 'toPath' attribute passed to 'fetchClosure'", state.store->printStorePath(fromPath), state.store->printStorePath(rewrittenPath)), - .errPos = state.positions[pos] + .pos = state.positions[pos] }); } @@ -54,7 +54,7 @@ static void runFetchClosureWithRewrite(EvalState & state, const PosIdx pos, Stor "The 'toPath' value '%s' is input-addressed, so it can't possibly be the result of rewriting to a content-addressed path.\n\n" "Set 'toPath' to an empty string to make Nix report the correct content-addressed path.", state.store->printStorePath(toPath)), - .errPos = state.positions[pos] + .pos = state.positions[pos] }); } @@ -80,7 +80,7 @@ static void runFetchClosureWithContentAddressedPath(EvalState & state, const Pos "to the 'fetchClosure' arguments.\n\n" "Note that to ensure authenticity input-addressed store paths, users must configure a trusted binary cache public key on their systems. 
This is not needed for content-addressed paths.", state.store->printStorePath(fromPath)), - .errPos = state.positions[pos] + .pos = state.positions[pos] }); } @@ -103,7 +103,7 @@ static void runFetchClosureWithInputAddressedPath(EvalState & state, const PosId "The store object referred to by 'fromPath' at '%s' is not input-addressed, but 'inputAddressed' is set to 'true'.\n\n" "Remove the 'inputAddressed' attribute (it defaults to 'false') to expect 'fromPath' to be content-addressed", state.store->printStorePath(fromPath)), - .errPos = state.positions[pos] + .pos = state.positions[pos] }); } @@ -154,14 +154,14 @@ static void prim_fetchClosure(EvalState & state, const PosIdx pos, Value * * arg else throw Error({ .msg = hintfmt("attribute '%s' isn't supported in call to 'fetchClosure'", attrName), - .errPos = state.positions[pos] + .pos = state.positions[pos] }); } if (!fromPath) throw Error({ .msg = hintfmt("attribute '%s' is missing in call to 'fetchClosure'", "fromPath"), - .errPos = state.positions[pos] + .pos = state.positions[pos] }); bool inputAddressed = inputAddressedMaybe.value_or(false); @@ -172,14 +172,14 @@ static void prim_fetchClosure(EvalState & state, const PosIdx pos, Value * * arg .msg = hintfmt("attribute '%s' is set to true, but '%s' is also set. 
Please remove one of them", "inputAddressed", "toPath"), - .errPos = state.positions[pos] + .pos = state.positions[pos] }); } if (!fromStoreUrl) throw Error({ .msg = hintfmt("attribute '%s' is missing in call to 'fetchClosure'", "fromStore"), - .errPos = state.positions[pos] + .pos = state.positions[pos] }); auto parsedURL = parseURL(*fromStoreUrl); @@ -189,13 +189,13 @@ static void prim_fetchClosure(EvalState & state, const PosIdx pos, Value * * arg !(getEnv("_NIX_IN_TEST").has_value() && parsedURL.scheme == "file")) throw Error({ .msg = hintfmt("'fetchClosure' only supports http:// and https:// stores"), - .errPos = state.positions[pos] + .pos = state.positions[pos] }); if (!parsedURL.query.empty()) throw Error({ .msg = hintfmt("'fetchClosure' does not support URL query parameters (in '%s')", *fromStoreUrl), - .errPos = state.positions[pos] + .pos = state.positions[pos] }); auto fromStore = openStore(parsedURL.to_string()); diff --git a/src/libexpr/primops/fetchMercurial.cc b/src/libexpr/primops/fetchMercurial.cc index 58fe6f173..bb029b5b3 100644 --- a/src/libexpr/primops/fetchMercurial.cc +++ b/src/libexpr/primops/fetchMercurial.cc @@ -38,17 +38,11 @@ static void prim_fetchMercurial(EvalState & state, const PosIdx pos, Value * * a else if (n == "name") name = state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the `name` attribute passed to builtins.fetchMercurial"); else - throw EvalError({ - .msg = hintfmt("unsupported argument '%s' to 'fetchMercurial'", state.symbols[attr.name]), - .errPos = state.positions[attr.pos] - }); + state.error("unsupported argument '%s' to 'fetchMercurial'", state.symbols[attr.name]).atPos(attr.pos).debugThrow(); } if (url.empty()) - throw EvalError({ - .msg = hintfmt("'url' argument required"), - .errPos = state.positions[pos] - }); + state.error("'url' argument required").atPos(pos).debugThrow(); } else url = state.coerceToString(pos, *args[0], context, diff --git a/src/libexpr/primops/fetchTree.cc 
b/src/libexpr/primops/fetchTree.cc index a943095bb..1997d5513 100644 --- a/src/libexpr/primops/fetchTree.cc +++ b/src/libexpr/primops/fetchTree.cc @@ -100,16 +100,14 @@ static void fetchTree( if (auto aType = args[0]->attrs->get(state.sType)) { if (type) - state.debugThrowLastTrace(EvalError({ - .msg = hintfmt("unexpected attribute 'type'"), - .errPos = state.positions[pos] - })); + state.error( + "unexpected attribute 'type'" + ).atPos(pos).debugThrow(); type = state.forceStringNoCtx(*aType->value, aType->pos, "while evaluating the `type` attribute passed to builtins.fetchTree"); } else if (!type) - state.debugThrowLastTrace(EvalError({ - .msg = hintfmt("attribute 'type' is missing in call to 'fetchTree'"), - .errPos = state.positions[pos] - })); + state.error( + "attribute 'type' is missing in call to 'fetchTree'" + ).atPos(pos).debugThrow(); attrs.emplace("type", type.value()); @@ -132,8 +130,8 @@ static void fetchTree( attrs.emplace(state.symbols[attr.name], printValueAsJSON(state, true, *attr.value, pos, context).dump()); } else - state.debugThrowLastTrace(TypeError("fetchTree argument '%s' is %s while a string, Boolean or integer is expected", - state.symbols[attr.name], showType(*attr.value))); + state.error("fetchTree argument '%s' is %s while a string, Boolean or integer is expected", + state.symbols[attr.name], showType(*attr.value)).debugThrow(); } if (params.isFetchGit && !attrs.contains("exportIgnore") && (!attrs.contains("submodules") || !*fetchers::maybeGetBoolAttr(attrs, "submodules"))) { @@ -142,10 +140,9 @@ static void fetchTree( if (!params.allowNameArgument) if (auto nameIter = attrs.find("name"); nameIter != attrs.end()) - state.debugThrowLastTrace(EvalError({ - .msg = hintfmt("attribute 'name' isn’t supported in call to 'fetchTree'"), - .errPos = state.positions[pos] - })); + state.error( + "attribute 'name' isn’t supported in call to 'fetchTree'" + ).atPos(pos).debugThrow(); input = fetchers::Input::fromAttrs(std::move(attrs)); } else { @@ 
-163,10 +160,9 @@ static void fetchTree( input = fetchers::Input::fromAttrs(std::move(attrs)); } else { if (!experimentalFeatureSettings.isEnabled(Xp::Flakes)) - state.debugThrowLastTrace(EvalError({ - .msg = hintfmt("passing a string argument to 'fetchTree' requires the 'flakes' experimental feature"), - .errPos = state.positions[pos] - })); + state.error( + "passing a string argument to 'fetchTree' requires the 'flakes' experimental feature" + ).atPos(pos).debugThrow(); input = fetchers::Input::fromURL(url); } } @@ -175,10 +171,14 @@ static void fetchTree( input = lookupInRegistries(state.store, input).first; if (evalSettings.pureEval && !input.isLocked()) { + auto fetcher = "fetchTree"; if (params.isFetchGit) - state.debugThrowLastTrace(EvalError("in pure evaluation mode, 'fetchGit' requires a locked input, at %s", state.positions[pos])); - else - state.debugThrowLastTrace(EvalError("in pure evaluation mode, 'fetchTree' requires a locked input, at %s", state.positions[pos])); + fetcher = "fetchGit"; + + state.error( + "in pure evaluation mode, %s requires a locked input", + fetcher + ).atPos(pos).debugThrow(); } state.checkURI(input.toURLString()); @@ -432,17 +432,13 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v else if (n == "name") name = state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the name of the content we should fetch"); else - state.debugThrowLastTrace(EvalError({ - .msg = hintfmt("unsupported argument '%s' to '%s'", n, who), - .errPos = state.positions[attr.pos] - })); + state.error("unsupported argument '%s' to '%s'", n, who) + .atPos(pos).debugThrow(); } if (!url) - state.debugThrowLastTrace(EvalError({ - .msg = hintfmt("'url' argument required"), - .errPos = state.positions[pos] - })); + state.error( + "'url' argument required").atPos(pos).debugThrow(); } else url = state.forceStringNoCtx(*args[0], pos, "while evaluating the url we should fetch"); @@ -455,7 +451,7 @@ static void fetch(EvalState 
& state, const PosIdx pos, Value * * args, Value & v name = baseNameOf(*url); if (evalSettings.pureEval && !expectedHash) - state.debugThrowLastTrace(EvalError("in pure evaluation mode, '%s' requires a 'sha256' argument", who)); + state.error("in pure evaluation mode, '%s' requires a 'sha256' argument", who).atPos(pos).debugThrow(); // early exit if pinned and already in the store if (expectedHash && expectedHash->algo == HashAlgorithm::SHA256) { @@ -484,9 +480,15 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v auto hash = unpack ? state.store->queryPathInfo(storePath)->narHash : hashFile(HashAlgorithm::SHA256, state.store->toRealPath(storePath)); - if (hash != *expectedHash) - state.debugThrowLastTrace(EvalError((unsigned int) 102, "hash mismatch in file downloaded from '%s':\n specified: %s\n got: %s", - *url, expectedHash->to_string(HashFormat::Nix32, true), hash.to_string(HashFormat::Nix32, true))); + if (hash != *expectedHash) { + state.error( + "hash mismatch in file downloaded from '%s':\n specified: %s\n got: %s", + *url, + expectedHash->to_string(HashFormat::Nix32, true), + hash.to_string(HashFormat::Nix32, true) + ).withExitStatus(102) + .debugThrow(); + } } state.allowAndSetStorePathString(storePath, v); diff --git a/src/libexpr/primops/fromTOML.cc b/src/libexpr/primops/fromTOML.cc index 2f4d4022e..94be7960a 100644 --- a/src/libexpr/primops/fromTOML.cc +++ b/src/libexpr/primops/fromTOML.cc @@ -83,10 +83,7 @@ static void prim_fromTOML(EvalState & state, const PosIdx pos, Value * * args, V try { visit(val, toml::parse(tomlStream, "fromTOML" /* the "filename" */)); } catch (std::exception & e) { // TODO: toml::syntax_error - throw EvalError({ - .msg = hintfmt("while parsing a TOML string: %s", e.what()), - .errPos = state.positions[pos] - }); + state.error("while parsing TOML: %s", e.what()).atPos(pos).debugThrow(); } } diff --git a/src/libexpr/value-to-json.cc b/src/libexpr/value-to-json.cc index 74b3ebf13..b2f116390 
100644 --- a/src/libexpr/value-to-json.cc +++ b/src/libexpr/value-to-json.cc @@ -80,7 +80,7 @@ json printValueAsJSON(EvalState & state, bool strict, try { out.push_back(printValueAsJSON(state, strict, *elem, pos, context, copyToStore)); } catch (Error & e) { - e.addTrace({}, + e.addTrace(state.positions[pos], hintfmt("while evaluating list element at index %1%", i)); throw; } @@ -99,13 +99,12 @@ json printValueAsJSON(EvalState & state, bool strict, case nThunk: case nFunction: - auto e = TypeError({ - .msg = hintfmt("cannot convert %1% to JSON", showType(v)), - .errPos = state.positions[v.determinePos(pos)] - }); - e.addTrace(state.positions[pos], hintfmt("message for the trace")); - state.debugThrowLastTrace(e); - throw e; + state.error( + "cannot convert %1% to JSON", + showType(v) + ) + .atPos(v.determinePos(pos)) + .debugThrow(); } return out; } @@ -119,7 +118,8 @@ void printValueAsJSON(EvalState & state, bool strict, json ExternalValueBase::printValueAsJSON(EvalState & state, bool strict, NixStringContext & context, bool copyToStore) const { - state.debugThrowLastTrace(TypeError("cannot convert %1% to JSON", showType())); + state.error("cannot convert %1% to JSON", showType()) + .debugThrow(); } diff --git a/src/libexpr/value.hh b/src/libexpr/value.hh index 214d52271..e7aea4949 100644 --- a/src/libexpr/value.hh +++ b/src/libexpr/value.hh @@ -105,7 +105,7 @@ class ExternalValueBase * Coerce the value to a string. Defaults to uncoercable, i.e. throws an * error. */ - virtual std::string coerceToString(const Pos & pos, NixStringContext & context, bool copyMore, bool copyToStore) const; + virtual std::string coerceToString(EvalState & state, const PosIdx & pos, NixStringContext & context, bool copyMore, bool copyToStore) const; /** * Compare to another value of the same type. 
Defaults to uncomparable, diff --git a/src/libmain/shared.cc b/src/libmain/shared.cc index 862ef355b..7b9b3c5b5 100644 --- a/src/libmain/shared.cc +++ b/src/libmain/shared.cc @@ -340,7 +340,7 @@ int handleExceptions(const std::string & programName, std::function fun) return 1; } catch (BaseError & e) { logError(e.info()); - return e.status; + return e.info().status; } catch (std::bad_alloc & e) { printError(error + "out of memory"); return 1; diff --git a/src/libstore/build/entry-points.cc b/src/libstore/build/entry-points.cc index 7f0a05d5d..d4bead28e 100644 --- a/src/libstore/build/entry-points.cc +++ b/src/libstore/build/entry-points.cc @@ -33,7 +33,7 @@ void Store::buildPaths(const std::vector & reqs, BuildMode buildMod } if (failed.size() == 1 && ex) { - ex->status = worker.failingExitStatus(); + ex->withExitStatus(worker.failingExitStatus()); throw std::move(*ex); } else if (!failed.empty()) { if (ex) logError(ex->info()); @@ -104,7 +104,7 @@ void Store::ensurePath(const StorePath & path) if (goal->exitCode != Goal::ecSuccess) { if (goal->ex) { - goal->ex->status = worker.failingExitStatus(); + goal->ex->withExitStatus(worker.failingExitStatus()); throw std::move(*goal->ex); } else throw Error(worker.failingExitStatus(), "path '%s' does not exist and cannot be created", printStorePath(path)); diff --git a/src/libstore/daemon.cc b/src/libstore/daemon.cc index 27ad14ed4..8db93fa39 100644 --- a/src/libstore/daemon.cc +++ b/src/libstore/daemon.cc @@ -119,7 +119,7 @@ struct TunnelLogger : public Logger if (GET_PROTOCOL_MINOR(clientVersion) >= 26) { to << STDERR_ERROR << *ex; } else { - to << STDERR_ERROR << ex->what() << ex->status; + to << STDERR_ERROR << ex->what() << ex->info().status; } } } diff --git a/src/libutil/error.cc b/src/libutil/error.cc index 1f0cb08c9..e4e50d73b 100644 --- a/src/libutil/error.cc +++ b/src/libutil/error.cc @@ -335,7 +335,7 @@ std::ostream & showErrorInfo(std::ostream & out, const ErrorInfo & einfo, bool s * try { * e->eval(*this, 
env, v); * if (v.type() != nAttrs) - * throwTypeError("expected a set but found %1%", v); + * error("expected a set but found %1%", v); * } catch (Error & e) { * e.addTrace(pos, errorCtx); * throw; @@ -349,7 +349,7 @@ std::ostream & showErrorInfo(std::ostream & out, const ErrorInfo & einfo, bool s * e->eval(*this, env, v); * try { * if (v.type() != nAttrs) - * throwTypeError("expected a set but found %1%", v); + * error("expected a set but found %1%", v); * } catch (Error & e) { * e.addTrace(pos, errorCtx); * throw; @@ -411,7 +411,7 @@ std::ostream & showErrorInfo(std::ostream & out, const ErrorInfo & einfo, bool s oss << einfo.msg << "\n"; - printPosMaybe(oss, "", einfo.errPos); + printPosMaybe(oss, "", einfo.pos); auto suggestions = einfo.suggestions.trim(); if (!suggestions.suggestions.empty()) { diff --git a/src/libutil/error.hh b/src/libutil/error.hh index 764fac1ce..9f9302020 100644 --- a/src/libutil/error.hh +++ b/src/libutil/error.hh @@ -84,9 +84,14 @@ inline bool operator>=(const Trace& lhs, const Trace& rhs); struct ErrorInfo { Verbosity level; hintformat msg; - std::shared_ptr errPos; + std::shared_ptr pos; std::list traces; + /** + * Exit status. + */ + unsigned int status = 1; + Suggestions suggestions; static std::optional programName; @@ -103,18 +108,21 @@ class BaseError : public std::exception protected: mutable ErrorInfo err; + /** + * Cached formatted contents of `err.msg`. + */ mutable std::optional what_; + /** + * Format `err.msg` and set `what_` to the resulting value. + */ const std::string & calcWhat() const; public: - unsigned int status = 1; // exit status - BaseError(const BaseError &) = default; template BaseError(unsigned int status, const Args & ... args) - : err { .level = lvlError, .msg = hintfmt(args...) 
} - , status(status) + : err { .level = lvlError, .msg = hintfmt(args...), .status = status } { } template @@ -149,6 +157,15 @@ public: const std::string & msg() const { return calcWhat(); } const ErrorInfo & info() const { calcWhat(); return err; } + void withExitStatus(unsigned int status) + { + err.status = status; + } + + void atPos(std::shared_ptr pos) { + err.pos = pos; + } + void pushTrace(Trace trace) { err.traces.push_front(trace); diff --git a/src/libutil/logging.cc b/src/libutil/logging.cc index d68ddacc0..89fbd194a 100644 --- a/src/libutil/logging.cc +++ b/src/libutil/logging.cc @@ -199,7 +199,7 @@ struct JSONLogger : Logger { json["level"] = ei.level; json["msg"] = oss.str(); json["raw_msg"] = ei.msg.str(); - to_json(json, ei.errPos); + to_json(json, ei.pos); if (loggerSettings.showTrace.get() && !ei.traces.empty()) { nlohmann::json traces = nlohmann::json::array(); diff --git a/src/nix-store/nix-store.cc b/src/nix-store/nix-store.cc index 40378e123..017818ed5 100644 --- a/src/nix-store/nix-store.cc +++ b/src/nix-store/nix-store.cc @@ -950,8 +950,8 @@ static void opServe(Strings opFlags, Strings opArgs) store->buildPaths(toDerivedPaths(paths)); out << 0; } catch (Error & e) { - assert(e.status); - out << e.status << e.msg(); + assert(e.info().status); + out << e.info().status << e.msg(); } break; } diff --git a/src/nix/eval.cc b/src/nix/eval.cc index a89fa7412..2e0837c8e 100644 --- a/src/nix/eval.cc +++ b/src/nix/eval.cc @@ -104,7 +104,7 @@ struct CmdEval : MixJSON, InstallableValueCommand, MixReadOnlyOption } } else - throw TypeError("value at '%s' is not a string or an attribute set", state->positions[pos]); + state->error("value at '%s' is not a string or an attribute set", state->positions[pos]).debugThrow(); }; recurse(*v, pos, *writeTo); diff --git a/src/nix/flake.cc b/src/nix/flake.cc index 0e34bd76a..646e4c831 100644 --- a/src/nix/flake.cc +++ b/src/nix/flake.cc @@ -848,10 +848,10 @@ struct CmdFlakeInitCommon : virtual Args, EvalCommand auto 
templateDir = templateDirAttr->getString(); if (!store->isInStore(templateDir)) - throw TypeError( + evalState->error( "'%s' was not found in the Nix store\n" "If you've set '%s' to a string, try using a path instead.", - templateDir, templateDirAttr->getAttrPathStr()); + templateDir, templateDirAttr->getAttrPathStr()).debugThrow(); std::vector changedFiles; std::vector conflictedFiles; @@ -1321,7 +1321,7 @@ struct CmdFlakeShow : FlakeCommand, MixJSON { auto aType = visitor.maybeGetAttr("type"); if (!aType || aType->getString() != "app") - throw EvalError("not an app definition"); + state->error("not an app definition").debugThrow(); if (json) { j.emplace("type", "app"); } else { diff --git a/tests/functional/fetchGit.sh b/tests/functional/fetchGit.sh index c6a482035..ea90f8ebe 100644 --- a/tests/functional/fetchGit.sh +++ b/tests/functional/fetchGit.sh @@ -67,7 +67,7 @@ path2=$(nix eval --raw --expr "(builtins.fetchGit { url = file://$repo; rev = \" [[ $(nix eval --raw --expr "builtins.readFile (fetchGit { url = file://$repo; rev = \"$rev2\"; } + \"/hello\")") = world ]] # But without a hash, it fails -expectStderr 1 nix eval --expr 'builtins.fetchGit "file:///foo"' | grepQuiet "'fetchGit' requires a locked input" +expectStderr 1 nix eval --expr 'builtins.fetchGit "file:///foo"' | grepQuiet "fetchGit requires a locked input" # Fetch again. This should be cached. 
mv $repo ${repo}-tmp @@ -208,7 +208,7 @@ path6=$(nix eval --impure --raw --expr "(builtins.fetchTree { type = \"git\"; ur [[ $path3 = $path6 ]] [[ $(nix eval --impure --expr "(builtins.fetchTree { type = \"git\"; url = \"file://$TEST_ROOT/shallow\"; ref = \"dev\"; shallow = true; }).revCount or 123") == 123 ]] -expectStderr 1 nix eval --expr 'builtins.fetchTree { type = "git"; url = "file:///foo"; }' | grepQuiet "'fetchTree' requires a locked input" +expectStderr 1 nix eval --expr 'builtins.fetchTree { type = "git"; url = "file:///foo"; }' | grepQuiet "fetchTree requires a locked input" # Explicit ref = "HEAD" should work, and produce the same outPath as without ref path7=$(nix eval --impure --raw --expr "(builtins.fetchGit { url = \"file://$repo\"; ref = \"HEAD\"; }).outPath") diff --git a/tests/functional/lang/eval-fail-attr-name-type.err.exp b/tests/functional/lang/eval-fail-attr-name-type.err.exp index c8d56ba7d..6848a35ed 100644 --- a/tests/functional/lang/eval-fail-attr-name-type.err.exp +++ b/tests/functional/lang/eval-fail-attr-name-type.err.exp @@ -14,3 +14,8 @@ error: 8| error: expected a string but found an integer: 1 + at /pwd/lang/eval-fail-attr-name-type.nix:7:17: + 6| in + 7| attrs.puppy.${key} + | ^ + 8| diff --git a/tests/functional/lang/eval-fail-fromTOML-timestamps.err.exp b/tests/functional/lang/eval-fail-fromTOML-timestamps.err.exp index 73f9df8cc..9bbb251e1 100644 --- a/tests/functional/lang/eval-fail-fromTOML-timestamps.err.exp +++ b/tests/functional/lang/eval-fail-fromTOML-timestamps.err.exp @@ -5,4 +5,4 @@ error: | ^ 2| key = "value" - error: while parsing a TOML string: Dates and times are not supported + error: while parsing TOML: Dates and times are not supported diff --git a/tests/functional/lang/eval-fail-toJSON.err.exp b/tests/functional/lang/eval-fail-toJSON.err.exp index 4f6003437..ad267711b 100644 --- a/tests/functional/lang/eval-fail-toJSON.err.exp +++ b/tests/functional/lang/eval-fail-toJSON.err.exp @@ -20,6 +20,11 @@ error: 3| 
true … while evaluating list element at index 3 + at /pwd/lang/eval-fail-toJSON.nix:2:3: + 1| builtins.toJSON { + 2| a.b = [ + | ^ + 3| true … while evaluating attribute 'c' at /pwd/lang/eval-fail-toJSON.nix:7:7: diff --git a/tests/functional/lang/eval-fail-using-set-as-attr-name.err.exp b/tests/functional/lang/eval-fail-using-set-as-attr-name.err.exp index 94784c651..4326c9650 100644 --- a/tests/functional/lang/eval-fail-using-set-as-attr-name.err.exp +++ b/tests/functional/lang/eval-fail-using-set-as-attr-name.err.exp @@ -7,3 +7,8 @@ error: 6| error: expected a string but found a set: { } + at /pwd/lang/eval-fail-using-set-as-attr-name.nix:5:10: + 4| in + 5| attr.${key} + | ^ + 6| diff --git a/tests/unit/libexpr/error_traces.cc b/tests/unit/libexpr/error_traces.cc index 5fca79304..d0d7ca79c 100644 --- a/tests/unit/libexpr/error_traces.cc +++ b/tests/unit/libexpr/error_traces.cc @@ -12,33 +12,33 @@ namespace nix { TEST_F(ErrorTraceTest, TraceBuilder) { ASSERT_THROW( - state.error("Not much").debugThrow(), + state.error("puppy").debugThrow(), EvalError ); ASSERT_THROW( - state.error("Not much").withTrace(noPos, "No more").debugThrow(), + state.error("puppy").withTrace(noPos, "doggy").debugThrow(), EvalError ); ASSERT_THROW( try { try { - state.error("Not much").withTrace(noPos, "No more").debugThrow(); + state.error("puppy").withTrace(noPos, "doggy").debugThrow(); } catch (Error & e) { - e.addTrace(state.positions[noPos], "Something", ""); + e.addTrace(state.positions[noPos], "beans", ""); throw; } } catch (BaseError & e) { ASSERT_EQ(PrintToString(e.info().msg), - PrintToString(hintfmt("Not much"))); + PrintToString(hintfmt("puppy"))); auto trace = e.info().traces.rbegin(); ASSERT_EQ(e.info().traces.size(), 2); ASSERT_EQ(PrintToString(trace->hint), - PrintToString(hintfmt("No more"))); + PrintToString(hintfmt("doggy"))); trace++; ASSERT_EQ(PrintToString(trace->hint), - PrintToString(hintfmt("Something"))); + PrintToString(hintfmt("beans"))); throw; } , EvalError @@ 
-47,12 +47,12 @@ namespace nix { TEST_F(ErrorTraceTest, NestedThrows) { try { - state.error("Not much").withTrace(noPos, "No more").debugThrow(); + state.error("puppy").withTrace(noPos, "doggy").debugThrow(); } catch (BaseError & e) { try { - state.error("Not much more").debugThrow(); + state.error("beans").debugThrow(); } catch (Error & e2) { - e.addTrace(state.positions[noPos], "Something", ""); + e.addTrace(state.positions[noPos], "beans2", ""); //e2.addTrace(state.positions[noPos], "Something", ""); ASSERT_TRUE(e.info().traces.size() == 2); ASSERT_TRUE(e2.info().traces.size() == 0); From 87dc4bc7d139a7eccb257e71558314a0d99e8d6a Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Thu, 1 Feb 2024 13:08:06 -0800 Subject: [PATCH 009/164] Attach positions to errors in `derivationStrict` --- src/libexpr/primops.cc | 33 +++++++++++++++++---------------- 1 file changed, 17 insertions(+), 16 deletions(-) diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index 1eec6f961..69f89e0e0 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -1085,9 +1085,10 @@ drvName, Bindings * attrs, Value & v) /* Check whether attributes should be passed as a JSON file. 
*/ using nlohmann::json; std::optional jsonObject; + auto pos = v.determinePos(noPos); auto attr = attrs->find(state.sStructuredAttrs); if (attr != attrs->end() && - state.forceBool(*attr->value, noPos, + state.forceBool(*attr->value, pos, "while evaluating the `__structuredAttrs` " "attribute passed to builtins.derivationStrict")) jsonObject = json::object(); @@ -1096,7 +1097,7 @@ drvName, Bindings * attrs, Value & v) bool ignoreNulls = false; attr = attrs->find(state.sIgnoreNulls); if (attr != attrs->end()) - ignoreNulls = state.forceBool(*attr->value, noPos, "while evaluating the `__ignoreNulls` attribute " "passed to builtins.derivationStrict"); + ignoreNulls = state.forceBool(*attr->value, pos, "while evaluating the `__ignoreNulls` attribute " "passed to builtins.derivationStrict"); /* Build the derivation expression by processing the attributes. */ Derivation drv; @@ -1160,16 +1161,16 @@ drvName, Bindings * attrs, Value & v) const std::string_view context_below(""); if (ignoreNulls) { - state.forceValue(*i->value, noPos); + state.forceValue(*i->value, pos); if (i->value->type() == nNull) continue; } - if (i->name == state.sContentAddressed && state.forceBool(*i->value, noPos, context_below)) { + if (i->name == state.sContentAddressed && state.forceBool(*i->value, pos, context_below)) { contentAddressed = true; experimentalFeatureSettings.require(Xp::CaDerivations); } - else if (i->name == state.sImpure && state.forceBool(*i->value, noPos, context_below)) { + else if (i->name == state.sImpure && state.forceBool(*i->value, pos, context_below)) { isImpure = true; experimentalFeatureSettings.require(Xp::ImpureDerivations); } @@ -1177,9 +1178,9 @@ drvName, Bindings * attrs, Value & v) /* The `args' attribute is special: it supplies the command-line arguments to the builder. 
*/ else if (i->name == state.sArgs) { - state.forceList(*i->value, noPos, context_below); + state.forceList(*i->value, pos, context_below); for (auto elem : i->value->listItems()) { - auto s = state.coerceToString(noPos, *elem, context, + auto s = state.coerceToString(pos, *elem, context, "while evaluating an element of the argument list", true).toOwned(); drv.args.push_back(s); @@ -1194,29 +1195,29 @@ drvName, Bindings * attrs, Value & v) if (i->name == state.sStructuredAttrs) continue; - (*jsonObject)[key] = printValueAsJSON(state, true, *i->value, noPos, context); + (*jsonObject)[key] = printValueAsJSON(state, true, *i->value, pos, context); if (i->name == state.sBuilder) - drv.builder = state.forceString(*i->value, context, noPos, context_below); + drv.builder = state.forceString(*i->value, context, pos, context_below); else if (i->name == state.sSystem) - drv.platform = state.forceStringNoCtx(*i->value, noPos, context_below); + drv.platform = state.forceStringNoCtx(*i->value, pos, context_below); else if (i->name == state.sOutputHash) - outputHash = state.forceStringNoCtx(*i->value, noPos, context_below); + outputHash = state.forceStringNoCtx(*i->value, pos, context_below); else if (i->name == state.sOutputHashAlgo) - outputHashAlgo = state.forceStringNoCtx(*i->value, noPos, context_below); + outputHashAlgo = state.forceStringNoCtx(*i->value, pos, context_below); else if (i->name == state.sOutputHashMode) - handleHashMode(state.forceStringNoCtx(*i->value, noPos, context_below)); + handleHashMode(state.forceStringNoCtx(*i->value, pos, context_below)); else if (i->name == state.sOutputs) { /* Require ‘outputs’ to be a list of strings. 
*/ - state.forceList(*i->value, noPos, context_below); + state.forceList(*i->value, pos, context_below); Strings ss; for (auto elem : i->value->listItems()) - ss.emplace_back(state.forceStringNoCtx(*elem, noPos, context_below)); + ss.emplace_back(state.forceStringNoCtx(*elem, pos, context_below)); handleOutputs(ss); } } else { - auto s = state.coerceToString(noPos, *i->value, context, context_below, true).toOwned(); + auto s = state.coerceToString(pos, *i->value, context, context_below, true).toOwned(); drv.env.emplace(key, s); if (i->name == state.sBuilder) drv.builder = std::move(s); else if (i->name == state.sSystem) drv.platform = std::move(s); From faaccecbc82d98288582bdc8ca96991796561371 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Thu, 1 Feb 2024 13:08:19 -0800 Subject: [PATCH 010/164] Remove `EXCEPTION_NEEDS_THROW_SPEC` We're on C++ 20 now, we don't need this --- src/libutil/error.hh | 15 --------------- 1 file changed, 15 deletions(-) diff --git a/src/libutil/error.hh b/src/libutil/error.hh index 9f9302020..4fb822843 100644 --- a/src/libutil/error.hh +++ b/src/libutil/error.hh @@ -31,15 +31,6 @@ #include #include -/* Before 4.7, gcc's std::exception uses empty throw() specifiers for - * its (virtual) destructor and what() in c++11 mode, in violation of spec - */ -#ifdef __GNUC__ -#if __GNUC__ < 4 || (__GNUC__ == 4 && __GNUC_MINOR__ < 7) -#define EXCEPTION_NEEDS_THROW_SPEC -#endif -#endif - namespace nix { @@ -147,13 +138,7 @@ public: : err(e) { } -#ifdef EXCEPTION_NEEDS_THROW_SPEC - ~BaseError() throw () { }; - const char * what() const throw () { return calcWhat().c_str(); } -#else const char * what() const noexcept override { return calcWhat().c_str(); } -#endif - const std::string & msg() const { return calcWhat(); } const ErrorInfo & info() const { calcWhat(); return err; } From 05535be03a1526061ea3a3ad25459c032e1f8f8c Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 2 Feb 2024 13:07:08 +0100 Subject: [PATCH 011/164] Fix test --- 
tests/functional/fetchurl.sh | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/tests/functional/fetchurl.sh b/tests/functional/fetchurl.sh index 578f5a34c..5259dd60e 100644 --- a/tests/functional/fetchurl.sh +++ b/tests/functional/fetchurl.sh @@ -80,4 +80,6 @@ test -x $outPath/fetchurl.sh test -L $outPath/symlink # Make sure that *not* passing a outputHash fails. -expectStderr 100 nix-build --expr '{ url }: builtins.derivation { name = "nix-cache-info"; system = "x86_64-linux"; builder = "builtin:fetchurl"; inherit url; outputHashMode = "flat"; }' --argstr url file://$narxz 2>&1 | grep 'must be a fixed-output derivation' +expected=100 +if [[ -v NIX_DAEMON_PACKAGE ]]; then expected=1; fi # work around the daemon not returning a 100 status correctly +expectStderr $expected nix-build --expr '{ url }: builtins.derivation { name = "nix-cache-info"; system = "x86_64-linux"; builder = "builtin:fetchurl"; inherit url; outputHashMode = "flat"; }' --argstr url file://$narxz 2>&1 | grep 'must be a fixed-output derivation' From e67458e5b821e0a3a6839f4637eb96ff873f64ed Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 2 Feb 2024 13:22:18 +0100 Subject: [PATCH 012/164] Better test fix --- tests/functional/fetchurl.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/functional/fetchurl.sh b/tests/functional/fetchurl.sh index 5259dd60e..5a05cc5e1 100644 --- a/tests/functional/fetchurl.sh +++ b/tests/functional/fetchurl.sh @@ -80,6 +80,7 @@ test -x $outPath/fetchurl.sh test -L $outPath/symlink # Make sure that *not* passing a outputHash fails. 
+requireDaemonNewerThan "2.20" expected=100 if [[ -v NIX_DAEMON_PACKAGE ]]; then expected=1; fi # work around the daemon not returning a 100 status correctly expectStderr $expected nix-build --expr '{ url }: builtins.derivation { name = "nix-cache-info"; system = "x86_64-linux"; builder = "builtin:fetchurl"; inherit url; outputHashMode = "flat"; }' --argstr url file://$narxz 2>&1 | grep 'must be a fixed-output derivation' From 7d7483cafce258edf405756c0dd42a34afe231b9 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Fri, 2 Feb 2024 17:38:46 -0800 Subject: [PATCH 013/164] Print positions in `--debugger`, instead of pointers --- src/libcmd/repl.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libcmd/repl.cc b/src/libcmd/repl.cc index d7d8f9819..d7af15153 100644 --- a/src/libcmd/repl.cc +++ b/src/libcmd/repl.cc @@ -232,7 +232,7 @@ static std::ostream & showDebugTrace(std::ostream & out, const PosTable & positi : positions[dt.expr.getPos() ? dt.expr.getPos() : noPos]; if (pos) { - out << pos; + out << *pos; if (auto loc = pos->getCodeLines()) { out << "\n"; printCodeLines(out, "", *pos, *loc); From 016db2d10fe00baa3c72ab6b5bbb480371df711f Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Fri, 2 Feb 2024 17:49:10 -0800 Subject: [PATCH 014/164] Add position information to `while evaluating the attribute` --- src/libexpr/eval.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 91fd3ddf8..9fee05290 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -1384,7 +1384,7 @@ void ExprSelect::eval(EvalState & state, Env & env, Value & v) state, *this, env, - state.positions[pos2], + state.positions[getPos()], "while evaluating the attribute '%1%'", showAttrPath(state, env, attrPath)) : nullptr; From 0127d54d5e86db9039e6322d482d26e66af8bd8a Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Fri, 2 Feb 2024 19:14:22 -0800 Subject: [PATCH 015/164] Enter debugger more reliably in 
let expressions and calls --- src/libexpr/eval.cc | 31 ++++++++++++++++++++++++++++--- 1 file changed, 28 insertions(+), 3 deletions(-) diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 91fd3ddf8..df40b18b8 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -846,20 +846,20 @@ void EvalState::addErrorTrace(Error & e, const PosIdx pos, const char * s, const e.addTrace(positions[pos], hintfmt(s, s2), frame); } +template static std::unique_ptr makeDebugTraceStacker( EvalState & state, Expr & expr, Env & env, std::shared_ptr && pos, - const char * s, - const std::string & s2) + const Args & ... formatArgs) { return std::make_unique(state, DebugTrace { .pos = std::move(pos), .expr = expr, .env = env, - .hint = hintfmt(s, s2), + .hint = hintfmt(formatArgs...), .isError = false }); } @@ -1322,6 +1322,19 @@ void ExprLet::eval(EvalState & state, Env & env, Value & v) for (auto & i : attrs->attrs) env2.values[displ++] = i.second.e->maybeThunk(state, i.second.inherited ? env : env2); + auto dts = state.debugRepl + ? makeDebugTraceStacker( + state, + *this, + env2, + getPos() + ? std::make_shared(state.positions[getPos()]) + : nullptr, + "while evaluating a '%1%' expression", + "let" + ) + : nullptr; + body->eval(state, env2, v); } @@ -1718,6 +1731,18 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value & void ExprCall::eval(EvalState & state, Env & env, Value & v) { + auto dts = state.debugRepl + ? makeDebugTraceStacker( + state, + *this, + env, + getPos() + ? 
std::make_shared(state.positions[getPos()]) + : nullptr, + "while calling a function" + ) + : nullptr; + Value vFun; fun->eval(state, env, vFun); From 36dfac75601b246dc22a6a27ee793dd9ef0b8c0e Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Fri, 2 Feb 2024 19:31:18 -0800 Subject: [PATCH 016/164] Expose locals from `let` expressions to the debugger --- src/libexpr/eval.cc | 13 +++++++++++++ src/libexpr/nixexpr.cc | 9 +++------ 2 files changed, 16 insertions(+), 6 deletions(-) diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 91fd3ddf8..4241dca6a 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -1322,6 +1322,19 @@ void ExprLet::eval(EvalState & state, Env & env, Value & v) for (auto & i : attrs->attrs) env2.values[displ++] = i.second.e->maybeThunk(state, i.second.inherited ? env : env2); + auto dts = state.debugRepl + ? makeDebugTraceStacker( + state, + *this, + env2, + getPos() + ? std::make_shared(state.positions[getPos()]) + : nullptr, + "while evaluating a '%1%' expression", + "let" + ) + : nullptr; + body->eval(state, env2, v); } diff --git a/src/libexpr/nixexpr.cc b/src/libexpr/nixexpr.cc index 6fe4ba81b..492e131d0 100644 --- a/src/libexpr/nixexpr.cc +++ b/src/libexpr/nixexpr.cc @@ -409,9 +409,6 @@ void ExprCall::bindVars(EvalState & es, const std::shared_ptr & void ExprLet::bindVars(EvalState & es, const std::shared_ptr & env) { - if (es.debugRepl) - es.exprEnvs.insert(std::make_pair(this, env)); - auto newEnv = std::make_shared(nullptr, env.get(), attrs->attrs.size()); Displacement displ = 0; @@ -423,6 +420,9 @@ void ExprLet::bindVars(EvalState & es, const std::shared_ptr & for (auto & i : attrs->attrs) i.second.e->bindVars(es, i.second.inherited ? 
env : newEnv); + if (es.debugRepl) + es.exprEnvs.insert(std::make_pair(this, newEnv)); + body->bindVars(es, newEnv); } @@ -447,9 +447,6 @@ void ExprWith::bindVars(EvalState & es, const std::shared_ptr & break; } - if (es.debugRepl) - es.exprEnvs.insert(std::make_pair(this, env)); - attrs->bindVars(es, env); auto newEnv = std::make_shared(this, env.get()); body->bindVars(es, newEnv); From 6414cd259e7f271e0e7141866cbc79da7f589c93 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Fri, 2 Feb 2024 19:58:35 -0800 Subject: [PATCH 017/164] Reduce visual clutter in the debugger --- src/libcmd/repl.cc | 15 +++++++++++++-- src/libexpr/eval.cc | 4 +--- 2 files changed, 14 insertions(+), 5 deletions(-) diff --git a/src/libcmd/repl.cc b/src/libcmd/repl.cc index d7d8f9819..5b4d3f9d5 100644 --- a/src/libcmd/repl.cc +++ b/src/libcmd/repl.cc @@ -243,10 +243,21 @@ static std::ostream & showDebugTrace(std::ostream & out, const PosTable & positi return out; } +static bool isFirstRepl = true; + void NixRepl::mainLoop() { - std::string error = ANSI_RED "error:" ANSI_NORMAL " "; - notice("Welcome to Nix " + nixVersion + ". Type :? for help.\n"); + if (isFirstRepl) { + std::string_view debuggerNotice = ""; + if (state->debugRepl) { + debuggerNotice = " debugger"; + } + notice("Nix %1%%2%\nType :? for help.", nixVersion, debuggerNotice); + } + + if (isFirstRepl) { + isFirstRepl = false; + } loadFiles(); diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 91fd3ddf8..dc2579dfa 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -821,12 +821,10 @@ void EvalState::runDebugRepl(const Error * error, const Env & env, const Expr & if (error) { - printError("%s\n\n", error->what()); + printError("%s\n", error->what()); if (trylevel > 0 && error->info().level != lvlInfo) printError("This exception occurred in a 'tryEval' call. 
Use " ANSI_GREEN "--ignore-try" ANSI_NORMAL " to skip these.\n"); - - printError(ANSI_BOLD "Starting REPL to allow you to inspect the current state of the evaluator.\n" ANSI_NORMAL); } auto se = getStaticEnv(expr); From ec5cc1026db61d4c43c89ffdd8a71ed62cfb842d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=B6ren=20Tempel?= Date: Sun, 4 Feb 2024 00:47:47 +0100 Subject: [PATCH 018/164] absPath: Explicitly check if path is empty before accessing it It is entirely possible for the path to be an empty string and many unit tests actually pass it as an empty string (e.g. both_roundrip or turnsEmptyPathIntoCWD). In this case, without this patch, absPath will perform a one-byte out-of-bounds access. This was discovered while enabling the nix test suite on Alpine where we compile all software with `-D_GLIBCXX_ASSERTIONS=1`, thus resulting in a test failure on Alpine. --- src/libutil/file-system.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libutil/file-system.cc b/src/libutil/file-system.cc index cf8a6d967..9fa1f62df 100644 --- a/src/libutil/file-system.cc +++ b/src/libutil/file-system.cc @@ -25,7 +25,7 @@ Path absPath(PathView path, std::optional dir, bool resolveSymlinks) { std::string scratch; - if (path[0] != '/') { + if (path.empty() || path[0] != '/') { // In this case we need to call `canonPath` on a newly-created // string. We set `scratch` to that string first, and then set // `path` to `scratch`. This ensures the newly-created string From a7939a6c2aad1bec454996d553148d2ba351586c Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Sat, 3 Feb 2024 19:16:30 -0800 Subject: [PATCH 019/164] Rename `yellowtxt` -> `magentatxt` `yellowtxt` wraps its value with `ANSI_WARNING`, but `ANSI_WARNING` has been equal to `ANSI_MAGENTA` for a long time. Now the name is updated. 
--- src/libstore/build/derivation-goal.cc | 6 +++--- src/libstore/build/local-derivation-goal.cc | 2 +- src/libutil/fmt.hh | 12 +++++------- 3 files changed, 9 insertions(+), 11 deletions(-) diff --git a/src/libstore/build/derivation-goal.cc b/src/libstore/build/derivation-goal.cc index 00cbf4228..454c35763 100644 --- a/src/libstore/build/derivation-goal.cc +++ b/src/libstore/build/derivation-goal.cc @@ -708,7 +708,7 @@ void DerivationGoal::tryToBuild() if (!outputLocks.lockPaths(lockFiles, "", false)) { if (!actLock) actLock = std::make_unique(*logger, lvlWarn, actBuildWaiting, - fmt("waiting for lock on %s", yellowtxt(showPaths(lockFiles)))); + fmt("waiting for lock on %s", magentatxt(showPaths(lockFiles)))); worker.waitForAWhile(shared_from_this()); return; } @@ -762,7 +762,7 @@ void DerivationGoal::tryToBuild() the wake-up timeout expires. */ if (!actLock) actLock = std::make_unique(*logger, lvlWarn, actBuildWaiting, - fmt("waiting for a machine to build '%s'", yellowtxt(worker.store.printStorePath(drvPath)))); + fmt("waiting for a machine to build '%s'", magentatxt(worker.store.printStorePath(drvPath)))); worker.waitForAWhile(shared_from_this()); outputLocks.unlock(); return; @@ -987,7 +987,7 @@ void DerivationGoal::buildDone() diskFull |= cleanupDecideWhetherDiskFull(); auto msg = fmt("builder for '%s' %s", - yellowtxt(worker.store.printStorePath(drvPath)), + magentatxt(worker.store.printStorePath(drvPath)), statusToString(status)); if (!logger->isVerbose() && !logTail.empty()) { diff --git a/src/libstore/build/local-derivation-goal.cc b/src/libstore/build/local-derivation-goal.cc index 2ba8be7d6..ce8943efe 100644 --- a/src/libstore/build/local-derivation-goal.cc +++ b/src/libstore/build/local-derivation-goal.cc @@ -232,7 +232,7 @@ void LocalDerivationGoal::tryLocalBuild() if (!buildUser) { if (!actLock) actLock = std::make_unique(*logger, lvlWarn, actBuildWaiting, - fmt("waiting for a free build user ID for '%s'", 
yellowtxt(worker.store.printStorePath(drvPath)))); + fmt("waiting for a free build user ID for '%s'", magentatxt(worker.store.printStorePath(drvPath)))); worker.waitForAWhile(shared_from_this()); return; } diff --git a/src/libutil/fmt.hh b/src/libutil/fmt.hh index ac72e47fb..6430c7707 100644 --- a/src/libutil/fmt.hh +++ b/src/libutil/fmt.hh @@ -63,19 +63,17 @@ inline std::string fmt(const std::string & fs, const Args & ... args) return f.str(); } -// ----------------------------------------------------------------------------- // format function for hints in errors. same as fmt, except templated values -// are always in yellow. - +// are always in magenta. template -struct yellowtxt +struct magentatxt { - yellowtxt(const T &s) : value(s) {} + magentatxt(const T &s) : value(s) {} const T & value; }; template -std::ostream & operator<<(std::ostream & out, const yellowtxt & y) +std::ostream & operator<<(std::ostream & out, const magentatxt & y) { return out << ANSI_WARNING << y.value << ANSI_NORMAL; } @@ -114,7 +112,7 @@ public: template hintformat & operator%(const T & value) { - fmt % yellowtxt(value); + fmt % magentatxt(value); return *this; } From a7927abdc165c0ed6c55565b333fd4fadcdf3417 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Sat, 3 Feb 2024 19:18:42 -0800 Subject: [PATCH 020/164] Catch `Error`, not `BaseError` in `ValuePrinter` `BaseError` includes `Interrupt`. We probably don't want the value printer to tell you you pressed Ctrl-C while it was working. 
--- src/libexpr/print.cc | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/libexpr/print.cc b/src/libexpr/print.cc index 702e4bfe8..e1cb3f0cb 100644 --- a/src/libexpr/print.cc +++ b/src/libexpr/print.cc @@ -255,7 +255,7 @@ private: output << "»"; if (options.ansiColors) output << ANSI_NORMAL; - } catch (BaseError & e) { + } catch (Error & e) { printError_(e); } } @@ -405,7 +405,7 @@ private: output << ANSI_NORMAL; } - void printError_(BaseError & e) + void printError_(Error & e) { if (options.ansiColors) output << ANSI_RED; @@ -422,7 +422,7 @@ private: if (options.force) { try { state.forceValue(v, v.determinePos(noPos)); - } catch (BaseError & e) { + } catch (Error & e) { printError_(e); return; } From c5d525cd8430f31e38128acb3b483cbf17f2f977 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Sat, 3 Feb 2024 19:19:23 -0800 Subject: [PATCH 021/164] Print error messages but not traces MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This makes output of values that include errors much cleaner. Before: ``` nix-repl> { err = builtins.throw "uh oh!"; } { err = «error: … while calling the 'throw' builtin at «string»:1:9: 1| { err = builtins.throw "uh oh!"; } | ^ error: uh oh!»; } ``` After: ``` nix-repl> { err = builtins.throw "uh oh!"; } { err = «error: uh oh!»; } ``` But if the whole expression throws an error, source locations and (if applicable) a stack trace are printed, like you'd expect: ``` nix-repl> builtins.throw "uh oh!" error: … while calling the 'throw' builtin at «string»:1:1: 1| builtins.throw "uh oh!" | ^ error: uh oh! 
``` --- src/libexpr/print.cc | 2 +- tests/unit/libexpr/value/print.cc | 44 +++---------------------------- 2 files changed, 5 insertions(+), 41 deletions(-) diff --git a/src/libexpr/print.cc b/src/libexpr/print.cc index 702e4bfe8..f4b13019e 100644 --- a/src/libexpr/print.cc +++ b/src/libexpr/print.cc @@ -409,7 +409,7 @@ private: { if (options.ansiColors) output << ANSI_RED; - output << "«" << e.msg() << "»"; + output << "«error: " << filterANSIEscapes(e.info().msg.str(), true) << "»"; if (options.ansiColors) output << ANSI_NORMAL; } diff --git a/tests/unit/libexpr/value/print.cc b/tests/unit/libexpr/value/print.cc index c4264a38d..c1de3a6a9 100644 --- a/tests/unit/libexpr/value/print.cc +++ b/tests/unit/libexpr/value/print.cc @@ -460,19 +460,7 @@ TEST_F(ValuePrintingTests, ansiColorsError) test(vError, ANSI_RED - "«" - ANSI_RED - "error:" - ANSI_NORMAL - "\n … while calling the '" - ANSI_MAGENTA - "throw" - ANSI_NORMAL - "' builtin\n\n " - ANSI_RED - "error:" - ANSI_NORMAL - " uh oh!»" + "«error: uh oh!»" ANSI_NORMAL, PrintOptions { .ansiColors = true, @@ -501,19 +489,7 @@ TEST_F(ValuePrintingTests, ansiColorsDerivationError) test(vAttrs, "{ drvPath = " ANSI_RED - "«" - ANSI_RED - "error:" - ANSI_NORMAL - "\n … while calling the '" - ANSI_MAGENTA - "throw" - ANSI_NORMAL - "' builtin\n\n " - ANSI_RED - "error:" - ANSI_NORMAL - " uh oh!»" + "«error: uh oh!»" ANSI_NORMAL "; type = " ANSI_MAGENTA @@ -527,19 +503,7 @@ TEST_F(ValuePrintingTests, ansiColorsDerivationError) test(vAttrs, ANSI_RED - "«" - ANSI_RED - "error:" - ANSI_NORMAL - "\n … while calling the '" - ANSI_MAGENTA - "throw" - ANSI_NORMAL - "' builtin\n\n " - ANSI_RED - "error:" - ANSI_NORMAL - " uh oh!»" + "«error: uh oh!»" ANSI_NORMAL, PrintOptions { .ansiColors = true, @@ -560,7 +524,7 @@ TEST_F(ValuePrintingTests, ansiColorsAssert) state.mkThunk_(v, &expr); test(v, - ANSI_RED "«" ANSI_RED "error:" ANSI_NORMAL " assertion '" ANSI_MAGENTA "false" ANSI_NORMAL "' failed»" ANSI_NORMAL, + ANSI_RED "«error: 
assertion 'false' failed»" ANSI_NORMAL, PrintOptions { .ansiColors = true, .force = true From 9646d62b0c3b1313565124a304ddc4057700ab13 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Sat, 3 Feb 2024 19:21:20 -0800 Subject: [PATCH 022/164] Don't print values in magenta This fixes the opening bracket of lists/attrsets being printed in magenta, unlike the closing bracket. https://github.com/NixOS/nix/pull/9753#issuecomment-1904616088 --- src/libexpr/print.cc | 7 + src/libexpr/print.hh | 10 ++ tests/unit/libexpr/error_traces.cc | 228 ++++++++++++++--------------- 3 files changed, 131 insertions(+), 114 deletions(-) diff --git a/src/libexpr/print.cc b/src/libexpr/print.cc index 702e4bfe8..277c454d7 100644 --- a/src/libexpr/print.cc +++ b/src/libexpr/print.cc @@ -511,4 +511,11 @@ std::ostream & operator<<(std::ostream & output, const ValuePrinter & printer) return output; } +template<> +hintformat & hintformat::operator%(const ValuePrinter & value) +{ + fmt % value; + return *this; +} + } diff --git a/src/libexpr/print.hh b/src/libexpr/print.hh index a8300264a..a542bc7b1 100644 --- a/src/libexpr/print.hh +++ b/src/libexpr/print.hh @@ -9,6 +9,7 @@ #include +#include "fmt.hh" #include "print-options.hh" namespace nix { @@ -78,4 +79,13 @@ public: }; std::ostream & operator<<(std::ostream & output, const ValuePrinter & printer); + + +/** + * `ValuePrinter` does its own ANSI formatting, so we don't color it + * magenta. 
+ */ +template<> +hintformat & hintformat::operator%(const ValuePrinter & value); + } diff --git a/tests/unit/libexpr/error_traces.cc b/tests/unit/libexpr/error_traces.cc index 5fca79304..2f4c9e60d 100644 --- a/tests/unit/libexpr/error_traces.cc +++ b/tests/unit/libexpr/error_traces.cc @@ -105,7 +105,7 @@ namespace nix { TEST_F(ErrorTraceTest, genericClosure) { ASSERT_TRACE2("genericClosure 1", TypeError, - hintfmt("expected a set but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a set but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.genericClosure")); ASSERT_TRACE2("genericClosure {}", @@ -115,22 +115,22 @@ namespace nix { ASSERT_TRACE2("genericClosure { startSet = 1; }", TypeError, - hintfmt("expected a list but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a list but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the 'startSet' attribute passed as argument to builtins.genericClosure")); ASSERT_TRACE2("genericClosure { startSet = [{ key = 1;}]; operator = true; }", TypeError, - hintfmt("expected a function but found %s: %s", "a Boolean", ANSI_CYAN "true" ANSI_NORMAL), + hintfmt("expected a function but found %s: %s", "a Boolean", normaltxt(ANSI_CYAN "true" ANSI_NORMAL)), hintfmt("while evaluating the 'operator' attribute passed as argument to builtins.genericClosure")); ASSERT_TRACE2("genericClosure { startSet = [{ key = 1;}]; operator = item: true; }", TypeError, - hintfmt("expected a list but found %s: %s", "a Boolean", ANSI_CYAN "true" ANSI_NORMAL), + hintfmt("expected a list but found %s: %s", "a Boolean", normaltxt(ANSI_CYAN "true" ANSI_NORMAL)), hintfmt("while evaluating the return value of the `operator` passed to builtins.genericClosure")); ASSERT_TRACE2("genericClosure { startSet = [{ key = 1;}]; operator = item: [ true ]; }", TypeError, - hintfmt("expected a 
set but found %s: %s", "a Boolean", ANSI_CYAN "true" ANSI_NORMAL), + hintfmt("expected a set but found %s: %s", "a Boolean", normaltxt(ANSI_CYAN "true" ANSI_NORMAL)), hintfmt("while evaluating one of the elements generated by (or initially passed to) builtins.genericClosure")); ASSERT_TRACE2("genericClosure { startSet = [{ key = 1;}]; operator = item: [ {} ]; }", @@ -145,7 +145,7 @@ namespace nix { ASSERT_TRACE2("genericClosure { startSet = [ true ]; operator = item: [{ key = ''a''; }]; }", TypeError, - hintfmt("expected a set but found %s: %s", "a Boolean", ANSI_CYAN "true" ANSI_NORMAL), + hintfmt("expected a set but found %s: %s", "a Boolean", normaltxt(ANSI_CYAN "true" ANSI_NORMAL)), hintfmt("while evaluating one of the elements generated by (or initially passed to) builtins.genericClosure")); } @@ -154,12 +154,12 @@ namespace nix { TEST_F(ErrorTraceTest, replaceStrings) { ASSERT_TRACE2("replaceStrings 0 0 {}", TypeError, - hintfmt("expected a list but found %s: %s", "an integer", ANSI_CYAN "0" ANSI_NORMAL), + hintfmt("expected a list but found %s: %s", "an integer", normaltxt(ANSI_CYAN "0" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.replaceStrings")); ASSERT_TRACE2("replaceStrings [] 0 {}", TypeError, - hintfmt("expected a list but found %s: %s", "an integer", ANSI_CYAN "0" ANSI_NORMAL), + hintfmt("expected a list but found %s: %s", "an integer", normaltxt(ANSI_CYAN "0" ANSI_NORMAL)), hintfmt("while evaluating the second argument passed to builtins.replaceStrings")); ASSERT_TRACE1("replaceStrings [ 0 ] [] {}", @@ -168,17 +168,17 @@ namespace nix { ASSERT_TRACE2("replaceStrings [ 1 ] [ \"new\" ] {}", TypeError, - hintfmt("expected a string but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a string but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating one of the strings to replace passed to builtins.replaceStrings")); ASSERT_TRACE2("replaceStrings [ 
\"oo\" ] [ true ] \"foo\"", TypeError, - hintfmt("expected a string but found %s: %s", "a Boolean", ANSI_CYAN "true" ANSI_NORMAL), + hintfmt("expected a string but found %s: %s", "a Boolean", normaltxt(ANSI_CYAN "true" ANSI_NORMAL)), hintfmt("while evaluating one of the replacement strings passed to builtins.replaceStrings")); ASSERT_TRACE2("replaceStrings [ \"old\" ] [ \"new\" ] {}", TypeError, - hintfmt("expected a string but found %s: %s", "a set", "{ }"), + hintfmt("expected a string but found %s: %s", "a set", normaltxt("{ }")), hintfmt("while evaluating the third argument passed to builtins.replaceStrings")); } @@ -243,7 +243,7 @@ namespace nix { TEST_F(ErrorTraceTest, ceil) { ASSERT_TRACE2("ceil \"foo\"", TypeError, - hintfmt("expected a float but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), + hintfmt("expected a float but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.ceil")); } @@ -252,7 +252,7 @@ namespace nix { TEST_F(ErrorTraceTest, floor) { ASSERT_TRACE2("floor \"foo\"", TypeError, - hintfmt("expected a float but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), + hintfmt("expected a float but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.floor")); } @@ -265,7 +265,7 @@ namespace nix { TEST_F(ErrorTraceTest, getEnv) { ASSERT_TRACE2("getEnv [ ]", TypeError, - hintfmt("expected a string but found %s: %s", "a list", "[ ]"), + hintfmt("expected a string but found %s: %s", "a list", normaltxt("[ ]")), hintfmt("while evaluating the first argument passed to builtins.getEnv")); } @@ -286,7 +286,7 @@ namespace nix { TEST_F(ErrorTraceTest, placeholder) { ASSERT_TRACE2("placeholder []", TypeError, - hintfmt("expected a string but found %s: %s", "a list", "[ ]"), + hintfmt("expected a string but found %s: %s", "a list", normaltxt("[ ]")), 
hintfmt("while evaluating the first argument passed to builtins.placeholder")); } @@ -295,7 +295,7 @@ namespace nix { TEST_F(ErrorTraceTest, toPath) { ASSERT_TRACE2("toPath []", TypeError, - hintfmt("cannot coerce %s to a string: %s", "a list", "[ ]"), + hintfmt("cannot coerce %s to a string: %s", "a list", normaltxt("[ ]")), hintfmt("while evaluating the first argument passed to builtins.toPath")); ASSERT_TRACE2("toPath \"foo\"", @@ -309,7 +309,7 @@ namespace nix { TEST_F(ErrorTraceTest, storePath) { ASSERT_TRACE2("storePath true", TypeError, - hintfmt("cannot coerce %s to a string: %s", "a Boolean", ANSI_CYAN "true" ANSI_NORMAL), + hintfmt("cannot coerce %s to a string: %s", "a Boolean", normaltxt(ANSI_CYAN "true" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to 'builtins.storePath'")); } @@ -318,7 +318,7 @@ namespace nix { TEST_F(ErrorTraceTest, pathExists) { ASSERT_TRACE2("pathExists []", TypeError, - hintfmt("cannot coerce %s to a string: %s", "a list", "[ ]"), + hintfmt("cannot coerce %s to a string: %s", "a list", normaltxt("[ ]")), hintfmt("while realising the context of a path")); ASSERT_TRACE2("pathExists \"zorglub\"", @@ -332,7 +332,7 @@ namespace nix { TEST_F(ErrorTraceTest, baseNameOf) { ASSERT_TRACE2("baseNameOf []", TypeError, - hintfmt("cannot coerce %s to a string: %s", "a list", "[ ]"), + hintfmt("cannot coerce %s to a string: %s", "a list", normaltxt("[ ]")), hintfmt("while evaluating the first argument passed to builtins.baseNameOf")); } @@ -377,7 +377,7 @@ namespace nix { TEST_F(ErrorTraceTest, filterSource) { ASSERT_TRACE2("filterSource [] []", TypeError, - hintfmt("cannot coerce %s to a string: %s", "a list", "[ ]"), + hintfmt("cannot coerce %s to a string: %s", "a list", normaltxt("[ ]")), hintfmt("while evaluating the second argument (the path to filter) passed to 'builtins.filterSource'")); ASSERT_TRACE2("filterSource [] \"foo\"", @@ -387,7 +387,7 @@ namespace nix { ASSERT_TRACE2("filterSource [] ./.", TypeError, - 
hintfmt("expected a function but found %s: %s", "a list", "[ ]"), + hintfmt("expected a function but found %s: %s", "a list", normaltxt("[ ]")), hintfmt("while evaluating the first argument passed to builtins.filterSource")); // Usupported by store "dummy" @@ -412,7 +412,7 @@ namespace nix { TEST_F(ErrorTraceTest, attrNames) { ASSERT_TRACE2("attrNames []", TypeError, - hintfmt("expected a set but found %s: %s", "a list", "[ ]"), + hintfmt("expected a set but found %s: %s", "a list", normaltxt("[ ]")), hintfmt("while evaluating the argument passed to builtins.attrNames")); } @@ -421,7 +421,7 @@ namespace nix { TEST_F(ErrorTraceTest, attrValues) { ASSERT_TRACE2("attrValues []", TypeError, - hintfmt("expected a set but found %s: %s", "a list", "[ ]"), + hintfmt("expected a set but found %s: %s", "a list", normaltxt("[ ]")), hintfmt("while evaluating the argument passed to builtins.attrValues")); } @@ -430,12 +430,12 @@ namespace nix { TEST_F(ErrorTraceTest, getAttr) { ASSERT_TRACE2("getAttr [] []", TypeError, - hintfmt("expected a string but found %s: %s", "a list", "[ ]"), + hintfmt("expected a string but found %s: %s", "a list", normaltxt("[ ]")), hintfmt("while evaluating the first argument passed to builtins.getAttr")); ASSERT_TRACE2("getAttr \"foo\" []", TypeError, - hintfmt("expected a set but found %s: %s", "a list", "[ ]"), + hintfmt("expected a set but found %s: %s", "a list", normaltxt("[ ]")), hintfmt("while evaluating the second argument passed to builtins.getAttr")); ASSERT_TRACE2("getAttr \"foo\" {}", @@ -453,12 +453,12 @@ namespace nix { TEST_F(ErrorTraceTest, hasAttr) { ASSERT_TRACE2("hasAttr [] []", TypeError, - hintfmt("expected a string but found %s: %s", "a list", "[ ]"), + hintfmt("expected a string but found %s: %s", "a list", normaltxt("[ ]")), hintfmt("while evaluating the first argument passed to builtins.hasAttr")); ASSERT_TRACE2("hasAttr \"foo\" []", TypeError, - hintfmt("expected a set but found %s: %s", "a list", "[ ]"), + 
hintfmt("expected a set but found %s: %s", "a list", normaltxt("[ ]")), hintfmt("while evaluating the second argument passed to builtins.hasAttr")); } @@ -471,17 +471,17 @@ namespace nix { TEST_F(ErrorTraceTest, removeAttrs) { ASSERT_TRACE2("removeAttrs \"\" \"\"", TypeError, - hintfmt("expected a set but found %s: %s", "a string", ANSI_MAGENTA "\"\"" ANSI_NORMAL), + hintfmt("expected a set but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"\"" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.removeAttrs")); ASSERT_TRACE2("removeAttrs \"\" [ 1 ]", TypeError, - hintfmt("expected a set but found %s: %s", "a string", ANSI_MAGENTA "\"\"" ANSI_NORMAL), + hintfmt("expected a set but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"\"" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.removeAttrs")); ASSERT_TRACE2("removeAttrs \"\" [ \"1\" ]", TypeError, - hintfmt("expected a set but found %s: %s", "a string", ANSI_MAGENTA "\"\"" ANSI_NORMAL), + hintfmt("expected a set but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"\"" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.removeAttrs")); } @@ -490,12 +490,12 @@ namespace nix { TEST_F(ErrorTraceTest, listToAttrs) { ASSERT_TRACE2("listToAttrs 1", TypeError, - hintfmt("expected a list but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a list but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the argument passed to builtins.listToAttrs")); ASSERT_TRACE2("listToAttrs [ 1 ]", TypeError, - hintfmt("expected a set but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a set but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating an element of the list passed to builtins.listToAttrs")); ASSERT_TRACE2("listToAttrs [ {} ]", @@ -505,7 +505,7 @@ namespace nix { ASSERT_TRACE2("listToAttrs [ 
{ name = 1; } ]", TypeError, - hintfmt("expected a string but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a string but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the `name` attribute of an element of the list passed to builtins.listToAttrs")); ASSERT_TRACE2("listToAttrs [ { name = \"foo\"; } ]", @@ -519,12 +519,12 @@ namespace nix { TEST_F(ErrorTraceTest, intersectAttrs) { ASSERT_TRACE2("intersectAttrs [] []", TypeError, - hintfmt("expected a set but found %s: %s", "a list", "[ ]"), + hintfmt("expected a set but found %s: %s", "a list", normaltxt("[ ]")), hintfmt("while evaluating the first argument passed to builtins.intersectAttrs")); ASSERT_TRACE2("intersectAttrs {} []", TypeError, - hintfmt("expected a set but found %s: %s", "a list", "[ ]"), + hintfmt("expected a set but found %s: %s", "a list", normaltxt("[ ]")), hintfmt("while evaluating the second argument passed to builtins.intersectAttrs")); } @@ -533,22 +533,22 @@ namespace nix { TEST_F(ErrorTraceTest, catAttrs) { ASSERT_TRACE2("catAttrs [] {}", TypeError, - hintfmt("expected a string but found %s: %s", "a list", "[ ]"), + hintfmt("expected a string but found %s: %s", "a list", normaltxt("[ ]")), hintfmt("while evaluating the first argument passed to builtins.catAttrs")); ASSERT_TRACE2("catAttrs \"foo\" {}", TypeError, - hintfmt("expected a list but found %s: %s", "a set", "{ }"), + hintfmt("expected a list but found %s: %s", "a set", normaltxt("{ }")), hintfmt("while evaluating the second argument passed to builtins.catAttrs")); ASSERT_TRACE2("catAttrs \"foo\" [ 1 ]", TypeError, - hintfmt("expected a set but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a set but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating an element in the list passed as second argument to builtins.catAttrs")); ASSERT_TRACE2("catAttrs \"foo\" [ { foo = 1; } 1 { bar = 5;} 
]", TypeError, - hintfmt("expected a set but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a set but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating an element in the list passed as second argument to builtins.catAttrs")); } @@ -565,7 +565,7 @@ namespace nix { TEST_F(ErrorTraceTest, mapAttrs) { ASSERT_TRACE2("mapAttrs [] []", TypeError, - hintfmt("expected a set but found %s: %s", "a list", "[ ]"), + hintfmt("expected a set but found %s: %s", "a list", normaltxt("[ ]")), hintfmt("while evaluating the second argument passed to builtins.mapAttrs")); // XXX: defered @@ -590,12 +590,12 @@ namespace nix { TEST_F(ErrorTraceTest, zipAttrsWith) { ASSERT_TRACE2("zipAttrsWith [] [ 1 ]", TypeError, - hintfmt("expected a function but found %s: %s", "a list", "[ ]"), + hintfmt("expected a function but found %s: %s", "a list", normaltxt("[ ]")), hintfmt("while evaluating the first argument passed to builtins.zipAttrsWith")); ASSERT_TRACE2("zipAttrsWith (_: 1) [ 1 ]", TypeError, - hintfmt("expected a set but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a set but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating a value of the list passed as second argument to builtins.zipAttrsWith")); // XXX: How to properly tell that the fucntion takes two arguments ? 
@@ -622,7 +622,7 @@ namespace nix { TEST_F(ErrorTraceTest, elemAt) { ASSERT_TRACE2("elemAt \"foo\" (-1)", TypeError, - hintfmt("expected a list but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), + hintfmt("expected a list but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.elemAt")); ASSERT_TRACE1("elemAt [] (-1)", @@ -639,7 +639,7 @@ namespace nix { TEST_F(ErrorTraceTest, head) { ASSERT_TRACE2("head 1", TypeError, - hintfmt("expected a list but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a list but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.elemAt")); ASSERT_TRACE1("head []", @@ -652,7 +652,7 @@ namespace nix { TEST_F(ErrorTraceTest, tail) { ASSERT_TRACE2("tail 1", TypeError, - hintfmt("expected a list but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a list but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.tail")); ASSERT_TRACE1("tail []", @@ -665,12 +665,12 @@ namespace nix { TEST_F(ErrorTraceTest, map) { ASSERT_TRACE2("map 1 \"foo\"", TypeError, - hintfmt("expected a list but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), + hintfmt("expected a list but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), hintfmt("while evaluating the second argument passed to builtins.map")); ASSERT_TRACE2("map 1 [ 1 ]", TypeError, - hintfmt("expected a function but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a function but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.map")); } @@ -679,17 +679,17 @@ namespace nix { TEST_F(ErrorTraceTest, filter) { ASSERT_TRACE2("filter 1 \"foo\"", 
TypeError, - hintfmt("expected a list but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), + hintfmt("expected a list but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), hintfmt("while evaluating the second argument passed to builtins.filter")); ASSERT_TRACE2("filter 1 [ \"foo\" ]", TypeError, - hintfmt("expected a function but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a function but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.filter")); ASSERT_TRACE2("filter (_: 5) [ \"foo\" ]", TypeError, - hintfmt("expected a Boolean but found %s: %s", "an integer", ANSI_CYAN "5" ANSI_NORMAL), + hintfmt("expected a Boolean but found %s: %s", "an integer", normaltxt(ANSI_CYAN "5" ANSI_NORMAL)), hintfmt("while evaluating the return value of the filtering function passed to builtins.filter")); } @@ -698,7 +698,7 @@ namespace nix { TEST_F(ErrorTraceTest, elem) { ASSERT_TRACE2("elem 1 \"foo\"", TypeError, - hintfmt("expected a list but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), + hintfmt("expected a list but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), hintfmt("while evaluating the second argument passed to builtins.elem")); } @@ -707,17 +707,17 @@ namespace nix { TEST_F(ErrorTraceTest, concatLists) { ASSERT_TRACE2("concatLists 1", TypeError, - hintfmt("expected a list but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a list but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.concatLists")); ASSERT_TRACE2("concatLists [ 1 ]", TypeError, - hintfmt("expected a list but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a list but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating a value of 
the list passed to builtins.concatLists")); ASSERT_TRACE2("concatLists [ [1] \"foo\" ]", TypeError, - hintfmt("expected a list but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), + hintfmt("expected a list but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), hintfmt("while evaluating a value of the list passed to builtins.concatLists")); } @@ -726,12 +726,12 @@ namespace nix { TEST_F(ErrorTraceTest, length) { ASSERT_TRACE2("length 1", TypeError, - hintfmt("expected a list but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a list but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.length")); ASSERT_TRACE2("length \"foo\"", TypeError, - hintfmt("expected a list but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), + hintfmt("expected a list but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.length")); } @@ -740,21 +740,21 @@ namespace nix { TEST_F(ErrorTraceTest, foldlPrime) { ASSERT_TRACE2("foldl' 1 \"foo\" true", TypeError, - hintfmt("expected a function but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a function but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.foldlStrict")); ASSERT_TRACE2("foldl' (_: 1) \"foo\" true", TypeError, - hintfmt("expected a list but found %s: %s", "a Boolean", ANSI_CYAN "true" ANSI_NORMAL), + hintfmt("expected a list but found %s: %s", "a Boolean", normaltxt(ANSI_CYAN "true" ANSI_NORMAL)), hintfmt("while evaluating the third argument passed to builtins.foldlStrict")); ASSERT_TRACE1("foldl' (_: 1) \"foo\" [ true ]", TypeError, - hintfmt("attempt to call something which is not a function but %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL)); + hintfmt("attempt to 
call something which is not a function but %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL))); ASSERT_TRACE2("foldl' (a: b: a && b) \"foo\" [ true ]", TypeError, - hintfmt("expected a Boolean but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), + hintfmt("expected a Boolean but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), hintfmt("in the left operand of the AND (&&) operator")); } @@ -763,17 +763,17 @@ namespace nix { TEST_F(ErrorTraceTest, any) { ASSERT_TRACE2("any 1 \"foo\"", TypeError, - hintfmt("expected a function but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a function but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.any")); ASSERT_TRACE2("any (_: 1) \"foo\"", TypeError, - hintfmt("expected a list but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), + hintfmt("expected a list but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), hintfmt("while evaluating the second argument passed to builtins.any")); ASSERT_TRACE2("any (_: 1) [ \"foo\" ]", TypeError, - hintfmt("expected a Boolean but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a Boolean but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the return value of the function passed to builtins.any")); } @@ -782,17 +782,17 @@ namespace nix { TEST_F(ErrorTraceTest, all) { ASSERT_TRACE2("all 1 \"foo\"", TypeError, - hintfmt("expected a function but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a function but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.all")); ASSERT_TRACE2("all (_: 1) \"foo\"", TypeError, - hintfmt("expected a list but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), + 
hintfmt("expected a list but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), hintfmt("while evaluating the second argument passed to builtins.all")); ASSERT_TRACE2("all (_: 1) [ \"foo\" ]", TypeError, - hintfmt("expected a Boolean but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a Boolean but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the return value of the function passed to builtins.all")); } @@ -801,12 +801,12 @@ namespace nix { TEST_F(ErrorTraceTest, genList) { ASSERT_TRACE2("genList 1 \"foo\"", TypeError, - hintfmt("expected an integer but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), + hintfmt("expected an integer but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), hintfmt("while evaluating the second argument passed to builtins.genList")); ASSERT_TRACE2("genList 1 2", TypeError, - hintfmt("expected a function but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a function but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.genList", "an integer")); // XXX: defered @@ -825,21 +825,21 @@ namespace nix { TEST_F(ErrorTraceTest, sort) { ASSERT_TRACE2("sort 1 \"foo\"", TypeError, - hintfmt("expected a list but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), + hintfmt("expected a list but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), hintfmt("while evaluating the second argument passed to builtins.sort")); ASSERT_TRACE2("sort 1 [ \"foo\" ]", TypeError, - hintfmt("expected a function but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a function but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.sort")); ASSERT_TRACE1("sort (_: 1) [ \"foo\" 
\"bar\" ]", TypeError, - hintfmt("attempt to call something which is not a function but %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL)); + hintfmt("attempt to call something which is not a function but %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL))); ASSERT_TRACE2("sort (_: _: 1) [ \"foo\" \"bar\" ]", TypeError, - hintfmt("expected a Boolean but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a Boolean but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the return value of the sorting function passed to builtins.sort")); // XXX: Trace too deep, need better asserts @@ -857,17 +857,17 @@ namespace nix { TEST_F(ErrorTraceTest, partition) { ASSERT_TRACE2("partition 1 \"foo\"", TypeError, - hintfmt("expected a function but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a function but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.partition")); ASSERT_TRACE2("partition (_: 1) \"foo\"", TypeError, - hintfmt("expected a list but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), + hintfmt("expected a list but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), hintfmt("while evaluating the second argument passed to builtins.partition")); ASSERT_TRACE2("partition (_: 1) [ \"foo\" ]", TypeError, - hintfmt("expected a Boolean but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a Boolean but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the return value of the partition function passed to builtins.partition")); } @@ -876,17 +876,17 @@ namespace nix { TEST_F(ErrorTraceTest, groupBy) { ASSERT_TRACE2("groupBy 1 \"foo\"", TypeError, - hintfmt("expected a function but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a function but found %s: 
%s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.groupBy")); ASSERT_TRACE2("groupBy (_: 1) \"foo\"", TypeError, - hintfmt("expected a list but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), + hintfmt("expected a list but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), hintfmt("while evaluating the second argument passed to builtins.groupBy")); ASSERT_TRACE2("groupBy (x: x) [ \"foo\" \"bar\" 1 ]", TypeError, - hintfmt("expected a string but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a string but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the return value of the grouping function passed to builtins.groupBy")); } @@ -895,22 +895,22 @@ namespace nix { TEST_F(ErrorTraceTest, concatMap) { ASSERT_TRACE2("concatMap 1 \"foo\"", TypeError, - hintfmt("expected a function but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a function but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.concatMap")); ASSERT_TRACE2("concatMap (x: 1) \"foo\"", TypeError, - hintfmt("expected a list but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), + hintfmt("expected a list but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), hintfmt("while evaluating the second argument passed to builtins.concatMap")); ASSERT_TRACE2("concatMap (x: 1) [ \"foo\" ] # TODO", TypeError, - hintfmt("expected a list but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a list but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the return value of the function passed to builtins.concatMap")); ASSERT_TRACE2("concatMap (x: \"foo\") [ 1 2 ] # TODO", TypeError, - hintfmt("expected a list but found %s: 
%s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), + hintfmt("expected a list but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), hintfmt("while evaluating the return value of the function passed to builtins.concatMap")); } @@ -919,12 +919,12 @@ namespace nix { TEST_F(ErrorTraceTest, add) { ASSERT_TRACE2("add \"foo\" 1", TypeError, - hintfmt("expected an integer but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), + hintfmt("expected an integer but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), hintfmt("while evaluating the first argument of the addition")); ASSERT_TRACE2("add 1 \"foo\"", TypeError, - hintfmt("expected an integer but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), + hintfmt("expected an integer but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), hintfmt("while evaluating the second argument of the addition")); } @@ -933,12 +933,12 @@ namespace nix { TEST_F(ErrorTraceTest, sub) { ASSERT_TRACE2("sub \"foo\" 1", TypeError, - hintfmt("expected an integer but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), + hintfmt("expected an integer but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), hintfmt("while evaluating the first argument of the subtraction")); ASSERT_TRACE2("sub 1 \"foo\"", TypeError, - hintfmt("expected an integer but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), + hintfmt("expected an integer but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), hintfmt("while evaluating the second argument of the subtraction")); } @@ -947,12 +947,12 @@ namespace nix { TEST_F(ErrorTraceTest, mul) { ASSERT_TRACE2("mul \"foo\" 1", TypeError, - hintfmt("expected an integer but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), + hintfmt("expected an integer but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), hintfmt("while 
evaluating the first argument of the multiplication")); ASSERT_TRACE2("mul 1 \"foo\"", TypeError, - hintfmt("expected an integer but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), + hintfmt("expected an integer but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), hintfmt("while evaluating the second argument of the multiplication")); } @@ -961,12 +961,12 @@ namespace nix { TEST_F(ErrorTraceTest, div) { ASSERT_TRACE2("div \"foo\" 1 # TODO: an integer was expected -> a number", TypeError, - hintfmt("expected an integer but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), + hintfmt("expected an integer but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), hintfmt("while evaluating the first operand of the division")); ASSERT_TRACE2("div 1 \"foo\"", TypeError, - hintfmt("expected a float but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), + hintfmt("expected a float but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), hintfmt("while evaluating the second operand of the division")); ASSERT_TRACE1("div \"foo\" 0", @@ -979,12 +979,12 @@ namespace nix { TEST_F(ErrorTraceTest, bitAnd) { ASSERT_TRACE2("bitAnd 1.1 2", TypeError, - hintfmt("expected an integer but found %s: %s", "a float", ANSI_CYAN "1.1" ANSI_NORMAL), + hintfmt("expected an integer but found %s: %s", "a float", normaltxt(ANSI_CYAN "1.1" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.bitAnd")); ASSERT_TRACE2("bitAnd 1 2.2", TypeError, - hintfmt("expected an integer but found %s: %s", "a float", ANSI_CYAN "2.2" ANSI_NORMAL), + hintfmt("expected an integer but found %s: %s", "a float", normaltxt(ANSI_CYAN "2.2" ANSI_NORMAL)), hintfmt("while evaluating the second argument passed to builtins.bitAnd")); } @@ -993,12 +993,12 @@ namespace nix { TEST_F(ErrorTraceTest, bitOr) { ASSERT_TRACE2("bitOr 1.1 2", TypeError, - hintfmt("expected an integer but found %s: 
%s", "a float", ANSI_CYAN "1.1" ANSI_NORMAL), + hintfmt("expected an integer but found %s: %s", "a float", normaltxt(ANSI_CYAN "1.1" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.bitOr")); ASSERT_TRACE2("bitOr 1 2.2", TypeError, - hintfmt("expected an integer but found %s: %s", "a float", ANSI_CYAN "2.2" ANSI_NORMAL), + hintfmt("expected an integer but found %s: %s", "a float", normaltxt(ANSI_CYAN "2.2" ANSI_NORMAL)), hintfmt("while evaluating the second argument passed to builtins.bitOr")); } @@ -1007,12 +1007,12 @@ namespace nix { TEST_F(ErrorTraceTest, bitXor) { ASSERT_TRACE2("bitXor 1.1 2", TypeError, - hintfmt("expected an integer but found %s: %s", "a float", ANSI_CYAN "1.1" ANSI_NORMAL), + hintfmt("expected an integer but found %s: %s", "a float", normaltxt(ANSI_CYAN "1.1" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.bitXor")); ASSERT_TRACE2("bitXor 1 2.2", TypeError, - hintfmt("expected an integer but found %s: %s", "a float", ANSI_CYAN "2.2" ANSI_NORMAL), + hintfmt("expected an integer but found %s: %s", "a float", normaltxt(ANSI_CYAN "2.2" ANSI_NORMAL)), hintfmt("while evaluating the second argument passed to builtins.bitXor")); } @@ -1038,7 +1038,7 @@ namespace nix { TEST_F(ErrorTraceTest, toString) { ASSERT_TRACE2("toString { a = 1; }", TypeError, - hintfmt("cannot coerce %s to a string: %s", "a set", "{ a = " ANSI_CYAN "1" ANSI_NORMAL "; }"), + hintfmt("cannot coerce %s to a string: %s", "a set", normaltxt("{ a = " ANSI_CYAN "1" ANSI_NORMAL "; }")), hintfmt("while evaluating the first argument passed to builtins.toString")); } @@ -1047,17 +1047,17 @@ namespace nix { TEST_F(ErrorTraceTest, substring) { ASSERT_TRACE2("substring {} \"foo\" true", TypeError, - hintfmt("expected an integer but found %s: %s", "a set", "{ }"), + hintfmt("expected an integer but found %s: %s", "a set", normaltxt("{ }")), hintfmt("while evaluating the first argument (the start offset) passed to 
builtins.substring")); ASSERT_TRACE2("substring 3 \"foo\" true", TypeError, - hintfmt("expected an integer but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), + hintfmt("expected an integer but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), hintfmt("while evaluating the second argument (the substring length) passed to builtins.substring")); ASSERT_TRACE2("substring 0 3 {}", TypeError, - hintfmt("cannot coerce %s to a string: %s", "a set", "{ }"), + hintfmt("cannot coerce %s to a string: %s", "a set", normaltxt("{ }")), hintfmt("while evaluating the third argument (the string) passed to builtins.substring")); ASSERT_TRACE1("substring (-3) 3 \"sometext\"", @@ -1070,7 +1070,7 @@ namespace nix { TEST_F(ErrorTraceTest, stringLength) { ASSERT_TRACE2("stringLength {} # TODO: context is missing ???", TypeError, - hintfmt("cannot coerce %s to a string: %s", "a set", "{ }"), + hintfmt("cannot coerce %s to a string: %s", "a set", normaltxt("{ }")), hintfmt("while evaluating the argument passed to builtins.stringLength")); } @@ -1079,7 +1079,7 @@ namespace nix { TEST_F(ErrorTraceTest, hashString) { ASSERT_TRACE2("hashString 1 {}", TypeError, - hintfmt("expected a string but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a string but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.hashString")); ASSERT_TRACE1("hashString \"foo\" \"content\"", @@ -1088,7 +1088,7 @@ namespace nix { ASSERT_TRACE2("hashString \"sha256\" {}", TypeError, - hintfmt("expected a string but found %s: %s", "a set", "{ }"), + hintfmt("expected a string but found %s: %s", "a set", normaltxt("{ }")), hintfmt("while evaluating the second argument passed to builtins.hashString")); } @@ -1097,12 +1097,12 @@ namespace nix { TEST_F(ErrorTraceTest, match) { ASSERT_TRACE2("match 1 {}", TypeError, - hintfmt("expected a string but found %s: %s", "an integer", 
ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a string but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.match")); ASSERT_TRACE2("match \"foo\" {}", TypeError, - hintfmt("expected a string but found %s: %s", "a set", "{ }"), + hintfmt("expected a string but found %s: %s", "a set", normaltxt("{ }")), hintfmt("while evaluating the second argument passed to builtins.match")); ASSERT_TRACE1("match \"(.*\" \"\"", @@ -1115,12 +1115,12 @@ namespace nix { TEST_F(ErrorTraceTest, split) { ASSERT_TRACE2("split 1 {}", TypeError, - hintfmt("expected a string but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a string but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.split")); ASSERT_TRACE2("split \"foo\" {}", TypeError, - hintfmt("expected a string but found %s: %s", "a set", "{ }"), + hintfmt("expected a string but found %s: %s", "a set", normaltxt("{ }")), hintfmt("while evaluating the second argument passed to builtins.split")); ASSERT_TRACE1("split \"f(o*o\" \"1foo2\"", @@ -1133,17 +1133,17 @@ namespace nix { TEST_F(ErrorTraceTest, concatStringsSep) { ASSERT_TRACE2("concatStringsSep 1 {}", TypeError, - hintfmt("expected a string but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a string but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the first argument (the separator string) passed to builtins.concatStringsSep")); ASSERT_TRACE2("concatStringsSep \"foo\" {}", TypeError, - hintfmt("expected a list but found %s: %s", "a set", "{ }"), + hintfmt("expected a list but found %s: %s", "a set", normaltxt("{ }")), hintfmt("while evaluating the second argument (the list of strings to concat) passed to builtins.concatStringsSep")); ASSERT_TRACE2("concatStringsSep \"foo\" [ 1 2 {} ] # TODO: coerce to string 
is buggy", TypeError, - hintfmt("cannot coerce %s to a string: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("cannot coerce %s to a string: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating one element of the list of strings to concat passed to builtins.concatStringsSep")); } @@ -1152,7 +1152,7 @@ namespace nix { TEST_F(ErrorTraceTest, parseDrvName) { ASSERT_TRACE2("parseDrvName 1", TypeError, - hintfmt("expected a string but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a string but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.parseDrvName")); } @@ -1161,12 +1161,12 @@ namespace nix { TEST_F(ErrorTraceTest, compareVersions) { ASSERT_TRACE2("compareVersions 1 {}", TypeError, - hintfmt("expected a string but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a string but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.compareVersions")); ASSERT_TRACE2("compareVersions \"abd\" {}", TypeError, - hintfmt("expected a string but found %s: %s", "a set", "{ }"), + hintfmt("expected a string but found %s: %s", "a set", normaltxt("{ }")), hintfmt("while evaluating the second argument passed to builtins.compareVersions")); } @@ -1175,7 +1175,7 @@ namespace nix { TEST_F(ErrorTraceTest, splitVersion) { ASSERT_TRACE2("splitVersion 1", TypeError, - hintfmt("expected a string but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a string but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.splitVersion")); } From 770d2bc779d39c041293011892e80f5fcb6b76df Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Sat, 3 Feb 2024 19:17:22 -0800 Subject: [PATCH 023/164] Key repeated values on attribute binding 
pointers, not value pointers Closes #8672 --- src/libexpr/print.cc | 4 ++-- tests/functional/repl.sh | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/libexpr/print.cc b/src/libexpr/print.cc index 702e4bfe8..915e8489a 100644 --- a/src/libexpr/print.cc +++ b/src/libexpr/print.cc @@ -152,7 +152,7 @@ struct ImportantFirstAttrNameCmp } }; -typedef std::set ValuesSeen; +typedef std::set ValuesSeen; class Printer { @@ -262,7 +262,7 @@ private: void printAttrs(Value & v, size_t depth) { - if (seen && !seen->insert(&v).second) { + if (seen && !seen->insert(v.attrs).second) { printRepeated(); return; } diff --git a/tests/functional/repl.sh b/tests/functional/repl.sh index 1b779c1f5..5f399aa44 100644 --- a/tests/functional/repl.sh +++ b/tests/functional/repl.sh @@ -156,7 +156,7 @@ testReplResponseNoRegex ' # Same for let expressions testReplResponseNoRegex ' let x = { y = { a = 1; }; inherit x; }; in x -' '{ x = { ... }; y = { ... }; }' +' '{ x = «repeated»; y = { ... }; }' # The :p command should recursively print sets, but prevent infinite recursion testReplResponseNoRegex ' @@ -171,4 +171,4 @@ testReplResponseNoRegex ' # Same for let expressions testReplResponseNoRegex ' :p let x = { y = { a = 1; }; inherit x; }; in x -' '{ x = { x = «repeated»; y = { a = 1; }; }; y = «repeated»; }' +' '{ x = «repeated»; y = { a = 1; }; }' From e1131b59279f7cf9f9bea93b5355608d78097f65 Mon Sep 17 00:00:00 2001 From: Rodney Lorrimar Date: Sun, 4 Feb 2024 12:02:06 +0800 Subject: [PATCH 024/164] print-dev-env: Avoid using unbound shellHook variable Some tools which consume the "nix print-dev-env" rc script (such as "nix-direnv") are sensitive to the use of unbound variables. They use "set -u". The "nix print-dev-env" rc script initially unsets "shellHook", then loads variables from the derivation, and then evaluates "shellHook". However, most derivations don't have a "shellHook" attribute. So users get the error "shellHook: unbound variable". 
This can be demonstrated with the command: nix print-dev-env nixpkgs#hello | bash -u This commit changes the rc script to provide an empty fallback value for the "shellHook" variable. Closes: #7951 #8253 --- src/nix/develop.cc | 2 +- tests/functional/nix-shell.sh | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/nix/develop.cc b/src/nix/develop.cc index 1f2891378..403178a5d 100644 --- a/src/nix/develop.cc +++ b/src/nix/develop.cc @@ -354,7 +354,7 @@ struct Common : InstallableCommand, MixProfile for (auto & i : {"TMP", "TMPDIR", "TEMP", "TEMPDIR"}) out << fmt("export %s=\"$NIX_BUILD_TOP\"\n", i); - out << "eval \"$shellHook\"\n"; + out << "eval \"${shellHook:-}\"\n"; auto script = out.str(); diff --git a/tests/functional/nix-shell.sh b/tests/functional/nix-shell.sh index 13403fadb..04c83138e 100644 --- a/tests/functional/nix-shell.sh +++ b/tests/functional/nix-shell.sh @@ -118,10 +118,10 @@ diff $TEST_ROOT/dev-env{,2}.json # Ensure `nix print-dev-env --json` contains variable assignments. [[ $(jq -r .variables.arr1.value[2] $TEST_ROOT/dev-env.json) = '3 4' ]] -# Run tests involving `source <(nix print-dev-inv)` in subshells to avoid modifying the current +# Run tests involving `source <(nix print-dev-env)` in subshells to avoid modifying the current # environment. -set +u # FIXME: Make print-dev-env `set -u` compliant (issue #7951) +set -u # Ensure `source <(nix print-dev-env)` modifies the environment. 
( From 5ccb06ee1b4c757ff4ca0aa6eac15d5656f7774c Mon Sep 17 00:00:00 2001 From: pennae Date: Sun, 4 Feb 2024 16:42:00 +0100 Subject: [PATCH 025/164] fix debugger crashing while printing envs fixes #9932 --- .gitignore | 1 + src/libexpr/eval.cc | 8 +++++--- tests/functional/debugger.sh | 13 +++++++++++++ tests/functional/local.mk | 3 ++- 4 files changed, 21 insertions(+), 4 deletions(-) create mode 100644 tests/functional/debugger.sh diff --git a/.gitignore b/.gitignore index a47b195bb..a0a0786ed 100644 --- a/.gitignore +++ b/.gitignore @@ -94,6 +94,7 @@ perl/Makefile.config /tests/functional/ca/config.nix /tests/functional/dyn-drv/config.nix /tests/functional/repl-result-out +/tests/functional/debugger-test-out /tests/functional/test-libstoreconsumer/test-libstoreconsumer # /tests/functional/lang/ diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 91fd3ddf8..398eec410 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -744,7 +744,8 @@ void printEnvBindings(const SymbolTable & st, const StaticEnv & se, const Env & if (se.up && env.up) { std::cout << "static: "; printStaticEnvBindings(st, se); - printWithBindings(st, env); + if (se.isWith) + printWithBindings(st, env); std::cout << std::endl; printEnvBindings(st, *se.up, *env.up, ++lvl); } else { @@ -756,7 +757,8 @@ void printEnvBindings(const SymbolTable & st, const StaticEnv & se, const Env & std::cout << st[i.first] << " "; std::cout << ANSI_NORMAL; std::cout << std::endl; - printWithBindings(st, env); // probably nothing there for the top level. + if (se.isWith) + printWithBindings(st, env); // probably nothing there for the top level. std::cout << std::endl; } @@ -778,7 +780,7 @@ void mapStaticEnvBindings(const SymbolTable & st, const StaticEnv & se, const En if (env.up && se.up) { mapStaticEnvBindings(st, *se.up, *env.up, vm); - if (!env.values[0]->isThunk()) { + if (se.isWith && !env.values[0]->isThunk()) { // add 'with' bindings. 
Bindings::iterator j = env.values[0]->attrs->begin(); while (j != env.values[0]->attrs->end()) { diff --git a/tests/functional/debugger.sh b/tests/functional/debugger.sh new file mode 100644 index 000000000..63d88cbf3 --- /dev/null +++ b/tests/functional/debugger.sh @@ -0,0 +1,13 @@ +source common.sh + +clearStore + +# regression #9932 +echo ":env" | expect 1 nix eval --debugger --expr '(_: throw "oh snap") 42' +echo ":env" | expect 1 nix eval --debugger --expr ' + let x.a = 1; in + with x; + (_: builtins.seq x.a (throw "oh snap")) x.a +' >debugger-test-out +grep -P 'with: .*a' debugger-test-out +grep -P 'static: .*x' debugger-test-out diff --git a/tests/functional/local.mk b/tests/functional/local.mk index 888c7e18a..f369c7c2c 100644 --- a/tests/functional/local.mk +++ b/tests/functional/local.mk @@ -127,7 +127,8 @@ nix_tests = \ toString-path.sh \ read-only-store.sh \ nested-sandboxing.sh \ - impure-env.sh + impure-env.sh \ + debugger.sh ifeq ($(HAVE_LIBCPUID), 1) nix_tests += compute-levels.sh From 721fddac2f1cb633823046d97f465c579540de43 Mon Sep 17 00:00:00 2001 From: Valentin Gagarin Date: Sun, 4 Feb 2024 22:03:13 +0100 Subject: [PATCH 026/164] use the right heading level (#9935) --- doc/manual/src/installation/upgrading.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/manual/src/installation/upgrading.md b/doc/manual/src/installation/upgrading.md index 47618e2f5..38edcdbc5 100644 --- a/doc/manual/src/installation/upgrading.md +++ b/doc/manual/src/installation/upgrading.md @@ -16,7 +16,7 @@ nix (Nix) 2.18.1 > Writing to the [local store](@docroot@/store/types/local-store.md) with a newer version of Nix, for example by building derivations with [`nix-build`](@docroot@/command-ref/nix-build.md) or [`nix-store --realise`](@docroot@/command-ref/nix-store/realise.md), may change the database schema! > Reverting to an older version of Nix may therefore require purging the store database before it can be used. 
-### Linux multi-user +## Linux multi-user ```console $ sudo su From 8b873edcca2ff9f9f11efe3cba42a291dbdd124a Mon Sep 17 00:00:00 2001 From: Valentin Gagarin Date: Sun, 4 Feb 2024 22:15:20 +0100 Subject: [PATCH 027/164] fix anchor link; less weird link texts (#9936) --- doc/manual/src/language/operators.md | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/doc/manual/src/language/operators.md b/doc/manual/src/language/operators.md index e9cbb5c92..6fd66864b 100644 --- a/doc/manual/src/language/operators.md +++ b/doc/manual/src/language/operators.md @@ -84,7 +84,7 @@ The `+` operator is overloaded to also work on strings and paths. > > *string* `+` *string* -Concatenate two [string]s and merge their string contexts. +Concatenate two [strings][string] and merge their string contexts. [String concatenation]: #string-concatenation @@ -94,7 +94,7 @@ Concatenate two [string]s and merge their string contexts. > > *path* `+` *path* -Concatenate two [path]s. +Concatenate two [paths][path]. The result is a path. [Path concatenation]: #path-concatenation @@ -150,9 +150,9 @@ If an attribute name is present in both, the attribute value from the latter is Comparison is -- [arithmetic] for [number]s -- lexicographic for [string]s and [path]s -- item-wise lexicographic for [list]s: +- [arithmetic] for [numbers][number] +- lexicographic for [strings][string] and [paths][path] +- item-wise lexicographic for [lists][list]: elements at the same index in both lists are compared according to their type and skipped if they are equal. All comparison operators are implemented in terms of `<`, and the following equivalencies hold: @@ -163,12 +163,12 @@ All comparison operators are implemented in terms of `<`, and the following equi | *a* `>` *b* | *b* `<` *a* | | *a* `>=` *b* | `! 
(` *a* `<` *b* `)` | -[Comparison]: #comparison-operators +[Comparison]: #comparison ## Equality -- [Attribute sets][attribute set] and [list]s are compared recursively, and therefore are fully evaluated. -- Comparison of [function]s always returns `false`. +- [Attribute sets][attribute set] and [lists][list] are compared recursively, and therefore are fully evaluated. +- Comparison of [functions][function] always returns `false`. - Numbers are type-compatible, see [arithmetic] operators. - Floating point numbers only differ up to a limited precision. From 8d4890c3f83366a0d40ed7f9c3ee21dbd6a2ef67 Mon Sep 17 00:00:00 2001 From: Valentin Gagarin Date: Sun, 4 Feb 2024 22:45:10 +0100 Subject: [PATCH 028/164] catch multiple use of link reference (#9937) --- doc/manual/src/language/import-from-derivation.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/doc/manual/src/language/import-from-derivation.md b/doc/manual/src/language/import-from-derivation.md index 03b3f9d91..fb12ba51a 100644 --- a/doc/manual/src/language/import-from-derivation.md +++ b/doc/manual/src/language/import-from-derivation.md @@ -1,6 +1,8 @@ # Import From Derivation -The value of a Nix expression can depend on the contents of a [store object](@docroot@/glossary.md#gloss-store-object). +The value of a Nix expression can depend on the contents of a [store object]. + +[store object]: @docroot@/glossary.md#gloss-store-object Passing an expression `expr` that evaluates to a [store path](@docroot@/glossary.md#gloss-store-path) to any built-in function which reads from the filesystem constitutes Import From Derivation (IFD): From a6737b7e179fba2681393335c69c97df9bd5a2b0 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 5 Feb 2024 15:13:11 +0100 Subject: [PATCH 029/164] CanonPath, SourcePath: Change operator + to / This is less confusing and makes it more similar to std::filesystem::path. 
--- src/libexpr/eval.cc | 4 ++-- src/libexpr/primops.cc | 2 +- src/libfetchers/filtering-input-accessor.cc | 14 +++++++------- src/libfetchers/fs-input-accessor.cc | 2 +- src/libfetchers/git-utils.cc | 2 +- src/libfetchers/git.cc | 4 ++-- src/libfetchers/mercurial.cc | 2 +- src/libfetchers/path.cc | 2 +- src/libstore/binary-cache-store.cc | 4 ++-- src/libstore/local-fs-store.cc | 2 +- src/libstore/nar-accessor.cc | 2 +- src/libutil/archive.cc | 10 +++++----- src/libutil/canon-path.cc | 4 ++-- src/libutil/canon-path.hh | 4 ++-- src/libutil/fs-sink.cc | 2 +- src/libutil/git.cc | 2 +- src/libutil/source-path.cc | 8 ++++---- src/libutil/source-path.hh | 5 +++-- src/nix-env/nix-env.cc | 4 ++-- src/nix/ls.cc | 2 +- src/nix/run.cc | 2 +- src/nix/why-depends.cc | 2 +- tests/unit/libutil/canon-path.cc | 10 +++++----- 23 files changed, 48 insertions(+), 47 deletions(-) diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 91fd3ddf8..bebc94873 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -2689,14 +2689,14 @@ SourcePath resolveExprPath(SourcePath path) // Basic cycle/depth limit to avoid infinite loops. if (++followCount >= maxFollow) throw Error("too many symbolic links encountered while traversing the path '%s'", path); - auto p = path.parent().resolveSymlinks() + path.baseName(); + auto p = path.parent().resolveSymlinks() / path.baseName(); if (p.lstat().type != InputAccessor::tSymlink) break; path = {path.accessor, CanonPath(p.readLink(), path.path.parent().value_or(CanonPath::root))}; } /* If `path' refers to a directory, append `/default.nix'. 
*/ if (path.resolveSymlinks().lstat().type == InputAccessor::tDirectory) - return path + "default.nix"; + return path / "default.nix"; return path; } diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index 1197b6e13..f8ded0cf8 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -1816,7 +1816,7 @@ static void prim_readDir(EvalState & state, const PosIdx pos, Value * * args, Va // detailed node info quickly in this case we produce a thunk to // query the file type lazily. auto epath = state.allocValue(); - epath->mkPath(path + name); + epath->mkPath(path / name); if (!readFileType) readFileType = &state.getBuiltin("readFileType"); attr.mkApp(readFileType, epath); diff --git a/src/libfetchers/filtering-input-accessor.cc b/src/libfetchers/filtering-input-accessor.cc index 581ce3c1d..087a100af 100644 --- a/src/libfetchers/filtering-input-accessor.cc +++ b/src/libfetchers/filtering-input-accessor.cc @@ -5,26 +5,26 @@ namespace nix { std::string FilteringInputAccessor::readFile(const CanonPath & path) { checkAccess(path); - return next->readFile(prefix + path); + return next->readFile(prefix / path); } bool FilteringInputAccessor::pathExists(const CanonPath & path) { - return isAllowed(path) && next->pathExists(prefix + path); + return isAllowed(path) && next->pathExists(prefix / path); } std::optional FilteringInputAccessor::maybeLstat(const CanonPath & path) { checkAccess(path); - return next->maybeLstat(prefix + path); + return next->maybeLstat(prefix / path); } InputAccessor::DirEntries FilteringInputAccessor::readDirectory(const CanonPath & path) { checkAccess(path); DirEntries entries; - for (auto & entry : next->readDirectory(prefix + path)) { - if (isAllowed(path + entry.first)) + for (auto & entry : next->readDirectory(prefix / path)) { + if (isAllowed(path / entry.first)) entries.insert(std::move(entry)); } return entries; @@ -33,12 +33,12 @@ InputAccessor::DirEntries FilteringInputAccessor::readDirectory(const CanonPath std::string 
FilteringInputAccessor::readLink(const CanonPath & path) { checkAccess(path); - return next->readLink(prefix + path); + return next->readLink(prefix / path); } std::string FilteringInputAccessor::showPath(const CanonPath & path) { - return next->showPath(prefix + path); + return next->showPath(prefix / path); } void FilteringInputAccessor::checkAccess(const CanonPath & path) diff --git a/src/libfetchers/fs-input-accessor.cc b/src/libfetchers/fs-input-accessor.cc index c3d8d273c..46bc6b70d 100644 --- a/src/libfetchers/fs-input-accessor.cc +++ b/src/libfetchers/fs-input-accessor.cc @@ -48,7 +48,7 @@ struct FSInputAccessor : InputAccessor, PosixSourceAccessor CanonPath makeAbsPath(const CanonPath & path) { - return root + path; + return root / path; } std::optional getPhysicalPath(const CanonPath & path) override diff --git a/src/libfetchers/git-utils.cc b/src/libfetchers/git-utils.cc index 382a363f0..1256a4c2c 100644 --- a/src/libfetchers/git-utils.cc +++ b/src/libfetchers/git-utils.cc @@ -295,7 +295,7 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this throw Error("getting working directory status: %s", git_error_last()->message); /* Get submodule info. 
*/ - auto modulesFile = path + ".gitmodules"; + auto modulesFile = path / ".gitmodules"; if (pathExists(modulesFile.abs())) info.submodules = parseSubmodules(modulesFile); diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc index f9a1cb1bc..26fe79596 100644 --- a/src/libfetchers/git.cc +++ b/src/libfetchers/git.cc @@ -319,7 +319,7 @@ struct GitInputScheme : InputScheme if (!repoInfo.isLocal) throw Error("cannot commit '%s' to Git repository '%s' because it's not a working tree", path, input.to_string()); - writeFile((CanonPath(repoInfo.url) + path).abs(), contents); + writeFile((CanonPath(repoInfo.url) / path).abs(), contents); auto result = runProgram(RunOptions { .program = "git", @@ -680,7 +680,7 @@ struct GitInputScheme : InputScheme std::map> mounts; for (auto & submodule : repoInfo.workdirInfo.submodules) { - auto submodulePath = CanonPath(repoInfo.url) + submodule.path; + auto submodulePath = CanonPath(repoInfo.url) / submodule.path; fetchers::Attrs attrs; attrs.insert_or_assign("type", "git"); attrs.insert_or_assign("url", submodulePath.abs()); diff --git a/src/libfetchers/mercurial.cc b/src/libfetchers/mercurial.cc index 9982389ab..55e2eae03 100644 --- a/src/libfetchers/mercurial.cc +++ b/src/libfetchers/mercurial.cc @@ -141,7 +141,7 @@ struct MercurialInputScheme : InputScheme if (!isLocal) throw Error("cannot commit '%s' to Mercurial repository '%s' because it's not a working tree", path, input.to_string()); - auto absPath = CanonPath(repoPath) + path; + auto absPath = CanonPath(repoPath) / path; writeFile(absPath.abs(), contents); diff --git a/src/libfetchers/path.cc b/src/libfetchers/path.cc index f9b973320..d3b0e475d 100644 --- a/src/libfetchers/path.cc +++ b/src/libfetchers/path.cc @@ -84,7 +84,7 @@ struct PathInputScheme : InputScheme std::string_view contents, std::optional commitMsg) const override { - writeFile((CanonPath(getAbsPath(input)) + path).abs(), contents); + writeFile((CanonPath(getAbsPath(input)) / path).abs(), contents); } 
CanonPath getAbsPath(const Input & input) const diff --git a/src/libstore/binary-cache-store.cc b/src/libstore/binary-cache-store.cc index ea1279e2e..189d1d305 100644 --- a/src/libstore/binary-cache-store.cc +++ b/src/libstore/binary-cache-store.cc @@ -235,14 +235,14 @@ ref BinaryCacheStore::addToStoreCommon( std::regex regex2("^[0-9a-f]{38}\\.debug$"); for (auto & [s1, _type] : narAccessor->readDirectory(buildIdDir)) { - auto dir = buildIdDir + s1; + auto dir = buildIdDir / s1; if (narAccessor->lstat(dir).type != SourceAccessor::tDirectory || !std::regex_match(s1, regex1)) continue; for (auto & [s2, _type] : narAccessor->readDirectory(dir)) { - auto debugPath = dir + s2; + auto debugPath = dir / s2; if (narAccessor->lstat(debugPath).type != SourceAccessor::tRegular || !std::regex_match(s2, regex2)) diff --git a/src/libstore/local-fs-store.cc b/src/libstore/local-fs-store.cc index 953f3a264..81c385ddb 100644 --- a/src/libstore/local-fs-store.cc +++ b/src/libstore/local-fs-store.cc @@ -28,7 +28,7 @@ struct LocalStoreAccessor : PosixSourceAccessor auto [storePath, rest] = store->toStorePath(path.abs()); if (requireValidPath && !store->isValidPath(storePath)) throw InvalidPath("path '%1%' is not a valid store path", store->printStorePath(storePath)); - return CanonPath(store->getRealStoreDir()) + storePath.to_string() + CanonPath(rest); + return CanonPath(store->getRealStoreDir()) / storePath.to_string() / CanonPath(rest); } std::optional maybeLstat(const CanonPath & path) override diff --git a/src/libstore/nar-accessor.cc b/src/libstore/nar-accessor.cc index b13e4c52c..cecf8148f 100644 --- a/src/libstore/nar-accessor.cc +++ b/src/libstore/nar-accessor.cc @@ -277,7 +277,7 @@ json listNar(ref accessor, const CanonPath & path, bool recurse) json &res2 = obj["entries"]; for (const auto & [name, type] : accessor->readDirectory(path)) { if (recurse) { - res2[name] = listNar(accessor, path + name, true); + res2[name] = listNar(accessor, path / name, true); } else res2[name] 
= json::object(); } diff --git a/src/libutil/archive.cc b/src/libutil/archive.cc index 6062392cd..b783b29e0 100644 --- a/src/libutil/archive.cc +++ b/src/libutil/archive.cc @@ -77,20 +77,20 @@ void SourceAccessor::dumpPath( std::string name(i.first); size_t pos = i.first.find(caseHackSuffix); if (pos != std::string::npos) { - debug("removing case hack suffix from '%s'", path + i.first); + debug("removing case hack suffix from '%s'", path / i.first); name.erase(pos); } if (!unhacked.emplace(name, i.first).second) throw Error("file name collision in between '%s' and '%s'", - (path + unhacked[name]), - (path + i.first)); + (path / unhacked[name]), + (path / i.first)); } else unhacked.emplace(i.first, i.first); for (auto & i : unhacked) - if (filter((path + i.first).abs())) { + if (filter((path / i.first).abs())) { sink << "entry" << "(" << "name" << i.first << "node"; - dump(path + i.second); + dump(path / i.second); sink << ")"; } } diff --git a/src/libutil/canon-path.cc b/src/libutil/canon-path.cc index 0a0f96a05..bf948be5d 100644 --- a/src/libutil/canon-path.cc +++ b/src/libutil/canon-path.cc @@ -63,7 +63,7 @@ void CanonPath::extend(const CanonPath & x) path += x.abs(); } -CanonPath CanonPath::operator + (const CanonPath & x) const +CanonPath CanonPath::operator / (const CanonPath & x) const { auto res = *this; res.extend(x); @@ -78,7 +78,7 @@ void CanonPath::push(std::string_view c) path += c; } -CanonPath CanonPath::operator + (std::string_view c) const +CanonPath CanonPath::operator / (std::string_view c) const { auto res = *this; res.push(c); diff --git a/src/libutil/canon-path.hh b/src/libutil/canon-path.hh index 997c8c731..fb2d9244b 100644 --- a/src/libutil/canon-path.hh +++ b/src/libutil/canon-path.hh @@ -190,14 +190,14 @@ public: /** * Concatenate two paths. */ - CanonPath operator + (const CanonPath & x) const; + CanonPath operator / (const CanonPath & x) const; /** * Add a path component to this one. It must not contain any slashes. 
*/ void push(std::string_view c); - CanonPath operator + (std::string_view c) const; + CanonPath operator / (std::string_view c) const; /** * Check whether access to this path is allowed, which is the case diff --git a/src/libutil/fs-sink.cc b/src/libutil/fs-sink.cc index b6f8db592..95b6088da 100644 --- a/src/libutil/fs-sink.cc +++ b/src/libutil/fs-sink.cc @@ -34,7 +34,7 @@ void copyRecursive( sink.createDirectory(to); for (auto & [name, _] : accessor.readDirectory(from)) { copyRecursive( - accessor, from + name, + accessor, from / name, sink, to + "/" + name); break; } diff --git a/src/libutil/git.cc b/src/libutil/git.cc index 3b8c3ebac..5733531fa 100644 --- a/src/libutil/git.cc +++ b/src/libutil/git.cc @@ -259,7 +259,7 @@ Mode dump( { Tree entries; for (auto & [name, _] : accessor.readDirectory(path)) { - auto child = path + name; + auto child = path / name; if (!filter(child.abs())) continue; auto entry = hook(child); diff --git a/src/libutil/source-path.cc b/src/libutil/source-path.cc index d85b0b7fe..341daf39c 100644 --- a/src/libutil/source-path.cc +++ b/src/libutil/source-path.cc @@ -41,11 +41,11 @@ std::optional SourcePath::getPhysicalPath() const std::string SourcePath::to_string() const { return accessor->showPath(path); } -SourcePath SourcePath::operator+(const CanonPath & x) const -{ return {accessor, path + x}; } +SourcePath SourcePath::operator / (const CanonPath & x) const +{ return {accessor, path / x}; } -SourcePath SourcePath::operator+(std::string_view c) const -{ return {accessor, path + c}; } +SourcePath SourcePath::operator / (std::string_view c) const +{ return {accessor, path / c}; } bool SourcePath::operator==(const SourcePath & x) const { diff --git a/src/libutil/source-path.hh b/src/libutil/source-path.hh index bf5625ca5..bde07b08f 100644 --- a/src/libutil/source-path.hh +++ b/src/libutil/source-path.hh @@ -89,14 +89,15 @@ struct SourcePath /** * Append a `CanonPath` to this path. 
*/ - SourcePath operator + (const CanonPath & x) const; + SourcePath operator / (const CanonPath & x) const; /** * Append a single component `c` to this path. `c` must not * contain a slash. A slash is implicitly added between this path * and `c`. */ - SourcePath operator+(std::string_view c) const; + SourcePath operator / (std::string_view c) const; + bool operator==(const SourcePath & x) const; bool operator!=(const SourcePath & x) const; bool operator<(const SourcePath & x) const; diff --git a/src/nix-env/nix-env.cc b/src/nix-env/nix-env.cc index d5b46c57a..dfc6e70eb 100644 --- a/src/nix-env/nix-env.cc +++ b/src/nix-env/nix-env.cc @@ -97,7 +97,7 @@ static bool isNixExpr(const SourcePath & path, struct InputAccessor::Stat & st) { return st.type == InputAccessor::tRegular - || (st.type == InputAccessor::tDirectory && (path + "default.nix").resolveSymlinks().pathExists()); + || (st.type == InputAccessor::tDirectory && (path / "default.nix").resolveSymlinks().pathExists()); } @@ -116,7 +116,7 @@ static void getAllExprs(EvalState & state, are implemented using profiles). 
*/ if (i == "manifest.nix") continue; - auto path2 = (path + i).resolveSymlinks(); + auto path2 = (path / i).resolveSymlinks(); InputAccessor::Stat st; try { diff --git a/src/nix/ls.cc b/src/nix/ls.cc index 231456c9c..63f97f2d3 100644 --- a/src/nix/ls.cc +++ b/src/nix/ls.cc @@ -72,7 +72,7 @@ struct MixLs : virtual Args, MixJSON if (st.type == SourceAccessor::Type::tDirectory && !showDirectory) { auto names = accessor->readDirectory(curPath); for (auto & [name, type] : names) - showFile(curPath + name, relPath + "/" + name); + showFile(curPath / name, relPath + "/" + name); } else showFile(curPath, relPath); }; diff --git a/src/nix/run.cc b/src/nix/run.cc index 9bca5b9d0..e86837679 100644 --- a/src/nix/run.cc +++ b/src/nix/run.cc @@ -124,7 +124,7 @@ struct CmdShell : InstallablesCommand, MixEnvironment if (true) pathAdditions.push_back(store->printStorePath(path) + "/bin"); - auto propPath = CanonPath(store->printStorePath(path)) + "nix-support" + "propagated-user-env-packages"; + auto propPath = CanonPath(store->printStorePath(path)) / "nix-support" / "propagated-user-env-packages"; if (auto st = accessor->maybeLstat(propPath); st && st->type == SourceAccessor::tRegular) { for (auto & p : tokenizeString(accessor->readFile(propPath))) todo.push(store->parseStorePath(p)); diff --git a/src/nix/why-depends.cc b/src/nix/why-depends.cc index aecf65922..e299585ff 100644 --- a/src/nix/why-depends.cc +++ b/src/nix/why-depends.cc @@ -225,7 +225,7 @@ struct CmdWhyDepends : SourceExprCommand, MixOperateOnOptions if (st->type == SourceAccessor::Type::tDirectory) { auto names = accessor->readDirectory(p); for (auto & [name, type] : names) - visitPath(p + name); + visitPath(p / name); } else if (st->type == SourceAccessor::Type::tRegular) { diff --git a/tests/unit/libutil/canon-path.cc b/tests/unit/libutil/canon-path.cc index fc94ccc3d..bf11abe3e 100644 --- a/tests/unit/libutil/canon-path.cc +++ b/tests/unit/libutil/canon-path.cc @@ -80,29 +80,29 @@ namespace nix { { CanonPath 
p1("a//foo/bar//"); CanonPath p2("xyzzy/bla"); - ASSERT_EQ((p1 + p2).abs(), "/a/foo/bar/xyzzy/bla"); + ASSERT_EQ((p1 / p2).abs(), "/a/foo/bar/xyzzy/bla"); } { CanonPath p1("/"); CanonPath p2("/a/b"); - ASSERT_EQ((p1 + p2).abs(), "/a/b"); + ASSERT_EQ((p1 / p2).abs(), "/a/b"); } { CanonPath p1("/a/b"); CanonPath p2("/"); - ASSERT_EQ((p1 + p2).abs(), "/a/b"); + ASSERT_EQ((p1 / p2).abs(), "/a/b"); } { CanonPath p("/foo/bar"); - ASSERT_EQ((p + "x").abs(), "/foo/bar/x"); + ASSERT_EQ((p / "x").abs(), "/foo/bar/x"); } { CanonPath p("/"); - ASSERT_EQ((p + "foo" + "bar").abs(), "/foo/bar"); + ASSERT_EQ((p / "foo" / "bar").abs(), "/foo/bar"); } } From 24205a87039cab89e6efcd6ec7d62de1c2c3b51f Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Mon, 5 Feb 2024 13:00:39 -0800 Subject: [PATCH 030/164] Add release note --- ...-location-in-while-evaluating-attribute.md | 23 +++++++++++++++++++ 1 file changed, 23 insertions(+) create mode 100644 doc/manual/rl-next/source-location-in-while-evaluating-attribute.md diff --git a/doc/manual/rl-next/source-location-in-while-evaluating-attribute.md b/doc/manual/rl-next/source-location-in-while-evaluating-attribute.md new file mode 100644 index 000000000..0e0b74c5a --- /dev/null +++ b/doc/manual/rl-next/source-location-in-while-evaluating-attribute.md @@ -0,0 +1,23 @@ +--- +synopsis: "In the debugger, `while evaluating the attribute` errors now include position information" +prs: 9915 +--- + +Before: + +``` +0: while evaluating the attribute 'python311.pythonForBuild.pkgs' +0x600001522598 +``` + +After: + +``` +0: while evaluating the attribute 'python311.pythonForBuild.pkgs' +/nix/store/hg65h51xnp74ikahns9hyf3py5mlbbqq-source/overrides/default.nix:132:27 + + 131| + 132| bootstrappingBase = pkgs.${self.python.pythonAttr}.pythonForBuild.pkgs; + | ^ + 133| in +``` From 601fc7d15978827a04a1bc44e92a8a42a512f50a Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Mon, 5 Feb 2024 13:13:26 -0800 Subject: [PATCH 031/164] Add release note --- 
...debugger-more-reliably-in-let-and-calls.md | 25 +++++++++++++++++++ 1 file changed, 25 insertions(+) create mode 100644 doc/manual/rl-next/enter-debugger-more-reliably-in-let-and-calls.md diff --git a/doc/manual/rl-next/enter-debugger-more-reliably-in-let-and-calls.md b/doc/manual/rl-next/enter-debugger-more-reliably-in-let-and-calls.md new file mode 100644 index 000000000..c93225816 --- /dev/null +++ b/doc/manual/rl-next/enter-debugger-more-reliably-in-let-and-calls.md @@ -0,0 +1,25 @@ +--- +synopsis: The `--debugger` will start more reliably in `let` expressions and function calls +prs: 9917 +issues: 6649 +--- + +Previously, if you attempted to evaluate this file with the debugger: + +```nix +let + a = builtins.trace "before inner break" ( + builtins.break "hello" + ); + b = builtins.trace "before outer break" ( + builtins.break a + ); +in + b +``` + +Nix would correctly enter the debugger at `builtins.break a`, but if you asked +it to `:continue`, it would skip over the `builtins.break "hello"` expression +entirely. + +Now, Nix will correctly enter the debugger at both breakpoints. From b63a8d7c46e7a59c3e133c94af24dfcf517fe50b Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Mon, 5 Feb 2024 13:15:29 -0800 Subject: [PATCH 032/164] Add release note --- .../rl-next/debugger-locals-for-let-expressions.md | 9 +++++++++ 1 file changed, 9 insertions(+) create mode 100644 doc/manual/rl-next/debugger-locals-for-let-expressions.md diff --git a/doc/manual/rl-next/debugger-locals-for-let-expressions.md b/doc/manual/rl-next/debugger-locals-for-let-expressions.md new file mode 100644 index 000000000..736208724 --- /dev/null +++ b/doc/manual/rl-next/debugger-locals-for-let-expressions.md @@ -0,0 +1,9 @@ +--- +synopsis: "`--debugger` can now access bindings from `let` expressions" +prs: 9918 +issues: 8827. +--- + +Breakpoints and errors in the bindings of a `let` expression can now access +those bindings in the debugger. 
Previously, only the body of `let` expressions +could access those bindings. From 155bc761f601346c8113cc760aaf26306136403c Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Mon, 5 Feb 2024 13:16:39 -0800 Subject: [PATCH 033/164] Add release note --- doc/manual/rl-next/reduce-debugger-clutter.md | 37 +++++++++++++++++++ 1 file changed, 37 insertions(+) create mode 100644 doc/manual/rl-next/reduce-debugger-clutter.md diff --git a/doc/manual/rl-next/reduce-debugger-clutter.md b/doc/manual/rl-next/reduce-debugger-clutter.md new file mode 100644 index 000000000..9bc902eee --- /dev/null +++ b/doc/manual/rl-next/reduce-debugger-clutter.md @@ -0,0 +1,37 @@ +--- +synopsis: "Visual clutter in `--debugger` is reduced" +prs: 9919 +--- + +Before: +``` +info: breakpoint reached + + +Starting REPL to allow you to inspect the current state of the evaluator. + +Welcome to Nix 2.20.0pre20231222_dirty. Type :? for help. + +nix-repl> :continue +error: uh oh + + +Starting REPL to allow you to inspect the current state of the evaluator. + +Welcome to Nix 2.20.0pre20231222_dirty. Type :? for help. + +nix-repl> +``` + +After: + +``` +info: breakpoint reached + +Nix 2.20.0pre20231222_dirty debugger +Type :? for help. 
+nix-repl> :continue +error: uh oh + +nix-repl> +``` From 657a6078121bf08525e9cd286c6f8887e983a22e Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Mon, 5 Feb 2024 13:21:08 -0800 Subject: [PATCH 034/164] Add release note --- .../rl-next/better-errors-in-nix-repl.md | 40 +++++++++++++++++++ 1 file changed, 40 insertions(+) create mode 100644 doc/manual/rl-next/better-errors-in-nix-repl.md diff --git a/doc/manual/rl-next/better-errors-in-nix-repl.md b/doc/manual/rl-next/better-errors-in-nix-repl.md new file mode 100644 index 000000000..4deaa8c70 --- /dev/null +++ b/doc/manual/rl-next/better-errors-in-nix-repl.md @@ -0,0 +1,40 @@ +--- +synopsis: Concise error printing in `nix repl` +prs: 9928 +--- + +Previously, if an element of a list or attribute set threw an error while +evaluating, `nix repl` would print the entire error (including source location +information) inline. This output was clumsy and difficult to parse: + +``` +nix-repl> { err = builtins.throw "uh oh!"; } +{ err = «error: + … while calling the 'throw' builtin + at «string»:1:9: + 1| { err = builtins.throw "uh oh!"; } + | ^ + + error: uh oh!»; } +``` + +Now, only the error message is displayed, making the output much more readable. +``` +nix-repl> { err = builtins.throw "uh oh!"; } +{ err = «error: uh oh!»; } +``` + +However, if the whole expression being evaluated throws an error, source +locations and (if applicable) a stack trace are printed, just like you'd expect: + +``` +nix-repl> builtins.throw "uh oh!" +error: + … while calling the 'throw' builtin + at «string»:1:1: + 1| builtins.throw "uh oh!" + | ^ + + error: uh oh! +``` + From c0a15fb7d03dfb8f53bc6726c414bc88aa362592 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Sun, 4 Feb 2024 00:40:30 -0800 Subject: [PATCH 035/164] Pretty-print values in the REPL Pretty-print values in the REPL by printing each item in a list or attrset on a separate line. 
When possible, single-item lists and attrsets are printed on one line, as long as they don't contain a nested list, attrset, or thunk. Before: ``` { attrs = { a = { b = { c = { }; }; }; }; list = [ 1 ]; list' = [ 1 2 3 ]; } ``` After: ``` { attrs = { a = { b = { c = { }; }; }; }; list = [ 1 ]; list' = [ 1 2 3 ]; } ``` --- src/libcmd/repl.cc | 3 +- src/libexpr/print-options.hh | 22 ++++ src/libexpr/print.cc | 114 ++++++++++++++++-- ...al-fail-bad-string-interpolation-4.err.exp | 2 +- tests/functional/repl.sh | 69 ++++++++++- tests/unit/libexpr/value/print.cc | 8 +- 6 files changed, 195 insertions(+), 23 deletions(-) diff --git a/src/libcmd/repl.cc b/src/libcmd/repl.cc index d7af15153..2c64bd7a6 100644 --- a/src/libcmd/repl.cc +++ b/src/libcmd/repl.cc @@ -101,7 +101,8 @@ struct NixRepl .ansiColors = true, .force = true, .derivationPaths = true, - .maxDepth = maxDepth + .maxDepth = maxDepth, + .prettyIndent = 2 }); } }; diff --git a/src/libexpr/print-options.hh b/src/libexpr/print-options.hh index e03746ece..94767df9c 100644 --- a/src/libexpr/print-options.hh +++ b/src/libexpr/print-options.hh @@ -17,24 +17,29 @@ struct PrintOptions * If true, output ANSI color sequences. */ bool ansiColors = false; + /** * If true, force values. */ bool force = false; + /** * If true and `force` is set, print derivations as * `«derivation /nix/store/...»` instead of as attribute sets. */ bool derivationPaths = false; + /** * If true, track which values have been printed and skip them on * subsequent encounters. Useful for self-referential values. */ bool trackRepeated = true; + /** * Maximum depth to evaluate to. */ size_t maxDepth = std::numeric_limits::max(); + /** * Maximum number of attributes in attribute sets to print. * @@ -42,6 +47,7 @@ struct PrintOptions * attribute set encountered. */ size_t maxAttrs = std::numeric_limits::max(); + /** * Maximum number of list items to print. * @@ -49,10 +55,26 @@ struct PrintOptions * list encountered. 
*/ size_t maxListItems = std::numeric_limits::max(); + /** * Maximum string length to print. */ size_t maxStringLength = std::numeric_limits::max(); + + /** + * Indentation width for pretty-printing. + * + * If set to 0 (the default), values are not pretty-printed. + */ + size_t prettyIndent = 0; + + /** + * True if pretty-printing is enabled. + */ + inline bool prettyPrint() + { + return prettyIndent > 0; + } }; /** diff --git a/src/libexpr/print.cc b/src/libexpr/print.cc index 68d381033..1ff026b3d 100644 --- a/src/libexpr/print.cc +++ b/src/libexpr/print.cc @@ -153,6 +153,7 @@ struct ImportantFirstAttrNameCmp }; typedef std::set ValuesSeen; +typedef std::vector> AttrVec; class Printer { @@ -163,6 +164,21 @@ private: std::optional seen; size_t attrsPrinted = 0; size_t listItemsPrinted = 0; + std::string indent; + + void increaseIndent() + { + if (options.prettyPrint()) { + indent.append(options.prettyIndent, ' '); + } + } + + void decreaseIndent() + { + if (options.prettyPrint()) { + indent.resize(indent.size() - options.prettyIndent); + } + } void printRepeated() { @@ -260,6 +276,28 @@ private: } } + bool shouldPrettyPrintAttrs(AttrVec & v) + { + if (!options.prettyPrint() || v.empty()) { + return false; + } + + // Pretty-print attrsets with more than one item. + if (v.size() > 1) { + return true; + } + + auto item = v[0].second; + if (!item) { + return true; + } + + // Pretty-print single-item attrsets only if they contain nested + // structures. 
+ auto itemType = item->type(); + return itemType == nList || itemType == nAttrs || itemType == nThunk; + } + void printAttrs(Value & v, size_t depth) { if (seen && !seen->insert(v.attrs).second) { @@ -270,9 +308,10 @@ private: if (options.force && options.derivationPaths && state.isDerivation(v)) { printDerivation(v); } else if (depth < options.maxDepth) { - output << "{ "; + increaseIndent(); + output << "{"; - std::vector> sorted; + AttrVec sorted; for (auto & i : *v.attrs) sorted.emplace_back(std::pair(state.symbols[i.name], i.value)); @@ -281,7 +320,15 @@ private: else std::sort(sorted.begin(), sorted.end(), ImportantFirstAttrNameCmp()); + auto prettyPrint = shouldPrettyPrintAttrs(sorted); + for (auto & i : sorted) { + if (prettyPrint) { + output << "\n" << indent; + } else { + output << " "; + } + if (attrsPrinted >= options.maxAttrs) { printElided(sorted.size() - attrsPrinted, "attribute", "attributes"); break; @@ -290,13 +337,42 @@ private: printAttributeName(output, i.first); output << " = "; print(*i.second, depth + 1); - output << "; "; + output << ";"; attrsPrinted++; } + decreaseIndent(); + if (prettyPrint) { + output << "\n" << indent; + } else { + output << " "; + } output << "}"; - } else + } else { output << "{ ... }"; + } + } + + bool shouldPrettyPrintList(std::span list) + { + if (!options.prettyPrint() || list.empty()) { + return false; + } + + // Pretty-print lists with more than one item. + if (list.size() > 1) { + return true; + } + + auto item = list[0]; + if (!item) { + return true; + } + + // Pretty-print single-item lists only if they contain nested + // structures. 
+ auto itemType = item->type(); + return itemType == nList || itemType == nAttrs || itemType == nThunk; } void printList(Value & v, size_t depth) @@ -306,11 +382,20 @@ private: return; } - output << "[ "; if (depth < options.maxDepth) { - for (auto elem : v.listItems()) { + increaseIndent(); + output << "["; + auto listItems = v.listItems(); + auto prettyPrint = shouldPrettyPrintList(listItems); + for (auto elem : listItems) { + if (prettyPrint) { + output << "\n" << indent; + } else { + output << " "; + } + if (listItemsPrinted >= options.maxListItems) { - printElided(v.listSize() - listItemsPrinted, "item", "items"); + printElided(listItems.size() - listItemsPrinted, "item", "items"); break; } @@ -319,13 +404,19 @@ private: } else { printNullptr(); } - output << " "; listItemsPrinted++; } + + decreaseIndent(); + if (prettyPrint) { + output << "\n" << indent; + } else { + output << " "; + } + output << "]"; + } else { + output << "[ ... ]"; } - else - output << "... "; - output << "]"; } void printFunction(Value & v) @@ -488,6 +579,7 @@ public: { attrsPrinted = 0; listItemsPrinted = 0; + indent.clear(); if (options.trackRepeated) { seen.emplace(); diff --git a/tests/functional/lang/eval-fail-bad-string-interpolation-4.err.exp b/tests/functional/lang/eval-fail-bad-string-interpolation-4.err.exp index 5119238d7..6f907106b 100644 --- a/tests/functional/lang/eval-fail-bad-string-interpolation-4.err.exp +++ b/tests/functional/lang/eval-fail-bad-string-interpolation-4.err.exp @@ -6,4 +6,4 @@ error: | ^ 10| - error: cannot coerce a set to a string: { a = { a = { a = { a = "ha"; b = "ha"; c = "ha"; d = "ha"; e = "ha"; f = "ha"; g = "ha"; h = "ha"; j = "ha"; }; «4294967295 attributes elided»}; «4294967294 attributes elided»}; «4294967293 attributes elided»} + error: cannot coerce a set to a string: { a = { a = { a = { a = "ha"; b = "ha"; c = "ha"; d = "ha"; e = "ha"; f = "ha"; g = "ha"; h = "ha"; j = "ha"; }; «4294967295 attributes elided» }; «4294967294 attributes elided» 
}; «4294967293 attributes elided» } diff --git a/tests/functional/repl.sh b/tests/functional/repl.sh index 5f399aa44..4938c2267 100644 --- a/tests/functional/repl.sh +++ b/tests/functional/repl.sh @@ -146,29 +146,86 @@ echo "$replResult" | grepQuiet -s afterChange # Normal output should print attributes in lexicographical order non-recursively testReplResponseNoRegex ' { a = { b = 2; }; l = [ 1 2 3 ]; s = "string"; n = 1234; x = rec { y = { z = { inherit y; }; }; }; } -' '{ a = { ... }; l = [ ... ]; n = 1234; s = "string"; x = { ... }; }' +' \ +'{ + a = { ... }; + l = [ ... ]; + n = 1234; + s = "string"; + x = { ... }; +} +' # Same for lists, but order is preserved testReplResponseNoRegex ' [ 42 1 "thingy" ({ a = 1; }) ([ 1 2 3 ]) ] -' '[ 42 1 "thingy" { ... } [ ... ] ]' +' \ +'[ + 42 + 1 + "thingy" + { ... } + [ ... ] +] +' # Same for let expressions testReplResponseNoRegex ' let x = { y = { a = 1; }; inherit x; }; in x -' '{ x = «repeated»; y = { ... }; }' +' \ +'{ + x = { ... }; + y = { ... 
}; +} +' # The :p command should recursively print sets, but prevent infinite recursion testReplResponseNoRegex ' :p { a = { b = 2; }; s = "string"; n = 1234; x = rec { y = { z = { inherit y; }; }; }; } -' '{ a = { b = 2; }; n = 1234; s = "string"; x = { y = { z = { y = «repeated»; }; }; }; }' +' \ +'{ + a = { b = 2; }; + n = 1234; + s = "string"; + x = { + y = { + z = { + y = «repeated»; + }; + }; + }; +} +' # Same for lists testReplResponseNoRegex ' :p [ 42 1 "thingy" (rec { a = 1; b = { inherit a; inherit b; }; }) ([ 1 2 3 ]) ] -' '[ 42 1 "thingy" { a = 1; b = { a = 1; b = «repeated»; }; } [ 1 2 3 ] ]' +' \ +'[ + 42 + 1 + "thingy" + { + a = 1; + b = { + a = 1; + b = «repeated»; + }; + } + [ + 1 + 2 + 3 + ] +] +' # Same for let expressions testReplResponseNoRegex ' :p let x = { y = { a = 1; }; inherit x; }; in x -' '{ x = «repeated»; y = { a = 1; }; }' +' \ +'{ + x = «repeated»; + y = { a = 1 }; +} +' diff --git a/tests/unit/libexpr/value/print.cc b/tests/unit/libexpr/value/print.cc index c4264a38d..db1e4f3a3 100644 --- a/tests/unit/libexpr/value/print.cc +++ b/tests/unit/libexpr/value/print.cc @@ -756,7 +756,7 @@ TEST_F(ValuePrintingTests, ansiColorsAttrsElided) vAttrs.mkAttrs(builder.finish()); test(vAttrs, - "{ one = " ANSI_CYAN "1" ANSI_NORMAL "; " ANSI_FAINT "«1 attribute elided»" ANSI_NORMAL "}", + "{ one = " ANSI_CYAN "1" ANSI_NORMAL "; " ANSI_FAINT "«1 attribute elided»" ANSI_NORMAL " }", PrintOptions { .ansiColors = true, .maxAttrs = 1 @@ -769,7 +769,7 @@ TEST_F(ValuePrintingTests, ansiColorsAttrsElided) vAttrs.mkAttrs(builder.finish()); test(vAttrs, - "{ one = " ANSI_CYAN "1" ANSI_NORMAL "; " ANSI_FAINT "«2 attributes elided»" ANSI_NORMAL "}", + "{ one = " ANSI_CYAN "1" ANSI_NORMAL "; " ANSI_FAINT "«2 attributes elided»" ANSI_NORMAL " }", PrintOptions { .ansiColors = true, .maxAttrs = 1 @@ -793,7 +793,7 @@ TEST_F(ValuePrintingTests, ansiColorsListElided) vList.bigList.size = 2; test(vList, - "[ " ANSI_CYAN "1" ANSI_NORMAL " " ANSI_FAINT "«1 item 
elided»" ANSI_NORMAL "]", + "[ " ANSI_CYAN "1" ANSI_NORMAL " " ANSI_FAINT "«1 item elided»" ANSI_NORMAL " ]", PrintOptions { .ansiColors = true, .maxListItems = 1 @@ -806,7 +806,7 @@ TEST_F(ValuePrintingTests, ansiColorsListElided) vList.bigList.size = 3; test(vList, - "[ " ANSI_CYAN "1" ANSI_NORMAL " " ANSI_FAINT "«2 items elided»" ANSI_NORMAL "]", + "[ " ANSI_CYAN "1" ANSI_NORMAL " " ANSI_FAINT "«2 items elided»" ANSI_NORMAL " ]", PrintOptions { .ansiColors = true, .maxListItems = 1 From 2d74b56aee84051d386f124c092d143b9cc437f9 Mon Sep 17 00:00:00 2001 From: Valentin Gagarin Date: Tue, 6 Feb 2024 23:22:34 +0100 Subject: [PATCH 036/164] fix location of `_redirects` file the Netlify `_redirects` file must be in the root directory [0] of the files to serve, and mdBook copies all the files in `src` that aren't `.md` to the output directory [1]. [0]: https://docs.netlify.com/routing/redirects/ [1]: https://rust-lang.github.io/mdBook/guide/creating.html#source-files --- doc/manual/{ => src}/_redirects | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename doc/manual/{ => src}/_redirects (100%) diff --git a/doc/manual/_redirects b/doc/manual/src/_redirects similarity index 100% rename from doc/manual/_redirects rename to doc/manual/src/_redirects From 474fc4078acbe062fcc31ce91c69c8f33bf00d5f Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Tue, 6 Feb 2024 16:49:28 -0800 Subject: [PATCH 037/164] Add comments --- src/libexpr/eval-error.cc | 2 +- src/libexpr/eval-error.hh | 30 ++++++++---------------------- 2 files changed, 9 insertions(+), 23 deletions(-) diff --git a/src/libexpr/eval-error.cc b/src/libexpr/eval-error.cc index b9411cbf4..250c59a19 100644 --- a/src/libexpr/eval-error.cc +++ b/src/libexpr/eval-error.cc @@ -91,7 +91,7 @@ void EvalErrorBuilder::debugThrow() // `EvalState` is the only class that can construct an `EvalErrorBuilder`, // and it does so in dynamic storage. 
This is the final method called on - // any such instancve and must delete itself before throwing the underlying + // any such instance and must delete itself before throwing the underlying // error. auto error = std::move(this->error); delete this; diff --git a/src/libexpr/eval-error.hh b/src/libexpr/eval-error.hh index ee69dce64..711743886 100644 --- a/src/libexpr/eval-error.hh +++ b/src/libexpr/eval-error.hh @@ -56,6 +56,11 @@ public: } }; +/** + * `EvalErrorBuilder`s may only be constructed by `EvalState`. The `debugThrow` + * method must be the final method in any such `EvalErrorBuilder` usage, and it + * handles deleting the object. + */ template class EvalErrorBuilder final { @@ -90,29 +95,10 @@ public: [[nodiscard, gnu::noinline]] EvalErrorBuilder & addTrace(PosIdx pos, std::string_view formatString, const Args &... formatArgs); + /** + * Delete the `EvalErrorBuilder` and throw the underlying exception. + */ [[gnu::noinline, gnu::noreturn]] void debugThrow(); }; -/** - * The size needed to allocate any `EvalErrorBuilder`. - * - * The list of classes here needs to be kept in sync with the list of `template - * class` declarations in `eval-error.cc`. - * - * This is used by `EvalState` to preallocate a buffer of sufficient size for - * any `EvalErrorBuilder` to avoid allocating while evaluating Nix code. 
- */ -constexpr size_t EVAL_ERROR_BUILDER_SIZE = std::max({ - sizeof(EvalErrorBuilder), - sizeof(EvalErrorBuilder), - sizeof(EvalErrorBuilder), - sizeof(EvalErrorBuilder), - sizeof(EvalErrorBuilder), - sizeof(EvalErrorBuilder), - sizeof(EvalErrorBuilder), - sizeof(EvalErrorBuilder), - sizeof(EvalErrorBuilder), - sizeof(EvalErrorBuilder), -}); - } From 9723f533d85133fa3c4d9421a58c7765cb61e733 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Tue, 6 Feb 2024 16:50:47 -0800 Subject: [PATCH 038/164] Add comment --- src/libexpr/eval.hh | 1 + 1 file changed, 1 insertion(+) diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh index afe89cd30..3c7c5da27 100644 --- a/src/libexpr/eval.hh +++ b/src/libexpr/eval.hh @@ -239,6 +239,7 @@ public: template [[nodiscard, gnu::noinline]] EvalErrorBuilder & error(const Args & ... args) { + // `EvalErrorBuilder::debugThrow` performs the corresponding `delete`. return *new EvalErrorBuilder(*this, args...); } From bc085022494fe90f733aef0832b6d7dcc34709cf Mon Sep 17 00:00:00 2001 From: John Ericson Date: Fri, 26 Jan 2024 15:54:33 -0500 Subject: [PATCH 039/164] Support arbitrary stores in Perl bindings Fix #9859 It's a breaking change but that's fine; we can just update Hydra to use the new bindings. 
--- perl/.yath.rc | 2 + perl/default.nix | 18 +++- perl/lib/Nix/Store.pm | 19 ++-- perl/lib/Nix/Store.xs | 201 +++++++++++++++++++++++++++--------------- perl/local.mk | 3 + perl/t/init.t | 13 +++ 6 files changed, 171 insertions(+), 85 deletions(-) create mode 100644 perl/.yath.rc create mode 100644 perl/t/init.t diff --git a/perl/.yath.rc b/perl/.yath.rc new file mode 100644 index 000000000..118bf80c8 --- /dev/null +++ b/perl/.yath.rc @@ -0,0 +1,2 @@ +[test] +-I=rel(lib/Nix) diff --git a/perl/default.nix b/perl/default.nix index 4687976a1..7103574c9 100644 --- a/perl/default.nix +++ b/perl/default.nix @@ -5,12 +5,12 @@ , nix, curl, bzip2, xz, boost, libsodium, darwin }: -perl.pkgs.toPerlModule (stdenv.mkDerivation { +perl.pkgs.toPerlModule (stdenv.mkDerivation (finalAttrs: { name = "nix-perl-${nix.version}"; src = fileset.toSource { root = ../.; - fileset = fileset.unions [ + fileset = fileset.unions ([ ../.version ../m4 ../mk @@ -20,7 +20,10 @@ perl.pkgs.toPerlModule (stdenv.mkDerivation { ./configure.ac ./lib ./local.mk - ]; + ] ++ lib.optionals finalAttrs.doCheck [ + ./.yath.rc + ./t + ]); }; nativeBuildInputs = @@ -40,6 +43,13 @@ perl.pkgs.toPerlModule (stdenv.mkDerivation { ++ lib.optional (stdenv.isLinux || stdenv.isDarwin) libsodium ++ lib.optional stdenv.isDarwin darwin.apple_sdk.frameworks.Security; + # `perlPackages.Test2Harness` is marked broken for Darwin + doCheck = !stdenv.isDarwin; + + nativeCheckInputs = [ + perlPackages.Test2Harness + ]; + configureFlags = [ "--with-dbi=${perlPackages.DBI}/${perl.libPrefix}" "--with-dbd-sqlite=${perlPackages.DBDSQLite}/${perl.libPrefix}" @@ -48,4 +58,4 @@ perl.pkgs.toPerlModule (stdenv.mkDerivation { enableParallelBuilding = true; postUnpack = "sourceRoot=$sourceRoot/perl"; -}) +})) diff --git a/perl/lib/Nix/Store.pm b/perl/lib/Nix/Store.pm index 3e4bbee0a..16f2e17c8 100644 --- a/perl/lib/Nix/Store.pm +++ b/perl/lib/Nix/Store.pm @@ -12,17 +12,20 @@ our %EXPORT_TAGS = ( 'all' => [ qw( ) ] ); our @EXPORT_OK = ( @{ 
$EXPORT_TAGS{'all'} } ); our @EXPORT = qw( - setVerbosity - isValidPath queryReferences queryPathInfo queryDeriver queryPathHash - queryPathFromHashPart - topoSortPaths computeFSClosure followLinksToStorePath exportPaths importPaths + StoreWrapper + StoreWrapper::new + StoreWrapper::isValidPath StoreWrapper::queryReferences StoreWrapper::queryPathInfo StoreWrapper::queryDeriver StoreWrapper::queryPathHash + StoreWrapper::queryPathFromHashPart + StoreWrapper::topoSortPaths StoreWrapper::computeFSClosure followLinksToStorePath StoreWrapper::exportPaths StoreWrapper::importPaths + StoreWrapper::addToStore StoreWrapper::makeFixedOutputPath + StoreWrapper::derivationFromPath + StoreWrapper::addTempRoot + StoreWrapper::queryRawRealisation + hashPath hashFile hashString convertHash signString checkSignature - addToStore makeFixedOutputPath - derivationFromPath - addTempRoot getBinDir getStoreDir - queryRawRealisation + setVerbosity ); our $VERSION = '0.15'; diff --git a/perl/lib/Nix/Store.xs b/perl/lib/Nix/Store.xs index 423c01cf7..6730197b5 100644 --- a/perl/lib/Nix/Store.xs +++ b/perl/lib/Nix/Store.xs @@ -17,47 +17,61 @@ #include #include - using namespace nix; +static bool libStoreInitialized = false; -static ref store() -{ - static std::shared_ptr _store; - if (!_store) { - try { - initLibStore(); - _store = openStore(); - } catch (Error & e) { - croak("%s", e.what()); - } - } - return ref(_store); -} - +struct StoreWrapper { + ref store; +}; MODULE = Nix::Store PACKAGE = Nix::Store PROTOTYPES: ENABLE +TYPEMAP: < _store; try { - RETVAL = store()->isValidPath(store()->parseStorePath(path)); + if (!libStoreInitialized) { + initLibStore(); + libStoreInitialized = true; + } + if (items == 1) { + _store = openStore(); + RETVAL = new StoreWrapper { + .store = ref{_store} + }; + } else { + RETVAL = new StoreWrapper { + .store = openStore(s) + }; + } } catch (Error & e) { croak("%s", e.what()); } @@ -65,52 +79,81 @@ int isValidPath(char * path) RETVAL -SV * 
queryReferences(char * path) +void init() + CODE: + if (!libStoreInitialized) { + initLibStore(); + libStoreInitialized = true; + } + + +void setVerbosity(int level) + CODE: + verbosity = (Verbosity) level; + + +int +StoreWrapper::isValidPath(char * path) + CODE: + try { + RETVAL = THIS->store->isValidPath(THIS->store->parseStorePath(path)); + } catch (Error & e) { + croak("%s", e.what()); + } + OUTPUT: + RETVAL + + +SV * +StoreWrapper::queryReferences(char * path) PPCODE: try { - for (auto & i : store()->queryPathInfo(store()->parseStorePath(path))->references) - XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(i).c_str(), 0))); + for (auto & i : THIS->store->queryPathInfo(THIS->store->parseStorePath(path))->references) + XPUSHs(sv_2mortal(newSVpv(THIS->store->printStorePath(i).c_str(), 0))); } catch (Error & e) { croak("%s", e.what()); } -SV * queryPathHash(char * path) +SV * +StoreWrapper::queryPathHash(char * path) PPCODE: try { - auto s = store()->queryPathInfo(store()->parseStorePath(path))->narHash.to_string(HashFormat::Nix32, true); + auto s = THIS->store->queryPathInfo(THIS->store->parseStorePath(path))->narHash.to_string(HashFormat::Nix32, true); XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0))); } catch (Error & e) { croak("%s", e.what()); } -SV * queryDeriver(char * path) +SV * +StoreWrapper::queryDeriver(char * path) PPCODE: try { - auto info = store()->queryPathInfo(store()->parseStorePath(path)); + auto info = THIS->store->queryPathInfo(THIS->store->parseStorePath(path)); if (!info->deriver) XSRETURN_UNDEF; - XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(*info->deriver).c_str(), 0))); + XPUSHs(sv_2mortal(newSVpv(THIS->store->printStorePath(*info->deriver).c_str(), 0))); } catch (Error & e) { croak("%s", e.what()); } -SV * queryPathInfo(char * path, int base32) +SV * +StoreWrapper::queryPathInfo(char * path, int base32) PPCODE: try { - auto info = store()->queryPathInfo(store()->parseStorePath(path)); + auto info = 
THIS->store->queryPathInfo(THIS->store->parseStorePath(path)); if (!info->deriver) XPUSHs(&PL_sv_undef); else - XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(*info->deriver).c_str(), 0))); + XPUSHs(sv_2mortal(newSVpv(THIS->store->printStorePath(*info->deriver).c_str(), 0))); auto s = info->narHash.to_string(base32 ? HashFormat::Nix32 : HashFormat::Base16, true); XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0))); mXPUSHi(info->registrationTime); mXPUSHi(info->narSize); AV * refs = newAV(); for (auto & i : info->references) - av_push(refs, newSVpv(store()->printStorePath(i).c_str(), 0)); + av_push(refs, newSVpv(THIS->store->printStorePath(i).c_str(), 0)); XPUSHs(sv_2mortal(newRV((SV *) refs))); AV * sigs = newAV(); for (auto & i : info->sigs) @@ -120,10 +163,11 @@ SV * queryPathInfo(char * path, int base32) croak("%s", e.what()); } -SV * queryRawRealisation(char * outputId) +SV * +StoreWrapper::queryRawRealisation(char * outputId) PPCODE: try { - auto realisation = store()->queryRealisation(DrvOutput::parse(outputId)); + auto realisation = THIS->store->queryRealisation(DrvOutput::parse(outputId)); if (realisation) XPUSHs(sv_2mortal(newSVpv(realisation->toJSON().dump().c_str(), 0))); else @@ -133,46 +177,50 @@ SV * queryRawRealisation(char * outputId) } -SV * queryPathFromHashPart(char * hashPart) +SV * +StoreWrapper::queryPathFromHashPart(char * hashPart) PPCODE: try { - auto path = store()->queryPathFromHashPart(hashPart); - XPUSHs(sv_2mortal(newSVpv(path ? store()->printStorePath(*path).c_str() : "", 0))); + auto path = THIS->store->queryPathFromHashPart(hashPart); + XPUSHs(sv_2mortal(newSVpv(path ? THIS->store->printStorePath(*path).c_str() : "", 0))); } catch (Error & e) { croak("%s", e.what()); } -SV * computeFSClosure(int flipDirection, int includeOutputs, ...) +SV * +StoreWrapper::computeFSClosure(int flipDirection, int includeOutputs, ...) 
PPCODE: try { StorePathSet paths; for (int n = 2; n < items; ++n) - store()->computeFSClosure(store()->parseStorePath(SvPV_nolen(ST(n))), paths, flipDirection, includeOutputs); + THIS->store->computeFSClosure(THIS->store->parseStorePath(SvPV_nolen(ST(n))), paths, flipDirection, includeOutputs); for (auto & i : paths) - XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(i).c_str(), 0))); + XPUSHs(sv_2mortal(newSVpv(THIS->store->printStorePath(i).c_str(), 0))); } catch (Error & e) { croak("%s", e.what()); } -SV * topoSortPaths(...) +SV * +StoreWrapper::topoSortPaths(...) PPCODE: try { StorePathSet paths; - for (int n = 0; n < items; ++n) paths.insert(store()->parseStorePath(SvPV_nolen(ST(n)))); - auto sorted = store()->topoSortPaths(paths); + for (int n = 0; n < items; ++n) paths.insert(THIS->store->parseStorePath(SvPV_nolen(ST(n)))); + auto sorted = THIS->store->topoSortPaths(paths); for (auto & i : sorted) - XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(i).c_str(), 0))); + XPUSHs(sv_2mortal(newSVpv(THIS->store->printStorePath(i).c_str(), 0))); } catch (Error & e) { croak("%s", e.what()); } -SV * followLinksToStorePath(char * path) +SV * +StoreWrapper::followLinksToStorePath(char * path) CODE: try { - RETVAL = newSVpv(store()->printStorePath(store()->followLinksToStorePath(path)).c_str(), 0); + RETVAL = newSVpv(THIS->store->printStorePath(THIS->store->followLinksToStorePath(path)).c_str(), 0); } catch (Error & e) { croak("%s", e.what()); } @@ -180,29 +228,32 @@ SV * followLinksToStorePath(char * path) RETVAL -void exportPaths(int fd, ...) +void +StoreWrapper::exportPaths(int fd, ...) 
PPCODE: try { StorePathSet paths; - for (int n = 1; n < items; ++n) paths.insert(store()->parseStorePath(SvPV_nolen(ST(n)))); + for (int n = 1; n < items; ++n) paths.insert(THIS->store->parseStorePath(SvPV_nolen(ST(n)))); FdSink sink(fd); - store()->exportPaths(paths, sink); + THIS->store->exportPaths(paths, sink); } catch (Error & e) { croak("%s", e.what()); } -void importPaths(int fd, int dontCheckSigs) +void +StoreWrapper::importPaths(int fd, int dontCheckSigs) PPCODE: try { FdSource source(fd); - store()->importPaths(source, dontCheckSigs ? NoCheckSigs : CheckSigs); + THIS->store->importPaths(source, dontCheckSigs ? NoCheckSigs : CheckSigs); } catch (Error & e) { croak("%s", e.what()); } -SV * hashPath(char * algo, int base32, char * path) +SV * +hashPath(char * algo, int base32, char * path) PPCODE: try { PosixSourceAccessor accessor; @@ -280,64 +331,67 @@ int checkSignature(SV * publicKey_, SV * sig_, char * msg) RETVAL -SV * addToStore(char * srcPath, int recursive, char * algo) +SV * +StoreWrapper::addToStore(char * srcPath, int recursive, char * algo) PPCODE: try { auto method = recursive ? FileIngestionMethod::Recursive : FileIngestionMethod::Flat; PosixSourceAccessor accessor; - auto path = store()->addToStore( + auto path = THIS->store->addToStore( std::string(baseNameOf(srcPath)), accessor, CanonPath::fromCwd(srcPath), method, parseHashAlgo(algo)); - XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(path).c_str(), 0))); + XPUSHs(sv_2mortal(newSVpv(THIS->store->printStorePath(path).c_str(), 0))); } catch (Error & e) { croak("%s", e.what()); } -SV * makeFixedOutputPath(int recursive, char * algo, char * hash, char * name) +SV * +StoreWrapper::makeFixedOutputPath(int recursive, char * algo, char * hash, char * name) PPCODE: try { auto h = Hash::parseAny(hash, parseHashAlgo(algo)); auto method = recursive ? 
FileIngestionMethod::Recursive : FileIngestionMethod::Flat; - auto path = store()->makeFixedOutputPath(name, FixedOutputInfo { + auto path = THIS->store->makeFixedOutputPath(name, FixedOutputInfo { .method = method, .hash = h, .references = {}, }); - XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(path).c_str(), 0))); + XPUSHs(sv_2mortal(newSVpv(THIS->store->printStorePath(path).c_str(), 0))); } catch (Error & e) { croak("%s", e.what()); } -SV * derivationFromPath(char * drvPath) +SV * +StoreWrapper::derivationFromPath(char * drvPath) PREINIT: HV *hash; CODE: try { - Derivation drv = store()->derivationFromPath(store()->parseStorePath(drvPath)); + Derivation drv = THIS->store->derivationFromPath(THIS->store->parseStorePath(drvPath)); hash = newHV(); HV * outputs = newHV(); - for (auto & i : drv.outputsAndOptPaths(*store())) { + for (auto & i : drv.outputsAndOptPaths(*THIS->store)) { hv_store( outputs, i.first.c_str(), i.first.size(), !i.second.second ? newSV(0) /* null value */ - : newSVpv(store()->printStorePath(*i.second.second).c_str(), 0), + : newSVpv(THIS->store->printStorePath(*i.second.second).c_str(), 0), 0); } hv_stores(hash, "outputs", newRV((SV *) outputs)); AV * inputDrvs = newAV(); for (auto & i : drv.inputDrvs.map) - av_push(inputDrvs, newSVpv(store()->printStorePath(i.first).c_str(), 0)); // !!! ignores i->second + av_push(inputDrvs, newSVpv(THIS->store->printStorePath(i.first).c_str(), 0)); // !!! 
ignores i->second hv_stores(hash, "inputDrvs", newRV((SV *) inputDrvs)); AV * inputSrcs = newAV(); for (auto & i : drv.inputSrcs) - av_push(inputSrcs, newSVpv(store()->printStorePath(i).c_str(), 0)); + av_push(inputSrcs, newSVpv(THIS->store->printStorePath(i).c_str(), 0)); hv_stores(hash, "inputSrcs", newRV((SV *) inputSrcs)); hv_stores(hash, "platform", newSVpv(drv.platform.c_str(), 0)); @@ -361,10 +415,11 @@ SV * derivationFromPath(char * drvPath) RETVAL -void addTempRoot(char * storePath) +void +StoreWrapper::addTempRoot(char * storePath) PPCODE: try { - store()->addTempRoot(store()->parseStorePath(storePath)); + THIS->store->addTempRoot(THIS->store->parseStorePath(storePath)); } catch (Error & e) { croak("%s", e.what()); } diff --git a/perl/local.mk b/perl/local.mk index 0eae651d8..ed4764eb9 100644 --- a/perl/local.mk +++ b/perl/local.mk @@ -41,3 +41,6 @@ Store_FORCE_INSTALL = 1 Store_INSTALL_DIR = $(perllibdir)/auto/Nix/Store clean-files += lib/Nix/Config.pm lib/Nix/Store.cc Makefile.config + +check: all + yath test diff --git a/perl/t/init.t b/perl/t/init.t new file mode 100644 index 000000000..80197e013 --- /dev/null +++ b/perl/t/init.t @@ -0,0 +1,13 @@ +use strict; +use warnings; +use Test2::V0; + +use Nix::Store; + +my $s = new Nix::Store("dummy://"); + +my $res = $s->isValidPath("/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar"); + +ok(!$res, "should not have path"); + +done_testing; From 140de3b2780c6c49030b118051e15f32d202bc49 Mon Sep 17 00:00:00 2001 From: Valentin Gagarin Date: Thu, 8 Feb 2024 09:00:00 +0100 Subject: [PATCH 040/164] manual: fold sidebar sections the table of contents is very long now, and folded sections allow for a better overview. 
--- doc/manual/book.toml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/doc/manual/book.toml b/doc/manual/book.toml index 73fb7e75e..d524dbb13 100644 --- a/doc/manual/book.toml +++ b/doc/manual/book.toml @@ -6,6 +6,8 @@ additional-css = ["custom.css"] additional-js = ["redirects.js"] edit-url-template = "https://github.com/NixOS/nix/tree/master/doc/manual/{path}" git-repository-url = "https://github.com/NixOS/nix" +fold.enable = true +fold.level = 1 [preprocessor.anchors] renderers = ["html"] From e486b76eef135cdb1f112b9bb2ffcbf6a08f7c96 Mon Sep 17 00:00:00 2001 From: Valentin Gagarin Date: Thu, 8 Feb 2024 09:08:58 +0100 Subject: [PATCH 041/164] move JSON section into Formats and Protocols --- doc/manual/src/SUMMARY.md.in | 8 ++++---- doc/manual/src/_redirects | 1 + doc/manual/src/{ => protocols}/json/derivation.md | 0 doc/manual/src/{ => protocols}/json/store-object-info.md | 5 +++-- src/nix/derivation-add.md | 2 +- src/nix/derivation-show.md | 2 +- 6 files changed, 10 insertions(+), 8 deletions(-) rename doc/manual/src/{ => protocols}/json/derivation.md (100%) rename doc/manual/src/{ => protocols}/json/store-object-info.md (96%) diff --git a/doc/manual/src/SUMMARY.md.in b/doc/manual/src/SUMMARY.md.in index 695d63dfc..167f54206 100644 --- a/doc/manual/src/SUMMARY.md.in +++ b/doc/manual/src/SUMMARY.md.in @@ -104,10 +104,10 @@ - [Channels](command-ref/files/channels.md) - [Default Nix expression](command-ref/files/default-nix-expression.md) - [Architecture and Design](architecture/architecture.md) -- [JSON Formats](json/index.md) - - [Store Object Info](json/store-object-info.md) - - [Derivation](json/derivation.md) -- [Protocols](protocols/index.md) +- [Formats and Protocols](protocols/index.md) + - [JSON Formats](protocols/json/index.md) + - [Store Object Info](protocols/json/store-object-info.md) + - [Derivation](protocols/json/derivation.md) - [Serving Tarball Flakes](protocols/tarball-fetcher.md) - [Derivation "ATerm" file 
format](protocols/derivation-aterm.md) - [Glossary](glossary.md) diff --git a/doc/manual/src/_redirects b/doc/manual/src/_redirects index 62c693c97..8bf0e854b 100644 --- a/doc/manual/src/_redirects +++ b/doc/manual/src/_redirects @@ -36,5 +36,6 @@ /package-management/s3-substituter /store/types/s3-binary-cache-store 301! /protocols/protocols /protocols 301! +/json/* /protocols/json/:splat 301! /release-notes/release-notes /release-notes 301! diff --git a/doc/manual/src/json/derivation.md b/doc/manual/src/protocols/json/derivation.md similarity index 100% rename from doc/manual/src/json/derivation.md rename to doc/manual/src/protocols/json/derivation.md diff --git a/doc/manual/src/json/store-object-info.md b/doc/manual/src/protocols/json/store-object-info.md similarity index 96% rename from doc/manual/src/json/store-object-info.md rename to doc/manual/src/protocols/json/store-object-info.md index db43c2fa1..ba4ab098f 100644 --- a/doc/manual/src/json/store-object-info.md +++ b/doc/manual/src/protocols/json/store-object-info.md @@ -14,11 +14,11 @@ Info about a [store object]. * `narHash`: - Hash of the [file system object] part of the store object when serialized as a [Nix Archive](#gloss-nar). + Hash of the [file system object] part of the store object when serialized as a [Nix Archive]. * `narSize`: - Size of the [file system object] part of the store object when serialized as a [Nix Archive](#gloss-nar). + Size of the [file system object] part of the store object when serialized as a [Nix Archive]. * `references`: @@ -30,6 +30,7 @@ Info about a [store object]. [store path]: @docroot@/glossary.md#gloss-store-path [file system object]: @docroot@/store/file-system-object.md +[Nix Archive]: @docroot@/glossary.md#gloss-nar ## Impure fields diff --git a/src/nix/derivation-add.md b/src/nix/derivation-add.md index d9b8467df..331cbdd88 100644 --- a/src/nix/derivation-add.md +++ b/src/nix/derivation-add.md @@ -14,6 +14,6 @@ a Nix expression evaluates. 
`nix derivation add` takes a single derivation in the following format: -{{#include ../../json/derivation.md}} +{{#include ../../protocols/json/derivation.md}} )"" diff --git a/src/nix/derivation-show.md b/src/nix/derivation-show.md index 884f1adc6..2437ea08f 100644 --- a/src/nix/derivation-show.md +++ b/src/nix/derivation-show.md @@ -52,6 +52,6 @@ By default, this command only shows top-level derivations, but with [store path]: @docroot@/glossary.md#gloss-store-path -{{#include ../../json/derivation.md}} +{{#include ../../protocols/json/derivation.md}} )"" From d24c8aa49141fc384deafee50da65a05553a124b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Th=C3=A9ophane=20Hufschmitt?= <7226587+thufschmitt@users.noreply.github.com> Date: Thu, 8 Feb 2024 09:22:30 +0100 Subject: [PATCH 042/164] Simplify a conditional in the repl initialisation --- src/libcmd/repl.cc | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/src/libcmd/repl.cc b/src/libcmd/repl.cc index 5b4d3f9d5..9826f0fac 100644 --- a/src/libcmd/repl.cc +++ b/src/libcmd/repl.cc @@ -255,9 +255,7 @@ void NixRepl::mainLoop() notice("Nix %1%%2%\nType :? for help.", nixVersion, debuggerNotice); } - if (isFirstRepl) { - isFirstRepl = false; - } + isFirstRepl = false; loadFiles(); From 0dea16e686e7128efd95a28e2000639bb169125a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Th=C3=A9ophane=20Hufschmitt?= <7226587+thufschmitt@users.noreply.github.com> Date: Thu, 8 Feb 2024 09:50:12 +0100 Subject: [PATCH 043/164] Mention `OPTIMIZE=0` more prominently in the hacking guide This is a game-changer when developing, it shouldn't just be hidden amongst a list of more advanced variables. 
--- doc/manual/src/contributing/hacking.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/manual/src/contributing/hacking.md b/doc/manual/src/contributing/hacking.md index 9e2470859..d36d46620 100644 --- a/doc/manual/src/contributing/hacking.md +++ b/doc/manual/src/contributing/hacking.md @@ -44,13 +44,13 @@ To build Nix itself in this shell: ```console [nix-shell]$ autoreconfPhase [nix-shell]$ configurePhase -[nix-shell]$ make -j $NIX_BUILD_CORES +[nix-shell]$ make -j $NIX_BUILD_CORES OPTIMIZE=0 ``` To install it in `$(pwd)/outputs` and test it: ```console -[nix-shell]$ make install +[nix-shell]$ make install OPTIMIZE=0 [nix-shell]$ make installcheck check -j $NIX_BUILD_CORES [nix-shell]$ nix --version nix (Nix) 2.12 From 4687beecef87b358a514825e3700e47962ca2194 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Tue, 6 Feb 2024 16:23:58 -0500 Subject: [PATCH 044/164] Get rid of `CanonPath::fromCwd` As discussed in the last Nix team meeting (2024-02-95), this method doesn't belong because `CanonPath` is a virtual/ideal absolute path format, not used in file systems beyond the native OS format for which a "current working directory" is defined. 
Progress towards #9205 --- perl/lib/Nix/Store.xs | 8 ++-- src/libcmd/common-eval-args.cc | 6 +-- src/libcmd/common-eval-args.hh | 2 +- src/libcmd/editor-for.cc | 2 +- src/libcmd/installables.cc | 5 +- src/libcmd/repl.cc | 2 +- src/libexpr/eval.cc | 6 +-- src/libexpr/eval.hh | 5 ++ src/libexpr/paths.cc | 6 ++- src/libfetchers/fs-input-accessor.cc | 64 +++++--------------------- src/libfetchers/fs-input-accessor.hh | 5 +- src/libfetchers/git-utils.cc | 22 ++++----- src/libfetchers/git-utils.hh | 2 +- src/libfetchers/git.cc | 18 ++++---- src/libutil/archive.cc | 4 +- src/libutil/canon-path.cc | 5 -- src/libutil/canon-path.hh | 2 - src/libutil/posix-source-accessor.cc | 45 ++++++++++++++---- src/libutil/posix-source-accessor.hh | 29 +++++++++++- src/libutil/source-accessor.hh | 4 +- src/libutil/source-path.cc | 2 +- src/libutil/source-path.hh | 2 +- src/nix-build/nix-build.cc | 4 +- src/nix-env/nix-env.cc | 2 +- src/nix-instantiate/nix-instantiate.cc | 4 +- src/nix-store/nix-store.cc | 16 +++---- src/nix/add-to-store.cc | 4 +- src/nix/eval.cc | 2 +- src/nix/hash.cc | 4 +- src/nix/prefetch.cc | 5 +- 30 files changed, 152 insertions(+), 135 deletions(-) diff --git a/perl/lib/Nix/Store.xs b/perl/lib/Nix/Store.xs index 6730197b5..4a928594b 100644 --- a/perl/lib/Nix/Store.xs +++ b/perl/lib/Nix/Store.xs @@ -256,9 +256,9 @@ SV * hashPath(char * algo, int base32, char * path) PPCODE: try { - PosixSourceAccessor accessor; + auto [accessor, canonPath] = PosixSourceAccessor::createAtRoot(path); Hash h = hashPath( - accessor, CanonPath::fromCwd(path), + accessor, canonPath, FileIngestionMethod::Recursive, parseHashAlgo(algo)).first; auto s = h.to_string(base32 ? HashFormat::Nix32 : HashFormat::Base16, false); XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0))); @@ -336,10 +336,10 @@ StoreWrapper::addToStore(char * srcPath, int recursive, char * algo) PPCODE: try { auto method = recursive ? 
FileIngestionMethod::Recursive : FileIngestionMethod::Flat; - PosixSourceAccessor accessor; + auto [accessor, canonPath] = PosixSourceAccessor::createAtRoot(srcPath); auto path = THIS->store->addToStore( std::string(baseNameOf(srcPath)), - accessor, CanonPath::fromCwd(srcPath), + accessor, canonPath, method, parseHashAlgo(algo)); XPUSHs(sv_2mortal(newSVpv(THIS->store->printStorePath(path).c_str(), 0))); } catch (Error & e) { diff --git a/src/libcmd/common-eval-args.cc b/src/libcmd/common-eval-args.cc index 193972272..58f04e225 100644 --- a/src/libcmd/common-eval-args.cc +++ b/src/libcmd/common-eval-args.cc @@ -156,7 +156,7 @@ Bindings * MixEvalArgs::getAutoArgs(EvalState & state) for (auto & i : autoArgs) { auto v = state.allocValue(); if (i.second[0] == 'E') - state.mkThunk_(*v, state.parseExprFromString(i.second.substr(1), state.rootPath(CanonPath::fromCwd()))); + state.mkThunk_(*v, state.parseExprFromString(i.second.substr(1), state.rootPath("."))); else v->mkString(((std::string_view) i.second).substr(1)); res.insert(state.symbols.create(i.first), v); @@ -164,7 +164,7 @@ Bindings * MixEvalArgs::getAutoArgs(EvalState & state) return res.finish(); } -SourcePath lookupFileArg(EvalState & state, std::string_view s, CanonPath baseDir) +SourcePath lookupFileArg(EvalState & state, std::string_view s, const Path * baseDir) { if (EvalSettings::isPseudoUrl(s)) { auto storePath = fetchers::downloadTarball( @@ -185,7 +185,7 @@ SourcePath lookupFileArg(EvalState & state, std::string_view s, CanonPath baseDi } else - return state.rootPath(CanonPath(s, baseDir)); + return state.rootPath(baseDir ? 
absPath(s, *baseDir) : absPath(s)); } } diff --git a/src/libcmd/common-eval-args.hh b/src/libcmd/common-eval-args.hh index 4b403d936..2eb63e15d 100644 --- a/src/libcmd/common-eval-args.hh +++ b/src/libcmd/common-eval-args.hh @@ -29,6 +29,6 @@ private: std::map autoArgs; }; -SourcePath lookupFileArg(EvalState & state, std::string_view s, CanonPath baseDir = CanonPath::fromCwd()); +SourcePath lookupFileArg(EvalState & state, std::string_view s, const Path * baseDir = nullptr); } diff --git a/src/libcmd/editor-for.cc b/src/libcmd/editor-for.cc index 67653d9c9..6bf36bd64 100644 --- a/src/libcmd/editor-for.cc +++ b/src/libcmd/editor-for.cc @@ -17,7 +17,7 @@ Strings editorFor(const SourcePath & file, uint32_t line) editor.find("vim") != std::string::npos || editor.find("kak") != std::string::npos)) args.push_back(fmt("+%d", line)); - args.push_back(path->abs()); + args.push_back(path->string()); return args; } diff --git a/src/libcmd/installables.cc b/src/libcmd/installables.cc index 736c41a1e..16d25d3cf 100644 --- a/src/libcmd/installables.cc +++ b/src/libcmd/installables.cc @@ -487,10 +487,11 @@ Installables SourceExprCommand::parseInstallables( state->eval(e, *vFile); } else if (file) { - state->evalFile(lookupFileArg(*state, *file, CanonPath::fromCwd(getCommandBaseDir())), *vFile); + auto dir = absPath(getCommandBaseDir()); + state->evalFile(lookupFileArg(*state, *file, &dir), *vFile); } else { - CanonPath dir(CanonPath::fromCwd(getCommandBaseDir())); + Path dir = absPath(getCommandBaseDir()); auto e = state->parseExprFromString(*expr, state->rootPath(dir)); state->eval(e, *vFile); } diff --git a/src/libcmd/repl.cc b/src/libcmd/repl.cc index 4b51fe393..137332895 100644 --- a/src/libcmd/repl.cc +++ b/src/libcmd/repl.cc @@ -899,7 +899,7 @@ void NixRepl::addVarToScope(const Symbol name, Value & v) Expr * NixRepl::parseString(std::string s) { - return state->parseExprFromString(std::move(s), state->rootPath(CanonPath::fromCwd()), staticEnv); + return 
state->parseExprFromString(std::move(s), state->rootPath("."), staticEnv); } diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 43f8dea07..eb1b3a5f0 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -434,14 +434,14 @@ EvalState::EvalState( , emptyBindings(0) , rootFS( evalSettings.restrictEval || evalSettings.pureEval - ? ref(AllowListInputAccessor::create(makeFSInputAccessor(CanonPath::root), {}, + ? ref(AllowListInputAccessor::create(makeFSInputAccessor(), {}, [](const CanonPath & path) -> RestrictedPathError { auto modeInformation = evalSettings.pureEval ? "in pure evaluation mode (use '--impure' to override)" : "in restricted mode"; throw RestrictedPathError("access to absolute path '%1%' is forbidden %2%", path, modeInformation); })) - : makeFSInputAccessor(CanonPath::root)) + : makeFSInputAccessor()) , corepkgsFS(makeMemoryInputAccessor()) , internalFS(makeMemoryInputAccessor()) , derivationInternal{corepkgsFS->addFile( @@ -2763,7 +2763,7 @@ Expr * EvalState::parseStdin() // drainFD should have left some extra space for terminators buffer.append("\0\0", 2); auto s = make_ref(std::move(buffer)); - return parse(s->data(), s->size(), Pos::Stdin{.source = s}, rootPath(CanonPath::fromCwd()), staticBaseEnv); + return parse(s->data(), s->size(), Pos::Stdin{.source = s}, rootPath("."), staticBaseEnv); } diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh index 2368187b1..b75646dbd 100644 --- a/src/libexpr/eval.hh +++ b/src/libexpr/eval.hh @@ -372,6 +372,11 @@ public: */ SourcePath rootPath(CanonPath path); + /** + * Variant which accepts relative paths too. + */ + SourcePath rootPath(PathView path); + /** * Allow access to a path. 
*/ diff --git a/src/libexpr/paths.cc b/src/libexpr/paths.cc index 099607638..50d0d9895 100644 --- a/src/libexpr/paths.cc +++ b/src/libexpr/paths.cc @@ -1,5 +1,4 @@ #include "eval.hh" -#include "fs-input-accessor.hh" namespace nix { @@ -8,4 +7,9 @@ SourcePath EvalState::rootPath(CanonPath path) return {rootFS, std::move(path)}; } +SourcePath EvalState::rootPath(PathView path) +{ + return {rootFS, CanonPath(absPath(path))}; +} + } diff --git a/src/libfetchers/fs-input-accessor.cc b/src/libfetchers/fs-input-accessor.cc index 46bc6b70d..ee24c621a 100644 --- a/src/libfetchers/fs-input-accessor.cc +++ b/src/libfetchers/fs-input-accessor.cc @@ -6,72 +6,30 @@ namespace nix { struct FSInputAccessor : InputAccessor, PosixSourceAccessor { - CanonPath root; - - FSInputAccessor(const CanonPath & root) - : root(root) - { - displayPrefix = root.isRoot() ? "" : root.abs(); - } - - void readFile( - const CanonPath & path, - Sink & sink, - std::function sizeCallback) override - { - auto absPath = makeAbsPath(path); - PosixSourceAccessor::readFile(absPath, sink, sizeCallback); - } - - bool pathExists(const CanonPath & path) override - { - return PosixSourceAccessor::pathExists(makeAbsPath(path)); - } - - std::optional maybeLstat(const CanonPath & path) override - { - return PosixSourceAccessor::maybeLstat(makeAbsPath(path)); - } - - DirEntries readDirectory(const CanonPath & path) override - { - DirEntries res; - for (auto & entry : PosixSourceAccessor::readDirectory(makeAbsPath(path))) - res.emplace(entry); - return res; - } - - std::string readLink(const CanonPath & path) override - { - return PosixSourceAccessor::readLink(makeAbsPath(path)); - } - - CanonPath makeAbsPath(const CanonPath & path) - { - return root / path; - } - - std::optional getPhysicalPath(const CanonPath & path) override - { - return makeAbsPath(path); - } + using PosixSourceAccessor::PosixSourceAccessor; }; -ref makeFSInputAccessor(const CanonPath & root) +ref makeFSInputAccessor() { - return make_ref(root); + 
return make_ref(); +} + +ref makeFSInputAccessor(std::filesystem::path root) +{ + return make_ref(std::move(root)); } ref makeStorePathAccessor( ref store, const StorePath & storePath) { - return makeFSInputAccessor(CanonPath(store->toRealPath(storePath))); + // FIXME: should use `store->getFSAccessor()` + return makeFSInputAccessor(std::filesystem::path { store->toRealPath(storePath) }); } SourcePath getUnfilteredRootPath(CanonPath path) { - static auto rootFS = makeFSInputAccessor(CanonPath::root); + static auto rootFS = makeFSInputAccessor(); return {rootFS, path}; } diff --git a/src/libfetchers/fs-input-accessor.hh b/src/libfetchers/fs-input-accessor.hh index a98e83511..e60906bd8 100644 --- a/src/libfetchers/fs-input-accessor.hh +++ b/src/libfetchers/fs-input-accessor.hh @@ -8,8 +8,9 @@ namespace nix { class StorePath; class Store; -ref makeFSInputAccessor( - const CanonPath & root); +ref makeFSInputAccessor(); + +ref makeFSInputAccessor(std::filesystem::path root); ref makeStorePathAccessor( ref store, diff --git a/src/libfetchers/git-utils.cc b/src/libfetchers/git-utils.cc index 1256a4c2c..cb4a84e53 100644 --- a/src/libfetchers/git-utils.cc +++ b/src/libfetchers/git-utils.cc @@ -140,15 +140,15 @@ T peelObject(git_repository * repo, git_object * obj, git_object_t type) struct GitRepoImpl : GitRepo, std::enable_shared_from_this { /** Location of the repository on disk. 
*/ - CanonPath path; + std::filesystem::path path; Repository repo; - GitRepoImpl(CanonPath _path, bool create, bool bare) + GitRepoImpl(std::filesystem::path _path, bool create, bool bare) : path(std::move(_path)) { initLibGit2(); - if (pathExists(path.abs())) { + if (pathExists(path.native())) { if (git_repository_open(Setter(repo), path.c_str())) throw Error("opening Git repository '%s': %s", path, git_error_last()->message); } else { @@ -221,10 +221,10 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this return toHash(*oid); } - std::vector parseSubmodules(const CanonPath & configFile) + std::vector parseSubmodules(const std::filesystem::path & configFile) { GitConfig config; - if (git_config_open_ondisk(Setter(config), configFile.abs().c_str())) + if (git_config_open_ondisk(Setter(config), configFile.c_str())) throw Error("parsing .gitmodules file: %s", git_error_last()->message); ConfigIterator it; @@ -296,7 +296,7 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this /* Get submodule info. 
*/ auto modulesFile = path / ".gitmodules"; - if (pathExists(modulesFile.abs())) + if (pathExists(modulesFile)) info.submodules = parseSubmodules(modulesFile); return info; @@ -389,10 +389,10 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this auto dir = this->path; Strings gitArgs; if (shallow) { - gitArgs = { "-C", dir.abs(), "fetch", "--quiet", "--force", "--depth", "1", "--", url, refspec }; + gitArgs = { "-C", dir, "fetch", "--quiet", "--force", "--depth", "1", "--", url, refspec }; } else { - gitArgs = { "-C", dir.abs(), "fetch", "--quiet", "--force", "--", url, refspec }; + gitArgs = { "-C", dir, "fetch", "--quiet", "--force", "--", url, refspec }; } runProgram(RunOptions { @@ -438,7 +438,7 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this .args = { "-c", "gpg.ssh.allowedSignersFile=" + allowedSignersFile, - "-C", path.abs(), + "-C", path, "verify-commit", rev.gitRev() }, @@ -465,7 +465,7 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this } }; -ref GitRepo::openRepo(const CanonPath & path, bool create, bool bare) +ref GitRepo::openRepo(const std::filesystem::path & path, bool create, bool bare) { return make_ref(path, create, bare); } @@ -781,7 +781,7 @@ std::vector> GitRepoImpl::getSubmodules auto rawAccessor = getRawAccessor(rev); - for (auto & submodule : parseSubmodules(CanonPath(pathTemp))) { + for (auto & submodule : parseSubmodules(pathTemp)) { auto rev = rawAccessor->getSubmoduleRev(submodule.path); result.push_back({std::move(submodule), rev}); } diff --git a/src/libfetchers/git-utils.hh b/src/libfetchers/git-utils.hh index 768554780..e55affb12 100644 --- a/src/libfetchers/git-utils.hh +++ b/src/libfetchers/git-utils.hh @@ -12,7 +12,7 @@ struct GitRepo virtual ~GitRepo() { } - static ref openRepo(const CanonPath & path, bool create = false, bool bare = false); + static ref openRepo(const std::filesystem::path & path, bool create = false, bool bare = false); virtual uint64_t getRevCount(const Hash & rev) = 0; diff 
--git a/src/libfetchers/git.cc b/src/libfetchers/git.cc index 26fe79596..bef945d54 100644 --- a/src/libfetchers/git.cc +++ b/src/libfetchers/git.cc @@ -415,7 +415,7 @@ struct GitInputScheme : InputScheme // If this is a local directory and no ref or revision is // given, then allow the use of an unclean working tree. if (!input.getRef() && !input.getRev() && repoInfo.isLocal) - repoInfo.workdirInfo = GitRepo::openRepo(CanonPath(repoInfo.url))->getWorkdirInfo(); + repoInfo.workdirInfo = GitRepo::openRepo(repoInfo.url)->getWorkdirInfo(); return repoInfo; } @@ -429,7 +429,7 @@ struct GitInputScheme : InputScheme if (auto res = cache->lookup(key)) return getIntAttr(*res, "lastModified"); - auto lastModified = GitRepo::openRepo(CanonPath(repoDir))->getLastModified(rev); + auto lastModified = GitRepo::openRepo(repoDir)->getLastModified(rev); cache->upsert(key, Attrs{{"lastModified", lastModified}}); @@ -447,7 +447,7 @@ struct GitInputScheme : InputScheme Activity act(*logger, lvlChatty, actUnknown, fmt("getting Git revision count of '%s'", repoInfo.url)); - auto revCount = GitRepo::openRepo(CanonPath(repoDir))->getRevCount(rev); + auto revCount = GitRepo::openRepo(repoDir)->getRevCount(rev); cache->upsert(key, Attrs{{"revCount", revCount}}); @@ -457,7 +457,7 @@ struct GitInputScheme : InputScheme std::string getDefaultRef(const RepoInfo & repoInfo) const { auto head = repoInfo.isLocal - ? GitRepo::openRepo(CanonPath(repoInfo.url))->getWorkdirRef() + ? 
GitRepo::openRepo(repoInfo.url)->getWorkdirRef() : readHeadCached(repoInfo.url); if (!head) { warn("could not read HEAD ref from repo at '%s', using 'master'", repoInfo.url); @@ -510,7 +510,7 @@ struct GitInputScheme : InputScheme if (repoInfo.isLocal) { repoDir = repoInfo.url; if (!input.getRev()) - input.attrs.insert_or_assign("rev", GitRepo::openRepo(CanonPath(repoDir))->resolveRef(ref).gitRev()); + input.attrs.insert_or_assign("rev", GitRepo::openRepo(repoDir)->resolveRef(ref).gitRev()); } else { Path cacheDir = getCachePath(repoInfo.url, getShallowAttr(input)); repoDir = cacheDir; @@ -519,7 +519,7 @@ struct GitInputScheme : InputScheme createDirs(dirOf(cacheDir)); PathLocks cacheDirLock({cacheDir}); - auto repo = GitRepo::openRepo(CanonPath(cacheDir), true, true); + auto repo = GitRepo::openRepo(cacheDir, true, true); Path localRefFile = ref.compare(0, 5, "refs/") == 0 @@ -588,7 +588,7 @@ struct GitInputScheme : InputScheme // cache dir lock is removed at scope end; we will only use read-only operations on specific revisions in the remainder } - auto repo = GitRepo::openRepo(CanonPath(repoDir)); + auto repo = GitRepo::openRepo(repoDir); auto isShallow = repo->isShallow(); @@ -664,7 +664,7 @@ struct GitInputScheme : InputScheme for (auto & submodule : repoInfo.workdirInfo.submodules) repoInfo.workdirInfo.files.insert(submodule.path); - auto repo = GitRepo::openRepo(CanonPath(repoInfo.url), false, false); + auto repo = GitRepo::openRepo(repoInfo.url, false, false); auto exportIgnore = getExportIgnoreAttr(input); @@ -703,7 +703,7 @@ struct GitInputScheme : InputScheme } if (!repoInfo.workdirInfo.isDirty) { - auto repo = GitRepo::openRepo(CanonPath(repoInfo.url)); + auto repo = GitRepo::openRepo(repoInfo.url); if (auto ref = repo->getWorkdirRef()) input.attrs.insert_or_assign("ref", *ref); diff --git a/src/libutil/archive.cc b/src/libutil/archive.cc index b783b29e0..351ee094b 100644 --- a/src/libutil/archive.cc +++ b/src/libutil/archive.cc @@ -110,8 +110,8 @@ void 
SourceAccessor::dumpPath( time_t dumpPathAndGetMtime(const Path & path, Sink & sink, PathFilter & filter) { - PosixSourceAccessor accessor; - accessor.dumpPath(CanonPath::fromCwd(path), sink, filter); + auto [accessor, canonPath] = PosixSourceAccessor::createAtRoot(path); + accessor.dumpPath(canonPath, sink, filter); return accessor.mtime; } diff --git a/src/libutil/canon-path.cc b/src/libutil/canon-path.cc index bf948be5d..1223ba33c 100644 --- a/src/libutil/canon-path.cc +++ b/src/libutil/canon-path.cc @@ -20,11 +20,6 @@ CanonPath::CanonPath(const std::vector & elems) push(s); } -CanonPath CanonPath::fromCwd(std::string_view path) -{ - return CanonPath(unchecked_t(), absPath(path)); -} - std::optional CanonPath::parent() const { if (isRoot()) return std::nullopt; diff --git a/src/libutil/canon-path.hh b/src/libutil/canon-path.hh index fb2d9244b..2f8ff381e 100644 --- a/src/libutil/canon-path.hh +++ b/src/libutil/canon-path.hh @@ -52,8 +52,6 @@ public: */ CanonPath(const std::vector & elems); - static CanonPath fromCwd(std::string_view path = "."); - static CanonPath root; /** diff --git a/src/libutil/posix-source-accessor.cc b/src/libutil/posix-source-accessor.cc index 5f26fa67b..0300de01e 100644 --- a/src/libutil/posix-source-accessor.cc +++ b/src/libutil/posix-source-accessor.cc @@ -6,6 +6,33 @@ namespace nix { +PosixSourceAccessor::PosixSourceAccessor(std::filesystem::path && root) + : root(std::move(root)) +{ + assert(root.empty() || root.is_absolute()); + displayPrefix = root; +} + +PosixSourceAccessor::PosixSourceAccessor() + : PosixSourceAccessor(std::filesystem::path {}) +{ } + +std::pair PosixSourceAccessor::createAtRoot(const std::filesystem::path & path) +{ + std::filesystem::path path2 = absPath(path.native()); + return { + PosixSourceAccessor { path2.root_path() }, + CanonPath { static_cast(path2.relative_path()) }, + }; +} + +std::filesystem::path PosixSourceAccessor::makeAbsPath(const CanonPath & path) +{ + return root.empty() + ? 
(std::filesystem::path { path.abs() }) + : root / path.rel(); +} + void PosixSourceAccessor::readFile( const CanonPath & path, Sink & sink, @@ -13,9 +40,11 @@ void PosixSourceAccessor::readFile( { assertNoSymlinks(path); - AutoCloseFD fd = open(path.c_str(), O_RDONLY | O_CLOEXEC | O_NOFOLLOW); + auto ap = makeAbsPath(path); + + AutoCloseFD fd = open(ap.c_str(), O_RDONLY | O_CLOEXEC | O_NOFOLLOW); if (!fd) - throw SysError("opening file '%1%'", path); + throw SysError("opening file '%1%'", ap.native()); struct stat st; if (fstat(fd.get(), &st) == -1) @@ -46,7 +75,7 @@ void PosixSourceAccessor::readFile( bool PosixSourceAccessor::pathExists(const CanonPath & path) { if (auto parent = path.parent()) assertNoSymlinks(*parent); - return nix::pathExists(path.abs()); + return nix::pathExists(makeAbsPath(path)); } std::optional PosixSourceAccessor::cachedLstat(const CanonPath & path) @@ -60,7 +89,7 @@ std::optional PosixSourceAccessor::cachedLstat(const CanonPath & pa } std::optional st{std::in_place}; - if (::lstat(path.c_str(), &*st)) { + if (::lstat(makeAbsPath(path).c_str(), &*st)) { if (errno == ENOENT || errno == ENOTDIR) st.reset(); else @@ -95,7 +124,7 @@ SourceAccessor::DirEntries PosixSourceAccessor::readDirectory(const CanonPath & { assertNoSymlinks(path); DirEntries res; - for (auto & entry : nix::readDirectory(path.abs())) { + for (auto & entry : nix::readDirectory(makeAbsPath(path))) { std::optional type; switch (entry.type) { case DT_REG: type = Type::tRegular; break; @@ -110,12 +139,12 @@ SourceAccessor::DirEntries PosixSourceAccessor::readDirectory(const CanonPath & std::string PosixSourceAccessor::readLink(const CanonPath & path) { if (auto parent = path.parent()) assertNoSymlinks(*parent); - return nix::readLink(path.abs()); + return nix::readLink(makeAbsPath(path)); } -std::optional PosixSourceAccessor::getPhysicalPath(const CanonPath & path) +std::optional PosixSourceAccessor::getPhysicalPath(const CanonPath & path) { - return path; + return 
makeAbsPath(path); } void PosixSourceAccessor::assertNoSymlinks(CanonPath path) diff --git a/src/libutil/posix-source-accessor.hh b/src/libutil/posix-source-accessor.hh index b2bd39805..717c8f017 100644 --- a/src/libutil/posix-source-accessor.hh +++ b/src/libutil/posix-source-accessor.hh @@ -9,6 +9,16 @@ namespace nix { */ struct PosixSourceAccessor : virtual SourceAccessor { + /** + * Optional root path to prefix all operations into the native file + * system. This allows prepending funny things like `C:\` that + * `CanonPath` intentionally doesn't support. + */ + const std::filesystem::path root; + + PosixSourceAccessor(); + PosixSourceAccessor(std::filesystem::path && root); + /** * The most recent mtime seen by lstat(). This is a hack to * support dumpPathAndGetMtime(). Should remove this eventually. @@ -28,7 +38,22 @@ struct PosixSourceAccessor : virtual SourceAccessor std::string readLink(const CanonPath & path) override; - std::optional getPhysicalPath(const CanonPath & path) override; + std::optional getPhysicalPath(const CanonPath & path) override; + + /** + * Create a `PosixSourceAccessor` and `CanonPath` corresponding to + * some native path. + * + * The `PosixSourceAccessor` is rooted as far up the tree as + * possible, (e.g. on Windows it could scoped to a drive like + * `C:\`). This allows more `..` parent accessing to work. + * + * See + * [`std::filesystem::path::root_path`](https://en.cppreference.com/w/cpp/filesystem/path/root_path) + * and + * [`std::filesystem::path::relative_path`](https://en.cppreference.com/w/cpp/filesystem/path/relative_path). 
+ */ + static std::pair createAtRoot(const std::filesystem::path & path); private: @@ -38,6 +63,8 @@ private: void assertNoSymlinks(CanonPath path); std::optional cachedLstat(const CanonPath & path); + + std::filesystem::path makeAbsPath(const CanonPath & path); }; } diff --git a/src/libutil/source-accessor.hh b/src/libutil/source-accessor.hh index 4f4ff09c1..aff7da09c 100644 --- a/src/libutil/source-accessor.hh +++ b/src/libutil/source-accessor.hh @@ -1,5 +1,7 @@ #pragma once +#include + #include "canon-path.hh" #include "hash.hh" @@ -119,7 +121,7 @@ struct SourceAccessor * possible. This is only possible for filesystems that are * materialized in the root filesystem. */ - virtual std::optional getPhysicalPath(const CanonPath & path) + virtual std::optional getPhysicalPath(const CanonPath & path) { return std::nullopt; } bool operator == (const SourceAccessor & x) const diff --git a/src/libutil/source-path.cc b/src/libutil/source-path.cc index 341daf39c..0f154e779 100644 --- a/src/libutil/source-path.cc +++ b/src/libutil/source-path.cc @@ -35,7 +35,7 @@ void SourcePath::dumpPath( PathFilter & filter) const { return accessor->dumpPath(path, sink, filter); } -std::optional SourcePath::getPhysicalPath() const +std::optional SourcePath::getPhysicalPath() const { return accessor->getPhysicalPath(path); } std::string SourcePath::to_string() const diff --git a/src/libutil/source-path.hh b/src/libutil/source-path.hh index bde07b08f..a2f4ddd1e 100644 --- a/src/libutil/source-path.hh +++ b/src/libutil/source-path.hh @@ -82,7 +82,7 @@ struct SourcePath * Return the location of this path in the "real" filesystem, if * it has a physical location. 
*/ - std::optional getPhysicalPath() const; + std::optional getPhysicalPath() const; std::string to_string() const; diff --git a/src/nix-build/nix-build.cc b/src/nix-build/nix-build.cc index 549adfbf7..a372e4b1c 100644 --- a/src/nix-build/nix-build.cc +++ b/src/nix-build/nix-build.cc @@ -299,7 +299,7 @@ static void main_nix_build(int argc, char * * argv) else for (auto i : left) { if (fromArgs) - exprs.push_back(state->parseExprFromString(std::move(i), state->rootPath(CanonPath::fromCwd()))); + exprs.push_back(state->parseExprFromString(std::move(i), state->rootPath("."))); else { auto absolute = i; try { @@ -400,7 +400,7 @@ static void main_nix_build(int argc, char * * argv) try { auto expr = state->parseExprFromString( "(import {}).bashInteractive", - state->rootPath(CanonPath::fromCwd())); + state->rootPath(".")); Value v; state->eval(expr, v); diff --git a/src/nix-env/nix-env.cc b/src/nix-env/nix-env.cc index dfc6e70eb..1f311733b 100644 --- a/src/nix-env/nix-env.cc +++ b/src/nix-env/nix-env.cc @@ -413,7 +413,7 @@ static void queryInstSources(EvalState & state, loadSourceExpr(state, *instSource.nixExprPath, vArg); for (auto & i : args) { - Expr * eFun = state.parseExprFromString(i, state.rootPath(CanonPath::fromCwd())); + Expr * eFun = state.parseExprFromString(i, state.rootPath(".")); Value vFun, vTmp; state.eval(eFun, vFun); vTmp.mkApp(&vFun, &vArg); diff --git a/src/nix-instantiate/nix-instantiate.cc b/src/nix-instantiate/nix-instantiate.cc index b9e626aed..86e6f008d 100644 --- a/src/nix-instantiate/nix-instantiate.cc +++ b/src/nix-instantiate/nix-instantiate.cc @@ -168,7 +168,7 @@ static int main_nix_instantiate(int argc, char * * argv) for (auto & i : files) { auto p = state->findFile(i); if (auto fn = p.getPhysicalPath()) - std::cout << fn->abs() << std::endl; + std::cout << fn->native() << std::endl; else throw Error("'%s' has no physical path", p); } @@ -184,7 +184,7 @@ static int main_nix_instantiate(int argc, char * * argv) for (auto & i : files) { 
Expr * e = fromArgs - ? state->parseExprFromString(i, state->rootPath(CanonPath::fromCwd())) + ? state->parseExprFromString(i, state->rootPath(".")) : state->parseExprFromFile(resolveExprPath(lookupFileArg(*state, i))); processExpr(*state, attrPaths, parseOnly, strict, autoArgs, evalOnly, outputKind, xmlOutputSourceLocation, e); diff --git a/src/nix-store/nix-store.cc b/src/nix-store/nix-store.cc index 40378e123..f6a36da0d 100644 --- a/src/nix-store/nix-store.cc +++ b/src/nix-store/nix-store.cc @@ -176,12 +176,11 @@ static void opAdd(Strings opFlags, Strings opArgs) { if (!opFlags.empty()) throw UsageError("unknown flag"); - PosixSourceAccessor accessor; - for (auto & i : opArgs) + for (auto & i : opArgs) { + auto [accessor, canonPath] = PosixSourceAccessor::createAtRoot(i); cout << fmt("%s\n", store->printStorePath(store->addToStore( - std::string(baseNameOf(i)), - accessor, - CanonPath::fromCwd(i)))); + std::string(baseNameOf(i)), accessor, canonPath))); + } } @@ -201,14 +200,15 @@ static void opAddFixed(Strings opFlags, Strings opArgs) HashAlgorithm hashAlgo = parseHashAlgo(opArgs.front()); opArgs.pop_front(); - PosixSourceAccessor accessor; - for (auto & i : opArgs) + for (auto & i : opArgs) { + auto [accessor, canonPath] = PosixSourceAccessor::createAtRoot(i); std::cout << fmt("%s\n", store->printStorePath(store->addToStoreSlow( baseNameOf(i), accessor, - CanonPath::fromCwd(i), + canonPath, method, hashAlgo).path)); + } } diff --git a/src/nix/add-to-store.cc b/src/nix/add-to-store.cc index 7c534517d..d3e66dc21 100644 --- a/src/nix/add-to-store.cc +++ b/src/nix/add-to-store.cc @@ -60,9 +60,7 @@ struct CmdAddToStore : MixDryRun, StoreCommand { if (!namePart) namePart = baseNameOf(path); - PosixSourceAccessor accessor; - - auto path2 = CanonPath::fromCwd(path); + auto [accessor, path2] = PosixSourceAccessor::createAtRoot(path); auto storePath = dryRun ? 
store->computeStorePath( diff --git a/src/nix/eval.cc b/src/nix/eval.cc index a89fa7412..31b2ccd3c 100644 --- a/src/nix/eval.cc +++ b/src/nix/eval.cc @@ -66,7 +66,7 @@ struct CmdEval : MixJSON, InstallableValueCommand, MixReadOnlyOption if (apply) { auto vApply = state->allocValue(); - state->eval(state->parseExprFromString(*apply, state->rootPath(CanonPath::fromCwd())), *vApply); + state->eval(state->parseExprFromString(*apply, state->rootPath(".")), *vApply); auto vRes = state->allocValue(); state->callFunction(*vApply, *v, *vRes, noPos); v = vRes; diff --git a/src/nix/hash.cc b/src/nix/hash.cc index 4837891c6..eec1c0eae 100644 --- a/src/nix/hash.cc +++ b/src/nix/hash.cc @@ -89,8 +89,8 @@ struct CmdHashBase : Command else hashSink = std::make_unique(ha); - PosixSourceAccessor accessor; - dumpPath(accessor, CanonPath::fromCwd(path), *hashSink, mode); + auto [accessor, canonPath] = PosixSourceAccessor::createAtRoot(path); + dumpPath(accessor, canonPath, *hashSink, mode); Hash h = hashSink->finish().first; if (truncate && h.hashSize > 20) h = compressHash(h, 20); diff --git a/src/nix/prefetch.cc b/src/nix/prefetch.cc index 84b79ea28..6e3f878d9 100644 --- a/src/nix/prefetch.cc +++ b/src/nix/prefetch.cc @@ -123,10 +123,9 @@ std::tuple prefetchFile( Activity act(*logger, lvlChatty, actUnknown, fmt("adding '%s' to the store", url)); - PosixSourceAccessor accessor; + auto [accessor, canonPath] = PosixSourceAccessor::createAtRoot(tmpFile); auto info = store->addToStoreSlow( - *name, - accessor, CanonPath::fromCwd(tmpFile), + *name, accessor, canonPath, ingestionMethod, hashAlgo, {}, expectedHash); storePath = info.path; assert(info.ca); From 754c35abfbed653492859136cd41a80b38009e27 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Mon, 5 Feb 2024 12:33:05 -0800 Subject: [PATCH 045/164] Add release notes for "Debugger prints source position information" --- doc/manual/rl-next/debugger-positions.md | 25 ++++++++++++++++++++++++ 1 file changed, 25 insertions(+) create mode 
100644 doc/manual/rl-next/debugger-positions.md diff --git a/doc/manual/rl-next/debugger-positions.md b/doc/manual/rl-next/debugger-positions.md new file mode 100644 index 000000000..2fe868413 --- /dev/null +++ b/doc/manual/rl-next/debugger-positions.md @@ -0,0 +1,25 @@ +--- +synopsis: Debugger prints source position information +prs: 9913 +--- + +The `--debugger` now prints source location information, instead of the +pointers of source location information. Before: + +``` +nix-repl> :bt +0: while evaluating the attribute 'python311.pythonForBuild.pkgs' +0x600001522598 +``` + +After: + +``` +0: while evaluating the attribute 'python311.pythonForBuild.pkgs' +/nix/store/hg65h51xnp74ikahns9hyf3py5mlbbqq-source/overrides/default.nix:132:27 + + 131| + 132| bootstrappingBase = pkgs.${self.python.pythonAttr}.pythonForBuild.pkgs; + | ^ + 133| in +``` From 9a5d52262fd83ab11cb36ba2ba91ea27b2389670 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Mon, 5 Feb 2024 12:33:31 -0800 Subject: [PATCH 046/164] Add release notes for "Nix no longer attempts to `git add` files that are `.gitignore`d" --- doc/manual/src/release-notes/rl-2.20.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/doc/manual/src/release-notes/rl-2.20.md b/doc/manual/src/release-notes/rl-2.20.md index 26869e90a..5152926e7 100644 --- a/doc/manual/src/release-notes/rl-2.20.md +++ b/doc/manual/src/release-notes/rl-2.20.md @@ -167,3 +167,7 @@ error: expected a set but found an integer ``` +- Flake operations like `nix develop` will no longer fail when run in a Git + repository where the `flake.lock` file is `.gitignore`d + [#8854](https://github.com/NixOS/nix/issues/8854) + [#9324](https://github.com/NixOS/nix/pull/9324) From abb5fef355afc14819c96de08a3687c2257bd10c Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Mon, 5 Feb 2024 12:33:58 -0800 Subject: [PATCH 047/164] Add release notes for "Functions are printed with more detail" --- doc/manual/rl-next/lambda-printing.md | 50 +++++++++++++++++++++++++ 
doc/manual/src/release-notes/rl-2.20.md | 19 ++++++++++ 2 files changed, 69 insertions(+) create mode 100644 doc/manual/rl-next/lambda-printing.md diff --git a/doc/manual/rl-next/lambda-printing.md b/doc/manual/rl-next/lambda-printing.md new file mode 100644 index 000000000..3a63f3068 --- /dev/null +++ b/doc/manual/rl-next/lambda-printing.md @@ -0,0 +1,50 @@ +--- +synopsis: Functions are printed with more detail +prs: 9606 +issues: 7145 +--- + +Functions and `builtins` are printed with more detail in `nix repl`, `nix +eval`, `builtins.trace`, and most other places values are printed. + +Before: + +``` +$ nix repl nixpkgs +nix-repl> builtins.map +«primop» + +nix-repl> builtins.map lib.id +«primop-app» + +nix-repl> builtins.trace lib.id "my-value" +trace: +"my-value" + +$ nix eval --file functions.nix +{ id = ; primop = ; primop-app = ; } +``` + +After: + +``` +$ nix repl nixpkgs +nix-repl> builtins.map +«primop map» + +nix-repl> builtins.map lib.id +«partially applied primop map» + +nix-repl> builtins.trace lib.id "my-value" +trace: «lambda id @ /nix/store/8rrzq23h2zq7sv5l2vhw44kls5w0f654-source/lib/trivial.nix:26:5» +"my-value" + +$ nix eval --file functions.nix +{ id = «lambda id @ /Users/wiggles/nix/functions.nix:2:8»; primop = «primop map»; primop-app = «partially applied primop map»; } +``` + +This was actually released in Nix 2.20, but wasn't added to the release notes +so we're announcing it here. The historical release notes have been updated as well. 
+ +[type-error]: https://github.com/NixOS/nix/pull/9753 +[coercion-error]: https://github.com/NixOS/nix/pull/9754 diff --git a/doc/manual/src/release-notes/rl-2.20.md b/doc/manual/src/release-notes/rl-2.20.md index 5152926e7..666d0b4db 100644 --- a/doc/manual/src/release-notes/rl-2.20.md +++ b/doc/manual/src/release-notes/rl-2.20.md @@ -167,6 +167,25 @@ error: expected a set but found an integer ``` + +- Functions are printed with more detail [#7145](https://github.com/NixOS/nix/issues/7145) [#9606](https://github.com/NixOS/nix/pull/9606) + + `nix repl`, `nix eval`, `builtins.trace`, and most other places values are + printed will now include function names and source location information: + + ``` + $ nix repl nixpkgs + nix-repl> builtins.map + «primop map» + + nix-repl> builtins.map lib.id + «partially applied primop map» + + nix-repl> builtins.trace lib.id "my-value" + trace: «lambda id @ /nix/store/8rrzq23h2zq7sv5l2vhw44kls5w0f654-source/lib/trivial.nix:26:5» + "my-value" + ``` + - Flake operations like `nix develop` will no longer fail when run in a Git repository where the `flake.lock` file is `.gitignore`d [#8854](https://github.com/NixOS/nix/issues/8854) From 24cdb81bb043a156346dd9e235e66889567c5fdc Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Mon, 5 Feb 2024 12:34:14 -0800 Subject: [PATCH 048/164] Add release notes for "Nix commands respect Ctrl-C" --- doc/manual/rl-next/more-commands-respect-ctrl-c.md | 13 +++++++++++++ doc/manual/src/release-notes/rl-2.20.md | 10 ++++++++++ 2 files changed, 23 insertions(+) create mode 100644 doc/manual/rl-next/more-commands-respect-ctrl-c.md diff --git a/doc/manual/rl-next/more-commands-respect-ctrl-c.md b/doc/manual/rl-next/more-commands-respect-ctrl-c.md new file mode 100644 index 000000000..948930c96 --- /dev/null +++ b/doc/manual/rl-next/more-commands-respect-ctrl-c.md @@ -0,0 +1,13 @@ +--- +synopsis: Nix commands respect Ctrl-C +prs: 9687 6995 +issues: 7245 +--- + +Previously, many Nix commands would hang 
indefinitely if Ctrl-C was pressed +while performing various operations (including `nix develop`, `nix flake +update`, and so on). With several fixes to Nix's signal handlers, Nix commands +will now exit quickly after Ctrl-C is pressed. + +This was actually released in Nix 2.20, but wasn't added to the release notes +so we're announcing it here. The historical release notes have been updated as well. diff --git a/doc/manual/src/release-notes/rl-2.20.md b/doc/manual/src/release-notes/rl-2.20.md index 666d0b4db..8ede168a4 100644 --- a/doc/manual/src/release-notes/rl-2.20.md +++ b/doc/manual/src/release-notes/rl-2.20.md @@ -190,3 +190,13 @@ repository where the `flake.lock` file is `.gitignore`d [#8854](https://github.com/NixOS/nix/issues/8854) [#9324](https://github.com/NixOS/nix/pull/9324) + +- Nix commands will now respect Ctrl-C + [#7245](https://github.com/NixOS/nix/issues/7245) + [#6995](https://github.com/NixOS/nix/pull/6995) + [#9687](https://github.com/NixOS/nix/pull/9687) + + Previously, many Nix commands would hang indefinitely if Ctrl-C was pressed + while performing various operations (including `nix develop`, `nix flake + update`, and so on). With several fixes to Nix's signal handlers, Nix + commands will now exit quickly after Ctrl-C is pressed.
From 4f0d43a397205c185eea81e553e30fefc2c0d9f5 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Mon, 5 Feb 2024 12:34:28 -0800 Subject: [PATCH 049/164] Add release notes for "`nix repl` now respects Ctrl-C while printing values" --- doc/manual/rl-next/repl-ctrl-c-while-printing.md | 8 ++++++++ 1 file changed, 8 insertions(+) create mode 100644 doc/manual/rl-next/repl-ctrl-c-while-printing.md diff --git a/doc/manual/rl-next/repl-ctrl-c-while-printing.md b/doc/manual/rl-next/repl-ctrl-c-while-printing.md new file mode 100644 index 000000000..15b0daa0a --- /dev/null +++ b/doc/manual/rl-next/repl-ctrl-c-while-printing.md @@ -0,0 +1,8 @@ +--- +synopsis: "`nix repl` now respects Ctrl-C while printing values" +prs: 9927 +--- + +`nix repl` will now halt immediately when Ctrl-C is pressed while it's printing +a value. This is useful if you got curious about what would happen if you +printed all of Nixpkgs. From 837c350bcd36f51d656fdb3bf1c40bce398181b0 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Mon, 5 Feb 2024 12:34:46 -0800 Subject: [PATCH 050/164] Add release notes for "Cycle detection in `nix repl` is simpler and more reliable" --- doc/manual/rl-next/repl-cycle-detection.md | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) create mode 100644 doc/manual/rl-next/repl-cycle-detection.md diff --git a/doc/manual/rl-next/repl-cycle-detection.md b/doc/manual/rl-next/repl-cycle-detection.md new file mode 100644 index 000000000..de24c4be1 --- /dev/null +++ b/doc/manual/rl-next/repl-cycle-detection.md @@ -0,0 +1,22 @@ +--- +synopsis: Cycle detection in `nix repl` is simpler and more reliable +prs: 9926 +issues: 8672 +--- + +The cycle detection in `nix repl`, `nix eval`, `builtins.trace`, and everywhere +else values are printed is now simpler and matches the cycle detection in +`nix-instantiate --eval` output. 
+ +Before: + +``` +nix eval --expr 'let self = { inherit self; }; in self' +{ self = { self = «repeated»; }; } +``` + +After: + +``` +{ self = «repeated»; } +``` From 7f8960d0f29991d6df8320059378d67530b45c50 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Mon, 5 Feb 2024 12:35:06 -0800 Subject: [PATCH 051/164] Add release notes for "Stack size is increased on macOS" --- doc/manual/rl-next/stack-size-macos.md | 9 +++++++++ 1 file changed, 9 insertions(+) create mode 100644 doc/manual/rl-next/stack-size-macos.md diff --git a/doc/manual/rl-next/stack-size-macos.md b/doc/manual/rl-next/stack-size-macos.md new file mode 100644 index 000000000..b1c40bb5a --- /dev/null +++ b/doc/manual/rl-next/stack-size-macos.md @@ -0,0 +1,9 @@ +--- +synopsis: Stack size is increased on macOS +prs: 9860 +--- + +Previously, Nix would set the stack size to 64MiB on Linux, but would leave the +stack size set to the default (approximately 8MiB) on macOS. Now, the stack +size is correctly set to 64MiB on macOS as well, which should reduce stack +overflow segfaults in deeply-recursive Nix expressions. From b2868acbdc125bf3638f21dd8c5684cc56d4b739 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Mon, 5 Feb 2024 12:35:21 -0800 Subject: [PATCH 052/164] Add release notes for "Stack traces are more compact" --- .../rl-next/stack-traces-are-more-compact.md | 51 +++++++++++++++++++ doc/manual/src/release-notes/rl-2.20.md | 19 +++++++ 2 files changed, 70 insertions(+) create mode 100644 doc/manual/rl-next/stack-traces-are-more-compact.md diff --git a/doc/manual/rl-next/stack-traces-are-more-compact.md b/doc/manual/rl-next/stack-traces-are-more-compact.md new file mode 100644 index 000000000..751465da1 --- /dev/null +++ b/doc/manual/rl-next/stack-traces-are-more-compact.md @@ -0,0 +1,51 @@ +--- +synopsis: Stack traces are more compact +prs: 9619 +--- + +Stack traces printed with `--show-trace` are more compact.
+ +Before: + +``` +error: + … while evaluating the attribute 'body' + + at /Users/wiggles/nix/tests/functional/lang/eval-fail-assert.nix:4:3: + + 3| + 4| body = x "x"; + | ^ + 5| } + + error: assertion '(arg == "y")' failed + + at /Users/wiggles/nix/tests/functional/lang/eval-fail-assert.nix:2:12: + + 1| let { + 2| x = arg: assert arg == "y"; 123; + | ^ + 3| +``` + +After: + +``` +error: + … while evaluating the attribute 'body' + at /Users/wiggles/nix/tests/functional/lang/eval-fail-assert.nix:4:3: + 3| + 4| body = x "x"; + | ^ + 5| } + + error: assertion '(arg == "y")' failed + at /Users/wiggles/nix/tests/functional/lang/eval-fail-assert.nix:2:12: + 1| let { + 2| x = arg: assert arg == "y"; 123; + | ^ + 3| +``` + +This was actually released in Nix 2.20, but wasn't added to the release notes +so we're announcing it here. The historical release notes have been updated as well. diff --git a/doc/manual/src/release-notes/rl-2.20.md b/doc/manual/src/release-notes/rl-2.20.md index 8ede168a4..4dd49c5ea 100644 --- a/doc/manual/src/release-notes/rl-2.20.md +++ b/doc/manual/src/release-notes/rl-2.20.md @@ -200,3 +200,22 @@ while performing various operations (including `nix develop`, `nix flake update`, and so on). With several fixes to Nix's signal handlers, Nix commands will now exit quickly after Ctrl-C is pressed. 
+ +- Blank lines have been removed from stack traces, rendering them more compact [#9619](https://github.com/NixOS/nix/pull/9619) + + ``` + error: + … while evaluating the attribute 'body' + at /Users/wiggles/nix/tests/functional/lang/eval-fail-assert.nix:4:3: + 3| + 4| body = x "x"; + | ^ + 5| } + + error: assertion '(arg == "y")' failed + at /Users/wiggles/nix/tests/functional/lang/eval-fail-assert.nix:2:12: + 1| let { + 2| x = arg: assert arg == "y"; 123; + | ^ + 3| + ``` From 0f1269243b242be033ff031ab1993e05cf25d857 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Thu, 8 Feb 2024 10:09:47 -0800 Subject: [PATCH 053/164] Revert "Add release notes for "Stack traces are more compact"" This reverts commit b2868acbdc125bf3638f21dd8c5684cc56d4b739. --- .../rl-next/stack-traces-are-more-compact.md | 51 ------------------- doc/manual/src/release-notes/rl-2.20.md | 19 ------- 2 files changed, 70 deletions(-) delete mode 100644 doc/manual/rl-next/stack-traces-are-more-compact.md diff --git a/doc/manual/rl-next/stack-traces-are-more-compact.md b/doc/manual/rl-next/stack-traces-are-more-compact.md deleted file mode 100644 index 751465da1..000000000 --- a/doc/manual/rl-next/stack-traces-are-more-compact.md +++ /dev/null @@ -1,51 +0,0 @@ ---- -synopsis: Stack traces are more compact -prs: 9619 ---- - -Stack traces printed with `--show-trace` are more compact. 
- -Before: - -``` -error: - … while evaluating the attribute 'body' - - at /Users/wiggles/nix/tests/functional/lang/eval-fail-assert.nix:4:3: - - 3| - 4| body = x "x"; - | ^ - 5| } - - error: assertion '(arg == "y")' failed - - at /Users/wiggles/nix/tests/functional/lang/eval-fail-assert.nix:2:12: - - 1| let { - 2| x = arg: assert arg == "y"; 123; - | ^ - 3| -``` - -After: - -``` -error: - … while evaluating the attribute 'body' - at /Users/wiggles/nix/tests/functional/lang/eval-fail-assert.nix:4:3: - 3| - 4| body = x "x"; - | ^ - 5| } - - error: assertion '(arg == "y")' failed - at /Users/wiggles/nix/tests/functional/lang/eval-fail-assert.nix:2:12: - 1| let { - 2| x = arg: assert arg == "y"; 123; - | ^ - 3| -``` - -This was actually released in Nix 2.20, but wasn't added to the release notes -so we're announcing it here. The historical release notes have been updated as well. diff --git a/doc/manual/src/release-notes/rl-2.20.md b/doc/manual/src/release-notes/rl-2.20.md index 4dd49c5ea..8ede168a4 100644 --- a/doc/manual/src/release-notes/rl-2.20.md +++ b/doc/manual/src/release-notes/rl-2.20.md @@ -200,22 +200,3 @@ while performing various operations (including `nix develop`, `nix flake update`, and so on). With several fixes to Nix's signal handlers, Nix commands will now exit quickly after Ctrl-C is pressed. 
- -- Blank lines have been removed from stack traces, rendering them more compact [#9619](https://github.com/NixOS/nix/pull/9619) - - ``` - error: - … while evaluating the attribute 'body' - at /Users/wiggles/nix/tests/functional/lang/eval-fail-assert.nix:4:3: - 3| - 4| body = x "x"; - | ^ - 5| } - - error: assertion '(arg == "y")' failed - at /Users/wiggles/nix/tests/functional/lang/eval-fail-assert.nix:2:12: - 1| let { - 2| x = arg: assert arg == "y"; 123; - | ^ - 3| - ``` From a27651908fc1b5ef73a81e46434a408c5868fa7b Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Thu, 8 Feb 2024 10:11:45 -0800 Subject: [PATCH 054/164] Add assertion for decreasing the indent MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Théophane Hufschmitt <7226587+thufschmitt@users.noreply.github.com> --- src/libexpr/print.cc | 1 + 1 file changed, 1 insertion(+) diff --git a/src/libexpr/print.cc b/src/libexpr/print.cc index 1ff026b3d..cdc9f6dbe 100644 --- a/src/libexpr/print.cc +++ b/src/libexpr/print.cc @@ -176,6 +176,7 @@ private: void decreaseIndent() { if (options.prettyPrint()) { + assert(indent.size() >= options.prettyIndent); indent.resize(indent.size() - options.prettyIndent); } } From 1c5f5d4291df7bf80806e57c75d2ec67bced8616 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Thu, 8 Feb 2024 10:17:20 -0800 Subject: [PATCH 055/164] `prettyPrint` -> `shouldPrettyPrint` --- src/libexpr/print-options.hh | 2 +- src/libexpr/print.cc | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/src/libexpr/print-options.hh b/src/libexpr/print-options.hh index 94767df9c..6c5e80c61 100644 --- a/src/libexpr/print-options.hh +++ b/src/libexpr/print-options.hh @@ -71,7 +71,7 @@ struct PrintOptions /** * True if pretty-printing is enabled. 
*/ - inline bool prettyPrint() + inline bool shouldPrettyPrint() { return prettyIndent > 0; } diff --git a/src/libexpr/print.cc b/src/libexpr/print.cc index cdc9f6dbe..a8eac8288 100644 --- a/src/libexpr/print.cc +++ b/src/libexpr/print.cc @@ -168,14 +168,14 @@ private: void increaseIndent() { - if (options.prettyPrint()) { + if (options.shouldPrettyPrint()) { indent.append(options.prettyIndent, ' '); } } void decreaseIndent() { - if (options.prettyPrint()) { + if (options.shouldPrettyPrint()) { assert(indent.size() >= options.prettyIndent); indent.resize(indent.size() - options.prettyIndent); } @@ -279,7 +279,7 @@ private: bool shouldPrettyPrintAttrs(AttrVec & v) { - if (!options.prettyPrint() || v.empty()) { + if (!options.shouldPrettyPrint() || v.empty()) { return false; } @@ -356,7 +356,7 @@ private: bool shouldPrettyPrintList(std::span list) { - if (!options.prettyPrint() || list.empty()) { + if (!options.shouldPrettyPrint() || list.empty()) { return false; } From 403c90ddf58a3f16a44dfe1f20004b6baa4e5ce2 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Thu, 8 Feb 2024 10:17:33 -0800 Subject: [PATCH 056/164] Extract `printSpace` helper --- src/libexpr/print.cc | 39 +++++++++++++++++++-------------------- 1 file changed, 19 insertions(+), 20 deletions(-) diff --git a/src/libexpr/print.cc b/src/libexpr/print.cc index a8eac8288..5605aad28 100644 --- a/src/libexpr/print.cc +++ b/src/libexpr/print.cc @@ -181,6 +181,21 @@ private: } } + /** + * Print a space (for separating items or attributes). + * + * If pretty-printing is enabled, a newline and the current `indent` is + * printed instead. 
+ */ + void printSpace(bool prettyPrint) + { + if (prettyPrint) { + output << "\n" << indent; + } else { + output << " "; + } + } + void printRepeated() { if (options.ansiColors) @@ -324,11 +339,7 @@ private: auto prettyPrint = shouldPrettyPrintAttrs(sorted); for (auto & i : sorted) { - if (prettyPrint) { - output << "\n" << indent; - } else { - output << " "; - } + printSpace(prettyPrint); if (attrsPrinted >= options.maxAttrs) { printElided(sorted.size() - attrsPrinted, "attribute", "attributes"); @@ -343,11 +354,7 @@ private: } decreaseIndent(); - if (prettyPrint) { - output << "\n" << indent; - } else { - output << " "; - } + printSpace(prettyPrint); output << "}"; } else { output << "{ ... }"; @@ -389,11 +396,7 @@ private: auto listItems = v.listItems(); auto prettyPrint = shouldPrettyPrintList(listItems); for (auto elem : listItems) { - if (prettyPrint) { - output << "\n" << indent; - } else { - output << " "; - } + printSpace(prettyPrint); if (listItemsPrinted >= options.maxListItems) { printElided(listItems.size() - listItemsPrinted, "item", "items"); @@ -409,11 +412,7 @@ private: } decreaseIndent(); - if (prettyPrint) { - output << "\n" << indent; - } else { - output << " "; - } + printSpace(prettyPrint); output << "]"; } else { output << "[ ... ]"; From 149bd63afb30c5ae58eb3cc03fc208f89547cc16 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Sat, 3 Feb 2024 19:16:30 -0800 Subject: [PATCH 057/164] Cleanup `fmt.hh` When I started contributing to Nix, I found the mix of definitions and names in `fmt.hh` to be rather confusing, especially the small difference between `hintfmt` and `hintformat`. I've renamed many classes and added documentation to most definitions. - `formatHelper` is no longer exported. - `fmt`'s documentation is now with `fmt` rather than (misleadingly) above `formatHelper`. - `yellowtxt` is renamed to `Magenta`. `yellowtxt` wraps its value with `ANSI_WARNING`, but `ANSI_WARNING` has been equal to `ANSI_MAGENTA` for a long time. 
Now the name is updated. - `normaltxt` is renamed to `Uncolored`. - `hintfmt` has been merged into `hintformat` as extra constructor functions. - `hintformat` has been renamed to `hintfmt`. - The single-argument `hintformat(std::string)` constructor has been renamed to a static member `hintformat::interpolate` to avoid pitfalls with using user-generated strings as format strings. --- src/build-remote/build-remote.cc | 2 +- src/libexpr/eval.hh | 2 +- src/libexpr/value/context.hh | 2 +- src/libstore/build/derivation-goal.cc | 8 +- src/libstore/build/local-derivation-goal.cc | 2 +- src/libstore/filetransfer.cc | 2 +- src/libstore/sqlite.cc | 6 +- src/libstore/sqlite.hh | 6 +- src/libutil/error.cc | 4 +- src/libutil/error.hh | 10 +- src/libutil/fmt.hh | 157 +++++++++++++------- src/libutil/logging.hh | 11 ++ tests/unit/libexpr/error_traces.cc | 1 - tests/unit/libutil/logging.cc | 2 +- 14 files changed, 135 insertions(+), 80 deletions(-) diff --git a/src/build-remote/build-remote.cc b/src/build-remote/build-remote.cc index 519e03242..94b672976 100644 --- a/src/build-remote/build-remote.cc +++ b/src/build-remote/build-remote.cc @@ -202,7 +202,7 @@ static int main_build_remote(int argc, char * * argv) else drvstr = ""; - auto error = hintformat(errorText); + auto error = hintfmt(errorText); error % drvstr % neededSystem diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh index 3c7c5da27..f72135527 100644 --- a/src/libexpr/eval.hh +++ b/src/libexpr/eval.hh @@ -148,7 +148,7 @@ struct DebugTrace { std::shared_ptr pos; const Expr & expr; const Env & env; - hintformat hint; + hintfmt hint; bool isError; }; diff --git a/src/libexpr/value/context.hh b/src/libexpr/value/context.hh index 51fd30a44..2abd1c9d4 100644 --- a/src/libexpr/value/context.hh +++ b/src/libexpr/value/context.hh @@ -20,7 +20,7 @@ public: { raw = raw_; auto hf = hintfmt(args...); - err.msg = hintfmt("Bad String Context element: %1%: %2%", normaltxt(hf.str()), raw); + err.msg = hintfmt("Bad String Context 
element: %1%: %2%", Uncolored(hf.str()), raw); } }; diff --git a/src/libstore/build/derivation-goal.cc b/src/libstore/build/derivation-goal.cc index 454c35763..d3bbdf1ed 100644 --- a/src/libstore/build/derivation-goal.cc +++ b/src/libstore/build/derivation-goal.cc @@ -708,7 +708,7 @@ void DerivationGoal::tryToBuild() if (!outputLocks.lockPaths(lockFiles, "", false)) { if (!actLock) actLock = std::make_unique(*logger, lvlWarn, actBuildWaiting, - fmt("waiting for lock on %s", magentatxt(showPaths(lockFiles)))); + fmt("waiting for lock on %s", Magenta(showPaths(lockFiles)))); worker.waitForAWhile(shared_from_this()); return; } @@ -762,7 +762,7 @@ void DerivationGoal::tryToBuild() the wake-up timeout expires. */ if (!actLock) actLock = std::make_unique(*logger, lvlWarn, actBuildWaiting, - fmt("waiting for a machine to build '%s'", magentatxt(worker.store.printStorePath(drvPath)))); + fmt("waiting for a machine to build '%s'", Magenta(worker.store.printStorePath(drvPath)))); worker.waitForAWhile(shared_from_this()); outputLocks.unlock(); return; @@ -987,7 +987,7 @@ void DerivationGoal::buildDone() diskFull |= cleanupDecideWhetherDiskFull(); auto msg = fmt("builder for '%s' %s", - magentatxt(worker.store.printStorePath(drvPath)), + Magenta(worker.store.printStorePath(drvPath)), statusToString(status)); if (!logger->isVerbose() && !logTail.empty()) { @@ -1523,7 +1523,7 @@ void DerivationGoal::done( outputLocks.unlock(); buildResult.status = status; if (ex) - buildResult.errorMsg = fmt("%s", normaltxt(ex->info().msg)); + buildResult.errorMsg = fmt("%s", Uncolored(ex->info().msg)); if (buildResult.status == BuildResult::TimedOut) worker.timedOut = true; if (buildResult.status == BuildResult::PermanentFailure) diff --git a/src/libstore/build/local-derivation-goal.cc b/src/libstore/build/local-derivation-goal.cc index ce8943efe..a2f411b8a 100644 --- a/src/libstore/build/local-derivation-goal.cc +++ b/src/libstore/build/local-derivation-goal.cc @@ -232,7 +232,7 @@ void 
LocalDerivationGoal::tryLocalBuild() if (!buildUser) { if (!actLock) actLock = std::make_unique(*logger, lvlWarn, actBuildWaiting, - fmt("waiting for a free build user ID for '%s'", magentatxt(worker.store.printStorePath(drvPath)))); + fmt("waiting for a free build user ID for '%s'", Magenta(worker.store.printStorePath(drvPath)))); worker.waitForAWhile(shared_from_this()); return; } diff --git a/src/libstore/filetransfer.cc b/src/libstore/filetransfer.cc index dcbec4acd..eb39be158 100644 --- a/src/libstore/filetransfer.cc +++ b/src/libstore/filetransfer.cc @@ -887,7 +887,7 @@ FileTransferError::FileTransferError(FileTransfer::Error error, std::optionalsize() < 1024 || response->find("") != std::string::npos)) - err.msg = hintfmt("%1%\n\nresponse body:\n\n%2%", normaltxt(hf.str()), chomp(*response)); + err.msg = hintfmt("%1%\n\nresponse body:\n\n%2%", Uncolored(hf.str()), chomp(*response)); else err.msg = hf; } diff --git a/src/libstore/sqlite.cc b/src/libstore/sqlite.cc index d7432a305..ff14ec420 100644 --- a/src/libstore/sqlite.cc +++ b/src/libstore/sqlite.cc @@ -10,19 +10,19 @@ namespace nix { -SQLiteError::SQLiteError(const char *path, const char *errMsg, int errNo, int extendedErrNo, int offset, hintformat && hf) +SQLiteError::SQLiteError(const char *path, const char *errMsg, int errNo, int extendedErrNo, int offset, hintfmt && hf) : Error(""), path(path), errMsg(errMsg), errNo(errNo), extendedErrNo(extendedErrNo), offset(offset) { auto offsetStr = (offset == -1) ? "" : "at offset " + std::to_string(offset) + ": "; err.msg = hintfmt("%s: %s%s, %s (in '%s')", - normaltxt(hf.str()), + Uncolored(hf.str()), offsetStr, sqlite3_errstr(extendedErrNo), errMsg, path ? 
path : "(in-memory)"); } -[[noreturn]] void SQLiteError::throw_(sqlite3 * db, hintformat && hf) +[[noreturn]] void SQLiteError::throw_(sqlite3 * db, hintfmt && hf) { int err = sqlite3_errcode(db); int exterr = sqlite3_extended_errcode(db); diff --git a/src/libstore/sqlite.hh b/src/libstore/sqlite.hh index 0c08267f7..33ebb5892 100644 --- a/src/libstore/sqlite.hh +++ b/src/libstore/sqlite.hh @@ -145,16 +145,16 @@ struct SQLiteError : Error throw_(db, hintfmt(fs, args...)); } - SQLiteError(const char *path, const char *errMsg, int errNo, int extendedErrNo, int offset, hintformat && hf); + SQLiteError(const char *path, const char *errMsg, int errNo, int extendedErrNo, int offset, hintfmt && hf); protected: template SQLiteError(const char *path, const char *errMsg, int errNo, int extendedErrNo, int offset, const std::string & fs, const Args & ... args) - : SQLiteError(path, errNo, extendedErrNo, offset, hintfmt(fs, args...)) + : SQLiteError(path, errMsg, errNo, extendedErrNo, offset, hintfmt(fs, args...)) { } - [[noreturn]] static void throw_(sqlite3 * db, hintformat && hf); + [[noreturn]] static void throw_(sqlite3 * db, hintfmt && hf); }; diff --git a/src/libutil/error.cc b/src/libutil/error.cc index e4e50d73b..e3b30b3a1 100644 --- a/src/libutil/error.cc +++ b/src/libutil/error.cc @@ -11,7 +11,7 @@ namespace nix { -void BaseError::addTrace(std::shared_ptr && e, hintformat hint, bool frame) +void BaseError::addTrace(std::shared_ptr && e, hintfmt hint, bool frame) { err.traces.push_front(Trace { .pos = std::move(e), .hint = hint, .frame = frame }); } @@ -37,7 +37,7 @@ const std::string & BaseError::calcWhat() const std::optional ErrorInfo::programName = std::nullopt; -std::ostream & operator <<(std::ostream & os, const hintformat & hf) +std::ostream & operator <<(std::ostream & os, const hintfmt & hf) { return os << hf.str(); } diff --git a/src/libutil/error.hh b/src/libutil/error.hh index 4fb822843..966f4d770 100644 --- a/src/libutil/error.hh +++ b/src/libutil/error.hh 
@@ -63,7 +63,7 @@ void printCodeLines(std::ostream & out, struct Trace { std::shared_ptr pos; - hintformat hint; + hintfmt hint; bool frame; }; @@ -74,7 +74,7 @@ inline bool operator>=(const Trace& lhs, const Trace& rhs); struct ErrorInfo { Verbosity level; - hintformat msg; + hintfmt msg; std::shared_ptr pos; std::list traces; @@ -126,7 +126,7 @@ public: : err { .level = lvlError, .msg = hintfmt(args...), .suggestions = sug } { } - BaseError(hintformat hint) + BaseError(hintfmt hint) : err { .level = lvlError, .msg = hint } { } @@ -162,7 +162,7 @@ public: addTrace(std::move(e), hintfmt(std::string(fs), args...)); } - void addTrace(std::shared_ptr && e, hintformat hint, bool frame = false); + void addTrace(std::shared_ptr && e, hintfmt hint, bool frame = false); bool hasTrace() const { return !err.traces.empty(); } @@ -215,7 +215,7 @@ public: : SystemError(""), errNo(errNo) { auto hf = hintfmt(args...); - err.msg = hintfmt("%1%: %2%", normaltxt(hf.str()), strerror(errNo)); + err.msg = hintfmt("%1%: %2%", Uncolored(hf.str()), strerror(errNo)); } /** diff --git a/src/libutil/fmt.hh b/src/libutil/fmt.hh index 6430c7707..9c2cc1e85 100644 --- a/src/libutil/fmt.hh +++ b/src/libutil/fmt.hh @@ -8,37 +8,53 @@ namespace nix { - +namespace { /** - * Inherit some names from other namespaces for convenience. - */ -using boost::format; - - -/** - * A variadic template that does nothing. Useful to call a function - * for all variadic arguments but ignoring the result. - */ -struct nop { template nop(T...) {} }; - - -/** - * A helper for formatting strings. ‘fmt(format, a_0, ..., a_n)’ is - * equivalent to ‘boost::format(format) % a_0 % ... % - * ... a_n’. However, ‘fmt(s)’ is equivalent to ‘s’ (so no %-expansion - * takes place). + * A helper for writing `boost::format` expressions. + * + * These are equivalent: + * + * ``` + * formatHelper(formatter, a_0, ..., a_n) + * formatter % a_0 % ... % a_n + * ``` + * + * With a single argument, `formatHelper(s)` is a no-op. 
*/ template inline void formatHelper(F & f) -{ -} +{ } template inline void formatHelper(F & f, const T & x, const Args & ... args) { + // Interpolate one argument and then recurse. formatHelper(f % x, args...); } +} +/** + * A helper for writing a `boost::format` expression to a string. + * + * These are (roughly) equivalent: + * + * ``` + * fmt(formatString, a_0, ..., a_n) + * (boost::format(formatString) % a_0 % ... % a_n).str() + * ``` + * + * However, when called with a single argument, the string is returned + * unchanged. + * + * If you write code like this: + * + * ``` + * std::cout << boost::format(stringFromUserInput) << std::endl; + * ``` + * + * And `stringFromUserInput` contains formatting placeholders like `%s`, then + * the code will crash at runtime. `fmt` helps you avoid this pitfall. + */ inline std::string fmt(const std::string & s) { return s; @@ -63,61 +79,107 @@ inline std::string fmt(const std::string & fs, const Args & ... args) return f.str(); } -// format function for hints in errors. same as fmt, except templated values -// are always in magenta. +/** + * Values wrapped in this struct are printed in magenta. + * + * By default, arguments to `hintfmt` are printed in magenta. To avoid this, + * either wrap the argument in `Uncolored` or add a specialization of + * `hintfmt::operator%`. + */ template -struct magentatxt +struct Magenta { - magentatxt(const T &s) : value(s) {} + Magenta(const T &s) : value(s) {} const T & value; }; template -std::ostream & operator<<(std::ostream & out, const magentatxt & y) +std::ostream & operator<<(std::ostream & out, const Magenta & y) { return out << ANSI_WARNING << y.value << ANSI_NORMAL; } +/** + * Values wrapped in this class are printed without coloring. + * + * By default, arguments to `hintfmt` are printed in magenta (see `Magenta`). 
+ */ template -struct normaltxt +struct Uncolored { - normaltxt(const T & s) : value(s) {} + Uncolored(const T & s) : value(s) {} const T & value; }; template -std::ostream & operator<<(std::ostream & out, const normaltxt & y) +std::ostream & operator<<(std::ostream & out, const Uncolored & y) { return out << ANSI_NORMAL << y.value; } -class hintformat +/** + * A wrapper around `boost::format` which colors interpolated arguments in + * magenta by default. + */ +class hintfmt { +private: + boost::format fmt; + public: - hintformat(const std::string & format) : fmt(format) + /** + * Construct a `hintfmt` from a format string, with values to be + * interpolated later with `%`. + * + * This isn't exposed as a single-argument constructor to avoid + * accidentally constructing `hintfmt`s with user-controlled strings. See + * the note on `fmt` for more information. + */ + static hintfmt interpolate(const std::string & formatString) { - fmt.exceptions(boost::io::all_error_bits ^ - boost::io::too_many_args_bit ^ - boost::io::too_few_args_bit); + hintfmt result((boost::format(formatString))); + result.fmt.exceptions( + boost::io::all_error_bits ^ + boost::io::too_many_args_bit ^ + boost::io::too_few_args_bit); + return result; } - hintformat(const hintformat & hf) + /** + * Format the given string literally, without interpolating format + * placeholders. + */ + hintfmt(const std::string & literal) + : hintfmt("%s", Uncolored(literal)) + { } + + /** + * Interpolate the given arguments into the format string. + */ + template + hintfmt(const std::string & format, const Args & ... 
args) + : fmt(format) + { + formatHelper(*this, args...); + } + + hintfmt(const hintfmt & hf) : fmt(hf.fmt) { } - hintformat(format && fmt) + hintfmt(boost::format && fmt) : fmt(std::move(fmt)) { } template - hintformat & operator%(const T & value) + hintfmt & operator%(const T & value) { - fmt % magentatxt(value); + fmt % Magenta(value); return *this; } template - hintformat & operator%(const normaltxt & value) + hintfmt & operator%(const Uncolored & value) { fmt % value.value; return *this; @@ -127,25 +189,8 @@ public: { return fmt.str(); } - -private: - format fmt; }; -std::ostream & operator<<(std::ostream & os, const hintformat & hf); - -template -inline hintformat hintfmt(const std::string & fs, const Args & ... args) -{ - hintformat f(fs); - formatHelper(f, args...); - return f; -} - -inline hintformat hintfmt(const std::string & plain_string) -{ - // we won't be receiving any args in this case, so just print the original string - return hintfmt("%s", normaltxt(plain_string)); -} +std::ostream & operator<<(std::ostream & os, const hintfmt & hf); } diff --git a/src/libutil/logging.hh b/src/libutil/logging.hh index 183f2d8e1..9e81132e3 100644 --- a/src/libutil/logging.hh +++ b/src/libutil/logging.hh @@ -120,6 +120,17 @@ public: { } }; +/** + * A variadic template that does nothing. + * + * Useful to call a function with each argument in a parameter pack. + */ +struct nop +{ + template nop(T...) 
+ { } +}; + ActivityId getCurActivity(); void setCurActivity(const ActivityId activityId); diff --git a/tests/unit/libexpr/error_traces.cc b/tests/unit/libexpr/error_traces.cc index 8e8726195..3cfa2b61b 100644 --- a/tests/unit/libexpr/error_traces.cc +++ b/tests/unit/libexpr/error_traces.cc @@ -53,7 +53,6 @@ namespace nix { state.error("beans").debugThrow(); } catch (Error & e2) { e.addTrace(state.positions[noPos], "beans2", ""); - //e2.addTrace(state.positions[noPos], "Something", ""); ASSERT_TRUE(e.info().traces.size() == 2); ASSERT_TRUE(e2.info().traces.size() == 0); ASSERT_FALSE(&e.info() == &e2.info()); diff --git a/tests/unit/libutil/logging.cc b/tests/unit/libutil/logging.cc index 8950a26d4..c8c7c091f 100644 --- a/tests/unit/libutil/logging.cc +++ b/tests/unit/libutil/logging.cc @@ -62,7 +62,7 @@ namespace nix { throw TestError(e.info()); } catch (Error &e) { ErrorInfo ei = e.info(); - ei.msg = hintfmt("%s; subsequent error message.", normaltxt(e.info().msg.str())); + ei.msg = hintfmt("%s; subsequent error message.", Uncolored(e.info().msg.str())); testing::internal::CaptureStderr(); logger->logEI(ei); From c0e7f50c1a46693d06fab8a36526a4beaa702389 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Sat, 3 Feb 2024 20:35:19 -0800 Subject: [PATCH 058/164] Rename `hintfmt` to `HintFmt` --- src/build-remote/build-remote.cc | 2 +- src/libexpr/eval-error.cc | 10 +- src/libexpr/eval-error.hh | 2 +- src/libexpr/eval.cc | 8 +- src/libexpr/eval.hh | 2 +- src/libexpr/flake/flake.cc | 4 +- src/libexpr/lexer.l | 6 +- src/libexpr/parser-state.hh | 8 +- src/libexpr/parser.y | 8 +- src/libexpr/primops.cc | 10 +- src/libexpr/primops/fetchClosure.cc | 22 +- src/libexpr/print.cc | 2 +- src/libexpr/print.hh | 2 +- src/libexpr/value-to-json.cc | 4 +- src/libexpr/value/context.hh | 4 +- src/libstore/build/local-derivation-goal.cc | 2 +- src/libstore/filetransfer.cc | 4 +- src/libstore/sqlite.cc | 10 +- src/libstore/sqlite.hh | 8 +- src/libutil/current-process.cc | 2 +- 
src/libutil/error.cc | 4 +- src/libutil/error.hh | 20 +- src/libutil/fmt.hh | 67 +- src/libutil/serialise.cc | 4 +- src/nix/daemon.cc | 2 +- src/nix/eval.cc | 2 +- src/nix/flake.cc | 18 +- tests/unit/libexpr/error_traces.cc | 651 ++++++++++---------- tests/unit/libutil/logging.cc | 36 +- 29 files changed, 460 insertions(+), 464 deletions(-) diff --git a/src/build-remote/build-remote.cc b/src/build-remote/build-remote.cc index 94b672976..118468477 100644 --- a/src/build-remote/build-remote.cc +++ b/src/build-remote/build-remote.cc @@ -202,7 +202,7 @@ static int main_build_remote(int argc, char * * argv) else drvstr = ""; - auto error = hintfmt(errorText); + auto error = HintFmt(errorText); error % drvstr % neededSystem diff --git a/src/libexpr/eval-error.cc b/src/libexpr/eval-error.cc index 250c59a19..f4cdeec5c 100644 --- a/src/libexpr/eval-error.cc +++ b/src/libexpr/eval-error.cc @@ -28,7 +28,7 @@ template EvalErrorBuilder & EvalErrorBuilder::withTrace(PosIdx pos, const std::string_view text) { error.err.traces.push_front( - Trace{.pos = error.state.positions[pos], .hint = hintfmt(std::string(text)), .frame = false}); + Trace{.pos = error.state.positions[pos], .hint = HintFmt(std::string(text)), .frame = false}); return *this; } @@ -36,7 +36,7 @@ template EvalErrorBuilder & EvalErrorBuilder::withFrameTrace(PosIdx pos, const std::string_view text) { error.err.traces.push_front( - Trace{.pos = error.state.positions[pos], .hint = hintformat(std::string(text)), .frame = true}); + Trace{.pos = error.state.positions[pos], .hint = HintFmt(std::string(text)), .frame = true}); return *this; } @@ -57,13 +57,13 @@ EvalErrorBuilder & EvalErrorBuilder::withFrame(const Env & env, const Expr .pos = error.state.positions[expr.getPos()], .expr = expr, .env = env, - .hint = hintformat("Fake frame for debugging purposes"), + .hint = HintFmt("Fake frame for debugging purposes"), .isError = true}); return *this; } template -EvalErrorBuilder & EvalErrorBuilder::addTrace(PosIdx pos, 
hintformat hint, bool frame) +EvalErrorBuilder & EvalErrorBuilder::addTrace(PosIdx pos, HintFmt hint, bool frame) { error.addTrace(error.state.positions[pos], hint, frame); return *this; @@ -75,7 +75,7 @@ EvalErrorBuilder & EvalErrorBuilder::addTrace(PosIdx pos, std::string_view formatString, const Args &... formatArgs) { - addTrace(error.state.positions[pos], hintfmt(std::string(formatString), formatArgs...)); + addTrace(error.state.positions[pos], HintFmt(std::string(formatString), formatArgs...)); return *this; } diff --git a/src/libexpr/eval-error.hh b/src/libexpr/eval-error.hh index 711743886..392902ad2 100644 --- a/src/libexpr/eval-error.hh +++ b/src/libexpr/eval-error.hh @@ -89,7 +89,7 @@ public: [[nodiscard, gnu::noinline]] EvalErrorBuilder & withFrame(const Env & e, const Expr & ex); - [[nodiscard, gnu::noinline]] EvalErrorBuilder & addTrace(PosIdx pos, hintformat hint, bool frame = false); + [[nodiscard, gnu::noinline]] EvalErrorBuilder & addTrace(PosIdx pos, HintFmt hint, bool frame = false); template [[nodiscard, gnu::noinline]] EvalErrorBuilder & diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 5bc62589c..bffbd5f1a 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -803,7 +803,7 @@ void EvalState::addErrorTrace(Error & e, const char * s, const std::string & s2) void EvalState::addErrorTrace(Error & e, const PosIdx pos, const char * s, const std::string & s2, bool frame) const { - e.addTrace(positions[pos], hintfmt(s, s2), frame); + e.addTrace(positions[pos], HintFmt(s, s2), frame); } template @@ -819,7 +819,7 @@ static std::unique_ptr makeDebugTraceStacker( .pos = std::move(pos), .expr = expr, .env = env, - .hint = hintfmt(formatArgs...), + .hint = HintFmt(formatArgs...), .isError = false }); } @@ -2792,7 +2792,7 @@ std::optional EvalState::resolveSearchPathPath(const SearchPath::Pa res = { store->toRealPath(storePath) }; } catch (FileTransferError & e) { logWarning({ - .msg = hintfmt("Nix search path entry '%1%' cannot be 
downloaded, ignoring", value) + .msg = HintFmt("Nix search path entry '%1%' cannot be downloaded, ignoring", value) }); } } @@ -2825,7 +2825,7 @@ std::optional EvalState::resolveSearchPathPath(const SearchPath::Pa res = { path }; else { logWarning({ - .msg = hintfmt("Nix search path entry '%1%' does not exist, ignoring", value) + .msg = HintFmt("Nix search path entry '%1%' does not exist, ignoring", value) }); res = std::nullopt; } diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh index f72135527..756ab98e3 100644 --- a/src/libexpr/eval.hh +++ b/src/libexpr/eval.hh @@ -148,7 +148,7 @@ struct DebugTrace { std::shared_ptr pos; const Expr & expr; const Env & env; - hintfmt hint; + HintFmt hint; bool isError; }; diff --git a/src/libexpr/flake/flake.cc b/src/libexpr/flake/flake.cc index 3396b0219..451780c89 100644 --- a/src/libexpr/flake/flake.cc +++ b/src/libexpr/flake/flake.cc @@ -155,7 +155,7 @@ static FlakeInput parseFlakeInput(EvalState & state, } catch (Error & e) { e.addTrace( state.positions[attr.pos], - hintfmt("while evaluating flake attribute '%s'", state.symbols[attr.name])); + HintFmt("while evaluating flake attribute '%s'", state.symbols[attr.name])); throw; } } @@ -164,7 +164,7 @@ static FlakeInput parseFlakeInput(EvalState & state, try { input.ref = FlakeRef::fromAttrs(attrs); } catch (Error & e) { - e.addTrace(state.positions[pos], hintfmt("while evaluating flake input")); + e.addTrace(state.positions[pos], HintFmt("while evaluating flake input")); throw; } else { diff --git a/src/libexpr/lexer.l b/src/libexpr/lexer.l index af67e847d..380048c77 100644 --- a/src/libexpr/lexer.l +++ b/src/libexpr/lexer.l @@ -147,7 +147,7 @@ or { return OR_KW; } yylval->n = boost::lexical_cast(yytext); } catch (const boost::bad_lexical_cast &) { throw ParseError(ErrorInfo{ - .msg = hintfmt("invalid integer '%1%'", yytext), + .msg = HintFmt("invalid integer '%1%'", yytext), .pos = state->positions[CUR_POS], }); } @@ -157,7 +157,7 @@ or { return OR_KW; } yylval->nf = 
strtod(yytext, 0); if (errno != 0) throw ParseError(ErrorInfo{ - .msg = hintfmt("invalid float '%1%'", yytext), + .msg = HintFmt("invalid float '%1%'", yytext), .pos = state->positions[CUR_POS], }); return FLOAT_LIT; @@ -286,7 +286,7 @@ or { return OR_KW; } {ANY} | <> { throw ParseError(ErrorInfo{ - .msg = hintfmt("path has a trailing slash"), + .msg = HintFmt("path has a trailing slash"), .pos = state->positions[CUR_POS], }); } diff --git a/src/libexpr/parser-state.hh b/src/libexpr/parser-state.hh index bdd5bbabe..87aeaeef5 100644 --- a/src/libexpr/parser-state.hh +++ b/src/libexpr/parser-state.hh @@ -64,7 +64,7 @@ struct ParserState inline void ParserState::dupAttr(const AttrPath & attrPath, const PosIdx pos, const PosIdx prevPos) { throw ParseError({ - .msg = hintfmt("attribute '%1%' already defined at %2%", + .msg = HintFmt("attribute '%1%' already defined at %2%", showAttrPath(symbols, attrPath), positions[prevPos]), .pos = positions[pos] }); @@ -73,7 +73,7 @@ inline void ParserState::dupAttr(const AttrPath & attrPath, const PosIdx pos, co inline void ParserState::dupAttr(Symbol attr, const PosIdx pos, const PosIdx prevPos) { throw ParseError({ - .msg = hintfmt("attribute '%1%' already defined at %2%", symbols[attr], positions[prevPos]), + .msg = HintFmt("attribute '%1%' already defined at %2%", symbols[attr], positions[prevPos]), .pos = positions[pos] }); } @@ -154,13 +154,13 @@ inline Formals * ParserState::validateFormals(Formals * formals, PosIdx pos, Sym } if (duplicate) throw ParseError({ - .msg = hintfmt("duplicate formal function argument '%1%'", symbols[duplicate->first]), + .msg = HintFmt("duplicate formal function argument '%1%'", symbols[duplicate->first]), .pos = positions[duplicate->second] }); if (arg && formals->has(arg)) throw ParseError({ - .msg = hintfmt("duplicate formal function argument '%1%'", symbols[arg]), + .msg = HintFmt("duplicate formal function argument '%1%'", symbols[arg]), .pos = positions[pos] }); diff --git 
a/src/libexpr/parser.y b/src/libexpr/parser.y index 95f45c80a..a3ba13c66 100644 --- a/src/libexpr/parser.y +++ b/src/libexpr/parser.y @@ -65,7 +65,7 @@ using namespace nix; void yyerror(YYLTYPE * loc, yyscan_t scanner, ParserState * state, const char * error) { throw ParseError({ - .msg = hintfmt(error), + .msg = HintFmt(error), .pos = state->positions[state->at(*loc)] }); } @@ -154,7 +154,7 @@ expr_function | LET binds IN_KW expr_function { if (!$2->dynamicAttrs.empty()) throw ParseError({ - .msg = hintfmt("dynamic attributes not allowed in let"), + .msg = HintFmt("dynamic attributes not allowed in let"), .pos = state->positions[CUR_POS] }); $$ = new ExprLet($2, $4); @@ -244,7 +244,7 @@ expr_simple static bool noURLLiterals = experimentalFeatureSettings.isEnabled(Xp::NoUrlLiterals); if (noURLLiterals) throw ParseError({ - .msg = hintfmt("URL literals are disabled"), + .msg = HintFmt("URL literals are disabled"), .pos = state->positions[CUR_POS] }); $$ = new ExprString(std::string($1)); @@ -340,7 +340,7 @@ attrs delete str; } else throw ParseError({ - .msg = hintfmt("dynamic attributes not allowed in inherit"), + .msg = HintFmt("dynamic attributes not allowed in inherit"), .pos = state->positions[state->at(@2)] }); } diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index 5e2bbe16f..8c6aeffac 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -754,7 +754,7 @@ static RegisterPrimOp primop_break({ if (state.debugRepl && !state.debugTraces.empty()) { auto error = Error(ErrorInfo { .level = lvlInfo, - .msg = hintfmt("breakpoint reached"), + .msg = HintFmt("breakpoint reached"), .pos = state.positions[pos], }); @@ -765,7 +765,7 @@ static RegisterPrimOp primop_break({ // If the user elects to quit the repl, throw an exception. 
throw Error(ErrorInfo{ .level = lvlInfo, - .msg = hintfmt("quit the debugger"), + .msg = HintFmt("quit the debugger"), .pos = nullptr, }); } @@ -820,7 +820,7 @@ static void prim_addErrorContext(EvalState & state, const PosIdx pos, Value * * auto message = state.coerceToString(pos, *args[0], context, "while evaluating the error message passed to builtins.addErrorContext", false, false).toOwned(); - e.addTrace(nullptr, hintfmt(message), true); + e.addTrace(nullptr, HintFmt(message), true); throw; } } @@ -1071,7 +1071,7 @@ static void prim_derivationStrict(EvalState & state, const PosIdx pos, Value * * * often results from the composition of several functions * (derivationStrict, derivation, mkDerivation, mkPythonModule, etc.) */ - e.addTrace(nullptr, hintfmt( + e.addTrace(nullptr, HintFmt( "while evaluating derivation '%s'\n" " whose name attribute is located at %s", drvName, pos), true); @@ -1232,7 +1232,7 @@ drvName, Bindings * attrs, Value & v) } catch (Error & e) { e.addTrace(state.positions[i->pos], - hintfmt("while evaluating attribute '%1%' of derivation '%2%'", key, drvName), + HintFmt("while evaluating attribute '%1%' of derivation '%2%'", key, drvName), true); throw; } diff --git a/src/libexpr/primops/fetchClosure.cc b/src/libexpr/primops/fetchClosure.cc index 5806b3ff9..f51a6465d 100644 --- a/src/libexpr/primops/fetchClosure.cc +++ b/src/libexpr/primops/fetchClosure.cc @@ -23,7 +23,7 @@ static void runFetchClosureWithRewrite(EvalState & state, const PosIdx pos, Stor auto rewrittenPath = makeContentAddressed(fromStore, *state.store, fromPath); if (toPathMaybe && *toPathMaybe != rewrittenPath) throw Error({ - .msg = hintfmt("rewriting '%s' to content-addressed form yielded '%s', while '%s' was expected", + .msg = HintFmt("rewriting '%s' to content-addressed form yielded '%s', while '%s' was expected", state.store->printStorePath(fromPath), state.store->printStorePath(rewrittenPath), state.store->printStorePath(*toPathMaybe)), @@ -31,7 +31,7 @@ static void 
runFetchClosureWithRewrite(EvalState & state, const PosIdx pos, Stor }); if (!toPathMaybe) throw Error({ - .msg = hintfmt( + .msg = HintFmt( "rewriting '%s' to content-addressed form yielded '%s'\n" "Use this value for the 'toPath' attribute passed to 'fetchClosure'", state.store->printStorePath(fromPath), @@ -50,7 +50,7 @@ static void runFetchClosureWithRewrite(EvalState & state, const PosIdx pos, Stor // We don't perform the rewriting when outPath already exists, as an optimisation. // However, we can quickly detect a mistake if the toPath is input addressed. throw Error({ - .msg = hintfmt( + .msg = HintFmt( "The 'toPath' value '%s' is input-addressed, so it can't possibly be the result of rewriting to a content-addressed path.\n\n" "Set 'toPath' to an empty string to make Nix report the correct content-addressed path.", state.store->printStorePath(toPath)), @@ -73,7 +73,7 @@ static void runFetchClosureWithContentAddressedPath(EvalState & state, const Pos if (!info->isContentAddressed(*state.store)) { throw Error({ - .msg = hintfmt( + .msg = HintFmt( "The 'fromPath' value '%s' is input-addressed, but 'inputAddressed' is set to 'false' (default).\n\n" "If you do intend to fetch an input-addressed store path, add\n\n" " inputAddressed = true;\n\n" @@ -99,7 +99,7 @@ static void runFetchClosureWithInputAddressedPath(EvalState & state, const PosId if (info->isContentAddressed(*state.store)) { throw Error({ - .msg = hintfmt( + .msg = HintFmt( "The store object referred to by 'fromPath' at '%s' is not input-addressed, but 'inputAddressed' is set to 'true'.\n\n" "Remove the 'inputAddressed' attribute (it defaults to 'false') to expect 'fromPath' to be content-addressed", state.store->printStorePath(fromPath)), @@ -153,14 +153,14 @@ static void prim_fetchClosure(EvalState & state, const PosIdx pos, Value * * arg else throw Error({ - .msg = hintfmt("attribute '%s' isn't supported in call to 'fetchClosure'", attrName), + .msg = HintFmt("attribute '%s' isn't supported in 
call to 'fetchClosure'", attrName), .pos = state.positions[pos] }); } if (!fromPath) throw Error({ - .msg = hintfmt("attribute '%s' is missing in call to 'fetchClosure'", "fromPath"), + .msg = HintFmt("attribute '%s' is missing in call to 'fetchClosure'", "fromPath"), .pos = state.positions[pos] }); @@ -169,7 +169,7 @@ static void prim_fetchClosure(EvalState & state, const PosIdx pos, Value * * arg if (inputAddressed) { if (toPath) throw Error({ - .msg = hintfmt("attribute '%s' is set to true, but '%s' is also set. Please remove one of them", + .msg = HintFmt("attribute '%s' is set to true, but '%s' is also set. Please remove one of them", "inputAddressed", "toPath"), .pos = state.positions[pos] @@ -178,7 +178,7 @@ static void prim_fetchClosure(EvalState & state, const PosIdx pos, Value * * arg if (!fromStoreUrl) throw Error({ - .msg = hintfmt("attribute '%s' is missing in call to 'fetchClosure'", "fromStore"), + .msg = HintFmt("attribute '%s' is missing in call to 'fetchClosure'", "fromStore"), .pos = state.positions[pos] }); @@ -188,13 +188,13 @@ static void prim_fetchClosure(EvalState & state, const PosIdx pos, Value * * arg parsedURL.scheme != "https" && !(getEnv("_NIX_IN_TEST").has_value() && parsedURL.scheme == "file")) throw Error({ - .msg = hintfmt("'fetchClosure' only supports http:// and https:// stores"), + .msg = HintFmt("'fetchClosure' only supports http:// and https:// stores"), .pos = state.positions[pos] }); if (!parsedURL.query.empty()) throw Error({ - .msg = hintfmt("'fetchClosure' does not support URL query parameters (in '%s')", *fromStoreUrl), + .msg = HintFmt("'fetchClosure' does not support URL query parameters (in '%s')", *fromStoreUrl), .pos = state.positions[pos] }); diff --git a/src/libexpr/print.cc b/src/libexpr/print.cc index 9f31f3340..7e90e47eb 100644 --- a/src/libexpr/print.cc +++ b/src/libexpr/print.cc @@ -512,7 +512,7 @@ std::ostream & operator<<(std::ostream & output, const ValuePrinter & printer) } template<> -hintformat & 
hintformat::operator%(const ValuePrinter & value) +HintFmt & HintFmt::operator%(const ValuePrinter & value) { fmt % value; return *this; diff --git a/src/libexpr/print.hh b/src/libexpr/print.hh index a542bc7b1..7ddda81b8 100644 --- a/src/libexpr/print.hh +++ b/src/libexpr/print.hh @@ -86,6 +86,6 @@ std::ostream & operator<<(std::ostream & output, const ValuePrinter & printer); * magenta. */ template<> -hintformat & hintformat::operator%(const ValuePrinter & value); +HintFmt & HintFmt::operator%(const ValuePrinter & value); } diff --git a/src/libexpr/value-to-json.cc b/src/libexpr/value-to-json.cc index b2f116390..3f877a7fd 100644 --- a/src/libexpr/value-to-json.cc +++ b/src/libexpr/value-to-json.cc @@ -64,7 +64,7 @@ json printValueAsJSON(EvalState & state, bool strict, out[j] = printValueAsJSON(state, strict, *a.value, a.pos, context, copyToStore); } catch (Error & e) { e.addTrace(state.positions[a.pos], - hintfmt("while evaluating attribute '%1%'", j)); + HintFmt("while evaluating attribute '%1%'", j)); throw; } } @@ -81,7 +81,7 @@ json printValueAsJSON(EvalState & state, bool strict, out.push_back(printValueAsJSON(state, strict, *elem, pos, context, copyToStore)); } catch (Error & e) { e.addTrace(state.positions[pos], - hintfmt("while evaluating list element at index %1%", i)); + HintFmt("while evaluating list element at index %1%", i)); throw; } i++; diff --git a/src/libexpr/value/context.hh b/src/libexpr/value/context.hh index 2abd1c9d4..7f23cd3a4 100644 --- a/src/libexpr/value/context.hh +++ b/src/libexpr/value/context.hh @@ -19,8 +19,8 @@ public: : Error("") { raw = raw_; - auto hf = hintfmt(args...); - err.msg = hintfmt("Bad String Context element: %1%: %2%", Uncolored(hf.str()), raw); + auto hf = HintFmt(args...); + err.msg = HintFmt("Bad String Context element: %1%: %2%", Uncolored(hf.str()), raw); } }; diff --git a/src/libstore/build/local-derivation-goal.cc b/src/libstore/build/local-derivation-goal.cc index a2f411b8a..2f60d2f38 100644 --- 
a/src/libstore/build/local-derivation-goal.cc +++ b/src/libstore/build/local-derivation-goal.cc @@ -92,7 +92,7 @@ void handleDiffHook( } catch (Error & error) { ErrorInfo ei = error.info(); // FIXME: wrap errors. - ei.msg = hintfmt("diff hook execution failed: %s", ei.msg.str()); + ei.msg = HintFmt("diff hook execution failed: %s", ei.msg.str()); logError(ei); } } diff --git a/src/libstore/filetransfer.cc b/src/libstore/filetransfer.cc index eb39be158..ebfae346f 100644 --- a/src/libstore/filetransfer.cc +++ b/src/libstore/filetransfer.cc @@ -882,12 +882,12 @@ template FileTransferError::FileTransferError(FileTransfer::Error error, std::optional response, const Args & ... args) : Error(args...), error(error), response(response) { - const auto hf = hintfmt(args...); + const auto hf = HintFmt(args...); // FIXME: Due to https://github.com/NixOS/nix/issues/3841 we don't know how // to print different messages for different verbosity levels. For now // we add some heuristics for detecting when we want to show the response. if (response && (response->size() < 1024 || response->find("") != std::string::npos)) - err.msg = hintfmt("%1%\n\nresponse body:\n\n%2%", Uncolored(hf.str()), chomp(*response)); + err.msg = HintFmt("%1%\n\nresponse body:\n\n%2%", Uncolored(hf.str()), chomp(*response)); else err.msg = hf; } diff --git a/src/libstore/sqlite.cc b/src/libstore/sqlite.cc index ff14ec420..06abfb90b 100644 --- a/src/libstore/sqlite.cc +++ b/src/libstore/sqlite.cc @@ -10,11 +10,11 @@ namespace nix { -SQLiteError::SQLiteError(const char *path, const char *errMsg, int errNo, int extendedErrNo, int offset, hintfmt && hf) +SQLiteError::SQLiteError(const char *path, const char *errMsg, int errNo, int extendedErrNo, int offset, HintFmt && hf) : Error(""), path(path), errMsg(errMsg), errNo(errNo), extendedErrNo(extendedErrNo), offset(offset) { auto offsetStr = (offset == -1) ? 
"" : "at offset " + std::to_string(offset) + ": "; - err.msg = hintfmt("%s: %s%s, %s (in '%s')", + err.msg = HintFmt("%s: %s%s, %s (in '%s')", Uncolored(hf.str()), offsetStr, sqlite3_errstr(extendedErrNo), @@ -22,7 +22,7 @@ SQLiteError::SQLiteError(const char *path, const char *errMsg, int errNo, int ex path ? path : "(in-memory)"); } -[[noreturn]] void SQLiteError::throw_(sqlite3 * db, hintfmt && hf) +[[noreturn]] void SQLiteError::throw_(sqlite3 * db, HintFmt && hf) { int err = sqlite3_errcode(db); int exterr = sqlite3_extended_errcode(db); @@ -33,7 +33,7 @@ SQLiteError::SQLiteError(const char *path, const char *errMsg, int errNo, int ex if (err == SQLITE_BUSY || err == SQLITE_PROTOCOL) { auto exp = SQLiteBusy(path, errMsg, err, exterr, offset, std::move(hf)); - exp.err.msg = hintfmt( + exp.err.msg = HintFmt( err == SQLITE_PROTOCOL ? "SQLite database '%s' is busy (SQLITE_PROTOCOL)" : "SQLite database '%s' is busy", @@ -249,7 +249,7 @@ void handleSQLiteBusy(const SQLiteBusy & e, time_t & nextWarning) if (now > nextWarning) { nextWarning = now + 10; logWarning({ - .msg = hintfmt(e.what()) + .msg = HintFmt(e.what()) }); } diff --git a/src/libstore/sqlite.hh b/src/libstore/sqlite.hh index 33ebb5892..003e4d101 100644 --- a/src/libstore/sqlite.hh +++ b/src/libstore/sqlite.hh @@ -142,19 +142,19 @@ struct SQLiteError : Error template [[noreturn]] static void throw_(sqlite3 * db, const std::string & fs, const Args & ... args) { - throw_(db, hintfmt(fs, args...)); + throw_(db, HintFmt(fs, args...)); } - SQLiteError(const char *path, const char *errMsg, int errNo, int extendedErrNo, int offset, hintfmt && hf); + SQLiteError(const char *path, const char *errMsg, int errNo, int extendedErrNo, int offset, HintFmt && hf); protected: template SQLiteError(const char *path, const char *errMsg, int errNo, int extendedErrNo, int offset, const std::string & fs, const Args & ... 
args) - : SQLiteError(path, errMsg, errNo, extendedErrNo, offset, hintfmt(fs, args...)) + : SQLiteError(path, errMsg, errNo, extendedErrNo, offset, HintFmt(fs, args...)) { } - [[noreturn]] static void throw_(sqlite3 * db, hintfmt && hf); + [[noreturn]] static void throw_(sqlite3 * db, HintFmt && hf); }; diff --git a/src/libutil/current-process.cc b/src/libutil/current-process.cc index 01f64f211..47aa137d8 100644 --- a/src/libutil/current-process.cc +++ b/src/libutil/current-process.cc @@ -63,7 +63,7 @@ void setStackSize(rlim_t stackSize) if (setrlimit(RLIMIT_STACK, &limit) != 0) { logger->log( lvlError, - hintfmt( + HintFmt( "Failed to increase stack size from %1% to %2% (maximum allowed stack size: %3%): %4%", savedStackSize, stackSize, diff --git a/src/libutil/error.cc b/src/libutil/error.cc index e3b30b3a1..4a9efc0b5 100644 --- a/src/libutil/error.cc +++ b/src/libutil/error.cc @@ -11,7 +11,7 @@ namespace nix { -void BaseError::addTrace(std::shared_ptr && e, hintfmt hint, bool frame) +void BaseError::addTrace(std::shared_ptr && e, HintFmt hint, bool frame) { err.traces.push_front(Trace { .pos = std::move(e), .hint = hint, .frame = frame }); } @@ -37,7 +37,7 @@ const std::string & BaseError::calcWhat() const std::optional ErrorInfo::programName = std::nullopt; -std::ostream & operator <<(std::ostream & os, const hintfmt & hf) +std::ostream & operator <<(std::ostream & os, const HintFmt & hf) { return os << hf.str(); } diff --git a/src/libutil/error.hh b/src/libutil/error.hh index 966f4d770..2e5de5d32 100644 --- a/src/libutil/error.hh +++ b/src/libutil/error.hh @@ -63,7 +63,7 @@ void printCodeLines(std::ostream & out, struct Trace { std::shared_ptr pos; - hintfmt hint; + HintFmt hint; bool frame; }; @@ -74,7 +74,7 @@ inline bool operator>=(const Trace& lhs, const Trace& rhs); struct ErrorInfo { Verbosity level; - hintfmt msg; + HintFmt msg; std::shared_ptr pos; std::list traces; @@ -113,20 +113,20 @@ public: template BaseError(unsigned int status, const Args & ... 
args) - : err { .level = lvlError, .msg = hintfmt(args...), .status = status } + : err { .level = lvlError, .msg = HintFmt(args...), .status = status } { } template explicit BaseError(const std::string & fs, const Args & ... args) - : err { .level = lvlError, .msg = hintfmt(fs, args...) } + : err { .level = lvlError, .msg = HintFmt(fs, args...) } { } template BaseError(const Suggestions & sug, const Args & ... args) - : err { .level = lvlError, .msg = hintfmt(args...), .suggestions = sug } + : err { .level = lvlError, .msg = HintFmt(args...), .suggestions = sug } { } - BaseError(hintfmt hint) + BaseError(HintFmt hint) : err { .level = lvlError, .msg = hint } { } @@ -159,10 +159,10 @@ public: template void addTrace(std::shared_ptr && e, std::string_view fs, const Args & ... args) { - addTrace(std::move(e), hintfmt(std::string(fs), args...)); + addTrace(std::move(e), HintFmt(std::string(fs), args...)); } - void addTrace(std::shared_ptr && e, hintfmt hint, bool frame = false); + void addTrace(std::shared_ptr && e, HintFmt hint, bool frame = false); bool hasTrace() const { return !err.traces.empty(); } @@ -214,8 +214,8 @@ public: SysError(int errNo, const Args & ... args) : SystemError(""), errNo(errNo) { - auto hf = hintfmt(args...); - err.msg = hintfmt("%1%: %2%", Uncolored(hf.str()), strerror(errNo)); + auto hf = HintFmt(args...); + err.msg = HintFmt("%1%: %2%", Uncolored(hf.str()), strerror(errNo)); } /** diff --git a/src/libutil/fmt.hh b/src/libutil/fmt.hh index 9c2cc1e85..e996f4ba2 100644 --- a/src/libutil/fmt.hh +++ b/src/libutil/fmt.hh @@ -31,6 +31,17 @@ inline void formatHelper(F & f, const T & x, const Args & ... args) // Interpolate one argument and then recurse. formatHelper(f % x, args...); } + +/** + * Set the correct exceptions for `fmt`. 
+ */ +void setExceptions(boost::format & fmt) +{ + fmt.exceptions( + boost::io::all_error_bits ^ + boost::io::too_many_args_bit ^ + boost::io::too_few_args_bit); +} } /** @@ -74,7 +85,7 @@ template inline std::string fmt(const std::string & fs, const Args & ... args) { boost::format f(fs); - f.exceptions(boost::io::all_error_bits ^ boost::io::too_many_args_bit); + setExceptions(f); formatHelper(f, args...); return f.str(); } @@ -82,9 +93,9 @@ inline std::string fmt(const std::string & fs, const Args & ... args) /** * Values wrapped in this struct are printed in magenta. * - * By default, arguments to `hintfmt` are printed in magenta. To avoid this, + * By default, arguments to `HintFmt` are printed in magenta. To avoid this, * either wrap the argument in `Uncolored` or add a specialization of - * `hintfmt::operator%`. + * `HintFmt::operator%`. */ template struct Magenta @@ -102,7 +113,7 @@ std::ostream & operator<<(std::ostream & out, const Magenta & y) /** * Values wrapped in this class are printed without coloring. * - * By default, arguments to `hintfmt` are printed in magenta (see `Magenta`). + * By default, arguments to `HintFmt` are printed in magenta (see `Magenta`). */ template struct Uncolored @@ -121,65 +132,49 @@ std::ostream & operator<<(std::ostream & out, const Uncolored & y) * A wrapper around `boost::format` which colors interpolated arguments in * magenta by default. */ -class hintfmt +class HintFmt { private: boost::format fmt; public: - /** - * Construct a `hintfmt` from a format string, with values to be - * interpolated later with `%`. - * - * This isn't exposed as a single-argument constructor to avoid - * accidentally constructing `hintfmt`s with user-controlled strings. See - * the note on `fmt` for more information. 
- */ - static hintfmt interpolate(const std::string & formatString) - { - hintfmt result((boost::format(formatString))); - result.fmt.exceptions( - boost::io::all_error_bits ^ - boost::io::too_many_args_bit ^ - boost::io::too_few_args_bit); - return result; - } - /** * Format the given string literally, without interpolating format * placeholders. */ - hintfmt(const std::string & literal) - : hintfmt("%s", Uncolored(literal)) + HintFmt(const std::string & literal) + : HintFmt("%s", Uncolored(literal)) { } /** * Interpolate the given arguments into the format string. */ template - hintfmt(const std::string & format, const Args & ... args) - : fmt(format) - { - formatHelper(*this, args...); - } + HintFmt(const std::string & format, const Args & ... args) + : HintFmt(boost::format(format), args...) + { } - hintfmt(const hintfmt & hf) + HintFmt(const HintFmt & hf) : fmt(hf.fmt) { } - hintfmt(boost::format && fmt) + template + HintFmt(boost::format && fmt, const Args & ... args) : fmt(std::move(fmt)) - { } + { + setExceptions(fmt); + formatHelper(*this, args...); + } template - hintfmt & operator%(const T & value) + HintFmt & operator%(const T & value) { fmt % Magenta(value); return *this; } template - hintfmt & operator%(const Uncolored & value) + HintFmt & operator%(const Uncolored & value) { fmt % value.value; return *this; @@ -191,6 +186,6 @@ public: } }; -std::ostream & operator<<(std::ostream & os, const hintfmt & hf); +std::ostream & operator<<(std::ostream & os, const HintFmt & hf); } diff --git a/src/libutil/serialise.cc b/src/libutil/serialise.cc index 7fc211491..70c16ff0d 100644 --- a/src/libutil/serialise.cc +++ b/src/libutil/serialise.cc @@ -448,7 +448,7 @@ Error readError(Source & source) auto msg = readString(source); ErrorInfo info { .level = level, - .msg = hintfmt(msg), + .msg = HintFmt(msg), }; auto havePos = readNum(source); assert(havePos == 0); @@ -457,7 +457,7 @@ Error readError(Source & source) havePos = readNum(source); assert(havePos == 0); 
info.traces.push_back(Trace { - .hint = hintfmt(readString(source)) + .hint = HintFmt(readString(source)) }); } return Error(std::move(info)); diff --git a/src/nix/daemon.cc b/src/nix/daemon.cc index 4dada8e0e..8afcbe982 100644 --- a/src/nix/daemon.cc +++ b/src/nix/daemon.cc @@ -377,7 +377,7 @@ static void daemonLoop(std::optional forceTrustClientOpt) } catch (Error & error) { auto ei = error.info(); // FIXME: add to trace? - ei.msg = hintfmt("error processing connection: %1%", ei.msg.str()); + ei.msg = HintFmt("error processing connection: %1%", ei.msg.str()); logError(ei); } } diff --git a/src/nix/eval.cc b/src/nix/eval.cc index 2e0837c8e..e6a022e5f 100644 --- a/src/nix/eval.cc +++ b/src/nix/eval.cc @@ -98,7 +98,7 @@ struct CmdEval : MixJSON, InstallableValueCommand, MixReadOnlyOption } catch (Error & e) { e.addTrace( state->positions[attr.pos], - hintfmt("while evaluating the attribute '%s'", name)); + HintFmt("while evaluating the attribute '%s'", name)); throw; } } diff --git a/src/nix/flake.cc b/src/nix/flake.cc index 646e4c831..4504bb22e 100644 --- a/src/nix/flake.cc +++ b/src/nix/flake.cc @@ -411,7 +411,7 @@ struct CmdFlakeCheck : FlakeCommand return storePath; } } catch (Error & e) { - e.addTrace(resolve(pos), hintfmt("while checking the derivation '%s'", attrPath)); + e.addTrace(resolve(pos), HintFmt("while checking the derivation '%s'", attrPath)); reportError(e); } return std::nullopt; @@ -430,7 +430,7 @@ struct CmdFlakeCheck : FlakeCommand } #endif } catch (Error & e) { - e.addTrace(resolve(pos), hintfmt("while checking the app definition '%s'", attrPath)); + e.addTrace(resolve(pos), HintFmt("while checking the app definition '%s'", attrPath)); reportError(e); } }; @@ -454,7 +454,7 @@ struct CmdFlakeCheck : FlakeCommand // FIXME: if we have a 'nixpkgs' input, use it to // evaluate the overlay. 
} catch (Error & e) { - e.addTrace(resolve(pos), hintfmt("while checking the overlay '%s'", attrPath)); + e.addTrace(resolve(pos), HintFmt("while checking the overlay '%s'", attrPath)); reportError(e); } }; @@ -465,7 +465,7 @@ struct CmdFlakeCheck : FlakeCommand fmt("checking NixOS module '%s'", attrPath)); state->forceValue(v, pos); } catch (Error & e) { - e.addTrace(resolve(pos), hintfmt("while checking the NixOS module '%s'", attrPath)); + e.addTrace(resolve(pos), HintFmt("while checking the NixOS module '%s'", attrPath)); reportError(e); } }; @@ -491,7 +491,7 @@ struct CmdFlakeCheck : FlakeCommand } } catch (Error & e) { - e.addTrace(resolve(pos), hintfmt("while checking the Hydra jobset '%s'", attrPath)); + e.addTrace(resolve(pos), HintFmt("while checking the Hydra jobset '%s'", attrPath)); reportError(e); } }; @@ -506,7 +506,7 @@ struct CmdFlakeCheck : FlakeCommand if (!state->isDerivation(*vToplevel)) throw Error("attribute 'config.system.build.toplevel' is not a derivation"); } catch (Error & e) { - e.addTrace(resolve(pos), hintfmt("while checking the NixOS configuration '%s'", attrPath)); + e.addTrace(resolve(pos), HintFmt("while checking the NixOS configuration '%s'", attrPath)); reportError(e); } }; @@ -540,7 +540,7 @@ struct CmdFlakeCheck : FlakeCommand throw Error("template '%s' has unsupported attribute '%s'", attrPath, name); } } catch (Error & e) { - e.addTrace(resolve(pos), hintfmt("while checking the template '%s'", attrPath)); + e.addTrace(resolve(pos), HintFmt("while checking the template '%s'", attrPath)); reportError(e); } }; @@ -554,7 +554,7 @@ struct CmdFlakeCheck : FlakeCommand throw Error("bundler must be a function"); // TODO: check types of inputs/outputs? 
} catch (Error & e) { - e.addTrace(resolve(pos), hintfmt("while checking the template '%s'", attrPath)); + e.addTrace(resolve(pos), HintFmt("while checking the template '%s'", attrPath)); reportError(e); } }; @@ -774,7 +774,7 @@ struct CmdFlakeCheck : FlakeCommand warn("unknown flake output '%s'", name); } catch (Error & e) { - e.addTrace(resolve(pos), hintfmt("while checking flake output '%s'", name)); + e.addTrace(resolve(pos), HintFmt("while checking flake output '%s'", name)); reportError(e); } }); diff --git a/tests/unit/libexpr/error_traces.cc b/tests/unit/libexpr/error_traces.cc index 3cfa2b61b..a899d3113 100644 --- a/tests/unit/libexpr/error_traces.cc +++ b/tests/unit/libexpr/error_traces.cc @@ -31,14 +31,14 @@ namespace nix { } } catch (BaseError & e) { ASSERT_EQ(PrintToString(e.info().msg), - PrintToString(hintfmt("puppy"))); + PrintToString(HintFmt("puppy"))); auto trace = e.info().traces.rbegin(); ASSERT_EQ(e.info().traces.size(), 2); ASSERT_EQ(PrintToString(trace->hint), - PrintToString(hintfmt("doggy"))); + PrintToString(HintFmt("doggy"))); trace++; ASSERT_EQ(PrintToString(trace->hint), - PrintToString(hintfmt("beans"))); + PrintToString(HintFmt("beans"))); throw; } , EvalError @@ -53,6 +53,7 @@ namespace nix { state.error("beans").debugThrow(); } catch (Error & e2) { e.addTrace(state.positions[noPos], "beans2", ""); + //e2.addTrace(state.positions[noPos], "Something", ""); ASSERT_TRUE(e.info().traces.size() == 2); ASSERT_TRUE(e2.info().traces.size() == 0); ASSERT_FALSE(&e.info() == &e2.info()); @@ -73,7 +74,7 @@ namespace nix { ASSERT_EQ(e.info().traces.size(), 1) << "while testing " args << std::endl << e.what(); \ auto trace = e.info().traces.rbegin(); \ ASSERT_EQ(PrintToString(trace->hint), \ - PrintToString(hintfmt("while calling the '%s' builtin", name))); \ + PrintToString(HintFmt("while calling the '%s' builtin", name))); \ throw; \ } \ , type \ @@ -95,7 +96,7 @@ namespace nix { PrintToString(context)); \ ++trace; \ 
ASSERT_EQ(PrintToString(trace->hint), \ - PrintToString(hintfmt("while calling the '%s' builtin", name))); \ + PrintToString(HintFmt("while calling the '%s' builtin", name))); \ throw; \ } \ , type \ @@ -104,48 +105,48 @@ namespace nix { TEST_F(ErrorTraceTest, genericClosure) { ASSERT_TRACE2("genericClosure 1", TypeError, - hintfmt("expected a set but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.genericClosure")); + HintFmt("expected a set but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.genericClosure")); ASSERT_TRACE2("genericClosure {}", TypeError, - hintfmt("attribute '%s' missing", "startSet"), - hintfmt("in the attrset passed as argument to builtins.genericClosure")); + HintFmt("attribute '%s' missing", "startSet"), + HintFmt("in the attrset passed as argument to builtins.genericClosure")); ASSERT_TRACE2("genericClosure { startSet = 1; }", TypeError, - hintfmt("expected a list but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the 'startSet' attribute passed as argument to builtins.genericClosure")); + HintFmt("expected a list but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the 'startSet' attribute passed as argument to builtins.genericClosure")); ASSERT_TRACE2("genericClosure { startSet = [{ key = 1;}]; operator = true; }", TypeError, - hintfmt("expected a function but found %s: %s", "a Boolean", normaltxt(ANSI_CYAN "true" ANSI_NORMAL)), - hintfmt("while evaluating the 'operator' attribute passed as argument to builtins.genericClosure")); + HintFmt("expected a function but found %s: %s", "a Boolean", Uncolored(ANSI_CYAN "true" ANSI_NORMAL)), + HintFmt("while evaluating the 'operator' attribute passed as argument to builtins.genericClosure")); ASSERT_TRACE2("genericClosure { startSet = 
[{ key = 1;}]; operator = item: true; }", TypeError, - hintfmt("expected a list but found %s: %s", "a Boolean", normaltxt(ANSI_CYAN "true" ANSI_NORMAL)), - hintfmt("while evaluating the return value of the `operator` passed to builtins.genericClosure")); + HintFmt("expected a list but found %s: %s", "a Boolean", Uncolored(ANSI_CYAN "true" ANSI_NORMAL)), + HintFmt("while evaluating the return value of the `operator` passed to builtins.genericClosure")); ASSERT_TRACE2("genericClosure { startSet = [{ key = 1;}]; operator = item: [ true ]; }", TypeError, - hintfmt("expected a set but found %s: %s", "a Boolean", normaltxt(ANSI_CYAN "true" ANSI_NORMAL)), - hintfmt("while evaluating one of the elements generated by (or initially passed to) builtins.genericClosure")); + HintFmt("expected a set but found %s: %s", "a Boolean", Uncolored(ANSI_CYAN "true" ANSI_NORMAL)), + HintFmt("while evaluating one of the elements generated by (or initially passed to) builtins.genericClosure")); ASSERT_TRACE2("genericClosure { startSet = [{ key = 1;}]; operator = item: [ {} ]; }", TypeError, - hintfmt("attribute '%s' missing", "key"), - hintfmt("in one of the attrsets generated by (or initially passed to) builtins.genericClosure")); + HintFmt("attribute '%s' missing", "key"), + HintFmt("in one of the attrsets generated by (or initially passed to) builtins.genericClosure")); ASSERT_TRACE2("genericClosure { startSet = [{ key = 1;}]; operator = item: [{ key = ''a''; }]; }", EvalError, - hintfmt("cannot compare %s with %s", "a string", "an integer"), - hintfmt("while comparing the `key` attributes of two genericClosure elements")); + HintFmt("cannot compare %s with %s", "a string", "an integer"), + HintFmt("while comparing the `key` attributes of two genericClosure elements")); ASSERT_TRACE2("genericClosure { startSet = [ true ]; operator = item: [{ key = ''a''; }]; }", TypeError, - hintfmt("expected a set but found %s: %s", "a Boolean", normaltxt(ANSI_CYAN "true" ANSI_NORMAL)), - 
hintfmt("while evaluating one of the elements generated by (or initially passed to) builtins.genericClosure")); + HintFmt("expected a set but found %s: %s", "a Boolean", Uncolored(ANSI_CYAN "true" ANSI_NORMAL)), + HintFmt("while evaluating one of the elements generated by (or initially passed to) builtins.genericClosure")); } @@ -153,32 +154,32 @@ namespace nix { TEST_F(ErrorTraceTest, replaceStrings) { ASSERT_TRACE2("replaceStrings 0 0 {}", TypeError, - hintfmt("expected a list but found %s: %s", "an integer", normaltxt(ANSI_CYAN "0" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.replaceStrings")); + HintFmt("expected a list but found %s: %s", "an integer", Uncolored(ANSI_CYAN "0" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.replaceStrings")); ASSERT_TRACE2("replaceStrings [] 0 {}", TypeError, - hintfmt("expected a list but found %s: %s", "an integer", normaltxt(ANSI_CYAN "0" ANSI_NORMAL)), - hintfmt("while evaluating the second argument passed to builtins.replaceStrings")); + HintFmt("expected a list but found %s: %s", "an integer", Uncolored(ANSI_CYAN "0" ANSI_NORMAL)), + HintFmt("while evaluating the second argument passed to builtins.replaceStrings")); ASSERT_TRACE1("replaceStrings [ 0 ] [] {}", EvalError, - hintfmt("'from' and 'to' arguments passed to builtins.replaceStrings have different lengths")); + HintFmt("'from' and 'to' arguments passed to builtins.replaceStrings have different lengths")); ASSERT_TRACE2("replaceStrings [ 1 ] [ \"new\" ] {}", TypeError, - hintfmt("expected a string but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating one of the strings to replace passed to builtins.replaceStrings")); + HintFmt("expected a string but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating one of the strings to replace passed to builtins.replaceStrings")); ASSERT_TRACE2("replaceStrings [ \"oo\" ] [ 
true ] \"foo\"", TypeError, - hintfmt("expected a string but found %s: %s", "a Boolean", normaltxt(ANSI_CYAN "true" ANSI_NORMAL)), - hintfmt("while evaluating one of the replacement strings passed to builtins.replaceStrings")); + HintFmt("expected a string but found %s: %s", "a Boolean", Uncolored(ANSI_CYAN "true" ANSI_NORMAL)), + HintFmt("while evaluating one of the replacement strings passed to builtins.replaceStrings")); ASSERT_TRACE2("replaceStrings [ \"old\" ] [ \"new\" ] {}", TypeError, - hintfmt("expected a string but found %s: %s", "a set", normaltxt("{ }")), - hintfmt("while evaluating the third argument passed to builtins.replaceStrings")); + HintFmt("expected a string but found %s: %s", "a set", Uncolored("{ }")), + HintFmt("while evaluating the third argument passed to builtins.replaceStrings")); } @@ -242,8 +243,8 @@ namespace nix { TEST_F(ErrorTraceTest, ceil) { ASSERT_TRACE2("ceil \"foo\"", TypeError, - hintfmt("expected a float but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.ceil")); + HintFmt("expected a float but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.ceil")); } @@ -251,8 +252,8 @@ namespace nix { TEST_F(ErrorTraceTest, floor) { ASSERT_TRACE2("floor \"foo\"", TypeError, - hintfmt("expected a float but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.floor")); + HintFmt("expected a float but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.floor")); } @@ -264,8 +265,8 @@ namespace nix { TEST_F(ErrorTraceTest, getEnv) { ASSERT_TRACE2("getEnv [ ]", TypeError, - hintfmt("expected a string but found %s: %s", "a list", normaltxt("[ ]")), - hintfmt("while evaluating the first 
argument passed to builtins.getEnv")); + HintFmt("expected a string but found %s: %s", "a list", Uncolored("[ ]")), + HintFmt("while evaluating the first argument passed to builtins.getEnv")); } @@ -285,8 +286,8 @@ namespace nix { TEST_F(ErrorTraceTest, placeholder) { ASSERT_TRACE2("placeholder []", TypeError, - hintfmt("expected a string but found %s: %s", "a list", normaltxt("[ ]")), - hintfmt("while evaluating the first argument passed to builtins.placeholder")); + HintFmt("expected a string but found %s: %s", "a list", Uncolored("[ ]")), + HintFmt("while evaluating the first argument passed to builtins.placeholder")); } @@ -294,13 +295,13 @@ namespace nix { TEST_F(ErrorTraceTest, toPath) { ASSERT_TRACE2("toPath []", TypeError, - hintfmt("cannot coerce %s to a string: %s", "a list", normaltxt("[ ]")), - hintfmt("while evaluating the first argument passed to builtins.toPath")); + HintFmt("cannot coerce %s to a string: %s", "a list", Uncolored("[ ]")), + HintFmt("while evaluating the first argument passed to builtins.toPath")); ASSERT_TRACE2("toPath \"foo\"", EvalError, - hintfmt("string '%s' doesn't represent an absolute path", "foo"), - hintfmt("while evaluating the first argument passed to builtins.toPath")); + HintFmt("string '%s' doesn't represent an absolute path", "foo"), + HintFmt("while evaluating the first argument passed to builtins.toPath")); } @@ -308,8 +309,8 @@ namespace nix { TEST_F(ErrorTraceTest, storePath) { ASSERT_TRACE2("storePath true", TypeError, - hintfmt("cannot coerce %s to a string: %s", "a Boolean", normaltxt(ANSI_CYAN "true" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to 'builtins.storePath'")); + HintFmt("cannot coerce %s to a string: %s", "a Boolean", Uncolored(ANSI_CYAN "true" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to 'builtins.storePath'")); } @@ -317,13 +318,13 @@ namespace nix { TEST_F(ErrorTraceTest, pathExists) { ASSERT_TRACE2("pathExists []", TypeError, - hintfmt("cannot 
coerce %s to a string: %s", "a list", normaltxt("[ ]")), - hintfmt("while realising the context of a path")); + HintFmt("cannot coerce %s to a string: %s", "a list", Uncolored("[ ]")), + HintFmt("while realising the context of a path")); ASSERT_TRACE2("pathExists \"zorglub\"", EvalError, - hintfmt("string '%s' doesn't represent an absolute path", "zorglub"), - hintfmt("while realising the context of a path")); + HintFmt("string '%s' doesn't represent an absolute path", "zorglub"), + HintFmt("while realising the context of a path")); } @@ -331,8 +332,8 @@ namespace nix { TEST_F(ErrorTraceTest, baseNameOf) { ASSERT_TRACE2("baseNameOf []", TypeError, - hintfmt("cannot coerce %s to a string: %s", "a list", normaltxt("[ ]")), - hintfmt("while evaluating the first argument passed to builtins.baseNameOf")); + HintFmt("cannot coerce %s to a string: %s", "a list", Uncolored("[ ]")), + HintFmt("while evaluating the first argument passed to builtins.baseNameOf")); } @@ -376,30 +377,30 @@ namespace nix { TEST_F(ErrorTraceTest, filterSource) { ASSERT_TRACE2("filterSource [] []", TypeError, - hintfmt("cannot coerce %s to a string: %s", "a list", normaltxt("[ ]")), - hintfmt("while evaluating the second argument (the path to filter) passed to 'builtins.filterSource'")); + HintFmt("cannot coerce %s to a string: %s", "a list", Uncolored("[ ]")), + HintFmt("while evaluating the second argument (the path to filter) passed to 'builtins.filterSource'")); ASSERT_TRACE2("filterSource [] \"foo\"", EvalError, - hintfmt("string '%s' doesn't represent an absolute path", "foo"), - hintfmt("while evaluating the second argument (the path to filter) passed to 'builtins.filterSource'")); + HintFmt("string '%s' doesn't represent an absolute path", "foo"), + HintFmt("while evaluating the second argument (the path to filter) passed to 'builtins.filterSource'")); ASSERT_TRACE2("filterSource [] ./.", TypeError, - hintfmt("expected a function but found %s: %s", "a list", normaltxt("[ ]")), - 
hintfmt("while evaluating the first argument passed to builtins.filterSource")); + HintFmt("expected a function but found %s: %s", "a list", Uncolored("[ ]")), + HintFmt("while evaluating the first argument passed to builtins.filterSource")); // Usupported by store "dummy" // ASSERT_TRACE2("filterSource (_: 1) ./.", // TypeError, - // hintfmt("attempt to call something which is not a function but %s", "an integer"), - // hintfmt("while adding path '/home/layus/projects/nix'")); + // HintFmt("attempt to call something which is not a function but %s", "an integer"), + // HintFmt("while adding path '/home/layus/projects/nix'")); // ASSERT_TRACE2("filterSource (_: _: 1) ./.", // TypeError, - // hintfmt("expected a Boolean but found %s: %s", "an integer", "1"), - // hintfmt("while evaluating the return value of the path filter function")); + // HintFmt("expected a Boolean but found %s: %s", "an integer", "1"), + // HintFmt("while evaluating the return value of the path filter function")); } @@ -411,8 +412,8 @@ namespace nix { TEST_F(ErrorTraceTest, attrNames) { ASSERT_TRACE2("attrNames []", TypeError, - hintfmt("expected a set but found %s: %s", "a list", normaltxt("[ ]")), - hintfmt("while evaluating the argument passed to builtins.attrNames")); + HintFmt("expected a set but found %s: %s", "a list", Uncolored("[ ]")), + HintFmt("while evaluating the argument passed to builtins.attrNames")); } @@ -420,8 +421,8 @@ namespace nix { TEST_F(ErrorTraceTest, attrValues) { ASSERT_TRACE2("attrValues []", TypeError, - hintfmt("expected a set but found %s: %s", "a list", normaltxt("[ ]")), - hintfmt("while evaluating the argument passed to builtins.attrValues")); + HintFmt("expected a set but found %s: %s", "a list", Uncolored("[ ]")), + HintFmt("while evaluating the argument passed to builtins.attrValues")); } @@ -429,18 +430,18 @@ namespace nix { TEST_F(ErrorTraceTest, getAttr) { ASSERT_TRACE2("getAttr [] []", TypeError, - hintfmt("expected a string but found %s: %s", "a list", 
normaltxt("[ ]")), - hintfmt("while evaluating the first argument passed to builtins.getAttr")); + HintFmt("expected a string but found %s: %s", "a list", Uncolored("[ ]")), + HintFmt("while evaluating the first argument passed to builtins.getAttr")); ASSERT_TRACE2("getAttr \"foo\" []", TypeError, - hintfmt("expected a set but found %s: %s", "a list", normaltxt("[ ]")), - hintfmt("while evaluating the second argument passed to builtins.getAttr")); + HintFmt("expected a set but found %s: %s", "a list", Uncolored("[ ]")), + HintFmt("while evaluating the second argument passed to builtins.getAttr")); ASSERT_TRACE2("getAttr \"foo\" {}", TypeError, - hintfmt("attribute '%s' missing", "foo"), - hintfmt("in the attribute set under consideration")); + HintFmt("attribute '%s' missing", "foo"), + HintFmt("in the attribute set under consideration")); } @@ -452,13 +453,13 @@ namespace nix { TEST_F(ErrorTraceTest, hasAttr) { ASSERT_TRACE2("hasAttr [] []", TypeError, - hintfmt("expected a string but found %s: %s", "a list", normaltxt("[ ]")), - hintfmt("while evaluating the first argument passed to builtins.hasAttr")); + HintFmt("expected a string but found %s: %s", "a list", Uncolored("[ ]")), + HintFmt("while evaluating the first argument passed to builtins.hasAttr")); ASSERT_TRACE2("hasAttr \"foo\" []", TypeError, - hintfmt("expected a set but found %s: %s", "a list", normaltxt("[ ]")), - hintfmt("while evaluating the second argument passed to builtins.hasAttr")); + HintFmt("expected a set but found %s: %s", "a list", Uncolored("[ ]")), + HintFmt("while evaluating the second argument passed to builtins.hasAttr")); } @@ -470,18 +471,18 @@ namespace nix { TEST_F(ErrorTraceTest, removeAttrs) { ASSERT_TRACE2("removeAttrs \"\" \"\"", TypeError, - hintfmt("expected a set but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"\"" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.removeAttrs")); + HintFmt("expected a set but found %s: %s", "a string", 
Uncolored(ANSI_MAGENTA "\"\"" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.removeAttrs")); ASSERT_TRACE2("removeAttrs \"\" [ 1 ]", TypeError, - hintfmt("expected a set but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"\"" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.removeAttrs")); + HintFmt("expected a set but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"\"" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.removeAttrs")); ASSERT_TRACE2("removeAttrs \"\" [ \"1\" ]", TypeError, - hintfmt("expected a set but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"\"" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.removeAttrs")); + HintFmt("expected a set but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"\"" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.removeAttrs")); } @@ -489,28 +490,28 @@ namespace nix { TEST_F(ErrorTraceTest, listToAttrs) { ASSERT_TRACE2("listToAttrs 1", TypeError, - hintfmt("expected a list but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the argument passed to builtins.listToAttrs")); + HintFmt("expected a list but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the argument passed to builtins.listToAttrs")); ASSERT_TRACE2("listToAttrs [ 1 ]", TypeError, - hintfmt("expected a set but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating an element of the list passed to builtins.listToAttrs")); + HintFmt("expected a set but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating an element of the list passed to builtins.listToAttrs")); ASSERT_TRACE2("listToAttrs [ {} ]", TypeError, - hintfmt("attribute '%s' missing", "name"), - hintfmt("in a {name=...; value=...;} pair")); + 
HintFmt("attribute '%s' missing", "name"), + HintFmt("in a {name=...; value=...;} pair")); ASSERT_TRACE2("listToAttrs [ { name = 1; } ]", TypeError, - hintfmt("expected a string but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the `name` attribute of an element of the list passed to builtins.listToAttrs")); + HintFmt("expected a string but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the `name` attribute of an element of the list passed to builtins.listToAttrs")); ASSERT_TRACE2("listToAttrs [ { name = \"foo\"; } ]", TypeError, - hintfmt("attribute '%s' missing", "value"), - hintfmt("in a {name=...; value=...;} pair")); + HintFmt("attribute '%s' missing", "value"), + HintFmt("in a {name=...; value=...;} pair")); } @@ -518,13 +519,13 @@ namespace nix { TEST_F(ErrorTraceTest, intersectAttrs) { ASSERT_TRACE2("intersectAttrs [] []", TypeError, - hintfmt("expected a set but found %s: %s", "a list", normaltxt("[ ]")), - hintfmt("while evaluating the first argument passed to builtins.intersectAttrs")); + HintFmt("expected a set but found %s: %s", "a list", Uncolored("[ ]")), + HintFmt("while evaluating the first argument passed to builtins.intersectAttrs")); ASSERT_TRACE2("intersectAttrs {} []", TypeError, - hintfmt("expected a set but found %s: %s", "a list", normaltxt("[ ]")), - hintfmt("while evaluating the second argument passed to builtins.intersectAttrs")); + HintFmt("expected a set but found %s: %s", "a list", Uncolored("[ ]")), + HintFmt("while evaluating the second argument passed to builtins.intersectAttrs")); } @@ -532,23 +533,23 @@ namespace nix { TEST_F(ErrorTraceTest, catAttrs) { ASSERT_TRACE2("catAttrs [] {}", TypeError, - hintfmt("expected a string but found %s: %s", "a list", normaltxt("[ ]")), - hintfmt("while evaluating the first argument passed to builtins.catAttrs")); + HintFmt("expected a string but found %s: %s", "a list", Uncolored("[ ]")), + 
HintFmt("while evaluating the first argument passed to builtins.catAttrs")); ASSERT_TRACE2("catAttrs \"foo\" {}", TypeError, - hintfmt("expected a list but found %s: %s", "a set", normaltxt("{ }")), - hintfmt("while evaluating the second argument passed to builtins.catAttrs")); + HintFmt("expected a list but found %s: %s", "a set", Uncolored("{ }")), + HintFmt("while evaluating the second argument passed to builtins.catAttrs")); ASSERT_TRACE2("catAttrs \"foo\" [ 1 ]", TypeError, - hintfmt("expected a set but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating an element in the list passed as second argument to builtins.catAttrs")); + HintFmt("expected a set but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating an element in the list passed as second argument to builtins.catAttrs")); ASSERT_TRACE2("catAttrs \"foo\" [ { foo = 1; } 1 { bar = 5;} ]", TypeError, - hintfmt("expected a set but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating an element in the list passed as second argument to builtins.catAttrs")); + HintFmt("expected a set but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating an element in the list passed as second argument to builtins.catAttrs")); } @@ -556,7 +557,7 @@ namespace nix { TEST_F(ErrorTraceTest, functionArgs) { ASSERT_TRACE1("functionArgs {}", TypeError, - hintfmt("'functionArgs' requires a function")); + HintFmt("'functionArgs' requires a function")); } @@ -564,24 +565,24 @@ namespace nix { TEST_F(ErrorTraceTest, mapAttrs) { ASSERT_TRACE2("mapAttrs [] []", TypeError, - hintfmt("expected a set but found %s: %s", "a list", normaltxt("[ ]")), - hintfmt("while evaluating the second argument passed to builtins.mapAttrs")); + HintFmt("expected a set but found %s: %s", "a list", Uncolored("[ ]")), + HintFmt("while evaluating the second argument passed to 
builtins.mapAttrs")); // XXX: defered // ASSERT_TRACE2("mapAttrs \"\" { foo.bar = 1; }", // TypeError, - // hintfmt("attempt to call something which is not a function but %s", "a string"), - // hintfmt("while evaluating the attribute 'foo'")); + // HintFmt("attempt to call something which is not a function but %s", "a string"), + // HintFmt("while evaluating the attribute 'foo'")); // ASSERT_TRACE2("mapAttrs (x: x + \"1\") { foo.bar = 1; }", // TypeError, - // hintfmt("attempt to call something which is not a function but %s", "a string"), - // hintfmt("while evaluating the attribute 'foo'")); + // HintFmt("attempt to call something which is not a function but %s", "a string"), + // HintFmt("while evaluating the attribute 'foo'")); // ASSERT_TRACE2("mapAttrs (x: y: x + 1) { foo.bar = 1; }", // TypeError, - // hintfmt("cannot coerce %s to a string", "an integer"), - // hintfmt("while evaluating a path segment")); + // HintFmt("cannot coerce %s to a string", "an integer"), + // HintFmt("while evaluating a path segment")); } @@ -589,27 +590,27 @@ namespace nix { TEST_F(ErrorTraceTest, zipAttrsWith) { ASSERT_TRACE2("zipAttrsWith [] [ 1 ]", TypeError, - hintfmt("expected a function but found %s: %s", "a list", normaltxt("[ ]")), - hintfmt("while evaluating the first argument passed to builtins.zipAttrsWith")); + HintFmt("expected a function but found %s: %s", "a list", Uncolored("[ ]")), + HintFmt("while evaluating the first argument passed to builtins.zipAttrsWith")); ASSERT_TRACE2("zipAttrsWith (_: 1) [ 1 ]", TypeError, - hintfmt("expected a set but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating a value of the list passed as second argument to builtins.zipAttrsWith")); + HintFmt("expected a set but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating a value of the list passed as second argument to builtins.zipAttrsWith")); // XXX: How to properly tell that the fucntion takes 
two arguments ? // The same question also applies to sort, and maybe others. // Due to lazyness, we only create a thunk, and it fails later on. // ASSERT_TRACE2("zipAttrsWith (_: 1) [ { foo = 1; } ]", // TypeError, - // hintfmt("attempt to call something which is not a function but %s", "an integer"), - // hintfmt("while evaluating the attribute 'foo'")); + // HintFmt("attempt to call something which is not a function but %s", "an integer"), + // HintFmt("while evaluating the attribute 'foo'")); // XXX: Also deferred deeply // ASSERT_TRACE2("zipAttrsWith (a: b: a + b) [ { foo = 1; } { foo = 2; } ]", // TypeError, - // hintfmt("cannot coerce %s to a string", "a list"), - // hintfmt("while evaluating a path segment")); + // HintFmt("cannot coerce %s to a string", "a list"), + // HintFmt("while evaluating a path segment")); } @@ -621,16 +622,16 @@ namespace nix { TEST_F(ErrorTraceTest, elemAt) { ASSERT_TRACE2("elemAt \"foo\" (-1)", TypeError, - hintfmt("expected a list but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.elemAt")); + HintFmt("expected a list but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.elemAt")); ASSERT_TRACE1("elemAt [] (-1)", Error, - hintfmt("list index %d is out of bounds", -1)); + HintFmt("list index %d is out of bounds", -1)); ASSERT_TRACE1("elemAt [\"foo\"] 3", Error, - hintfmt("list index %d is out of bounds", 3)); + HintFmt("list index %d is out of bounds", 3)); } @@ -638,12 +639,12 @@ namespace nix { TEST_F(ErrorTraceTest, head) { ASSERT_TRACE2("head 1", TypeError, - hintfmt("expected a list but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.elemAt")); + HintFmt("expected a list but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while 
evaluating the first argument passed to builtins.elemAt")); ASSERT_TRACE1("head []", Error, - hintfmt("list index %d is out of bounds", 0)); + HintFmt("list index %d is out of bounds", 0)); } @@ -651,12 +652,12 @@ namespace nix { TEST_F(ErrorTraceTest, tail) { ASSERT_TRACE2("tail 1", TypeError, - hintfmt("expected a list but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.tail")); + HintFmt("expected a list but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.tail")); ASSERT_TRACE1("tail []", Error, - hintfmt("'tail' called on an empty list")); + HintFmt("'tail' called on an empty list")); } @@ -664,13 +665,13 @@ namespace nix { TEST_F(ErrorTraceTest, map) { ASSERT_TRACE2("map 1 \"foo\"", TypeError, - hintfmt("expected a list but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - hintfmt("while evaluating the second argument passed to builtins.map")); + HintFmt("expected a list but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the second argument passed to builtins.map")); ASSERT_TRACE2("map 1 [ 1 ]", TypeError, - hintfmt("expected a function but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.map")); + HintFmt("expected a function but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.map")); } @@ -678,18 +679,18 @@ namespace nix { TEST_F(ErrorTraceTest, filter) { ASSERT_TRACE2("filter 1 \"foo\"", TypeError, - hintfmt("expected a list but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - hintfmt("while evaluating the second argument passed to builtins.filter")); + HintFmt("expected a list but found %s: %s", "a string", 
Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the second argument passed to builtins.filter")); ASSERT_TRACE2("filter 1 [ \"foo\" ]", TypeError, - hintfmt("expected a function but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.filter")); + HintFmt("expected a function but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.filter")); ASSERT_TRACE2("filter (_: 5) [ \"foo\" ]", TypeError, - hintfmt("expected a Boolean but found %s: %s", "an integer", normaltxt(ANSI_CYAN "5" ANSI_NORMAL)), - hintfmt("while evaluating the return value of the filtering function passed to builtins.filter")); + HintFmt("expected a Boolean but found %s: %s", "an integer", Uncolored(ANSI_CYAN "5" ANSI_NORMAL)), + HintFmt("while evaluating the return value of the filtering function passed to builtins.filter")); } @@ -697,8 +698,8 @@ namespace nix { TEST_F(ErrorTraceTest, elem) { ASSERT_TRACE2("elem 1 \"foo\"", TypeError, - hintfmt("expected a list but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - hintfmt("while evaluating the second argument passed to builtins.elem")); + HintFmt("expected a list but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the second argument passed to builtins.elem")); } @@ -706,18 +707,18 @@ namespace nix { TEST_F(ErrorTraceTest, concatLists) { ASSERT_TRACE2("concatLists 1", TypeError, - hintfmt("expected a list but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.concatLists")); + HintFmt("expected a list but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.concatLists")); ASSERT_TRACE2("concatLists [ 1 ]", TypeError, 
- hintfmt("expected a list but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating a value of the list passed to builtins.concatLists")); + HintFmt("expected a list but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating a value of the list passed to builtins.concatLists")); ASSERT_TRACE2("concatLists [ [1] \"foo\" ]", TypeError, - hintfmt("expected a list but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - hintfmt("while evaluating a value of the list passed to builtins.concatLists")); + HintFmt("expected a list but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating a value of the list passed to builtins.concatLists")); } @@ -725,13 +726,13 @@ namespace nix { TEST_F(ErrorTraceTest, length) { ASSERT_TRACE2("length 1", TypeError, - hintfmt("expected a list but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.length")); + HintFmt("expected a list but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.length")); ASSERT_TRACE2("length \"foo\"", TypeError, - hintfmt("expected a list but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.length")); + HintFmt("expected a list but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.length")); } @@ -739,22 +740,22 @@ namespace nix { TEST_F(ErrorTraceTest, foldlPrime) { ASSERT_TRACE2("foldl' 1 \"foo\" true", TypeError, - hintfmt("expected a function but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.foldlStrict")); + 
HintFmt("expected a function but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.foldlStrict")); ASSERT_TRACE2("foldl' (_: 1) \"foo\" true", TypeError, - hintfmt("expected a list but found %s: %s", "a Boolean", normaltxt(ANSI_CYAN "true" ANSI_NORMAL)), - hintfmt("while evaluating the third argument passed to builtins.foldlStrict")); + HintFmt("expected a list but found %s: %s", "a Boolean", Uncolored(ANSI_CYAN "true" ANSI_NORMAL)), + HintFmt("while evaluating the third argument passed to builtins.foldlStrict")); ASSERT_TRACE1("foldl' (_: 1) \"foo\" [ true ]", TypeError, - hintfmt("attempt to call something which is not a function but %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL))); + HintFmt("attempt to call something which is not a function but %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL))); ASSERT_TRACE2("foldl' (a: b: a && b) \"foo\" [ true ]", TypeError, - hintfmt("expected a Boolean but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - hintfmt("in the left operand of the AND (&&) operator")); + HintFmt("expected a Boolean but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("in the left operand of the AND (&&) operator")); } @@ -762,18 +763,18 @@ namespace nix { TEST_F(ErrorTraceTest, any) { ASSERT_TRACE2("any 1 \"foo\"", TypeError, - hintfmt("expected a function but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.any")); + HintFmt("expected a function but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.any")); ASSERT_TRACE2("any (_: 1) \"foo\"", TypeError, - hintfmt("expected a list but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - hintfmt("while evaluating the second argument passed 
to builtins.any")); + HintFmt("expected a list but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the second argument passed to builtins.any")); ASSERT_TRACE2("any (_: 1) [ \"foo\" ]", TypeError, - hintfmt("expected a Boolean but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the return value of the function passed to builtins.any")); + HintFmt("expected a Boolean but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the return value of the function passed to builtins.any")); } @@ -781,18 +782,18 @@ namespace nix { TEST_F(ErrorTraceTest, all) { ASSERT_TRACE2("all 1 \"foo\"", TypeError, - hintfmt("expected a function but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.all")); + HintFmt("expected a function but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.all")); ASSERT_TRACE2("all (_: 1) \"foo\"", TypeError, - hintfmt("expected a list but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - hintfmt("while evaluating the second argument passed to builtins.all")); + HintFmt("expected a list but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the second argument passed to builtins.all")); ASSERT_TRACE2("all (_: 1) [ \"foo\" ]", TypeError, - hintfmt("expected a Boolean but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the return value of the function passed to builtins.all")); + HintFmt("expected a Boolean but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the return value of the function passed to builtins.all")); } @@ -800,23 +801,23 @@ namespace nix { 
TEST_F(ErrorTraceTest, genList) { ASSERT_TRACE2("genList 1 \"foo\"", TypeError, - hintfmt("expected an integer but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - hintfmt("while evaluating the second argument passed to builtins.genList")); + HintFmt("expected an integer but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the second argument passed to builtins.genList")); ASSERT_TRACE2("genList 1 2", TypeError, - hintfmt("expected a function but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.genList", "an integer")); + HintFmt("expected a function but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.genList", "an integer")); // XXX: defered // ASSERT_TRACE2("genList (x: x + \"foo\") 2 #TODO", // TypeError, - // hintfmt("cannot add %s to an integer", "a string"), - // hintfmt("while evaluating anonymous lambda")); + // HintFmt("cannot add %s to an integer", "a string"), + // HintFmt("while evaluating anonymous lambda")); ASSERT_TRACE1("genList false (-3)", EvalError, - hintfmt("cannot create list of size %d", -3)); + HintFmt("cannot create list of size %d", -3)); } @@ -824,31 +825,31 @@ namespace nix { TEST_F(ErrorTraceTest, sort) { ASSERT_TRACE2("sort 1 \"foo\"", TypeError, - hintfmt("expected a list but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - hintfmt("while evaluating the second argument passed to builtins.sort")); + HintFmt("expected a list but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the second argument passed to builtins.sort")); ASSERT_TRACE2("sort 1 [ \"foo\" ]", TypeError, - hintfmt("expected a function but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the first 
argument passed to builtins.sort")); + HintFmt("expected a function but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.sort")); ASSERT_TRACE1("sort (_: 1) [ \"foo\" \"bar\" ]", TypeError, - hintfmt("attempt to call something which is not a function but %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL))); + HintFmt("attempt to call something which is not a function but %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL))); ASSERT_TRACE2("sort (_: _: 1) [ \"foo\" \"bar\" ]", TypeError, - hintfmt("expected a Boolean but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the return value of the sorting function passed to builtins.sort")); + HintFmt("expected a Boolean but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the return value of the sorting function passed to builtins.sort")); // XXX: Trace too deep, need better asserts // ASSERT_TRACE1("sort (a: b: a <= b) [ \"foo\" {} ] # TODO", // TypeError, - // hintfmt("cannot compare %s with %s", "a string", "a set")); + // HintFmt("cannot compare %s with %s", "a string", "a set")); // ASSERT_TRACE1("sort (a: b: a <= b) [ {} {} ] # TODO", // TypeError, - // hintfmt("cannot compare %s with %s; values of that type are incomparable", "a set", "a set")); + // HintFmt("cannot compare %s with %s; values of that type are incomparable", "a set", "a set")); } @@ -856,18 +857,18 @@ namespace nix { TEST_F(ErrorTraceTest, partition) { ASSERT_TRACE2("partition 1 \"foo\"", TypeError, - hintfmt("expected a function but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.partition")); + HintFmt("expected a function but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to 
builtins.partition")); ASSERT_TRACE2("partition (_: 1) \"foo\"", TypeError, - hintfmt("expected a list but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - hintfmt("while evaluating the second argument passed to builtins.partition")); + HintFmt("expected a list but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the second argument passed to builtins.partition")); ASSERT_TRACE2("partition (_: 1) [ \"foo\" ]", TypeError, - hintfmt("expected a Boolean but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the return value of the partition function passed to builtins.partition")); + HintFmt("expected a Boolean but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the return value of the partition function passed to builtins.partition")); } @@ -875,18 +876,18 @@ namespace nix { TEST_F(ErrorTraceTest, groupBy) { ASSERT_TRACE2("groupBy 1 \"foo\"", TypeError, - hintfmt("expected a function but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.groupBy")); + HintFmt("expected a function but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.groupBy")); ASSERT_TRACE2("groupBy (_: 1) \"foo\"", TypeError, - hintfmt("expected a list but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - hintfmt("while evaluating the second argument passed to builtins.groupBy")); + HintFmt("expected a list but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the second argument passed to builtins.groupBy")); ASSERT_TRACE2("groupBy (x: x) [ \"foo\" \"bar\" 1 ]", TypeError, - hintfmt("expected a string but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - 
hintfmt("while evaluating the return value of the grouping function passed to builtins.groupBy")); + HintFmt("expected a string but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the return value of the grouping function passed to builtins.groupBy")); } @@ -894,23 +895,23 @@ namespace nix { TEST_F(ErrorTraceTest, concatMap) { ASSERT_TRACE2("concatMap 1 \"foo\"", TypeError, - hintfmt("expected a function but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.concatMap")); + HintFmt("expected a function but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.concatMap")); ASSERT_TRACE2("concatMap (x: 1) \"foo\"", TypeError, - hintfmt("expected a list but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - hintfmt("while evaluating the second argument passed to builtins.concatMap")); + HintFmt("expected a list but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the second argument passed to builtins.concatMap")); ASSERT_TRACE2("concatMap (x: 1) [ \"foo\" ] # TODO", TypeError, - hintfmt("expected a list but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the return value of the function passed to builtins.concatMap")); + HintFmt("expected a list but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the return value of the function passed to builtins.concatMap")); ASSERT_TRACE2("concatMap (x: \"foo\") [ 1 2 ] # TODO", TypeError, - hintfmt("expected a list but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - hintfmt("while evaluating the return value of the function passed to builtins.concatMap")); + HintFmt("expected a list but found %s: %s", "a string", 
Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the return value of the function passed to builtins.concatMap")); } @@ -918,13 +919,13 @@ namespace nix { TEST_F(ErrorTraceTest, add) { ASSERT_TRACE2("add \"foo\" 1", TypeError, - hintfmt("expected an integer but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - hintfmt("while evaluating the first argument of the addition")); + HintFmt("expected an integer but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the first argument of the addition")); ASSERT_TRACE2("add 1 \"foo\"", TypeError, - hintfmt("expected an integer but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - hintfmt("while evaluating the second argument of the addition")); + HintFmt("expected an integer but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the second argument of the addition")); } @@ -932,13 +933,13 @@ namespace nix { TEST_F(ErrorTraceTest, sub) { ASSERT_TRACE2("sub \"foo\" 1", TypeError, - hintfmt("expected an integer but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - hintfmt("while evaluating the first argument of the subtraction")); + HintFmt("expected an integer but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the first argument of the subtraction")); ASSERT_TRACE2("sub 1 \"foo\"", TypeError, - hintfmt("expected an integer but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - hintfmt("while evaluating the second argument of the subtraction")); + HintFmt("expected an integer but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the second argument of the subtraction")); } @@ -946,13 +947,13 @@ namespace nix { TEST_F(ErrorTraceTest, mul) { ASSERT_TRACE2("mul \"foo\" 1", TypeError, - 
hintfmt("expected an integer but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - hintfmt("while evaluating the first argument of the multiplication")); + HintFmt("expected an integer but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the first argument of the multiplication")); ASSERT_TRACE2("mul 1 \"foo\"", TypeError, - hintfmt("expected an integer but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - hintfmt("while evaluating the second argument of the multiplication")); + HintFmt("expected an integer but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the second argument of the multiplication")); } @@ -960,17 +961,17 @@ namespace nix { TEST_F(ErrorTraceTest, div) { ASSERT_TRACE2("div \"foo\" 1 # TODO: an integer was expected -> a number", TypeError, - hintfmt("expected an integer but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - hintfmt("while evaluating the first operand of the division")); + HintFmt("expected an integer but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the first operand of the division")); ASSERT_TRACE2("div 1 \"foo\"", TypeError, - hintfmt("expected a float but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - hintfmt("while evaluating the second operand of the division")); + HintFmt("expected a float but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the second operand of the division")); ASSERT_TRACE1("div \"foo\" 0", EvalError, - hintfmt("division by zero")); + HintFmt("division by zero")); } @@ -978,13 +979,13 @@ namespace nix { TEST_F(ErrorTraceTest, bitAnd) { ASSERT_TRACE2("bitAnd 1.1 2", TypeError, - hintfmt("expected an integer but found %s: %s", "a float", normaltxt(ANSI_CYAN "1.1" ANSI_NORMAL)), - 
hintfmt("while evaluating the first argument passed to builtins.bitAnd")); + HintFmt("expected an integer but found %s: %s", "a float", Uncolored(ANSI_CYAN "1.1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.bitAnd")); ASSERT_TRACE2("bitAnd 1 2.2", TypeError, - hintfmt("expected an integer but found %s: %s", "a float", normaltxt(ANSI_CYAN "2.2" ANSI_NORMAL)), - hintfmt("while evaluating the second argument passed to builtins.bitAnd")); + HintFmt("expected an integer but found %s: %s", "a float", Uncolored(ANSI_CYAN "2.2" ANSI_NORMAL)), + HintFmt("while evaluating the second argument passed to builtins.bitAnd")); } @@ -992,13 +993,13 @@ namespace nix { TEST_F(ErrorTraceTest, bitOr) { ASSERT_TRACE2("bitOr 1.1 2", TypeError, - hintfmt("expected an integer but found %s: %s", "a float", normaltxt(ANSI_CYAN "1.1" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.bitOr")); + HintFmt("expected an integer but found %s: %s", "a float", Uncolored(ANSI_CYAN "1.1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.bitOr")); ASSERT_TRACE2("bitOr 1 2.2", TypeError, - hintfmt("expected an integer but found %s: %s", "a float", normaltxt(ANSI_CYAN "2.2" ANSI_NORMAL)), - hintfmt("while evaluating the second argument passed to builtins.bitOr")); + HintFmt("expected an integer but found %s: %s", "a float", Uncolored(ANSI_CYAN "2.2" ANSI_NORMAL)), + HintFmt("while evaluating the second argument passed to builtins.bitOr")); } @@ -1006,13 +1007,13 @@ namespace nix { TEST_F(ErrorTraceTest, bitXor) { ASSERT_TRACE2("bitXor 1.1 2", TypeError, - hintfmt("expected an integer but found %s: %s", "a float", normaltxt(ANSI_CYAN "1.1" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.bitXor")); + HintFmt("expected an integer but found %s: %s", "a float", Uncolored(ANSI_CYAN "1.1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.bitXor")); 
ASSERT_TRACE2("bitXor 1 2.2", TypeError, - hintfmt("expected an integer but found %s: %s", "a float", normaltxt(ANSI_CYAN "2.2" ANSI_NORMAL)), - hintfmt("while evaluating the second argument passed to builtins.bitXor")); + HintFmt("expected an integer but found %s: %s", "a float", Uncolored(ANSI_CYAN "2.2" ANSI_NORMAL)), + HintFmt("while evaluating the second argument passed to builtins.bitXor")); } @@ -1020,16 +1021,16 @@ namespace nix { TEST_F(ErrorTraceTest, lessThan) { ASSERT_TRACE1("lessThan 1 \"foo\"", EvalError, - hintfmt("cannot compare %s with %s", "an integer", "a string")); + HintFmt("cannot compare %s with %s", "an integer", "a string")); ASSERT_TRACE1("lessThan {} {}", EvalError, - hintfmt("cannot compare %s with %s; values of that type are incomparable", "a set", "a set")); + HintFmt("cannot compare %s with %s; values of that type are incomparable", "a set", "a set")); ASSERT_TRACE2("lessThan [ 1 2 ] [ \"foo\" ]", EvalError, - hintfmt("cannot compare %s with %s", "an integer", "a string"), - hintfmt("while comparing two list elements")); + HintFmt("cannot compare %s with %s", "an integer", "a string"), + HintFmt("while comparing two list elements")); } @@ -1037,8 +1038,8 @@ namespace nix { TEST_F(ErrorTraceTest, toString) { ASSERT_TRACE2("toString { a = 1; }", TypeError, - hintfmt("cannot coerce %s to a string: %s", "a set", normaltxt("{ a = " ANSI_CYAN "1" ANSI_NORMAL "; }")), - hintfmt("while evaluating the first argument passed to builtins.toString")); + HintFmt("cannot coerce %s to a string: %s", "a set", Uncolored("{ a = " ANSI_CYAN "1" ANSI_NORMAL "; }")), + HintFmt("while evaluating the first argument passed to builtins.toString")); } @@ -1046,22 +1047,22 @@ namespace nix { TEST_F(ErrorTraceTest, substring) { ASSERT_TRACE2("substring {} \"foo\" true", TypeError, - hintfmt("expected an integer but found %s: %s", "a set", normaltxt("{ }")), - hintfmt("while evaluating the first argument (the start offset) passed to builtins.substring")); + 
HintFmt("expected an integer but found %s: %s", "a set", Uncolored("{ }")), + HintFmt("while evaluating the first argument (the start offset) passed to builtins.substring")); ASSERT_TRACE2("substring 3 \"foo\" true", TypeError, - hintfmt("expected an integer but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - hintfmt("while evaluating the second argument (the substring length) passed to builtins.substring")); + HintFmt("expected an integer but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the second argument (the substring length) passed to builtins.substring")); ASSERT_TRACE2("substring 0 3 {}", TypeError, - hintfmt("cannot coerce %s to a string: %s", "a set", normaltxt("{ }")), - hintfmt("while evaluating the third argument (the string) passed to builtins.substring")); + HintFmt("cannot coerce %s to a string: %s", "a set", Uncolored("{ }")), + HintFmt("while evaluating the third argument (the string) passed to builtins.substring")); ASSERT_TRACE1("substring (-3) 3 \"sometext\"", EvalError, - hintfmt("negative start position in 'substring'")); + HintFmt("negative start position in 'substring'")); } @@ -1069,8 +1070,8 @@ namespace nix { TEST_F(ErrorTraceTest, stringLength) { ASSERT_TRACE2("stringLength {} # TODO: context is missing ???", TypeError, - hintfmt("cannot coerce %s to a string: %s", "a set", normaltxt("{ }")), - hintfmt("while evaluating the argument passed to builtins.stringLength")); + HintFmt("cannot coerce %s to a string: %s", "a set", Uncolored("{ }")), + HintFmt("while evaluating the argument passed to builtins.stringLength")); } @@ -1078,17 +1079,17 @@ namespace nix { TEST_F(ErrorTraceTest, hashString) { ASSERT_TRACE2("hashString 1 {}", TypeError, - hintfmt("expected a string but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.hashString")); + HintFmt("expected a string but 
found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.hashString")); ASSERT_TRACE1("hashString \"foo\" \"content\"", UsageError, - hintfmt("unknown hash algorithm '%s', expect 'md5', 'sha1', 'sha256', or 'sha512'", "foo")); + HintFmt("unknown hash algorithm '%s', expect 'md5', 'sha1', 'sha256', or 'sha512'", "foo")); ASSERT_TRACE2("hashString \"sha256\" {}", TypeError, - hintfmt("expected a string but found %s: %s", "a set", normaltxt("{ }")), - hintfmt("while evaluating the second argument passed to builtins.hashString")); + HintFmt("expected a string but found %s: %s", "a set", Uncolored("{ }")), + HintFmt("while evaluating the second argument passed to builtins.hashString")); } @@ -1096,17 +1097,17 @@ namespace nix { TEST_F(ErrorTraceTest, match) { ASSERT_TRACE2("match 1 {}", TypeError, - hintfmt("expected a string but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.match")); + HintFmt("expected a string but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.match")); ASSERT_TRACE2("match \"foo\" {}", TypeError, - hintfmt("expected a string but found %s: %s", "a set", normaltxt("{ }")), - hintfmt("while evaluating the second argument passed to builtins.match")); + HintFmt("expected a string but found %s: %s", "a set", Uncolored("{ }")), + HintFmt("while evaluating the second argument passed to builtins.match")); ASSERT_TRACE1("match \"(.*\" \"\"", EvalError, - hintfmt("invalid regular expression '%s'", "(.*")); + HintFmt("invalid regular expression '%s'", "(.*")); } @@ -1114,17 +1115,17 @@ namespace nix { TEST_F(ErrorTraceTest, split) { ASSERT_TRACE2("split 1 {}", TypeError, - hintfmt("expected a string but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the first 
argument passed to builtins.split")); + HintFmt("expected a string but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.split")); ASSERT_TRACE2("split \"foo\" {}", TypeError, - hintfmt("expected a string but found %s: %s", "a set", normaltxt("{ }")), - hintfmt("while evaluating the second argument passed to builtins.split")); + HintFmt("expected a string but found %s: %s", "a set", Uncolored("{ }")), + HintFmt("while evaluating the second argument passed to builtins.split")); ASSERT_TRACE1("split \"f(o*o\" \"1foo2\"", EvalError, - hintfmt("invalid regular expression '%s'", "f(o*o")); + HintFmt("invalid regular expression '%s'", "f(o*o")); } @@ -1132,18 +1133,18 @@ namespace nix { TEST_F(ErrorTraceTest, concatStringsSep) { ASSERT_TRACE2("concatStringsSep 1 {}", TypeError, - hintfmt("expected a string but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the first argument (the separator string) passed to builtins.concatStringsSep")); + HintFmt("expected a string but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument (the separator string) passed to builtins.concatStringsSep")); ASSERT_TRACE2("concatStringsSep \"foo\" {}", TypeError, - hintfmt("expected a list but found %s: %s", "a set", normaltxt("{ }")), - hintfmt("while evaluating the second argument (the list of strings to concat) passed to builtins.concatStringsSep")); + HintFmt("expected a list but found %s: %s", "a set", Uncolored("{ }")), + HintFmt("while evaluating the second argument (the list of strings to concat) passed to builtins.concatStringsSep")); ASSERT_TRACE2("concatStringsSep \"foo\" [ 1 2 {} ] # TODO: coerce to string is buggy", TypeError, - hintfmt("cannot coerce %s to a string: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating one element of the list of strings to 
concat passed to builtins.concatStringsSep")); + HintFmt("cannot coerce %s to a string: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating one element of the list of strings to concat passed to builtins.concatStringsSep")); } @@ -1151,8 +1152,8 @@ namespace nix { TEST_F(ErrorTraceTest, parseDrvName) { ASSERT_TRACE2("parseDrvName 1", TypeError, - hintfmt("expected a string but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.parseDrvName")); + HintFmt("expected a string but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.parseDrvName")); } @@ -1160,13 +1161,13 @@ namespace nix { TEST_F(ErrorTraceTest, compareVersions) { ASSERT_TRACE2("compareVersions 1 {}", TypeError, - hintfmt("expected a string but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.compareVersions")); + HintFmt("expected a string but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.compareVersions")); ASSERT_TRACE2("compareVersions \"abd\" {}", TypeError, - hintfmt("expected a string but found %s: %s", "a set", normaltxt("{ }")), - hintfmt("while evaluating the second argument passed to builtins.compareVersions")); + HintFmt("expected a string but found %s: %s", "a set", Uncolored("{ }")), + HintFmt("while evaluating the second argument passed to builtins.compareVersions")); } @@ -1174,8 +1175,8 @@ namespace nix { TEST_F(ErrorTraceTest, splitVersion) { ASSERT_TRACE2("splitVersion 1", TypeError, - hintfmt("expected a string but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.splitVersion")); + HintFmt("expected a string but found %s: %s", "an 
integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.splitVersion")); } @@ -1188,108 +1189,108 @@ namespace nix { TEST_F(ErrorTraceTest, derivationStrict) { ASSERT_TRACE2("derivationStrict \"\"", TypeError, - hintfmt("expected a set but found %s: %s", "a string", "\"\""), - hintfmt("while evaluating the argument passed to builtins.derivationStrict")); + HintFmt("expected a set but found %s: %s", "a string", "\"\""), + HintFmt("while evaluating the argument passed to builtins.derivationStrict")); ASSERT_TRACE2("derivationStrict {}", TypeError, - hintfmt("attribute '%s' missing", "name"), - hintfmt("in the attrset passed as argument to builtins.derivationStrict")); + HintFmt("attribute '%s' missing", "name"), + HintFmt("in the attrset passed as argument to builtins.derivationStrict")); ASSERT_TRACE2("derivationStrict { name = 1; }", TypeError, - hintfmt("expected a string but found %s: %s", "an integer", "1"), - hintfmt("while evaluating the `name` attribute passed to builtins.derivationStrict")); + HintFmt("expected a string but found %s: %s", "an integer", "1"), + HintFmt("while evaluating the `name` attribute passed to builtins.derivationStrict")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; }", TypeError, - hintfmt("required attribute 'builder' missing"), - hintfmt("while evaluating derivation 'foo'")); + HintFmt("required attribute 'builder' missing"), + HintFmt("while evaluating derivation 'foo'")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; __structuredAttrs = 15; }", TypeError, - hintfmt("expected a Boolean but found %s: %s", "an integer", "15"), - hintfmt("while evaluating the `__structuredAttrs` attribute passed to builtins.derivationStrict")); + HintFmt("expected a Boolean but found %s: %s", "an integer", "15"), + HintFmt("while evaluating the `__structuredAttrs` attribute passed to builtins.derivationStrict")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder 
= 1; __ignoreNulls = 15; }", TypeError, - hintfmt("expected a Boolean but found %s: %s", "an integer", "15"), - hintfmt("while evaluating the `__ignoreNulls` attribute passed to builtins.derivationStrict")); + HintFmt("expected a Boolean but found %s: %s", "an integer", "15"), + HintFmt("while evaluating the `__ignoreNulls` attribute passed to builtins.derivationStrict")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; outputHashMode = 15; }", TypeError, - hintfmt("invalid value '15' for 'outputHashMode' attribute"), - hintfmt("while evaluating the attribute 'outputHashMode' of derivation 'foo'")); + HintFmt("invalid value '15' for 'outputHashMode' attribute"), + HintFmt("while evaluating the attribute 'outputHashMode' of derivation 'foo'")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; outputHashMode = \"custom\"; }", TypeError, - hintfmt("invalid value 'custom' for 'outputHashMode' attribute"), - hintfmt("while evaluating the attribute 'outputHashMode' of derivation 'foo'")); + HintFmt("invalid value 'custom' for 'outputHashMode' attribute"), + HintFmt("while evaluating the attribute 'outputHashMode' of derivation 'foo'")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = {}; }", TypeError, - hintfmt("cannot coerce %s to a string: %s", "a set", "{ }"), - hintfmt("while evaluating the attribute 'system' of derivation 'foo'")); + HintFmt("cannot coerce %s to a string: %s", "a set", "{ }"), + HintFmt("while evaluating the attribute 'system' of derivation 'foo'")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = {}; }", TypeError, - hintfmt("cannot coerce %s to a string: %s", "a set", "{ }"), - hintfmt("while evaluating the attribute 'outputs' of derivation 'foo'")); + HintFmt("cannot coerce %s to a string: %s", "a set", "{ }"), + HintFmt("while evaluating the attribute 'outputs' of derivation 'foo'")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 
1; outputs = \"drv\"; }", TypeError, - hintfmt("invalid derivation output name 'drv'"), - hintfmt("while evaluating the attribute 'outputs' of derivation 'foo'")); + HintFmt("invalid derivation output name 'drv'"), + HintFmt("while evaluating the attribute 'outputs' of derivation 'foo'")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = []; }", TypeError, - hintfmt("derivation cannot have an empty set of outputs"), - hintfmt("while evaluating the attribute 'outputs' of derivation 'foo'")); + HintFmt("derivation cannot have an empty set of outputs"), + HintFmt("while evaluating the attribute 'outputs' of derivation 'foo'")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = [ \"drv\" ]; }", TypeError, - hintfmt("invalid derivation output name 'drv'"), - hintfmt("while evaluating the attribute 'outputs' of derivation 'foo'")); + HintFmt("invalid derivation output name 'drv'"), + HintFmt("while evaluating the attribute 'outputs' of derivation 'foo'")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = [ \"out\" \"out\" ]; }", TypeError, - hintfmt("duplicate derivation output 'out'"), - hintfmt("while evaluating the attribute 'outputs' of derivation 'foo'")); + HintFmt("duplicate derivation output 'out'"), + HintFmt("while evaluating the attribute 'outputs' of derivation 'foo'")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"out\"; __contentAddressed = \"true\"; }", TypeError, - hintfmt("expected a Boolean but found %s: %s", "a string", "\"true\""), - hintfmt("while evaluating the attribute '__contentAddressed' of derivation 'foo'")); + HintFmt("expected a Boolean but found %s: %s", "a string", "\"true\""), + HintFmt("while evaluating the attribute '__contentAddressed' of derivation 'foo'")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"out\"; __impure = \"true\"; }", TypeError, - 
hintfmt("expected a Boolean but found %s: %s", "a string", "\"true\""), - hintfmt("while evaluating the attribute '__impure' of derivation 'foo'")); + HintFmt("expected a Boolean but found %s: %s", "a string", "\"true\""), + HintFmt("while evaluating the attribute '__impure' of derivation 'foo'")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"out\"; __impure = \"true\"; }", TypeError, - hintfmt("expected a Boolean but found %s: %s", "a string", "\"true\""), - hintfmt("while evaluating the attribute '__impure' of derivation 'foo'")); + HintFmt("expected a Boolean but found %s: %s", "a string", "\"true\""), + HintFmt("while evaluating the attribute '__impure' of derivation 'foo'")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"out\"; args = \"foo\"; }", TypeError, - hintfmt("expected a list but found %s: %s", "a string", "\"foo\""), - hintfmt("while evaluating the attribute 'args' of derivation 'foo'")); + HintFmt("expected a list but found %s: %s", "a string", "\"foo\""), + HintFmt("while evaluating the attribute 'args' of derivation 'foo'")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"out\"; args = [ {} ]; }", TypeError, - hintfmt("cannot coerce %s to a string: %s", "a set", "{ }"), - hintfmt("while evaluating an element of the argument list")); + HintFmt("cannot coerce %s to a string: %s", "a set", "{ }"), + HintFmt("while evaluating an element of the argument list")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"out\"; args = [ \"a\" {} ]; }", TypeError, - hintfmt("cannot coerce %s to a string: %s", "a set", "{ }"), - hintfmt("while evaluating an element of the argument list")); + HintFmt("cannot coerce %s to a string: %s", "a set", "{ }"), + HintFmt("while evaluating an element of the argument list")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"out\"; 
FOO = {}; }", TypeError, - hintfmt("cannot coerce %s to a string: %s", "a set", "{ }"), - hintfmt("while evaluating the attribute 'FOO' of derivation 'foo'")); + HintFmt("cannot coerce %s to a string: %s", "a set", "{ }"), + HintFmt("while evaluating the attribute 'FOO' of derivation 'foo'")); } */ diff --git a/tests/unit/libutil/logging.cc b/tests/unit/libutil/logging.cc index c8c7c091f..1d7304f05 100644 --- a/tests/unit/libutil/logging.cc +++ b/tests/unit/libutil/logging.cc @@ -42,7 +42,7 @@ namespace nix { makeJSONLogger(*logger)->logEI({ .name = "error name", - .msg = hintfmt("this hint has %1% templated %2%!!", + .msg = HintFmt("this hint has %1% templated %2%!!", "yellow", "values"), .errPos = Pos(foFile, problem_file, 02, 13) @@ -62,7 +62,7 @@ namespace nix { throw TestError(e.info()); } catch (Error &e) { ErrorInfo ei = e.info(); - ei.msg = hintfmt("%s; subsequent error message.", Uncolored(e.info().msg.str())); + ei.msg = HintFmt("%s; subsequent error message.", Uncolored(e.info().msg.str())); testing::internal::CaptureStderr(); logger->logEI(ei); @@ -176,7 +176,7 @@ namespace nix { logError({ .name = "error name", - .msg = hintfmt("this hint has %1% templated %2%!!", + .msg = HintFmt("this hint has %1% templated %2%!!", "yellow", "values"), .errPos = Pos(foString, problem_file, 02, 13), @@ -193,7 +193,7 @@ namespace nix { logError({ .name = "error name", - .msg = hintfmt("this hint has %1% templated %2%!!", + .msg = HintFmt("this hint has %1% templated %2%!!", "yellow", "values"), .errPos = Pos(foFile, problem_file, 02, 13) @@ -208,7 +208,7 @@ namespace nix { logError({ .name = "error name", - .msg = hintfmt("hint %1%", "only"), + .msg = HintFmt("hint %1%", "only"), }); auto str = testing::internal::GetCapturedStderr(); @@ -225,7 +225,7 @@ namespace nix { logWarning({ .name = "name", - .msg = hintfmt("there was a %1%", "warning"), + .msg = HintFmt("there was a %1%", "warning"), }); auto str = testing::internal::GetCapturedStderr(); @@ -241,7 +241,7 @@ 
namespace nix { logWarning({ .name = "warning name", - .msg = hintfmt("this hint has %1% templated %2%!!", + .msg = HintFmt("this hint has %1% templated %2%!!", "yellow", "values"), .errPos = Pos(foStdin, problem_file, 2, 13), @@ -264,7 +264,7 @@ namespace nix { auto e = AssertionError(ErrorInfo { .name = "wat", - .msg = hintfmt("it has been %1% days since our last error", "zero"), + .msg = HintFmt("it has been %1% days since our last error", "zero"), .errPos = Pos(foString, problem_file, 2, 13), }); @@ -290,7 +290,7 @@ namespace nix { auto e = AssertionError(ErrorInfo { .name = "wat", - .msg = hintfmt("it has been %1% days since our last error", "zero"), + .msg = HintFmt("it has been %1% days since our last error", "zero"), .errPos = Pos(foString, problem_file, 2, 13), }); @@ -310,39 +310,39 @@ namespace nix { /* ---------------------------------------------------------------------------- - * hintfmt + * HintFmt * --------------------------------------------------------------------------*/ - TEST(hintfmt, percentStringWithoutArgs) { + TEST(HintFmt, percentStringWithoutArgs) { const char *teststr = "this is 100%s correct!"; ASSERT_STREQ( - hintfmt(teststr).str().c_str(), + HintFmt(teststr).str().c_str(), teststr); } - TEST(hintfmt, fmtToHintfmt) { + TEST(HintFmt, fmtToHintfmt) { ASSERT_STREQ( - hintfmt(fmt("the color of this this text is %1%", "not yellow")).str().c_str(), + HintFmt(fmt("the color of this this text is %1%", "not yellow")).str().c_str(), "the color of this this text is not yellow"); } - TEST(hintfmt, tooFewArguments) { + TEST(HintFmt, tooFewArguments) { ASSERT_STREQ( - hintfmt("only one arg %1% %2%", "fulfilled").str().c_str(), + HintFmt("only one arg %1% %2%", "fulfilled").str().c_str(), "only one arg " ANSI_WARNING "fulfilled" ANSI_NORMAL " "); } - TEST(hintfmt, tooManyArguments) { + TEST(HintFmt, tooManyArguments) { ASSERT_STREQ( - hintfmt("what about this %1% %2%", "%3%", "one", "two").str().c_str(), + HintFmt("what about this %1% %2%", "%3%", 
"one", "two").str().c_str(), "what about this " ANSI_WARNING "%3%" ANSI_NORMAL " " ANSI_YELLOW "one" ANSI_NORMAL); } From 8b7eb7400b166b1c2ef45a6d66999041f33c40bf Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Fri, 2 Feb 2024 17:41:34 -0800 Subject: [PATCH 059/164] Enter debugger on `builtins.trace` with an option --- src/libexpr/eval-settings.hh | 3 +++ src/libexpr/primops.cc | 4 ++++ 2 files changed, 7 insertions(+) diff --git a/src/libexpr/eval-settings.hh b/src/libexpr/eval-settings.hh index 2f6c12d45..757daebc0 100644 --- a/src/libexpr/eval-settings.hh +++ b/src/libexpr/eval-settings.hh @@ -127,6 +127,9 @@ struct EvalSettings : Config Setting maxCallDepth{this, 10000, "max-call-depth", "The maximum function call depth to allow before erroring."}; + + Setting builtinsTraceDebugger{this, false, "builtins-trace-debugger", + "Whether to enter the debugger on `builtins.trace` calls."}; }; extern EvalSettings evalSettings; diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index 5e2bbe16f..a24a2d018 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -995,6 +995,10 @@ static void prim_trace(EvalState & state, const PosIdx pos, Value * * args, Valu printError("trace: %1%", args[0]->string_view()); else printError("trace: %1%", ValuePrinter(state, *args[0])); + if (evalSettings.builtinsTraceDebugger && state.debugRepl && !state.debugTraces.empty()) { + const DebugTrace & last = state.debugTraces.front(); + state.runDebugRepl(nullptr, last.env, last.expr); + } state.forceValue(*args[1], pos); v = *args[1]; } From 4440eb54e7274734ec442081f55023853efa8708 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Mon, 5 Feb 2024 13:03:08 -0800 Subject: [PATCH 060/164] Add release note --- doc/manual/rl-next/debugger-on-trace.md | 9 +++++++++ 1 file changed, 9 insertions(+) create mode 100644 doc/manual/rl-next/debugger-on-trace.md diff --git a/doc/manual/rl-next/debugger-on-trace.md b/doc/manual/rl-next/debugger-on-trace.md new file mode 100644 
index 000000000..d4e55d59c --- /dev/null +++ b/doc/manual/rl-next/debugger-on-trace.md @@ -0,0 +1,9 @@ +--- +synopsis: Enter the `--debugger` when `builtins.trace` is called if `builtins-trace-debugger` is set +prs: 9914 +--- + +If the `builtins-trace-debugger` option is set and `--debugger` is given, +`builtins.trace` calls will behave similarly to `builtins.break` and will enter +the debug REPL. This is useful for determining where warnings are being emitted +from. From 953eb0cba2aad89753a39da6c98d409d1b88f88e Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Thu, 8 Feb 2024 15:55:20 -0800 Subject: [PATCH 061/164] Fix tests --- tests/unit/libexpr/error_traces.cc | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/unit/libexpr/error_traces.cc b/tests/unit/libexpr/error_traces.cc index a899d3113..7b32b320b 100644 --- a/tests/unit/libexpr/error_traces.cc +++ b/tests/unit/libexpr/error_traces.cc @@ -26,7 +26,7 @@ namespace nix { try { state.error("puppy").withTrace(noPos, "doggy").debugThrow(); } catch (Error & e) { - e.addTrace(state.positions[noPos], "beans", ""); + e.addTrace(state.positions[noPos], "beans"); throw; } } catch (BaseError & e) { @@ -52,7 +52,7 @@ namespace nix { try { state.error("beans").debugThrow(); } catch (Error & e2) { - e.addTrace(state.positions[noPos], "beans2", ""); + e.addTrace(state.positions[noPos], "beans2"); //e2.addTrace(state.positions[noPos], "Something", ""); ASSERT_TRUE(e.info().traces.size() == 2); ASSERT_TRUE(e2.info().traces.size() == 0); @@ -807,7 +807,7 @@ namespace nix { ASSERT_TRACE2("genList 1 2", TypeError, HintFmt("expected a function but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), - HintFmt("while evaluating the first argument passed to builtins.genList", "an integer")); + HintFmt("while evaluating the first argument passed to builtins.genList")); // XXX: defered // ASSERT_TRACE2("genList (x: x + \"foo\") 2 #TODO", From 1fe7b016699c4e2a7435ba29d1ecc6830ae88946 Mon Sep 
17 00:00:00 2001 From: =?UTF-8?q?Th=C3=A9ophane=20Hufschmitt?= <7226587+thufschmitt@users.noreply.github.com> Date: Fri, 9 Feb 2024 06:27:24 +0100 Subject: [PATCH 062/164] Don't hardcode the `-O2` compiler flag autoconf authors apparently decided that setting `-O2` by default was a good idea. I disagree, and Nix has its own way of deciding that (with `OPTIMIZE={0,1}`). Explicitly set `CFLAGS` and `CXXFLAGS` in the configure script to disable that behaviour. Fix #9965 --- configure.ac | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/configure.ac b/configure.ac index 8c29c1e62..676b145a5 100644 --- a/configure.ac +++ b/configure.ac @@ -47,6 +47,10 @@ AC_DEFINE_UNQUOTED(SYSTEM, ["$system"], [platform identifier ('cpu-os')]) # State should be stored in /nix/var, unless the user overrides it explicitly. test "$localstatedir" = '${prefix}/var' && localstatedir=/nix/var +# Assign a default value to C{,XX}FLAGS as the default configure script sets them +# to -O2 otherwise, which we don't want to have hardcoded +CFLAGS=${CFLAGS-""} +CXXFLAGS=${CXXFLAGS-""} AC_PROG_CC AC_PROG_CXX From 60045f9c9650ae87f04a2fe507817ad9b5318104 Mon Sep 17 00:00:00 2001 From: Valentin Gagarin Date: Fri, 9 Feb 2024 10:41:03 +0100 Subject: [PATCH 063/164] add clickable anchor links how the different invocations relate to each other seems to be confusing, which is relatable because one has to wire it up in your head while reading. an explicit reference should make it unambiguous and easier to notice due to links being highlighted. 
--- doc/manual/src/command-ref/nix-collect-garbage.md | 2 +- doc/manual/src/command-ref/nix-env/delete-generations.md | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/doc/manual/src/command-ref/nix-collect-garbage.md b/doc/manual/src/command-ref/nix-collect-garbage.md index 3cab79f0e..1bc88d858 100644 --- a/doc/manual/src/command-ref/nix-collect-garbage.md +++ b/doc/manual/src/command-ref/nix-collect-garbage.md @@ -51,7 +51,7 @@ These options are for deleting old [profiles] prior to deleting unreachable [sto - [`--delete-old`](#opt-delete-old) / `-d`\ Delete all old generations of profiles. - This is the equivalent of invoking `nix-env --delete-generations old` on each found profile. + This is the equivalent of invoking [`nix-env --delete-generations old`](@docroot@/command-ref/nix-env/delete-generations.md#generations-old) on each found profile. - [`--delete-older-than`](#opt-delete-older-than) *period*\ Delete all generations of profiles older than the specified amount (except for the generations that were active at that point in time). diff --git a/doc/manual/src/command-ref/nix-env/delete-generations.md b/doc/manual/src/command-ref/nix-env/delete-generations.md index adc6fc219..6b6ea798e 100644 --- a/doc/manual/src/command-ref/nix-env/delete-generations.md +++ b/doc/manual/src/command-ref/nix-env/delete-generations.md @@ -12,13 +12,13 @@ This operation deletes the specified generations of the current profile. *generations* can be a one of the following: -- `...`:\ +- [`...`](#generations-list):\ A list of generation numbers, each one a separate command-line argument. Delete exactly the profile generations given by their generation number. Deleting the current generation is not allowed. -- The special value `old` +- [The special value `old`](#generations-old) Delete all generations except the current one. @@ -30,7 +30,7 @@ This operation deletes the specified generations of the current profile. 
> Because one can roll back to a previous generation, it is possible to have generations newer than the current one. > They will also be deleted. -- `d`:\ +- [`d`](#generations-time):\ The last *number* days *Example*: `30d` @@ -38,7 +38,7 @@ This operation deletes the specified generations of the current profile. Delete all generations created more than *number* days ago, except the most recent one of them. This allows rolling back to generations that were available within the specified period. -- `+`:\ +- [`+`](#generations-count):\ The last *number* generations up to the present *Example*: `+5` From fb5a792280a55bf783528f0903204e674417c70a Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 9 Feb 2024 15:55:24 +0100 Subject: [PATCH 064/164] runPostBuildHook(): Be less chatty Don't spam the user with "running post-build-hook" messages. It's up to the post-build hook if it has something interesting to say. --- src/libstore/build/derivation-goal.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libstore/build/derivation-goal.cc b/src/libstore/build/derivation-goal.cc index d3bbdf1ed..1b326ee13 100644 --- a/src/libstore/build/derivation-goal.cc +++ b/src/libstore/build/derivation-goal.cc @@ -891,7 +891,7 @@ void runPostBuildHook( if (hook == "") return; - Activity act(logger, lvlInfo, actPostBuildHook, + Activity act(logger, lvlTalkative, actPostBuildHook, fmt("running post-build-hook '%s'", settings.postBuildHook), Logger::Fields{store.printStorePath(drvPath)}); PushActivity pact(act.id); From 8f3253c6f4041f500631e1dac5ba75f335e9c70a Mon Sep 17 00:00:00 2001 From: Alois Wohlschlager Date: Fri, 9 Feb 2024 18:56:42 +0100 Subject: [PATCH 065/164] Restore manual pages Commit d536c57e878a04f795c1ef8ee3232a47035da2cf inadvertently broke build and installation of all non-autogenerated manual pages (in particular, all the ones documenting the stable CLI), by moving the definition of the man-pages variable in doc/manual/local.mk after its usage in 
mk/lib.mk. Move including the former earlier so that the correct order is restored. --- Makefile | 25 ++++++++++++++----------- mk/lib.mk | 4 ++++ 2 files changed, 18 insertions(+), 11 deletions(-) diff --git a/Makefile b/Makefile index 7bbfbddbe..d3542c3e9 100644 --- a/Makefile +++ b/Makefile @@ -47,6 +47,17 @@ makefiles += \ tests/functional/plugins/local.mk endif +# Some makefiles require access to built programs and must be included late. +makefiles-late = + +ifeq ($(ENABLE_DOC_GEN), yes) +makefiles-late += doc/manual/local.mk +endif + +ifeq ($(ENABLE_INTERNAL_API_DOCS), yes) +makefiles-late += doc/internal-api/local.mk +endif + # Miscellaneous global Flags OPTIMIZE = 1 @@ -95,24 +106,16 @@ installcheck: @exit 1 endif -# Documentation or else fallback stub rules. -# -# The documentation makefiles be included after `mk/lib.mk` so rules -# refer to variables defined by `mk/lib.mk`. Rules are not "lazy" like -# variables, unfortunately. +# Documentation fallback stub rules. -ifeq ($(ENABLE_DOC_GEN), yes) -$(eval $(call include-sub-makefile, doc/manual/local.mk)) -else +ifneq ($(ENABLE_DOC_GEN), yes) .PHONY: manual-html manpages manual-html manpages: @echo "Generated docs are disabled. Configure without '--disable-doc-gen', or avoid calling 'make manpages' and 'make manual-html'." @exit 1 endif -ifeq ($(ENABLE_INTERNAL_API_DOCS), yes) -$(eval $(call include-sub-makefile, doc/internal-api/local.mk)) -else +ifneq ($(ENABLE_INTERNAL_API_DOCS), yes) .PHONY: internal-api-html internal-api-html: @echo "Internal API docs are disabled. Configure with '--enable-internal-api-docs', or avoid calling 'make internal-api-html'." diff --git a/mk/lib.mk b/mk/lib.mk index 10ce8d436..fe0add1c9 100644 --- a/mk/lib.mk +++ b/mk/lib.mk @@ -97,6 +97,10 @@ $(foreach test-group, $(install-tests-groups), \ $(eval $(call run-test,$(test),$(install_test_init))) \ $(eval $(test-group).test-group: $(test).test))) +# Include makefiles requiring built programs. 
+$(foreach mf, $(makefiles-late), $(eval $(call include-sub-makefile,$(mf)))) + + $(foreach file, $(man-pages), $(eval $(call install-data-in, $(file), $(mandir)/man$(patsubst .%,%,$(suffix $(file)))))) From 53eecae52546219f3f3e7bebac9792ea5d816ffc Mon Sep 17 00:00:00 2001 From: BOHverkill Date: Sat, 10 Feb 2024 17:17:48 +0100 Subject: [PATCH 066/164] Fix link to derivation in string interpolation doc The reference link definition for it pointing to the glossary was removed, so it is currently not displayed as a link. --- doc/manual/src/language/string-interpolation.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/doc/manual/src/language/string-interpolation.md b/doc/manual/src/language/string-interpolation.md index 6e28d2664..7d81c2020 100644 --- a/doc/manual/src/language/string-interpolation.md +++ b/doc/manual/src/language/string-interpolation.md @@ -20,6 +20,8 @@ Rather than writing (where `freetype` is a [derivation]), you can instead write +[derivation]: ../glossary.md#gloss-derivation + ```nix "--with-freetype2-library=${freetype}/lib" ``` From fae8c15737a8a1df85cc75f55c0bffa712b9ac0a Mon Sep 17 00:00:00 2001 From: BOHverkill Date: Sat, 10 Feb 2024 17:44:33 +0100 Subject: [PATCH 067/164] Fix link to manual in CONTRIBUTING.md --- CONTRIBUTING.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index ffcc0268f..a0c2b16f4 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -63,7 +63,7 @@ Check out the [security policy](https://github.com/NixOS/nix/security/policy). 
- Functional tests – [`tests/functional/**.sh`](./tests/functional) - Unit tests – [`src/*/tests`](./src/) - Integration tests – [`tests/nixos/*`](./tests/nixos) - - [ ] User documentation in the [manual](..doc/manual/src) + - [ ] User documentation in the [manual](./doc/manual/src) - [ ] API documentation in header files - [ ] Code and comments are self-explanatory - [ ] Commit message explains **why** the change was made From f298159a2bac2932208907f6319a0ba80b2721c6 Mon Sep 17 00:00:00 2001 From: nbelakovski Date: Sat, 10 Feb 2024 18:52:39 -0800 Subject: [PATCH 068/164] Add a note about lists values.md There's probably more that can be said, but I thought it might be helpful to put something here about how to access elements of a list for folks coming from more or less any other programming language. If this is rarely used, it might be nice to add to the documentation something about why it's rarely used. --- doc/manual/src/language/values.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/doc/manual/src/language/values.md b/doc/manual/src/language/values.md index aea68a441..99dc0245d 100644 --- a/doc/manual/src/language/values.md +++ b/doc/manual/src/language/values.md @@ -156,6 +156,8 @@ function and the fifth being a set. Note that lists are only lazy in values, and they are strict in length. +Elements in a list can be accessed using `builtins.elemAt`. + ## Attribute Set An attribute set is a collection of name-value-pairs (called *attributes*) enclosed in curly brackets (`{ }`). 
From 4496a4537b56d69c7227088c4174a1ecbedd2ed5 Mon Sep 17 00:00:00 2001 From: nbelakovski Date: Sun, 11 Feb 2024 22:52:49 -0800 Subject: [PATCH 069/164] Update values.md Link to elemAt --- doc/manual/src/language/values.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/manual/src/language/values.md b/doc/manual/src/language/values.md index 99dc0245d..74ffc7070 100644 --- a/doc/manual/src/language/values.md +++ b/doc/manual/src/language/values.md @@ -156,7 +156,7 @@ function and the fifth being a set. Note that lists are only lazy in values, and they are strict in length. -Elements in a list can be accessed using `builtins.elemAt`. +Elements in a list can be accessed using [`builtins.elemAt`](./builtins.md#builtins-elemAt). ## Attribute Set From 8669c02468994887be91072ac58b1ee43380d354 Mon Sep 17 00:00:00 2001 From: pennae Date: Sat, 27 Jan 2024 16:33:34 +0100 Subject: [PATCH 070/164] add test for inherit-from semantics --- tests/functional/lang/eval-okay-inherit-from.err.exp | 2 ++ tests/functional/lang/eval-okay-inherit-from.exp | 1 + tests/functional/lang/eval-okay-inherit-from.nix | 6 ++++++ 3 files changed, 9 insertions(+) create mode 100644 tests/functional/lang/eval-okay-inherit-from.err.exp create mode 100644 tests/functional/lang/eval-okay-inherit-from.exp create mode 100644 tests/functional/lang/eval-okay-inherit-from.nix diff --git a/tests/functional/lang/eval-okay-inherit-from.err.exp b/tests/functional/lang/eval-okay-inherit-from.err.exp new file mode 100644 index 000000000..51881205b --- /dev/null +++ b/tests/functional/lang/eval-okay-inherit-from.err.exp @@ -0,0 +1,2 @@ +trace: used +trace: used diff --git a/tests/functional/lang/eval-okay-inherit-from.exp b/tests/functional/lang/eval-okay-inherit-from.exp new file mode 100644 index 000000000..43bd0e899 --- /dev/null +++ b/tests/functional/lang/eval-okay-inherit-from.exp @@ -0,0 +1 @@ +[ 1 2 { __overrides = { y = { d = [ ]; }; }; c = [ ]; d = 4; x = { c = [ ]; }; y = «repeated»; 
} ] diff --git a/tests/functional/lang/eval-okay-inherit-from.nix b/tests/functional/lang/eval-okay-inherit-from.nix new file mode 100644 index 000000000..d1fad7d69 --- /dev/null +++ b/tests/functional/lang/eval-okay-inherit-from.nix @@ -0,0 +1,6 @@ +let + inherit (builtins.trace "used" { a = 1; b = 2; }) a b; + x.c = 3; + y.d = 4; +in + [ a b rec { x.c = []; inherit (x) c; inherit (y) d; __overrides.y.d = []; } ] From 73065a400d176b21f518c1f4ece90c31318b218d Mon Sep 17 00:00:00 2001 From: pennae Date: Sat, 27 Jan 2024 16:33:34 +0100 Subject: [PATCH 071/164] add test for inherit expr printing --- tests/functional/lang/parse-okay-inherits.exp | 1 + tests/functional/lang/parse-okay-inherits.nix | 9 +++++++++ 2 files changed, 10 insertions(+) create mode 100644 tests/functional/lang/parse-okay-inherits.exp create mode 100644 tests/functional/lang/parse-okay-inherits.nix diff --git a/tests/functional/lang/parse-okay-inherits.exp b/tests/functional/lang/parse-okay-inherits.exp new file mode 100644 index 000000000..050b54afd --- /dev/null +++ b/tests/functional/lang/parse-okay-inherits.exp @@ -0,0 +1 @@ +(let c = { }; b = 2; in { a = 1; inherit b ; d = (c).d; e = (c).e; f = 3; }) diff --git a/tests/functional/lang/parse-okay-inherits.nix b/tests/functional/lang/parse-okay-inherits.nix new file mode 100644 index 000000000..10596c8ad --- /dev/null +++ b/tests/functional/lang/parse-okay-inherits.nix @@ -0,0 +1,9 @@ +let + c = {}; + b = 2; +in { + a = 1; + inherit b; + inherit (c) d e; + f = 3; +} From c66ee57edc6cac3571bfbf77d0c0ea4d25b4e805 Mon Sep 17 00:00:00 2001 From: pennae Date: Sat, 27 Jan 2024 16:33:34 +0100 Subject: [PATCH 072/164] preserve information about whether/how an attribute was inherited --- src/libexpr/eval.cc | 6 +++--- src/libexpr/nixexpr.cc | 8 ++++---- src/libexpr/nixexpr.hh | 17 ++++++++++++++--- src/libexpr/parser-state.hh | 2 +- src/libexpr/parser.y | 11 +++++++++-- 5 files changed, 31 insertions(+), 13 deletions(-) diff --git a/src/libexpr/eval.cc 
b/src/libexpr/eval.cc index bffbd5f1a..12d7d825f 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -1207,11 +1207,11 @@ void ExprAttrs::eval(EvalState & state, Env & env, Value & v) Displacement displ = 0; for (auto & i : attrs) { Value * vAttr; - if (hasOverrides && !i.second.inherited) { + if (hasOverrides && !i.second.inherited()) { vAttr = state.allocValue(); mkThunk(*vAttr, env2, i.second.e); } else - vAttr = i.second.e->maybeThunk(state, i.second.inherited ? env : env2); + vAttr = i.second.e->maybeThunk(state, i.second.inherited() ? env : env2); env2.values[displ++] = vAttr; v.attrs->push_back(Attr(i.first, vAttr, i.second.pos)); } @@ -1282,7 +1282,7 @@ void ExprLet::eval(EvalState & state, Env & env, Value & v) environment. */ Displacement displ = 0; for (auto & i : attrs->attrs) - env2.values[displ++] = i.second.e->maybeThunk(state, i.second.inherited ? env : env2); + env2.values[displ++] = i.second.e->maybeThunk(state, i.second.inherited() ? env : env2); auto dts = state.debugRepl ? makeDebugTraceStacker( diff --git a/src/libexpr/nixexpr.cc b/src/libexpr/nixexpr.cc index 46737fea6..4c06864fd 100644 --- a/src/libexpr/nixexpr.cc +++ b/src/libexpr/nixexpr.cc @@ -82,7 +82,7 @@ void ExprAttrs::show(const SymbolTable & symbols, std::ostream & str) const return sa < sb; }); for (auto & i : sorted) { - if (i->second.inherited) + if (i->second.inherited()) str << "inherit " << symbols[i->first] << " " << "; "; else { str << symbols[i->first] << " = "; @@ -153,7 +153,7 @@ void ExprLet::show(const SymbolTable & symbols, std::ostream & str) const { str << "(let "; for (auto & i : attrs->attrs) - if (i.second.inherited) { + if (i.second.inherited()) { str << "inherit " << symbols[i.first] << "; "; } else { @@ -343,7 +343,7 @@ void ExprAttrs::bindVars(EvalState & es, const std::shared_ptr // No need to sort newEnv since attrs is in sorted order. for (auto & i : attrs) - i.second.e->bindVars(es, i.second.inherited ? 
env : newEnv); + i.second.e->bindVars(es, i.second.inherited() ? env : newEnv); for (auto & i : dynamicAttrs) { i.nameExpr->bindVars(es, newEnv); @@ -418,7 +418,7 @@ void ExprLet::bindVars(EvalState & es, const std::shared_ptr & // No need to sort newEnv since attrs->attrs is in sorted order. for (auto & i : attrs->attrs) - i.second.e->bindVars(es, i.second.inherited ? env : newEnv); + i.second.e->bindVars(es, i.second.inherited() ? env : newEnv); if (es.debugRepl) es.exprEnvs.insert(std::make_pair(this, newEnv)); diff --git a/src/libexpr/nixexpr.hh b/src/libexpr/nixexpr.hh index 1f944f10b..c8f47b02b 100644 --- a/src/libexpr/nixexpr.hh +++ b/src/libexpr/nixexpr.hh @@ -160,13 +160,24 @@ struct ExprAttrs : Expr bool recursive; PosIdx pos; struct AttrDef { - bool inherited; + enum class Kind { + /** `attr = expr;` */ + Plain, + /** `inherit attr1 attrn;` */ + Inherited, + /** `inherit (expr) attr1 attrn;` */ + InheritedFrom, + }; + + Kind kind; Expr * e; PosIdx pos; Displacement displ; // displacement - AttrDef(Expr * e, const PosIdx & pos, bool inherited=false) - : inherited(inherited), e(e), pos(pos) { }; + AttrDef(Expr * e, const PosIdx & pos, Kind kind = Kind::Plain) + : kind(kind), e(e), pos(pos) { }; AttrDef() { }; + + bool inherited() const { return kind == Kind::Inherited; } }; typedef std::map AttrDefs; AttrDefs attrs; diff --git a/src/libexpr/parser-state.hh b/src/libexpr/parser-state.hh index 87aeaeef5..ae38de130 100644 --- a/src/libexpr/parser-state.hh +++ b/src/libexpr/parser-state.hh @@ -89,7 +89,7 @@ inline void ParserState::addAttr(ExprAttrs * attrs, AttrPath && attrPath, Expr * if (i->symbol) { ExprAttrs::AttrDefs::iterator j = attrs->attrs.find(i->symbol); if (j != attrs->attrs.end()) { - if (!j->second.inherited) { + if (!j->second.inherited()) { ExprAttrs * attrs2 = dynamic_cast(j->second.e); if (!attrs2) dupAttr(attrPath, pos, j->second.pos); attrs = attrs2; diff --git a/src/libexpr/parser.y b/src/libexpr/parser.y index a3ba13c66..0898b81f7 100644 
--- a/src/libexpr/parser.y +++ b/src/libexpr/parser.y @@ -313,7 +313,9 @@ binds if ($$->attrs.find(i.symbol) != $$->attrs.end()) state->dupAttr(i.symbol, state->at(@3), $$->attrs[i.symbol].pos); auto pos = state->at(@3); - $$->attrs.emplace(i.symbol, ExprAttrs::AttrDef(new ExprVar(CUR_POS, i.symbol), pos, true)); + $$->attrs.emplace( + i.symbol, + ExprAttrs::AttrDef(new ExprVar(CUR_POS, i.symbol), pos, ExprAttrs::AttrDef::Kind::Inherited)); } delete $3; } @@ -323,7 +325,12 @@ binds for (auto & i : *$6) { if ($$->attrs.find(i.symbol) != $$->attrs.end()) state->dupAttr(i.symbol, state->at(@6), $$->attrs[i.symbol].pos); - $$->attrs.emplace(i.symbol, ExprAttrs::AttrDef(new ExprSelect(CUR_POS, $4, i.symbol), state->at(@6))); + $$->attrs.emplace( + i.symbol, + ExprAttrs::AttrDef( + new ExprSelect(CUR_POS, $4, i.symbol), + state->at(@6), + ExprAttrs::AttrDef::Kind::InheritedFrom)); } delete $6; } From 1f542adb3e18e7078e6a589182a53a47d971748a Mon Sep 17 00:00:00 2001 From: pennae Date: Sat, 27 Jan 2024 16:33:34 +0100 Subject: [PATCH 073/164] add ExprAttrs::AttrDef::chooseByKind MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit in place of inherited() — not quite useful yet since we don't distinguish plain and inheritFrom attr kinds so far. --- src/libexpr/eval.cc | 22 +++++++++++++++------- src/libexpr/nixexpr.cc | 28 +++++++++++++++++----------- src/libexpr/nixexpr.hh | 14 ++++++++++++++ 3 files changed, 46 insertions(+), 18 deletions(-) diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 12d7d825f..91341e167 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -1209,9 +1209,9 @@ void ExprAttrs::eval(EvalState & state, Env & env, Value & v) Value * vAttr; if (hasOverrides && !i.second.inherited()) { vAttr = state.allocValue(); - mkThunk(*vAttr, env2, i.second.e); + mkThunk(*vAttr, *i.second.chooseByKind(&env2, &env, &env2), i.second.e); } else - vAttr = i.second.e->maybeThunk(state, i.second.inherited() ? 
env : env2); + vAttr = i.second.e->maybeThunk(state, *i.second.chooseByKind(&env2, &env, &env2)); env2.values[displ++] = vAttr; v.attrs->push_back(Attr(i.first, vAttr, i.second.pos)); } @@ -1243,9 +1243,14 @@ void ExprAttrs::eval(EvalState & state, Env & env, Value & v) } } - else - for (auto & i : attrs) - v.attrs->push_back(Attr(i.first, i.second.e->maybeThunk(state, env), i.second.pos)); + else { + for (auto & i : attrs) { + v.attrs->push_back(Attr( + i.first, + i.second.e->maybeThunk(state, *i.second.chooseByKind(&env, &env, &env)), + i.second.pos)); + } + } /* Dynamic attrs apply *after* rec and __overrides. */ for (auto & i : dynamicAttrs) { @@ -1281,8 +1286,11 @@ void ExprLet::eval(EvalState & state, Env & env, Value & v) while the inherited attributes are evaluated in the original environment. */ Displacement displ = 0; - for (auto & i : attrs->attrs) - env2.values[displ++] = i.second.e->maybeThunk(state, i.second.inherited() ? env : env2); + for (auto & i : attrs->attrs) { + env2.values[displ++] = i.second.e->maybeThunk( + state, + *i.second.chooseByKind(&env2, &env, &env2)); + } auto dts = state.debugRepl ? makeDebugTraceStacker( diff --git a/src/libexpr/nixexpr.cc b/src/libexpr/nixexpr.cc index 4c06864fd..f967777f2 100644 --- a/src/libexpr/nixexpr.cc +++ b/src/libexpr/nixexpr.cc @@ -334,16 +334,19 @@ void ExprAttrs::bindVars(EvalState & es, const std::shared_ptr es.exprEnvs.insert(std::make_pair(this, env)); if (recursive) { - auto newEnv = std::make_shared(nullptr, env.get(), recursive ? attrs.size() : 0); + auto newEnv = [&] () -> std::shared_ptr { + auto newEnv = std::make_shared(nullptr, env.get(), attrs.size()); - Displacement displ = 0; - for (auto & i : attrs) - newEnv->vars.emplace_back(i.first, i.second.displ = displ++); + Displacement displ = 0; + for (auto & i : attrs) + newEnv->vars.emplace_back(i.first, i.second.displ = displ++); + return newEnv; + }(); // No need to sort newEnv since attrs is in sorted order. 
for (auto & i : attrs) - i.second.e->bindVars(es, i.second.inherited() ? env : newEnv); + i.second.e->bindVars(es, i.second.chooseByKind(newEnv, env, newEnv)); for (auto & i : dynamicAttrs) { i.nameExpr->bindVars(es, newEnv); @@ -352,7 +355,7 @@ void ExprAttrs::bindVars(EvalState & es, const std::shared_ptr } else { for (auto & i : attrs) - i.second.e->bindVars(es, env); + i.second.e->bindVars(es, i.second.chooseByKind(env, env, env)); for (auto & i : dynamicAttrs) { i.nameExpr->bindVars(es, env); @@ -409,16 +412,19 @@ void ExprCall::bindVars(EvalState & es, const std::shared_ptr & void ExprLet::bindVars(EvalState & es, const std::shared_ptr & env) { - auto newEnv = std::make_shared(nullptr, env.get(), attrs->attrs.size()); + auto newEnv = [&] () -> std::shared_ptr { + auto newEnv = std::make_shared(nullptr, env.get(), attrs->attrs.size()); - Displacement displ = 0; - for (auto & i : attrs->attrs) - newEnv->vars.emplace_back(i.first, i.second.displ = displ++); + Displacement displ = 0; + for (auto & i : attrs->attrs) + newEnv->vars.emplace_back(i.first, i.second.displ = displ++); + return newEnv; + }(); // No need to sort newEnv since attrs->attrs is in sorted order. for (auto & i : attrs->attrs) - i.second.e->bindVars(es, i.second.inherited() ? 
env : newEnv); + i.second.e->bindVars(es, i.second.chooseByKind(newEnv, env, newEnv)); if (es.debugRepl) es.exprEnvs.insert(std::make_pair(this, newEnv)); diff --git a/src/libexpr/nixexpr.hh b/src/libexpr/nixexpr.hh index c8f47b02b..2d8dafe44 100644 --- a/src/libexpr/nixexpr.hh +++ b/src/libexpr/nixexpr.hh @@ -178,6 +178,20 @@ struct ExprAttrs : Expr AttrDef() { }; bool inherited() const { return kind == Kind::Inherited; } + + template + const T & chooseByKind(const T & plain, const T & inherited, const T & inheritedFrom) const + { + switch (kind) { + case Kind::Plain: + return plain; + case Kind::Inherited: + return inherited; + default: + case Kind::InheritedFrom: + return inheritedFrom; + } + } }; typedef std::map AttrDefs; AttrDefs attrs; From 6c08fba533ef31cad2bdc03ba72ecf58dc8ee5a0 Mon Sep 17 00:00:00 2001 From: pennae Date: Sat, 27 Jan 2024 16:33:34 +0100 Subject: [PATCH 074/164] use the same bindings print for ExprAttrs and ExprLet this also has the effect of sorting let bindings lexicographically rather than by symbol creation order as was previously done, giving a better canonicalization in the process. --- src/libexpr/nixexpr.cc | 21 ++++++++----------- src/libexpr/nixexpr.hh | 2 ++ tests/functional/lang/parse-okay-inherits.exp | 2 +- 3 files changed, 12 insertions(+), 13 deletions(-) diff --git a/src/libexpr/nixexpr.cc b/src/libexpr/nixexpr.cc index f967777f2..c0812bb30 100644 --- a/src/libexpr/nixexpr.cc +++ b/src/libexpr/nixexpr.cc @@ -70,10 +70,8 @@ void ExprOpHasAttr::show(const SymbolTable & symbols, std::ostream & str) const str << ") ? 
" << showAttrPath(symbols, attrPath) << ")"; } -void ExprAttrs::show(const SymbolTable & symbols, std::ostream & str) const +void ExprAttrs::showBindings(const SymbolTable & symbols, std::ostream & str) const { - if (recursive) str << "rec "; - str << "{ "; typedef const decltype(attrs)::value_type * Attr; std::vector sorted; for (auto & i : attrs) sorted.push_back(&i); @@ -97,6 +95,13 @@ void ExprAttrs::show(const SymbolTable & symbols, std::ostream & str) const i.valueExpr->show(symbols, str); str << "; "; } +} + +void ExprAttrs::show(const SymbolTable & symbols, std::ostream & str) const +{ + if (recursive) str << "rec "; + str << "{ "; + showBindings(symbols, str); str << "}"; } @@ -152,15 +157,7 @@ void ExprCall::show(const SymbolTable & symbols, std::ostream & str) const void ExprLet::show(const SymbolTable & symbols, std::ostream & str) const { str << "(let "; - for (auto & i : attrs->attrs) - if (i.second.inherited()) { - str << "inherit " << symbols[i.first] << "; "; - } - else { - str << symbols[i.first] << " = "; - i.second.e->show(symbols, str); - str << "; "; - } + attrs->showBindings(symbols, str); str << "in "; body->show(symbols, str); str << ")"; diff --git a/src/libexpr/nixexpr.hh b/src/libexpr/nixexpr.hh index 2d8dafe44..4a93143b4 100644 --- a/src/libexpr/nixexpr.hh +++ b/src/libexpr/nixexpr.hh @@ -207,6 +207,8 @@ struct ExprAttrs : Expr ExprAttrs() : recursive(false) { }; PosIdx getPos() const override { return pos; } COMMON_METHODS + + void showBindings(const SymbolTable & symbols, std::ostream & str) const; }; struct ExprList : Expr diff --git a/tests/functional/lang/parse-okay-inherits.exp b/tests/functional/lang/parse-okay-inherits.exp index 050b54afd..722101ceb 100644 --- a/tests/functional/lang/parse-okay-inherits.exp +++ b/tests/functional/lang/parse-okay-inherits.exp @@ -1 +1 @@ -(let c = { }; b = 2; in { a = 1; inherit b ; d = (c).d; e = (c).e; f = 3; }) +(let b = 2; c = { }; in { a = 1; inherit b ; d = (c).d; e = (c).e; f = 3; }) From 
ecf8b12d60ad2929f9998666cf0966475b91e291 Mon Sep 17 00:00:00 2001 From: pennae Date: Sat, 27 Jan 2024 16:33:34 +0100 Subject: [PATCH 075/164] group inherit by source during Expr::show for plain inherits this is really just a stylistic choice, but for inherit-from it actually fixes an exponential size increase problem during expr printing (as may happen during assertion failure reporting, on during duplicate attr detection in the parser) --- src/libexpr/nixexpr.cc | 32 +++++++++++++++++-- tests/functional/lang/parse-okay-inherits.exp | 2 +- .../functional/lang/parse-okay-subversion.exp | 2 +- 3 files changed, 31 insertions(+), 5 deletions(-) diff --git a/src/libexpr/nixexpr.cc b/src/libexpr/nixexpr.cc index c0812bb30..82e69de51 100644 --- a/src/libexpr/nixexpr.cc +++ b/src/libexpr/nixexpr.cc @@ -79,10 +79,36 @@ void ExprAttrs::showBindings(const SymbolTable & symbols, std::ostream & str) co std::string_view sa = symbols[a->first], sb = symbols[b->first]; return sa < sb; }); + std::vector inherits; + std::map> inheritsFrom; for (auto & i : sorted) { - if (i->second.inherited()) - str << "inherit " << symbols[i->first] << " " << "; "; - else { + switch (i->second.kind) { + case AttrDef::Kind::Plain: + break; + case AttrDef::Kind::Inherited: + inherits.push_back(i->first); + break; + case AttrDef::Kind::InheritedFrom: { + auto & select = dynamic_cast(*i->second.e); + inheritsFrom[select.e].push_back(i->first); + break; + } + } + } + if (!inherits.empty()) { + str << "inherit"; + for (auto sym : inherits) str << " " << symbols[sym]; + str << "; "; + } + for (const auto & [from, syms] : inheritsFrom) { + str << "inherit ("; + from->show(symbols, str); + str << ")"; + for (auto sym : syms) str << " " << symbols[sym]; + str << "; "; + } + for (auto & i : sorted) { + if (i->second.kind == AttrDef::Kind::Plain) { str << symbols[i->first] << " = "; i->second.e->show(symbols, str); str << "; "; diff --git a/tests/functional/lang/parse-okay-inherits.exp 
b/tests/functional/lang/parse-okay-inherits.exp index 722101ceb..1355527e6 100644 --- a/tests/functional/lang/parse-okay-inherits.exp +++ b/tests/functional/lang/parse-okay-inherits.exp @@ -1 +1 @@ -(let b = 2; c = { }; in { a = 1; inherit b ; d = (c).d; e = (c).e; f = 3; }) +(let b = 2; c = { }; in { inherit b; inherit (c) d e; a = 1; f = 3; }) diff --git a/tests/functional/lang/parse-okay-subversion.exp b/tests/functional/lang/parse-okay-subversion.exp index 4168ee8bf..2303932c4 100644 --- a/tests/functional/lang/parse-okay-subversion.exp +++ b/tests/functional/lang/parse-okay-subversion.exp @@ -1 +1 @@ -({ fetchurl, localServer ? false, httpServer ? false, sslSupport ? false, pythonBindings ? false, javaSwigBindings ? false, javahlBindings ? false, stdenv, openssl ? null, httpd ? null, db4 ? null, expat, swig ? null, j2sdk ? null }: assert (expat != null); assert (localServer -> (db4 != null)); assert (httpServer -> ((httpd != null) && ((httpd).expat == expat))); assert (sslSupport -> ((openssl != null) && (httpServer -> ((httpd).openssl == openssl)))); assert (pythonBindings -> ((swig != null) && (swig).pythonSupport)); assert (javaSwigBindings -> ((swig != null) && (swig).javaSupport)); assert (javahlBindings -> (j2sdk != null)); ((stdenv).mkDerivation { builder = /foo/bar; db4 = (if localServer then db4 else null); inherit expat ; inherit httpServer ; httpd = (if httpServer then httpd else null); j2sdk = (if javaSwigBindings then (swig).j2sdk else (if javahlBindings then j2sdk else null)); inherit javaSwigBindings ; inherit javahlBindings ; inherit localServer ; name = "subversion-1.1.1"; openssl = (if sslSupport then openssl else null); patches = (if javahlBindings then [ (/javahl.patch) ] else [ ]); python = (if pythonBindings then (swig).python else null); inherit pythonBindings ; src = (fetchurl { md5 = "a180c3fe91680389c210c99def54d9e0"; url = "http://subversion.tigris.org/tarballs/subversion-1.1.1.tar.bz2"; }); inherit sslSupport ; swig = (if 
(pythonBindings || javaSwigBindings) then swig else null); })) +({ fetchurl, localServer ? false, httpServer ? false, sslSupport ? false, pythonBindings ? false, javaSwigBindings ? false, javahlBindings ? false, stdenv, openssl ? null, httpd ? null, db4 ? null, expat, swig ? null, j2sdk ? null }: assert (expat != null); assert (localServer -> (db4 != null)); assert (httpServer -> ((httpd != null) && ((httpd).expat == expat))); assert (sslSupport -> ((openssl != null) && (httpServer -> ((httpd).openssl == openssl)))); assert (pythonBindings -> ((swig != null) && (swig).pythonSupport)); assert (javaSwigBindings -> ((swig != null) && (swig).javaSupport)); assert (javahlBindings -> (j2sdk != null)); ((stdenv).mkDerivation { inherit expat httpServer javaSwigBindings javahlBindings localServer pythonBindings sslSupport; builder = /foo/bar; db4 = (if localServer then db4 else null); httpd = (if httpServer then httpd else null); j2sdk = (if javaSwigBindings then (swig).j2sdk else (if javahlBindings then j2sdk else null)); name = "subversion-1.1.1"; openssl = (if sslSupport then openssl else null); patches = (if javahlBindings then [ (/javahl.patch) ] else [ ]); python = (if pythonBindings then (swig).python else null); src = (fetchurl { md5 = "a180c3fe91680389c210c99def54d9e0"; url = "http://subversion.tigris.org/tarballs/subversion-1.1.1.tar.bz2"; }); swig = (if (pythonBindings || javaSwigBindings) then swig else null); })) From 619ca631d07218dfe04bb53e5abb855ecf2bb67a Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 12 Feb 2024 15:29:48 +0100 Subject: [PATCH 076/164] Fix "may be used uninitialized" warning --- src/libstore/store-api.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index 439c9530c..e3715343e 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -847,7 +847,7 @@ void Store::substitutePaths(const StorePathSet & paths) if (!willSubstitute.empty()) try { 
std::vector subs; - for (auto & p : willSubstitute) subs.push_back(DerivedPath::Opaque{p}); + for (auto & p : willSubstitute) subs.emplace_back(DerivedPath::Opaque{p}); buildPaths(subs); } catch (Error & e) { logWarning(e.info()); From a9b69b2fff8b33bc62234f8031f9acf257d9fbe0 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 12 Feb 2024 16:34:59 +0100 Subject: [PATCH 077/164] builtin:{unpack-channel,buildenv}: Get output path from the derivation Similar to 1ee42c5b88eb0533ebcf8b2579ec82f2be80e4b2, get the "out" path from the derivation (and complain if it doesn't exist), rather than getting it from the environment. --- src/libstore/build/local-derivation-goal.cc | 13 +++++++------ src/libstore/build/local-derivation-goal.hh | 2 +- src/libstore/builtins.hh | 10 ++++++++-- src/libstore/builtins/buildenv.cc | 6 ++++-- src/libstore/builtins/buildenv.hh | 4 +++- src/libstore/builtins/fetchurl.cc | 15 ++++++--------- src/libstore/builtins/unpack-channel.cc | 6 ++++-- 7 files changed, 33 insertions(+), 23 deletions(-) diff --git a/src/libstore/build/local-derivation-goal.cc b/src/libstore/build/local-derivation-goal.cc index 2f60d2f38..b373c74b2 100644 --- a/src/libstore/build/local-derivation-goal.cc +++ b/src/libstore/build/local-derivation-goal.cc @@ -2130,16 +2130,17 @@ void LocalDerivationGoal::runChild() try { logger = makeJSONLogger(*logger); - BasicDerivation & drv2(*drv); - for (auto & e : drv2.env) - e.second = rewriteStrings(e.second, inputRewrites); + std::map outputs; + for (auto & e : drv->outputs) + outputs.insert_or_assign(e.first, + worker.store.printStorePath(scratchOutputs.at(e.first))); if (drv->builder == "builtin:fetchurl") - builtinFetchurl(drv2, netrcData); + builtinFetchurl(*drv, outputs, netrcData); else if (drv->builder == "builtin:buildenv") - builtinBuildenv(drv2); + builtinBuildenv(*drv, outputs); else if (drv->builder == "builtin:unpack-channel") - builtinUnpackChannel(drv2); + builtinUnpackChannel(*drv, outputs); else throw 
Error("unsupported builtin builder '%1%'", drv->builder.substr(8)); _exit(0); diff --git a/src/libstore/build/local-derivation-goal.hh b/src/libstore/build/local-derivation-goal.hh index 88152a645..f25cb9424 100644 --- a/src/libstore/build/local-derivation-goal.hh +++ b/src/libstore/build/local-derivation-goal.hh @@ -106,7 +106,7 @@ struct LocalDerivationGoal : public DerivationGoal RedirectedOutputs redirectedOutputs; /** - * The outputs paths used during the build. + * The output paths used during the build. * * - Input-addressed derivations or fixed content-addressed outputs are * sometimes built when some of their outputs already exist, and can not diff --git a/src/libstore/builtins.hh b/src/libstore/builtins.hh index d201fb3ac..93558b49e 100644 --- a/src/libstore/builtins.hh +++ b/src/libstore/builtins.hh @@ -6,7 +6,13 @@ namespace nix { // TODO: make pluggable. -void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData); -void builtinUnpackChannel(const BasicDerivation & drv); +void builtinFetchurl( + const BasicDerivation & drv, + const std::map & outputs, + const std::string & netrcData); + +void builtinUnpackChannel( + const BasicDerivation & drv, + const std::map & outputs); } diff --git a/src/libstore/builtins/buildenv.cc b/src/libstore/builtins/buildenv.cc index 9283251ac..1ed7b39cc 100644 --- a/src/libstore/builtins/buildenv.cc +++ b/src/libstore/builtins/buildenv.cc @@ -161,7 +161,9 @@ void buildProfile(const Path & out, Packages && pkgs) debug("created %d symlinks in user environment", state.symlinks); } -void builtinBuildenv(const BasicDerivation & drv) +void builtinBuildenv( + const BasicDerivation & drv, + const std::map & outputs) { auto getAttr = [&](const std::string & name) { auto i = drv.env.find(name); @@ -169,7 +171,7 @@ void builtinBuildenv(const BasicDerivation & drv) return i->second; }; - Path out = getAttr("out"); + auto out = outputs.at("out"); createDirs(out); /* Convert the stuff we get from the environment 
back into a diff --git a/src/libstore/builtins/buildenv.hh b/src/libstore/builtins/buildenv.hh index b24633e27..8e112e176 100644 --- a/src/libstore/builtins/buildenv.hh +++ b/src/libstore/builtins/buildenv.hh @@ -45,6 +45,8 @@ typedef std::vector Packages; void buildProfile(const Path & out, Packages && pkgs); -void builtinBuildenv(const BasicDerivation & drv); +void builtinBuildenv( + const BasicDerivation & drv, + const std::map & outputs); } diff --git a/src/libstore/builtins/fetchurl.cc b/src/libstore/builtins/fetchurl.cc index a9f2e748e..4fb67f933 100644 --- a/src/libstore/builtins/fetchurl.cc +++ b/src/libstore/builtins/fetchurl.cc @@ -6,7 +6,10 @@ namespace nix { -void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData) +void builtinFetchurl( + const BasicDerivation & drv, + const std::map & outputs, + const std::string & netrcData) { /* Make the host's netrc data available. Too bad curl requires this to be stored in a file. It would be nice if we could just @@ -24,14 +27,8 @@ void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData) if (!dof) throw Error("'builtin:fetchurl' must be a fixed-output derivation"); - auto getAttr = [&](const std::string & name) { - auto i = drv.env.find(name); - if (i == drv.env.end()) throw Error("attribute '%s' missing", name); - return i->second; - }; - - Path storePath = getAttr("out"); - auto mainUrl = getAttr("url"); + auto storePath = outputs.at("out"); + auto mainUrl = drv.env.at("url"); bool unpack = getOr(drv.env, "unpack", "") == "1"; /* Note: have to use a fresh fileTransfer here because we're in diff --git a/src/libstore/builtins/unpack-channel.cc b/src/libstore/builtins/unpack-channel.cc index ba04bb16c..6f68d4c0b 100644 --- a/src/libstore/builtins/unpack-channel.cc +++ b/src/libstore/builtins/unpack-channel.cc @@ -3,7 +3,9 @@ namespace nix { -void builtinUnpackChannel(const BasicDerivation & drv) +void builtinUnpackChannel( + const BasicDerivation & drv, + const 
std::map & outputs) { auto getAttr = [&](const std::string & name) { auto i = drv.env.find(name); @@ -11,7 +13,7 @@ void builtinUnpackChannel(const BasicDerivation & drv) return i->second; }; - Path out = getAttr("out"); + auto out = outputs.at("out"); auto channelName = getAttr("channelName"); auto src = getAttr("src"); From 91557df4a78e47fdadcea59fbca7751511b73bf5 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Mon, 12 Feb 2024 11:16:12 -0500 Subject: [PATCH 078/164] Apply suggestions from code review Co-authored-by: Robert Hensing --- doc/manual/src/SUMMARY.md.in | 2 +- doc/manual/src/protocols/store-path.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/manual/src/SUMMARY.md.in b/doc/manual/src/SUMMARY.md.in index e6390c60a..d86372845 100644 --- a/doc/manual/src/SUMMARY.md.in +++ b/doc/manual/src/SUMMARY.md.in @@ -106,7 +106,7 @@ - [Architecture and Design](architecture/architecture.md) - [Protocols](protocols/index.md) - [Serving Tarball Flakes](protocols/tarball-fetcher.md) - - [Exact Store Path Specification](protocols/store-path.md) + - [Store Path Specification](protocols/store-path.md) - [Derivation "ATerm" file format](protocols/derivation-aterm.md) - [Glossary](glossary.md) - [Contributing](contributing/index.md) diff --git a/doc/manual/src/protocols/store-path.md b/doc/manual/src/protocols/store-path.md index d1c35b05e..e7bc050e7 100644 --- a/doc/manual/src/protocols/store-path.md +++ b/doc/manual/src/protocols/store-path.md @@ -47,7 +47,7 @@ where For either the outputs built from derivations, paths copied to the store hashed that area single file hashed directly, or the via a hash algorithm other than [SHA-256][sha-256]. - (in that case "source" is used; it's silly, but it's done that way for compatibility). + (in that case "source" is used; this is only necessary for compatibility). `` is the name of the output (usually, "out"). For content-addressed store objects, ``, is always "out". 
From ac1301ddfdc0d92a23378f2ea75b221740c15bab Mon Sep 17 00:00:00 2001 From: John Ericson Date: Mon, 12 Feb 2024 11:16:53 -0500 Subject: [PATCH 079/164] Convert store path "grammar" to EBNF --- doc/manual/src/protocols/store-path.md | 70 +++++++++++++++----------- 1 file changed, 40 insertions(+), 30 deletions(-) diff --git a/doc/manual/src/protocols/store-path.md b/doc/manual/src/protocols/store-path.md index e7bc050e7..d5dec77b5 100644 --- a/doc/manual/src/protocols/store-path.md +++ b/doc/manual/src/protocols/store-path.md @@ -5,66 +5,69 @@ This is the complete specification for how store paths are calculated. Regular users do *not* need to know this information --- store paths can be treated as black boxes computed from the properties of the store objects they refer to. But for those interested in exactly how Nix works, e.g. if they are reimplementing it, this information can be useful. -```bnf - ::= /- +```ebnf +store-path = store-dir "/" digest "-" name ``` where -- `` = base-32 representation of the first 160 bits of a [SHA-256] hash of `
`
+- `digest` = base-32 representation of the first 160 bits of a [SHA-256] hash of `pre`
 
   Th is :the hash part of the store name
 
-- `
` = the string `:sha256:::`;
+- `pre` = the string
+
+  ```ebnf
+  type ":" sha256 ":" inner-digest ":" store ":" name
+  ```
 
   Note that it includes the location of the store as well as the name to make sure that changes to either of those are reflected in the hash
   (e.g. you won't get `/nix/store/-name1` and `/nix/store/-name2`, or `/gnu/store/-name1`, with equal hash parts).
 
-- `` = the name of the store object.
+- `name` = the name of the store object.
 
-- `` = the [store directory](@docroot@/store/store-path.md#store-directory)
+- `store` = the [store directory](@docroot@/store/store-path.md#store-directory)
 
-- `` = one of:
+- `type` = one of:
 
-  - ```bnf
-    text:::...
+  - ```ebnf
+    "text" ( ":" store-path )*
     ```
 
     for encoded derivations written to the store.
-    ` ... ` are the store paths referenced by this path.
-    Those are encoded in the form described by ``.
+    The optional trailing store paths are the references of the store object.
 
-  - ```bnf
-    source:::...::self
+  - ```ebnf
+    "source" ( ":" store-path )*
     ```
 
     For paths copied to the store and hashed via a [Nix Archive (NAR)] and [SHA-256][sha-256].
     Just like in the text case, we can have the store objects referenced by their paths.
     Additionally, we can have an optional `:self` label to denote self reference.
 
-  - ```bnf
-    output:
+  - ```ebnf
+    "output:" id
     ```
 
     For either the outputs built from derivations,
     paths copied to the store hashed that area single file hashed directly, or the via a hash algorithm other than [SHA-256][sha-256].
     (in that case "source" is used; this is only necessary for compatibility).
 
-    `` is the name of the output (usually, "out").
-    For content-addressed store objects, ``, is always "out".
+    `id` is the name of the output (usually, "out").
+    For content-addressed store objects, `id`, is always "out".
 
-- `` = base-16 representation of a SHA-256 hash of ``
+- `inner-digest` = base-16 representation of a SHA-256 hash of `inner-pre`
 
-- `` = one of the following based on ``:
+- `inner-pre` = one of the following based on `type`:
 
-  - if `` = `text:...`:
+  - if `type` = `"text:" ...`:
 
     the string written to the resulting store path.
 
-  - if `` = `source:...`:
+  - if `type` = `"source:" ...`:
 
     the the hash of the [Nix Archive (NAR)] serialization of the [file system object](@docroot@/store/file-system-object.md) of the store object.
 
-  - if `` = `output:`:
+  - if `type` = `"output:" id`:
 
     - For input-addressed derivation outputs:
 
@@ -72,31 +75,38 @@ where
 
     - For content-addressed store paths:
 
-      the string `fixed:out:::`, where
+      the string
 
-      - `` = one of:
+      ```ebnf
+      "fixed:out:" rec algo ":" hash ":"
+      ```
+
+      where
+
+      - `rec` = one of:
 
         - `r:` hashes of the for [Nix Archive (NAR)] (arbitrary file system object) serialization
 
         - `` (empty string) for hashes of the flat (single file) serialization
 
-      - `` = `md5`, `sha1` or `sha256`
+      - `algo` = `md5`, `sha1` or `sha256`
 
-      - `` = base-16 representation of the path or flat hash of the contents of the path (or expected contents of the path for fixed-output derivations).
+      - `hash` = base-16 representation of the path or flat hash of the contents of the path (or expected contents of the path for fixed-output derivations).
 
-      Note that `` = `out`, regardless of the name part of the store path.
-      Also note that NAR + SHA-256 must not use this case, and instead must use the `` = `source:...` case.
+      Note that `id` = `out`, regardless of the name part of the store path.
+      Also note that NAR + SHA-256 must not use this case, and instead must use the `type` = `"source:" ...` case.
 
 [Nix Archive (NAR)]: @docroot@/glossary.md#gloss-NAR
 [sha-256]: https://en.m.wikipedia.org/wiki/SHA-256
 
 ## Historical Note
 
-The `` = `source:...` and `` = `output:out` grammars technically overlap, in that both can represent data hashed by its SHA-256 NAR serialization.
+The `type` = `"source:" ...` and `type` = `"output:out"` grammars technically overlap in purpose,
+in that both can represent data hashed by its SHA-256 NAR serialization.
 
 The original reason for this way of computing names was to prevent name collisions (for security).
 For instance, the thinking was that it shouldn't be feasible to come up with a derivation whose output path collides with the path for a copied source.
-The former would have an `` starting with `output:out:`, while the latter would have an `` starting with `source:`.
+The former would have an `inner-pre` starting with `output:out:`, while the latter would have an `inner-pre` starting with `source:`.
 
 Since `64519cfd657d024ae6e2bb74cb21ad21b886fd2a` (2008), however, it was decided that separting derivation-produced vs manually-hashed content-addressed data like this was not useful.
 Now, data this is to be SHA-256 + NAR-serialization content-addressed always uses the `source:...` construction, regardless of how it was produced (manually or by derivation).

From 95190e68ed8f6c152f8ba01b2da7baeacb342c0e Mon Sep 17 00:00:00 2001
From: John Ericson 
Date: Mon, 12 Feb 2024 11:21:54 -0500
Subject: [PATCH 080/164] =?UTF-8?q?Mention=20the=20use=20of=20Extended=20B?=
 =?UTF-8?q?ackus=E2=80=93Naur=20form?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 doc/manual/src/protocols/store-path.md | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/doc/manual/src/protocols/store-path.md b/doc/manual/src/protocols/store-path.md
index d5dec77b5..57da808f9 100644
--- a/doc/manual/src/protocols/store-path.md
+++ b/doc/manual/src/protocols/store-path.md
@@ -2,6 +2,8 @@
 
 This is the complete specification for how store paths are calculated.
 
+The format of this specification is close to [Extended Backus–Naur form](https://en.wikipedia.org/wiki/Extended_Backus%E2%80%93Naur_form), but must deviate for a few things such as hash functions which we treat as bidirectional for specification purposes.
+
 Regular users do *not* need to know this information --- store paths can be treated as black boxes computed from the properties of the store objects they refer to.
 But for those interested in exactly how Nix works, e.g. if they are reimplementing it, this information can be useful.
 

From 30f6b0f9c55407207bd421b9a5446b455acd1e8e Mon Sep 17 00:00:00 2001
From: John Ericson 
Date: Mon, 12 Feb 2024 11:22:08 -0500
Subject: [PATCH 081/164] `pre` -> `fingerprint` in store path grammar

As suggested by @roberth in
https://github.com/NixOS/nix/pull/9295#discussion_r1486402040.

Thanks!
---
 doc/manual/src/protocols/store-path.md | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/doc/manual/src/protocols/store-path.md b/doc/manual/src/protocols/store-path.md
index 57da808f9..649bb4c45 100644
--- a/doc/manual/src/protocols/store-path.md
+++ b/doc/manual/src/protocols/store-path.md
@@ -12,11 +12,11 @@ store-path = store-dir "/" digest "-" name
 ```
 where
 
-- `digest` = base-32 representation of the first 160 bits of a [SHA-256] hash of `pre`
+- `digest` = base-32 representation of the first 160 bits of a [SHA-256] hash of `fingerprint`
 
   Th is :the hash part of the store name
 
-- `pre` = the string
+- `fingerprint` = the string
 
   ```ebnf
   type ":" sha256 ":" inner-digest ":" store ":" name
@@ -57,9 +57,9 @@ where
     `id` is the name of the output (usually, "out").
     For content-addressed store objects, `id`, is always "out".
 
-- `inner-digest` = base-16 representation of a SHA-256 hash of `inner-pre`
+- `inner-digest` = base-16 representation of a SHA-256 hash of `inner-fingerprint`
 
-- `inner-pre` = one of the following based on `type`:
+- `inner-fingerprint` = one of the following based on `type`:
 
   - if `type` = `"text:" ...`:
 
@@ -108,7 +108,7 @@ in that both can represent data hashed by its SHA-256 NAR serialization.
 
 The original reason for this way of computing names was to prevent name collisions (for security).
 For instance, the thinking was that it shouldn't be feasible to come up with a derivation whose output path collides with the path for a copied source.
-The former would have an `inner-pre` starting with `output:out:`, while the latter would have an `inner-pre` starting with `source:`.
+The former would have an `inner-fingerprint` starting with `output:out:`, while the latter would have an `inner-fingerprint` starting with `source:`.
 
 Since `64519cfd657d024ae6e2bb74cb21ad21b886fd2a` (2008), however, it was decided that separting derivation-produced vs manually-hashed content-addressed data like this was not useful.
 Now, data this is to be SHA-256 + NAR-serialization content-addressed always uses the `source:...` construction, regardless of how it was produced (manually or by derivation).

From 0862d7ce57f7e16cf7f8ded3db7586a20fa8da28 Mon Sep 17 00:00:00 2001
From: John Ericson 
Date: Mon, 12 Feb 2024 11:26:02 -0500
Subject: [PATCH 082/164] Move around non-terminals

---
 doc/manual/src/protocols/store-path.md | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/doc/manual/src/protocols/store-path.md b/doc/manual/src/protocols/store-path.md
index 649bb4c45..61f9d1604 100644
--- a/doc/manual/src/protocols/store-path.md
+++ b/doc/manual/src/protocols/store-path.md
@@ -12,9 +12,13 @@ store-path = store-dir "/" digest "-" name
 ```
 where
 
+- `name` = the name of the store object.
+
+- `store-dir` = the [store directory](@docroot@/store/store-path.md#store-directory)
+
 - `digest` = base-32 representation of the first 160 bits of a [SHA-256] hash of `fingerprint`
 
-  Th is :the hash part of the store name
+  This the hash part of the store name
 
 - `fingerprint` = the string
 
@@ -25,10 +29,6 @@ where
   Note that it includes the location of the store as well as the name to make sure that changes to either of those are reflected in the hash
   (e.g. you won't get `/nix/store/-name1` and `/nix/store/-name2`, or `/gnu/store/-name1`, with equal hash parts).
 
-- `name` = the name of the store object.
-
-- `store` = the [store directory](@docroot@/store/store-path.md#store-directory)
-
 - `type` = one of:
 
   - ```ebnf

From 4c3e4d6d7167e4fbd284eb00063882b8442e3e99 Mon Sep 17 00:00:00 2001
From: John Ericson 
Date: Mon, 12 Feb 2024 11:33:06 -0500
Subject: [PATCH 083/164] Sections, EBNF tweaks

---
 doc/manual/src/protocols/store-path.md | 34 +++++++++++++++++---------
 1 file changed, 22 insertions(+), 12 deletions(-)

diff --git a/doc/manual/src/protocols/store-path.md b/doc/manual/src/protocols/store-path.md
index 61f9d1604..ff075b3b6 100644
--- a/doc/manual/src/protocols/store-path.md
+++ b/doc/manual/src/protocols/store-path.md
@@ -7,6 +7,8 @@ The format of this specification is close to [Extended Backus–Naur form](https
 Regular users do *not* need to know this information --- store paths can be treated as black boxes computed from the properties of the store objects they refer to.
 But for those interested in exactly how Nix works, e.g. if they are reimplementing it, this information can be useful.
 
+## Store path proper
+
 ```ebnf
 store-path = store-dir "/" digest "-" name
 ```
@@ -20,10 +22,10 @@ where
 
   This the hash part of the store name
 
-- `fingerprint` = the string
+## Fingerprint
 
-  ```ebnf
-  type ":" sha256 ":" inner-digest ":" store ":" name
+- ```ebnf
+  fingerprint = type ":" sha256 ":" inner-digest ":" store ":" name
   ```
 
   Note that it includes the location of the store as well as the name to make sure that changes to either of those are reflected in the hash
@@ -32,14 +34,14 @@ where
 - `type` = one of:
 
   - ```ebnf
-    "text" ( ":" store-path )*
+    | "text" ( ":" store-path )*
     ```
 
     for encoded derivations written to the store.
     The optional trailing store paths are the references of the store object.
 
   - ```ebnf
-    "source" ( ":" store-path )*
+    | "source" ( ":" store-path )*
     ```
 
     For paths copied to the store and hashed via a [Nix Archive (NAR)] and [SHA-256][sha-256].
@@ -47,7 +49,7 @@ where
     Additionally, we can have an optional `:self` label to denote self reference.
 
   - ```ebnf
-    "output:" id
+    | "output:" id
     ```
 
     For either the outputs built from derivations,
@@ -59,6 +61,8 @@ where
 
 - `inner-digest` = base-16 representation of a SHA-256 hash of `inner-fingerprint`
 
+## Inner fingerprint
+
 - `inner-fingerprint` = one of the following based on `type`:
 
   - if `type` = `"text:" ...`:
@@ -77,8 +81,6 @@ where
 
     - For content-addressed store paths:
 
-      the string
-
       ```ebnf
       "fixed:out:" rec algo ":" hash ":"
       ```
@@ -87,15 +89,23 @@ where
 
       - `rec` = one of:
 
-        - `r:` hashes of the for [Nix Archive (NAR)] (arbitrary file system object) serialization
+        - ```ebnf
+          | "r:"
+          ```
+          for hashes of the [Nix Archive (NAR)] (arbitrary file system object) serialization
 
-        - `` (empty string) for hashes of the flat (single file) serialization
+        - ```ebnf
+          |
+          ```
+          (empty string) for hashes of the flat (single file) serialization
 
-      - `algo` = `md5`, `sha1` or `sha256`
+      - ```ebnf
+        algo = "md5" | "sha1" | "sha256"
+        ```
 
       - `hash` = base-16 representation of the path or flat hash of the contents of the path (or expected contents of the path for fixed-output derivations).
 
-      Note that `id` = `out`, regardless of the name part of the store path.
+      Note that `id` = `"out"`, regardless of the name part of the store path.
       Also note that NAR + SHA-256 must not use this case, and instead must use the `type` = `"source:" ...` case.
 
 [Nix Archive (NAR)]: @docroot@/glossary.md#gloss-NAR

From c873a140d711eb1c9f268f0903021bb68e764684 Mon Sep 17 00:00:00 2001
From: John Ericson 
Date: Mon, 12 Feb 2024 11:34:54 -0500
Subject: [PATCH 084/164] Apply suggestions from code review

Co-authored-by: Robert Hensing 
---
 doc/manual/src/protocols/store-path.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/doc/manual/src/protocols/store-path.md b/doc/manual/src/protocols/store-path.md
index ff075b3b6..2fc4bf7af 100644
--- a/doc/manual/src/protocols/store-path.md
+++ b/doc/manual/src/protocols/store-path.md
@@ -120,7 +120,7 @@ The original reason for this way of computing names was to prevent name collisio
 For instance, the thinking was that it shouldn't be feasible to come up with a derivation whose output path collides with the path for a copied source.
 The former would have an `inner-fingerprint` starting with `output:out:`, while the latter would have an `inner-fingerprint` starting with `source:`.
 
-Since `64519cfd657d024ae6e2bb74cb21ad21b886fd2a` (2008), however, it was decided that separting derivation-produced vs manually-hashed content-addressed data like this was not useful.
+Since `64519cfd657d024ae6e2bb74cb21ad21b886fd2a` (2008), however, it was decided that separating derivation-produced vs manually-hashed content-addressed data like this was not useful.
 Now, data this is to be SHA-256 + NAR-serialization content-addressed always uses the `source:...` construction, regardless of how it was produced (manually or by derivation).
 This allows freely switching between using [fixed-output derivations](@docroot@/glossary.md#gloss-fixed-output-derivation) for fetching, and fetching out-of-band and then manually adding.
 It also removes the ambiguity from the grammar.

From 5169f5f4d9743fa10a8578625d2c290141949d54 Mon Sep 17 00:00:00 2001
From: John Ericson 
Date: Mon, 12 Feb 2024 12:01:54 -0500
Subject: [PATCH 085/164] Apply suggestions from code review

Co-authored-by: Robert Hensing 
---
 doc/manual/src/protocols/store-path.md | 2 +-
 src/libstore/store-api.cc              | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/doc/manual/src/protocols/store-path.md b/doc/manual/src/protocols/store-path.md
index 2fc4bf7af..1f619e6a2 100644
--- a/doc/manual/src/protocols/store-path.md
+++ b/doc/manual/src/protocols/store-path.md
@@ -111,7 +111,7 @@ where
 [Nix Archive (NAR)]: @docroot@/glossary.md#gloss-NAR
 [sha-256]: https://en.m.wikipedia.org/wiki/SHA-256
 
-## Historical Note
+### Historical Note
 
 The `type` = `"source:" ...` and `type` = `"output:out"` grammars technically overlap in purpose,
 in that both can represent data hashed by its SHA-256 NAR serialization.
diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc
index 1fb6cdce7..4238cbbf5 100644
--- a/src/libstore/store-api.cc
+++ b/src/libstore/store-api.cc
@@ -69,7 +69,7 @@ StorePath Store::followLinksToStorePath(std::string_view path) const
 The exact specification of store paths is in `protocols/store-path.md`
 in the Nix manual. These few functions implement that specification.
 
-If changes do these functions go behind mere implementation changes but
+If changes to these functions go beyond mere implementation changes i.e.
 also update the user-visible behavior, please update the specification
 to match.
 */

From 898fd1e48d117c7cd28bbc04cd230450f1df9adc Mon Sep 17 00:00:00 2001
From: John Ericson 
Date: Mon, 12 Feb 2024 12:04:37 -0500
Subject: [PATCH 086/164] Update doc/manual/src/protocols/store-path.md

---
 doc/manual/src/protocols/store-path.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/doc/manual/src/protocols/store-path.md b/doc/manual/src/protocols/store-path.md
index 1f619e6a2..595c7a10e 100644
--- a/doc/manual/src/protocols/store-path.md
+++ b/doc/manual/src/protocols/store-path.md
@@ -121,6 +121,6 @@ For instance, the thinking was that it shouldn't be feasible to come up with a d
 The former would have an `inner-fingerprint` starting with `output:out:`, while the latter would have an `inner-fingerprint` starting with `source:`.
 
 Since `64519cfd657d024ae6e2bb74cb21ad21b886fd2a` (2008), however, it was decided that separating derivation-produced vs manually-hashed content-addressed data like this was not useful.
-Now, data this is to be SHA-256 + NAR-serialization content-addressed always uses the `source:...` construction, regardless of how it was produced (manually or by derivation).
+Now, data that is content-addressed with SHA-256 + NAR-serialization always uses the `source:...` construction, regardless of how it was produced (manually or by derivation).
 This allows freely switching between using [fixed-output derivations](@docroot@/glossary.md#gloss-fixed-output-derivation) for fetching, and fetching out-of-band and then manually adding.
 It also removes the ambiguity from the grammar.

From f29d2a9d11c6b1c4cb8011e45dc45d99e4d572bd Mon Sep 17 00:00:00 2001
From: John Ericson 
Date: Mon, 12 Feb 2024 12:30:28 -0500
Subject: [PATCH 087/164] Small EBNF fix

---
 doc/manual/src/protocols/store-path.md | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/doc/manual/src/protocols/store-path.md b/doc/manual/src/protocols/store-path.md
index 595c7a10e..fcf8038fc 100644
--- a/doc/manual/src/protocols/store-path.md
+++ b/doc/manual/src/protocols/store-path.md
@@ -95,11 +95,11 @@ where
           hashes of the for [Nix Archive (NAR)] (arbitrary file system object) serialization
 
         - ```ebnf
-          |
+          | ""
           ```
           (empty string) for hashes of the flat (single file) serialization
 
-      - ```ebf
+      - ```ebnf
         algo = "md5" | "sha1" | "sha256"
         ```
 

From 64cbd4c05a413eae55cde784594472f921fc7367 Mon Sep 17 00:00:00 2001
From: Anton Samokhvalov 
Date: Mon, 12 Feb 2024 23:37:40 +0300
Subject: [PATCH 088/164] Update nar-info-disk-cache.cc

fix case when asserts are no-ops, as in release builds
---
 src/libstore/nar-info-disk-cache.cc | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/libstore/nar-info-disk-cache.cc b/src/libstore/nar-info-disk-cache.cc
index 310105c75..07beb8acb 100644
--- a/src/libstore/nar-info-disk-cache.cc
+++ b/src/libstore/nar-info-disk-cache.cc
@@ -209,7 +209,7 @@ public:
 
             {
                 auto r(state->insertCache.use()(uri)(time(0))(storeDir)(wantMassQuery)(priority));
-                assert(r.next());
+                if (!r.next()) { abort(); }
                 ret.id = (int) r.getInt(0);
             }
 

From 64a076fe0678ee46fbec1446df1fcfbb713cfdf6 Mon Sep 17 00:00:00 2001
From: "Travis A. Everett" 
Date: Tue, 13 Feb 2024 01:18:08 -0600
Subject: [PATCH 089/164] install-darwin: fix symbolic perms for install cmd

The symbolic form in use here doesn't seem to have an effect
in either the BSD or coreutils install commands, leaving the
daemon plist with empty permissions. This seems to cause its
own problems.

I think I've got the right symbolic syntax now :)
---
 scripts/install-darwin-multi-user.sh | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/scripts/install-darwin-multi-user.sh b/scripts/install-darwin-multi-user.sh
index 766f81bde..24c9052f9 100644
--- a/scripts/install-darwin-multi-user.sh
+++ b/scripts/install-darwin-multi-user.sh
@@ -102,7 +102,7 @@ poly_extra_try_me_commands() {
 poly_configure_nix_daemon_service() {
     task "Setting up the nix-daemon LaunchDaemon"
     _sudo "to set up the nix-daemon as a LaunchDaemon" \
-          /usr/bin/install -m -rw-r--r-- "/nix/var/nix/profiles/default$NIX_DAEMON_DEST" "$NIX_DAEMON_DEST"
+          /usr/bin/install -m "u=rw,go=r" "/nix/var/nix/profiles/default$NIX_DAEMON_DEST" "$NIX_DAEMON_DEST"
 
     _sudo "to load the LaunchDaemon plist for nix-daemon" \
           launchctl load /Library/LaunchDaemons/org.nixos.nix-daemon.plist

From ce19338f9fa4e8fa1fea7faf33c0f2c384e590dd Mon Sep 17 00:00:00 2001
From: Valentin Gagarin 
Date: Tue, 13 Feb 2024 13:30:17 +0100
Subject: [PATCH 090/164] update glossary entry on store types

the interesting information is on the proper pages, and is now presented
a bit more prominently.

the paragraph was a bit confusing to read, also because an anchor link
to an inline definition was in the middle of the sentence. "local store"
now has its own glossary entry.
---
 doc/manual/src/glossary.md | 23 +++++++++++++----------
 1 file changed, 13 insertions(+), 10 deletions(-)

diff --git a/doc/manual/src/glossary.md b/doc/manual/src/glossary.md
index 13b2906f7..51f1e3a71 100644
--- a/doc/manual/src/glossary.md
+++ b/doc/manual/src/glossary.md
@@ -59,18 +59,21 @@
 
 - [store]{#gloss-store}
 
-  A collection of store objects, with operations to manipulate that collection.
-  See [Nix store](./store/index.md) for details.
+  A collection of [store objects][store object], with operations to manipulate that collection.
+  See [Nix Store](./store/index.md) for details.
 
-  There are many types of stores.
-  See [`nix help-stores`](@docroot@/command-ref/new-cli/nix3-help-stores.md) for a complete list.
-
-  From the perspective of the location where Nix is invoked, the Nix store can be  referred to _local_ or _remote_.
-  Only a [local store]{#gloss-local-store} exposes a location in the file system of the machine where Nix is invoked that allows access to store objects, typically `/nix/store`.
-  Local stores can be used for building [derivations](#gloss-derivation).
-  See [Local Store](@docroot@/command-ref/new-cli/nix3-help-stores.md#local-store) for details.
+  There are many types of stores, see [Store Types](./store/types/index.md) for details.
 
   [store]: #gloss-store
+
+- [local store]{#gloss-local-store}
+
+  From the perspective of the location where Nix is invoked, the Nix [store] can be referred to as _local_ or _remote_.
+  Only a local store exposes a file system directory, typically `/nix/store`, to allow operating system processes to directly access store objects.
+  Local stores can be used for building [derivations][derivation].
+
+  See [Local Store](@docroot@/command-ref/new-cli/nix3-help-stores.md#local-store) for details.
+
   [local store]: #gloss-local-store
 
 - [chroot store]{#gloss-chroot-store}
@@ -87,7 +90,7 @@
 
 - [store path]{#gloss-store-path}
 
-  The location of a [store object](@docroot@/store/index.md#store-object) in the file system, i.e., an immediate child of the Nix store directory.
+  The location of a [store object] in the file system, i.e., an immediate child of the Nix store directory.
 
   > **Example**
   >

From e37d50289509dcac2303bc4de7065879dd58c731 Mon Sep 17 00:00:00 2001
From: Valentin Gagarin 
Date: Tue, 13 Feb 2024 13:34:51 +0100
Subject: [PATCH 091/164] add instructions to wipe the substituter lookup cache
 (#9498)

* add instructions to wipe the substituter lookup cache
---
 src/libstore/globals.hh | 15 +++++++++++----
 1 file changed, 11 insertions(+), 4 deletions(-)

diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh
index 070e252b6..941adba78 100644
--- a/src/libstore/globals.hh
+++ b/src/libstore/globals.hh
@@ -793,10 +793,17 @@ public:
     Setting ttlNegativeNarInfoCache{
         this, 3600, "narinfo-cache-negative-ttl",
         R"(
-          The TTL in seconds for negative lookups. If a store path is queried
-          from a substituter but was not found, there will be a negative
-          lookup cached in the local disk cache database for the specified
-          duration.
+          The TTL in seconds for negative lookups.
+          If a store path is queried from a [substituter](#conf-substituters) but was not found, there will be a negative lookup cached in the local disk cache database for the specified duration.
+
+          Set to `0` to force updating the lookup cache.
+
+          To wipe the lookup cache completely:
+
+          ```shell-session
+          $ rm $HOME/.cache/nix/binary-cache-v*.sqlite*
+          # rm /root/.cache/nix/binary-cache-v*.sqlite*
+          ```
         )"};
 
     Setting ttlPositiveNarInfoCache{

From fd82ba0985aefc2a5498045f0caf16f8b2566cf1 Mon Sep 17 00:00:00 2001
From: Valentin Gagarin 
Date: Tue, 13 Feb 2024 14:13:56 +0100
Subject: [PATCH 092/164] extract reference documentation on remote builds
 (#9526)

- move all reference documentation to the `builders` configuration setting
- reword documentation on machine specification, add examples
- disable showing the default value, as it rendered as `@/dummy/machines`, which is wrong
- highlight the examples
- link to the configuration docs for distributed builds
- builder -> build machine

Co-authored-by: Janik H 
---
 doc/manual/redirects.js                       |   2 +-
 .../src/advanced-topics/distributed-builds.md | 101 ++-------------
 doc/manual/src/contributing/hacking.md        |   4 +-
 doc/manual/src/glossary.md                    |   2 +-
 .../src/language/advanced-attributes.md       |   2 +-
 doc/manual/src/language/derivations.md        |   2 +-
 src/libstore/build/derivation-goal.cc         |  10 +-
 src/libstore/build/worker.cc                  |  22 +++-
 src/libstore/globals.hh                       | 118 +++++++++++++++++-
 9 files changed, 152 insertions(+), 111 deletions(-)

diff --git a/doc/manual/redirects.js b/doc/manual/redirects.js
index d04f32b49..27ab1853c 100644
--- a/doc/manual/redirects.js
+++ b/doc/manual/redirects.js
@@ -18,7 +18,7 @@ const redirects = {
     "chap-tuning-cores-and-jobs": "advanced-topics/cores-vs-jobs.html",
     "chap-diff-hook": "advanced-topics/diff-hook.html",
     "check-dirs-are-unregistered": "advanced-topics/diff-hook.html#check-dirs-are-unregistered",
-    "chap-distributed-builds": "advanced-topics/distributed-builds.html",
+    "chap-distributed-builds": "command-ref/conf-file.html#conf-builders",
     "chap-post-build-hook": "advanced-topics/post-build-hook.html",
     "chap-post-build-hook-caveats": "advanced-topics/post-build-hook.html#implementation-caveats",
     "chap-writing-nix-expressions": "language/index.html",
diff --git a/doc/manual/src/advanced-topics/distributed-builds.md b/doc/manual/src/advanced-topics/distributed-builds.md
index 507c5ecb7..52acd039c 100644
--- a/doc/manual/src/advanced-topics/distributed-builds.md
+++ b/doc/manual/src/advanced-topics/distributed-builds.md
@@ -36,16 +36,8 @@ error: cannot connect to 'mac'
 then you need to ensure that the `PATH` of non-interactive login shells
 contains Nix.
 
-> **Warning**
->
-> If you are building via the Nix daemon, it is the Nix daemon user account (that is, `root`) that should have SSH access to a user (not necessarily `root`) on the remote machine.
->
-> If you can’t or don’t want to configure `root` to be able to access the remote machine, you can use a private Nix store instead by passing e.g. `--store ~/my-nix` when running a Nix command from the local machine.
-
-The list of remote machines can be specified on the command line or in
-the Nix configuration file. The former is convenient for testing. For
-example, the following command allows you to build a derivation for
-`x86_64-darwin` on a Linux machine:
+The [list of remote build machines](@docroot@/command-ref/conf-file.md#conf-builders) can be specified on the command line or in the Nix configuration file.
+For example, the following command allows you to build a derivation for `x86_64-darwin` on a Linux machine:
 
 ```console
 $ uname
@@ -60,97 +52,20 @@ $ cat ./result
 Darwin
 ```
 
-It is possible to specify multiple builders separated by a semicolon or
-a newline, e.g.
+It is possible to specify multiple build machines separated by a semicolon or a newline, e.g.
 
 ```console
   --builders 'ssh://mac x86_64-darwin ; ssh://beastie x86_64-freebsd'
 ```
 
-Each machine specification consists of the following elements, separated
-by spaces. Only the first element is required. To leave a field at its
-default, set it to `-`.
-
-1.  The URI of the remote store in the format
-    `ssh://[username@]hostname`, e.g. `ssh://nix@mac` or `ssh://mac`.
-    For backward compatibility, `ssh://` may be omitted. The hostname
-    may be an alias defined in your `~/.ssh/config`.
-
-2.  A comma-separated list of Nix platform type identifiers, such as
-    `x86_64-darwin`. It is possible for a machine to support multiple
-    platform types, e.g., `i686-linux,x86_64-linux`. If omitted, this
-    defaults to the local platform type.
-
-3.  The SSH identity file to be used to log in to the remote machine. If
-    omitted, SSH will use its regular identities.
-
-4.  The maximum number of builds that Nix will execute in parallel on
-    the machine. Typically this should be equal to the number of CPU
-    cores. For instance, the machine `itchy` in the example will execute
-    up to 8 builds in parallel.
-
-5.  The “speed factor”, indicating the relative speed of the machine. If
-    there are multiple machines of the right type, Nix will prefer the
-    fastest, taking load into account.
-
-6.  A comma-separated list of *supported features*. If a derivation has
-    the `requiredSystemFeatures` attribute, then Nix will only perform
-    the derivation on a machine that has the specified features. For
-    instance, the attribute
-
-    ```nix
-    requiredSystemFeatures = [ "kvm" ];
-    ```
-
-    will cause the build to be performed on a machine that has the `kvm`
-    feature.
-
-7.  A comma-separated list of *mandatory features*. A machine will only
-    be used to build a derivation if all of the machine’s mandatory
-    features appear in the derivation’s `requiredSystemFeatures`
-    attribute.
-
-8.  The (base64-encoded) public host key of the remote machine. If omitted, SSH
-    will use its regular known-hosts file. Specifically, the field is calculated
-    via `base64 -w0 /etc/ssh/ssh_host_ed25519_key.pub`.
-
-For example, the machine specification
-
-    nix@scratchy.labs.cs.uu.nl  i686-linux      /home/nix/.ssh/id_scratchy_auto        8 1 kvm
-    nix@itchy.labs.cs.uu.nl     i686-linux      /home/nix/.ssh/id_scratchy_auto        8 2
-    nix@poochie.labs.cs.uu.nl   i686-linux      /home/nix/.ssh/id_scratchy_auto        1 2 kvm benchmark
-
-specifies several machines that can perform `i686-linux` builds.
-However, `poochie` will only do builds that have the attribute
-
-```nix
-requiredSystemFeatures = [ "benchmark" ];
-```
-
-or
-
-```nix
-requiredSystemFeatures = [ "benchmark" "kvm" ];
-```
-
-`itchy` cannot do builds that require `kvm`, but `scratchy` does support
-such builds. For regular builds, `itchy` will be preferred over
-`scratchy` because it has a higher speed factor.
-
-Remote builders can also be configured in `nix.conf`, e.g.
+Remote build machines can also be configured in [`nix.conf`](@docroot@/command-ref/conf-file.md), e.g.
 
     builders = ssh://mac x86_64-darwin ; ssh://beastie x86_64-freebsd
 
-Finally, remote builders can be configured in a separate configuration
-file included in `builders` via the syntax `@file`. For example,
+Finally, remote build machines can be configured in a separate configuration
+file included in `builders` via the syntax `@/path/to/file`. For example,
 
     builders = @/etc/nix/machines
 
-causes the list of machines in `/etc/nix/machines` to be included. (This
-is the default.)
-
-If you want the builders to use caches, you likely want to set the
-option `builders-use-substitutes` in your local `nix.conf`.
-
-To build only on remote builders and disable building on the local
-machine, you can use the option `--max-jobs 0`.
+causes the list of machines in `/etc/nix/machines` to be included.
+(This is the default.)
diff --git a/doc/manual/src/contributing/hacking.md b/doc/manual/src/contributing/hacking.md
index 9e2470859..6c9be3635 100644
--- a/doc/manual/src/contributing/hacking.md
+++ b/doc/manual/src/contributing/hacking.md
@@ -147,10 +147,10 @@ Nix can be built for various platforms, as specified in [`flake.nix`]:
 
 In order to build Nix for a different platform than the one you're currently
 on, you need a way for your current Nix installation to build code for that
-platform. Common solutions include [remote builders] and [binary format emulation]
+platform. Common solutions include [remote build machines] and [binary format emulation]
 (only supported on NixOS).
 
-[remote builders]: ../advanced-topics/distributed-builds.md
+[remote builders]: @docroot@/language/derivations.md#attr-builder
 [binary format emulation]: https://nixos.org/manual/nixos/stable/options.html#opt-boot.binfmt.emulatedSystems
 
 Given such a setup, executing the build only requires selecting the respective attribute.
diff --git a/doc/manual/src/glossary.md b/doc/manual/src/glossary.md
index 13b2906f7..359f727d7 100644
--- a/doc/manual/src/glossary.md
+++ b/doc/manual/src/glossary.md
@@ -37,7 +37,7 @@
   This can be achieved by:
   - Fetching a pre-built [store object] from a [substituter]
   - Running the [`builder`](@docroot@/language/derivations.md#attr-builder) executable as specified in the corresponding [derivation]
-  - Delegating to a [remote builder](@docroot@/advanced-topics/distributed-builds.html) and retrieving the outputs
+  - Delegating to a [remote machine](@docroot@/command-ref/conf-file.md#conf-builders) and retrieving the outputs
   
 
   See [`nix-store --realise`](@docroot@/command-ref/nix-store/realise.md) for a detailed description of the algorithm.
diff --git a/doc/manual/src/language/advanced-attributes.md b/doc/manual/src/language/advanced-attributes.md
index 5a6c00cd4..7306fc182 100644
--- a/doc/manual/src/language/advanced-attributes.md
+++ b/doc/manual/src/language/advanced-attributes.md
@@ -257,7 +257,7 @@ Derivations can declare some infrequently used optional attributes.
     of the environment (typically, a few hundred kilobyte).
 
   - [`preferLocalBuild`]{#adv-attr-preferLocalBuild}\
-    If this attribute is set to `true` and [distributed building is enabled](../advanced-topics/distributed-builds.md), then, if possible, the derivation will be built locally instead of being forwarded to a remote machine.
+    If this attribute is set to `true` and [distributed building is enabled](@docroot@/command-ref/conf-file.md#conf-builders), then, if possible, the derivation will be built locally instead of being forwarded to a remote machine.
     This is useful for derivations that are cheapest to build locally.
 
   - [`allowSubstitutes`]{#adv-attr-allowSubstitutes}\
diff --git a/doc/manual/src/language/derivations.md b/doc/manual/src/language/derivations.md
index cbb30d074..75f824a34 100644
--- a/doc/manual/src/language/derivations.md
+++ b/doc/manual/src/language/derivations.md
@@ -36,7 +36,7 @@ It outputs an attribute set, and produces a [store derivation] as a side effect
   The system type on which the [`builder`](#attr-builder) executable is meant to be run.
 
   A necessary condition for Nix to build derivations locally is that the `system` attribute matches the current [`system` configuration option].
-  It can automatically [build on other platforms](../advanced-topics/distributed-builds.md) by forwarding build requests to other machines.
+  It can automatically [build on other platforms](@docroot@/language/derivations.md#attr-builder) by forwarding build requests to other machines.
 
   [`system` configuration option]: @docroot@/command-ref/conf-file.md#conf-system
 
diff --git a/src/libstore/build/derivation-goal.cc b/src/libstore/build/derivation-goal.cc
index 1b326ee13..29bf2852f 100644
--- a/src/libstore/build/derivation-goal.cc
+++ b/src/libstore/build/derivation-goal.cc
@@ -780,9 +780,13 @@ void DerivationGoal::tryToBuild()
 
 void DerivationGoal::tryLocalBuild() {
     throw Error(
-        "unable to build with a primary store that isn't a local store; "
-        "either pass a different '--store' or enable remote builds."
-        "\nhttps://nixos.org/manual/nix/stable/advanced-topics/distributed-builds.html");
+        R"(
+        Unable to build with a primary store that isn't a local store;
+        either pass a different '--store' or enable remote builds.
+
+        For more information check 'man nix.conf' and search for '/machines'.
+        )"
+    );
 }
 
 
diff --git a/src/libstore/build/worker.cc b/src/libstore/build/worker.cc
index d57e22393..3a34f4006 100644
--- a/src/libstore/build/worker.cc
+++ b/src/libstore/build/worker.cc
@@ -331,13 +331,23 @@ void Worker::run(const Goals & _topGoals)
             if (awake.empty() && 0U == settings.maxBuildJobs)
             {
                 if (getMachines().empty())
-                   throw Error("unable to start any build; either increase '--max-jobs' "
-                            "or enable remote builds."
-                            "\nhttps://nixos.org/manual/nix/stable/advanced-topics/distributed-builds.html");
+                   throw Error(
+                        R"(
+                        Unable to start any build;
+                        either increase '--max-jobs' or enable remote builds.
+
+                        For more information run 'man nix.conf' and search for '/machines'.
+                        )"
+                    );
                 else
-                   throw Error("unable to start any build; remote machines may not have "
-                            "all required system features."
-                            "\nhttps://nixos.org/manual/nix/stable/advanced-topics/distributed-builds.html");
+                   throw Error(
+                        R"(
+                        Unable to start any build;
+                        remote machines may not have all required system features.
+
+                        For more information run 'man nix.conf' and search for '/machines'.
+                        )"
+                    );
 
             }
             assert(!awake.empty());
diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh
index 941adba78..fa2dc8681 100644
--- a/src/libstore/globals.hh
+++ b/src/libstore/globals.hh
@@ -270,9 +270,121 @@ public:
     Setting builders{
         this, "@" + nixConfDir + "/machines", "builders",
         R"(
-          A semicolon-separated list of build machines.
-          For the exact format and examples, see [the manual chapter on remote builds](../advanced-topics/distributed-builds.md)
-        )"};
+          A semicolon- or newline-separated list of build machines.
+
+          In addition to the [usual ways of setting configuration options](@docroot@/command-ref/conf-file.md), the value can be read from a file by prefixing its absolute path with `@`.
+
+          > **Example**
+          >
+          > This is the default setting:
+          >
+          > ```
+          > builders = @/etc/nix/machines
+          > ```
+
+          Each machine specification consists of the following elements, separated by spaces.
+          Only the first element is required.
+          To leave a field at its default, set it to `-`.
+
+          1. The URI of the remote store in the format `ssh://[username@]hostname`.
+
+             > **Example**
+             >
+             > `ssh://nix@mac`
+
+             For backward compatibility, `ssh://` may be omitted.
+             The hostname may be an alias defined in `~/.ssh/config`.
+
+          2. A comma-separated list of [Nix system types](@docroot@/contributing/hacking.md#system-type).
+             If omitted, this defaults to the local platform type.
+
+             > **Example**
+             >
+             > `aarch64-darwin`
+
+             It is possible for a machine to support multiple platform types.
+
+             > **Example**
+             >
+             > `i686-linux,x86_64-linux`
+
+          3. The SSH identity file to be used to log in to the remote machine.
+             If omitted, SSH will use its regular identities.
+
+             > **Example**
+             >
+             > `/home/user/.ssh/id_mac`
+
+          4. The maximum number of builds that Nix will execute in parallel on the machine.
+             Typically this should be equal to the number of CPU cores.
+
+          5. The “speed factor”, indicating the relative speed of the machine as a positive integer.
+             If there are multiple machines of the right type, Nix will prefer the fastest, taking load into account.
+
+          6. A comma-separated list of supported [system features](#conf-system-features).
+
+             A machine will only be used to build a derivation if all the features in the derivation's [`requiredSystemFeatures`](@docroot@/language/advanced-attributes.html#adv-attr-requiredSystemFeatures) attribute are supported by that machine.
+
+          7. A comma-separated list of required [system features](#conf-system-features).
+
+             A machine will only be used to build a derivation if all of the machine’s required features appear in the derivation’s [`requiredSystemFeatures`](@docroot@/language/advanced-attributes.html#adv-attr-requiredSystemFeatures) attribute.
+
+          8. The (base64-encoded) public host key of the remote machine.
+             If omitted, SSH will use its regular `known_hosts` file.
+
+             The value for this field can be obtained via `base64 -w0`.
+
+          > **Example**
+          >
+          > Multiple builders specified on the command line:
+          >
+          > ```console
+          > --builders 'ssh://mac x86_64-darwin ; ssh://beastie x86_64-freebsd'
+          > ```
+
+          > **Example**
+          >
+          > This specifies several machines that can perform `i686-linux` builds:
+          >
+          > ```
+          > nix@scratchy.labs.cs.uu.nl i686-linux /home/nix/.ssh/id_scratchy 8 1 kvm
+          > nix@itchy.labs.cs.uu.nl    i686-linux /home/nix/.ssh/id_scratchy 8 2
+          > nix@poochie.labs.cs.uu.nl  i686-linux /home/nix/.ssh/id_scratchy 1 2 kvm benchmark
+          > ```
+          >
+          > However, `poochie` will only build derivations that have the attribute
+          >
+          > ```nix
+          > requiredSystemFeatures = [ "benchmark" ];
+          > ```
+          >
+          > or
+          >
+          > ```nix
+          > requiredSystemFeatures = [ "benchmark" "kvm" ];
+          > ```
+          >
+          > `itchy` cannot do builds that require `kvm`, but `scratchy` does support such builds.
+          > For regular builds, `itchy` will be preferred over `scratchy` because it has a higher speed factor.
+
+          For Nix to use substituters, the calling user must be in the [`trusted-users`](#conf-trusted-users) list.
+
+          > **Note**
+          >
+          > A build machine must be accessible via SSH and have Nix installed.
+          > `nix` must be available in `$PATH` for the user connecting over SSH.
+
+          > **Warning**
+          >
+          > If you are building via the Nix daemon (default), the Nix daemon user account on the local machine (that is, `root`) requires access to a user account on the remote machine (not necessarily `root`).
+          >
+          > If you can’t or don’t want to configure `root` to be able to access the remote machine, set [`store`](#conf-store) to any [local store](@docroot@/store/types/local-store.html), e.g. by passing `--store /tmp` to the command on the local machine.
+
+          To build only on remote machines and disable local builds, set [`max-jobs`](#conf-max-jobs) to 0.
+
+          If you want the remote machines to use substituters, set [`builders-use-substitutes`](#conf-builders-use-substituters) to `true`.
+        )",
+        {}, false};
 
     Setting alwaysAllowSubstitutes{
         this, false, "always-allow-substitutes",

From bb63bd50e6d817e5ca52c1d1d21232164a64f993 Mon Sep 17 00:00:00 2001
From: Eelco Dolstra 
Date: Tue, 13 Feb 2024 14:14:20 +0100
Subject: [PATCH 093/164] : Restore support for "impure =
 true"

---
 src/libstore/builtins/fetchurl.cc      | 8 ++++----
 tests/functional/fetchurl.sh           | 2 +-
 tests/functional/impure-derivations.sh | 4 ++++
 3 files changed, 9 insertions(+), 5 deletions(-)

diff --git a/src/libstore/builtins/fetchurl.cc b/src/libstore/builtins/fetchurl.cc
index a9f2e748e..559efcc17 100644
--- a/src/libstore/builtins/fetchurl.cc
+++ b/src/libstore/builtins/fetchurl.cc
@@ -20,9 +20,8 @@ void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData)
     if (!out)
         throw Error("'builtin:fetchurl' requires an 'out' output");
 
-    auto dof = std::get_if(&out->raw);
-    if (!dof)
-        throw Error("'builtin:fetchurl' must be a fixed-output derivation");
+    if (!(drv.type().isFixed() || drv.type().isImpure()))
+        throw Error("'builtin:fetchurl' must be a fixed-output or impure derivation");
 
     auto getAttr = [&](const std::string & name) {
         auto i = drv.env.find(name);
@@ -67,7 +66,8 @@ void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData)
     };
 
     /* Try the hashed mirrors first. */
-    if (dof->ca.method.getFileIngestionMethod() == FileIngestionMethod::Flat)
+    auto dof = std::get_if(&out->raw);
+    if (dof && dof->ca.method.getFileIngestionMethod() == FileIngestionMethod::Flat)
         for (auto hashedMirror : settings.hashedMirrors.get())
             try {
                 if (!hasSuffix(hashedMirror, "/")) hashedMirror += '/';
diff --git a/tests/functional/fetchurl.sh b/tests/functional/fetchurl.sh
index 5a05cc5e1..a3620f52b 100644
--- a/tests/functional/fetchurl.sh
+++ b/tests/functional/fetchurl.sh
@@ -83,4 +83,4 @@ test -L $outPath/symlink
 requireDaemonNewerThan "2.20"
 expected=100
 if [[ -v NIX_DAEMON_PACKAGE ]]; then expected=1; fi # work around the daemon not returning a 100 status correctly
-expectStderr $expected nix-build --expr '{ url }: builtins.derivation { name = "nix-cache-info"; system = "x86_64-linux"; builder = "builtin:fetchurl"; inherit url; outputHashMode = "flat"; }' --argstr url file://$narxz 2>&1 | grep 'must be a fixed-output derivation'
+expectStderr $expected nix-build --expr '{ url }: builtins.derivation { name = "nix-cache-info"; system = "x86_64-linux"; builder = "builtin:fetchurl"; inherit url; outputHashMode = "flat"; }' --argstr url file://$narxz 2>&1 | grep 'must be a fixed-output or impure derivation'
diff --git a/tests/functional/impure-derivations.sh b/tests/functional/impure-derivations.sh
index 39d053a04..54ed6f5dd 100644
--- a/tests/functional/impure-derivations.sh
+++ b/tests/functional/impure-derivations.sh
@@ -63,3 +63,7 @@ path5=$(nix build -L --no-link --json --file ./impure-derivations.nix contentAdd
 path6=$(nix build -L --no-link --json --file ./impure-derivations.nix inputAddressedAfterCA | jq -r .[].outputs.out)
 [[ $(< $path6) = X ]]
 [[ $(< $TEST_ROOT/counter) = 5 ]]
+
+# Test nix/fetchurl.nix.
+path7=$(nix build -L --no-link --print-out-paths --expr "import  { impure = true; url = file://$PWD/impure-derivations.sh; }")
+cmp $path7 $PWD/impure-derivations.sh

From 39c353f6fa40a5e0ace9e2c3e69848108944845c Mon Sep 17 00:00:00 2001
From: Valentin Gagarin 
Date: Tue, 13 Feb 2024 14:52:51 +0100
Subject: [PATCH 094/164] reword description of the `cores` setting (#9522)

* reword description of the `cores` setting

- be precise about the `builder` executable
- clearly distinguish between `builder` and job parallelism
- clarify the role of `mkDerivation` in the example
- remove prose for the default, it's shown programmatically
- mention relation to `max-jobs`
---
 src/libstore/globals.hh | 23 +++++++++++++++--------
 1 file changed, 15 insertions(+), 8 deletions(-)

diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh
index fa2dc8681..8330d6571 100644
--- a/src/libstore/globals.hh
+++ b/src/libstore/globals.hh
@@ -180,14 +180,21 @@ public:
         getDefaultCores(),
         "cores",
         R"(
-          Sets the value of the `NIX_BUILD_CORES` environment variable in the
-          invocation of builders. Builders can use this variable at their
-          discretion to control the maximum amount of parallelism. For
-          instance, in Nixpkgs, if the derivation attribute
-          `enableParallelBuilding` is set to `true`, the builder passes the
-          `-jN` flag to GNU Make. It can be overridden using the `--cores`
-          command line switch and defaults to `1`. The value `0` means that
-          the builder should use all available CPU cores in the system.
+          Sets the value of the `NIX_BUILD_CORES` environment variable in the [invocation of the `builder` executable](@docroot@/language/derivations.md#builder-execution) of a derivation.
+          The `builder` executable can use this variable to control its own maximum amount of parallelism.
+
+          
+          For instance, in Nixpkgs, if the attribute `enableParallelBuilding` for the `mkDerivation` build helper is set to `true`, it will pass the `-j${NIX_BUILD_CORES}` flag to GNU Make.
+
+          The value `0` means that the `builder` should use all available CPU cores in the system.
+
+          > **Note**
+          >
+          > The number of parallel local Nix build jobs is independently controlled with the [`max-jobs`](#conf-max-jobs) setting.
         )",
         {"build-cores"},
         // Don't document the machine-specific default value

From 8bebf9607cbf07fbf0f98d835f20df1f9736d5ff Mon Sep 17 00:00:00 2001
From: John Ericson 
Date: Tue, 13 Feb 2024 09:37:45 -0500
Subject: [PATCH 095/164] Split `hash.sh` test in two

Converting hashes and hashing files are pretty separate tasks, and more
test parallelism is better.
---
 tests/functional/hash-convert.sh           | 105 +++++++++++++++++++++
 tests/functional/{hash.sh => hash-path.sh} | 104 --------------------
 tests/functional/local.mk                  |   3 +-
 3 files changed, 107 insertions(+), 105 deletions(-)
 create mode 100644 tests/functional/hash-convert.sh
 rename tests/functional/{hash.sh => hash-path.sh} (51%)

diff --git a/tests/functional/hash-convert.sh b/tests/functional/hash-convert.sh
new file mode 100644
index 000000000..9b3afc10b
--- /dev/null
+++ b/tests/functional/hash-convert.sh
@@ -0,0 +1,105 @@
+source common.sh
+
+# Conversion with `nix hash` `nix-hash` and `nix hash convert`
+try3() {
+    # $1 = hash algo
+    # $2 = expected hash in base16
+    # $3 = expected hash in base32
+    # $4 = expected hash in base64
+    h64=$(nix hash convert --hash-algo "$1" --to base64 "$2")
+    [ "$h64" = "$4" ]
+    h64=$(nix-hash --type "$1" --to-base64 "$2")
+    [ "$h64" = "$4" ]
+    # Deprecated experiment
+    h64=$(nix hash to-base64 --type "$1" "$2")
+    [ "$h64" = "$4" ]
+
+    sri=$(nix hash convert --hash-algo "$1" --to sri "$2")
+    [ "$sri" = "$1-$4" ]
+    sri=$(nix-hash --type "$1" --to-sri "$2")
+    [ "$sri" = "$1-$4" ]
+    sri=$(nix hash to-sri --type "$1" "$2")
+    [ "$sri" = "$1-$4" ]
+    h32=$(nix hash convert --hash-algo "$1" --to base32 "$2")
+    [ "$h32" = "$3" ]
+    h32=$(nix-hash --type "$1" --to-base32 "$2")
+    [ "$h32" = "$3" ]
+    h32=$(nix hash to-base32 --type "$1" "$2")
+    [ "$h32" = "$3" ]
+    h16=$(nix-hash --type "$1" --to-base16 "$h32")
+    [ "$h16" = "$2" ]
+
+    h16=$(nix hash convert --hash-algo "$1" --to base16 "$h64")
+    [ "$h16" = "$2" ]
+    h16=$(nix hash to-base16 --type "$1" "$h64")
+    [ "$h16" = "$2" ]
+    h16=$(nix hash convert --to base16 "$sri")
+    [ "$h16" = "$2" ]
+    h16=$(nix hash to-base16 "$sri")
+    [ "$h16" = "$2" ]
+
+    #
+    # Converting from SRI
+    #
+
+    # Input hash algo auto-detected from SRI and output defaults to SRI as well.
+    sri=$(nix hash convert "$1-$4")
+    [ "$sri" = "$1-$4" ]
+
+    sri=$(nix hash convert --from sri "$1-$4")
+    [ "$sri" = "$1-$4" ]
+
+    sri=$(nix hash convert --to sri "$1-$4")
+    [ "$sri" = "$1-$4" ]
+
+    sri=$(nix hash convert --from sri --to sri "$1-$4")
+    [ "$sri" = "$1-$4" ]
+
+    sri=$(nix hash convert --to base64 "$1-$4")
+    [ "$sri" = "$4" ]
+
+    #
+    # Auto-detecting the input from algo and length.
+    #
+
+    sri=$(nix hash convert --hash-algo "$1" "$2")
+    [ "$sri" = "$1-$4" ]
+    sri=$(nix hash convert --hash-algo "$1" "$3")
+    [ "$sri" = "$1-$4" ]
+    sri=$(nix hash convert --hash-algo "$1" "$4")
+    [ "$sri" = "$1-$4" ]
+
+    sri=$(nix hash convert --hash-algo "$1" "$2")
+    [ "$sri" = "$1-$4" ]
+    sri=$(nix hash convert --hash-algo "$1" "$3")
+    [ "$sri" = "$1-$4" ]
+    sri=$(nix hash convert --hash-algo "$1" "$4")
+    [ "$sri" = "$1-$4" ]
+
+    #
+    # Asserting input format succeeds.
+    #
+
+    sri=$(nix hash convert --hash-algo "$1" --from base16 "$2")
+    [ "$sri" = "$1-$4" ]
+    sri=$(nix hash convert --hash-algo "$1" --from nix32 "$3")
+    [ "$sri" = "$1-$4" ]
+    sri=$(nix hash convert --hash-algo "$1" --from base64 "$4")
+    [ "$sri" = "$1-$4" ]
+
+    #
+    # Asserting input format fails.
+    #
+
+    fail=$(nix hash convert --hash-algo "$1" --from nix32 "$2" 2>&1 || echo "exit: $?")
+    [[ "$fail" == *"error: input hash"*"exit: 1" ]]
+    fail=$(nix hash convert --hash-algo "$1" --from base16 "$3" 2>&1 || echo "exit: $?")
+    [[ "$fail" == *"error: input hash"*"exit: 1" ]]
+    fail=$(nix hash convert --hash-algo "$1" --from nix32 "$4" 2>&1 || echo "exit: $?")
+    [[ "$fail" == *"error: input hash"*"exit: 1" ]]
+
+}
+
+try3 sha1 "800d59cfcd3c05e900cb4e214be48f6b886a08df" "vw46m23bizj4n8afrc0fj19wrp7mj3c0" "gA1Zz808BekAy04hS+SPa4hqCN8="
+try3 sha256 "ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad" "1b8m03r63zqhnjf7l5wnldhh7c134ap5vpj0850ymkq1iyzicy5s" "ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0="
+try3 sha512 "204a8fc6dda82f0a0ced7beb8e08a41657c16ef468b228a8279be331a703c33596fd15c13b1b07f9aa1d3bea57789ca031ad85c7a71dd70354ec631238ca3445" "12k9jiq29iyqm03swfsgiw5mlqs173qazm3n7daz43infy12pyrcdf30fkk3qwv4yl2ick8yipc2mqnlh48xsvvxl60lbx8vp38yji0" "IEqPxt2oLwoM7XvrjgikFlfBbvRosiioJ5vjMacDwzWW/RXBOxsH+aodO+pXeJygMa2Fx6cd1wNU7GMSOMo0RQ=="
diff --git a/tests/functional/hash.sh b/tests/functional/hash-path.sh
similarity index 51%
rename from tests/functional/hash.sh
rename to tests/functional/hash-path.sh
index ff270076e..6d096b29b 100644
--- a/tests/functional/hash.sh
+++ b/tests/functional/hash-path.sh
@@ -80,107 +80,3 @@ try2 md5 "20f3ffe011d4cfa7d72bfabef7882836"
 rm $TEST_ROOT/hash-path/hello
 ln -s x $TEST_ROOT/hash-path/hello
 try2 md5 "f78b733a68f5edbdf9413899339eaa4a"
-
-# Conversion with `nix hash` `nix-hash` and `nix hash convert`
-try3() {
-    # $1 = hash algo
-    # $2 = expected hash in base16
-    # $3 = expected hash in base32
-    # $4 = expected hash in base64
-    h64=$(nix hash convert --hash-algo "$1" --to base64 "$2")
-    [ "$h64" = "$4" ]
-    h64=$(nix-hash --type "$1" --to-base64 "$2")
-    [ "$h64" = "$4" ]
-    # Deprecated experiment
-    h64=$(nix hash to-base64 --type "$1" "$2")
-    [ "$h64" = "$4" ]
-
-    sri=$(nix hash convert --hash-algo "$1" --to sri "$2")
-    [ "$sri" = "$1-$4" ]
-    sri=$(nix-hash --type "$1" --to-sri "$2")
-    [ "$sri" = "$1-$4" ]
-    sri=$(nix hash to-sri --type "$1" "$2")
-    [ "$sri" = "$1-$4" ]
-    h32=$(nix hash convert --hash-algo "$1" --to base32 "$2")
-    [ "$h32" = "$3" ]
-    h32=$(nix-hash --type "$1" --to-base32 "$2")
-    [ "$h32" = "$3" ]
-    h32=$(nix hash to-base32 --type "$1" "$2")
-    [ "$h32" = "$3" ]
-    h16=$(nix-hash --type "$1" --to-base16 "$h32")
-    [ "$h16" = "$2" ]
-
-    h16=$(nix hash convert --hash-algo "$1" --to base16 "$h64")
-    [ "$h16" = "$2" ]
-    h16=$(nix hash to-base16 --type "$1" "$h64")
-    [ "$h16" = "$2" ]
-    h16=$(nix hash convert --to base16 "$sri")
-    [ "$h16" = "$2" ]
-    h16=$(nix hash to-base16 "$sri")
-    [ "$h16" = "$2" ]
-
-    #
-    # Converting from SRI
-    #
-
-    # Input hash algo auto-detected from SRI and output defaults to SRI as well.
-    sri=$(nix hash convert "$1-$4")
-    [ "$sri" = "$1-$4" ]
-
-    sri=$(nix hash convert --from sri "$1-$4")
-    [ "$sri" = "$1-$4" ]
-
-    sri=$(nix hash convert --to sri "$1-$4")
-    [ "$sri" = "$1-$4" ]
-
-    sri=$(nix hash convert --from sri --to sri "$1-$4")
-    [ "$sri" = "$1-$4" ]
-
-    sri=$(nix hash convert --to base64 "$1-$4")
-    [ "$sri" = "$4" ]
-
-    #
-    # Auto-detecting the input from algo and length.
-    #
-
-    sri=$(nix hash convert --hash-algo "$1" "$2")
-    [ "$sri" = "$1-$4" ]
-    sri=$(nix hash convert --hash-algo "$1" "$3")
-    [ "$sri" = "$1-$4" ]
-    sri=$(nix hash convert --hash-algo "$1" "$4")
-    [ "$sri" = "$1-$4" ]
-
-    sri=$(nix hash convert --hash-algo "$1" "$2")
-    [ "$sri" = "$1-$4" ]
-    sri=$(nix hash convert --hash-algo "$1" "$3")
-    [ "$sri" = "$1-$4" ]
-    sri=$(nix hash convert --hash-algo "$1" "$4")
-    [ "$sri" = "$1-$4" ]
-
-    #
-    # Asserting input format succeeds.
-    #
-
-    sri=$(nix hash convert --hash-algo "$1" --from base16 "$2")
-    [ "$sri" = "$1-$4" ]
-    sri=$(nix hash convert --hash-algo "$1" --from nix32 "$3")
-    [ "$sri" = "$1-$4" ]
-    sri=$(nix hash convert --hash-algo "$1" --from base64 "$4")
-    [ "$sri" = "$1-$4" ]
-
-    #
-    # Asserting input format fails.
-    #
-
-    fail=$(nix hash convert --hash-algo "$1" --from nix32 "$2" 2>&1 || echo "exit: $?")
-    [[ "$fail" == *"error: input hash"*"exit: 1" ]]
-    fail=$(nix hash convert --hash-algo "$1" --from base16 "$3" 2>&1 || echo "exit: $?")
-    [[ "$fail" == *"error: input hash"*"exit: 1" ]]
-    fail=$(nix hash convert --hash-algo "$1" --from nix32 "$4" 2>&1 || echo "exit: $?")
-    [[ "$fail" == *"error: input hash"*"exit: 1" ]]
-
-}
-
-try3 sha1 "800d59cfcd3c05e900cb4e214be48f6b886a08df" "vw46m23bizj4n8afrc0fj19wrp7mj3c0" "gA1Zz808BekAy04hS+SPa4hqCN8="
-try3 sha256 "ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad" "1b8m03r63zqhnjf7l5wnldhh7c134ap5vpj0850ymkq1iyzicy5s" "ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0="
-try3 sha512 "204a8fc6dda82f0a0ced7beb8e08a41657c16ef468b228a8279be331a703c33596fd15c13b1b07f9aa1d3bea57789ca031ad85c7a71dd70354ec631238ca3445" "12k9jiq29iyqm03swfsgiw5mlqs173qazm3n7daz43infy12pyrcdf30fkk3qwv4yl2ick8yipc2mqnlh48xsvvxl60lbx8vp38yji0" "IEqPxt2oLwoM7XvrjgikFlfBbvRosiioJ5vjMacDwzWW/RXBOxsH+aodO+pXeJygMa2Fx6cd1wNU7GMSOMo0RQ=="
diff --git a/tests/functional/local.mk b/tests/functional/local.mk
index f369c7c2c..18eb887cd 100644
--- a/tests/functional/local.mk
+++ b/tests/functional/local.mk
@@ -47,7 +47,8 @@ nix_tests = \
   optimise-store.sh \
   substitute-with-invalid-ca.sh \
   signing.sh \
-  hash.sh \
+  hash-convert.sh \
+  hash-path.sh \
   gc-non-blocking.sh \
   check.sh \
   nix-shell.sh \

From 5b69409f6b479ff28870c0502682882ee14a9dc8 Mon Sep 17 00:00:00 2001
From: Valentin Gagarin 
Date: Tue, 13 Feb 2024 16:08:44 +0100
Subject: [PATCH 096/164] only refer to the local store page

---
 doc/manual/src/glossary.md | 4 ----
 1 file changed, 4 deletions(-)

diff --git a/doc/manual/src/glossary.md b/doc/manual/src/glossary.md
index 51f1e3a71..6126b7e47 100644
--- a/doc/manual/src/glossary.md
+++ b/doc/manual/src/glossary.md
@@ -68,10 +68,6 @@
 
 - [local store]{#gloss-local-store}
 
-  From the perspective of the location where Nix is invoked, the Nix [store] can be referred to _local_ or _remote_.
-  Only a local store exposes a file system directory, typically `/nix/store`, to allow operating system processes to directly access store objects.
-  Local stores can be used for building [derivations][derivation].
-
   See [Local Store](@docroot@/command-ref/new-cli/nix3-help-stores.md#local-store) for details.
 
   [local store]: #gloss-local-store

From bb2189235100a551ab416ff301bef6efd3adbc66 Mon Sep 17 00:00:00 2001
From: Eelco Dolstra 
Date: Wed, 7 Feb 2024 15:41:10 +0100
Subject: [PATCH 097/164] *.in files: Depend on config.status

---
 mk/templates.mk | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/mk/templates.mk b/mk/templates.mk
index 866bdc17f..d5dae61c7 100644
--- a/mk/templates.mk
+++ b/mk/templates.mk
@@ -10,10 +10,10 @@ endef
 
 ifneq ($(MAKECMDGOALS), clean)
 
-$(buildprefix)%.h: %.h.in
+$(buildprefix)%.h: %.h.in $(buildprefix)config.status
 	$(trace-gen) rm -f $@ && cd $(buildprefixrel) && ./config.status --quiet --header=$(@:$(buildprefix)%=%)
 
-$(buildprefix)%: %.in
+$(buildprefix)%: %.in $(buildprefix)config.status
 	$(trace-gen) rm -f $@ && cd $(buildprefixrel) && ./config.status --quiet --file=$(@:$(buildprefix)%=%)
 
 endif

From f27205f743fcfd05126f5fa7cc83eefea7873f1f Mon Sep 17 00:00:00 2001
From: Valentin Gagarin 
Date: Tue, 13 Feb 2024 16:25:07 +0100
Subject: [PATCH 098/164] redirect local and chroot store to main page

---
 doc/manual/redirects.js    |  6 +++++-
 doc/manual/src/glossary.md | 11 +----------
 2 files changed, 6 insertions(+), 11 deletions(-)

diff --git a/doc/manual/redirects.js b/doc/manual/redirects.js
index d04f32b49..e25b17c76 100644
--- a/doc/manual/redirects.js
+++ b/doc/manual/redirects.js
@@ -358,7 +358,11 @@ const redirects = {
     "one-time-setup": "testing.html#one-time-setup",
     "using-the-ci-generated-installer-for-manual-testing": "testing.html#using-the-ci-generated-installer-for-manual-testing",
     "characterization-testing": "#characterisation-testing-unit",
-  }
+  },
+  "glossary.html": {
+    "gloss-local-store": "store/types/local-store.html",
+    "gloss-chroot-store": "store/types/local-store.html",
+  },
 };
 
 // the following code matches the current page's URL against the set of redirects.
diff --git a/doc/manual/src/glossary.md b/doc/manual/src/glossary.md
index 6126b7e47..d257a8189 100644
--- a/doc/manual/src/glossary.md
+++ b/doc/manual/src/glossary.md
@@ -66,16 +66,6 @@
 
   [store]: #gloss-store
 
-- [local store]{#gloss-local-store}
-
-  See [Local Store](@docroot@/command-ref/new-cli/nix3-help-stores.md#local-store) for details.
-
-  [local store]: #gloss-local-store
-
-- [chroot store]{#gloss-chroot-store}
-
-  A [local store] whose canonical path is anything other than `/nix/store`.
-
 - [binary cache]{#gloss-binary-cache}
 
   A *binary cache* is a Nix store which uses a different format: its
@@ -242,6 +232,7 @@
   - All paths in the store path's [closure] are valid.
 
   [validity]: #gloss-validity
+  [local store]: @docroot@/store/types/local-store.md
 
 - [user environment]{#gloss-user-env}
 

From 41dd9857c7dbd8a2df9c8da4b7cf8e0399088452 Mon Sep 17 00:00:00 2001
From: John Ericson 
Date: Tue, 13 Feb 2024 09:54:07 -0500
Subject: [PATCH 099/164] Proper `parse` and `render` functions for
 `FileIngestionMethod` and `ContentAddressMethod`

No outward facing behavior is changed.

Older methods with the same names that operate on a method + algo pair (for
old-style `<method>:algo`) are renamed to `*WithAlgo`.

The functions are unit-tested in the same way the names for the hash
algorithms are tested.
---
 src/libstore/content-address.cc            | 31 +++++++++++++++----
 src/libstore/content-address.hh            | 22 +++++++++++---
 src/libstore/daemon.cc                     |  2 +-
 src/libstore/derivations.cc                | 12 ++++----
 src/libstore/remote-store.cc               |  2 +-
 src/libutil/file-content-address.cc        | 25 ++++++++++++++++
 src/libutil/file-content-address.hh        | 17 +++++++++++
 src/nix/add-to-store.cc                    | 13 +-------
 tests/unit/libstore/content-address.cc     | 35 ++++++++++++++++++++++
 tests/unit/libutil/file-content-address.cc | 33 ++++++++++++++++++++
 10 files changed, 162 insertions(+), 30 deletions(-)
 create mode 100644 tests/unit/libstore/content-address.cc
 create mode 100644 tests/unit/libutil/file-content-address.cc

diff --git a/src/libstore/content-address.cc b/src/libstore/content-address.cc
index fc408f5af..2091f8e02 100644
--- a/src/libstore/content-address.cc
+++ b/src/libstore/content-address.cc
@@ -4,7 +4,7 @@
 
 namespace nix {
 
-std::string makeFileIngestionPrefix(FileIngestionMethod m)
+std::string_view makeFileIngestionPrefix(FileIngestionMethod m)
 {
     switch (m) {
     case FileIngestionMethod::Flat:
@@ -16,10 +16,29 @@ std::string makeFileIngestionPrefix(FileIngestionMethod m)
     }
 }
 
-std::string ContentAddressMethod::renderPrefix() const
+std::string_view ContentAddressMethod::render() const
 {
     return std::visit(overloaded {
-        [](TextIngestionMethod) -> std::string { return "text:"; },
+        [](TextIngestionMethod) -> std::string_view { return "text"; },
+        [](FileIngestionMethod m2) {
+             /* Not prefixed for back compat with things that couldn't produce text before. */
+            return renderFileIngestionMethod(m2);
+        },
+    }, raw);
+}
+
+ContentAddressMethod ContentAddressMethod::parse(std::string_view m)
+{
+    if (m == "text")
+        return TextIngestionMethod {};
+    else
+        return parseFileIngestionMethod(m);
+}
+
+std::string_view ContentAddressMethod::renderPrefix() const
+{
+    return std::visit(overloaded {
+        [](TextIngestionMethod) -> std::string_view { return "text:"; },
         [](FileIngestionMethod m2) {
              /* Not prefixed for back compat with things that couldn't produce text before. */
             return makeFileIngestionPrefix(m2);
@@ -38,7 +57,7 @@ ContentAddressMethod ContentAddressMethod::parsePrefix(std::string_view & m)
     return FileIngestionMethod::Flat;
 }
 
-std::string ContentAddressMethod::render(HashAlgorithm ha) const
+std::string ContentAddressMethod::renderWithAlgo(HashAlgorithm ha) const
 {
     return std::visit(overloaded {
         [&](const TextIngestionMethod & th) {
@@ -133,7 +152,7 @@ ContentAddress ContentAddress::parse(std::string_view rawCa)
     };
 }
 
-std::pair ContentAddressMethod::parse(std::string_view caMethod)
+std::pair ContentAddressMethod::parseWithAlgo(std::string_view caMethod)
 {
     std::string asPrefix = std::string{caMethod} + ":";
     // parseContentAddressMethodPrefix takes its argument by reference
@@ -155,7 +174,7 @@ std::string renderContentAddress(std::optional ca)
 
 std::string ContentAddress::printMethodAlgo() const
 {
-    return method.renderPrefix()
+    return std::string { method.renderPrefix() }
         + printHashAlgo(hash.algo);
 }
 
diff --git a/src/libstore/content-address.hh b/src/libstore/content-address.hh
index f0973412b..80538df50 100644
--- a/src/libstore/content-address.hh
+++ b/src/libstore/content-address.hh
@@ -36,7 +36,7 @@ struct TextIngestionMethod : std::monostate { };
  * Compute the prefix to the hash algorithm which indicates how the
  * files were ingested.
  */
-std::string makeFileIngestionPrefix(FileIngestionMethod m);
+std::string_view makeFileIngestionPrefix(FileIngestionMethod m);
 
 /**
  * An enumeration of all the ways we can content-address store objects.
@@ -59,6 +59,20 @@ struct ContentAddressMethod
 
     MAKE_WRAPPER_CONSTRUCTOR(ContentAddressMethod);
 
+    /**
+     * Parse a content addressing method (name).
+     *
+     * The inverse of `render`.
+     */
+    static ContentAddressMethod parse(std::string_view rawCaMethod);
+
+    /**
+     * Render a content addressing method (name).
+     *
+     * The inverse of `parse`.
+     */
+    std::string_view render() const;
+
     /**
      * Parse the prefix tag which indicates how the files
      * were ingested, with the fixed output case not prefixed for back
@@ -74,12 +88,12 @@ struct ContentAddressMethod
      *
      * The rough inverse of `parsePrefix()`.
      */
-    std::string renderPrefix() const;
+    std::string_view renderPrefix() const;
 
     /**
      * Parse a content addressing method and hash type.
      */
-    static std::pair parse(std::string_view rawCaMethod);
+    static std::pair parseWithAlgo(std::string_view rawCaMethod);
 
     /**
      * Render a content addressing method and hash type in a
@@ -87,7 +101,7 @@ struct ContentAddressMethod
      *
      * The rough inverse of `parse()`.
      */
-    std::string render(HashAlgorithm ht) const;
+    std::string renderWithAlgo(HashAlgorithm ht) const;
 
     /**
      * Get the underlying way to content-address file system objects.
diff --git a/src/libstore/daemon.cc b/src/libstore/daemon.cc
index 8db93fa39..cf5020dfe 100644
--- a/src/libstore/daemon.cc
+++ b/src/libstore/daemon.cc
@@ -400,7 +400,7 @@ static void performOp(TunnelLogger * logger, ref store,
             logger->startWork();
             auto pathInfo = [&]() {
                 // NB: FramedSource must be out of scope before logger->stopWork();
-                auto [contentAddressMethod, hashAlgo_] = ContentAddressMethod::parse(camStr);
+                auto [contentAddressMethod, hashAlgo_] = ContentAddressMethod::parseWithAlgo(camStr);
                 auto hashAlgo = hashAlgo_; // work around clang bug
                 FramedSource source(from);
                 // TODO these two steps are essentially RemoteStore::addCAToStore. Move it up to Store.
diff --git a/src/libstore/derivations.cc b/src/libstore/derivations.cc
index 393806652..36042c06c 100644
--- a/src/libstore/derivations.cc
+++ b/src/libstore/derivations.cc
@@ -601,7 +601,7 @@ std::string Derivation::unparse(const StoreDirConfig & store, bool maskOutputs,
             },
             [&](const DerivationOutput::CAFloating & dof) {
                 s += ','; printUnquotedString(s, "");
-                s += ','; printUnquotedString(s, dof.method.renderPrefix() + printHashAlgo(dof.hashAlgo));
+                s += ','; printUnquotedString(s, std::string { dof.method.renderPrefix() } + printHashAlgo(dof.hashAlgo));
                 s += ','; printUnquotedString(s, "");
             },
             [&](const DerivationOutput::Deferred &) {
@@ -612,7 +612,7 @@ std::string Derivation::unparse(const StoreDirConfig & store, bool maskOutputs,
             [&](const DerivationOutput::Impure & doi) {
                 // FIXME
                 s += ','; printUnquotedString(s, "");
-                s += ','; printUnquotedString(s, doi.method.renderPrefix() + printHashAlgo(doi.hashAlgo));
+                s += ','; printUnquotedString(s, std::string { doi.method.renderPrefix() } + printHashAlgo(doi.hashAlgo));
                 s += ','; printUnquotedString(s, "impure");
             }
         }, i.second.raw);
@@ -984,7 +984,7 @@ void writeDerivation(Sink & out, const StoreDirConfig & store, const BasicDeriva
             },
             [&](const DerivationOutput::CAFloating & dof) {
                 out << ""
-                    << (dof.method.renderPrefix() + printHashAlgo(dof.hashAlgo))
+                    << (std::string { dof.method.renderPrefix() } + printHashAlgo(dof.hashAlgo))
                     << "";
             },
             [&](const DerivationOutput::Deferred &) {
@@ -994,7 +994,7 @@ void writeDerivation(Sink & out, const StoreDirConfig & store, const BasicDeriva
             },
             [&](const DerivationOutput::Impure & doi) {
                 out << ""
-                    << (doi.method.renderPrefix() + printHashAlgo(doi.hashAlgo))
+                    << (std::string { doi.method.renderPrefix() } + printHashAlgo(doi.hashAlgo))
                     << "impure";
             },
         }, i.second.raw);
@@ -1221,11 +1221,11 @@ nlohmann::json DerivationOutput::toJSON(
             // FIXME print refs?
         },
         [&](const DerivationOutput::CAFloating & dof) {
-            res["hashAlgo"] = dof.method.renderPrefix() + printHashAlgo(dof.hashAlgo);
+            res["hashAlgo"] = std::string { dof.method.renderPrefix() } + printHashAlgo(dof.hashAlgo);
         },
         [&](const DerivationOutput::Deferred &) {},
         [&](const DerivationOutput::Impure & doi) {
-            res["hashAlgo"] = doi.method.renderPrefix() + printHashAlgo(doi.hashAlgo);
+            res["hashAlgo"] = std::string { doi.method.renderPrefix() } + printHashAlgo(doi.hashAlgo);
             res["impure"] = true;
         },
     }, raw);
diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc
index ccf95beef..fadef45ff 100644
--- a/src/libstore/remote-store.cc
+++ b/src/libstore/remote-store.cc
@@ -435,7 +435,7 @@ ref RemoteStore::addCAToStore(
         conn->to
             << WorkerProto::Op::AddToStore
             << name
-            << caMethod.render(hashAlgo);
+            << caMethod.renderWithAlgo(hashAlgo);
         WorkerProto::write(*this, *conn, references);
         conn->to << repair;
 
diff --git a/src/libutil/file-content-address.cc b/src/libutil/file-content-address.cc
index 9917986f6..6ea7b2ab4 100644
--- a/src/libutil/file-content-address.cc
+++ b/src/libutil/file-content-address.cc
@@ -3,6 +3,31 @@
 
 namespace nix {
 
+FileIngestionMethod parseFileIngestionMethod(std::string_view input)
+{
+    if (input == "flat") {
+        return FileIngestionMethod::Flat;
+    } else if (input == "nar") {
+        return FileIngestionMethod::Recursive;
+    } else {
+        throw UsageError("Unknown file ingestion method '%s', expect `flat` or `nar`", input);
+    }
+}
+
+
+std::string_view renderFileIngestionMethod(FileIngestionMethod method)
+{
+    switch (method) {
+    case FileIngestionMethod::Flat:
+        return "flat";
+    case FileIngestionMethod::Recursive:
+        return "nar";
+    default:
+        assert(false);
+    }
+}
+
+
 void dumpPath(
     SourceAccessor & accessor, const CanonPath & path,
     Sink & sink,
diff --git a/src/libutil/file-content-address.hh b/src/libutil/file-content-address.hh
index 7f7544e41..41f23f2af 100644
--- a/src/libutil/file-content-address.hh
+++ b/src/libutil/file-content-address.hh
@@ -23,6 +23,23 @@ enum struct FileIngestionMethod : uint8_t {
     Recursive = 1,
 };
 
+/**
+ * Parse a `FileIngestionMethod` by name. Choice of:
+ *
+ *  - `flat`: `FileIngestionMethod::Flat`
+ *  - `nar`: `FileIngestionMethod::Recursive`
+ *
+ * Opposite of `renderFileIngestionMethod`.
+ */
+FileIngestionMethod parseFileIngestionMethod(std::string_view input);
+
+/**
+ * Render a `FileIngestionMethod` by name.
+ *
+ * Opposite of `parseFileIngestionMethod`.
+ */
+std::string_view renderFileIngestionMethod(FileIngestionMethod method);
+
 /**
  * Dump a serialization of the given file system object.
  */
diff --git a/src/nix/add-to-store.cc b/src/nix/add-to-store.cc
index d3e66dc21..9ea37ab4c 100644
--- a/src/nix/add-to-store.cc
+++ b/src/nix/add-to-store.cc
@@ -6,17 +6,6 @@
 
 using namespace nix;
 
-static FileIngestionMethod parseIngestionMethod(std::string_view input)
-{
-    if (input == "flat") {
-        return FileIngestionMethod::Flat;
-    } else if (input == "nar") {
-        return FileIngestionMethod::Recursive;
-    } else {
-        throw UsageError("Unknown hash mode '%s', expect `flat` or `nar`");
-    }
-}
-
 struct CmdAddToStore : MixDryRun, StoreCommand
 {
     Path path;
@@ -49,7 +38,7 @@ struct CmdAddToStore : MixDryRun, StoreCommand
             )",
             .labels = {"hash-mode"},
             .handler = {[this](std::string s) {
-                this->caMethod = parseIngestionMethod(s);
+                this->caMethod = parseFileIngestionMethod(s);
             }},
         });
 
diff --git a/tests/unit/libstore/content-address.cc b/tests/unit/libstore/content-address.cc
new file mode 100644
index 000000000..98c1eace3
--- /dev/null
+++ b/tests/unit/libstore/content-address.cc
@@ -0,0 +1,35 @@
+#include 
+
+#include "content-address.hh"
+
+namespace nix {
+
+/* ----------------------------------------------------------------------------
+ * ContentAddressMethod::parse, ContentAddressMethod::render
+ * --------------------------------------------------------------------------*/
+
+TEST(ContentAddressMethod, testRoundTripPrintParse_1) {
+    for (const ContentAddressMethod & cam : {
+        ContentAddressMethod { TextIngestionMethod {} },
+        ContentAddressMethod { FileIngestionMethod::Flat },
+        ContentAddressMethod { FileIngestionMethod::Recursive },
+    }) {
+        EXPECT_EQ(ContentAddressMethod::parse(cam.render()), cam);
+    }
+}
+
+TEST(ContentAddressMethod, testRoundTripPrintParse_2) {
+    for (const std::string_view camS : {
+        "text",
+        "flat",
+        "nar",
+    }) {
+        EXPECT_EQ(ContentAddressMethod::parse(camS).render(), camS);
+    }
+}
+
+TEST(ContentAddressMethod, testParseContentAddressMethodOptException) {
+    EXPECT_THROW(ContentAddressMethod::parse("narwhal"), UsageError);
+}
+
+}
diff --git a/tests/unit/libutil/file-content-address.cc b/tests/unit/libutil/file-content-address.cc
new file mode 100644
index 000000000..2e819ce40
--- /dev/null
+++ b/tests/unit/libutil/file-content-address.cc
@@ -0,0 +1,33 @@
+#include 
+
+#include "file-content-address.hh"
+
+namespace nix {
+
+/* ----------------------------------------------------------------------------
+ * parseFileIngestionMethod, renderFileIngestionMethod
+ * --------------------------------------------------------------------------*/
+
+TEST(FileIngestionMethod, testRoundTripPrintParse_1) {
+    for (const FileIngestionMethod fim : {
+        FileIngestionMethod::Flat,
+        FileIngestionMethod::Recursive,
+    }) {
+        EXPECT_EQ(parseFileIngestionMethod(renderFileIngestionMethod(fim)), fim);
+    }
+}
+
+TEST(FileIngestionMethod, testRoundTripPrintParse_2) {
+    for (const std::string_view fimS : {
+        "flat",
+        "nar",
+    }) {
+        EXPECT_EQ(renderFileIngestionMethod(parseFileIngestionMethod(fimS)), fimS);
+    }
+}
+
+TEST(FileIngestionMethod, testParseFileIngestionMethodOptException) {
+    EXPECT_THROW(parseFileIngestionMethod("narwhal"), UsageError);
+}
+
+}

From db41a0616a42f8fb52b189f7fd05c2f09764426f Mon Sep 17 00:00:00 2001
From: John Ericson 
Date: Tue, 13 Feb 2024 11:14:49 -0500
Subject: [PATCH 100/164] Use `ContentAddressMethod::render` in one more place

Good to deduplicate the code.
---
 src/libfetchers/fetch-to-store.cc | 18 ++----------------
 1 file changed, 2 insertions(+), 16 deletions(-)

diff --git a/src/libfetchers/fetch-to-store.cc b/src/libfetchers/fetch-to-store.cc
index 196489e05..c27880662 100644
--- a/src/libfetchers/fetch-to-store.cc
+++ b/src/libfetchers/fetch-to-store.cc
@@ -21,23 +21,9 @@ StorePath fetchToStore(
         cacheKey = fetchers::Attrs{
             {"_what", "fetchToStore"},
             {"store", store.storeDir},
-            {"name", std::string(name)},
+            {"name", std::string{name}},
             {"fingerprint", *path.accessor->fingerprint},
-            {
-                "method",
-                std::visit(overloaded {
-                    [](const TextIngestionMethod &) {
-                        return "text";
-                    },
-                    [](const FileIngestionMethod & fim) {
-                        switch (fim) {
-                        case FileIngestionMethod::Flat: return "flat";
-                        case FileIngestionMethod::Recursive: return "nar";
-                        default: assert(false);
-                        }
-                    },
-                }, method.raw),
-            },
+            {"method", std::string{method.render()}},
             {"path", path.path.abs()}
         };
         if (auto res = fetchers::getCache()->lookup(store, *cacheKey)) {

From 89e21ab4bd1561c6eab2eeb63088f4e34fa4059f Mon Sep 17 00:00:00 2001
From: Alois Wohlschlager 
Date: Sat, 10 Feb 2024 20:56:54 +0100
Subject: [PATCH 101/164] Restore `builtins.pathExists` behavior on broken
 symlinks

Commit 83c067c0fa0cc5a2dca440e5c986afe40b163802 changed `builtins.pathExists`
to resolve symlinks before checking for existence. Consequently, if the path
refers to a symlink itself, existence of the target of the symlink (instead of
the symlink itself) was checked. Restore the previous behavior by skipping
symlink resolution in the last component.
---
 src/libexpr/primops.cc                        | 15 +++++++-----
 src/libutil/source-path.cc                    | 22 ++++++++++-------
 src/libutil/source-path.hh                    | 24 +++++++++++++++----
 .../functional/lang/eval-okay-pathexists.nix  |  3 +++
 .../functional/lang/symlink-resolution/broken |  1 +
 5 files changed, 46 insertions(+), 19 deletions(-)
 create mode 120000 tests/functional/lang/symlink-resolution/broken

diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc
index 8c6aeffac..dde7c0fe7 100644
--- a/src/libexpr/primops.cc
+++ b/src/libexpr/primops.cc
@@ -115,7 +115,7 @@ StringMap EvalState::realiseContext(const NixStringContext & context)
     return res;
 }
 
-static SourcePath realisePath(EvalState & state, const PosIdx pos, Value & v, bool resolveSymlinks = true)
+static SourcePath realisePath(EvalState & state, const PosIdx pos, Value & v, std::optional resolveSymlinks = SymlinkResolution::Full)
 {
     NixStringContext context;
 
@@ -127,7 +127,7 @@ static SourcePath realisePath(EvalState & state, const PosIdx pos, Value & v, bo
             auto realPath = state.toRealPath(rewriteStrings(path.path.abs(), rewrites), context);
             path = {path.accessor, CanonPath(realPath)};
         }
-        return resolveSymlinks ? path.resolveSymlinks() : path;
+        return resolveSymlinks ? path.resolveSymlinks(*resolveSymlinks) : path;
     } catch (Error & e) {
         e.addTrace(state.positions[pos], "while realising the context of path '%s'", path);
         throw;
@@ -167,7 +167,7 @@ static void mkOutputString(
    argument. */
 static void import(EvalState & state, const PosIdx pos, Value & vPath, Value * vScope, Value & v)
 {
-    auto path = realisePath(state, pos, vPath, false);
+    auto path = realisePath(state, pos, vPath, std::nullopt);
     auto path2 = path.path.abs();
 
     // FIXME
@@ -1521,13 +1521,16 @@ static void prim_pathExists(EvalState & state, const PosIdx pos, Value * * args,
     try {
         auto & arg = *args[0];
 
-        auto path = realisePath(state, pos, arg);
-
         /* SourcePath doesn't know about trailing slash. */
+        state.forceValue(arg, pos);
         auto mustBeDir = arg.type() == nString
             && (arg.string_view().ends_with("/")
                 || arg.string_view().ends_with("/."));
 
+        auto symlinkResolution =
+            mustBeDir ? SymlinkResolution::Full : SymlinkResolution::Ancestors;
+        auto path = realisePath(state, pos, arg, symlinkResolution);
+
         auto st = path.maybeLstat();
         auto exists = st && (!mustBeDir || st->type == SourceAccessor::tDirectory);
         v.mkBool(exists);
@@ -1765,7 +1768,7 @@ static std::string_view fileTypeToString(InputAccessor::Type type)
 
 static void prim_readFileType(EvalState & state, const PosIdx pos, Value * * args, Value & v)
 {
-    auto path = realisePath(state, pos, *args[0], false);
+    auto path = realisePath(state, pos, *args[0], std::nullopt);
     /* Retrieve the directory entry type and stringize it. */
     v.mkString(fileTypeToString(path.lstat().type));
 }
diff --git a/src/libutil/source-path.cc b/src/libutil/source-path.cc
index 341daf39c..66c95405f 100644
--- a/src/libutil/source-path.cc
+++ b/src/libutil/source-path.cc
@@ -62,7 +62,7 @@ bool SourcePath::operator<(const SourcePath & x) const
     return std::tie(*accessor, path) < std::tie(*x.accessor, x.path);
 }
 
-SourcePath SourcePath::resolveSymlinks() const
+SourcePath SourcePath::resolveSymlinks(SymlinkResolution mode) const
 {
     auto res = SourcePath(accessor);
 
@@ -72,6 +72,8 @@ SourcePath SourcePath::resolveSymlinks() const
     for (auto & c : path)
         todo.push_back(std::string(c));
 
+    bool resolve_last = mode == SymlinkResolution::Full;
+
     while (!todo.empty()) {
         auto c = *todo.begin();
         todo.pop_front();
@@ -81,14 +83,16 @@ SourcePath SourcePath::resolveSymlinks() const
             res.path.pop();
         else {
             res.path.push(c);
-            if (auto st = res.maybeLstat(); st && st->type == InputAccessor::tSymlink) {
-                if (!linksAllowed--)
-                    throw Error("infinite symlink recursion in path '%s'", path);
-                auto target = res.readLink();
-                res.path.pop();
-                if (hasPrefix(target, "/"))
-                    res.path = CanonPath::root;
-                todo.splice(todo.begin(), tokenizeString>(target, "/"));
+            if (resolve_last || !todo.empty()) {
+                if (auto st = res.maybeLstat(); st && st->type == InputAccessor::tSymlink) {
+                    if (!linksAllowed--)
+                        throw Error("infinite symlink recursion in path '%s'", path);
+                    auto target = res.readLink();
+                    res.path.pop();
+                    if (hasPrefix(target, "/"))
+                        res.path = CanonPath::root;
+                    todo.splice(todo.begin(), tokenizeString>(target, "/"));
+                }
             }
         }
     }
diff --git a/src/libutil/source-path.hh b/src/libutil/source-path.hh
index bde07b08f..b4cfa9ce8 100644
--- a/src/libutil/source-path.hh
+++ b/src/libutil/source-path.hh
@@ -11,6 +11,22 @@
 
 namespace nix {
 
+enum class SymlinkResolution {
+    /**
+     * Resolve symlinks in the ancestors only.
+     *
+     * Only the last component of the result is possibly a symlink.
+     */
+    Ancestors,
+
+    /**
+     * Resolve symlinks fully, realpath(3)-style.
+     *
+     * No component of the result will be a symlink.
+     */
+    Full,
+};
+
 /**
  * An abstraction for accessing source files during
  * evaluation. Currently, it's just a wrapper around `CanonPath` that
@@ -103,11 +119,11 @@ struct SourcePath
     bool operator<(const SourcePath & x) const;
 
     /**
-     * Resolve any symlinks in this `SourcePath` (including its
-     * parents). The result is a `SourcePath` in which no element is a
-     * symlink.
+     * Resolve any symlinks in this `SourcePath` according to the
+     * given resolution mode.
      */
-    SourcePath resolveSymlinks() const;
+    SourcePath resolveSymlinks(
+        SymlinkResolution mode = SymlinkResolution::Full) const;
 };
 
 std::ostream & operator << (std::ostream & str, const SourcePath & path);
diff --git a/tests/functional/lang/eval-okay-pathexists.nix b/tests/functional/lang/eval-okay-pathexists.nix
index 31697f66a..022b22fea 100644
--- a/tests/functional/lang/eval-okay-pathexists.nix
+++ b/tests/functional/lang/eval-okay-pathexists.nix
@@ -29,3 +29,6 @@ builtins.pathExists (./lib.nix)
 && builtins.pathExists (builtins.toPath { outPath = builtins.toString ./lib.nix; })
 && builtins.pathExists ./lib.nix
 && !builtins.pathExists ./bla.nix
+&& builtins.pathExists ./symlink-resolution/foo/overlays/overlay.nix
+&& builtins.pathExists ./symlink-resolution/broken
+&& builtins.pathExists (builtins.toString ./symlink-resolution/foo/overlays + "/.")
diff --git a/tests/functional/lang/symlink-resolution/broken b/tests/functional/lang/symlink-resolution/broken
new file mode 120000
index 000000000..e07da690b
--- /dev/null
+++ b/tests/functional/lang/symlink-resolution/broken
@@ -0,0 +1 @@
+nonexistent
\ No newline at end of file

From 9d64613dcac181f889f6831a08404e2483d41da4 Mon Sep 17 00:00:00 2001
From: John Ericson 
Date: Tue, 13 Feb 2024 12:50:10 -0500
Subject: [PATCH 102/164] Update src/libutil/file-content-address.cc

---
 src/libutil/file-content-address.cc | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/libutil/file-content-address.cc b/src/libutil/file-content-address.cc
index 6ea7b2ab4..6753e0f49 100644
--- a/src/libutil/file-content-address.cc
+++ b/src/libutil/file-content-address.cc
@@ -23,7 +23,7 @@ std::string_view renderFileIngestionMethod(FileIngestionMethod method)
     case FileIngestionMethod::Recursive:
         return "nar";
     default:
-        assert(false);
+        abort();
     }
 }
 

From a694cfb7bd6fadc9b0c385551c0255ec5c0da068 Mon Sep 17 00:00:00 2001
From: Rebecca Turner 
Date: Tue, 13 Feb 2024 11:09:12 -0800
Subject: [PATCH 103/164] Fix "Failed tcsetattr(TCSADRAIN)" when `nix repl` is
 not a TTY

Before:
```
$ echo builtins.nixVersion | nix repl
Welcome to Nix 2.18.1. Type :? for help.

Failed tcsetattr(TCSADRAIN): Inappropriate ioctl for device
"2.18.1"

Failed tcsetattr(TCSADRAIN): Inappropriate ioctl for device
```

After:
```
$ echo builtins.nixVersion | nix repl
Nix 2.21.0pre20240131_dirty
Type :? for help.
"2.21.0pre20240131_dirty"
```
---
 src/libcmd/repl.cc | 1 -
 1 file changed, 1 deletion(-)

diff --git a/src/libcmd/repl.cc b/src/libcmd/repl.cc
index 26b032693..a7cd15efe 100644
--- a/src/libcmd/repl.cc
+++ b/src/libcmd/repl.cc
@@ -351,7 +351,6 @@ bool NixRepl::getLine(std::string & input, const std::string & prompt)
     };
 
     setupSignals();
-    Finally resetTerminal([&]() { rl_deprep_terminal(); });
     char * s = readline(prompt.c_str());
     Finally doFree([&]() { free(s); });
     restoreSignals();

From 6d2b446e2b71d288f0f9e02270c948f66516f33e Mon Sep 17 00:00:00 2001
From: Rebecca Turner 
Date: Wed, 14 Feb 2024 08:49:47 -0800
Subject: [PATCH 104/164] Add release notes for "Pretty print values in `nix
 repl`"

---
 .../rl-next/pretty-print-in-nix-repl.md       | 24 +++++++++++++++++++
 1 file changed, 24 insertions(+)
 create mode 100644 doc/manual/rl-next/pretty-print-in-nix-repl.md

diff --git a/doc/manual/rl-next/pretty-print-in-nix-repl.md b/doc/manual/rl-next/pretty-print-in-nix-repl.md
new file mode 100644
index 000000000..26ba5162a
--- /dev/null
+++ b/doc/manual/rl-next/pretty-print-in-nix-repl.md
@@ -0,0 +1,24 @@
+---
+synopsis: "`nix repl` pretty-prints values"
+prs: 9931
+---
+
+`nix repl` will now pretty-print values:
+
+```
+{
+  attrs = {
+    a = {
+      b = {
+        c = { };
+      };
+    };
+  };
+  list = [ 1 ];
+  list' = [
+    1
+    2
+    3
+  ];
+}
+```

From 67a6d344487af252d25001b5c43409b56b33ac9d Mon Sep 17 00:00:00 2001
From: Eelco Dolstra 
Date: Wed, 14 Feb 2024 19:07:18 +0100
Subject: [PATCH 105/164] GitInputAccessor: Speed up lookup()

A command like

  rm -rf ~/.cache/nix/tarball-cache/ ~/.cache/nix/fetcher-cache-v1.sqlite*; nix flake metadata 'git+file:///home/eelco/Dev/nixpkgs?rev=9463103069725474698139ab10f17a9d125da859'

was spending about 84% of its runtime in lookup(), specifically in
git_tree_entry_bypath(). (The reading of blobs is less than 3%.)

It appears libgit2 doesn't do a lot of caching of trees, so we now
make sure that when we look up a path, we add all its parents, and all
the immediate children of the parents (since we have them in memory
anyway), to our own cache.

This speeds up the command above from 17.2s to 7.8s on my machine.

Fixes (or at least should improve a lot) #9684.
---
 src/libfetchers/git-utils.cc | 61 ++++++++++++++++++++++++++++++------
 1 file changed, 51 insertions(+), 10 deletions(-)

diff --git a/src/libfetchers/git-utils.cc b/src/libfetchers/git-utils.cc
index cb4a84e53..466bdc6c7 100644
--- a/src/libfetchers/git-utils.cc
+++ b/src/libfetchers/git-utils.cc
@@ -576,20 +576,61 @@ struct GitInputAccessor : InputAccessor
     /* Recursively look up 'path' relative to the root. */
     git_tree_entry * lookup(const CanonPath & path)
     {
-        if (path.isRoot()) return nullptr;
-
         auto i = lookupCache.find(path);
-        if (i == lookupCache.end()) {
-            TreeEntry entry;
-            if (auto err = git_tree_entry_bypath(Setter(entry), root.get(), std::string(path.rel()).c_str())) {
-                if (err != GIT_ENOTFOUND)
-                    throw Error("looking up '%s': %s", showPath(path), git_error_last()->message);
-            }
+        if (i != lookupCache.end()) return i->second.get();
 
-            i = lookupCache.emplace(path, std::move(entry)).first;
+        auto parent = path.parent();
+        if (!parent) return nullptr;
+
+        auto name = path.baseName().value();
+
+        auto parentTree = lookupTree(*parent);
+        if (!parentTree) return nullptr;
+
+        auto count = git_tree_entrycount(parentTree->get());
+
+        git_tree_entry * res = nullptr;
+
+        /* Add all the tree entries to the cache to speed up
+           subsequent lookups. */
+        for (size_t n = 0; n < count; ++n) {
+            auto entry = git_tree_entry_byindex(parentTree->get(), n);
+
+            TreeEntry copy;
+            if (git_tree_entry_dup(Setter(copy), entry))
+                throw Error("dupping tree entry: %s", git_error_last()->message);
+
+            auto entryName = std::string_view(git_tree_entry_name(entry));
+
+            if (entryName == name)
+                res = copy.get();
+
+            auto path2 = *parent;
+            path2.push(entryName);
+            lookupCache.emplace(path2, std::move(copy)).first->second.get();
         }
 
-        return &*i->second;
+        return res;
+    }
+
+    std::optional lookupTree(const CanonPath & path)
+    {
+        if (path.isRoot()) {
+            Tree tree;
+            if (git_tree_dup(Setter(tree), root.get()))
+                throw Error("duplicating directory '%s': %s", showPath(path), git_error_last()->message);
+            return tree;
+        }
+
+        auto entry = lookup(path);
+        if (!entry || git_tree_entry_type(entry) != GIT_OBJECT_TREE)
+            return std::nullopt;
+
+        Tree tree;
+        if (git_tree_entry_to_object((git_object * *) (git_tree * *) Setter(tree), *repo, entry))
+            throw Error("looking up directory '%s': %s", showPath(path), git_error_last()->message);
+
+        return tree;
     }
 
     git_tree_entry * need(const CanonPath & path)

From ba6a5f06eeaeb2a81f4e6871b8ef19927987409e Mon Sep 17 00:00:00 2001
From: John Ericson 
Date: Thu, 21 Dec 2023 03:49:52 -0500
Subject: [PATCH 106/164] Split `GitRepoImpl::importTarball`

There is now a separation of:

1. A `FileSystemObjectSink` for writing to git repos

2. Adapting libarchive to use that parse sink.

This prepares a proper separation of concerns.
---
 src/libfetchers/git-utils.cc | 388 +++++++++++++++++++++--------------
 src/libfetchers/git-utils.hh |  15 +-
 src/libutil/fs-sink.hh       |   2 +
 3 files changed, 249 insertions(+), 156 deletions(-)

diff --git a/src/libfetchers/git-utils.cc b/src/libfetchers/git-utils.cc
index 51631e769..980a5a4d7 100644
--- a/src/libfetchers/git-utils.cc
+++ b/src/libfetchers/git-utils.cc
@@ -7,6 +7,7 @@
 #include "processes.hh"
 #include "signals.hh"
 #include "users.hh"
+#include "fs-sink.hh"
 
 #include 
 #include 
@@ -23,9 +24,6 @@
 #include 
 #include 
 
-#include "tarfile.hh"
-#include 
-
 #include 
 #include 
 #include 
@@ -317,157 +315,7 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this
         return std::nullopt;
     }
 
-    TarballInfo importTarball(Source & source) override
-    {
-        TarArchive archive(source);
-
-        struct PendingDir
-        {
-            std::string name;
-            TreeBuilder builder;
-        };
-
-        std::vector pendingDirs;
-
-        auto pushBuilder = [&](std::string name)
-        {
-            git_treebuilder * b;
-            if (git_treebuilder_new(&b, *this, nullptr))
-                throw Error("creating a tree builder: %s", git_error_last()->message);
-            pendingDirs.push_back({ .name = std::move(name), .builder = TreeBuilder(b) });
-        };
-
-        auto popBuilder = [&]() -> std::pair
-        {
-            assert(!pendingDirs.empty());
-            auto pending = std::move(pendingDirs.back());
-            git_oid oid;
-            if (git_treebuilder_write(&oid, pending.builder.get()))
-                throw Error("creating a tree object: %s", git_error_last()->message);
-            pendingDirs.pop_back();
-            return {oid, pending.name};
-        };
-
-        auto addToTree = [&](const std::string & name, const git_oid & oid, git_filemode_t mode)
-        {
-            assert(!pendingDirs.empty());
-            auto & pending = pendingDirs.back();
-            if (git_treebuilder_insert(nullptr, pending.builder.get(), name.c_str(), &oid, mode))
-                throw Error("adding a file to a tree builder: %s", git_error_last()->message);
-        };
-
-        auto updateBuilders = [&](std::span names)
-        {
-            // Find the common prefix of pendingDirs and names.
-            size_t prefixLen = 0;
-            for (; prefixLen < names.size() && prefixLen + 1 < pendingDirs.size(); ++prefixLen)
-                if (names[prefixLen] != pendingDirs[prefixLen + 1].name)
-                    break;
-
-            // Finish the builders that are not part of the common prefix.
-            for (auto n = pendingDirs.size(); n > prefixLen + 1; --n) {
-                auto [oid, name] = popBuilder();
-                addToTree(name, oid, GIT_FILEMODE_TREE);
-            }
-
-            // Create builders for the new directories.
-            for (auto n = prefixLen; n < names.size(); ++n)
-                pushBuilder(names[n]);
-        };
-
-        pushBuilder("");
-
-        size_t componentsToStrip = 1;
-
-        time_t lastModified = 0;
-
-        for (;;) {
-            // FIXME: merge with extract_archive
-            struct archive_entry * entry;
-            int r = archive_read_next_header(archive.archive, &entry);
-            if (r == ARCHIVE_EOF) break;
-            auto path = archive_entry_pathname(entry);
-            if (!path)
-                throw Error("cannot get archive member name: %s", archive_error_string(archive.archive));
-            if (r == ARCHIVE_WARN)
-                warn(archive_error_string(archive.archive));
-            else
-                archive.check(r);
-
-            lastModified = std::max(lastModified, archive_entry_mtime(entry));
-
-            auto pathComponents = tokenizeString>(path, "/");
-
-            std::span pathComponents2{pathComponents};
-
-            if (pathComponents2.size() <= componentsToStrip) continue;
-            pathComponents2 = pathComponents2.subspan(componentsToStrip);
-
-            updateBuilders(
-                archive_entry_filetype(entry) == AE_IFDIR
-                ? pathComponents2
-                : pathComponents2.first(pathComponents2.size() - 1));
-
-            switch (archive_entry_filetype(entry)) {
-
-            case AE_IFDIR:
-                // Nothing to do right now.
-                break;
-
-            case AE_IFREG: {
-
-                git_writestream * stream = nullptr;
-                if (git_blob_create_from_stream(&stream, *this, nullptr))
-                    throw Error("creating a blob stream object: %s", git_error_last()->message);
-
-                while (true) {
-                    std::vector buf(128 * 1024);
-                    auto n = archive_read_data(archive.archive, buf.data(), buf.size());
-                    if (n < 0)
-                        throw Error("cannot read file '%s' from tarball", path);
-                    if (n == 0) break;
-                    if (stream->write(stream, (const char *) buf.data(), n))
-                        throw Error("writing a blob for tarball member '%s': %s", path, git_error_last()->message);
-                }
-
-                git_oid oid;
-                if (git_blob_create_from_stream_commit(&oid, stream))
-                    throw Error("creating a blob object for tarball member '%s': %s", path, git_error_last()->message);
-
-                addToTree(*pathComponents.rbegin(), oid,
-                    archive_entry_mode(entry) & S_IXUSR
-                    ? GIT_FILEMODE_BLOB_EXECUTABLE
-                    : GIT_FILEMODE_BLOB);
-
-                break;
-            }
-
-            case AE_IFLNK: {
-                auto target = archive_entry_symlink(entry);
-
-                git_oid oid;
-                if (git_blob_create_from_buffer(&oid, *this, target, strlen(target)))
-                    throw Error("creating a blob object for tarball symlink member '%s': %s", path, git_error_last()->message);
-
-                addToTree(*pathComponents.rbegin(), oid, GIT_FILEMODE_LINK);
-
-                break;
-            }
-
-            default:
-                throw Error("file '%s' in tarball has unsupported file type", path);
-            }
-        }
-
-        updateBuilders({});
-
-        auto [oid, _name] = popBuilder();
-
-        return TarballInfo {
-            .treeHash = toHash(oid),
-            .lastModified = lastModified
-        };
-    }
+    TarballInfo importTarball(Source & source) override;
 
     std::vector> getSubmodules(const Hash & rev, bool exportIgnore) override;
 
@@ -511,6 +359,8 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this
 
     ref getAccessor(const WorkdirInfo & wd, bool exportIgnore, MakeNotAllowedError e) override;
 
+    ref getFileSystemObjectSink() override;
+
     static int sidebandProgressCallback(const char * str, int len, void * payload)
     {
         auto act = (Activity *) payload;
@@ -884,6 +734,154 @@ struct GitExportIgnoreInputAccessor : CachingFilteringInputAccessor {
 
 };
 
+struct GitFileSystemObjectSinkImpl : GitFileSystemObjectSink
+{
+    ref repo;
+
+    struct PendingDir
+    {
+        std::string name;
+        TreeBuilder builder;
+    };
+
+    std::vector pendingDirs;
+
+    size_t componentsToStrip = 1;
+
+    void pushBuilder(std::string name)
+    {
+        git_treebuilder * b;
+        if (git_treebuilder_new(&b, *repo, nullptr))
+            throw Error("creating a tree builder: %s", git_error_last()->message);
+        pendingDirs.push_back({ .name = std::move(name), .builder = TreeBuilder(b) });
+    };
+
+    GitFileSystemObjectSinkImpl(ref repo) : repo(repo)
+    {
+        pushBuilder("");
+    }
+
+    std::pair popBuilder()
+    {
+        assert(!pendingDirs.empty());
+        auto pending = std::move(pendingDirs.back());
+        git_oid oid;
+        if (git_treebuilder_write(&oid, pending.builder.get()))
+            throw Error("creating a tree object: %s", git_error_last()->message);
+        pendingDirs.pop_back();
+        return {oid, pending.name};
+    };
+
+    void addToTree(const std::string & name, const git_oid & oid, git_filemode_t mode)
+    {
+        assert(!pendingDirs.empty());
+        auto & pending = pendingDirs.back();
+        if (git_treebuilder_insert(nullptr, pending.builder.get(), name.c_str(), &oid, mode))
+            throw Error("adding a file to a tree builder: %s", git_error_last()->message);
+    };
+
+    void updateBuilders(std::span names)
+    {
+        // Find the common prefix of pendingDirs and names.
+        size_t prefixLen = 0;
+        for (; prefixLen < names.size() && prefixLen + 1 < pendingDirs.size(); ++prefixLen)
+            if (names[prefixLen] != pendingDirs[prefixLen + 1].name)
+                break;
+
+        // Finish the builders that are not part of the common prefix.
+        for (auto n = pendingDirs.size(); n > prefixLen + 1; --n) {
+            auto [oid, name] = popBuilder();
+            addToTree(name, oid, GIT_FILEMODE_TREE);
+        }
+
+        // Create builders for the new directories.
+        for (auto n = prefixLen; n < names.size(); ++n)
+            pushBuilder(names[n]);
+    };
+
+    bool prepareDirs(const std::vector & pathComponents, bool isDir)
+    {
+        std::span pathComponents2{pathComponents};
+
+        if (pathComponents2.size() <= componentsToStrip) return false;
+        pathComponents2 = pathComponents2.subspan(componentsToStrip);
+
+        updateBuilders(
+            isDir
+            ? pathComponents2
+            : pathComponents2.first(pathComponents2.size() - 1));
+
+        return true;
+    }
+
+    void createRegularFile(
+        const Path & path,
+        std::function func) override
+    {
+        auto pathComponents = tokenizeString>(path, "/");
+        if (!prepareDirs(pathComponents, false)) return;
+
+        git_writestream * stream = nullptr;
+        if (git_blob_create_from_stream(&stream, *repo, nullptr))
+            throw Error("creating a blob stream object: %s", git_error_last()->message);
+
+        struct CRF : CreateRegularFileSink {
+            const Path & path;
+            GitFileSystemObjectSinkImpl & back;
+            git_writestream * stream;
+            bool executable = false;
+            CRF(const Path & path, GitFileSystemObjectSinkImpl & back, git_writestream * stream)
+                : path(path), back(back), stream(stream)
+            {}
+            void operator () (std::string_view data) override
+            {
+                if (stream->write(stream, data.data(), data.size()))
+                    throw Error("writing a blob for tarball member '%s': %s", path, git_error_last()->message);
+            }
+            void isExecutable() override
+            {
+                executable = true;
+            }
+        } crf { path, *this, stream };
+        func(crf);
+
+        git_oid oid;
+        if (git_blob_create_from_stream_commit(&oid, stream))
+            throw Error("creating a blob object for tarball member '%s': %s", path, git_error_last()->message);
+
+        addToTree(*pathComponents.rbegin(), oid,
+            crf.executable
+            ? GIT_FILEMODE_BLOB_EXECUTABLE
+            : GIT_FILEMODE_BLOB);
+    }
+
+    void createDirectory(const Path & path) override
+    {
+        auto pathComponents = tokenizeString>(path, "/");
+        (void) prepareDirs(pathComponents, true);
+    }
+
+    void createSymlink(const Path & path, const std::string & target) override
+    {
+        auto pathComponents = tokenizeString>(path, "/");
+        if (!prepareDirs(pathComponents, false)) return;
+
+        git_oid oid;
+        if (git_blob_create_from_buffer(&oid, *repo, target.c_str(), target.size()))
+            throw Error("creating a blob object for tarball symlink member '%s': %s", path, git_error_last()->message);
+
+        addToTree(*pathComponents.rbegin(), oid, GIT_FILEMODE_LINK);
+    }
+
+    Hash sync() override {
+        updateBuilders({});
+
+        auto [oid, _name] = popBuilder();
+
+        return toHash(oid);
+    }
+};
+
 ref GitRepoImpl::getRawAccessor(const Hash & rev)
 {
     auto self = ref(shared_from_this());
@@ -918,6 +916,11 @@ ref GitRepoImpl::getAccessor(const WorkdirInfo & wd, bool exportI
     }
 }
 
+ref GitRepoImpl::getFileSystemObjectSink()
+{
+    return make_ref(ref(shared_from_this()));
+}
+
 std::vector> GitRepoImpl::getSubmodules(const Hash & rev, bool exportIgnore)
 {
     /* Read the .gitmodules files from this revision. */
@@ -951,4 +954,81 @@ ref getTarballCache()
     return make_ref(repoDir, true, true);
 }
 
+}
+
+#include "tarfile.hh"
+#include 
+
+namespace nix {
+
+GitRepo::TarballInfo GitRepoImpl::importTarball(Source & source)
+{
+    TarArchive archive { source };
+
+    auto parseSink = getFileSystemObjectSink();
+
+    time_t lastModified = 0;
+
+    for (;;) {
+        // FIXME: merge with extract_archive
+        struct archive_entry * entry;
+        int r = archive_read_next_header(archive.archive, &entry);
+        if (r == ARCHIVE_EOF) break;
+        auto path = archive_entry_pathname(entry);
+        if (!path)
+            throw Error("cannot get archive member name: %s", archive_error_string(archive.archive));
+        if (r == ARCHIVE_WARN)
+            warn(archive_error_string(archive.archive));
+        else
+            archive.check(r);
+
+        lastModified = std::max(lastModified, archive_entry_mtime(entry));
+
+        switch (archive_entry_filetype(entry)) {
+
+        case AE_IFDIR:
+            parseSink->createDirectory(path);
+            break;
+
+        case AE_IFREG: {
+            parseSink->createRegularFile(path, [&](auto & crf) {
+                if (archive_entry_mode(entry) & S_IXUSR)
+                    crf.isExecutable();
+
+                while (true) {
+                    std::vector buf(128 * 1024);
+                    auto n = archive_read_data(archive.archive, buf.data(), buf.size());
+                    if (n < 0)
+                        throw Error("cannot read file '%s' from tarball", path);
+                    if (n == 0) break;
+                    crf(std::string_view {
+                        (const char *) buf.data(),
+                        (size_t) n,
+                    });
+                }
+            });
+
+            break;
+        }
+
+        case AE_IFLNK: {
+            auto target = archive_entry_symlink(entry);
+
+            parseSink->createSymlink(path, target);
+
+            break;
+        }
+
+        default:
+            throw Error("file '%s' in tarball has unsupported file type", path);
+        }
+    }
+
+    return TarballInfo {
+        .treeHash = parseSink->sync(),
+        .lastModified = lastModified
+    };
+}
+
+
 }
diff --git a/src/libfetchers/git-utils.hh b/src/libfetchers/git-utils.hh
index b54559def..f82f62fc8 100644
--- a/src/libfetchers/git-utils.hh
+++ b/src/libfetchers/git-utils.hh
@@ -2,11 +2,20 @@
 
 #include "filtering-input-accessor.hh"
 #include "input-accessor.hh"
+#include "fs-sink.hh"
 
 namespace nix {
 
 namespace fetchers { struct PublicKey; }
 
+struct GitFileSystemObjectSink : FileSystemObjectSink
+{
+    /**
+     * Flush builder and return a final Git hash.
+     */
+    virtual Hash sync() = 0;
+};
+
 struct GitRepo
 {
     virtual ~GitRepo()
@@ -70,14 +79,14 @@ struct GitRepo
         time_t lastModified;
     };
 
-    virtual TarballInfo importTarball(Source & source) = 0;
-
     virtual bool hasObject(const Hash & oid) = 0;
 
     virtual ref getAccessor(const Hash & rev, bool exportIgnore) = 0;
 
     virtual ref getAccessor(const WorkdirInfo & wd, bool exportIgnore, MakeNotAllowedError makeNotAllowedError) = 0;
 
+    virtual ref getFileSystemObjectSink() = 0;
+
     virtual void fetch(
         const std::string & url,
         const std::string & refspec,
@@ -90,6 +99,8 @@ struct GitRepo
     virtual void verifyCommit(
         const Hash & rev,
         const std::vector & publicKeys) = 0;
+
+    virtual TarballInfo importTarball(Source & source) = 0;
 };
 
 ref getTarballCache();
diff --git a/src/libutil/fs-sink.hh b/src/libutil/fs-sink.hh
index 4dfb5b329..ae577819a 100644
--- a/src/libutil/fs-sink.hh
+++ b/src/libutil/fs-sink.hh
@@ -26,6 +26,8 @@ struct CreateRegularFileSink : Sink
 
 struct FileSystemObjectSink
 {
+    virtual ~FileSystemObjectSink() = default;
+
     virtual void createDirectory(const Path & path) = 0;
 
     /**

From ed24baaec4f3825ce538d1894ced63bfc82db7c8 Mon Sep 17 00:00:00 2001
From: John Ericson 
Date: Thu, 21 Dec 2023 04:28:06 -0500
Subject: [PATCH 107/164] Finish separating concerns with tarball cache

There is no longer an `importTarball` method. Instead, there is an
`unpackTarfileToSink` function (back in libutil). The caller can use
this with the `getParseSink` method we added in the last commit easily
enough.

In addition, tarball cache functionality is separated from `git-utils`
and moved into `tarball-cache`. This ensures we are separating mechanism
and policy.
---
 src/libfetchers/git-utils.cc     | 86 --------------------------------
 src/libfetchers/git-utils.hh     | 10 ----
 src/libfetchers/github.cc        | 15 ++++--
 src/libfetchers/tarball-cache.cc | 13 +++++
 src/libfetchers/tarball-cache.hh | 17 +++++++
 src/libutil/tarfile.cc           | 62 +++++++++++++++++++++++
 src/libutil/tarfile.hh           |  3 ++
 7 files changed, 107 insertions(+), 99 deletions(-)
 create mode 100644 src/libfetchers/tarball-cache.cc
 create mode 100644 src/libfetchers/tarball-cache.hh

diff --git a/src/libfetchers/git-utils.cc b/src/libfetchers/git-utils.cc
index 980a5a4d7..42bf42de6 100644
--- a/src/libfetchers/git-utils.cc
+++ b/src/libfetchers/git-utils.cc
@@ -315,8 +315,6 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this
         return std::nullopt;
     }
 
-    TarballInfo importTarball(Source & source) override;
-
     std::vector> getSubmodules(const Hash & rev, bool exportIgnore) override;
 
     std::string resolveSubmoduleUrl(
@@ -947,88 +945,4 @@ std::vector> GitRepoImpl::getSubmodules
     return result;
 }
 
-ref getTarballCache()
-{
-    static auto repoDir = std::filesystem::path(getCacheDir()) / "nix" / "tarball-cache";
-
-    return make_ref(repoDir, true, true);
-}
-
-}
-
-#include "tarfile.hh"
-#include 
-
-namespace nix {
-
-GitRepo::TarballInfo GitRepoImpl::importTarball(Source & source)
-{
-    TarArchive archive { source };
-
-    auto parseSink = getFileSystemObjectSink();
-
-    time_t lastModified = 0;
-
-    for (;;) {
-        // FIXME: merge with extract_archive
-        struct archive_entry * entry;
-        int r = archive_read_next_header(archive.archive, &entry);
-        if (r == ARCHIVE_EOF) break;
-        auto path = archive_entry_pathname(entry);
-        if (!path)
-            throw Error("cannot get archive member name: %s", archive_error_string(archive.archive));
-        if (r == ARCHIVE_WARN)
-            warn(archive_error_string(archive.archive));
-        else
-            archive.check(r);
-
-        lastModified = std::max(lastModified, archive_entry_mtime(entry));
-
-        switch (archive_entry_filetype(entry)) {
-
-        case AE_IFDIR:
-            parseSink->createDirectory(path);
-            break;
-
-        case AE_IFREG: {
-            parseSink->createRegularFile(path, [&](auto & crf) {
-                if (archive_entry_mode(entry) & S_IXUSR)
-                    crf.isExecutable();
-
-                while (true) {
-                    std::vector buf(128 * 1024);
-                    auto n = archive_read_data(archive.archive, buf.data(), buf.size());
-                    if (n < 0)
-                        throw Error("cannot read file '%s' from tarball", path);
-                    if (n == 0) break;
-                    crf(std::string_view {
-                        (const char *) buf.data(),
-                        (size_t) n,
-                    });
-                }
-            });
-
-            break;
-        }
-
-        case AE_IFLNK: {
-            auto target = archive_entry_symlink(entry);
-
-            parseSink->createSymlink(path, target);
-
-            break;
-        }
-
-        default:
-            throw Error("file '%s' in tarball has unsupported file type", path);
-        }
-    }
-
-    return TarballInfo {
-        .treeHash = parseSink->sync(),
-        .lastModified = lastModified
-    };
-}
-
-
 }
diff --git a/src/libfetchers/git-utils.hh b/src/libfetchers/git-utils.hh
index f82f62fc8..029d39741 100644
--- a/src/libfetchers/git-utils.hh
+++ b/src/libfetchers/git-utils.hh
@@ -73,12 +73,6 @@ struct GitRepo
         const std::string & url,
         const std::string & base) = 0;
 
-    struct TarballInfo
-    {
-        Hash treeHash;
-        time_t lastModified;
-    };
-
     virtual bool hasObject(const Hash & oid) = 0;
 
     virtual ref getAccessor(const Hash & rev, bool exportIgnore) = 0;
@@ -99,10 +93,6 @@ struct GitRepo
     virtual void verifyCommit(
         const Hash & rev,
         const std::vector & publicKeys) = 0;
-
-    virtual TarballInfo importTarball(Source & source) = 0;
 };
 
-ref getTarballCache();
-
 }
diff --git a/src/libfetchers/github.cc b/src/libfetchers/github.cc
index 1cfc142a5..8b3e6ff20 100644
--- a/src/libfetchers/github.cc
+++ b/src/libfetchers/github.cc
@@ -8,7 +8,9 @@
 #include "fetchers.hh"
 #include "fetch-settings.hh"
 #include "tarball.hh"
+#include "tarfile.hh"
 #include "git-utils.hh"
+#include "tarball-cache.hh"
 
 #include 
 #include 
@@ -191,7 +193,7 @@ struct GitArchiveInputScheme : InputScheme
 
     virtual DownloadUrl getDownloadUrl(const Input & input) const = 0;
 
-    std::pair downloadArchive(ref store, Input input) const
+    std::pair downloadArchive(ref store, Input input) const
     {
         if (!maybeGetStrAttr(input.attrs, "ref")) input.attrs.insert_or_assign("ref", "HEAD");
 
@@ -218,7 +220,7 @@ struct GitArchiveInputScheme : InputScheme
                 auto treeHash = getRevAttr(*treeHashAttrs, "treeHash");
                 auto lastModified = getIntAttr(*lastModifiedAttrs, "lastModified");
                 if (getTarballCache()->hasObject(treeHash))
-                    return {std::move(input), GitRepo::TarballInfo { .treeHash = treeHash, .lastModified = (time_t) lastModified }};
+                    return {std::move(input), TarballInfo { .treeHash = treeHash, .lastModified = (time_t) lastModified }};
                 else
                     debug("Git tree with hash '%s' has disappeared from the cache, refetching...", treeHash.gitRev());
             }
@@ -233,7 +235,14 @@ struct GitArchiveInputScheme : InputScheme
             getFileTransfer()->download(std::move(req), sink);
         });
 
-        auto tarballInfo = getTarballCache()->importTarball(*source);
+        TarArchive archive { *source };
+        auto parseSink = getTarballCache()->getFileSystemObjectSink();
+        auto lastModified = unpackTarfileToSink(archive, *parseSink);
+
+        TarballInfo tarballInfo {
+            .treeHash = parseSink->sync(),
+            .lastModified = lastModified
+        };
 
         cache->upsert(treeHashKey, Attrs{{"treeHash", tarballInfo.treeHash.gitRev()}});
         cache->upsert(lastModifiedKey, Attrs{{"lastModified", (uint64_t) tarballInfo.lastModified}});
diff --git a/src/libfetchers/tarball-cache.cc b/src/libfetchers/tarball-cache.cc
new file mode 100644
index 000000000..bb2c51973
--- /dev/null
+++ b/src/libfetchers/tarball-cache.cc
@@ -0,0 +1,13 @@
+#include "tarball-cache.hh"
+#include "users.hh"
+
+namespace nix::fetchers {
+
+ref getTarballCache()
+{
+    static auto repoDir = std::filesystem::path(getCacheDir()) / "nix" / "tarball-cache";
+
+    return GitRepo::openRepo(repoDir, true, true);
+}
+
+}
diff --git a/src/libfetchers/tarball-cache.hh b/src/libfetchers/tarball-cache.hh
new file mode 100644
index 000000000..e1517038b
--- /dev/null
+++ b/src/libfetchers/tarball-cache.hh
@@ -0,0 +1,17 @@
+#pragma once
+///@file
+
+#include "ref.hh"
+#include "git-utils.hh"
+
+namespace nix::fetchers {
+
+struct TarballInfo
+{
+    Hash treeHash;
+    time_t lastModified;
+};
+
+ref getTarballCache();
+
+}
diff --git a/src/libutil/tarfile.cc b/src/libutil/tarfile.cc
index 187b3e948..3bb6694f8 100644
--- a/src/libutil/tarfile.cc
+++ b/src/libutil/tarfile.cc
@@ -132,4 +132,66 @@ void unpackTarfile(const Path & tarFile, const Path & destDir)
     extract_archive(archive, destDir);
 }
 
+time_t unpackTarfileToSink(TarArchive & archive, FileSystemObjectSink & parseSink)
+{
+    time_t lastModified = 0;
+
+    for (;;) {
+        // FIXME: merge with extract_archive
+        struct archive_entry * entry;
+        int r = archive_read_next_header(archive.archive, &entry);
+        if (r == ARCHIVE_EOF) break;
+        auto path = archive_entry_pathname(entry);
+        if (!path)
+            throw Error("cannot get archive member name: %s", archive_error_string(archive.archive));
+        if (r == ARCHIVE_WARN)
+            warn(archive_error_string(archive.archive));
+        else
+            archive.check(r);
+
+        lastModified = std::max(lastModified, archive_entry_mtime(entry));
+
+        switch (archive_entry_filetype(entry)) {
+
+        case AE_IFDIR:
+            parseSink.createDirectory(path);
+            break;
+
+        case AE_IFREG: {
+            parseSink.createRegularFile(path, [&](auto & crf) {
+                if (archive_entry_mode(entry) & S_IXUSR)
+                    crf.isExecutable();
+
+                while (true) {
+                    std::vector buf(128 * 1024);
+                    auto n = archive_read_data(archive.archive, buf.data(), buf.size());
+                    if (n < 0)
+                        throw Error("cannot read file '%s' from tarball", path);
+                    if (n == 0) break;
+                    crf(std::string_view {
+                        (const char *) buf.data(),
+                        (size_t) n,
+                    });
+                }
+            });
+
+            break;
+        }
+
+        case AE_IFLNK: {
+            auto target = archive_entry_symlink(entry);
+
+            parseSink.createSymlink(path, target);
+
+            break;
+        }
+
+        default:
+            throw Error("file '%s' in tarball has unsupported file type", path);
+        }
+    }
+
+    return lastModified;
+}
+
 }
diff --git a/src/libutil/tarfile.hh b/src/libutil/tarfile.hh
index 237d18c31..6a9c42149 100644
--- a/src/libutil/tarfile.hh
+++ b/src/libutil/tarfile.hh
@@ -2,6 +2,7 @@
 ///@file
 
 #include "serialise.hh"
+#include "fs-sink.hh"
 #include 
 
 namespace nix {
@@ -29,4 +30,6 @@ void unpackTarfile(Source & source, const Path & destDir);
 
 void unpackTarfile(const Path & tarFile, const Path & destDir);
 
+time_t unpackTarfileToSink(TarArchive & archive, FileSystemObjectSink & parseSink);
+
 }

From 78b8db72b53b6657cbdaaac8ad6c0f99fb92ed10 Mon Sep 17 00:00:00 2001
From: Eelco Dolstra 
Date: Thu, 15 Feb 2024 21:58:08 +0100
Subject: [PATCH 108/164] Remove tarball-cache.{hh,cc}

TarballInfo is only used in github.cc, and getTarballCache() is a bit
too trivial to have its own file.
---
 src/libfetchers/git-utils.cc     |  7 +++++++
 src/libfetchers/git-utils.hh     |  2 ++
 src/libfetchers/github.cc        |  7 ++++++-
 src/libfetchers/tarball-cache.cc | 13 -------------
 src/libfetchers/tarball-cache.hh | 17 -----------------
 5 files changed, 15 insertions(+), 31 deletions(-)
 delete mode 100644 src/libfetchers/tarball-cache.cc
 delete mode 100644 src/libfetchers/tarball-cache.hh

diff --git a/src/libfetchers/git-utils.cc b/src/libfetchers/git-utils.cc
index 32f665aa0..4f034e9d4 100644
--- a/src/libfetchers/git-utils.cc
+++ b/src/libfetchers/git-utils.cc
@@ -986,4 +986,11 @@ std::vector> GitRepoImpl::getSubmodules
     return result;
 }
 
+ref getTarballCache()
+{
+    static auto repoDir = std::filesystem::path(getCacheDir()) / "nix" / "tarball-cache";
+
+    return GitRepo::openRepo(repoDir, true, true);
+}
+
 }
diff --git a/src/libfetchers/git-utils.hh b/src/libfetchers/git-utils.hh
index 029d39741..5f68d26a7 100644
--- a/src/libfetchers/git-utils.hh
+++ b/src/libfetchers/git-utils.hh
@@ -95,4 +95,6 @@ struct GitRepo
         const std::vector & publicKeys) = 0;
 };
 
+ref getTarballCache();
+
 }
diff --git a/src/libfetchers/github.cc b/src/libfetchers/github.cc
index 8b3e6ff20..e6fbece13 100644
--- a/src/libfetchers/github.cc
+++ b/src/libfetchers/github.cc
@@ -10,7 +10,6 @@
 #include "tarball.hh"
 #include "tarfile.hh"
 #include "git-utils.hh"
-#include "tarball-cache.hh"
 
 #include 
 #include 
@@ -193,6 +192,12 @@ struct GitArchiveInputScheme : InputScheme
 
     virtual DownloadUrl getDownloadUrl(const Input & input) const = 0;
 
+    struct TarballInfo
+    {
+        Hash treeHash;
+        time_t lastModified;
+    };
+
     std::pair downloadArchive(ref store, Input input) const
     {
         if (!maybeGetStrAttr(input.attrs, "ref")) input.attrs.insert_or_assign("ref", "HEAD");
diff --git a/src/libfetchers/tarball-cache.cc b/src/libfetchers/tarball-cache.cc
deleted file mode 100644
index bb2c51973..000000000
--- a/src/libfetchers/tarball-cache.cc
+++ /dev/null
@@ -1,13 +0,0 @@
-#include "tarball-cache.hh"
-#include "users.hh"
-
-namespace nix::fetchers {
-
-ref getTarballCache()
-{
-    static auto repoDir = std::filesystem::path(getCacheDir()) / "nix" / "tarball-cache";
-
-    return GitRepo::openRepo(repoDir, true, true);
-}
-
-}
diff --git a/src/libfetchers/tarball-cache.hh b/src/libfetchers/tarball-cache.hh
deleted file mode 100644
index e1517038b..000000000
--- a/src/libfetchers/tarball-cache.hh
+++ /dev/null
@@ -1,17 +0,0 @@
-#pragma once
-///@file
-
-#include "ref.hh"
-#include "git-utils.hh"
-
-namespace nix::fetchers {
-
-struct TarballInfo
-{
-    Hash treeHash;
-    time_t lastModified;
-};
-
-ref getTarballCache();
-
-}

From e27b7e04bf38c1fdf342d6e15b2c003ca9b92cb1 Mon Sep 17 00:00:00 2001
From: John Ericson 
Date: Fri, 16 Feb 2024 08:45:15 -0500
Subject: [PATCH 109/164] Add note about this being a temp solution

---
 src/libutil/source-path.hh | 7 +++++++
 1 file changed, 7 insertions(+)

diff --git a/src/libutil/source-path.hh b/src/libutil/source-path.hh
index b4cfa9ce8..4542a2bac 100644
--- a/src/libutil/source-path.hh
+++ b/src/libutil/source-path.hh
@@ -11,6 +11,10 @@
 
 namespace nix {
 
+/**
+ * Note there is a decent chance this type soon goes away because the problem is solved another way.
+ * See the discussion in https://github.com/NixOS/nix/pull/9985.
+ */
 enum class SymlinkResolution {
     /**
      * Resolve symlinks in the ancestors only.
@@ -121,6 +125,9 @@ struct SourcePath
     /**
      * Resolve any symlinks in this `SourcePath` according to the
      * given resolution mode.
+     *
+     * @param mode might only be a temporary solution for this. 
+     * See the discussion in https://github.com/NixOS/nix/pull/9985.
      */
     SourcePath resolveSymlinks(
         SymlinkResolution mode = SymlinkResolution::Full) const;

From d17e1d9737f68d5f77e9c0f9bfa56da8a4f63816 Mon Sep 17 00:00:00 2001
From: John Ericson 
Date: Sat, 13 Jan 2024 01:11:49 -0500
Subject: [PATCH 110/164] Purify `CanonPath`

The core `CanonPath` constructors were using `absPath`, but `absPath` in
some situations does IO which is not appropriate. It turns out that
these constructors avoided those situations, and thus were pure, but it
was far from obvious this was the case.

To remedy the situation, abstract the core algorithm from `canonPath` to
use separately in `CanonPath` without any IO. Now we know by
construction that those constructors are pure.

That leaves `CanonPath::fromCWD` as the only operation which uses IO /
is impure. Add docs on it, and `CanonPath` as a whole, explaining the
situation.

This is also necessary to support Windows paths on windows without
messing up `CanonPath`. But, I think it is good even without that.

Co-authored-by: Eelco Dolstra 
Co-authored-by: Robert Hensing 
---
 src/libutil/canon-path.cc        | 15 ++++--
 src/libutil/canon-path.hh        | 18 +++++--
 src/libutil/file-path-impl.hh    | 81 ++++++++++++++++++++++++++++++++
 src/libutil/file-system.cc       | 80 ++++++++++++-------------------
 tests/unit/libutil/canon-path.cc | 18 +++++++
 5 files changed, 155 insertions(+), 57 deletions(-)
 create mode 100644 src/libutil/file-path-impl.hh

diff --git a/src/libutil/canon-path.cc b/src/libutil/canon-path.cc
index 1223ba33c..fcd53862b 100644
--- a/src/libutil/canon-path.cc
+++ b/src/libutil/canon-path.cc
@@ -1,16 +1,25 @@
 #include "canon-path.hh"
-#include "file-system.hh"
+#include "util.hh"
+#include "file-path-impl.hh"
 
 namespace nix {
 
 CanonPath CanonPath::root = CanonPath("/");
 
+static std::string absPathPure(std::string_view path)
+{
+    return canonPathInner(path, [](auto &, auto &){});
+}
+
 CanonPath::CanonPath(std::string_view raw)
-    : path(absPath(raw, "/"))
+    : path(absPathPure(concatStrings("/", raw)))
 { }
 
 CanonPath::CanonPath(std::string_view raw, const CanonPath & root)
-    : path(absPath(raw, root.abs()))
+    : path(absPathPure(
+        raw.size() > 0 && raw[0] == '/'
+            ? raw
+            : concatStrings(root.abs(), "/", raw)))
 { }
 
 CanonPath::CanonPath(const std::vector & elems)
diff --git a/src/libutil/canon-path.hh b/src/libutil/canon-path.hh
index 2f8ff381e..8f5a1c279 100644
--- a/src/libutil/canon-path.hh
+++ b/src/libutil/canon-path.hh
@@ -21,9 +21,21 @@ namespace nix {
  *
  * - There are no components equal to '.' or '..'.
  *
- * Note that the path does not need to correspond to an actually
- * existing path, and there is no guarantee that symlinks are
- * resolved.
+ * `CanonPath` are "virtual" Nix paths for abstract file system objects;
+ * they are always Unix-style paths, regardless of what OS Nix is
+ * running on. The `/` root doesn't denote the ambient host file system
+ * root, but some virtual FS root.
+ *
+ * @note It might be useful to compare `openat(some_fd, "foo/bar")` on
+ * Unix. `"foo/bar"` is a relative path because an absolute path would
+ * "override" the `some_fd` directory file descriptor and escape to the
+ * "system root". Conversely, Nix's abstract file operations *never* escape the
+ * designated virtual file system (i.e. `SourceAccessor` or
+ * `ParseSink`), so `CanonPath` does not need an absolute/relative
+ * distinction.
+ *
+ * @note The path does not need to correspond to an actually existing
+ * path, and the path may or may not have unresolved symlinks.
  */
 class CanonPath
 {
diff --git a/src/libutil/file-path-impl.hh b/src/libutil/file-path-impl.hh
new file mode 100644
index 000000000..39159c7c2
--- /dev/null
+++ b/src/libutil/file-path-impl.hh
@@ -0,0 +1,81 @@
+#pragma once
+/**
+ * @file
+ *
+ * Pure (no IO) infrastructure just for defining other path types;
+ * should not be used directly outside of utilities.
+ */
+#include 
+#include 
+
+namespace nix {
+
+/**
+ * Core pure path canonicalization algorithm.
+ *
+ * @param hookComponent
+ *   A callback which is passed two arguments,
+ *   references to
+ *
+ *   1. the result so far
+ *
+ *   2. the remaining path to resolve
+ *
+ *   This is a chance to modify those two paths in arbitrary way, e.g. if
+ *   "result" points to a symlink.
+ */
+typename std::string canonPathInner(
+    std::string_view remaining,
+    auto && hookComponent)
+{
+    assert(remaining != "");
+
+    std::string result;
+    result.reserve(256);
+
+    while (true) {
+
+        /* Skip slashes. */
+        while (!remaining.empty() && remaining[0] == '/')
+            remaining.remove_prefix(1);
+
+        if (remaining.empty()) break;
+
+        auto nextComp = ({
+            auto nextPathSep = remaining.find('/');
+            nextPathSep == remaining.npos ? remaining : remaining.substr(0, nextPathSep);
+        });
+
+        /* Ignore `.'. */
+        if (nextComp == ".")
+            remaining.remove_prefix(1);
+
+        /* If `..', delete the last component. */
+        else if (nextComp == "..")
+        {
+            if (!result.empty()) result.erase(result.rfind('/'));
+            remaining.remove_prefix(2);
+        }
+
+        /* Normal component; copy it. */
+        else {
+            result += '/';
+            if (const auto slash = remaining.find('/'); slash == result.npos) {
+                result += remaining;
+                remaining = {};
+            } else {
+                result += remaining.substr(0, slash);
+                remaining = remaining.substr(slash);
+            }
+
+            hookComponent(result, remaining);
+        }
+    }
+
+    if (result.empty())
+        result = "/";
+
+    return result;
+}
+
+}
diff --git a/src/libutil/file-system.cc b/src/libutil/file-system.cc
index 9fa1f62df..3c019a9ed 100644
--- a/src/libutil/file-system.cc
+++ b/src/libutil/file-system.cc
@@ -1,5 +1,6 @@
 #include "environment-variables.hh"
 #include "file-system.hh"
+#include "file-path-impl.hh"
 #include "signals.hh"
 #include "finally.hh"
 #include "serialise.hh"
@@ -21,11 +22,18 @@ namespace fs = std::filesystem;
 
 namespace nix {
 
+/** Treat the string as possibly an absolute path, by inspecting the start of it. Return whether it was probably intended to be absolute. */
+static bool isAbsolute(PathView path)
+{
+    return !path.empty() && path[0] == '/';
+}
+
+
 Path absPath(PathView path, std::optional dir, bool resolveSymlinks)
 {
     std::string scratch;
 
-    if (path.empty() || path[0] != '/') {
+    if (!isAbsolute(path)) {
         // In this case we need to call `canonPath` on a newly-created
         // string. We set `scratch` to that string first, and then set
         // `path` to `scratch`. This ensures the newly-created string
@@ -58,69 +66,39 @@ Path canonPath(PathView path, bool resolveSymlinks)
 {
     assert(path != "");
 
-    std::string s;
-    s.reserve(256);
-
-    if (path[0] != '/')
+    if (!isAbsolute(path))
         throw Error("not an absolute path: '%1%'", path);
 
+    /* This just exists because we cannot set the target of `remaining`
+       (the callback parameter) directly to a newly-constructed string,
+       since it is `std::string_view`. */
     std::string temp;
 
     /* Count the number of times we follow a symlink and stop at some
        arbitrary (but high) limit to prevent infinite loops. */
     unsigned int followCount = 0, maxFollow = 1024;
 
-    while (1) {
-
-        /* Skip slashes. */
-        while (!path.empty() && path[0] == '/') path.remove_prefix(1);
-        if (path.empty()) break;
-
-        /* Ignore `.'. */
-        if (path == "." || path.substr(0, 2) == "./")
-            path.remove_prefix(1);
-
-        /* If `..', delete the last component. */
-        else if (path == ".." || path.substr(0, 3) == "../")
-        {
-            if (!s.empty()) s.erase(s.rfind('/'));
-            path.remove_prefix(2);
-        }
-
-        /* Normal component; copy it. */
-        else {
-            s += '/';
-            if (const auto slash = path.find('/'); slash == path.npos) {
-                s += path;
-                path = {};
-            } else {
-                s += path.substr(0, slash);
-                path = path.substr(slash);
-            }
-
-            /* If s points to a symlink, resolve it and continue from there */
-            if (resolveSymlinks && isLink(s)) {
+    return canonPathInner(
+        path,
+        [&followCount, &temp, maxFollow, resolveSymlinks]
+        (std::string & result, std::string_view & remaining) {
+            if (resolveSymlinks && isLink(result)) {
                 if (++followCount >= maxFollow)
-                    throw Error("infinite symlink recursion in path '%1%'", path);
-                temp = concatStrings(readLink(s), path);
-                path = temp;
-                if (!temp.empty() && temp[0] == '/') {
-                    s.clear();  /* restart for symlinks pointing to absolute path */
+                    throw Error("infinite symlink recursion in path '%0%'", remaining);
+                remaining = (temp = concatStrings(readLink(result), remaining));
+                if (isAbsolute(remaining)) {
+                    /* restart for symlinks pointing to absolute path */
+                    result.clear();
                 } else {
-                    s = dirOf(s);
-                    if (s == "/") {  // we don’t want trailing slashes here, which dirOf only produces if s = /
-                        s.clear();
+                    result = dirOf(result);
+                    if (result == "/") {
+                        /* we don’t want trailing slashes here, which `dirOf`
+                           only produces if `result = /` */
+                        result.clear();
                     }
                 }
             }
-        }
-    }
-
-    if (s.empty()) {
-        s = "/";
-    }
-
-    return s;
+        });
 }
 
 
diff --git a/tests/unit/libutil/canon-path.cc b/tests/unit/libutil/canon-path.cc
index bf11abe3e..7f91308af 100644
--- a/tests/unit/libutil/canon-path.cc
+++ b/tests/unit/libutil/canon-path.cc
@@ -41,6 +41,24 @@ namespace nix {
         }
     }
 
+    TEST(CanonPath, from_existing) {
+        CanonPath p0("foo//bar/");
+        {
+            CanonPath p("/baz//quux/", p0);
+            ASSERT_EQ(p.abs(), "/baz/quux");
+            ASSERT_EQ(p.rel(), "baz/quux");
+            ASSERT_EQ(*p.baseName(), "quux");
+            ASSERT_EQ(*p.dirOf(), "/baz");
+        }
+        {
+            CanonPath p("baz//quux/", p0);
+            ASSERT_EQ(p.abs(), "/foo/bar/baz/quux");
+            ASSERT_EQ(p.rel(), "foo/bar/baz/quux");
+            ASSERT_EQ(*p.baseName(), "quux");
+            ASSERT_EQ(*p.dirOf(), "/foo/bar/baz");
+        }
+    }
+
     TEST(CanonPath, pop) {
         CanonPath p("foo/bar/x");
         ASSERT_EQ(p.abs(), "/foo/bar/x");

From 4531585275254f13dae1ff61434e15865a1e796a Mon Sep 17 00:00:00 2001
From: John Ericson 
Date: Mon, 29 Jan 2024 17:16:18 -0500
Subject: [PATCH 111/164] Factor out the Unix-specific parts of
 `canonPathInner`

This prepares the code to also support Windows paths in the next commit.
---
 src/libutil/canon-path.cc     |  2 +-
 src/libutil/file-path-impl.hh | 52 +++++++++++++++++++++++++++++------
 src/libutil/file-system.cc    |  2 +-
 3 files changed, 45 insertions(+), 11 deletions(-)

diff --git a/src/libutil/canon-path.cc b/src/libutil/canon-path.cc
index fcd53862b..27f048697 100644
--- a/src/libutil/canon-path.cc
+++ b/src/libutil/canon-path.cc
@@ -8,7 +8,7 @@ CanonPath CanonPath::root = CanonPath("/");
 
 static std::string absPathPure(std::string_view path)
 {
-    return canonPathInner(path, [](auto &, auto &){});
+    return canonPathInner(path, [](auto &, auto &){});
 }
 
 CanonPath::CanonPath(std::string_view raw)
diff --git a/src/libutil/file-path-impl.hh b/src/libutil/file-path-impl.hh
index 39159c7c2..941d433e0 100644
--- a/src/libutil/file-path-impl.hh
+++ b/src/libutil/file-path-impl.hh
@@ -10,6 +10,39 @@
 
 namespace nix {
 
+/**
+ * Unix-style path primitives.
+ *
+ * Nix's own "logical" paths are always Unix-style. So this is always
+ * used for that, and additionally used for native paths on Unix.
+ */
+struct UnixPathTrait
+{
+    using CharT = char;
+
+    using String = std::string;
+
+    using StringView = std::string_view;
+
+    constexpr static char preferredSep = '/';
+
+    static inline bool isPathSep(char c)
+    {
+        return c == '/';
+    }
+
+    static inline size_t findPathSep(StringView path, size_t from = 0)
+    {
+        return path.find('/', from);
+    }
+
+    static inline size_t rfindPathSep(StringView path, size_t from = StringView::npos)
+    {
+        return path.rfind('/', from);
+    }
+};
+
+
 /**
  * Core pure path canonicalization algorithm.
  *
@@ -24,25 +57,26 @@ namespace nix {
  *   This is a chance to modify those two paths in arbitrary way, e.g. if
  *   "result" points to a symlink.
  */
-typename std::string canonPathInner(
-    std::string_view remaining,
+template
+typename PathDict::String canonPathInner(
+    typename PathDict::StringView remaining,
     auto && hookComponent)
 {
     assert(remaining != "");
 
-    std::string result;
+    typename PathDict::String result;
     result.reserve(256);
 
     while (true) {
 
         /* Skip slashes. */
-        while (!remaining.empty() && remaining[0] == '/')
+        while (!remaining.empty() && PathDict::isPathSep(remaining[0]))
             remaining.remove_prefix(1);
 
         if (remaining.empty()) break;
 
         auto nextComp = ({
-            auto nextPathSep = remaining.find('/');
+            auto nextPathSep = PathDict::findPathSep(remaining);
             nextPathSep == remaining.npos ? remaining : remaining.substr(0, nextPathSep);
         });
 
@@ -53,14 +87,14 @@ typename std::string canonPathInner(
         /* If `..', delete the last component. */
         else if (nextComp == "..")
         {
-            if (!result.empty()) result.erase(result.rfind('/'));
+            if (!result.empty()) result.erase(PathDict::rfindPathSep(result));
             remaining.remove_prefix(2);
         }
 
         /* Normal component; copy it. */
         else {
-            result += '/';
-            if (const auto slash = remaining.find('/'); slash == result.npos) {
+            result += PathDict::preferredSep;
+            if (const auto slash = PathDict::findPathSep(remaining); slash == result.npos) {
                 result += remaining;
                 remaining = {};
             } else {
@@ -73,7 +107,7 @@ typename std::string canonPathInner(
     }
 
     if (result.empty())
-        result = "/";
+        result = typename PathDict::String { PathDict::preferredSep };
 
     return result;
 }
diff --git a/src/libutil/file-system.cc b/src/libutil/file-system.cc
index 3c019a9ed..ff83bc4ea 100644
--- a/src/libutil/file-system.cc
+++ b/src/libutil/file-system.cc
@@ -78,7 +78,7 @@ Path canonPath(PathView path, bool resolveSymlinks)
        arbitrary (but high) limit to prevent infinite loops. */
     unsigned int followCount = 0, maxFollow = 1024;
 
-    return canonPathInner(
+    return canonPathInner(
         path,
         [&followCount, &temp, maxFollow, resolveSymlinks]
         (std::string & result, std::string_view & remaining) {

From 319ec6f84accb7342160b856185402dcdebbaba9 Mon Sep 17 00:00:00 2001
From: John Ericson 
Date: Sun, 14 Jan 2024 14:30:25 -0500
Subject: [PATCH 112/164] Support Windows paths in `canonPath` and `absPath`

`canonPath` and `absPath` work on native paths, and so should switch
between supporting Unix paths and Windows paths accordingly.

The templating is because `CanonPath`, which shares the implementation,
should always be Unix style. It is the pure "nix-native" path type for
virtual file operations --- it is part of Nix's "business logic", and
should not vary with the host OS accordingly.
---
 src/libutil/file-path-impl.hh | 61 +++++++++++++++++++++++++++++++++++
 src/libutil/file-system.cc    | 17 ++++++++--
 tests/unit/libutil/tests.cc   | 30 ++++++++++-------
 3 files changed, 94 insertions(+), 14 deletions(-)

diff --git a/src/libutil/file-path-impl.hh b/src/libutil/file-path-impl.hh
index 941d433e0..4c90150fd 100644
--- a/src/libutil/file-path-impl.hh
+++ b/src/libutil/file-path-impl.hh
@@ -43,6 +43,67 @@ struct UnixPathTrait
 };
 
 
+/**
+ * Windows-style path primitives.
+ *
+ * The character type is a parameter because while Windows paths rightly
+ * work over UTF-16 (*) using `wchar_t`, at the current time we are
+ * often manipulating them converted to UTF-8 (*) using `char`.
+ *
+ * (Actually neither are guaranteed to be valid unicode; both are
+ * arbitrary non-0 8- or 16-bit bytes. But for characters with special
+ * meaning like '/', '\\', ':', etc., we refer to an encoding scheme,
+ * and also for the sake of UIs that display paths as text.)
+ */
+template
+struct WindowsPathTrait
+{
+    using CharT = CharT0;
+
+    using String = std::basic_string;
+
+    using StringView = std::basic_string_view;
+
+    constexpr static CharT preferredSep = '\\';
+
+    static inline bool isPathSep(CharT c)
+    {
+        return c == '/' || c == preferredSep;
+    }
+
+    static size_t findPathSep(StringView path, size_t from = 0)
+    {
+        size_t p1 = path.find('/', from);
+        size_t p2 = path.find(preferredSep, from);
+        return p1 == String::npos ? p2 :
+               p2 == String::npos ? p1 :
+               std::min(p1, p2);
+    }
+
+    static size_t rfindPathSep(StringView path, size_t from = String::npos)
+    {
+        size_t p1 = path.rfind('/', from);
+        size_t p2 = path.rfind(preferredSep, from);
+        return p1 == String::npos ? p2 :
+               p2 == String::npos ? p1 :
+               std::max(p1, p2);
+    }
+};
+
+
+/**
+ * @todo Revisit choice of `char` or `wchar_t` for `WindowsPathTrait`
+ * argument.
+ */
+using NativePathTrait =
+#ifdef _WIN32
+    WindowsPathTrait
+#else
+    UnixPathTrait
+#endif
+    ;
+
+
 /**
  * Core pure path canonicalization algorithm.
  *
diff --git a/src/libutil/file-system.cc b/src/libutil/file-system.cc
index ff83bc4ea..b0a3f0797 100644
--- a/src/libutil/file-system.cc
+++ b/src/libutil/file-system.cc
@@ -22,10 +22,14 @@ namespace fs = std::filesystem;
 
 namespace nix {
 
-/** Treat the string as possibly an absolute path, by inspecting the start of it. Return whether it was probably intended to be absolute. */
+/**
+ * Treat the string as possibly an absolute path, by inspecting the
+ * start of it. Return whether it was probably intended to be
+ * absolute.
+ */
 static bool isAbsolute(PathView path)
 {
-    return !path.empty() && path[0] == '/';
+    return fs::path { path }.is_absolute();
 }
 
 
@@ -69,6 +73,9 @@ Path canonPath(PathView path, bool resolveSymlinks)
     if (!isAbsolute(path))
         throw Error("not an absolute path: '%1%'", path);
 
+    // For Windows
+    auto rootName = fs::path { path }.root_name();
+
     /* This just exists because we cannot set the target of `remaining`
        (the callback parameter) directly to a newly-constructed string,
        since it is `std::string_view`. */
@@ -78,7 +85,7 @@ Path canonPath(PathView path, bool resolveSymlinks)
        arbitrary (but high) limit to prevent infinite loops. */
     unsigned int followCount = 0, maxFollow = 1024;
 
-    return canonPathInner(
+    auto ret = canonPathInner(
         path,
         [&followCount, &temp, maxFollow, resolveSymlinks]
         (std::string & result, std::string_view & remaining) {
@@ -99,6 +106,10 @@ Path canonPath(PathView path, bool resolveSymlinks)
                 }
             }
         });
+
+    if (!rootName.empty())
+        ret = rootName.string() + std::move(ret);
+    return ret;
 }
 
 
diff --git a/tests/unit/libutil/tests.cc b/tests/unit/libutil/tests.cc
index 568f03f70..4406fd184 100644
--- a/tests/unit/libutil/tests.cc
+++ b/tests/unit/libutil/tests.cc
@@ -9,6 +9,14 @@
 
 #include 
 
+#ifdef _WIN32
+# define FS_SEP "\\"
+# define FS_ROOT "C:" FS_SEP // Need a mounted one, C drive is likely
+#else
+# define FS_SEP "/"
+# define FS_ROOT FS_SEP
+#endif
+
 namespace nix {
 
 /* ----------- tests for util.hh ------------------------------------------------*/
@@ -18,9 +26,9 @@ namespace nix {
      * --------------------------------------------------------------------------*/
 
     TEST(absPath, doesntChangeRoot) {
-        auto p = absPath("/");
+        auto p = absPath(FS_ROOT);
 
-        ASSERT_EQ(p, "/");
+        ASSERT_EQ(p, FS_ROOT);
     }
 
 
@@ -53,11 +61,11 @@ namespace nix {
 
 
     TEST(absPath, pathIsCanonicalised) {
-        auto path = "/some/path/with/trailing/dot/.";
+        auto path = FS_ROOT "some/path/with/trailing/dot/.";
         auto p1 = absPath(path);
         auto p2 = absPath(p1);
 
-        ASSERT_EQ(p1, "/some/path/with/trailing/dot");
+        ASSERT_EQ(p1, FS_ROOT "some" FS_SEP "path" FS_SEP "with" FS_SEP "trailing" FS_SEP "dot");
         ASSERT_EQ(p1, p2);
     }
 
@@ -66,24 +74,24 @@ namespace nix {
      * --------------------------------------------------------------------------*/
 
     TEST(canonPath, removesTrailingSlashes) {
-        auto path = "/this/is/a/path//";
+        auto path = FS_ROOT "this/is/a/path//";
         auto p = canonPath(path);
 
-        ASSERT_EQ(p, "/this/is/a/path");
+        ASSERT_EQ(p, FS_ROOT "this" FS_SEP "is" FS_SEP "a" FS_SEP "path");
     }
 
     TEST(canonPath, removesDots) {
-        auto path = "/this/./is/a/path/./";
+        auto path = FS_ROOT "this/./is/a/path/./";
         auto p = canonPath(path);
 
-        ASSERT_EQ(p, "/this/is/a/path");
+        ASSERT_EQ(p, FS_ROOT "this" FS_SEP "is" FS_SEP "a" FS_SEP "path");
     }
 
     TEST(canonPath, removesDots2) {
-        auto path = "/this/a/../is/a////path/foo/..";
+        auto path = FS_ROOT "this/a/../is/a////path/foo/..";
         auto p = canonPath(path);
 
-        ASSERT_EQ(p, "/this/is/a/path");
+        ASSERT_EQ(p, FS_ROOT "this" FS_SEP "is" FS_SEP "a" FS_SEP "path");
     }
 
     TEST(canonPath, requiresAbsolutePath) {
@@ -197,7 +205,7 @@ namespace nix {
      * --------------------------------------------------------------------------*/
 
     TEST(pathExists, rootExists) {
-        ASSERT_TRUE(pathExists("/"));
+        ASSERT_TRUE(pathExists(FS_ROOT));
     }
 
     TEST(pathExists, cwdExists) {

From 6162105675762a394603dbbf39cb1fa55065fec3 Mon Sep 17 00:00:00 2001
From: Eelco Dolstra 
Date: Tue, 20 Feb 2024 10:28:44 +0100
Subject: [PATCH 113/164] Don't say "copying X to the store" in read-only mode

---
 src/libfetchers/fetch-to-store.cc | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)

diff --git a/src/libfetchers/fetch-to-store.cc b/src/libfetchers/fetch-to-store.cc
index c27880662..f5c740266 100644
--- a/src/libfetchers/fetch-to-store.cc
+++ b/src/libfetchers/fetch-to-store.cc
@@ -33,12 +33,15 @@ StorePath fetchToStore(
     } else
         debug("source path '%s' is uncacheable", path);
 
-    Activity act(*logger, lvlChatty, actUnknown, fmt("copying '%s' to the store", path));
+    auto readOnly = settings.readOnlyMode;
+
+    Activity act(*logger, lvlChatty, actUnknown,
+        fmt(readOnly ? "hashing '%s'" : "copying '%s' to the store", path));
 
     auto filter2 = filter ? *filter : defaultPathFilter;
 
     auto storePath =
-        settings.readOnlyMode
+        readOnly
         ? store.computeStorePath(
             name, *path.accessor, path.path, method, HashAlgorithm::SHA256, {}, filter2).first
         : store.addToStore(

From d52d91fe7a349d24a83b8698b3d04874c9f52cd2 Mon Sep 17 00:00:00 2001
From: Eelco Dolstra 
Date: Tue, 20 Feb 2024 11:21:28 +0100
Subject: [PATCH 114/164] AllowListInputAccessor: Clarify that the "allowed
 paths" are actually allowed prefixes

E.g. adding "/" will allow access to the root and *everything below it*.
---
 src/libexpr/eval.cc                         |  4 ++--
 src/libfetchers/filtering-input-accessor.cc | 16 ++++++++--------
 src/libfetchers/filtering-input-accessor.hh |  9 +++++----
 3 files changed, 15 insertions(+), 14 deletions(-)

diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc
index 6fc9df237..41b6f5c85 100644
--- a/src/libexpr/eval.cc
+++ b/src/libexpr/eval.cc
@@ -467,13 +467,13 @@ EvalState::~EvalState()
 void EvalState::allowPath(const Path & path)
 {
     if (auto rootFS2 = rootFS.dynamic_pointer_cast())
-        rootFS2->allowPath(CanonPath(path));
+        rootFS2->allowPrefix(CanonPath(path));
 }
 
 void EvalState::allowPath(const StorePath & storePath)
 {
     if (auto rootFS2 = rootFS.dynamic_pointer_cast())
-        rootFS2->allowPath(CanonPath(store->toRealPath(storePath)));
+        rootFS2->allowPrefix(CanonPath(store->toRealPath(storePath)));
 }
 
 void EvalState::allowAndSetStorePathString(const StorePath & storePath, Value & v)
diff --git a/src/libfetchers/filtering-input-accessor.cc b/src/libfetchers/filtering-input-accessor.cc
index 087a100af..32343abc4 100644
--- a/src/libfetchers/filtering-input-accessor.cc
+++ b/src/libfetchers/filtering-input-accessor.cc
@@ -51,33 +51,33 @@ void FilteringInputAccessor::checkAccess(const CanonPath & path)
 
 struct AllowListInputAccessorImpl : AllowListInputAccessor
 {
-    std::set allowedPaths;
+    std::set allowedPrefixes;
 
     AllowListInputAccessorImpl(
         ref next,
-        std::set && allowedPaths,
+        std::set && allowedPrefixes,
         MakeNotAllowedError && makeNotAllowedError)
         : AllowListInputAccessor(SourcePath(next), std::move(makeNotAllowedError))
-        , allowedPaths(std::move(allowedPaths))
+        , allowedPrefixes(std::move(allowedPrefixes))
     { }
 
     bool isAllowed(const CanonPath & path) override
     {
-        return path.isAllowed(allowedPaths);
+        return path.isAllowed(allowedPrefixes);
     }
 
-    void allowPath(CanonPath path) override
+    void allowPrefix(CanonPath prefix) override
     {
-        allowedPaths.insert(std::move(path));
+        allowedPrefixes.insert(std::move(prefix));
     }
 };
 
 ref AllowListInputAccessor::create(
     ref next,
-    std::set && allowedPaths,
+    std::set && allowedPrefixes,
     MakeNotAllowedError && makeNotAllowedError)
 {
-    return make_ref(next, std::move(allowedPaths), std::move(makeNotAllowedError));
+    return make_ref(next, std::move(allowedPrefixes), std::move(makeNotAllowedError));
 }
 
 bool CachingFilteringInputAccessor::isAllowed(const CanonPath & path)
diff --git a/src/libfetchers/filtering-input-accessor.hh b/src/libfetchers/filtering-input-accessor.hh
index 8a9b206ee..8111a72c5 100644
--- a/src/libfetchers/filtering-input-accessor.hh
+++ b/src/libfetchers/filtering-input-accessor.hh
@@ -54,18 +54,19 @@ struct FilteringInputAccessor : InputAccessor
 };
 
 /**
- * A wrapping `InputAccessor` that checks paths against an allow-list.
+ * A wrapping `InputAccessor` that checks paths against a set of
+ * allowed prefixes.
  */
 struct AllowListInputAccessor : public FilteringInputAccessor
 {
     /**
-     * Grant access to the specified path.
+     * Grant access to the specified prefix.
      */
-    virtual void allowPath(CanonPath path) = 0;
+    virtual void allowPrefix(CanonPath prefix) = 0;
 
     static ref create(
         ref next,
-        std::set && allowedPaths,
+        std::set && allowedPrefixes,
         MakeNotAllowedError && makeNotAllowedError);
 
     using FilteringInputAccessor::FilteringInputAccessor;

From 9e762454cf62d0d7a6259b560cc3e340f6f5ec6e Mon Sep 17 00:00:00 2001
From: Eelco Dolstra 
Date: Tue, 20 Feb 2024 11:40:02 +0100
Subject: [PATCH 115/164] Support empty Git repositories / workdirs

Fixes #10039.
---
 src/libfetchers/git-utils.cc             | 21 ++++++++++++--------
 src/libfetchers/git.cc                   |  8 ++++++--
 src/libfetchers/memory-input-accessor.cc |  6 ++++++
 src/libfetchers/memory-input-accessor.hh |  2 ++
 tests/functional/fetchGit.sh             | 25 ++++++++++++++++++++++++
 5 files changed, 52 insertions(+), 10 deletions(-)

diff --git a/src/libfetchers/git-utils.cc b/src/libfetchers/git-utils.cc
index 4f034e9d4..037fcc365 100644
--- a/src/libfetchers/git-utils.cc
+++ b/src/libfetchers/git-utils.cc
@@ -2,6 +2,7 @@
 #include "fs-input-accessor.hh"
 #include "input-accessor.hh"
 #include "filtering-input-accessor.hh"
+#include "memory-input-accessor.hh"
 #include "cache.hh"
 #include "finally.hh"
 #include "processes.hh"
@@ -942,17 +943,21 @@ ref GitRepoImpl::getAccessor(const Hash & rev, bool exportIgnore)
 ref GitRepoImpl::getAccessor(const WorkdirInfo & wd, bool exportIgnore, MakeNotAllowedError makeNotAllowedError)
 {
     auto self = ref(shared_from_this());
+    /* In case of an empty workdir, return an empty in-memory tree. We
+       cannot use AllowListInputAccessor because it would return an
+       error for the root (and we can't add the root to the allow-list
+       since that would allow access to all its children). */
     ref fileAccessor =
-        AllowListInputAccessor::create(
-                makeFSInputAccessor(path),
-                std::set { wd.files },
-                std::move(makeNotAllowedError));
-    if (exportIgnore) {
+        wd.files.empty()
+        ? makeEmptyInputAccessor()
+        : AllowListInputAccessor::create(
+            makeFSInputAccessor(path),
+            std::set { wd.files },
+            std::move(makeNotAllowedError)).cast();
+    if (exportIgnore)
         return make_ref(self, fileAccessor, std::nullopt);
-    }
-    else {
+    else
         return fileAccessor;
-    }
 }
 
 ref GitRepoImpl::getFileSystemObjectSink()
diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc
index bef945d54..97ef35b51 100644
--- a/src/libfetchers/git.cc
+++ b/src/libfetchers/git.cc
@@ -158,6 +158,8 @@ std::vector getPublicKeys(const Attrs & attrs)
 
 }  // end namespace
 
+static const Hash nullRev{HashAlgorithm::SHA1};
+
 struct GitInputScheme : InputScheme
 {
     std::optional inputFromURL(const ParsedURL & url, bool requireTree) const override
@@ -708,10 +710,12 @@ struct GitInputScheme : InputScheme
             if (auto ref = repo->getWorkdirRef())
                 input.attrs.insert_or_assign("ref", *ref);
 
-            auto rev = repoInfo.workdirInfo.headRev.value();
+            /* Return a rev of 000... if there are no commits yet. */
+            auto rev = repoInfo.workdirInfo.headRev.value_or(nullRev);
 
             input.attrs.insert_or_assign("rev", rev.gitRev());
-            input.attrs.insert_or_assign("revCount", getRevCount(repoInfo, repoInfo.url, rev));
+            input.attrs.insert_or_assign("revCount",
+                rev == nullRev ? 0 : getRevCount(repoInfo, repoInfo.url, rev));
 
             verifyCommit(input, repo);
         } else {
diff --git a/src/libfetchers/memory-input-accessor.cc b/src/libfetchers/memory-input-accessor.cc
index 88a2e34e8..34a801f67 100644
--- a/src/libfetchers/memory-input-accessor.cc
+++ b/src/libfetchers/memory-input-accessor.cc
@@ -20,4 +20,10 @@ ref makeMemoryInputAccessor()
     return make_ref();
 }
 
+ref makeEmptyInputAccessor()
+{
+    static auto empty = makeMemoryInputAccessor().cast();
+    return empty;
+}
+
 }
diff --git a/src/libfetchers/memory-input-accessor.hh b/src/libfetchers/memory-input-accessor.hh
index 508b07722..63afadd2a 100644
--- a/src/libfetchers/memory-input-accessor.hh
+++ b/src/libfetchers/memory-input-accessor.hh
@@ -13,4 +13,6 @@ struct MemoryInputAccessor : InputAccessor
 
 ref makeMemoryInputAccessor();
 
+ref makeEmptyInputAccessor();
+
 }
diff --git a/tests/functional/fetchGit.sh b/tests/functional/fetchGit.sh
index ea90f8ebe..0583774c4 100644
--- a/tests/functional/fetchGit.sh
+++ b/tests/functional/fetchGit.sh
@@ -268,3 +268,28 @@ git -C "$repo" add hello .gitignore
 git -C "$repo" commit -m 'Bla1'
 cd "$repo"
 path11=$(nix eval --impure --raw --expr "(builtins.fetchGit ./.).outPath")
+
+# Test a workdir with no commits.
+empty="$TEST_ROOT/empty"
+git init "$empty"
+
+emptyAttrs='{ lastModified = 0; lastModifiedDate = "19700101000000"; narHash = "sha256-pQpattmS9VmO3ZIQUFn66az8GSmB4IvYhTTCFn6SUmo="; rev = "0000000000000000000000000000000000000000"; revCount = 0; shortRev = "0000000"; submodules = false; }'
+
+[[ $(nix eval --impure --expr "builtins.removeAttrs (builtins.fetchGit $empty) [\"outPath\"]") = $emptyAttrs ]]
+
+echo foo > "$empty/x"
+
+[[ $(nix eval --impure --expr "builtins.removeAttrs (builtins.fetchGit $empty) [\"outPath\"]") = $emptyAttrs ]]
+
+git -C "$empty" add x
+
+[[ $(nix eval --impure --expr "builtins.removeAttrs (builtins.fetchGit $empty) [\"outPath\"]") = '{ lastModified = 0; lastModifiedDate = "19700101000000"; narHash = "sha256-wzlAGjxKxpaWdqVhlq55q5Gxo4Bf860+kLeEa/v02As="; rev = "0000000000000000000000000000000000000000"; revCount = 0; shortRev = "0000000"; submodules = false; }' ]]
+
+# Test a repo with an empty commit.
+git -C "$empty" rm -f x
+
+git -C "$empty" config user.email "foobar@example.com"
+git -C "$empty" config user.name "Foobar"
+git -C "$empty" commit --allow-empty --allow-empty-message --message ""
+
+nix eval --impure --expr "let attrs = builtins.fetchGit $empty; in assert attrs.lastModified != 0; assert attrs.rev != \"0000000000000000000000000000000000000000\"; assert attrs.revCount == 1; true"

From 7cb4d0c5b7dee435ea4b25e0c6dec4d60ad3675f Mon Sep 17 00:00:00 2001
From: Eelco Dolstra 
Date: Tue, 20 Feb 2024 10:36:36 +0100
Subject: [PATCH 116/164] fetchToStore(): Don't always respect
 settings.readOnlyMode

It's now up to the caller whether readOnlyMode should be applied. In
some contexts (like InputScheme::fetch()), we always need to fetch.
---
 src/libcmd/installable-value.cc   | 2 +-
 src/libexpr/eval.cc               | 9 ++++++++-
 src/libexpr/primops.cc            | 9 ++++++++-
 src/libfetchers/fetch-to-store.cc | 9 ++++-----
 src/libfetchers/fetch-to-store.hh | 3 +++
 src/libfetchers/fetchers.cc       | 2 +-
 tests/functional/fetchGit.sh      | 5 ++++-
 7 files changed, 29 insertions(+), 10 deletions(-)

diff --git a/src/libcmd/installable-value.cc b/src/libcmd/installable-value.cc
index c8a3e1b21..1aa2e65c1 100644
--- a/src/libcmd/installable-value.cc
+++ b/src/libcmd/installable-value.cc
@@ -45,7 +45,7 @@ ref InstallableValue::require(ref installable)
 std::optional InstallableValue::trySinglePathToDerivedPaths(Value & v, const PosIdx pos, std::string_view errorCtx)
 {
     if (v.type() == nPath) {
-        auto storePath = fetchToStore(*state->store, v.path());
+        auto storePath = fetchToStore(*state->store, v.path(), FetchMode::Copy);
         return {{
             .path = DerivedPath::Opaque {
                 .path = std::move(storePath),
diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc
index 6fc9df237..4919ac358 100644
--- a/src/libexpr/eval.cc
+++ b/src/libexpr/eval.cc
@@ -2339,7 +2339,14 @@ StorePath EvalState::copyPathToStore(NixStringContext & context, const SourcePat
     auto dstPath = i != srcToStore.end()
         ? i->second
         : [&]() {
-            auto dstPath = fetchToStore(*store, path.resolveSymlinks(), path.baseName(), FileIngestionMethod::Recursive, nullptr, repair);
+            auto dstPath = fetchToStore(
+                *store,
+                path.resolveSymlinks(),
+                settings.readOnlyMode ? FetchMode::DryRun : FetchMode::Copy,
+                path.baseName(),
+                FileIngestionMethod::Recursive,
+                nullptr,
+                repair);
             allowPath(dstPath);
             srcToStore.insert_or_assign(path, dstPath);
             printMsg(lvlChatty, "copied source '%1%' -> '%2%'", path, store->printStorePath(dstPath));
diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc
index 8c6aeffac..89d9704ee 100644
--- a/src/libexpr/primops.cc
+++ b/src/libexpr/primops.cc
@@ -2228,7 +2228,14 @@ static void addPath(
             });
 
         if (!expectedHash || !state.store->isValidPath(*expectedStorePath)) {
-            auto dstPath = fetchToStore(*state.store, path.resolveSymlinks(), name, method, filter.get(), state.repair);
+            auto dstPath = fetchToStore(
+                *state.store,
+                path.resolveSymlinks(),
+                settings.readOnlyMode ? FetchMode::DryRun : FetchMode::Copy,
+                name,
+                method,
+                filter.get(),
+                state.repair);
             if (expectedHash && expectedStorePath != dstPath)
                 state.error(
                     "store path mismatch in (possibly filtered) path added from '%s'",
diff --git a/src/libfetchers/fetch-to-store.cc b/src/libfetchers/fetch-to-store.cc
index f5c740266..398286065 100644
--- a/src/libfetchers/fetch-to-store.cc
+++ b/src/libfetchers/fetch-to-store.cc
@@ -7,6 +7,7 @@ namespace nix {
 StorePath fetchToStore(
     Store & store,
     const SourcePath & path,
+    FetchMode mode,
     std::string_view name,
     ContentAddressMethod method,
     PathFilter * filter,
@@ -33,21 +34,19 @@ StorePath fetchToStore(
     } else
         debug("source path '%s' is uncacheable", path);
 
-    auto readOnly = settings.readOnlyMode;
-
     Activity act(*logger, lvlChatty, actUnknown,
-        fmt(readOnly ? "hashing '%s'" : "copying '%s' to the store", path));
+        fmt(mode == FetchMode::DryRun ? "hashing '%s'" : "copying '%s' to the store", path));
 
     auto filter2 = filter ? *filter : defaultPathFilter;
 
     auto storePath =
-        readOnly
+        mode == FetchMode::DryRun
         ? store.computeStorePath(
             name, *path.accessor, path.path, method, HashAlgorithm::SHA256, {}, filter2).first
         : store.addToStore(
             name, *path.accessor, path.path, method, HashAlgorithm::SHA256, {}, filter2, repair);
 
-    if (cacheKey)
+    if (cacheKey && mode == FetchMode::Copy)
         fetchers::getCache()->add(store, *cacheKey, {}, storePath, true);
 
     return storePath;
diff --git a/src/libfetchers/fetch-to-store.hh b/src/libfetchers/fetch-to-store.hh
index e5e039340..81af1e240 100644
--- a/src/libfetchers/fetch-to-store.hh
+++ b/src/libfetchers/fetch-to-store.hh
@@ -8,12 +8,15 @@
 
 namespace nix {
 
+enum struct FetchMode { DryRun, Copy };
+
 /**
  * Copy the `path` to the Nix store.
  */
 StorePath fetchToStore(
     Store & store,
     const SourcePath & path,
+    FetchMode mode,
     std::string_view name = "source",
     ContentAddressMethod method = FileIngestionMethod::Recursive,
     PathFilter * filter = nullptr,
diff --git a/src/libfetchers/fetchers.cc b/src/libfetchers/fetchers.cc
index 7f282c972..9a534c1e2 100644
--- a/src/libfetchers/fetchers.cc
+++ b/src/libfetchers/fetchers.cc
@@ -376,7 +376,7 @@ void InputScheme::clone(const Input & input, const Path & destDir) const
 std::pair InputScheme::fetch(ref store, const Input & input)
 {
     auto [accessor, input2] = getAccessor(store, input);
-    auto storePath = fetchToStore(*store, SourcePath(accessor), input2.getName());
+    auto storePath = fetchToStore(*store, SourcePath(accessor), FetchMode::Copy, input2.getName());
     return {storePath, input2};
 }
 
diff --git a/tests/functional/fetchGit.sh b/tests/functional/fetchGit.sh
index ea90f8ebe..4e71cfe8c 100644
--- a/tests/functional/fetchGit.sh
+++ b/tests/functional/fetchGit.sh
@@ -30,7 +30,10 @@ echo hello >> $TEST_ROOT/worktree/hello
 rev2=$(git -C $repo rev-parse HEAD)
 git -C $repo tag -a tag2 -m tag2
 
-# Fetch a worktree
+# Check whether fetching in read-only mode works.
+nix-instantiate --eval -E "builtins.readFile ((builtins.fetchGit file://$TEST_ROOT/worktree) + \"/hello\") == \"utrecht\\n\""
+
+# Fetch a worktree.
 unset _NIX_FORCE_HTTP
 path0=$(nix eval --impure --raw --expr "(builtins.fetchGit file://$TEST_ROOT/worktree).outPath")
 path0_=$(nix eval --impure --raw --expr "(builtins.fetchTree { type = \"git\"; url = file://$TEST_ROOT/worktree; }).outPath")

From db012d1e6395b342633ae8037841a9e751281b1e Mon Sep 17 00:00:00 2001
From: Eelco Dolstra 
Date: Mon, 19 Feb 2024 13:15:49 +0100
Subject: [PATCH 117/164] tests/functional/tarball.sh: Fix invalid file:// URLs

---
 tests/functional/tarball.sh | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/tests/functional/tarball.sh b/tests/functional/tarball.sh
index e59ee400e..391c21385 100644
--- a/tests/functional/tarball.sh
+++ b/tests/functional/tarball.sh
@@ -42,11 +42,11 @@ test_tarball() {
     nix-instantiate --strict --eval -E "!((import (fetchTree { type = \"tarball\"; url = file://$tarball; narHash = \"$hash\"; })) ? submodules)" >&2
     nix-instantiate --strict --eval -E "!((import (fetchTree { type = \"tarball\"; url = file://$tarball; narHash = \"$hash\"; })) ? submodules)" 2>&1 | grep 'true'
 
-    nix-instantiate --eval -E '1 + 2' -I fnord=file://no-such-tarball.tar$ext
-    nix-instantiate --eval -E 'with ; 1 + 2' -I fnord=file://no-such-tarball$ext
-    (! nix-instantiate --eval -E ' 1' -I fnord=file://no-such-tarball$ext)
+    nix-instantiate --eval -E '1 + 2' -I fnord=file:///no-such-tarball.tar$ext
+    nix-instantiate --eval -E 'with ; 1 + 2' -I fnord=file:///no-such-tarball$ext
+    (! nix-instantiate --eval -E ' 1' -I fnord=file:///no-such-tarball$ext)
 
-    nix-instantiate --eval -E '' -I fnord=file://no-such-tarball$ext -I fnord=.
+    nix-instantiate --eval -E '' -I fnord=file:///no-such-tarball$ext -I fnord=.
 
     # Ensure that the `name` attribute isn’t accepted as that would mess
     # with the content-addressing

From b00f412f818771c92934614e13382c3087f69587 Mon Sep 17 00:00:00 2001
From: Eelco Dolstra 
Date: Fri, 13 Oct 2023 13:21:38 +0200
Subject: [PATCH 118/164] Remove bad.tar.xz check, since libarchive doesn't
 care

---
 tests/functional/bad.tar.xz | Bin 228 -> 0 bytes
 tests/functional/tarball.sh |   5 -----
 2 files changed, 5 deletions(-)
 delete mode 100644 tests/functional/bad.tar.xz

diff --git a/tests/functional/bad.tar.xz b/tests/functional/bad.tar.xz
deleted file mode 100644
index 250a5ad1a79ee088d5976160664daad6e1a136ff..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 228
zcmVuwgxWr*GQJ}UxbD$dT
zFZcR4Oq`4c+brLa?D6R=fMJjM$?MdlXt0Pp!zwDzlSF=e({Qq}bnc_B9C}zwW!F?<
z;h>7LqPxu=kzFWj+$Z*4&0g78Yyp1kn1+nCzFC}~s1p
Date: Mon, 19 Feb 2024 13:54:40 +0100
Subject: [PATCH 119/164] PosixSourceAccessor: Support roots that are not
 directories

We have to support this for `fetchTree { type = "file" }` (and
probably other types of trees that can have a non-directory at the
root, like NARs).
---
 src/libutil/posix-source-accessor.cc | 5 +++++
 tests/functional/fetchTree-file.sh   | 1 +
 2 files changed, 6 insertions(+)

diff --git a/src/libutil/posix-source-accessor.cc b/src/libutil/posix-source-accessor.cc
index 0300de01e..f8ec7fc6b 100644
--- a/src/libutil/posix-source-accessor.cc
+++ b/src/libutil/posix-source-accessor.cc
@@ -30,6 +30,11 @@ std::filesystem::path PosixSourceAccessor::makeAbsPath(const CanonPath & path)
 {
     return root.empty()
         ? (std::filesystem::path { path.abs() })
+        : path.isRoot()
+        ? /* Don't append a slash for the root of the accessor, since
+             it can be a non-directory (e.g. in the case of `fetchTree
+             { type = "file" }`). */
+          root
         : root / path.rel();
 }
 
diff --git a/tests/functional/fetchTree-file.sh b/tests/functional/fetchTree-file.sh
index 6395c133d..be698ea35 100644
--- a/tests/functional/fetchTree-file.sh
+++ b/tests/functional/fetchTree-file.sh
@@ -14,6 +14,7 @@ test_fetch_file () {
         tree = builtins.fetchTree { type = "file"; url = "file://$PWD/test_input"; };
     in
     assert (tree.narHash == "$input_hash");
+    assert builtins.readFile tree == "foo\n";
     tree
 EOF
 }

From cabee9815239af426cece729cb765810b8a716ce Mon Sep 17 00:00:00 2001
From: Eelco Dolstra 
Date: Tue, 20 Feb 2024 12:57:36 +0100
Subject: [PATCH 120/164] Tarball fetcher: Use the content-addressed Git cache

Backported from the lazy-trees branch.
---
 src/libcmd/common-eval-args.cc   |   6 +-
 src/libexpr/eval.cc              |   7 +-
 src/libexpr/primops/fetchTree.cc |   3 +-
 src/libfetchers/git-utils.cc     |  16 +++
 src/libfetchers/git-utils.hh     |   6 ++
 src/libfetchers/tarball.cc       | 168 ++++++++++++++++++-------------
 src/libfetchers/tarball.hh       |  12 ++-
 src/libstore/filetransfer.cc     |  29 ++++--
 src/libstore/filetransfer.hh     |  33 +++++-
 9 files changed, 185 insertions(+), 95 deletions(-)

diff --git a/src/libcmd/common-eval-args.cc b/src/libcmd/common-eval-args.cc
index 58f04e225..444ff81c9 100644
--- a/src/libcmd/common-eval-args.cc
+++ b/src/libcmd/common-eval-args.cc
@@ -9,6 +9,7 @@
 #include "store-api.hh"
 #include "command.hh"
 #include "tarball.hh"
+#include "fetch-to-store.hh"
 
 namespace nix {
 
@@ -167,8 +168,9 @@ Bindings * MixEvalArgs::getAutoArgs(EvalState & state)
 SourcePath lookupFileArg(EvalState & state, std::string_view s, const Path * baseDir)
 {
     if (EvalSettings::isPseudoUrl(s)) {
-        auto storePath = fetchers::downloadTarball(
-            state.store, EvalSettings::resolvePseudoUrl(s), "source", false).storePath;
+        auto accessor = fetchers::downloadTarball(
+            EvalSettings::resolvePseudoUrl(s)).accessor;
+        auto storePath = fetchToStore(*state.store, SourcePath(accessor), FetchMode::Copy);
         return state.rootPath(CanonPath(state.store->toRealPath(storePath)));
     }
 
diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc
index 4919ac358..133d02f59 100644
--- a/src/libexpr/eval.cc
+++ b/src/libexpr/eval.cc
@@ -2794,10 +2794,11 @@ std::optional EvalState::resolveSearchPathPath(const SearchPath::Pa
 
     if (EvalSettings::isPseudoUrl(value)) {
         try {
-            auto storePath = fetchers::downloadTarball(
-                store, EvalSettings::resolvePseudoUrl(value), "source", false).storePath;
+            auto accessor = fetchers::downloadTarball(
+                EvalSettings::resolvePseudoUrl(value)).accessor;
+            auto storePath = fetchToStore(*store, SourcePath(accessor), FetchMode::Copy);
             res = { store->toRealPath(storePath) };
-        } catch (FileTransferError & e) {
+        } catch (Error & e) {
             logWarning({
                 .msg = HintFmt("Nix search path entry '%1%' cannot be downloaded, ignoring", value)
             });
diff --git a/src/libexpr/primops/fetchTree.cc b/src/libexpr/primops/fetchTree.cc
index 1997d5513..01a43e3fd 100644
--- a/src/libexpr/primops/fetchTree.cc
+++ b/src/libexpr/primops/fetchTree.cc
@@ -9,6 +9,7 @@
 #include "tarball.hh"
 #include "url.hh"
 #include "value-to-json.hh"
+#include "fetch-to-store.hh"
 
 #include 
 #include 
@@ -473,7 +474,7 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v
     //       https://github.com/NixOS/nix/issues/4313
     auto storePath =
         unpack
-        ? fetchers::downloadTarball(state.store, *url, name, (bool) expectedHash).storePath
+        ? fetchToStore(*state.store, fetchers::downloadTarball(*url).accessor, FetchMode::Copy, name)
         : fetchers::downloadFile(state.store, *url, name, (bool) expectedHash).storePath;
 
     if (expectedHash) {
diff --git a/src/libfetchers/git-utils.cc b/src/libfetchers/git-utils.cc
index 4f034e9d4..f216b6e10 100644
--- a/src/libfetchers/git-utils.cc
+++ b/src/libfetchers/git-utils.cc
@@ -466,6 +466,22 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this
         else
             throw Error("Commit signature verification on commit %s failed: %s", rev.gitRev(), output);
     }
+
+    Hash treeHashToNarHash(const Hash & treeHash) override
+    {
+        auto accessor = getAccessor(treeHash, false);
+
+        fetchers::Attrs cacheKey({{"_what", "treeHashToNarHash"}, {"treeHash", treeHash.gitRev()}});
+
+        if (auto res = fetchers::getCache()->lookup(cacheKey))
+            return Hash::parseAny(fetchers::getStrAttr(*res, "narHash"), HashAlgorithm::SHA256);
+
+        auto narHash = accessor->hashPath(CanonPath::root);
+
+        fetchers::getCache()->upsert(cacheKey, fetchers::Attrs({{"narHash", narHash.to_string(HashFormat::SRI, true)}}));
+
+        return narHash;
+    }
 };
 
 ref GitRepo::openRepo(const std::filesystem::path & path, bool create, bool bare)
diff --git a/src/libfetchers/git-utils.hh b/src/libfetchers/git-utils.hh
index 5f68d26a7..fbb2d947b 100644
--- a/src/libfetchers/git-utils.hh
+++ b/src/libfetchers/git-utils.hh
@@ -93,6 +93,12 @@ struct GitRepo
     virtual void verifyCommit(
         const Hash & rev,
         const std::vector<fetchers::PublicKey> & publicKeys) = 0;
+
+    /**
+     * Given a Git tree hash, compute the hash of its NAR
+     * serialisation. This is memoised on-disk.
+     */
+    virtual Hash treeHashToNarHash(const Hash & treeHash) = 0;
 };
 
 ref<GitRepo> getTarballCache();
diff --git a/src/libfetchers/tarball.cc b/src/libfetchers/tarball.cc
index 3b7709440..e3b1fbe56 100644
--- a/src/libfetchers/tarball.cc
+++ b/src/libfetchers/tarball.cc
@@ -9,6 +9,9 @@
 #include "types.hh"
 #include "split.hh"
 #include "posix-source-accessor.hh"
+#include "fs-input-accessor.hh"
+#include "store-api.hh"
+#include "git-utils.hh"
 
 namespace nix::fetchers {
 
@@ -57,10 +60,8 @@ DownloadFileResult downloadFile(
             throw;
     }
 
-    // FIXME: write to temporary file.
     Attrs infoAttrs({
         {"etag", res.etag},
-        {"url", res.effectiveUri},
     });
 
     if (res.immutableUrl)
@@ -91,96 +92,98 @@ DownloadFileResult downloadFile(
         storePath = std::move(info.path);
     }
 
-    getCache()->add(
-        *store,
-        inAttrs,
-        infoAttrs,
-        *storePath,
-        locked);
-
-    if (url != res.effectiveUri)
+    /* Cache metadata for all URLs in the redirect chain. */
+    for (auto & url : res.urls) {
+        inAttrs.insert_or_assign("url", url);
+        infoAttrs.insert_or_assign("url", *res.urls.rbegin());
         getCache()->add(
             *store,
-            {
-                {"type", "file"},
-                {"url", res.effectiveUri},
-                {"name", name},
-            },
+            inAttrs,
             infoAttrs,
             *storePath,
             locked);
+    }
 
     return {
         .storePath = std::move(*storePath),
         .etag = res.etag,
-        .effectiveUrl = res.effectiveUri,
+        .effectiveUrl = *res.urls.rbegin(),
         .immutableUrl = res.immutableUrl,
     };
 }
 
 DownloadTarballResult downloadTarball(
-    ref<Store> store,
     const std::string & url,
-    const std::string & name,
-    bool locked,
     const Headers & headers)
 {
     Attrs inAttrs({
-        {"type", "tarball"},
+        {"_what", "tarballCache"},
         {"url", url},
-        {"name", name},
     });
 
-    auto cached = getCache()->lookupExpired(*store, inAttrs);
+    auto cached = getCache()->lookupExpired(inAttrs);
+
+    auto attrsToResult = [&](const Attrs & infoAttrs)
+    {
+        auto treeHash = getRevAttr(infoAttrs, "treeHash");
+        return DownloadTarballResult {
+            .treeHash = treeHash,
+            .lastModified = (time_t) getIntAttr(infoAttrs, "lastModified"),
+            .immutableUrl = maybeGetStrAttr(infoAttrs, "immutableUrl"),
+            .accessor = getTarballCache()->getAccessor(treeHash, false),
+        };
+    };
+
+    if (cached && !getTarballCache()->hasObject(getRevAttr(cached->infoAttrs, "treeHash")))
+        cached.reset();
 
     if (cached && !cached->expired)
-        return {
-            .storePath = std::move(cached->storePath),
-            .lastModified = (time_t) getIntAttr(cached->infoAttrs, "lastModified"),
-            .immutableUrl = maybeGetStrAttr(cached->infoAttrs, "immutableUrl"),
-        };
+        return attrsToResult(cached->infoAttrs);
 
-    auto res = downloadFile(store, url, name, locked, headers);
+    auto _res = std::make_shared<Sync<FileTransferResult>>();
 
-    std::optional<StorePath> unpackedStorePath;
-    time_t lastModified;
-
-    if (cached && res.etag != "" && getStrAttr(cached->infoAttrs, "etag") == res.etag) {
-        unpackedStorePath = std::move(cached->storePath);
-        lastModified = getIntAttr(cached->infoAttrs, "lastModified");
-    } else {
-        Path tmpDir = createTempDir();
-        AutoDelete autoDelete(tmpDir, true);
-        unpackTarfile(store->toRealPath(res.storePath), tmpDir);
-        auto members = readDirectory(tmpDir);
-        if (members.size() != 1)
-            throw nix::Error("tarball '%s' contains an unexpected number of top-level files", url);
-        auto topDir = tmpDir + "/" + members.begin()->name;
-        lastModified = lstat(topDir).st_mtime;
-        PosixSourceAccessor accessor;
-        unpackedStorePath = store->addToStore(name, accessor, CanonPath { topDir }, FileIngestionMethod::Recursive, HashAlgorithm::SHA256, {}, defaultPathFilter, NoRepair);
-    }
-
-    Attrs infoAttrs({
-        {"lastModified", uint64_t(lastModified)},
-        {"etag", res.etag},
+    auto source = sinkToSource([&](Sink & sink) {
+        FileTransferRequest req(url);
+        req.expectedETag = cached ? getStrAttr(cached->infoAttrs, "etag") : "";
+        getFileTransfer()->download(std::move(req), sink,
+            [_res](FileTransferResult r)
+            {
+                *_res->lock() = r;
+            });
     });
 
-    if (res.immutableUrl)
-        infoAttrs.emplace("immutableUrl", *res.immutableUrl);
+    // TODO: fall back to cached value if download fails.
 
-    getCache()->add(
-        *store,
-        inAttrs,
-        infoAttrs,
-        *unpackedStorePath,
-        locked);
+    /* Note: if the download is cached, `importTarball()` will receive
+       no data, which causes it to import an empty tarball. */
+    TarArchive archive { *source };
+    auto parseSink = getTarballCache()->getFileSystemObjectSink();
+    auto lastModified = unpackTarfileToSink(archive, *parseSink);
 
-    return {
-        .storePath = std::move(*unpackedStorePath),
-        .lastModified = lastModified,
-        .immutableUrl = res.immutableUrl,
-    };
+    auto res(_res->lock());
+
+    Attrs infoAttrs;
+
+    if (res->cached) {
+        infoAttrs = cached->infoAttrs;
+    } else {
+        infoAttrs.insert_or_assign("etag", res->etag);
+        infoAttrs.insert_or_assign("treeHash", parseSink->sync().gitRev());
+        infoAttrs.insert_or_assign("lastModified", uint64_t(lastModified));
+        if (res->immutableUrl)
+            infoAttrs.insert_or_assign("immutableUrl", *res->immutableUrl);
+    }
+
+    /* Insert a cache entry for every URL in the redirect chain. */
+    for (auto & url : res->urls) {
+        inAttrs.insert_or_assign("url", url);
+        getCache()->upsert(inAttrs, infoAttrs);
+    }
+
+    // FIXME: add a cache entry for immutableUrl? That could allow
+    // cache poisoning.
+
+    return attrsToResult(infoAttrs);
 }
 
 // An input scheme corresponding to a curl-downloadable resource.
@@ -198,6 +201,8 @@ struct CurlInputScheme : InputScheme
 
     virtual bool isValidURL(const ParsedURL & url, bool requireTree) const = 0;
 
+    static const std::set<std::string> specialParams;
+
     std::optional<Input> inputFromURL(const ParsedURL & _url, bool requireTree) const override
     {
         if (!isValidURL(_url, requireTree))
@@ -220,8 +225,12 @@ struct CurlInputScheme : InputScheme
             if (auto n = string2Int<uint64_t>(*i))
                 input.attrs.insert_or_assign("revCount", *n);
 
-        url.query.erase("rev");
-        url.query.erase("revCount");
+        if (auto i = get(url.query, "lastModified"))
+            if (auto n = string2Int<uint64_t>(*i))
+                input.attrs.insert_or_assign("lastModified", *n);
+
+        for (auto & param : allowedAttrs())
+            url.query.erase(param);
 
         input.attrs.insert_or_assign("type", std::string { schemeName() });
         input.attrs.insert_or_assign("url", url.to_string());
@@ -275,10 +284,20 @@ struct FileInputScheme : CurlInputScheme
                 : (!requireTree && !hasTarballExtension(url.path)));
     }
 
-    std::pair<StorePath, Input> fetch(ref<Store> store, const Input & input) override
+    std::pair<ref<InputAccessor>, Input> getAccessor(ref<Store> store, const Input & _input) const override
     {
+        auto input(_input);
+
         auto file = downloadFile(store, getStrAttr(input.attrs, "url"), input.getName(), false);
-        return {std::move(file.storePath), input};
+
+        auto narHash = store->queryPathInfo(file.storePath)->narHash;
+        input.attrs.insert_or_assign("narHash", narHash.to_string(HashFormat::SRI, true));
+
+        auto accessor = makeStorePathAccessor(store, file.storePath);
+
+        accessor->setPathDisplay("«" + input.to_string() + "»");
+
+        return {accessor, input};
     }
 };
 
@@ -296,11 +315,13 @@ struct TarballInputScheme : CurlInputScheme
                 : (requireTree || hasTarballExtension(url.path)));
     }
 
-    std::pair<StorePath, Input> fetch(ref<Store> store, const Input & _input) override
+    std::pair<ref<InputAccessor>, Input> getAccessor(ref<Store> store, const Input & _input) const override
     {
-        Input input(_input);
-        auto url = getStrAttr(input.attrs, "url");
-        auto result = downloadTarball(store, url, input.getName(), false);
+        auto input(_input);
+
+        auto result = downloadTarball(getStrAttr(input.attrs, "url"), {});
+
+        result.accessor->setPathDisplay("«" + input.to_string() + "»");
 
         if (result.immutableUrl) {
             auto immutableInput = Input::fromURL(*result.immutableUrl);
@@ -314,7 +335,10 @@ struct TarballInputScheme : CurlInputScheme
         if (result.lastModified && !input.attrs.contains("lastModified"))
             input.attrs.insert_or_assign("lastModified", uint64_t(result.lastModified));
 
-        return {result.storePath, std::move(input)};
+        input.attrs.insert_or_assign("narHash",
+            getTarballCache()->treeHashToNarHash(result.treeHash).to_string(HashFormat::SRI, true));
+
+        return {result.accessor, input};
     }
 };
 
diff --git a/src/libfetchers/tarball.hh b/src/libfetchers/tarball.hh
index 9e6b50b31..77ad3bf09 100644
--- a/src/libfetchers/tarball.hh
+++ b/src/libfetchers/tarball.hh
@@ -2,11 +2,13 @@
 
 #include "types.hh"
 #include "path.hh"
+#include "hash.hh"
 
 #include <optional>
 
 namespace nix {
 class Store;
+struct InputAccessor;
 }
 
 namespace nix::fetchers {
@@ -28,16 +30,18 @@ DownloadFileResult downloadFile(
 
 struct DownloadTarballResult
 {
-    StorePath storePath;
+    Hash treeHash;
     time_t lastModified;
     std::optional<std::string> immutableUrl;
+    ref<InputAccessor> accessor;
 };
 
+/**
+ * Download and import a tarball into the Git cache. The result is the
+ * Git tree hash of the root directory.
+ */
 DownloadTarballResult downloadTarball(
-    ref<Store> store,
     const std::string & url,
-    const std::string & name,
-    bool locked,
     const Headers & headers = {});
 
 }
diff --git a/src/libstore/filetransfer.cc b/src/libstore/filetransfer.cc
index ebfae346f..bab21bf51 100644
--- a/src/libstore/filetransfer.cc
+++ b/src/libstore/filetransfer.cc
@@ -106,6 +106,8 @@ struct curlFileTransfer : public FileTransfer
                     this->result.data.append(data);
               })
         {
+            result.urls.push_back(request.uri);
+
             requestHeaders = curl_slist_append(requestHeaders, "Accept-Encoding: zstd, br, gzip, deflate, bzip2, xz");
             if (!request.expectedETag.empty())
                 requestHeaders = curl_slist_append(requestHeaders, ("If-None-Match: " + request.expectedETag).c_str());
@@ -182,6 +184,14 @@ struct curlFileTransfer : public FileTransfer
             return ((TransferItem *) userp)->writeCallback(contents, size, nmemb);
         }
 
+        void appendCurrentUrl()
+        {
+            char * effectiveUriCStr = nullptr;
+            curl_easy_getinfo(req, CURLINFO_EFFECTIVE_URL, &effectiveUriCStr);
+            if (effectiveUriCStr && *result.urls.rbegin() != effectiveUriCStr)
+                result.urls.push_back(effectiveUriCStr);
+        }
+
         size_t headerCallback(void * contents, size_t size, size_t nmemb)
         {
             size_t realSize = size * nmemb;
@@ -196,6 +206,7 @@ struct curlFileTransfer : public FileTransfer
                 statusMsg = trim(match.str(1));
                 acceptRanges = false;
                 encoding = "";
+                appendCurrentUrl();
             } else {
 
                 auto i = line.find(':');
@@ -360,14 +371,11 @@ struct curlFileTransfer : public FileTransfer
         {
             auto httpStatus = getHTTPStatus();
 
-            char * effectiveUriCStr = nullptr;
-            curl_easy_getinfo(req, CURLINFO_EFFECTIVE_URL, &effectiveUriCStr);
-            if (effectiveUriCStr)
-                result.effectiveUri = effectiveUriCStr;
-
             debug("finished %s of '%s'; curl status = %d, HTTP status = %d, body = %d bytes",
                 request.verb(), request.uri, code, httpStatus, result.bodySize);
 
+            appendCurrentUrl();
+
             if (decompressionSink) {
                 try {
                     decompressionSink->finish();
@@ -779,7 +787,10 @@ FileTransferResult FileTransfer::upload(const FileTransferRequest & request)
     return enqueueFileTransfer(request).get();
 }
 
-void FileTransfer::download(FileTransferRequest && request, Sink & sink)
+void FileTransfer::download(
+    FileTransferRequest && request,
+    Sink & sink,
+    std::function<void(FileTransferResult)> resultCallback)
 {
     /* Note: we can't call 'sink' via request.dataCallback, because
        that would cause the sink to execute on the fileTransfer
@@ -829,11 +840,13 @@ void FileTransfer::download(FileTransferRequest && request, Sink & sink)
     };
 
     enqueueFileTransfer(request,
-        {[_state](std::future<FileTransferResult> fut) {
+        {[_state, resultCallback{std::move(resultCallback)}](std::future<FileTransferResult> fut) {
             auto state(_state->lock());
             state->quit = true;
             try {
-                fut.get();
+                auto res = fut.get();
+                if (resultCallback)
+                    resultCallback(std::move(res));
             } catch (...) {
                 state->exc = std::current_exception();
             }
diff --git a/src/libstore/filetransfer.hh b/src/libstore/filetransfer.hh
index a3b0dde1f..1c271cbec 100644
--- a/src/libstore/filetransfer.hh
+++ b/src/libstore/filetransfer.hh
@@ -75,14 +75,34 @@ struct FileTransferRequest
 
 struct FileTransferResult
 {
+    /**
+     * Whether this is a cache hit (i.e. the ETag supplied in the
+     * request is still valid). If so, `data` is empty.
+     */
     bool cached = false;
+
+    /**
+     * The ETag of the object.
+     */
     std::string etag;
-    std::string effectiveUri;
+
+    /**
+     * All URLs visited in the redirect chain.
+     */
+    std::vector<std::string> urls;
+
+    /**
+     * The response body.
+     */
     std::string data;
+
     uint64_t bodySize = 0;
-    /* An "immutable" URL for this resource (i.e. one whose contents
-       will never change), as returned by the `Link: <url>;
-       rel="immutable"` header. */
+
+    /**
+     * An "immutable" URL for this resource (i.e. one whose contents
+     * will never change), as returned by the `Link: <url>;
+     * rel="immutable"` header.
+     */
     std::optional<std::string> immutableUrl;
 };
 
@@ -116,7 +136,10 @@ struct FileTransfer
      * Download a file, writing its data to a sink. The sink will be
      * invoked on the thread of the caller.
      */
-    void download(FileTransferRequest && request, Sink & sink);
+    void download(
+        FileTransferRequest && request,
+        Sink & sink,
         std::function<void(FileTransferResult)> resultCallback = {});
 
     enum Error { NotFound, Forbidden, Misc, Transient, Interrupted };
 };

From 0acd783190418af514b363685e010195ea7260bc Mon Sep 17 00:00:00 2001
From: Eelco Dolstra 
Date: Tue, 20 Feb 2024 15:22:15 +0100
Subject: [PATCH 121/164] Don't send settings that depend on disabled
 experimental features to the daemon

This fixes warnings like

   warning: Ignoring setting 'auto-allocate-uids' because experimental feature 'auto-allocate-uids' is not enabled
   warning: Ignoring setting 'impure-env' because experimental feature 'configurable-impure-env' is not enabled

when using the daemon and the user didn't actually set those settings.

Note: this also hides those settings from `nix config show`, but that
seems a good thing.
---
 src/libutil/config-impl.hh                |  2 +-
 src/libutil/config.cc                     |  4 +++-
 tests/functional/experimental-features.sh | 10 ++++++----
 3 files changed, 10 insertions(+), 6 deletions(-)

diff --git a/src/libutil/config-impl.hh b/src/libutil/config-impl.hh
index 9f69e8444..1da0cb638 100644
--- a/src/libutil/config-impl.hh
+++ b/src/libutil/config-impl.hh
@@ -4,7 +4,7 @@
  *
  * Template implementations (as opposed to mere declarations).
  *
- * This file is an exmample of the "impl.hh" pattern. See the
+ * This file is an example of the "impl.hh" pattern. See the
  * contributing guide.
  *
  * One only needs to include this when one is declaring a
diff --git a/src/libutil/config.cc b/src/libutil/config.cc
index 37f5b50c7..617c2ec89 100644
--- a/src/libutil/config.cc
+++ b/src/libutil/config.cc
@@ -84,7 +84,9 @@ void AbstractConfig::reapplyUnknownSettings()
 void Config::getSettings(std::map<std::string, SettingInfo> & res, bool overriddenOnly)
 {
     for (const auto & opt : _settings)
-        if (!opt.second.isAlias && (!overriddenOnly || opt.second.setting->overridden))
+        if (!opt.second.isAlias
+            && (!overriddenOnly || opt.second.setting->overridden)
+            && experimentalFeatureSettings.isEnabled(opt.second.setting->experimentalFeature))
             res.emplace(opt.first, SettingInfo{opt.second.setting->to_string(), opt.second.setting->description});
 }
 
diff --git a/tests/functional/experimental-features.sh b/tests/functional/experimental-features.sh
index 9ee4a53d4..12112b293 100644
--- a/tests/functional/experimental-features.sh
+++ b/tests/functional/experimental-features.sh
@@ -31,17 +31,19 @@ source common.sh
 NIX_CONFIG='
   experimental-features = nix-command
   accept-flake-config = true
-' nix config show accept-flake-config 1>$TEST_ROOT/stdout 2>$TEST_ROOT/stderr
-grepQuiet "false" $TEST_ROOT/stdout
+' expect 1 nix config show accept-flake-config 1>$TEST_ROOT/stdout 2>$TEST_ROOT/stderr
+[[ $(cat $TEST_ROOT/stdout) = '' ]]
 grepQuiet "Ignoring setting 'accept-flake-config' because experimental feature 'flakes' is not enabled" $TEST_ROOT/stderr
+grepQuiet "error: could not find setting 'accept-flake-config'" $TEST_ROOT/stderr
 
 # 'flakes' experimental-feature is disabled after, ignore and warn
 NIX_CONFIG='
   accept-flake-config = true
   experimental-features = nix-command
-' nix config show accept-flake-config 1>$TEST_ROOT/stdout 2>$TEST_ROOT/stderr
-grepQuiet "false" $TEST_ROOT/stdout
+' expect 1 nix config show accept-flake-config 1>$TEST_ROOT/stdout 2>$TEST_ROOT/stderr
+[[ $(cat $TEST_ROOT/stdout) = '' ]]
 grepQuiet "Ignoring setting 'accept-flake-config' because experimental feature 'flakes' is not enabled" $TEST_ROOT/stderr
+grepQuiet "error: could not find setting 'accept-flake-config'" $TEST_ROOT/stderr
 
 # 'flakes' experimental-feature is enabled before, process
 NIX_CONFIG='

From 071dd2b3a4e6c0b2106f1b6f14ec26e153d97446 Mon Sep 17 00:00:00 2001
From: Eelco Dolstra 
Date: Fri, 16 Feb 2024 17:00:07 +0100
Subject: [PATCH 122/164] Input: Replace 'locked' bool by isLocked() method

It's better to just check whether the input has all the attributes
needed to consider itself locked (e.g. whether a Git input has a
'rev' attribute).

Also, the 'locked' field was actually incorrect for Git inputs: it
would be set to true even for dirty worktrees. As a result, we got
away with using fetchTree() internally even though fetchTree()
requires a locked input in pure mode. In particular, this allowed
'--override-input' to work by accident.

The fix is to pass a set of "overrides" to call-flake.nix for all the
unlocked inputs (i.e. the top-level flake and any --override-inputs).
---
 src/libexpr/flake/call-flake.nix | 61 ++++++++++++++---------
 src/libexpr/flake/flake.cc       | 84 ++++++++++++++++++++------------
 src/libexpr/flake/flake.hh       |  7 +++
 src/libexpr/flake/lockfile.cc    | 17 ++++---
 src/libexpr/flake/lockfile.hh    |  7 +--
 src/libexpr/primops/fetchTree.cc |  6 +--
 src/libfetchers/fetchers.cc      | 11 ++---
 src/libfetchers/fetchers.hh      | 12 +++--
 src/libfetchers/git.cc           |  7 ++-
 src/libfetchers/github.cc        |  5 ++
 src/libfetchers/mercurial.cc     |  5 ++
 src/libfetchers/path.cc          |  5 ++
 src/libfetchers/tarball.cc       |  5 ++
 src/nix-env/nix-env.cc           |  2 +-
 src/nix/flake.cc                 |  2 +-
 tests/functional/fetchGit.sh     |  4 +-
 16 files changed, 155 insertions(+), 85 deletions(-)

diff --git a/src/libexpr/flake/call-flake.nix b/src/libexpr/flake/call-flake.nix
index 4beb0b0fe..d0ccb1e37 100644
--- a/src/libexpr/flake/call-flake.nix
+++ b/src/libexpr/flake/call-flake.nix
@@ -1,20 +1,52 @@
-lockFileStr: rootSrc: rootSubdir:
+# This is a helper to callFlake() to lazily fetch flake inputs.
+
+# The contents of the lock file, in JSON format.
+lockFileStr:
+
+# A mapping of lock file node IDs to { sourceInfo, subdir } attrsets,
+# with sourceInfo.outPath providing an InputAccessor to a previously
+# fetched tree. This is necessary for possibly unlocked inputs, in
+# particular the root input, but also --override-inputs pointing to
+# unlocked trees.
+overrides:
 
 let
 
   lockFile = builtins.fromJSON lockFileStr;
 
+  # Resolve an input spec into a node name. An input spec is
+  # either a node name, or a 'follows' path from the root
+  # node.
+  resolveInput = inputSpec:
+    if builtins.isList inputSpec
+    then getInputByPath lockFile.root inputSpec
+    else inputSpec;
+
+  # Follow an input path (e.g. ["dwarffs" "nixpkgs"]) from the
+  # root node, returning the final node.
+  getInputByPath = nodeName: path:
+    if path == []
+    then nodeName
+    else
+      getInputByPath
+        # Since this could be a 'follows' input, call resolveInput.
+        (resolveInput lockFile.nodes.${nodeName}.inputs.${builtins.head path})
+        (builtins.tail path);
+
   allNodes =
     builtins.mapAttrs
       (key: node:
         let
 
           sourceInfo =
-            if key == lockFile.root
-            then rootSrc
-            else fetchTree (node.info or {} // removeAttrs node.locked ["dir"]);
+            if overrides ? ${key}
+            then
+              overrides.${key}.sourceInfo
+            else
+              # FIXME: remove obsolete node.info.
+              fetchTree (node.info or {} // removeAttrs node.locked ["dir"]);
 
-          subdir = if key == lockFile.root then rootSubdir else node.locked.dir or "";
+          subdir = overrides.${key}.dir or node.locked.dir or "";
 
           outPath = sourceInfo + ((if subdir == "" then "" else "/") + subdir);
 
@@ -24,25 +56,6 @@ let
             (inputName: inputSpec: allNodes.${resolveInput inputSpec})
             (node.inputs or {});
 
-          # Resolve a input spec into a node name. An input spec is
-          # either a node name, or a 'follows' path from the root
-          # node.
-          resolveInput = inputSpec:
-              if builtins.isList inputSpec
-              then getInputByPath lockFile.root inputSpec
-              else inputSpec;
-
-          # Follow an input path (e.g. ["dwarffs" "nixpkgs"]) from the
-          # root node, returning the final node.
-          getInputByPath = nodeName: path:
-            if path == []
-            then nodeName
-            else
-              getInputByPath
-                # Since this could be a 'follows' input, call resolveInput.
-                (resolveInput lockFile.nodes.${nodeName}.inputs.${builtins.head path})
-                (builtins.tail path);
-
           outputs = flake.outputs (inputs // { self = result; });
 
           result =
diff --git a/src/libexpr/flake/flake.cc b/src/libexpr/flake/flake.cc
index 451780c89..022d39cdb 100644
--- a/src/libexpr/flake/flake.cc
+++ b/src/libexpr/flake/flake.cc
@@ -365,6 +365,7 @@ LockedFlake lockFlake(
         std::map<InputPath, FlakeInput> overrides;
         std::set<InputPath> explicitCliOverrides;
         std::set<InputPath> overridesUsed, updatesUsed;
+        std::map<ref<const Node>, StorePath> nodePaths;
 
         for (auto & i : lockFlags.inputOverrides) {
             overrides.insert_or_assign(i.first, FlakeInput { .ref = i.second });
@@ -535,11 +536,13 @@ LockedFlake lockFlake(
                             }
                         }
 
-                        computeLocks(
-                            mustRefetch
-                            ? getFlake(state, oldLock->lockedRef, false, flakeCache, inputPath).inputs
-                            : fakeInputs,
-                            childNode, inputPath, oldLock, lockRootPath, parentPath, !mustRefetch);
+                        if (mustRefetch) {
+                            auto inputFlake = getFlake(state, oldLock->lockedRef, false, flakeCache, inputPath);
+                            nodePaths.emplace(childNode, inputFlake.storePath);
+                            computeLocks(inputFlake.inputs, childNode, inputPath, oldLock, lockRootPath, parentPath, false);
+                        } else {
+                            computeLocks(fakeInputs, childNode, inputPath, oldLock, lockRootPath, parentPath, true);
+                        }
 
                     } else {
                         /* We need to create a new lock file entry. So fetch
@@ -584,6 +587,7 @@ LockedFlake lockFlake(
                                flake. Also, unless we already have this flake
                                in the top-level lock file, use this flake's
                                own lock file. */
+                            nodePaths.emplace(childNode, inputFlake.storePath);
                             computeLocks(
                                 inputFlake.inputs, childNode, inputPath,
                                 oldLock
@@ -596,11 +600,13 @@ LockedFlake lockFlake(
                         }
 
                         else {
-                            auto [sourceInfo, resolvedRef, lockedRef] = fetchOrSubstituteTree(
+                            auto [storePath, resolvedRef, lockedRef] = fetchOrSubstituteTree(
                                 state, *input.ref, useRegistries, flakeCache);
 
                             auto childNode = make_ref<LockedNode>(lockedRef, ref, false);
 
+                            nodePaths.emplace(childNode, storePath);
+
                             node->inputs.insert_or_assign(id, childNode);
                         }
                     }
@@ -615,6 +621,8 @@ LockedFlake lockFlake(
         // Bring in the current ref for relative path resolution if we have it
         auto parentPath = canonPath(state.store->toRealPath(flake.storePath) + "/" + flake.lockedRef.subdir, true);
 
+        nodePaths.emplace(newLockFile.root, flake.storePath);
+
         computeLocks(
             flake.inputs,
             newLockFile.root,
@@ -707,14 +715,6 @@ LockedFlake lockFlake(
                             flake.lockedRef.input.getRev() &&
                             prevLockedRef.input.getRev() != flake.lockedRef.input.getRev())
                             warn("committed new revision '%s'", flake.lockedRef.input.getRev()->gitRev());
-
-                        /* Make sure that we picked up the change,
-                           i.e. the tree should usually be dirty
-                           now. Corner case: we could have reverted from a
-                           dirty to a clean tree! */
-                        if (flake.lockedRef.input == prevLockedRef.input
-                            && !flake.lockedRef.input.isLocked())
-                            throw Error("'%s' did not change after I updated its 'flake.lock' file; is 'flake.lock' under version control?", flake.originalRef);
                     }
                 } else
                     throw Error("cannot write modified lock file of flake '%s' (use '--no-write-lock-file' to ignore)", topRef);
@@ -724,7 +724,11 @@ LockedFlake lockFlake(
             }
         }
 
-        return LockedFlake { .flake = std::move(flake), .lockFile = std::move(newLockFile) };
+        return LockedFlake {
+            .flake = std::move(flake),
+            .lockFile = std::move(newLockFile),
+            .nodePaths = std::move(nodePaths)
+        };
 
     } catch (Error & e) {
         e.addTrace({}, "while updating the lock file of flake '%s'", flake.lockedRef.to_string());
@@ -736,30 +740,48 @@ void callFlake(EvalState & state,
     const LockedFlake & lockedFlake,
     Value & vRes)
 {
-    auto vLocks = state.allocValue();
-    auto vRootSrc = state.allocValue();
-    auto vRootSubdir = state.allocValue();
-    auto vTmp1 = state.allocValue();
-    auto vTmp2 = state.allocValue();
+    experimentalFeatureSettings.require(Xp::Flakes);
 
-    vLocks->mkString(lockedFlake.lockFile.to_string());
+    auto [lockFileStr, keyMap] = lockedFlake.lockFile.to_string();
 
-    emitTreeAttrs(
-        state,
-        lockedFlake.flake.storePath,
-        lockedFlake.flake.lockedRef.input,
-        *vRootSrc,
-        false,
-        lockedFlake.flake.forceDirty);
+    auto overrides = state.buildBindings(lockedFlake.nodePaths.size());
 
-    vRootSubdir->mkString(lockedFlake.flake.lockedRef.subdir);
+    for (auto & [node, storePath] : lockedFlake.nodePaths) {
+        auto override = state.buildBindings(2);
+
+        auto & vSourceInfo = override.alloc(state.symbols.create("sourceInfo"));
+
+        auto lockedNode = node.dynamic_pointer_cast<const LockedNode>();
+
+        emitTreeAttrs(
+            state,
+            storePath,
+            lockedNode ? lockedNode->lockedRef.input : lockedFlake.flake.lockedRef.input,
+            vSourceInfo,
+            false,
+            !lockedNode && lockedFlake.flake.forceDirty);
+
+        auto key = keyMap.find(node);
+        assert(key != keyMap.end());
+
+        override
+            .alloc(state.symbols.create("dir"))
+            .mkString(lockedNode ? lockedNode->lockedRef.subdir : lockedFlake.flake.lockedRef.subdir);
+
+        overrides.alloc(state.symbols.create(key->second)).mkAttrs(override);
+    }
+
+    auto & vOverrides = state.allocValue()->mkAttrs(overrides);
 
     auto vCallFlake = state.allocValue();
     state.evalFile(state.callFlakeInternal, *vCallFlake);
 
+    auto vTmp1 = state.allocValue();
+    auto vLocks = state.allocValue();
+    vLocks->mkString(lockFileStr);
     state.callFunction(*vCallFlake, *vLocks, *vTmp1, noPos);
-    state.callFunction(*vTmp1, *vRootSrc, *vTmp2, noPos);
-    state.callFunction(*vTmp2, *vRootSubdir, vRes, noPos);
+
+    state.callFunction(*vTmp1, vOverrides, vRes, noPos);
 }
 
 static void prim_getFlake(EvalState & state, const PosIdx pos, Value * * args, Value & v)
diff --git a/src/libexpr/flake/flake.hh b/src/libexpr/flake/flake.hh
index d5ad3eade..19b680c56 100644
--- a/src/libexpr/flake/flake.hh
+++ b/src/libexpr/flake/flake.hh
@@ -103,6 +103,13 @@ struct LockedFlake
     Flake flake;
     LockFile lockFile;
 
+    /**
+     * Store paths of nodes that have been fetched in
+     * lockFlake(); in particular, the root node and the overridden
+     * inputs.
+     */
+    std::map<ref<const Node>, StorePath> nodePaths;
+
     Fingerprint getFingerprint() const;
 };
 
diff --git a/src/libexpr/flake/lockfile.cc b/src/libexpr/flake/lockfile.cc
index 3e99fb2d4..2c16dc802 100644
--- a/src/libexpr/flake/lockfile.cc
+++ b/src/libexpr/flake/lockfile.cc
@@ -38,7 +38,7 @@ LockedNode::LockedNode(const nlohmann::json & json)
     , isFlake(json.find("flake") != json.end() ? (bool) json["flake"] : true)
 {
     if (!lockedRef.input.isLocked())
-        throw Error("lock file contains mutable lock '%s'",
+        throw Error("lock file contains unlocked input '%s'",
             fetchers::attrsToJSON(lockedRef.input.toAttrs()));
 }
 
@@ -134,10 +134,10 @@ LockFile::LockFile(const nlohmann::json & json, const Path & path)
     // a bit since we don't need to worry about cycles.
 }
 
-nlohmann::json LockFile::toJSON() const
+std::pair<nlohmann::json, LockFile::KeyMap> LockFile::toJSON() const
 {
     nlohmann::json nodes;
-    std::unordered_map<std::shared_ptr<const Node>, std::string> nodeKeys;
+    KeyMap nodeKeys;
     std::unordered_set<std::string> keys;
 
     std::function<std::string(const std::string & key, ref<const Node> node)> dumpNode;
@@ -194,12 +194,13 @@ nlohmann::json LockFile::toJSON() const
     json["root"] = dumpNode("root", root);
     json["nodes"] = std::move(nodes);
 
-    return json;
+    return {json, std::move(nodeKeys)};
 }
 
-std::string LockFile::to_string() const
+std::pair LockFile::to_string() const
 {
-    return toJSON().dump(2);
+    auto [json, nodeKeys] = toJSON();
+    return {json.dump(2), std::move(nodeKeys)};
 }
 
 LockFile LockFile::read(const Path & path)
@@ -210,7 +211,7 @@ LockFile LockFile::read(const Path & path)
 
 std::ostream & operator <<(std::ostream & stream, const LockFile & lockFile)
 {
-    stream << lockFile.toJSON().dump(2);
+    stream << lockFile.toJSON().first.dump(2);
     return stream;
 }
 
@@ -243,7 +244,7 @@ std::optional LockFile::isUnlocked() const
 bool LockFile::operator ==(const LockFile & other) const
 {
     // FIXME: slow
-    return toJSON() == other.toJSON();
+    return toJSON().first == other.toJSON().first;
 }
 
 bool LockFile::operator !=(const LockFile & other) const
diff --git a/src/libexpr/flake/lockfile.hh b/src/libexpr/flake/lockfile.hh
index 5a1493404..57a7202a2 100644
--- a/src/libexpr/flake/lockfile.hh
+++ b/src/libexpr/flake/lockfile.hh
@@ -59,14 +59,15 @@ struct LockFile
 
     typedef std::map, std::string> KeyMap;
 
-    nlohmann::json toJSON() const;
+    std::pair toJSON() const;
 
-    std::string to_string() const;
+    std::pair to_string() const;
 
     static LockFile read(const Path & path);
 
     /**
-     * Check whether this lock file has any unlocked inputs.
+     * Check whether this lock file has any unlocked inputs. If so,
+     * return one.
      */
     std::optional isUnlocked() const;
 
diff --git a/src/libexpr/primops/fetchTree.cc b/src/libexpr/primops/fetchTree.cc
index 1997d5513..b4d9a6189 100644
--- a/src/libexpr/primops/fetchTree.cc
+++ b/src/libexpr/primops/fetchTree.cc
@@ -24,8 +24,6 @@ void emitTreeAttrs(
     bool emptyRevFallback,
     bool forceDirty)
 {
-    assert(input.isLocked());
-
     auto attrs = state.buildBindings(100);
 
     state.mkStorePathString(storePath, attrs.alloc(state.sOutPath));
@@ -176,8 +174,8 @@ static void fetchTree(
             fetcher = "fetchGit";
 
         state.error(
-            "in pure evaluation mode, %s requires a locked input",
-            fetcher
+            "in pure evaluation mode, '%s' will not fetch unlocked input '%s'",
+            fetcher, input.to_string()
         ).atPos(pos).debugThrow();
     }
 
diff --git a/src/libfetchers/fetchers.cc b/src/libfetchers/fetchers.cc
index 9a534c1e2..363ad018e 100644
--- a/src/libfetchers/fetchers.cc
+++ b/src/libfetchers/fetchers.cc
@@ -45,12 +45,8 @@ static void fixupInput(Input & input)
     // Check common attributes.
     input.getType();
     input.getRef();
-    if (input.getRev())
-        input.locked = true;
     input.getRevCount();
     input.getLastModified();
-    if (input.getNarHash())
-        input.locked = true;
 }
 
 Input Input::fromURL(const ParsedURL & url, bool requireTree)
@@ -140,6 +136,11 @@ bool Input::isDirect() const
     return !scheme || scheme->isDirect(*this);
 }
 
+bool Input::isLocked() const
+{
+    return scheme && scheme->isLocked(*this);
+}
+
 Attrs Input::toAttrs() const
 {
     return attrs;
@@ -222,8 +223,6 @@ std::pair Input::fetch(ref store) const
                 input.to_string(), *prevRevCount);
     }
 
-    input.locked = true;
-
     return {std::move(storePath), input};
 }
 
diff --git a/src/libfetchers/fetchers.hh b/src/libfetchers/fetchers.hh
index 036647830..472fba6f4 100644
--- a/src/libfetchers/fetchers.hh
+++ b/src/libfetchers/fetchers.hh
@@ -29,7 +29,6 @@ struct Input
 
     std::shared_ptr scheme; // note: can be null
     Attrs attrs;
-    bool locked = false;
 
     /**
      * path of the parent of this input, used for relative path resolution
@@ -71,7 +70,7 @@ public:
      * Check whether this is a "locked" input, that is,
      * one that contains a commit hash or content hash.
      */
-    bool isLocked() const { return locked; }
+    bool isLocked() const;
 
     bool operator ==(const Input & other) const;
 
@@ -121,7 +120,6 @@ public:
     std::optional getFingerprint(ref store) const;
 };
 
-
 /**
  * The `InputScheme` represents a type of fetcher.  Each fetcher
  * registers with nix at startup time.  When processing an `Input`,
@@ -196,6 +194,14 @@ struct InputScheme
      */
     virtual std::optional getFingerprint(ref store, const Input & input) const
     { return std::nullopt; }
+
+    /**
+     * Return `true` if this input is considered "locked", i.e. it has
+     * attributes like a Git revision or NAR hash that uniquely
+     * identify its contents.
+     */
+    virtual bool isLocked(const Input & input) const
+    { return false; }
 };
 
 void registerInputScheme(std::shared_ptr && fetcher);
diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc
index 97ef35b51..87d114276 100644
--- a/src/libfetchers/git.cc
+++ b/src/libfetchers/git.cc
@@ -737,8 +737,6 @@ struct GitInputScheme : InputScheme
             ? getLastModified(repoInfo, repoInfo.url, *repoInfo.workdirInfo.headRev)
             : 0);
 
-        input.locked = true; // FIXME
-
         return {accessor, std::move(input)};
     }
 
@@ -775,6 +773,11 @@ struct GitInputScheme : InputScheme
         else
             return std::nullopt;
     }
+
+    bool isLocked(const Input & input) const override
+    {
+        return (bool) input.getRev();
+    }
 };
 
 static auto rGitInputScheme = OnStartup([] { registerInputScheme(std::make_unique()); });
diff --git a/src/libfetchers/github.cc b/src/libfetchers/github.cc
index e6fbece13..76f94337b 100644
--- a/src/libfetchers/github.cc
+++ b/src/libfetchers/github.cc
@@ -280,6 +280,11 @@ struct GitArchiveInputScheme : InputScheme
         return {accessor, input};
     }
 
+    bool isLocked(const Input & input) const override
+    {
+        return (bool) input.getRev();
+    }
+
     std::optional experimentalFeature() const override
     {
         return Xp::Flakes;
diff --git a/src/libfetchers/mercurial.cc b/src/libfetchers/mercurial.cc
index 55e2eae03..a5f55a44e 100644
--- a/src/libfetchers/mercurial.cc
+++ b/src/libfetchers/mercurial.cc
@@ -347,6 +347,11 @@ struct MercurialInputScheme : InputScheme
         return makeResult(infoAttrs, std::move(storePath));
     }
 
+    bool isLocked(const Input & input) const override
+    {
+        return (bool) input.getRev();
+    }
+
     std::optional getFingerprint(ref store, const Input & input) const override
     {
         if (auto rev = input.getRev())
diff --git a/src/libfetchers/path.cc b/src/libfetchers/path.cc
index d3b0e475d..276fd1b36 100644
--- a/src/libfetchers/path.cc
+++ b/src/libfetchers/path.cc
@@ -87,6 +87,11 @@ struct PathInputScheme : InputScheme
         writeFile((CanonPath(getAbsPath(input)) / path).abs(), contents);
     }
 
+    bool isLocked(const Input & input) const override
+    {
+        return (bool) input.getNarHash();
+    }
+
     CanonPath getAbsPath(const Input & input) const
     {
         auto path = getStrAttr(input.attrs, "path");
diff --git a/src/libfetchers/tarball.cc b/src/libfetchers/tarball.cc
index 3b7709440..1d80fd880 100644
--- a/src/libfetchers/tarball.cc
+++ b/src/libfetchers/tarball.cc
@@ -260,6 +260,11 @@ struct CurlInputScheme : InputScheme
             url.query.insert_or_assign("narHash", narHash->to_string(HashFormat::SRI, true));
         return url;
     }
+
+    bool isLocked(const Input & input) const override
+    {
+        return (bool) input.getNarHash();
+    }
 };
 
 struct FileInputScheme : CurlInputScheme
diff --git a/src/nix-env/nix-env.cc b/src/nix-env/nix-env.cc
index 1f311733b..5e3de20c5 100644
--- a/src/nix-env/nix-env.cc
+++ b/src/nix-env/nix-env.cc
@@ -143,7 +143,7 @@ static void getAllExprs(EvalState & state,
             }
             /* Load the expression on demand. */
             auto vArg = state.allocValue();
-            vArg->mkString(path2.path.abs());
+            vArg->mkPath(path2);
             if (seen.size() == maxAttrs)
                 throw Error("too many Nix expressions in directory '%1%'", path);
             attrs.alloc(attrName).mkApp(&state.getBuiltin("import"), vArg);
diff --git a/src/nix/flake.cc b/src/nix/flake.cc
index 4504bb22e..131589f35 100644
--- a/src/nix/flake.cc
+++ b/src/nix/flake.cc
@@ -224,7 +224,7 @@ struct CmdFlakeMetadata : FlakeCommand, MixJSON
             if (auto lastModified = flake.lockedRef.input.getLastModified())
                 j["lastModified"] = *lastModified;
             j["path"] = store->printStorePath(flake.storePath);
-            j["locks"] = lockedFlake.lockFile.toJSON();
+            j["locks"] = lockedFlake.lockFile.toJSON().first;
             logger->cout("%s", j.dump());
         } else {
             logger->cout(
diff --git a/tests/functional/fetchGit.sh b/tests/functional/fetchGit.sh
index 856c0e534..3f2d0d5fb 100644
--- a/tests/functional/fetchGit.sh
+++ b/tests/functional/fetchGit.sh
@@ -70,7 +70,7 @@ path2=$(nix eval --raw --expr "(builtins.fetchGit { url = file://$repo; rev = \"
 [[ $(nix eval --raw --expr "builtins.readFile (fetchGit { url = file://$repo; rev = \"$rev2\"; } + \"/hello\")") = world ]]
 
 # But without a hash, it fails
-expectStderr 1 nix eval --expr 'builtins.fetchGit "file:///foo"' | grepQuiet "fetchGit requires a locked input"
+expectStderr 1 nix eval --expr 'builtins.fetchGit "file:///foo"' | grepQuiet "'fetchGit' will not fetch unlocked input"
 
 # Fetch again. This should be cached.
 mv $repo ${repo}-tmp
@@ -211,7 +211,7 @@ path6=$(nix eval --impure --raw --expr "(builtins.fetchTree { type = \"git\"; ur
 [[ $path3 = $path6 ]]
 [[ $(nix eval --impure --expr "(builtins.fetchTree { type = \"git\"; url = \"file://$TEST_ROOT/shallow\"; ref = \"dev\"; shallow = true; }).revCount or 123") == 123 ]]
 
-expectStderr 1 nix eval --expr 'builtins.fetchTree { type = "git"; url = "file:///foo"; }' | grepQuiet "fetchTree requires a locked input"
+expectStderr 1 nix eval --expr 'builtins.fetchTree { type = "git"; url = "file:///foo"; }' | grepQuiet "'fetchTree' will not fetch unlocked input"
 
 # Explicit ref = "HEAD" should work, and produce the same outPath as without ref
 path7=$(nix eval --impure --raw --expr "(builtins.fetchGit { url = \"file://$repo\"; ref = \"HEAD\"; }).outPath")

From 2a8fe9a93837733e9dd9ed5c078734a35b203e14 Mon Sep 17 00:00:00 2001
From: Rebecca Turner 
Date: Fri, 2 Feb 2024 18:53:49 -0800
Subject: [PATCH 123/164] `:quit` in the debugger should quit the whole program

---
 src/libcmd/repl.cc              | 63 ++++++++++++++++++++++++---------
 src/libcmd/repl.hh              |  4 +--
 src/libexpr/eval.cc             | 14 ++++++--
 src/libexpr/eval.hh             |  5 ++-
 src/libexpr/primops.cc          | 11 +-----
 src/libexpr/repl-exit-status.hh | 20 +++++++++++
 src/libmain/shared.cc           |  2 --
 src/libmain/shared.hh           | 10 +-----
 src/libutil/exit.cc             |  7 ++++
 src/libutil/exit.hh             | 19 ++++++++++
 10 files changed, 111 insertions(+), 44 deletions(-)
 create mode 100644 src/libexpr/repl-exit-status.hh
 create mode 100644 src/libutil/exit.cc
 create mode 100644 src/libutil/exit.hh

diff --git a/src/libcmd/repl.cc b/src/libcmd/repl.cc
index 03602e170..e423df3fe 100644
--- a/src/libcmd/repl.cc
+++ b/src/libcmd/repl.cc
@@ -52,6 +52,27 @@ extern "C" {
 
 namespace nix {
 
+/**
+ * Returned by `NixRepl::processLine`.
+ */
+enum class ProcessLineResult {
+    /**
+     * The user exited with `:quit`. The REPL should exit. The surrounding
+     * program or evaluation (e.g., if the REPL was acting as the debugger)
+     * should also exit.
+     */
+    QuitAll,
+    /**
+     * The user exited with `:continue`. The REPL should exit, but the program
+     * should continue running.
+     */
+    QuitOnce,
+    /**
+     * The user did not exit. The REPL should request another line of input.
+     */
+    Continue,
+};
+
 struct NixRepl
     : AbstractNixRepl
     #if HAVE_BOEHMGC
@@ -75,13 +96,13 @@ struct NixRepl
             std::function getValues);
     virtual ~NixRepl();
 
-    void mainLoop() override;
+    ReplExitStatus mainLoop() override;
     void initEnv() override;
 
     StringSet completePrefix(const std::string & prefix);
     bool getLine(std::string & input, const std::string & prompt);
     StorePath getDerivationPath(Value & v);
-    bool processLine(std::string line);
+    ProcessLineResult processLine(std::string line);
 
     void loadFile(const Path & path);
     void loadFlake(const std::string & flakeRef);
@@ -246,7 +267,7 @@ static std::ostream & showDebugTrace(std::ostream & out, const PosTable & positi
 
 static bool isFirstRepl = true;
 
-void NixRepl::mainLoop()
+ReplExitStatus NixRepl::mainLoop()
 {
     if (isFirstRepl) {
         std::string_view debuggerNotice = "";
@@ -287,15 +308,25 @@ void NixRepl::mainLoop()
         // When continuing input from previous lines, don't print a prompt, just align to the same
         // number of chars as the prompt.
         if (!getLine(input, input.empty() ? "nix-repl> " : "          ")) {
-            // ctrl-D should exit the debugger.
+            // Ctrl-D should exit the debugger.
             state->debugStop = false;
-            state->debugQuit = true;
             logger->cout("");
-            break;
+            // TODO: Should Ctrl-D exit just the current debugger session or
+            // the entire program?
+            return ReplExitStatus::QuitAll;
         }
         logger->resume();
         try {
-            if (!removeWhitespace(input).empty() && !processLine(input)) return;
+            switch (processLine(input)) {
+                case ProcessLineResult::QuitAll:
+                    return ReplExitStatus::QuitAll;
+                case ProcessLineResult::QuitOnce:
+                    return ReplExitStatus::Continue;
+                case ProcessLineResult::Continue:
+                    break;
+                default:
+                    abort();
+            }
         } catch (ParseError & e) {
             if (e.msg().find("unexpected end of file") != std::string::npos) {
                 // For parse errors on incomplete input, we continue waiting for the next line of
@@ -483,10 +514,11 @@ void NixRepl::loadDebugTraceEnv(DebugTrace & dt)
     }
 }
 
-bool NixRepl::processLine(std::string line)
+ProcessLineResult NixRepl::processLine(std::string line)
 {
     line = trim(line);
-    if (line == "") return true;
+    if (line.empty())
+        return ProcessLineResult::Continue;
 
     _isInterrupted = false;
 
@@ -581,13 +613,13 @@ bool NixRepl::processLine(std::string line)
     else if (state->debugRepl && (command == ":s" || command == ":step")) {
         // set flag to stop at next DebugTrace; exit repl.
         state->debugStop = true;
-        return false;
+        return ProcessLineResult::QuitOnce;
     }
 
     else if (state->debugRepl && (command == ":c" || command == ":continue")) {
         // set flag to run to next breakpoint or end of program; exit repl.
         state->debugStop = false;
-        return false;
+        return ProcessLineResult::QuitOnce;
     }
 
     else if (command == ":a" || command == ":add") {
@@ -730,8 +762,7 @@ bool NixRepl::processLine(std::string line)
 
     else if (command == ":q" || command == ":quit") {
         state->debugStop = false;
-        state->debugQuit = true;
-        return false;
+        return ProcessLineResult::QuitAll;
     }
 
     else if (command == ":doc") {
@@ -792,7 +823,7 @@ bool NixRepl::processLine(std::string line)
         }
     }
 
-    return true;
+    return ProcessLineResult::Continue;
 }
 
 void NixRepl::loadFile(const Path & path)
@@ -923,7 +954,7 @@ std::unique_ptr AbstractNixRepl::create(
 }
 
 
-void AbstractNixRepl::runSimple(
+ReplExitStatus AbstractNixRepl::runSimple(
     ref evalState,
     const ValMap & extraEnv)
 {
@@ -945,7 +976,7 @@ void AbstractNixRepl::runSimple(
     for (auto & [name, value] : extraEnv)
         repl->addVarToScope(repl->state->symbols.create(name), *value);
 
-    repl->mainLoop();
+    return repl->mainLoop();
 }
 
 }
diff --git a/src/libcmd/repl.hh b/src/libcmd/repl.hh
index 6d88883fe..21aa8bfc7 100644
--- a/src/libcmd/repl.hh
+++ b/src/libcmd/repl.hh
@@ -28,13 +28,13 @@ struct AbstractNixRepl
         const SearchPath & searchPath, nix::ref store, ref state,
         std::function getValues);
 
-    static void runSimple(
+    static ReplExitStatus runSimple(
         ref evalState,
         const ValMap & extraEnv);
 
     virtual void initEnv() = 0;
 
-    virtual void mainLoop() = 0;
+    virtual ReplExitStatus mainLoop() = 0;
 };
 
 }
diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc
index 832c8369a..3de26bd1e 100644
--- a/src/libexpr/eval.cc
+++ b/src/libexpr/eval.cc
@@ -3,6 +3,7 @@
 #include "hash.hh"
 #include "primops.hh"
 #include "print-options.hh"
+#include "shared.hh"
 #include "types.hh"
 #include "util.hh"
 #include "store-api.hh"
@@ -416,7 +417,6 @@ EvalState::EvalState(
     , buildStore(buildStore ? buildStore : store)
     , debugRepl(nullptr)
     , debugStop(false)
-    , debugQuit(false)
     , trylevel(0)
     , regexCache(makeRegexCache())
 #if HAVE_BOEHMGC
@@ -792,7 +792,17 @@ void EvalState::runDebugRepl(const Error * error, const Env & env, const Expr &
     auto se = getStaticEnv(expr);
     if (se) {
         auto vm = mapStaticEnvBindings(symbols, *se.get(), env);
-        (debugRepl)(ref(shared_from_this()), *vm);
+        auto exitStatus = (debugRepl)(ref(shared_from_this()), *vm);
+        switch (exitStatus) {
+            case ReplExitStatus::QuitAll:
+                if (error)
+                    throw *error;
+                throw Exit(0);
+            case ReplExitStatus::Continue:
+                break;
+            default:
+                abort();
+        }
     }
 }
 
diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh
index 8e639a1fa..42fe0d3e4 100644
--- a/src/libexpr/eval.hh
+++ b/src/libexpr/eval.hh
@@ -11,6 +11,7 @@
 #include "experimental-features.hh"
 #include "input-accessor.hh"
 #include "search-path.hh"
+#include "repl-exit-status.hh"
 
 #include 
 #include 
@@ -219,9 +220,8 @@ public:
     /**
      * Debugger
      */
-    void (* debugRepl)(ref es, const ValMap & extraEnv);
+    ReplExitStatus (* debugRepl)(ref es, const ValMap & extraEnv);
     bool debugStop;
-    bool debugQuit;
     int trylevel;
     std::list debugTraces;
     std::map> exprEnvs;
@@ -758,7 +758,6 @@ struct DebugTraceStacker {
     DebugTraceStacker(EvalState & evalState, DebugTrace t);
     ~DebugTraceStacker()
     {
-        // assert(evalState.debugTraces.front() == trace);
         evalState.debugTraces.pop_front();
     }
     EvalState & evalState;
diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc
index 731485133..5b3b2f11a 100644
--- a/src/libexpr/primops.cc
+++ b/src/libexpr/primops.cc
@@ -760,15 +760,6 @@ static RegisterPrimOp primop_break({
 
             auto & dt = state.debugTraces.front();
             state.runDebugRepl(&error, dt.env, dt.expr);
-
-            if (state.debugQuit) {
-                // If the user elects to quit the repl, throw an exception.
-                throw Error(ErrorInfo{
-                    .level = lvlInfo,
-                    .msg = HintFmt("quit the debugger"),
-                    .pos = nullptr,
-                });
-            }
         }
 
         // Return the value we were passed.
@@ -879,7 +870,7 @@ static void prim_tryEval(EvalState & state, const PosIdx pos, Value * * args, Va
     /* increment state.trylevel, and decrement it when this function returns. */
     MaintainCount trylevel(state.trylevel);
 
-    void (* savedDebugRepl)(ref es, const ValMap & extraEnv) = nullptr;
+    ReplExitStatus (* savedDebugRepl)(ref es, const ValMap & extraEnv) = nullptr;
     if (state.debugRepl && evalSettings.ignoreExceptionsDuringTry)
     {
         /* to prevent starting the repl from exceptions withing a tryEval, null it. */
diff --git a/src/libexpr/repl-exit-status.hh b/src/libexpr/repl-exit-status.hh
new file mode 100644
index 000000000..08299ff61
--- /dev/null
+++ b/src/libexpr/repl-exit-status.hh
@@ -0,0 +1,20 @@
+#pragma once
+
+namespace nix {
+
+/**
+ * Exit status returned from the REPL.
+ */
+enum class ReplExitStatus {
+    /**
+     * The user exited with `:quit`. The program (e.g., if the REPL was acting
+     * as the debugger) should exit.
+     */
+    QuitAll,
+    /**
+     * The user exited with `:continue`. The program should continue running.
+     */
+    Continue,
+};
+
+}
diff --git a/src/libmain/shared.cc b/src/libmain/shared.cc
index 7b9b3c5b5..7bced0aa4 100644
--- a/src/libmain/shared.cc
+++ b/src/libmain/shared.cc
@@ -408,6 +408,4 @@ PrintFreed::~PrintFreed()
             showBytes(results.bytesFreed));
 }
 
-Exit::~Exit() { }
-
 }
diff --git a/src/libmain/shared.hh b/src/libmain/shared.hh
index c68f6cd83..99c3dffab 100644
--- a/src/libmain/shared.hh
+++ b/src/libmain/shared.hh
@@ -7,6 +7,7 @@
 #include "common-args.hh"
 #include "path.hh"
 #include "derived-path.hh"
+#include "exit.hh"
 
 #include 
 
@@ -15,15 +16,6 @@
 
 namespace nix {
 
-class Exit : public std::exception
-{
-public:
-    int status;
-    Exit() : status(0) { }
-    Exit(int status) : status(status) { }
-    virtual ~Exit();
-};
-
 int handleExceptions(const std::string & programName, std::function fun);
 
 /**
diff --git a/src/libutil/exit.cc b/src/libutil/exit.cc
new file mode 100644
index 000000000..73cd8b04e
--- /dev/null
+++ b/src/libutil/exit.cc
@@ -0,0 +1,7 @@
+#include "exit.hh"
+
+namespace nix {
+
+Exit::~Exit() {}
+
+}
diff --git a/src/libutil/exit.hh b/src/libutil/exit.hh
new file mode 100644
index 000000000..55f33e62f
--- /dev/null
+++ b/src/libutil/exit.hh
@@ -0,0 +1,19 @@
+#pragma once
+
+#include 
+
+namespace nix {
+
+/**
+ * Exit the program with a given exit code.
+ */
+class Exit : public std::exception
+{
+public:
+    int status;
+    Exit() : status(0) { }
+    explicit Exit(int status) : status(status) { }
+    virtual ~Exit();
+};
+
+}

From 8e71883e3f59100479e96aa1883ef52dbaa03fd3 Mon Sep 17 00:00:00 2001
From: Rebecca Turner 
Date: Tue, 20 Feb 2024 14:52:16 -0800
Subject: [PATCH 124/164] Rename `ProcessLineResult` variants

---
 src/libcmd/repl.cc | 22 +++++++++++-----------
 1 file changed, 11 insertions(+), 11 deletions(-)

diff --git a/src/libcmd/repl.cc b/src/libcmd/repl.cc
index e423df3fe..42ec0f709 100644
--- a/src/libcmd/repl.cc
+++ b/src/libcmd/repl.cc
@@ -61,16 +61,16 @@ enum class ProcessLineResult {
      * program or evaluation (e.g., if the REPL was acting as the debugger)
      * should also exit.
      */
-    QuitAll,
+    Quit,
     /**
      * The user exited with `:continue`. The REPL should exit, but the program
      * should continue running.
      */
-    QuitOnce,
+    Continue,
     /**
      * The user did not exit. The REPL should request another line of input.
      */
-    Continue,
+    PromptAgain,
 };
 
 struct NixRepl
@@ -318,11 +318,11 @@ ReplExitStatus NixRepl::mainLoop()
         logger->resume();
         try {
             switch (processLine(input)) {
-                case ProcessLineResult::QuitAll:
+                case ProcessLineResult::Quit:
                     return ReplExitStatus::QuitAll;
-                case ProcessLineResult::QuitOnce:
-                    return ReplExitStatus::Continue;
                 case ProcessLineResult::Continue:
+                    return ReplExitStatus::Continue;
+                case ProcessLineResult::PromptAgain:
                     break;
                 default:
                     abort();
@@ -518,7 +518,7 @@ ProcessLineResult NixRepl::processLine(std::string line)
 {
     line = trim(line);
     if (line.empty())
-        return ProcessLineResult::Continue;
+        return ProcessLineResult::PromptAgain;
 
     _isInterrupted = false;
 
@@ -613,13 +613,13 @@ ProcessLineResult NixRepl::processLine(std::string line)
     else if (state->debugRepl && (command == ":s" || command == ":step")) {
         // set flag to stop at next DebugTrace; exit repl.
         state->debugStop = true;
-        return ProcessLineResult::QuitOnce;
+        return ProcessLineResult::Continue;
     }
 
     else if (state->debugRepl && (command == ":c" || command == ":continue")) {
         // set flag to run to next breakpoint or end of program; exit repl.
         state->debugStop = false;
-        return ProcessLineResult::QuitOnce;
+        return ProcessLineResult::Continue;
     }
 
     else if (command == ":a" || command == ":add") {
@@ -762,7 +762,7 @@ ProcessLineResult NixRepl::processLine(std::string line)
 
     else if (command == ":q" || command == ":quit") {
         state->debugStop = false;
-        return ProcessLineResult::QuitAll;
+        return ProcessLineResult::Quit;
     }
 
     else if (command == ":doc") {
@@ -823,7 +823,7 @@ ProcessLineResult NixRepl::processLine(std::string line)
         }
     }
 
-    return ProcessLineResult::Continue;
+    return ProcessLineResult::PromptAgain;
 }
 
 void NixRepl::loadFile(const Path & path)

From 60eeacc24a0d4dd8dabeb7fcf16b3aea5b8d3192 Mon Sep 17 00:00:00 2001
From: syvb 
Date: Tue, 20 Feb 2024 19:17:18 -0500
Subject: [PATCH 125/164] Say how many channels were unpacked

---
 src/nix-channel/nix-channel.cc | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/nix-channel/nix-channel.cc b/src/nix-channel/nix-channel.cc
index 79db78236..48553fa31 100644
--- a/src/nix-channel/nix-channel.cc
+++ b/src/nix-channel/nix-channel.cc
@@ -138,7 +138,7 @@ static void update(const StringSet & channelNames)
 
     // Unpack the channel tarballs into the Nix store and install them
     // into the channels profile.
-    std::cerr << "unpacking channels...\n";
+    std::cerr << "unpacking " << exprs.size() << " channels...\n";
     Strings envArgs{ "--profile", profile, "--file", unpackChannelPath, "--install", "--remove-all", "--from-expression" };
     for (auto & expr : exprs)
         envArgs.push_back(std::move(expr));

From 7fd0de38c6e2c203e3f3c376dcf9a48424d216fe Mon Sep 17 00:00:00 2001
From: Graham Dennis 
Date: Wed, 21 Feb 2024 18:40:34 +1100
Subject: [PATCH 126/164] Faster flake.lock parsing

This PR reduces the creation of short-lived basic_json objects while
parsing flake.lock files. For large flake.lock files (~1.5MB) I was
observing ~60s being spent for trivial nix build operations while
after this change it is now taking ~1.6s.
---
 src/libexpr/flake/lockfile.cc | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/libexpr/flake/lockfile.cc b/src/libexpr/flake/lockfile.cc
index 3e99fb2d4..58ebd97ba 100644
--- a/src/libexpr/flake/lockfile.cc
+++ b/src/libexpr/flake/lockfile.cc
@@ -107,7 +107,7 @@ LockFile::LockFile(const nlohmann::json & json, const Path & path)
                 std::string inputKey = i.value();
                 auto k = nodeMap.find(inputKey);
                 if (k == nodeMap.end()) {
-                    auto nodes = json["nodes"];
+                    auto & nodes = json["nodes"];
                     auto jsonNode2 = nodes.find(inputKey);
                     if (jsonNode2 == nodes.end())
                         throw Error("lock file references missing node '%s'", inputKey);

From 09d76e512a468ad65bedaeda56871de7043849b0 Mon Sep 17 00:00:00 2001
From: Eelco Dolstra 
Date: Wed, 21 Feb 2024 12:08:18 +0100
Subject: [PATCH 127/164] GitArchiveInputScheme: Require a NAR hash

---
 src/libfetchers/github.cc | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/src/libfetchers/github.cc b/src/libfetchers/github.cc
index 76f94337b..a48c99a0b 100644
--- a/src/libfetchers/github.cc
+++ b/src/libfetchers/github.cc
@@ -282,7 +282,11 @@ struct GitArchiveInputScheme : InputScheme
 
     bool isLocked(const Input & input) const override
     {
-        return (bool) input.getRev();
+        /* Since we can't verify the integrity of the tarball from the
+           Git revision alone, we also require a NAR hash for
+           locking. FIXME: in the future, we may want to require a Git
+           tree hash instead of a NAR hash. */
+        return input.getRev().has_value() && input.getNarHash().has_value();
     }
 
     std::optional experimentalFeature() const override

From b111fba8cd3c5d492565b5dc22a493ed58ef8571 Mon Sep 17 00:00:00 2001
From: Rebecca Turner 
Date: Wed, 21 Feb 2024 09:07:39 -0800
Subject: [PATCH 128/164] Add documentation, rename to `debugger-on-trace`

---
 doc/manual/rl-next/debugger-on-trace.md |  4 ++--
 src/libexpr/eval-settings.hh            | 11 +++++++++--
 src/libexpr/primops.cc                  |  6 ++++++
 3 files changed, 17 insertions(+), 4 deletions(-)

diff --git a/doc/manual/rl-next/debugger-on-trace.md b/doc/manual/rl-next/debugger-on-trace.md
index d4e55d59c..721928550 100644
--- a/doc/manual/rl-next/debugger-on-trace.md
+++ b/doc/manual/rl-next/debugger-on-trace.md
@@ -1,9 +1,9 @@
 ---
-synopsis: Enter the `--debugger` when `builtins.trace` is called if `builtins-trace-debugger` is set
+synopsis: Enter the `--debugger` when `builtins.trace` is called if `debugger-on-trace` is set
 prs: 9914
 ---
 
-If the `builtins-trace-debugger` option is set and `--debugger` is given,
+If the `debugger-on-trace` option is set and `--debugger` is given,
 `builtins.trace` calls will behave similarly to `builtins.break` and will enter
 the debug REPL. This is useful for determining where warnings are being emitted
 from.
diff --git a/src/libexpr/eval-settings.hh b/src/libexpr/eval-settings.hh
index 757daebc0..b5783d28f 100644
--- a/src/libexpr/eval-settings.hh
+++ b/src/libexpr/eval-settings.hh
@@ -128,8 +128,15 @@ struct EvalSettings : Config
     Setting maxCallDepth{this, 10000, "max-call-depth",
         "The maximum function call depth to allow before erroring."};
 
-    Setting builtinsTraceDebugger{this, false, "builtins-trace-debugger",
-        "Whether to enter the debugger on `builtins.trace` calls."};
+    Setting builtinsTraceDebugger{this, false, "debugger-on-trace",
+        R"(
+          If set to true and the `--debugger` flag is given,
+          [`builtins.trace`](@docroot@/language/builtins.md#builtins-trace) will
+          enter the debugger like
+          [`builtins.break`](@docroot@/language/builtins.md#builtins-break).
+
+          This is useful for debugging warnings in third-party Nix code.
+        )"};
 };
 
 extern EvalSettings evalSettings;
diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc
index a24a2d018..0ee146359 100644
--- a/src/libexpr/primops.cc
+++ b/src/libexpr/primops.cc
@@ -1010,6 +1010,12 @@ static RegisterPrimOp primop_trace({
       Evaluate *e1* and print its abstract syntax representation on
       standard error. Then return *e2*. This function is useful for
       debugging.
+
+      If the
+      [`debugger-on-trace`](@docroot@/command-ref/conf-file.md#conf-debugger-on-trace)
+      option is set to `true` and the `--debugger` flag is given, the
+      interactive debugger will be started when `trace` is called (like
+      [`break`](@docroot@/language/builtins.md#builtins-break)).
     )",
     .fun = prim_trace,
 });

From efd36b49e84335f324c9d4c2dcd511f126aa4d60 Mon Sep 17 00:00:00 2001
From: John Ericson 
Date: Fri, 19 Jan 2024 21:11:56 -0500
Subject: [PATCH 129/164] `nix hash path`, and preparatory refactors

- `nix store add` supports text hashing

  With functional test ensuring it matches `builtins.toFile`.

- Factored-out flags for both commands

- Move all common reusable flags to `libcmd`

  - They are not part of the *definition* of the CLI infra, just a usage
    of it.

  - The `libstore` flag couldn't go in `args.hh` in libutil anyways,
    would be awkward for it to live alone

- Shuffle around `Cmd*` hierarchy so flags for deprecated commands don't
  end up on the new ones
---
 src/libcmd/misc-store-flags.cc | 121 +++++++++++++++++++++++++++++++++
 src/libcmd/misc-store-flags.hh |  21 ++++++
 src/libutil/args.cc            |  67 ------------------
 src/libutil/args.hh            |  19 +++---
 src/nix/add-to-store.cc        |  20 +-----
 src/nix/hash.cc                |  99 ++++++++++++++++++---------
 src/nix/prefetch.cc            |   3 +-
 tests/functional/add.sh        |   5 ++
 tests/functional/hash-path.sh  |  28 +++++---
 9 files changed, 245 insertions(+), 138 deletions(-)
 create mode 100644 src/libcmd/misc-store-flags.cc
 create mode 100644 src/libcmd/misc-store-flags.hh

diff --git a/src/libcmd/misc-store-flags.cc b/src/libcmd/misc-store-flags.cc
new file mode 100644
index 000000000..e66d3f63b
--- /dev/null
+++ b/src/libcmd/misc-store-flags.cc
@@ -0,0 +1,121 @@
+#include "misc-store-flags.hh"
+
+namespace nix::flag
+{
+
+static void hashFormatCompleter(AddCompletions & completions, size_t index, std::string_view prefix)
+{
+    for (auto & format : hashFormats) {
+        if (hasPrefix(format, prefix)) {
+            completions.add(format);
+        }
+    }
+}
+
+Args::Flag hashFormatWithDefault(std::string && longName, HashFormat * hf)
+{
+    assert(*hf == nix::HashFormat::SRI);
+    return Args::Flag {
+            .longName = std::move(longName),
+            .description = "Hash format (`base16`, `nix32`, `base64`, `sri`). Default: `sri`.",
+            .labels = {"hash-format"},
+            .handler = {[hf](std::string s) {
+                *hf = parseHashFormat(s);
+            }},
+            .completer = hashFormatCompleter,
+    };
+}
+
+Args::Flag hashFormatOpt(std::string && longName, std::optional * ohf)
+{
+    return Args::Flag {
+            .longName = std::move(longName),
+            .description = "Hash format (`base16`, `nix32`, `base64`, `sri`).",
+            .labels = {"hash-format"},
+            .handler = {[ohf](std::string s) {
+                *ohf = std::optional{parseHashFormat(s)};
+            }},
+            .completer = hashFormatCompleter,
+    };
+}
+
+static void hashAlgoCompleter(AddCompletions & completions, size_t index, std::string_view prefix)
+{
+    for (auto & algo : hashAlgorithms)
+        if (hasPrefix(algo, prefix))
+            completions.add(algo);
+}
+
+Args::Flag hashAlgo(std::string && longName, HashAlgorithm * ha)
+{
+    return Args::Flag {
+            .longName = std::move(longName),
+            .description = "Hash algorithm (`md5`, `sha1`, `sha256`, or `sha512`).",
+            .labels = {"hash-algo"},
+            .handler = {[ha](std::string s) {
+                *ha = parseHashAlgo(s);
+            }},
+            .completer = hashAlgoCompleter,
+    };
+}
+
+Args::Flag hashAlgoOpt(std::string && longName, std::optional * oha)
+{
+    return Args::Flag {
+            .longName = std::move(longName),
+            .description = "Hash algorithm (`md5`, `sha1`, `sha256`, or `sha512`). Can be omitted for SRI hashes.",
+            .labels = {"hash-algo"},
+            .handler = {[oha](std::string s) {
+                *oha = std::optional{parseHashAlgo(s)};
+            }},
+            .completer = hashAlgoCompleter,
+    };
+}
+
+Args::Flag fileIngestionMethod(FileIngestionMethod * method)
+{
+    return Args::Flag {
+        .longName  = "mode",
+        // FIXME indentation carefully made for context, this is messed up.
+        .description = R"(
+    How to compute the hash of the input.
+    One of:
+
+    - `nar` (the default): Serialises the input as an archive (following the [_Nix Archive Format_](https://edolstra.github.io/pubs/phd-thesis.pdf#page=101)) and passes that to the hash function.
+
+    - `flat`: Assumes that the input is a single file and directly passes it to the hash function;
+        )",
+        .labels = {"file-ingestion-method"},
+        .handler = {[method](std::string s) {
+            *method = parseFileIngestionMethod(s);
+        }},
+    };
+}
+
+Args::Flag contentAddressMethod(ContentAddressMethod * method)
+{
+    return Args::Flag {
+        .longName  = "mode",
+        // FIXME indentation carefully made for context, this is messed up.
+        .description = R"(
+    How to compute the content-address of the store object.
+    One of:
+
+    - `nar` (the default): Serialises the input as an archive (following the [_Nix Archive Format_](https://edolstra.github.io/pubs/phd-thesis.pdf#page=101)) and passes that to the hash function.
+
+    - `flat`: Assumes that the input is a single file and directly passes it to the hash function;
+
+    - `text`: Like `flat`, but used for
+      [derivations](@docroot@/glossary.md#store-derivation) serialized in store objects and
+      [`builtins.toFile`](@docroot@/language/builtins.html#builtins-toFile).
+      For advanced use-cases only;
+      for regular usage prefer `nar` and `flat`.
+        )",
+        .labels = {"content-address-method"},
+        .handler = {[method](std::string s) {
+            *method = ContentAddressMethod::parse(s);
+        }},
+    };
+}
+
+}
diff --git a/src/libcmd/misc-store-flags.hh b/src/libcmd/misc-store-flags.hh
new file mode 100644
index 000000000..124372af7
--- /dev/null
+++ b/src/libcmd/misc-store-flags.hh
@@ -0,0 +1,21 @@
+#include "args.hh"
+#include "content-address.hh"
+
+namespace nix::flag {
+
+Args::Flag hashAlgo(std::string && longName, HashAlgorithm * ha);
+static inline Args::Flag hashAlgo(HashAlgorithm * ha)
+{
+    return hashAlgo("hash-algo", ha);
+}
+Args::Flag hashAlgoOpt(std::string && longName, std::optional * oha);
+Args::Flag hashFormatWithDefault(std::string && longName, HashFormat * hf);
+Args::Flag hashFormatOpt(std::string && longName, std::optional * ohf);
+static inline Args::Flag hashAlgoOpt(std::optional * oha)
+{
+    return hashAlgoOpt("hash-algo", oha);
+}
+Args::Flag fileIngestionMethod(FileIngestionMethod * method);
+Args::Flag contentAddressMethod(ContentAddressMethod * method);
+
+}
diff --git a/src/libutil/args.cc b/src/libutil/args.cc
index 8996cbe5b..a981ed9fb 100644
--- a/src/libutil/args.cc
+++ b/src/libutil/args.cc
@@ -544,73 +544,6 @@ nlohmann::json Args::toJSON()
     return res;
 }
 
-static void hashFormatCompleter(AddCompletions & completions, size_t index, std::string_view prefix)
-{
-    for (auto & format : hashFormats) {
-        if (hasPrefix(format, prefix)) {
-            completions.add(format);
-        }
-    }
-}
-
-Args::Flag Args::Flag::mkHashFormatFlagWithDefault(std::string &&longName, HashFormat * hf) {
-    assert(*hf == nix::HashFormat::SRI);
-    return Flag{
-            .longName = std::move(longName),
-            .description = "Hash format (`base16`, `nix32`, `base64`, `sri`). Default: `sri`.",
-            .labels = {"hash-format"},
-            .handler = {[hf](std::string s) {
-                *hf = parseHashFormat(s);
-            }},
-            .completer = hashFormatCompleter,
-    };
-}
-
-Args::Flag Args::Flag::mkHashFormatOptFlag(std::string && longName, std::optional * ohf) {
-    return Flag{
-            .longName = std::move(longName),
-            .description = "Hash format (`base16`, `nix32`, `base64`, `sri`).",
-            .labels = {"hash-format"},
-            .handler = {[ohf](std::string s) {
-                *ohf = std::optional{parseHashFormat(s)};
-            }},
-            .completer = hashFormatCompleter,
-    };
-}
-
-static void hashAlgoCompleter(AddCompletions & completions, size_t index, std::string_view prefix)
-{
-    for (auto & algo : hashAlgorithms)
-        if (hasPrefix(algo, prefix))
-            completions.add(algo);
-}
-
-Args::Flag Args::Flag::mkHashAlgoFlag(std::string && longName, HashAlgorithm * ha)
-{
-    return Flag{
-            .longName = std::move(longName),
-            .description = "Hash algorithm (`md5`, `sha1`, `sha256`, or `sha512`).",
-            .labels = {"hash-algo"},
-            .handler = {[ha](std::string s) {
-                *ha = parseHashAlgo(s);
-            }},
-            .completer = hashAlgoCompleter,
-    };
-}
-
-Args::Flag Args::Flag::mkHashAlgoOptFlag(std::string && longName, std::optional * oha)
-{
-    return Flag{
-            .longName = std::move(longName),
-            .description = "Hash algorithm (`md5`, `sha1`, `sha256`, or `sha512`). Can be omitted for SRI hashes.",
-            .labels = {"hash-algo"},
-            .handler = {[oha](std::string s) {
-                *oha = std::optional{parseHashAlgo(s)};
-            }},
-            .completer = hashAlgoCompleter,
-    };
-}
-
 static void _completePath(AddCompletions & completions, std::string_view prefix, bool onlyDirs)
 {
     completions.setType(Completions::Type::Filenames);
diff --git a/src/libutil/args.hh b/src/libutil/args.hh
index 6c9c48065..4b2e1d960 100644
--- a/src/libutil/args.hh
+++ b/src/libutil/args.hh
@@ -155,6 +155,8 @@ protected:
      */
     using CompleterClosure = std::function;
 
+public:
+
     /**
      * Description of flags / options
      *
@@ -175,19 +177,10 @@ protected:
         CompleterClosure completer;
 
         std::optional experimentalFeature;
-
-        static Flag mkHashAlgoFlag(std::string && longName, HashAlgorithm * ha);
-        static Flag mkHashAlgoFlag(HashAlgorithm * ha) {
-            return mkHashAlgoFlag("hash-algo", ha);
-        }
-        static Flag mkHashAlgoOptFlag(std::string && longName, std::optional * oha);
-        static Flag mkHashAlgoOptFlag(std::optional * oha) {
-            return mkHashAlgoOptFlag("hash-algo", oha);
-        }
-        static Flag mkHashFormatFlagWithDefault(std::string && longName, HashFormat * hf);
-        static Flag mkHashFormatOptFlag(std::string && longName, std::optional * ohf);
     };
 
+protected:
+
     /**
      * Index of all registered "long" flag descriptions (flags like
      * `--long`).
@@ -206,6 +199,8 @@ protected:
      */
     virtual bool processFlag(Strings::iterator & pos, Strings::iterator end);
 
+public:
+
     /**
      * Description of positional arguments
      *
@@ -220,6 +215,8 @@ protected:
         CompleterClosure completer;
     };
 
+protected:
+
     /**
      * Queue of expected positional argument forms.
      *
diff --git a/src/nix/add-to-store.cc b/src/nix/add-to-store.cc
index 9ea37ab4c..ca2daecab 100644
--- a/src/nix/add-to-store.cc
+++ b/src/nix/add-to-store.cc
@@ -3,6 +3,7 @@
 #include "store-api.hh"
 #include "archive.hh"
 #include "posix-source-accessor.hh"
+#include "misc-store-flags.hh"
 
 using namespace nix;
 
@@ -26,23 +27,9 @@ struct CmdAddToStore : MixDryRun, StoreCommand
             .handler = {&namePart},
         });
 
-        addFlag({
-            .longName  = "mode",
-            .description = R"(
-    How to compute the hash of the input.
-    One of:
+        addFlag(flag::contentAddressMethod(&caMethod));
 
-    - `nar` (the default): Serialises the input as an archive (following the [_Nix Archive Format_](https://edolstra.github.io/pubs/phd-thesis.pdf#page=101)) and passes that to the hash function.
-
-    - `flat`: Assumes that the input is a single file and directly passes it to the hash function;
-            )",
-            .labels = {"hash-mode"},
-            .handler = {[this](std::string s) {
-                this->caMethod = parseFileIngestionMethod(s);
-            }},
-        });
-
-        addFlag(Flag::mkHashAlgoFlag(&hashAlgo));
+        addFlag(flag::hashAlgo(&hashAlgo));
     }
 
     void run(ref store) override
@@ -63,7 +50,6 @@ struct CmdAddToStore : MixDryRun, StoreCommand
 
 struct CmdAdd : CmdAddToStore
 {
-
     std::string description() override
     {
         return "Add a file or directory to the Nix store";
diff --git a/src/nix/hash.cc b/src/nix/hash.cc
index eec1c0eae..98d227f0e 100644
--- a/src/nix/hash.cc
+++ b/src/nix/hash.cc
@@ -6,11 +6,12 @@
 #include "references.hh"
 #include "archive.hh"
 #include "posix-source-accessor.hh"
+#include "misc-store-flags.hh"
 
 using namespace nix;
 
 /**
- * Base for `nix hash file` (deprecated), `nix hash path` and `nix-hash` (legacy).
+ * Base for `nix hash path`, `nix hash file` (deprecated), and `nix-hash` (legacy).
  *
  * Deprecation Issue: https://github.com/NixOS/nix/issues/8876
  */
@@ -19,12 +20,21 @@ struct CmdHashBase : Command
     FileIngestionMethod mode;
     HashFormat hashFormat = HashFormat::SRI;
     bool truncate = false;
-    HashAlgorithm ha = HashAlgorithm::SHA256;
+    HashAlgorithm hashAlgo = HashAlgorithm::SHA256;
     std::vector paths;
     std::optional modulus;
 
     explicit CmdHashBase(FileIngestionMethod mode) : mode(mode)
     {
+        expectArgs({
+            .label = "paths",
+            .handler = {&paths},
+            .completer = completePath
+        });
+
+        // FIXME The following flags should be deprecated, but we don't
+        // yet have a mechanism for that.
+
         addFlag({
             .longName = "sri",
             .description = "Print the hash in SRI format.",
@@ -49,22 +59,7 @@ struct CmdHashBase : Command
             .handler = {&hashFormat, HashFormat::Base16},
         });
 
-        addFlag(Flag::mkHashAlgoFlag("type", &ha));
-
-        #if 0
-        addFlag({
-            .longName = "modulo",
-            .description = "Compute the hash modulo the specified string.",
-            .labels = {"modulus"},
-            .handler = {&modulus},
-        });
-        #endif\
-
-        expectArgs({
-            .label = "paths",
-            .handler = {&paths},
-            .completer = completePath
-        });
+        addFlag(flag::hashAlgo("type", &hashAlgo));
     }
 
     std::string description() override
@@ -85,9 +80,9 @@ struct CmdHashBase : Command
 
             std::unique_ptr hashSink;
             if (modulus)
-                hashSink = std::make_unique(ha, *modulus);
+                hashSink = std::make_unique(hashAlgo, *modulus);
             else
-                hashSink = std::make_unique(ha);
+                hashSink = std::make_unique(hashAlgo);
 
             auto [accessor, canonPath] = PosixSourceAccessor::createAtRoot(path);
             dumpPath(accessor, canonPath, *hashSink, mode);
@@ -99,15 +94,53 @@ struct CmdHashBase : Command
     }
 };
 
+/**
+ * `nix hash path`
+ */
+struct CmdHashPath : CmdHashBase
+{
+    CmdHashPath()
+        : CmdHashBase(FileIngestionMethod::Recursive)
+    {
+        addFlag(flag::hashAlgo("algo", &hashAlgo));
+        addFlag(flag::fileIngestionMethod(&mode));
+        addFlag(flag::hashFormatWithDefault("format", &hashFormat));
+        #if 0
+        addFlag({
+            .longName = "modulo",
+            .description = "Compute the hash modulo the specified string.",
+            .labels = {"modulus"},
+            .handler = {&modulus},
+        });
+        #endif
+    }
+};
+
+/**
+ * For deprecated `nix hash file`
+ *
+ * Deprecation Issue: https://github.com/NixOS/nix/issues/8876
+ */
+struct CmdHashFile : CmdHashBase
+{
+    CmdHashFile()
+        : CmdHashBase(FileIngestionMethod::Flat)
+    {
+    }
+};
+
+/**
+ * For deprecated `nix hash to-*`
+ */
 struct CmdToBase : Command
 {
     HashFormat hashFormat;
-    std::optional ht;
+    std::optional hashAlgo;
     std::vector args;
 
     CmdToBase(HashFormat hashFormat) : hashFormat(hashFormat)
     {
-        addFlag(Flag::mkHashAlgoOptFlag("type", &ht));
+        addFlag(flag::hashAlgoOpt("type", &hashAlgo));
         expectArgs("strings", &args);
     }
 
@@ -124,7 +157,7 @@ struct CmdToBase : Command
     {
         warn("The old format conversion sub commands of `nix hash` where deprecated in favor of `nix hash convert`.");
         for (auto s : args)
-            logger->cout(Hash::parseAny(s, ht).to_string(hashFormat, hashFormat == HashFormat::SRI));
+            logger->cout(Hash::parseAny(s, hashAlgo).to_string(hashFormat, hashFormat == HashFormat::SRI));
     }
 };
 
@@ -139,9 +172,9 @@ struct CmdHashConvert : Command
     std::vector hashStrings;
 
     CmdHashConvert(): to(HashFormat::SRI) {
-        addFlag(Args::Flag::mkHashFormatOptFlag("from", &from));
-        addFlag(Args::Flag::mkHashFormatFlagWithDefault("to", &to));
-        addFlag(Args::Flag::mkHashAlgoOptFlag(&algo));
+        addFlag(flag::hashFormatOpt("from", &from));
+        addFlag(flag::hashFormatWithDefault("to", &to));
+        addFlag(flag::hashAlgoOpt(&algo));
         expectArgs({
            .label = "hashes",
            .handler = {&hashStrings},
@@ -181,8 +214,8 @@ struct CmdHash : NixMultiCommand
             "hash",
             {
                 {"convert", []() { return make_ref();}},
-                {"file", []() { return make_ref(FileIngestionMethod::Flat);; }},
-                {"path", []() { return make_ref(FileIngestionMethod::Recursive); }},
+                {"path", []() { return make_ref(); }},
+                {"file", []() { return make_ref(); }},
                 {"to-base16", []() { return make_ref(HashFormat::Base16); }},
                 {"to-base32", []() { return make_ref(HashFormat::Nix32); }},
                 {"to-base64", []() { return make_ref(HashFormat::Base64); }},
@@ -206,7 +239,7 @@ static int compatNixHash(int argc, char * * argv)
     // Wait until `nix hash convert` is not hidden behind experimental flags anymore.
     // warn("`nix-hash` has been deprecated in favor of `nix hash convert`.");
 
-    std::optional ha;
+    std::optional hashAlgo;
     bool flat = false;
     HashFormat hashFormat = HashFormat::Base16;
     bool truncate = false;
@@ -226,7 +259,7 @@ static int compatNixHash(int argc, char * * argv)
         else if (*arg == "--truncate") truncate = true;
         else if (*arg == "--type") {
             std::string s = getArg(*arg, arg, end);
-            ha = parseHashAlgo(s);
+            hashAlgo = parseHashAlgo(s);
         }
         else if (*arg == "--to-base16") {
             op = opTo;
@@ -253,8 +286,8 @@ static int compatNixHash(int argc, char * * argv)
 
     if (op == opHash) {
         CmdHashBase cmd(flat ? FileIngestionMethod::Flat : FileIngestionMethod::Recursive);
-        if (!ha.has_value()) ha = HashAlgorithm::MD5;
-        cmd.ha = ha.value();
+        if (!hashAlgo.has_value()) hashAlgo = HashAlgorithm::MD5;
+        cmd.hashAlgo = hashAlgo.value();
         cmd.hashFormat = hashFormat;
         cmd.truncate = truncate;
         cmd.paths = ss;
@@ -264,7 +297,7 @@ static int compatNixHash(int argc, char * * argv)
     else {
         CmdToBase cmd(hashFormat);
         cmd.args = ss;
-        if (ha.has_value()) cmd.ht = ha;
+        if (hashAlgo.has_value()) cmd.hashAlgo = hashAlgo;
         cmd.run();
     }
 
diff --git a/src/nix/prefetch.cc b/src/nix/prefetch.cc
index 6e3f878d9..fabec5d88 100644
--- a/src/nix/prefetch.cc
+++ b/src/nix/prefetch.cc
@@ -10,6 +10,7 @@
 #include "eval-inline.hh"
 #include "legacy.hh"
 #include "posix-source-accessor.hh"
+#include "misc-store-flags.hh"
 
 #include 
 
@@ -284,7 +285,7 @@ struct CmdStorePrefetchFile : StoreCommand, MixJSON
             }}
         });
 
-        addFlag(Flag::mkHashAlgoFlag("hash-type", &hashAlgo));
+        addFlag(flag::hashAlgo("hash-type", &hashAlgo));
 
         addFlag({
             .longName = "executable",
diff --git a/tests/functional/add.sh b/tests/functional/add.sh
index 762e01dbe..a4bb0e225 100644
--- a/tests/functional/add.sh
+++ b/tests/functional/add.sh
@@ -45,3 +45,8 @@ clearStore
     [[ "$path1" == "$path2" ]]
     path4=$(nix store add --mode flat --hash-algo sha1 ./dummy)
 )
+(
+    path1=$(nix store add --mode text ./dummy)
+    path2=$(nix eval --impure --raw --expr 'builtins.toFile "dummy" (builtins.readFile ./dummy)')
+    [[ "$path1" == "$path2" ]]
+)
diff --git a/tests/functional/hash-path.sh b/tests/functional/hash-path.sh
index 6d096b29b..4ad9f8ff2 100644
--- a/tests/functional/hash-path.sh
+++ b/tests/functional/hash-path.sh
@@ -2,19 +2,24 @@ source common.sh
 
 try () {
     printf "%s" "$2" > $TEST_ROOT/vector
-    hash="$(nix-hash --flat ${FORMAT_FLAG-} --type "$1" "$TEST_ROOT/vector")"
+    hash="$(nix-hash --flat ${FORMAT+--$FORMAT} --type "$1" "$TEST_ROOT/vector")"
     if ! (( "${NO_TEST_CLASSIC-}" )) && test "$hash" != "$3"; then
         echo "try nix-hash: hash $1, expected $3, got $hash"
         exit 1
     fi
-    hash="$(nix hash file ${FORMAT_FLAG-} --type "$1" "$TEST_ROOT/vector")"
+    hash="$(nix hash file ${FORMAT+--$FORMAT} --type "$1" "$TEST_ROOT/vector")"
+    if ! (( "${NO_TEST_NIX_COMMAND-}" )) && test "$hash" != "$3"; then
+        echo "try nix hash: hash $1, expected $3, got $hash"
+        exit 1
+    fi
+    hash="$(nix hash path --mode flat ${FORMAT+--format $FORMAT} --algo "$1" "$TEST_ROOT/vector")"
     if ! (( "${NO_TEST_NIX_COMMAND-}" )) && test "$hash" != "$3"; then
         echo "try nix hash: hash $1, expected $3, got $hash"
         exit 1
     fi
 }
 
-FORMAT_FLAG=--base16
+FORMAT=base16
 try md5 "" "d41d8cd98f00b204e9800998ecf8427e"
 try md5 "a" "0cc175b9c0f1b6a831c399e269772661"
 try md5 "abc" "900150983cd24fb0d6963f7d28e17f72"
@@ -34,18 +39,18 @@ try sha256 "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq" "248d6a61d
 try sha512 "" "cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e"
 try sha512 "abc" "ddaf35a193617abacc417349ae20413112e6fa4e89a97ea20a9eeee64b55d39a2192992a274fc1a836ba3c23a3feebbd454d4423643ce80e2a9ac94fa54ca49f"
 try sha512 "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq" "204a8fc6dda82f0a0ced7beb8e08a41657c16ef468b228a8279be331a703c33596fd15c13b1b07f9aa1d3bea57789ca031ad85c7a71dd70354ec631238ca3445"
-unset FORMAT_FLAG
+unset FORMAT
 
-FORMAT_FLAG=--base32
+FORMAT=base32
 try sha256 "abc" "1b8m03r63zqhnjf7l5wnldhh7c134ap5vpj0850ymkq1iyzicy5s"
-unset FORMAT_FLAG
+unset FORMAT
 
-FORMAT_FLAG=--sri
+FORMAT=sri
 try sha512 "" "sha512-z4PhNX7vuL3xVChQ1m2AB9Yg5AULVxXcg/SpIdNs6c5H0NE8XYXysP+DGNKHfuwvY7kxvUdBeoGlODJ6+SfaPg=="
 try sha512 "abc" "sha512-3a81oZNherrMQXNJriBBMRLm+k6JqX6iCp7u5ktV05ohkpkqJ0/BqDa6PCOj/uu9RU1EI2Q86A4qmslPpUyknw=="
 try sha512 "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq" "sha512-IEqPxt2oLwoM7XvrjgikFlfBbvRosiioJ5vjMacDwzWW/RXBOxsH+aodO+pXeJygMa2Fx6cd1wNU7GMSOMo0RQ=="
 try sha256 "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq" "sha256-JI1qYdIGOLjlwCaTDD5gOaM85Flk/yFn9uzt1BnbBsE="
-unset FORMAT_FLAG
+unset FORMAT
 
 # nix-hash [--flat] defaults to the Base16 format
 NO_TEST_NIX_COMMAND=1 try sha512 "abc" "ddaf35a193617abacc417349ae20413112e6fa4e89a97ea20a9eeee64b55d39a2192992a274fc1a836ba3c23a3feebbd454d4423643ce80e2a9ac94fa54ca49f"
@@ -56,7 +61,12 @@ NO_TEST_CLASSIC=1 try sha512 "abc" "sha512-3a81oZNherrMQXNJriBBMRLm+k6JqX6iCp7u5
 try2 () {
     hash=$(nix-hash --type "$1" $TEST_ROOT/hash-path)
     if test "$hash" != "$2"; then
-        echo "hash $1, expected $2, got $hash"
+        echo "try nix-hash; hash $1, expected $2, got $hash"
+        exit 1
+    fi
+    hash="$(nix hash path --mode nar --format base16 --algo "$1" "$TEST_ROOT/hash-path")"
+    if test "$hash" != "$2"; then
+        echo "try nix hash: hash $1, expected $2, got $hash"
         exit 1
     fi
 }

From e391fc21019a2ada9a431e195e56188add23427f Mon Sep 17 00:00:00 2001
From: Eelco Dolstra 
Date: Tue, 20 Feb 2024 21:05:10 +0100
Subject: [PATCH 130/164] Add comments

---
 src/libfetchers/tarball.cc | 13 +++++++++++++
 1 file changed, 13 insertions(+)

diff --git a/src/libfetchers/tarball.cc b/src/libfetchers/tarball.cc
index e3b1fbe56..d4edbb767 100644
--- a/src/libfetchers/tarball.cc
+++ b/src/libfetchers/tarball.cc
@@ -138,6 +138,8 @@ DownloadTarballResult downloadTarball(
         cached.reset();
 
     if (cached && !cached->expired)
+        /* We previously downloaded this tarball and it's younger than
+           `tarballTtl`, so no need to check the server. */
         return attrsToResult(cached->infoAttrs);
 
     auto _res = std::make_shared>();
@@ -165,6 +167,8 @@ DownloadTarballResult downloadTarball(
     Attrs infoAttrs;
 
     if (res->cached) {
+        /* The server says that the previously downloaded version is
+           still current. */
         infoAttrs = cached->infoAttrs;
     } else {
         infoAttrs.insert_or_assign("etag", res->etag);
@@ -229,6 +233,11 @@ struct CurlInputScheme : InputScheme
             if (auto n = string2Int(*i))
                 input.attrs.insert_or_assign("lastModified", *n);
 
+        /* The URL query parameters serve two roles: specifying fetch
+           settings for Nix itself, and arbitrary data as part of the
+           HTTP request. Now that we've processed the Nix-specific
+           attributes above, remove them so we don't also send them as
+           part of the HTTP request. */
         for (auto & param : allowedAttrs())
             url.query.erase(param);
 
@@ -288,6 +297,10 @@ struct FileInputScheme : CurlInputScheme
     {
         auto input(_input);
 
+        /* Unlike TarballInputScheme, this stores downloaded files in
+           the Nix store directly, since there is little deduplication
+           benefit in using the Git cache for single big files like
+           tarballs. */
         auto file = downloadFile(store, getStrAttr(input.attrs, "url"), input.getName(), false);
 
         auto narHash = store->queryPathInfo(file.storePath)->narHash;

From 4ae5091716fa023230a779db03c1cf1e5687c6fb Mon Sep 17 00:00:00 2001
From: Bob van der Linden 
Date: Wed, 21 Feb 2024 21:55:51 +0100
Subject: [PATCH 131/164] nix profile: suggest removal using entry name

When a file conflict arises during a package install a suggestion is
made to remove the old entry. This was previously done using the
installable URLs of the old entry. These URLs are quite verbose and
often do not equal the URL of the existing entry.

This change uses the recently introduced profile entry name for the
suggestion, resulting in a simpler output.

The improvement is easily seen in the change to the functional test.
---
 src/nix/profile.cc              | 12 ++++++------
 tests/functional/nix-profile.sh |  2 +-
 2 files changed, 7 insertions(+), 7 deletions(-)

diff --git a/src/nix/profile.cc b/src/nix/profile.cc
index 812e703b4..fc669d5ed 100644
--- a/src/nix/profile.cc
+++ b/src/nix/profile.cc
@@ -400,13 +400,13 @@ struct CmdProfileInstall : InstallablesCommand, MixDefaultProfile
             //       See https://github.com/NixOS/nix/compare/3efa476c5439f8f6c1968a6ba20a31d1239c2f04..1fe5d172ece51a619e879c4b86f603d9495cc102
             auto findRefByFilePath = [&](Iterator begin, Iterator end) {
                 for (auto it = begin; it != end; it++) {
-                    auto & profileElement = it->second;
+                    auto & [name, profileElement] = *it;
                     for (auto & storePath : profileElement.storePaths) {
                         if (conflictError.fileA.starts_with(store->printStorePath(storePath))) {
-                            return std::pair(conflictError.fileA, profileElement.toInstallables(*store));
+                            return std::tuple(conflictError.fileA, name, profileElement.toInstallables(*store));
                         }
                         if (conflictError.fileB.starts_with(store->printStorePath(storePath))) {
-                            return std::pair(conflictError.fileB, profileElement.toInstallables(*store));
+                            return std::tuple(conflictError.fileB, name, profileElement.toInstallables(*store));
                         }
                     }
                 }
@@ -415,9 +415,9 @@ struct CmdProfileInstall : InstallablesCommand, MixDefaultProfile
             // There are 2 conflicting files. We need to find out which one is from the already installed package and
             // which one is the package that is the new package that is being installed.
             // The first matching package is the one that was already installed (original).
-            auto [originalConflictingFilePath, originalConflictingRefs] = findRefByFilePath(manifest.elements.begin(), manifest.elements.end());
+            auto [originalConflictingFilePath, originalEntryName, originalConflictingRefs] = findRefByFilePath(manifest.elements.begin(), manifest.elements.end());
             // The last matching package is the one that was going to be installed (new).
-            auto [newConflictingFilePath, newConflictingRefs] = findRefByFilePath(manifest.elements.rbegin(), manifest.elements.rend());
+            auto [newConflictingFilePath, newEntryName, newConflictingRefs] = findRefByFilePath(manifest.elements.rbegin(), manifest.elements.rend());
 
             throw Error(
                 "An existing package already provides the following file:\n"
@@ -443,7 +443,7 @@ struct CmdProfileInstall : InstallablesCommand, MixDefaultProfile
                 "  nix profile install %4% --priority %7%\n",
                 originalConflictingFilePath,
                 newConflictingFilePath,
-                concatStringsSep(" ", originalConflictingRefs),
+                originalEntryName,
                 concatStringsSep(" ", newConflictingRefs),
                 conflictError.priority,
                 conflictError.priority - 1,
diff --git a/tests/functional/nix-profile.sh b/tests/functional/nix-profile.sh
index 35a62fbe2..88b713d53 100644
--- a/tests/functional/nix-profile.sh
+++ b/tests/functional/nix-profile.sh
@@ -166,7 +166,7 @@ error: An existing package already provides the following file:
 
        To remove the existing package:
 
-         nix profile remove path:${flake1Dir}#packages.${system}.default
+         nix profile remove flake1
 
        The new package can also be installed next to the existing one by assigning a different priority.
        The conflicting packages have a priority of 5.

From 040874e4db904ecbca3964b6d22d35c423969729 Mon Sep 17 00:00:00 2001
From: Rebecca Turner 
Date: Thu, 22 Feb 2024 17:14:33 -0800
Subject: [PATCH 132/164] Print all stack frames

---
 src/libutil/error.cc | 3 ---
 1 file changed, 3 deletions(-)

diff --git a/src/libutil/error.cc b/src/libutil/error.cc
index 4a9efc0b5..d2a3d2114 100644
--- a/src/libutil/error.cc
+++ b/src/libutil/error.cc
@@ -373,7 +373,6 @@ std::ostream & showErrorInfo(std::ostream & out, const ErrorInfo & einfo, bool s
     // prepended to each element of the trace
     auto ellipsisIndent = "  ";
 
-    bool frameOnly = false;
     if (!einfo.traces.empty()) {
         // Stack traces seen since we last printed a chunk of `duplicate frames
         // omitted`.
@@ -384,7 +383,6 @@ std::ostream & showErrorInfo(std::ostream & out, const ErrorInfo & einfo, bool s
 
         for (const auto & trace : einfo.traces) {
             if (trace.hint.str().empty()) continue;
-            if (frameOnly && !trace.frame) continue;
 
             if (!showTrace && count > 3) {
                 oss << "\n" << ANSI_WARNING "(stack trace truncated; use '--show-trace' to show the full trace)" ANSI_NORMAL << "\n";
@@ -400,7 +398,6 @@ std::ostream & showErrorInfo(std::ostream & out, const ErrorInfo & einfo, bool s
             printSkippedTracesMaybe(oss, ellipsisIndent, count, skippedTraces, tracesSeen);
 
             count++;
-            frameOnly = trace.frame;
 
             printTrace(oss, ellipsisIndent, count, trace);
         }

From f05c13ecc2345cb8c668289369b066b0520b919b Mon Sep 17 00:00:00 2001
From: Rebecca Turner 
Date: Thu, 22 Feb 2024 17:14:55 -0800
Subject: [PATCH 133/164] Remove the concept of "skipped frames"

---
 src/libexpr/eval-error.cc | 14 +++-----------
 src/libexpr/eval-error.hh |  2 +-
 src/libexpr/eval.cc       |  9 ++++-----
 src/libexpr/eval.hh       |  2 +-
 src/libexpr/primops.cc    |  7 +++----
 src/libutil/error.cc      |  7 +++----
 src/libutil/error.hh      |  3 +--
 7 files changed, 16 insertions(+), 28 deletions(-)

diff --git a/src/libexpr/eval-error.cc b/src/libexpr/eval-error.cc
index f4cdeec5c..8db03610b 100644
--- a/src/libexpr/eval-error.cc
+++ b/src/libexpr/eval-error.cc
@@ -28,15 +28,7 @@ template
 EvalErrorBuilder & EvalErrorBuilder::withTrace(PosIdx pos, const std::string_view text)
 {
     error.err.traces.push_front(
-        Trace{.pos = error.state.positions[pos], .hint = HintFmt(std::string(text)), .frame = false});
-    return *this;
-}
-
-template
-EvalErrorBuilder & EvalErrorBuilder::withFrameTrace(PosIdx pos, const std::string_view text)
-{
-    error.err.traces.push_front(
-        Trace{.pos = error.state.positions[pos], .hint = HintFmt(std::string(text)), .frame = true});
+        Trace{.pos = error.state.positions[pos], .hint = HintFmt(std::string(text))});
     return *this;
 }
 
@@ -63,9 +55,9 @@ EvalErrorBuilder & EvalErrorBuilder::withFrame(const Env & env, const Expr
 }
 
 template
-EvalErrorBuilder & EvalErrorBuilder::addTrace(PosIdx pos, HintFmt hint, bool frame)
+EvalErrorBuilder & EvalErrorBuilder::addTrace(PosIdx pos, HintFmt hint)
 {
-    error.addTrace(error.state.positions[pos], hint, frame);
+    error.addTrace(error.state.positions[pos], hint);
     return *this;
 }
 
diff --git a/src/libexpr/eval-error.hh b/src/libexpr/eval-error.hh
index 392902ad2..7e0cbe982 100644
--- a/src/libexpr/eval-error.hh
+++ b/src/libexpr/eval-error.hh
@@ -89,7 +89,7 @@ public:
 
     [[nodiscard, gnu::noinline]] EvalErrorBuilder & withFrame(const Env & e, const Expr & ex);
 
-    [[nodiscard, gnu::noinline]] EvalErrorBuilder & addTrace(PosIdx pos, HintFmt hint, bool frame = false);
+    [[nodiscard, gnu::noinline]] EvalErrorBuilder & addTrace(PosIdx pos, HintFmt hint);
 
     template
     [[nodiscard, gnu::noinline]] EvalErrorBuilder &
diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc
index 205d40b83..54b1125ce 100644
--- a/src/libexpr/eval.cc
+++ b/src/libexpr/eval.cc
@@ -811,9 +811,9 @@ void EvalState::addErrorTrace(Error & e, const char * s, const std::string & s2)
     e.addTrace(nullptr, s, s2);
 }
 
-void EvalState::addErrorTrace(Error & e, const PosIdx pos, const char * s, const std::string & s2, bool frame) const
+void EvalState::addErrorTrace(Error & e, const PosIdx pos, const char * s, const std::string & s2) const
 {
-    e.addTrace(positions[pos], HintFmt(s, s2), frame);
+    e.addTrace(positions[pos], HintFmt(s, s2));
 }
 
 template
@@ -1587,9 +1587,8 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value &
                         "while calling %s",
                         lambda.name
                         ? concatStrings("'", symbols[lambda.name], "'")
-                        : "anonymous lambda",
-                        true);
-                    if (pos) addErrorTrace(e, pos, "from call site%s", "", true);
+                        : "anonymous lambda");
+                    if (pos) addErrorTrace(e, pos, "from call site%s", "");
                 }
                 throw;
             }
diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh
index 42fe0d3e4..80b583eb1 100644
--- a/src/libexpr/eval.hh
+++ b/src/libexpr/eval.hh
@@ -435,7 +435,7 @@ public:
     [[gnu::noinline]]
     void addErrorTrace(Error & e, const char * s, const std::string & s2) const;
     [[gnu::noinline]]
-    void addErrorTrace(Error & e, const PosIdx pos, const char * s, const std::string & s2, bool frame = false) const;
+    void addErrorTrace(Error & e, const PosIdx pos, const char * s, const std::string & s2) const;
 
 public:
     /**
diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc
index 42cfa4917..835afba82 100644
--- a/src/libexpr/primops.cc
+++ b/src/libexpr/primops.cc
@@ -811,7 +811,7 @@ static void prim_addErrorContext(EvalState & state, const PosIdx pos, Value * *
         auto message = state.coerceToString(pos, *args[0], context,
                 "while evaluating the error message passed to builtins.addErrorContext",
                 false, false).toOwned();
-        e.addTrace(nullptr, HintFmt(message), true);
+        e.addTrace(nullptr, HintFmt(message));
         throw;
     }
 }
@@ -1075,7 +1075,7 @@ static void prim_derivationStrict(EvalState & state, const PosIdx pos, Value * *
         e.addTrace(nullptr, HintFmt(
                 "while evaluating derivation '%s'\n"
                 "  whose name attribute is located at %s",
-                drvName, pos), true);
+                drvName, pos));
         throw;
     }
 }
@@ -1233,8 +1233,7 @@ drvName, Bindings * attrs, Value & v)
 
         } catch (Error & e) {
             e.addTrace(state.positions[i->pos],
-                HintFmt("while evaluating attribute '%1%' of derivation '%2%'", key, drvName),
-                true);
+                HintFmt("while evaluating attribute '%1%' of derivation '%2%'", key, drvName));
             throw;
         }
     }
diff --git a/src/libutil/error.cc b/src/libutil/error.cc
index d2a3d2114..d1e864a1a 100644
--- a/src/libutil/error.cc
+++ b/src/libutil/error.cc
@@ -11,9 +11,9 @@
 
 namespace nix {
 
-void BaseError::addTrace(std::shared_ptr && e, HintFmt hint, bool frame)
+void BaseError::addTrace(std::shared_ptr && e, HintFmt hint)
 {
-    err.traces.push_front(Trace { .pos = std::move(e), .hint = hint, .frame = frame });
+    err.traces.push_front(Trace { .pos = std::move(e), .hint = hint });
 }
 
 void throwExceptionSelfCheck(){
@@ -61,8 +61,7 @@ inline bool operator<(const Trace& lhs, const Trace& rhs)
     // This formats a freshly formatted hint string and then throws it away, which
     // shouldn't be much of a problem because it only runs when pos is equal, and this function is
     // used for trace printing, which is infrequent.
-    return std::forward_as_tuple(lhs.hint.str(), lhs.frame)
-        < std::forward_as_tuple(rhs.hint.str(), rhs.frame);
+    return lhs.hint.str() < rhs.hint.str();
 }
 inline bool operator> (const Trace& lhs, const Trace& rhs) { return rhs < lhs; }
 inline bool operator<=(const Trace& lhs, const Trace& rhs) { return !(lhs > rhs); }
diff --git a/src/libutil/error.hh b/src/libutil/error.hh
index 2e5de5d32..89f5ad021 100644
--- a/src/libutil/error.hh
+++ b/src/libutil/error.hh
@@ -64,7 +64,6 @@ void printCodeLines(std::ostream & out,
 struct Trace {
     std::shared_ptr pos;
     HintFmt hint;
-    bool frame;
 };
 
 inline bool operator<(const Trace& lhs, const Trace& rhs);
@@ -162,7 +161,7 @@ public:
         addTrace(std::move(e), HintFmt(std::string(fs), args...));
     }
 
-    void addTrace(std::shared_ptr && e, HintFmt hint, bool frame = false);
+    void addTrace(std::shared_ptr && e, HintFmt hint);
 
     bool hasTrace() const { return !err.traces.empty(); }
 

From 91e89628fdfe7b08e0f61b8531edd31833330e04 Mon Sep 17 00:00:00 2001
From: Rebecca Turner 
Date: Thu, 22 Feb 2024 17:18:27 -0800
Subject: [PATCH 134/164] Make `addErrorTrace` variadic

---
 src/libexpr/eval.cc | 12 +++++++-----
 src/libexpr/eval.hh |  6 ++++--
 2 files changed, 11 insertions(+), 7 deletions(-)

diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc
index 54b1125ce..c4e163b08 100644
--- a/src/libexpr/eval.cc
+++ b/src/libexpr/eval.cc
@@ -806,14 +806,16 @@ void EvalState::runDebugRepl(const Error * error, const Env & env, const Expr &
     }
 }
 
-void EvalState::addErrorTrace(Error & e, const char * s, const std::string & s2) const
+template
+void EvalState::addErrorTrace(Error & e, const Args & ... formatArgs) const
 {
-    e.addTrace(nullptr, s, s2);
+    e.addTrace(nullptr, HintFmt(formatArgs...));
 }
 
-void EvalState::addErrorTrace(Error & e, const PosIdx pos, const char * s, const std::string & s2) const
+template
+void EvalState::addErrorTrace(Error & e, const PosIdx pos, const Args & ... formatArgs) const
 {
-    e.addTrace(positions[pos], HintFmt(s, s2));
+    e.addTrace(positions[pos], HintFmt(formatArgs...));
 }
 
 template
@@ -1588,7 +1590,7 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value &
                         lambda.name
                         ? concatStrings("'", symbols[lambda.name], "'")
                         : "anonymous lambda");
-                    if (pos) addErrorTrace(e, pos, "from call site%s", "");
+                    if (pos) addErrorTrace(e, pos, "from call site");
                 }
                 throw;
             }
diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh
index 80b583eb1..01abd4eb1 100644
--- a/src/libexpr/eval.hh
+++ b/src/libexpr/eval.hh
@@ -432,10 +432,12 @@ public:
     std::string_view forceString(Value & v, NixStringContext & context, const PosIdx pos, std::string_view errorCtx);
     std::string_view forceStringNoCtx(Value & v, const PosIdx pos, std::string_view errorCtx);
 
+    template
     [[gnu::noinline]]
-    void addErrorTrace(Error & e, const char * s, const std::string & s2) const;
+    void addErrorTrace(Error & e, const Args & ... formatArgs) const;
+    template
     [[gnu::noinline]]
-    void addErrorTrace(Error & e, const PosIdx pos, const char * s, const std::string & s2) const;
+    void addErrorTrace(Error & e, const PosIdx pos, const Args & ... formatArgs) const;
 
 public:
     /**

From fe6408b5df4a2a4c2342a02bc9f94abf4ca88a85 Mon Sep 17 00:00:00 2001
From: Rebecca Turner 
Date: Thu, 22 Feb 2024 17:58:37 -0800
Subject: [PATCH 135/164] Update snapshots

---
 tests/functional/lang/eval-fail-duplicate-traces.err.exp   | 7 +++++++
 .../eval-fail-foldlStrict-strict-op-application.err.exp    | 7 +++++++
 tests/functional/lang/eval-fail-mutual-recursion.err.exp   | 7 +++++++
 3 files changed, 21 insertions(+)

diff --git a/tests/functional/lang/eval-fail-duplicate-traces.err.exp b/tests/functional/lang/eval-fail-duplicate-traces.err.exp
index 32ad9b376..cedaebd3b 100644
--- a/tests/functional/lang/eval-fail-duplicate-traces.err.exp
+++ b/tests/functional/lang/eval-fail-duplicate-traces.err.exp
@@ -41,4 +41,11 @@ error:
              |                ^
             5|     if n > 0
 
+       … while calling the 'throw' builtin
+         at /pwd/lang/eval-fail-duplicate-traces.nix:7:10:
+            6|     then throwAfter (n - 1)
+            7|     else throw "Uh oh!";
+             |          ^
+            8| in
+
        error: Uh oh!
diff --git a/tests/functional/lang/eval-fail-foldlStrict-strict-op-application.err.exp b/tests/functional/lang/eval-fail-foldlStrict-strict-op-application.err.exp
index 7cb08af8a..4903bc82d 100644
--- a/tests/functional/lang/eval-fail-foldlStrict-strict-op-application.err.exp
+++ b/tests/functional/lang/eval-fail-foldlStrict-strict-op-application.err.exp
@@ -27,4 +27,11 @@ error:
              |      ^
             6|
 
+       … while calling the 'throw' builtin
+         at /pwd/lang/eval-fail-foldlStrict-strict-op-application.nix:5:9:
+            4|   null
+            5|   [ (_: throw "Not the final value, but is still forced!") (_: 23) ]
+             |         ^
+            6|
+
        error: Not the final value, but is still forced!
diff --git a/tests/functional/lang/eval-fail-mutual-recursion.err.exp b/tests/functional/lang/eval-fail-mutual-recursion.err.exp
index dc2e11766..c034afcd5 100644
--- a/tests/functional/lang/eval-fail-mutual-recursion.err.exp
+++ b/tests/functional/lang/eval-fail-mutual-recursion.err.exp
@@ -54,4 +54,11 @@ error:
 
        (21 duplicate frames omitted)
 
+       … while calling the 'throw' builtin
+         at /pwd/lang/eval-fail-mutual-recursion.nix:34:10:
+           33|     then throwAfterB true 10
+           34|     else throw "Uh oh!";
+             |          ^
+           35| in
+
        error: Uh oh!

From d3bff699aac0ff940e7e5551e39b53e62e780281 Mon Sep 17 00:00:00 2001
From: ramboman 
Date: Fri, 23 Feb 2024 01:05:25 -0500
Subject: [PATCH 136/164] `nix`: Fix `haveInternet` to check for proxy

---
 src/nix/main.cc | 21 +++++++++++++++++++++
 1 file changed, 21 insertions(+)

diff --git a/src/nix/main.cc b/src/nix/main.cc
index 39c04069b..687c072e0 100644
--- a/src/nix/main.cc
+++ b/src/nix/main.cc
@@ -23,6 +23,7 @@
 #include 
 #include 
 #include 
+#include 
 
 #include 
 
@@ -32,6 +33,24 @@ void chrootHelper(int argc, char * * argv);
 
 namespace nix {
 
+static bool haveProxyEnvironmentVariables()
+{
+    static const char * const proxyVariables[] = {
+        "http_proxy",
+        "https_proxy",
+        "ftp_proxy",
+        "HTTP_PROXY",
+        "HTTPS_PROXY",
+        "FTP_PROXY"
+    };
+    for (auto & proxyVariable: proxyVariables) {
+        if (std::getenv(proxyVariable)) {
+            return true;
+        }
+    }
+    return false;
+}
+
 /* Check if we have a non-loopback/link-local network interface. */
 static bool haveInternet()
 {
@@ -55,6 +74,8 @@ static bool haveInternet()
         }
     }
 
+    if (haveProxyEnvironmentVariables()) return true;
+
     return false;
 }
 

From bca737dcad2401b81d60f6ecf3f163b9346b5556 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Th=C3=A9ophane=20Hufschmitt?=
 
Date: Fri, 23 Feb 2024 10:28:37 +0100
Subject: [PATCH 137/164] c++-ize the proxy detection code

Just for consistency with the rest
---
 src/nix/main.cc | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

diff --git a/src/nix/main.cc b/src/nix/main.cc
index 687c072e0..5af5f2e41 100644
--- a/src/nix/main.cc
+++ b/src/nix/main.cc
@@ -23,7 +23,6 @@
 #include 
 #include 
 #include 
-#include 
 
 #include 
 
@@ -35,7 +34,7 @@ namespace nix {
 
 static bool haveProxyEnvironmentVariables()
 {
-    static const char * const proxyVariables[] = {
+    static const std::vector proxyVariables = {
         "http_proxy",
         "https_proxy",
         "ftp_proxy",
@@ -44,7 +43,7 @@ static bool haveProxyEnvironmentVariables()
         "FTP_PROXY"
     };
     for (auto & proxyVariable: proxyVariables) {
-        if (std::getenv(proxyVariable)) {
+        if (getEnv(proxyVariable).has_value()) {
             return true;
         }
     }

From 24fd7e2755bed3a854f8089c2db2fed89eb07f56 Mon Sep 17 00:00:00 2001
From: ramboman 
Date: Sat, 24 Feb 2024 01:00:13 +0000
Subject: [PATCH 138/164] `install-multi-user.sh`: `_sudo`: add proxy variables
 to sudo

---
 scripts/install-multi-user.sh | 27 ++++++++++++++++++++++++++-
 1 file changed, 26 insertions(+), 1 deletion(-)

diff --git a/scripts/install-multi-user.sh b/scripts/install-multi-user.sh
index ad3ee8881..1dbb93bf9 100644
--- a/scripts/install-multi-user.sh
+++ b/scripts/install-multi-user.sh
@@ -58,6 +58,31 @@ readonly EXTRACTED_NIX_PATH="$(dirname "$0")"
 
 readonly ROOT_HOME=~root
 
+readonly PROXY_ENVIRONMENT_VARIABLES=(
+    http_proxy
+    https_proxy
+    ftp_proxy
+    no_proxy
+    HTTP_PROXY
+    HTTPS_PROXY
+    FTP_PROXY
+    NO_PROXY
+)
+
+SUDO_EXTRA_ENVIRONMENT_VARIABLES=()
+
+setup_sudo_extra_environment_variables() {
+    local i=${#SUDO_EXTRA_ENVIRONMENT_VARIABLES[@]}
+    for variable in "${PROXY_ENVIRONMENT_VARIABLES[@]}"; do
+        if [ "x${!variable:-}" != "x" ]; then
+            SUDO_EXTRA_ENVIRONMENT_VARIABLES[i]="$variable=${!variable}"
+            i=$((i + 1))
+        fi
+    done
+}
+
+setup_sudo_extra_environment_variables
+
 if [ -t 0 ] && [ -z "${NIX_INSTALLER_YES:-}" ]; then
     readonly IS_HEADLESS='no'
 else
@@ -361,7 +386,7 @@ _sudo() {
     if is_root; then
         env "$@"
     else
-        sudo "$@"
+        sudo "${SUDO_EXTRA_ENVIRONMENT_VARIABLES[@]}" "$@"
     fi
 }
 

From 5598ce3e0f3a3cfce69d008c808920950e8c1139 Mon Sep 17 00:00:00 2001
From: zimbatm 
Date: Sat, 24 Feb 2024 11:15:58 +0100
Subject: [PATCH 139/164] ci: fix docker default tag

Docker uses "latest" as the default label instead of "master".

This change will allow running `docker run ghcr.io/nixos/nix` without having
to specify the label.

It keeps the :master label on docker hub for back-compat.
---
 .github/workflows/ci.yml | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index fdd2d67f6..38126dd68 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -153,6 +153,8 @@ jobs:
         IMAGE_ID=$(echo $IMAGE_ID | tr '[A-Z]' '[a-z]')
 
         docker tag nix:$NIX_VERSION $IMAGE_ID:$NIX_VERSION
-        docker tag nix:$NIX_VERSION $IMAGE_ID:master
+        docker tag nix:$NIX_VERSION $IMAGE_ID:latest
         docker push $IMAGE_ID:$NIX_VERSION
+        docker push $IMAGE_ID:latest
+        # deprecated 2024-02-24
         docker push $IMAGE_ID:master

From d83008c3a797c8e4ec1e1a97c5b1bc5e6b02c561 Mon Sep 17 00:00:00 2001
From: Johannes Kirschbauer 
Date: Sat, 24 Feb 2024 19:34:53 +0700
Subject: [PATCH 140/164] documentation: clarify genericClosure (#10003)

* doc: clarify genericClosure documentation

Co-authored-by: Valentin Gagarin 
---
 src/libexpr/primops.cc | 63 ++++++++++++++++++++++++++----------------
 1 file changed, 39 insertions(+), 24 deletions(-)

diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc
index 835afba82..850cc7a45 100644
--- a/src/libexpr/primops.cc
+++ b/src/libexpr/primops.cc
@@ -705,38 +705,53 @@ static RegisterPrimOp primop_genericClosure(PrimOp {
     .args = {"attrset"},
     .arity = 1,
     .doc = R"(
-      Take an *attrset* with values named `startSet` and `operator` in order to
-      return a *list of attrsets* by starting with the `startSet` and recursively
-      applying the `operator` function to each `item`. The *attrsets* in the
-      `startSet` and the *attrsets* produced by `operator` must contain a value
-      named `key` which is comparable. The result is produced by calling `operator`
-      for each `item` with a value for `key` that has not been called yet including
-      newly produced `item`s. The function terminates when no new `item`s are
-      produced. The resulting *list of attrsets* contains only *attrsets* with a
-      unique key. For example,
+      `builtins.genericClosure` iteratively computes the transitive closure over an arbitrary relation defined by a function.
 
-      ```
-      builtins.genericClosure {
-        startSet = [ {key = 5;} ];
-        operator = item: [{
-          key = if (item.key / 2 ) * 2 == item.key
-               then item.key / 2
-               else 3 * item.key + 1;
-        }];
-      }
-      ```
-      evaluates to
-      ```
-      [ { key = 5; } { key = 16; } { key = 8; } { key = 4; } { key = 2; } { key = 1; } ]
-      ```
+      It takes an *attrset* with two attributes named `startSet` and `operator`, and returns a list of attribute sets:
+
+      - `startSet`:
+        The initial list of attribute sets.
+
+      - `operator`:
+        A function that takes an attribute set and returns a list of attribute sets.
+        It defines how each item in the current set is processed and expanded into more items.
+
+      Each attribute set in the list `startSet` and the list returned by `operator` must have an attribute `key`, which must support equality comparison.
+      The value of `key` can be one of the following types:
 
-      `key` can be one of the following types:
       - [Number](@docroot@/language/values.md#type-number)
       - [Boolean](@docroot@/language/values.md#type-boolean)
       - [String](@docroot@/language/values.md#type-string)
       - [Path](@docroot@/language/values.md#type-path)
       - [List](@docroot@/language/values.md#list)
 
+      The result is produced by calling the `operator` on each `item` that has not been called yet, including newly added items, until no new items are added.
+      Items are compared by their `key` attribute.
+
+      Common usages are:
+
+      - Generating unique collections of items, such as dependency graphs.
+      - Traversing through structures that may contain cycles or loops.
+      - Processing data structures with complex internal relationships.
+
+      > **Example**
+      >
+      > ```nix
+      > builtins.genericClosure {
+      >   startSet = [ {key = 5;} ];
+      >   operator = item: [{
+      >     key = if (item.key / 2 ) * 2 == item.key
+      >          then item.key / 2
+      >          else 3 * item.key + 1;
+      >   }];
+      > }
+      > ```
+      >
+      > evaluates to
+      >
+      > ```nix
+      > [ { key = 5; } { key = 16; } { key = 8; } { key = 4; } { key = 2; } { key = 1; } ]
+      > ```
       )",
     .fun = prim_genericClosure,
 });

From a82aeedb5b9e24c9788febab3dcf65169b79cece Mon Sep 17 00:00:00 2001
From: Jade Lovelace 
Date: Sat, 24 Feb 2024 15:52:16 -0800
Subject: [PATCH 141/164] Warn on implicit switch case fallthrough

This seems to have found one actual bug in fs-sink.cc: the symlink case
was falling into the regular file case, which can't possibly be
intentional, right?
---
 Makefile               | 2 +-
 src/libexpr/lexer.l    | 3 +++
 src/libutil/fs-sink.cc | 2 ++
 3 files changed, 6 insertions(+), 1 deletion(-)

diff --git a/Makefile b/Makefile
index d3542c3e9..f8689c8cf 100644
--- a/Makefile
+++ b/Makefile
@@ -81,7 +81,7 @@ ifdef HOST_WINDOWS
   GLOBAL_LDFLAGS += -Wl,--export-all-symbols
 endif
 
-GLOBAL_CXXFLAGS += -g -Wall -include $(buildprefix)config.h -std=c++2a -I src
+GLOBAL_CXXFLAGS += -g -Wall -Wimplicit-fallthrough -include $(buildprefix)config.h -std=c++2a -I src
 
 # Include the main lib, causing rules to be defined
 
diff --git a/src/libexpr/lexer.l b/src/libexpr/lexer.l
index 380048c77..5b26d6927 100644
--- a/src/libexpr/lexer.l
+++ b/src/libexpr/lexer.l
@@ -94,6 +94,9 @@ static StringToken unescapeStr(SymbolTable & symbols, char * s, size_t length)
 
 }
 
+// yacc generates code that uses unannotated fallthrough.
+#pragma GCC diagnostic ignored "-Wimplicit-fallthrough"
+
 #define YY_USER_INIT initLoc(yylloc)
 #define YY_USER_ACTION adjustLoc(yylloc, yytext, yyleng);
 
diff --git a/src/libutil/fs-sink.cc b/src/libutil/fs-sink.cc
index 95b6088da..35ce0ac36 100644
--- a/src/libutil/fs-sink.cc
+++ b/src/libutil/fs-sink.cc
@@ -15,6 +15,7 @@ void copyRecursive(
     case SourceAccessor::tSymlink:
     {
         sink.createSymlink(to, accessor.readLink(from));
+        break;
     }
 
     case SourceAccessor::tRegular:
@@ -38,6 +39,7 @@ void copyRecursive(
                 sink, to + "/" + name);
             break;
         }
+        break;
     }
 
     case SourceAccessor::tMisc:

From 8ac4542593e583e86009d953d89f8683f7eef9fb Mon Sep 17 00:00:00 2001
From: Yueh-Shun Li 
Date: Mon, 26 Feb 2024 17:59:07 +0800
Subject: [PATCH 142/164] .gitignore: ignore historical test binaries

After commit 91b6833686a6 (" Move tests to separate directories, and
document"), previously-built test executables are now tracked by Git,
which is annoying for developers.

This patch adds .gitignore rules to ignore the obsolete test directories
to solve this problem and enhance the developer experience.
---
 .gitignore | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/.gitignore b/.gitignore
index a0a0786ed..5c1136823 100644
--- a/.gitignore
+++ b/.gitignore
@@ -45,13 +45,16 @@ perl/Makefile.config
 /src/libexpr/parser-tab.hh
 /src/libexpr/parser-tab.output
 /src/libexpr/nix.tbl
+/src/libexpr/tests
 /tests/unit/libexpr/libnixexpr-tests
 
 # /src/libstore/
 *.gen.*
+/src/libstore/tests
 /tests/unit/libstore/libnixstore-tests
 
 # /src/libutil/
+/src/libutil/tests
 /tests/unit/libutil/libnixutil-tests
 
 /src/nix/nix

From 219705ff64cc3411bc92b88fe369cd2999604986 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Th=C3=A9ophane=20Hufschmitt?=
 
Date: Mon, 26 Feb 2024 11:04:19 +0100
Subject: [PATCH 143/164] Remove dead code

Most of the code in `git.{cc,hh}` is dead, so get rid of it.
---
 src/libutil/fs-sink.cc    |  46 ------
 src/libutil/fs-sink.hh    |   7 -
 src/libutil/git.cc        | 289 --------------------------------------
 src/libutil/git.hh        | 152 --------------------
 tests/unit/libutil/git.cc | 205 ---------------------------
 5 files changed, 699 deletions(-)

diff --git a/src/libutil/fs-sink.cc b/src/libutil/fs-sink.cc
index 35ce0ac36..0ebd750f6 100644
--- a/src/libutil/fs-sink.cc
+++ b/src/libutil/fs-sink.cc
@@ -5,52 +5,6 @@
 
 namespace nix {
 
-void copyRecursive(
-    SourceAccessor & accessor, const CanonPath & from,
-    FileSystemObjectSink & sink, const Path & to)
-{
-    auto stat = accessor.lstat(from);
-
-    switch (stat.type) {
-    case SourceAccessor::tSymlink:
-    {
-        sink.createSymlink(to, accessor.readLink(from));
-        break;
-    }
-
-    case SourceAccessor::tRegular:
-    {
-        sink.createRegularFile(to, [&](CreateRegularFileSink & crf) {
-            if (stat.isExecutable)
-                crf.isExecutable();
-            accessor.readFile(from, crf, [&](uint64_t size) {
-                crf.preallocateContents(size);
-            });
-        });
-        break;
-    }
-
-    case SourceAccessor::tDirectory:
-    {
-        sink.createDirectory(to);
-        for (auto & [name, _] : accessor.readDirectory(from)) {
-            copyRecursive(
-                accessor, from / name,
-                sink, to + "/" + name);
-            break;
-        }
-        break;
-    }
-
-    case SourceAccessor::tMisc:
-        throw Error("file '%1%' has an unsupported type", from);
-
-    default:
-        abort();
-    }
-}
-
-
 struct RestoreSinkSettings : Config
 {
     Setting preallocateContents{this, false, "preallocate-contents",
diff --git a/src/libutil/fs-sink.hh b/src/libutil/fs-sink.hh
index ae577819a..670b55c2b 100644
--- a/src/libutil/fs-sink.hh
+++ b/src/libutil/fs-sink.hh
@@ -41,13 +41,6 @@ struct FileSystemObjectSink
     virtual void createSymlink(const Path & path, const std::string & target) = 0;
 };
 
-/**
- * Recursively copy file system objects from the source into the sink.
- */
-void copyRecursive(
-    SourceAccessor & accessor, const CanonPath & sourcePath,
-    FileSystemObjectSink & sink, const Path & destPath);
-
 /**
  * Ignore everything and do nothing
  */
diff --git a/src/libutil/git.cc b/src/libutil/git.cc
index 5733531fa..029e1af44 100644
--- a/src/libutil/git.cc
+++ b/src/libutil/git.cc
@@ -5,302 +5,13 @@
 #include 
 #include  // for strcasecmp
 
-#include "signals.hh"
-#include "config.hh"
-#include "hash.hh"
-#include "posix-source-accessor.hh"
-
 #include "git.hh"
-#include "serialise.hh"
 
 namespace nix::git {
 
 using namespace nix;
 using namespace std::string_literals;
 
-std::optional decodeMode(RawMode m) {
-    switch (m) {
-        case (RawMode) Mode::Directory:
-        case (RawMode) Mode::Executable:
-        case (RawMode) Mode::Regular:
-        case (RawMode) Mode::Symlink:
-            return (Mode) m;
-        default:
-            return std::nullopt;
-    }
-}
-
-
-static std::string getStringUntil(Source & source, char byte)
-{
-    std::string s;
-    char n[1];
-    source(std::string_view { n, 1 });
-    while (*n != byte) {
-        s += *n;
-        source(std::string_view { n, 1 });
-    }
-    return s;
-}
-
-
-static std::string getString(Source & source, int n)
-{
-    std::string v;
-    v.resize(n);
-    source(v);
-    return v;
-}
-
-void parseBlob(
-    FileSystemObjectSink & sink,
-    const Path & sinkPath,
-    Source & source,
-    bool executable,
-    const ExperimentalFeatureSettings & xpSettings)
-{
-    xpSettings.require(Xp::GitHashing);
-
-    sink.createRegularFile(sinkPath, [&](auto & crf) {
-        if (executable)
-            crf.isExecutable();
-
-        unsigned long long size = std::stoi(getStringUntil(source, 0));
-
-        crf.preallocateContents(size);
-
-        unsigned long long left = size;
-        std::string buf;
-        buf.reserve(65536);
-
-        while (left) {
-            checkInterrupt();
-            buf.resize(std::min((unsigned long long)buf.capacity(), left));
-            source(buf);
-            crf(buf);
-            left -= buf.size();
-        }
-    });
-}
-
-void parseTree(
-    FileSystemObjectSink & sink,
-    const Path & sinkPath,
-    Source & source,
-    std::function hook,
-    const ExperimentalFeatureSettings & xpSettings)
-{
-    unsigned long long size = std::stoi(getStringUntil(source, 0));
-    unsigned long long left = size;
-
-    sink.createDirectory(sinkPath);
-
-    while (left) {
-        std::string perms = getStringUntil(source, ' ');
-        left -= perms.size();
-        left -= 1;
-
-        RawMode rawMode = std::stoi(perms, 0, 8);
-        auto modeOpt = decodeMode(rawMode);
-        if (!modeOpt)
-            throw Error("Unknown Git permission: %o", perms);
-        auto mode = std::move(*modeOpt);
-
-        std::string name = getStringUntil(source, '\0');
-        left -= name.size();
-        left -= 1;
-
-        std::string hashs = getString(source, 20);
-        left -= 20;
-
-        Hash hash(HashAlgorithm::SHA1);
-        std::copy(hashs.begin(), hashs.end(), hash.hash);
-
-        hook(name, TreeEntry {
-            .mode = mode,
-            .hash = hash,
-        });
-    }
-}
-
-ObjectType parseObjectType(
-    Source & source,
-    const ExperimentalFeatureSettings & xpSettings)
-{
-    xpSettings.require(Xp::GitHashing);
-
-    auto type = getString(source, 5);
-
-    if (type == "blob ") {
-        return ObjectType::Blob;
-    } else if (type == "tree ") {
-        return ObjectType::Tree;
-    } else throw Error("input doesn't look like a Git object");
-}
-
-void parse(
-    FileSystemObjectSink & sink,
-    const Path & sinkPath,
-    Source & source,
-    bool executable,
-    std::function hook,
-    const ExperimentalFeatureSettings & xpSettings)
-{
-    xpSettings.require(Xp::GitHashing);
-
-    auto type = parseObjectType(source, xpSettings);
-
-    switch (type) {
-    case ObjectType::Blob:
-        parseBlob(sink, sinkPath, source, executable, xpSettings);
-        break;
-    case ObjectType::Tree:
-        parseTree(sink, sinkPath, source, hook, xpSettings);
-        break;
-    default:
-        assert(false);
-    };
-}
-
-
-std::optional convertMode(SourceAccessor::Type type)
-{
-    switch (type) {
-    case SourceAccessor::tSymlink:   return Mode::Symlink;
-    case SourceAccessor::tRegular:   return Mode::Regular;
-    case SourceAccessor::tDirectory: return Mode::Directory;
-    case SourceAccessor::tMisc:      return std::nullopt;
-    default: abort();
-    }
-}
-
-
-void restore(FileSystemObjectSink & sink, Source & source, std::function hook)
-{
-    parse(sink, "", source, false, [&](Path name, TreeEntry entry) {
-        auto [accessor, from] = hook(entry.hash);
-        auto stat = accessor->lstat(from);
-        auto gotOpt = convertMode(stat.type);
-        if (!gotOpt)
-            throw Error("file '%s' (git hash %s) has an unsupported type",
-                from,
-                entry.hash.to_string(HashFormat::Base16, false));
-        auto & got = *gotOpt;
-        if (got != entry.mode)
-            throw Error("git mode of file '%s' (git hash %s) is %o but expected %o",
-                from,
-                entry.hash.to_string(HashFormat::Base16, false),
-                (RawMode) got,
-                (RawMode) entry.mode);
-        copyRecursive(
-            *accessor, from,
-            sink, name);
-    });
-}
-
-
-void dumpBlobPrefix(
-    uint64_t size, Sink & sink,
-    const ExperimentalFeatureSettings & xpSettings)
-{
-    xpSettings.require(Xp::GitHashing);
-    auto s = fmt("blob %d\0"s, std::to_string(size));
-    sink(s);
-}
-
-
-void dumpTree(const Tree & entries, Sink & sink,
-    const ExperimentalFeatureSettings & xpSettings)
-{
-    xpSettings.require(Xp::GitHashing);
-
-    std::string v1;
-
-    for (auto & [name, entry] : entries) {
-        auto name2 = name;
-        if (entry.mode == Mode::Directory) {
-            assert(name2.back() == '/');
-            name2.pop_back();
-        }
-        v1 += fmt("%o %s\0"s, static_cast(entry.mode), name2);
-        std::copy(entry.hash.hash, entry.hash.hash + entry.hash.hashSize, std::back_inserter(v1));
-    }
-
-    {
-        auto s = fmt("tree %d\0"s, v1.size());
-        sink(s);
-    }
-
-    sink(v1);
-}
-
-
-Mode dump(
-    SourceAccessor & accessor, const CanonPath & path,
-    Sink & sink,
-    std::function hook,
-    PathFilter & filter,
-    const ExperimentalFeatureSettings & xpSettings)
-{
-    auto st = accessor.lstat(path);
-
-    switch (st.type) {
-    case SourceAccessor::tRegular:
-    {
-        accessor.readFile(path, sink, [&](uint64_t size) {
-            dumpBlobPrefix(size, sink, xpSettings);
-        });
-        return st.isExecutable
-            ? Mode::Executable
-            : Mode::Regular;
-    }
-
-    case SourceAccessor::tDirectory:
-    {
-        Tree entries;
-        for (auto & [name, _] : accessor.readDirectory(path)) {
-            auto child = path / name;
-            if (!filter(child.abs())) continue;
-
-            auto entry = hook(child);
-
-            auto name2 = name;
-            if (entry.mode == Mode::Directory)
-                name2 += "/";
-
-            entries.insert_or_assign(std::move(name2), std::move(entry));
-        }
-        dumpTree(entries, sink, xpSettings);
-        return Mode::Directory;
-    }
-
-    case SourceAccessor::tSymlink:
-    case SourceAccessor::tMisc:
-    default:
-        throw Error("file '%1%' has an unsupported type", path);
-    }
-}
-
-
-TreeEntry dumpHash(
-        HashAlgorithm ha,
-        SourceAccessor & accessor, const CanonPath & path, PathFilter & filter)
-{
-    std::function hook;
-    hook = [&](const CanonPath & path) -> TreeEntry {
-        auto hashSink = HashSink(ha);
-        auto mode = dump(accessor, path, hashSink, hook, filter);
-        auto hash = hashSink.finish().first;
-        return {
-            .mode = mode,
-            .hash = hash,
-        };
-    };
-
-    return hook(path);
-}
-
-
 std::optional parseLsRemoteLine(std::string_view line)
 {
     const static std::regex line_regex("^(ref: *)?([^\\s]+)(?:\\t+(.*))?$");
diff --git a/src/libutil/git.hh b/src/libutil/git.hh
index d9eb138e1..dea351929 100644
--- a/src/libutil/git.hh
+++ b/src/libutil/git.hh
@@ -5,160 +5,8 @@
 #include 
 #include 
 
-#include "types.hh"
-#include "serialise.hh"
-#include "hash.hh"
-#include "source-accessor.hh"
-#include "fs-sink.hh"
-
 namespace nix::git {
 
-enum struct ObjectType {
-    Blob,
-    Tree,
-    //Commit,
-    //Tag,
-};
-
-using RawMode = uint32_t;
-
-enum struct Mode : RawMode {
-    Directory = 0040000,
-    Regular = 0100644,
-    Executable = 0100755,
-    Symlink = 0120000,
-};
-
-std::optional decodeMode(RawMode m);
-
-/**
- * An anonymous Git tree object entry (no name part).
- */
-struct TreeEntry
-{
-    Mode mode;
-    Hash hash;
-
-    GENERATE_CMP(TreeEntry, me->mode, me->hash);
-};
-
-/**
- * A Git tree object, fully decoded and stored in memory.
- *
- * Directory names must end in a `/` for sake of sorting. See
- * https://github.com/mirage/irmin/issues/352
- */
-using Tree = std::map;
-
-/**
- * Callback for processing a child hash with `parse`
- *
- * The function should
- *
- * 1. Obtain the file system objects denoted by `gitHash`
- *
- * 2. Ensure they match `mode`
- *
- * 3. Feed them into the same sink `parse` was called with
- *
- * Implementations may seek to memoize resources (bandwidth, storage,
- * etc.) for the same Git hash.
- */
-using SinkHook = void(const Path & name, TreeEntry entry);
-
-/**
- * Parse the "blob " or "tree " prefix.
- *
- * @throws if prefix not recognized
- */
-ObjectType parseObjectType(
-    Source & source,
-    const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);
-
-void parseBlob(
-    FileSystemObjectSink & sink, const Path & sinkPath,
-    Source & source,
-    bool executable,
-    const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);
-
-void parseTree(
-    FileSystemObjectSink & sink, const Path & sinkPath,
-    Source & source,
-    std::function hook,
-    const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);
-
-/**
- * Helper putting the previous three `parse*` functions together.
- */
-void parse(
-    FileSystemObjectSink & sink, const Path & sinkPath,
-    Source & source,
-    bool executable,
-    std::function hook,
-    const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);
-
-/**
- * Assists with writing a `SinkHook` step (2).
- */
-std::optional convertMode(SourceAccessor::Type type);
-
-/**
- * Simplified version of `SinkHook` for `restore`.
- *
- * Given a `Hash`, return a `SourceAccessor` and `CanonPath` pointing to
- * the file system object with that path.
- */
-using RestoreHook = std::pair(Hash);
-
-/**
- * Wrapper around `parse` and `RestoreSink`
- */
-void restore(FileSystemObjectSink & sink, Source & source, std::function hook);
-
-/**
- * Dumps a single file to a sink
- *
- * @param xpSettings for testing purposes
- */
-void dumpBlobPrefix(
-    uint64_t size, Sink & sink,
-    const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);
-
-/**
- * Dumps a representation of a git tree to a sink
- */
-void dumpTree(
-    const Tree & entries, Sink & sink,
-    const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);
-
-/**
- * Callback for processing a child with `dump`
- *
- * The function should return the Git hash and mode of the file at the
- * given path in the accessor passed to `dump`.
- *
- * Note that if the child is a directory, its child in must also be so
- * processed in order to compute this information.
- */
-using DumpHook = TreeEntry(const CanonPath & path);
-
-Mode dump(
-    SourceAccessor & accessor, const CanonPath & path,
-    Sink & sink,
-    std::function hook,
-    PathFilter & filter = defaultPathFilter,
-    const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);
-
-/**
- * Recursively dumps path, hashing as we go.
- *
- * A smaller wrapper around `dump`.
- */
-TreeEntry dumpHash(
-            HashAlgorithm ha,
-            SourceAccessor & accessor, const CanonPath & path,
-            PathFilter & filter = defaultPathFilter);
-
 /**
  * A line from the output of `git ls-remote --symref`.
  *
diff --git a/tests/unit/libutil/git.cc b/tests/unit/libutil/git.cc
index 76ef86bcf..73bbd049e 100644
--- a/tests/unit/libutil/git.cc
+++ b/tests/unit/libutil/git.cc
@@ -9,211 +9,6 @@ namespace nix {
 
 using namespace git;
 
-class GitTest : public CharacterizationTest
-{
-    Path unitTestData = getUnitTestData() + "/git";
-
-public:
-
-    Path goldenMaster(std::string_view testStem) const override {
-        return unitTestData + "/" + testStem;
-    }
-
-    /**
-     * We set these in tests rather than the regular globals so we don't have
-     * to worry about race conditions if the tests run concurrently.
-     */
-    ExperimentalFeatureSettings mockXpSettings;
-
-private:
-
-    void SetUp() override
-    {
-        mockXpSettings.set("experimental-features", "git-hashing");
-    }
-};
-
-TEST(GitMode, gitMode_directory) {
-    Mode m = Mode::Directory;
-    RawMode r = 0040000;
-    ASSERT_EQ(static_cast(m), r);
-    ASSERT_EQ(decodeMode(r), std::optional { m });
-};
-
-TEST(GitMode, gitMode_executable) {
-    Mode m = Mode::Executable;
-    RawMode r = 0100755;
-    ASSERT_EQ(static_cast(m), r);
-    ASSERT_EQ(decodeMode(r), std::optional { m });
-};
-
-TEST(GitMode, gitMode_regular) {
-    Mode m = Mode::Regular;
-    RawMode r = 0100644;
-    ASSERT_EQ(static_cast(m), r);
-    ASSERT_EQ(decodeMode(r), std::optional { m });
-};
-
-TEST(GitMode, gitMode_symlink) {
-    Mode m = Mode::Symlink;
-    RawMode r = 0120000;
-    ASSERT_EQ(static_cast(m), r);
-    ASSERT_EQ(decodeMode(r), std::optional { m });
-};
-
-TEST_F(GitTest, blob_read) {
-    readTest("hello-world-blob.bin", [&](const auto & encoded) {
-        StringSource in { encoded };
-        StringSink out;
-        RegularFileSink out2 { out };
-        ASSERT_EQ(parseObjectType(in, mockXpSettings), ObjectType::Blob);
-        parseBlob(out2, "", in, false, mockXpSettings);
-
-        auto expected = readFile(goldenMaster("hello-world.bin"));
-
-        ASSERT_EQ(out.s, expected);
-    });
-}
-
-TEST_F(GitTest, blob_write) {
-    writeTest("hello-world-blob.bin", [&]() {
-        auto decoded = readFile(goldenMaster("hello-world.bin"));
-        StringSink s;
-        dumpBlobPrefix(decoded.size(), s, mockXpSettings);
-        s(decoded);
-        return s.s;
-    });
-}
-
-/**
- * This data is for "shallow" tree tests. However, we use "real" hashes
- * so that we can check our test data in a small shell script test test
- * (`tests/unit/libutil/data/git/check-data.sh`).
- */
-const static Tree tree = {
-    {
-        "Foo",
-        {
-            .mode = Mode::Regular,
-            // hello world with special chars from above
-            .hash = Hash::parseAny("63ddb340119baf8492d2da53af47e8c7cfcd5eb2", HashAlgorithm::SHA1),
-        },
-    },
-    {
-        "bAr",
-        {
-            .mode = Mode::Executable,
-            // ditto
-            .hash = Hash::parseAny("63ddb340119baf8492d2da53af47e8c7cfcd5eb2", HashAlgorithm::SHA1),
-        },
-    },
-    {
-        "baZ/",
-        {
-            .mode = Mode::Directory,
-            // Empty directory hash
-            .hash = Hash::parseAny("4b825dc642cb6eb9a060e54bf8d69288fbee4904", HashAlgorithm::SHA1),
-        },
-    },
-};
-
-TEST_F(GitTest, tree_read) {
-    readTest("tree.bin", [&](const auto & encoded) {
-        StringSource in { encoded };
-        NullFileSystemObjectSink out;
-        Tree got;
-        ASSERT_EQ(parseObjectType(in, mockXpSettings), ObjectType::Tree);
-        parseTree(out, "", in, [&](auto & name, auto entry) {
-            auto name2 = name;
-            if (entry.mode == Mode::Directory)
-                name2 += '/';
-            got.insert_or_assign(name2, std::move(entry));
-        }, mockXpSettings);
-
-        ASSERT_EQ(got, tree);
-    });
-}
-
-TEST_F(GitTest, tree_write) {
-    writeTest("tree.bin", [&]() {
-        StringSink s;
-        dumpTree(tree, s, mockXpSettings);
-        return s.s;
-    });
-}
-
-TEST_F(GitTest, both_roundrip) {
-    using File = MemorySourceAccessor::File;
-
-    MemorySourceAccessor files;
-    files.root = File::Directory {
-        .contents {
-            {
-                "foo",
-                File::Regular {
-                    .contents = "hello\n\0\n\tworld!",
-                },
-            },
-            {
-                "bar",
-                File::Directory {
-                    .contents = {
-                        {
-                            "baz",
-                            File::Regular {
-                                .executable = true,
-                                .contents = "good day,\n\0\n\tworld!",
-                            },
-                        },
-                    },
-                },
-            },
-        },
-    };
-
-    std::map cas;
-
-    std::function dumpHook;
-    dumpHook = [&](const CanonPath & path) {
-        StringSink s;
-        HashSink hashSink { HashAlgorithm::SHA1 };
-        TeeSink s2 { s, hashSink };
-        auto mode = dump(
-            files, path, s2, dumpHook,
-            defaultPathFilter, mockXpSettings);
-        auto hash = hashSink.finish().first;
-        cas.insert_or_assign(hash, std::move(s.s));
-        return TreeEntry {
-            .mode = mode,
-            .hash = hash,
-        };
-    };
-
-    auto root = dumpHook(CanonPath::root);
-
-    MemorySourceAccessor files2;
-
-    MemorySink sinkFiles2 { files2 };
-
-    std::function mkSinkHook;
-    mkSinkHook = [&](auto prefix, auto & hash, auto executable) {
-        StringSource in { cas[hash] };
-        parse(
-            sinkFiles2, prefix, in, executable,
-            [&](const Path & name, const auto & entry) {
-                mkSinkHook(
-                    prefix + "/" + name,
-                    entry.hash,
-                    entry.mode == Mode::Executable);
-            },
-            mockXpSettings);
-    };
-
-    mkSinkHook("", root.hash, false);
-
-    ASSERT_EQ(files, files2);
-}
-
 TEST(GitLsRemote, parseSymrefLineWithReference) {
     auto line = "ref: refs/head/main	HEAD";
     auto res = parseLsRemoteLine(line);

From fd47f76da9752d0bec35e58525e5aacfd3e7dd26 Mon Sep 17 00:00:00 2001
From: Yueh-Shun Li 
Date: Mon, 26 Feb 2024 02:04:20 +0800
Subject: [PATCH 144/164] treewide: hash type -> hash algorithm

"hash type" -> "hash algorithm" in all comments, documentation, and
messages.

ht -> ha, [Hh]ashType -> [Hh]ashAlgo for all local variables and
function arguments. No API change is made.

Continuation of 5334c9c792a2 and 837b889c4154.
---
 src/libstore/content-address.cc     | 6 +++---
 src/libstore/content-address.hh     | 8 ++++----
 src/libstore/derivations.cc         | 2 +-
 src/libstore/globals.hh             | 4 ++--
 src/libutil/file-content-address.cc | 4 ++--
 src/libutil/file-content-address.hh | 4 ++--
 src/libutil/hash.cc                 | 2 +-
 src/libutil/hash.hh                 | 4 ++--
 8 files changed, 17 insertions(+), 17 deletions(-)

diff --git a/src/libstore/content-address.cc b/src/libstore/content-address.cc
index 2091f8e02..4e3d2f64d 100644
--- a/src/libstore/content-address.cc
+++ b/src/libstore/content-address.cc
@@ -111,10 +111,10 @@ static std::pair parseContentAddressMethodP
     }
 
     auto parseHashAlgorithm_ = [&](){
-        auto hashTypeRaw = splitPrefixTo(rest, ':');
-        if (!hashTypeRaw)
+        auto hashAlgoRaw = splitPrefixTo(rest, ':');
+        if (!hashAlgoRaw)
             throw UsageError("content address hash must be in form ':', but found: %s", wholeInput);
-        HashAlgorithm hashAlgo = parseHashAlgo(*hashTypeRaw);
+        HashAlgorithm hashAlgo = parseHashAlgo(*hashAlgoRaw);
         return hashAlgo;
     };
 
diff --git a/src/libstore/content-address.hh b/src/libstore/content-address.hh
index 80538df50..5925f8e01 100644
--- a/src/libstore/content-address.hh
+++ b/src/libstore/content-address.hh
@@ -91,17 +91,17 @@ struct ContentAddressMethod
     std::string_view renderPrefix() const;
 
     /**
-     * Parse a content addressing method and hash type.
+     * Parse a content addressing method and hash algorithm.
      */
     static std::pair parseWithAlgo(std::string_view rawCaMethod);
 
     /**
-     * Render a content addressing method and hash type in a
+     * Render a content addressing method and hash algorithm in a
      * nicer way, prefixing both cases.
      *
      * The rough inverse of `parse()`.
      */
-    std::string renderWithAlgo(HashAlgorithm ht) const;
+    std::string renderWithAlgo(HashAlgorithm ha) const;
 
     /**
      * Get the underlying way to content-address file system objects.
@@ -127,7 +127,7 @@ struct ContentAddressMethod
  *   ‘text:sha256:’
  *
  * - `FixedIngestionMethod`:
- *   ‘fixed:::’
+ *   ‘fixed:::’
  */
 struct ContentAddress
 {
diff --git a/src/libstore/derivations.cc b/src/libstore/derivations.cc
index 36042c06c..305ed5b42 100644
--- a/src/libstore/derivations.cc
+++ b/src/libstore/derivations.cc
@@ -701,7 +701,7 @@ DerivationType BasicDerivation::type() const
                     floatingHashAlgo = dof.hashAlgo;
                 } else {
                     if (*floatingHashAlgo != dof.hashAlgo)
-                        throw Error("all floating outputs must use the same hash type");
+                        throw Error("all floating outputs must use the same hash algorithm");
                 }
             },
             [&](const DerivationOutput::Deferred &) {
diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh
index 8330d6571..e6544976a 100644
--- a/src/libstore/globals.hh
+++ b/src/libstore/globals.hh
@@ -1094,8 +1094,8 @@ public:
         this, {}, "hashed-mirrors",
         R"(
           A list of web servers used by `builtins.fetchurl` to obtain files by
-          hash. Given a hash type *ht* and a base-16 hash *h*, Nix will try to
-          download the file from *hashed-mirror*/*ht*/*h*. This allows files to
+          hash. Given a hash algorithm *ha* and a base-16 hash *h*, Nix will try to
+          download the file from *hashed-mirror*/*ha*/*h*. This allows files to
           be downloaded even if they have disappeared from their original URI.
           For example, given an example mirror `http://tarballs.nixos.org/`,
           when building the derivation
diff --git a/src/libutil/file-content-address.cc b/src/libutil/file-content-address.cc
index 6753e0f49..2339024a2 100644
--- a/src/libutil/file-content-address.cc
+++ b/src/libutil/file-content-address.cc
@@ -63,10 +63,10 @@ void restorePath(
 
 HashResult hashPath(
     SourceAccessor & accessor, const CanonPath & path,
-    FileIngestionMethod method, HashAlgorithm ht,
+    FileIngestionMethod method, HashAlgorithm ha,
     PathFilter & filter)
 {
-    HashSink sink { ht };
+    HashSink sink { ha };
     dumpPath(accessor, path, sink, method, filter);
     return sink.finish();
 }
diff --git a/src/libutil/file-content-address.hh b/src/libutil/file-content-address.hh
index 41f23f2af..9a7dae8c6 100644
--- a/src/libutil/file-content-address.hh
+++ b/src/libutil/file-content-address.hh
@@ -63,11 +63,11 @@ void restorePath(
  * Compute the hash of the given file system object according to the
  * given method.
  *
- * The hash is defined as (essentially) hashString(ht, dumpPath(path)).
+ * The hash is defined as (essentially) hashString(ha, dumpPath(path)).
  */
 HashResult hashPath(
     SourceAccessor & accessor, const CanonPath & path,
-    FileIngestionMethod method, HashAlgorithm ht,
+    FileIngestionMethod method, HashAlgorithm ha,
     PathFilter & filter = defaultPathFilter);
 
 }
diff --git a/src/libutil/hash.cc b/src/libutil/hash.cc
index d067da969..d4c9d6533 100644
--- a/src/libutil/hash.cc
+++ b/src/libutil/hash.cc
@@ -274,7 +274,7 @@ Hash newHashAllowEmpty(std::string_view hashStr, std::optional ha
 {
     if (hashStr.empty()) {
         if (!ha)
-            throw BadHash("empty hash requires explicit hash type");
+            throw BadHash("empty hash requires explicit hash algorithm");
         Hash h(*ha);
         warn("found empty hash, assuming '%s'", h.to_string(HashFormat::SRI, true));
         return h;
diff --git a/src/libutil/hash.hh b/src/libutil/hash.hh
index f7e8eb265..e14aae43c 100644
--- a/src/libutil/hash.hh
+++ b/src/libutil/hash.hh
@@ -58,7 +58,7 @@ struct Hash
      * Parse the hash from a string representation in the format
      * "[:]" or "-" (a
      * Subresource Integrity hash expression). If the 'type' argument
-     * is not present, then the hash type must be specified in the
+     * is not present, then the hash algorithm must be specified in the
      * string.
      */
     static Hash parseAny(std::string_view s, std::optional optAlgo);
@@ -200,7 +200,7 @@ std::optional parseHashFormatOpt(std::string_view hashFormatName);
 std::string_view printHashFormat(HashFormat hashFormat);
 
 /**
- * Parse a string representing a hash type.
+ * Parse a string representing a hash algorithm.
  */
 HashAlgorithm parseHashAlgo(std::string_view s);
 

From cefd0302b55b3360dbca59cfcb4bf6a750d6cdcf Mon Sep 17 00:00:00 2001
From: pennae 
Date: Sat, 27 Jan 2024 16:33:34 +0100
Subject: [PATCH 145/164] evaluate inherit (from) exprs only once per directive

desugaring inherit-from to syntactic duplication of the source expr also
duplicates side effects of the source expr (such as trace calls) and
expensive computations (such as derivationStrict).
---
 doc/manual/rl-next/inherit-from-by-need.md    |  7 +++
 src/libexpr/eval.cc                           | 24 ++++++++--
 src/libexpr/nixexpr.cc                        | 44 ++++++++++++++++---
 src/libexpr/nixexpr.hh                        | 16 +++++++
 src/libexpr/parser-state.hh                   | 11 +++++
 src/libexpr/parser.y                          |  7 ++-
 .../lang/eval-okay-inherit-from.err.exp       |  1 -
 .../lang/eval-okay-inherit-from.exp           |  2 +-
 .../lang/eval-okay-inherit-from.nix           | 12 ++++-
 9 files changed, 109 insertions(+), 15 deletions(-)
 create mode 100644 doc/manual/rl-next/inherit-from-by-need.md

diff --git a/doc/manual/rl-next/inherit-from-by-need.md b/doc/manual/rl-next/inherit-from-by-need.md
new file mode 100644
index 000000000..67c2cdedf
--- /dev/null
+++ b/doc/manual/rl-next/inherit-from-by-need.md
@@ -0,0 +1,7 @@
+---
+synopsis: "`inherit (x) ...` evaluates `x` only once"
+prs: 9847
+---
+
+`inherit (x) a b ...` now evaluates the expression `x` only once for all inherited attributes rather than once for each inherited attribute.
+This does not usually have a measurable impact, but side-effects (such as `builtins.trace`) would be duplicated and expensive expressions (such as derivations) could cause a measurable slowdown.
diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc
index 91341e167..a353571af 100644
--- a/src/libexpr/eval.cc
+++ b/src/libexpr/eval.cc
@@ -1186,6 +1186,18 @@ void ExprPath::eval(EvalState & state, Env & env, Value & v)
 }
 
 
+Env * ExprAttrs::buildInheritFromEnv(EvalState & state, Env & up)
+{
+    Env & inheritEnv = state.allocEnv(inheritFromExprs->size());
+    inheritEnv.up = &up;
+
+    Displacement displ = 0;
+    for (auto from : *inheritFromExprs)
+        inheritEnv.values[displ++] = from->maybeThunk(state, up);
+
+    return &inheritEnv;
+}
+
 void ExprAttrs::eval(EvalState & state, Env & env, Value & v)
 {
     v.mkAttrs(state.buildBindings(attrs.size() + dynamicAttrs.size()).finish());
@@ -1197,6 +1209,7 @@ void ExprAttrs::eval(EvalState & state, Env & env, Value & v)
         Env & env2(state.allocEnv(attrs.size()));
         env2.up = &env;
         dynamicEnv = &env2;
+        Env * inheritEnv = inheritFromExprs ? buildInheritFromEnv(state, env2) : nullptr;
 
         AttrDefs::iterator overrides = attrs.find(state.sOverrides);
         bool hasOverrides = overrides != attrs.end();
@@ -1209,9 +1222,9 @@ void ExprAttrs::eval(EvalState & state, Env & env, Value & v)
             Value * vAttr;
             if (hasOverrides && !i.second.inherited()) {
                 vAttr = state.allocValue();
-                mkThunk(*vAttr, *i.second.chooseByKind(&env2, &env, &env2), i.second.e);
+                mkThunk(*vAttr, *i.second.chooseByKind(&env2, &env, inheritEnv), i.second.e);
             } else
-                vAttr = i.second.e->maybeThunk(state, *i.second.chooseByKind(&env2, &env, &env2));
+                vAttr = i.second.e->maybeThunk(state, *i.second.chooseByKind(&env2, &env, inheritEnv));
             env2.values[displ++] = vAttr;
             v.attrs->push_back(Attr(i.first, vAttr, i.second.pos));
         }
@@ -1244,10 +1257,11 @@ void ExprAttrs::eval(EvalState & state, Env & env, Value & v)
     }
 
     else {
+        Env * inheritEnv = inheritFromExprs ? buildInheritFromEnv(state, env) : nullptr;
         for (auto & i : attrs) {
             v.attrs->push_back(Attr(
                     i.first,
-                    i.second.e->maybeThunk(state, *i.second.chooseByKind(&env, &env, &env)),
+                    i.second.e->maybeThunk(state, *i.second.chooseByKind(&env, &env, inheritEnv)),
                     i.second.pos));
         }
     }
@@ -1282,6 +1296,8 @@ void ExprLet::eval(EvalState & state, Env & env, Value & v)
     Env & env2(state.allocEnv(attrs->attrs.size()));
     env2.up = &env;
 
+    Env * inheritEnv = attrs->inheritFromExprs ? attrs->buildInheritFromEnv(state, env2) : nullptr;
+
     /* The recursive attributes are evaluated in the new environment,
        while the inherited attributes are evaluated in the original
        environment. */
@@ -1289,7 +1305,7 @@ void ExprLet::eval(EvalState & state, Env & env, Value & v)
     for (auto & i : attrs->attrs) {
         env2.values[displ++] = i.second.e->maybeThunk(
             state,
-            *i.second.chooseByKind(&env2, &env, &env2));
+            *i.second.chooseByKind(&env2, &env, inheritEnv));
     }
 
     auto dts = state.debugRepl
diff --git a/src/libexpr/nixexpr.cc b/src/libexpr/nixexpr.cc
index 82e69de51..4b805d710 100644
--- a/src/libexpr/nixexpr.cc
+++ b/src/libexpr/nixexpr.cc
@@ -80,7 +80,7 @@ void ExprAttrs::showBindings(const SymbolTable & symbols, std::ostream & str) co
         return sa < sb;
     });
     std::vector inherits;
-    std::map> inheritsFrom;
+    std::map> inheritsFrom;
     for (auto & i : sorted) {
         switch (i->second.kind) {
         case AttrDef::Kind::Plain:
@@ -90,7 +90,8 @@ void ExprAttrs::showBindings(const SymbolTable & symbols, std::ostream & str) co
             break;
         case AttrDef::Kind::InheritedFrom: {
             auto & select = dynamic_cast(*i->second.e);
-            inheritsFrom[select.e].push_back(i->first);
+            auto & from = dynamic_cast(*select.e);
+            inheritsFrom[&from].push_back(i->first);
             break;
         }
         }
@@ -102,7 +103,7 @@ void ExprAttrs::showBindings(const SymbolTable & symbols, std::ostream & str) co
     }
     for (const auto & [from, syms] : inheritsFrom) {
         str << "inherit (";
-        from->show(symbols, str);
+        (*inheritFromExprs)[from->displ]->show(symbols, str);
         str << ")";
         for (auto sym : syms) str << " " << symbols[sym];
         str << "; ";
@@ -328,6 +329,12 @@ void ExprVar::bindVars(EvalState & es, const std::shared_ptr &
     this->level = withLevel;
 }
 
+void ExprInheritFrom::bindVars(EvalState & es, const std::shared_ptr & env)
+{
+    if (es.debugRepl)
+        es.exprEnvs.insert(std::make_pair(this, env));
+}
+
 void ExprSelect::bindVars(EvalState & es, const std::shared_ptr & env)
 {
     if (es.debugRepl)
@@ -351,6 +358,27 @@ void ExprOpHasAttr::bindVars(EvalState & es, const std::shared_ptrbindVars(es, env);
 }
 
+std::shared_ptr ExprAttrs::bindInheritSources(
+    EvalState & es, const std::shared_ptr & env)
+{
+    if (!inheritFromExprs)
+        return nullptr;
+
+    // the inherit (from) source values are inserted into an env of its own, which
+    // does not introduce any variable names.
+    // analysis must see an empty env, or an env that contains only entries with
+    // otherwise unused names to not interfere with regular names. the parser
+    // has already filled all exprs that access this env with appropriate level
+    // and displacement, and nothing else is allowed to access it. ideally we'd
+    // not even *have* an expr that grabs anything from this env since it's fully
+    // invisible, but the evaluator does not allow for this yet.
+    auto inner = std::make_shared(nullptr, env.get(), 0);
+    for (auto from : *inheritFromExprs)
+        from->bindVars(es, env);
+
+    return inner;
+}
+
 void ExprAttrs::bindVars(EvalState & es, const std::shared_ptr & env)
 {
     if (es.debugRepl)
@@ -368,8 +396,9 @@ void ExprAttrs::bindVars(EvalState & es, const std::shared_ptr
 
         // No need to sort newEnv since attrs is in sorted order.
 
+        auto inheritFromEnv = bindInheritSources(es, newEnv);
         for (auto & i : attrs)
-            i.second.e->bindVars(es, i.second.chooseByKind(newEnv, env, newEnv));
+            i.second.e->bindVars(es, i.second.chooseByKind(newEnv, env, inheritFromEnv));
 
         for (auto & i : dynamicAttrs) {
             i.nameExpr->bindVars(es, newEnv);
@@ -377,8 +406,10 @@ void ExprAttrs::bindVars(EvalState & es, const std::shared_ptr
         }
     }
     else {
+        auto inheritFromEnv = bindInheritSources(es, env);
+
         for (auto & i : attrs)
-            i.second.e->bindVars(es, i.second.chooseByKind(env, env, env));
+            i.second.e->bindVars(es, i.second.chooseByKind(env, env, inheritFromEnv));
 
         for (auto & i : dynamicAttrs) {
             i.nameExpr->bindVars(es, env);
@@ -446,8 +477,9 @@ void ExprLet::bindVars(EvalState & es, const std::shared_ptr &
 
     // No need to sort newEnv since attrs->attrs is in sorted order.
 
+    auto inheritFromEnv = attrs->bindInheritSources(es, newEnv);
     for (auto & i : attrs->attrs)
-        i.second.e->bindVars(es, i.second.chooseByKind(newEnv, env, newEnv));
+        i.second.e->bindVars(es, i.second.chooseByKind(newEnv, env, inheritFromEnv));
 
     if (es.debugRepl)
         es.exprEnvs.insert(std::make_pair(this, newEnv));
diff --git a/src/libexpr/nixexpr.hh b/src/libexpr/nixexpr.hh
index 4a93143b4..4bb2ee2f9 100644
--- a/src/libexpr/nixexpr.hh
+++ b/src/libexpr/nixexpr.hh
@@ -135,6 +135,18 @@ struct ExprVar : Expr
     COMMON_METHODS
 };
 
+struct ExprInheritFrom : ExprVar
+{
+    ExprInheritFrom(PosIdx pos, Displacement displ): ExprVar(pos, {})
+    {
+        this->level = 0;
+        this->displ = displ;
+        this->fromWith = nullptr;
+    }
+
+    void bindVars(EvalState & es, const std::shared_ptr & env);
+};
+
 struct ExprSelect : Expr
 {
     PosIdx pos;
@@ -195,6 +207,7 @@ struct ExprAttrs : Expr
     };
     typedef std::map AttrDefs;
     AttrDefs attrs;
+    std::unique_ptr> inheritFromExprs;
     struct DynamicAttrDef {
         Expr * nameExpr, * valueExpr;
         PosIdx pos;
@@ -208,6 +221,9 @@ struct ExprAttrs : Expr
     PosIdx getPos() const override { return pos; }
     COMMON_METHODS
 
+    std::shared_ptr bindInheritSources(
+        EvalState & es, const std::shared_ptr & env);
+    Env * buildInheritFromEnv(EvalState & state, Env & up);
     void showBindings(const SymbolTable & symbols, std::ostream & str) const;
 };
 
diff --git a/src/libexpr/parser-state.hh b/src/libexpr/parser-state.hh
index ae38de130..9aa18a0ae 100644
--- a/src/libexpr/parser-state.hh
+++ b/src/libexpr/parser-state.hh
@@ -118,13 +118,24 @@ inline void ParserState::addAttr(ExprAttrs * attrs, AttrPath && attrPath, Expr *
             auto ae = dynamic_cast(e);
             auto jAttrs = dynamic_cast(j->second.e);
             if (jAttrs && ae) {
+                if (ae->inheritFromExprs && !jAttrs->inheritFromExprs)
+                    jAttrs->inheritFromExprs = std::make_unique>();
                 for (auto & ad : ae->attrs) {
                     auto j2 = jAttrs->attrs.find(ad.first);
                     if (j2 != jAttrs->attrs.end()) // Attr already defined in iAttrs, error.
                         dupAttr(ad.first, j2->second.pos, ad.second.pos);
                     jAttrs->attrs.emplace(ad.first, ad.second);
+                    if (ad.second.kind == ExprAttrs::AttrDef::Kind::InheritedFrom) {
+                        auto & sel = dynamic_cast(*ad.second.e);
+                        auto & from = dynamic_cast(*sel.e);
+                        from.displ += jAttrs->inheritFromExprs->size();
+                    }
                 }
                 jAttrs->dynamicAttrs.insert(jAttrs->dynamicAttrs.end(), ae->dynamicAttrs.begin(), ae->dynamicAttrs.end());
+                if (ae->inheritFromExprs) {
+                    jAttrs->inheritFromExprs->insert(jAttrs->inheritFromExprs->end(),
+                        ae->inheritFromExprs->begin(), ae->inheritFromExprs->end());
+                }
             } else {
                 dupAttr(attrPath, pos, j->second.pos);
             }
diff --git a/src/libexpr/parser.y b/src/libexpr/parser.y
index 0898b81f7..b0aee7b41 100644
--- a/src/libexpr/parser.y
+++ b/src/libexpr/parser.y
@@ -321,14 +321,17 @@ binds
     }
   | binds INHERIT '(' expr ')' attrs ';'
     { $$ = $1;
-      /* !!! Should ensure sharing of the expression in $4. */
+      if (!$$->inheritFromExprs)
+          $$->inheritFromExprs = std::make_unique>();
+      $$->inheritFromExprs->push_back($4);
+      auto from = new nix::ExprInheritFrom(state->at(@4), $$->inheritFromExprs->size() - 1);
       for (auto & i : *$6) {
           if ($$->attrs.find(i.symbol) != $$->attrs.end())
               state->dupAttr(i.symbol, state->at(@6), $$->attrs[i.symbol].pos);
           $$->attrs.emplace(
               i.symbol,
               ExprAttrs::AttrDef(
-                  new ExprSelect(CUR_POS, $4, i.symbol),
+                  new ExprSelect(CUR_POS, from, i.symbol),
                   state->at(@6),
                   ExprAttrs::AttrDef::Kind::InheritedFrom));
       }
diff --git a/tests/functional/lang/eval-okay-inherit-from.err.exp b/tests/functional/lang/eval-okay-inherit-from.err.exp
index 51881205b..3227501f2 100644
--- a/tests/functional/lang/eval-okay-inherit-from.err.exp
+++ b/tests/functional/lang/eval-okay-inherit-from.err.exp
@@ -1,2 +1 @@
 trace: used
-trace: used
diff --git a/tests/functional/lang/eval-okay-inherit-from.exp b/tests/functional/lang/eval-okay-inherit-from.exp
index 43bd0e899..024daff6b 100644
--- a/tests/functional/lang/eval-okay-inherit-from.exp
+++ b/tests/functional/lang/eval-okay-inherit-from.exp
@@ -1 +1 @@
-[ 1 2 { __overrides = { y = { d = [ ]; }; }; c = [ ]; d = 4; x = { c = [ ]; }; y = «repeated»; } ]
+[ 1 2 { __overrides = { y = { d = [ ]; }; }; c = [ ]; d = 4; x = { c = [ ]; }; y = «repeated»; } { inner = { c = 3; d = 4; }; } ]
diff --git a/tests/functional/lang/eval-okay-inherit-from.nix b/tests/functional/lang/eval-okay-inherit-from.nix
index d1fad7d69..b72a1c639 100644
--- a/tests/functional/lang/eval-okay-inherit-from.nix
+++ b/tests/functional/lang/eval-okay-inherit-from.nix
@@ -2,5 +2,15 @@ let
   inherit (builtins.trace "used" { a = 1; b = 2; }) a b;
   x.c = 3;
   y.d = 4;
+
+  merged = {
+    inner = {
+      inherit (y) d;
+    };
+
+    inner = {
+      inherit (x) c;
+    };
+  };
 in
-  [ a b rec { x.c = []; inherit (x) c; inherit (y) d; __overrides.y.d = []; } ]
+  [ a b rec { x.c = []; inherit (x) c; inherit (y) d; __overrides.y.d = []; } merged ]

From 1cd87b7042d14aae1fafa47b1c28db4c5bd20de7 Mon Sep 17 00:00:00 2001
From: pennae 
Date: Mon, 26 Feb 2024 15:33:52 +0100
Subject: [PATCH 146/164] remove ExprAttrs::AttrDef::inherited

it's no longer widely used and has a rather confusing meaning now that
inherit-from is handled very differently.
---
 src/libexpr/eval.cc         | 2 +-
 src/libexpr/nixexpr.hh      | 2 --
 src/libexpr/parser-state.hh | 2 +-
 3 files changed, 2 insertions(+), 4 deletions(-)

diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc
index a353571af..2e7c8207c 100644
--- a/src/libexpr/eval.cc
+++ b/src/libexpr/eval.cc
@@ -1220,7 +1220,7 @@ void ExprAttrs::eval(EvalState & state, Env & env, Value & v)
         Displacement displ = 0;
         for (auto & i : attrs) {
             Value * vAttr;
-            if (hasOverrides && !i.second.inherited()) {
+            if (hasOverrides && i.second.kind != AttrDef::Kind::Inherited) {
                 vAttr = state.allocValue();
                 mkThunk(*vAttr, *i.second.chooseByKind(&env2, &env, inheritEnv), i.second.e);
             } else
diff --git a/src/libexpr/nixexpr.hh b/src/libexpr/nixexpr.hh
index 4bb2ee2f9..2390c4286 100644
--- a/src/libexpr/nixexpr.hh
+++ b/src/libexpr/nixexpr.hh
@@ -189,8 +189,6 @@ struct ExprAttrs : Expr
             : kind(kind), e(e), pos(pos) { };
         AttrDef() { };
 
-        bool inherited() const { return kind == Kind::Inherited; }
-
         template
         const T & chooseByKind(const T & plain, const T & inherited, const T & inheritedFrom) const
         {
diff --git a/src/libexpr/parser-state.hh b/src/libexpr/parser-state.hh
index 9aa18a0ae..34aef661f 100644
--- a/src/libexpr/parser-state.hh
+++ b/src/libexpr/parser-state.hh
@@ -89,7 +89,7 @@ inline void ParserState::addAttr(ExprAttrs * attrs, AttrPath && attrPath, Expr *
         if (i->symbol) {
             ExprAttrs::AttrDefs::iterator j = attrs->attrs.find(i->symbol);
             if (j != attrs->attrs.end()) {
-                if (!j->second.inherited()) {
+                if (j->second.kind != ExprAttrs::AttrDef::Kind::Inherited) {
                     ExprAttrs * attrs2 = dynamic_cast(j->second.e);
                     if (!attrs2) dupAttr(attrPath, pos, j->second.pos);
                     attrs = attrs2;

From f24e445bc024cfd3c26be5f061280af549321c22 Mon Sep 17 00:00:00 2001
From: pennae <82953136+pennae@users.noreply.github.com>
Date: Mon, 26 Feb 2024 15:43:51 +0100
Subject: [PATCH 147/164] add doc comment justifying ExprInheritFrom

Co-authored-by: Robert Hensing 
---
 src/libexpr/nixexpr.hh | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/src/libexpr/nixexpr.hh b/src/libexpr/nixexpr.hh
index 2390c4286..94356759b 100644
--- a/src/libexpr/nixexpr.hh
+++ b/src/libexpr/nixexpr.hh
@@ -135,6 +135,11 @@ struct ExprVar : Expr
     COMMON_METHODS
 };
 
+/**
+ * A pseudo-expression for the purpose of evaluating the `from` expression in `inherit (from)` syntax.
+ * Unlike normal variable references, the displacement is set during parsing, and always refers to
+ * `ExprAttrs::inheritFromExprs` (by itself or in `ExprLet`), whose values are put into their own `Env`.
+ */
 struct ExprInheritFrom : ExprVar
 {
     ExprInheritFrom(PosIdx pos, Displacement displ): ExprVar(pos, {})

From d28a240aa66acaa7691c8d56054cc9fd4c7fd8f3 Mon Sep 17 00:00:00 2001
From: Bob van der Linden 
Date: Mon, 26 Feb 2024 21:06:07 +0100
Subject: [PATCH 148/164] profile: extract getNameFromElement

---
 src/nix/profile.cc | 17 +++++++++++------
 1 file changed, 11 insertions(+), 6 deletions(-)

diff --git a/src/nix/profile.cc b/src/nix/profile.cc
index fc669d5ed..e04ae008d 100644
--- a/src/nix/profile.cc
+++ b/src/nix/profile.cc
@@ -101,6 +101,15 @@ struct ProfileElement
     }
 };
 
+std::string getNameFromElement(const ProfileElement & element)
+{
+    std::optional result = std::nullopt;
+    if (element.source) {
+        result = getNameFromURL(parseURL(element.source->to_string()));
+    }
+    return result.value_or(element.identifier());
+}
+
 struct ProfileManifest
 {
     using ProfileElementName = std::string;
@@ -189,12 +198,8 @@ struct ProfileManifest
 
     void addElement(ProfileElement element)
     {
-        auto name =
-            element.source
-            ? getNameFromURL(parseURL(element.source->to_string()))
-            : std::nullopt;
-        auto name2 = name ? *name : element.identifier();
-        addElement(name2, std::move(element));
+        auto name = getNameFromElement(element);
+        addElement(name, std::move(element));
     }
 
     nlohmann::json toJSON(Store & store) const

From be0052b45fb1da5018c640157fc0cef95185c5bb Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Th=C3=A9ophane=20Hufschmitt?=
 <7226587+thufschmitt@users.noreply.github.com>
Date: Tue, 27 Feb 2024 06:39:30 +0100
Subject: [PATCH 149/164] Revert "Remove dead Git code"

---
 src/libutil/fs-sink.cc    |  46 ++++++
 src/libutil/fs-sink.hh    |   7 +
 src/libutil/git.cc        | 289 ++++++++++++++++++++++++++++++++++++++
 src/libutil/git.hh        | 152 ++++++++++++++++++++
 tests/unit/libutil/git.cc | 205 +++++++++++++++++++++++++++
 5 files changed, 699 insertions(+)

diff --git a/src/libutil/fs-sink.cc b/src/libutil/fs-sink.cc
index 0ebd750f6..35ce0ac36 100644
--- a/src/libutil/fs-sink.cc
+++ b/src/libutil/fs-sink.cc
@@ -5,6 +5,52 @@
 
 namespace nix {
 
+void copyRecursive(
+    SourceAccessor & accessor, const CanonPath & from,
+    FileSystemObjectSink & sink, const Path & to)
+{
+    auto stat = accessor.lstat(from);
+
+    switch (stat.type) {
+    case SourceAccessor::tSymlink:
+    {
+        sink.createSymlink(to, accessor.readLink(from));
+        break;
+    }
+
+    case SourceAccessor::tRegular:
+    {
+        sink.createRegularFile(to, [&](CreateRegularFileSink & crf) {
+            if (stat.isExecutable)
+                crf.isExecutable();
+            accessor.readFile(from, crf, [&](uint64_t size) {
+                crf.preallocateContents(size);
+            });
+        });
+        break;
+    }
+
+    case SourceAccessor::tDirectory:
+    {
+        sink.createDirectory(to);
+        for (auto & [name, _] : accessor.readDirectory(from)) {
+            copyRecursive(
+                accessor, from / name,
+                sink, to + "/" + name);
+            break;
+        }
+        break;
+    }
+
+    case SourceAccessor::tMisc:
+        throw Error("file '%1%' has an unsupported type", from);
+
+    default:
+        abort();
+    }
+}
+
+
 struct RestoreSinkSettings : Config
 {
     Setting preallocateContents{this, false, "preallocate-contents",
diff --git a/src/libutil/fs-sink.hh b/src/libutil/fs-sink.hh
index 670b55c2b..ae577819a 100644
--- a/src/libutil/fs-sink.hh
+++ b/src/libutil/fs-sink.hh
@@ -41,6 +41,13 @@ struct FileSystemObjectSink
     virtual void createSymlink(const Path & path, const std::string & target) = 0;
 };
 
+/**
+ * Recursively copy file system objects from the source into the sink.
+ */
+void copyRecursive(
+    SourceAccessor & accessor, const CanonPath & sourcePath,
+    FileSystemObjectSink & sink, const Path & destPath);
+
 /**
  * Ignore everything and do nothing
  */
diff --git a/src/libutil/git.cc b/src/libutil/git.cc
index 029e1af44..5733531fa 100644
--- a/src/libutil/git.cc
+++ b/src/libutil/git.cc
@@ -5,13 +5,302 @@
 #include 
 #include  // for strcasecmp
 
+#include "signals.hh"
+#include "config.hh"
+#include "hash.hh"
+#include "posix-source-accessor.hh"
+
 #include "git.hh"
+#include "serialise.hh"
 
 namespace nix::git {
 
 using namespace nix;
 using namespace std::string_literals;
 
+std::optional decodeMode(RawMode m) {
+    switch (m) {
+        case (RawMode) Mode::Directory:
+        case (RawMode) Mode::Executable:
+        case (RawMode) Mode::Regular:
+        case (RawMode) Mode::Symlink:
+            return (Mode) m;
+        default:
+            return std::nullopt;
+    }
+}
+
+
+static std::string getStringUntil(Source & source, char byte)
+{
+    std::string s;
+    char n[1];
+    source(std::string_view { n, 1 });
+    while (*n != byte) {
+        s += *n;
+        source(std::string_view { n, 1 });
+    }
+    return s;
+}
+
+
+static std::string getString(Source & source, int n)
+{
+    std::string v;
+    v.resize(n);
+    source(v);
+    return v;
+}
+
+void parseBlob(
+    FileSystemObjectSink & sink,
+    const Path & sinkPath,
+    Source & source,
+    bool executable,
+    const ExperimentalFeatureSettings & xpSettings)
+{
+    xpSettings.require(Xp::GitHashing);
+
+    sink.createRegularFile(sinkPath, [&](auto & crf) {
+        if (executable)
+            crf.isExecutable();
+
+        unsigned long long size = std::stoi(getStringUntil(source, 0));
+
+        crf.preallocateContents(size);
+
+        unsigned long long left = size;
+        std::string buf;
+        buf.reserve(65536);
+
+        while (left) {
+            checkInterrupt();
+            buf.resize(std::min((unsigned long long)buf.capacity(), left));
+            source(buf);
+            crf(buf);
+            left -= buf.size();
+        }
+    });
+}
+
+void parseTree(
+    FileSystemObjectSink & sink,
+    const Path & sinkPath,
+    Source & source,
+    std::function hook,
+    const ExperimentalFeatureSettings & xpSettings)
+{
+    unsigned long long size = std::stoi(getStringUntil(source, 0));
+    unsigned long long left = size;
+
+    sink.createDirectory(sinkPath);
+
+    while (left) {
+        std::string perms = getStringUntil(source, ' ');
+        left -= perms.size();
+        left -= 1;
+
+        RawMode rawMode = std::stoi(perms, 0, 8);
+        auto modeOpt = decodeMode(rawMode);
+        if (!modeOpt)
+            throw Error("Unknown Git permission: %o", perms);
+        auto mode = std::move(*modeOpt);
+
+        std::string name = getStringUntil(source, '\0');
+        left -= name.size();
+        left -= 1;
+
+        std::string hashs = getString(source, 20);
+        left -= 20;
+
+        Hash hash(HashAlgorithm::SHA1);
+        std::copy(hashs.begin(), hashs.end(), hash.hash);
+
+        hook(name, TreeEntry {
+            .mode = mode,
+            .hash = hash,
+        });
+    }
+}
+
+ObjectType parseObjectType(
+    Source & source,
+    const ExperimentalFeatureSettings & xpSettings)
+{
+    xpSettings.require(Xp::GitHashing);
+
+    auto type = getString(source, 5);
+
+    if (type == "blob ") {
+        return ObjectType::Blob;
+    } else if (type == "tree ") {
+        return ObjectType::Tree;
+    } else throw Error("input doesn't look like a Git object");
+}
+
+void parse(
+    FileSystemObjectSink & sink,
+    const Path & sinkPath,
+    Source & source,
+    bool executable,
+    std::function hook,
+    const ExperimentalFeatureSettings & xpSettings)
+{
+    xpSettings.require(Xp::GitHashing);
+
+    auto type = parseObjectType(source, xpSettings);
+
+    switch (type) {
+    case ObjectType::Blob:
+        parseBlob(sink, sinkPath, source, executable, xpSettings);
+        break;
+    case ObjectType::Tree:
+        parseTree(sink, sinkPath, source, hook, xpSettings);
+        break;
+    default:
+        assert(false);
+    };
+}
+
+
+std::optional convertMode(SourceAccessor::Type type)
+{
+    switch (type) {
+    case SourceAccessor::tSymlink:   return Mode::Symlink;
+    case SourceAccessor::tRegular:   return Mode::Regular;
+    case SourceAccessor::tDirectory: return Mode::Directory;
+    case SourceAccessor::tMisc:      return std::nullopt;
+    default: abort();
+    }
+}
+
+
+void restore(FileSystemObjectSink & sink, Source & source, std::function hook)
+{
+    parse(sink, "", source, false, [&](Path name, TreeEntry entry) {
+        auto [accessor, from] = hook(entry.hash);
+        auto stat = accessor->lstat(from);
+        auto gotOpt = convertMode(stat.type);
+        if (!gotOpt)
+            throw Error("file '%s' (git hash %s) has an unsupported type",
+                from,
+                entry.hash.to_string(HashFormat::Base16, false));
+        auto & got = *gotOpt;
+        if (got != entry.mode)
+            throw Error("git mode of file '%s' (git hash %s) is %o but expected %o",
+                from,
+                entry.hash.to_string(HashFormat::Base16, false),
+                (RawMode) got,
+                (RawMode) entry.mode);
+        copyRecursive(
+            *accessor, from,
+            sink, name);
+    });
+}
+
+
+void dumpBlobPrefix(
+    uint64_t size, Sink & sink,
+    const ExperimentalFeatureSettings & xpSettings)
+{
+    xpSettings.require(Xp::GitHashing);
+    auto s = fmt("blob %d\0"s, std::to_string(size));
+    sink(s);
+}
+
+
+void dumpTree(const Tree & entries, Sink & sink,
+    const ExperimentalFeatureSettings & xpSettings)
+{
+    xpSettings.require(Xp::GitHashing);
+
+    std::string v1;
+
+    for (auto & [name, entry] : entries) {
+        auto name2 = name;
+        if (entry.mode == Mode::Directory) {
+            assert(name2.back() == '/');
+            name2.pop_back();
+        }
+        v1 += fmt("%o %s\0"s, static_cast(entry.mode), name2);
+        std::copy(entry.hash.hash, entry.hash.hash + entry.hash.hashSize, std::back_inserter(v1));
+    }
+
+    {
+        auto s = fmt("tree %d\0"s, v1.size());
+        sink(s);
+    }
+
+    sink(v1);
+}
+
+
+Mode dump(
+    SourceAccessor & accessor, const CanonPath & path,
+    Sink & sink,
+    std::function hook,
+    PathFilter & filter,
+    const ExperimentalFeatureSettings & xpSettings)
+{
+    auto st = accessor.lstat(path);
+
+    switch (st.type) {
+    case SourceAccessor::tRegular:
+    {
+        accessor.readFile(path, sink, [&](uint64_t size) {
+            dumpBlobPrefix(size, sink, xpSettings);
+        });
+        return st.isExecutable
+            ? Mode::Executable
+            : Mode::Regular;
+    }
+
+    case SourceAccessor::tDirectory:
+    {
+        Tree entries;
+        for (auto & [name, _] : accessor.readDirectory(path)) {
+            auto child = path / name;
+            if (!filter(child.abs())) continue;
+
+            auto entry = hook(child);
+
+            auto name2 = name;
+            if (entry.mode == Mode::Directory)
+                name2 += "/";
+
+            entries.insert_or_assign(std::move(name2), std::move(entry));
+        }
+        dumpTree(entries, sink, xpSettings);
+        return Mode::Directory;
+    }
+
+    case SourceAccessor::tSymlink:
+    case SourceAccessor::tMisc:
+    default:
+        throw Error("file '%1%' has an unsupported type", path);
+    }
+}
+
+
+TreeEntry dumpHash(
+        HashAlgorithm ha,
+        SourceAccessor & accessor, const CanonPath & path, PathFilter & filter)
+{
+    std::function hook;
+    hook = [&](const CanonPath & path) -> TreeEntry {
+        auto hashSink = HashSink(ha);
+        auto mode = dump(accessor, path, hashSink, hook, filter);
+        auto hash = hashSink.finish().first;
+        return {
+            .mode = mode,
+            .hash = hash,
+        };
+    };
+
+    return hook(path);
+}
+
+
 std::optional parseLsRemoteLine(std::string_view line)
 {
     const static std::regex line_regex("^(ref: *)?([^\\s]+)(?:\\t+(.*))?$");
diff --git a/src/libutil/git.hh b/src/libutil/git.hh
index dea351929..d9eb138e1 100644
--- a/src/libutil/git.hh
+++ b/src/libutil/git.hh
@@ -5,8 +5,160 @@
 #include 
 #include 
 
+#include "types.hh"
+#include "serialise.hh"
+#include "hash.hh"
+#include "source-accessor.hh"
+#include "fs-sink.hh"
+
 namespace nix::git {
 
+enum struct ObjectType {
+    Blob,
+    Tree,
+    //Commit,
+    //Tag,
+};
+
+using RawMode = uint32_t;
+
+enum struct Mode : RawMode {
+    Directory = 0040000,
+    Regular = 0100644,
+    Executable = 0100755,
+    Symlink = 0120000,
+};
+
+std::optional decodeMode(RawMode m);
+
+/**
+ * An anonymous Git tree object entry (no name part).
+ */
+struct TreeEntry
+{
+    Mode mode;
+    Hash hash;
+
+    GENERATE_CMP(TreeEntry, me->mode, me->hash);
+};
+
+/**
+ * A Git tree object, fully decoded and stored in memory.
+ *
+ * Directory names must end in a `/` for sake of sorting. See
+ * https://github.com/mirage/irmin/issues/352
+ */
+using Tree = std::map;
+
+/**
+ * Callback for processing a child hash with `parse`
+ *
+ * The function should
+ *
+ * 1. Obtain the file system objects denoted by `gitHash`
+ *
+ * 2. Ensure they match `mode`
+ *
+ * 3. Feed them into the same sink `parse` was called with
+ *
+ * Implementations may seek to memoize resources (bandwidth, storage,
+ * etc.) for the same Git hash.
+ */
+using SinkHook = void(const Path & name, TreeEntry entry);
+
+/**
+ * Parse the "blob " or "tree " prefix.
+ *
+ * @throws if prefix not recognized
+ */
+ObjectType parseObjectType(
+    Source & source,
+    const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);
+
+void parseBlob(
+    FileSystemObjectSink & sink, const Path & sinkPath,
+    Source & source,
+    bool executable,
+    const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);
+
+void parseTree(
+    FileSystemObjectSink & sink, const Path & sinkPath,
+    Source & source,
+    std::function hook,
+    const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);
+
+/**
+ * Helper putting the previous three `parse*` functions together.
+ */
+void parse(
+    FileSystemObjectSink & sink, const Path & sinkPath,
+    Source & source,
+    bool executable,
+    std::function hook,
+    const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);
+
+/**
+ * Assists with writing a `SinkHook` step (2).
+ */
+std::optional convertMode(SourceAccessor::Type type);
+
+/**
+ * Simplified version of `SinkHook` for `restore`.
+ *
+ * Given a `Hash`, return a `SourceAccessor` and `CanonPath` pointing to
+ * the file system object with that path.
+ */
+using RestoreHook = std::pair(Hash);
+
+/**
+ * Wrapper around `parse` and `RestoreSink`
+ */
+void restore(FileSystemObjectSink & sink, Source & source, std::function hook);
+
+/**
+ * Dumps a single file to a sink
+ *
+ * @param xpSettings for testing purposes
+ */
+void dumpBlobPrefix(
+    uint64_t size, Sink & sink,
+    const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);
+
+/**
+ * Dumps a representation of a git tree to a sink
+ */
+void dumpTree(
+    const Tree & entries, Sink & sink,
+    const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);
+
+/**
+ * Callback for processing a child with `dump`
+ *
+ * The function should return the Git hash and mode of the file at the
+ * given path in the accessor passed to `dump`.
+ *
+ * Note that if the child is a directory, its children must also be so
+ * processed in order to compute this information.
+ */
+using DumpHook = TreeEntry(const CanonPath & path);
+
+Mode dump(
+    SourceAccessor & accessor, const CanonPath & path,
+    Sink & sink,
+    std::function hook,
+    PathFilter & filter = defaultPathFilter,
+    const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);
+
+/**
+ * Recursively dumps path, hashing as we go.
+ *
+ * A smaller wrapper around `dump`.
+ */
+TreeEntry dumpHash(
+            HashAlgorithm ha,
+            SourceAccessor & accessor, const CanonPath & path,
+            PathFilter & filter = defaultPathFilter);
+
 /**
  * A line from the output of `git ls-remote --symref`.
  *
diff --git a/tests/unit/libutil/git.cc b/tests/unit/libutil/git.cc
index 73bbd049e..76ef86bcf 100644
--- a/tests/unit/libutil/git.cc
+++ b/tests/unit/libutil/git.cc
@@ -9,6 +9,211 @@ namespace nix {
 
 using namespace git;
 
+class GitTest : public CharacterizationTest
+{
+    Path unitTestData = getUnitTestData() + "/git";
+
+public:
+
+    Path goldenMaster(std::string_view testStem) const override {
+        return unitTestData + "/" + testStem;
+    }
+
+    /**
+     * We set these in tests rather than the regular globals so we don't have
+     * to worry about race conditions if the tests run concurrently.
+     */
+    ExperimentalFeatureSettings mockXpSettings;
+
+private:
+
+    void SetUp() override
+    {
+        mockXpSettings.set("experimental-features", "git-hashing");
+    }
+};
+
+TEST(GitMode, gitMode_directory) {
+    Mode m = Mode::Directory;
+    RawMode r = 0040000;
+    ASSERT_EQ(static_cast(m), r);
+    ASSERT_EQ(decodeMode(r), std::optional { m });
+};
+
+TEST(GitMode, gitMode_executable) {
+    Mode m = Mode::Executable;
+    RawMode r = 0100755;
+    ASSERT_EQ(static_cast(m), r);
+    ASSERT_EQ(decodeMode(r), std::optional { m });
+};
+
+TEST(GitMode, gitMode_regular) {
+    Mode m = Mode::Regular;
+    RawMode r = 0100644;
+    ASSERT_EQ(static_cast(m), r);
+    ASSERT_EQ(decodeMode(r), std::optional { m });
+};
+
+TEST(GitMode, gitMode_symlink) {
+    Mode m = Mode::Symlink;
+    RawMode r = 0120000;
+    ASSERT_EQ(static_cast(m), r);
+    ASSERT_EQ(decodeMode(r), std::optional { m });
+};
+
+TEST_F(GitTest, blob_read) {
+    readTest("hello-world-blob.bin", [&](const auto & encoded) {
+        StringSource in { encoded };
+        StringSink out;
+        RegularFileSink out2 { out };
+        ASSERT_EQ(parseObjectType(in, mockXpSettings), ObjectType::Blob);
+        parseBlob(out2, "", in, false, mockXpSettings);
+
+        auto expected = readFile(goldenMaster("hello-world.bin"));
+
+        ASSERT_EQ(out.s, expected);
+    });
+}
+
+TEST_F(GitTest, blob_write) {
+    writeTest("hello-world-blob.bin", [&]() {
+        auto decoded = readFile(goldenMaster("hello-world.bin"));
+        StringSink s;
+        dumpBlobPrefix(decoded.size(), s, mockXpSettings);
+        s(decoded);
+        return s.s;
+    });
+}
+
+/**
+ * This data is for "shallow" tree tests. However, we use "real" hashes
+ * so that we can check our test data in a small shell script test
+ * (`tests/unit/libutil/data/git/check-data.sh`).
+ */
+const static Tree tree = {
+    {
+        "Foo",
+        {
+            .mode = Mode::Regular,
+            // hello world with special chars from above
+            .hash = Hash::parseAny("63ddb340119baf8492d2da53af47e8c7cfcd5eb2", HashAlgorithm::SHA1),
+        },
+    },
+    {
+        "bAr",
+        {
+            .mode = Mode::Executable,
+            // ditto
+            .hash = Hash::parseAny("63ddb340119baf8492d2da53af47e8c7cfcd5eb2", HashAlgorithm::SHA1),
+        },
+    },
+    {
+        "baZ/",
+        {
+            .mode = Mode::Directory,
+            // Empty directory hash
+            .hash = Hash::parseAny("4b825dc642cb6eb9a060e54bf8d69288fbee4904", HashAlgorithm::SHA1),
+        },
+    },
+};
+
+TEST_F(GitTest, tree_read) {
+    readTest("tree.bin", [&](const auto & encoded) {
+        StringSource in { encoded };
+        NullFileSystemObjectSink out;
+        Tree got;
+        ASSERT_EQ(parseObjectType(in, mockXpSettings), ObjectType::Tree);
+        parseTree(out, "", in, [&](auto & name, auto entry) {
+            auto name2 = name;
+            if (entry.mode == Mode::Directory)
+                name2 += '/';
+            got.insert_or_assign(name2, std::move(entry));
+        }, mockXpSettings);
+
+        ASSERT_EQ(got, tree);
+    });
+}
+
+TEST_F(GitTest, tree_write) {
+    writeTest("tree.bin", [&]() {
+        StringSink s;
+        dumpTree(tree, s, mockXpSettings);
+        return s.s;
+    });
+}
+
+TEST_F(GitTest, both_roundtrip) {
+    using File = MemorySourceAccessor::File;
+
+    MemorySourceAccessor files;
+    files.root = File::Directory {
+        .contents {
+            {
+                "foo",
+                File::Regular {
+                    .contents = "hello\n\0\n\tworld!",
+                },
+            },
+            {
+                "bar",
+                File::Directory {
+                    .contents = {
+                        {
+                            "baz",
+                            File::Regular {
+                                .executable = true,
+                                .contents = "good day,\n\0\n\tworld!",
+                            },
+                        },
+                    },
+                },
+            },
+        },
+    };
+
+    std::map cas;
+
+    std::function dumpHook;
+    dumpHook = [&](const CanonPath & path) {
+        StringSink s;
+        HashSink hashSink { HashAlgorithm::SHA1 };
+        TeeSink s2 { s, hashSink };
+        auto mode = dump(
+            files, path, s2, dumpHook,
+            defaultPathFilter, mockXpSettings);
+        auto hash = hashSink.finish().first;
+        cas.insert_or_assign(hash, std::move(s.s));
+        return TreeEntry {
+            .mode = mode,
+            .hash = hash,
+        };
+    };
+
+    auto root = dumpHook(CanonPath::root);
+
+    MemorySourceAccessor files2;
+
+    MemorySink sinkFiles2 { files2 };
+
+    std::function mkSinkHook;
+    mkSinkHook = [&](auto prefix, auto & hash, auto executable) {
+        StringSource in { cas[hash] };
+        parse(
+            sinkFiles2, prefix, in, executable,
+            [&](const Path & name, const auto & entry) {
+                mkSinkHook(
+                    prefix + "/" + name,
+                    entry.hash,
+                    entry.mode == Mode::Executable);
+            },
+            mockXpSettings);
+    };
+
+    mkSinkHook("", root.hash, false);
+
+    ASSERT_EQ(files, files2);
+}
+
 TEST(GitLsRemote, parseSymrefLineWithReference) {
     auto line = "ref: refs/head/main	HEAD";
     auto res = parseLsRemoteLine(line);

From e5d9130a5bb1ee3f2926afff17be4c031ca404a0 Mon Sep 17 00:00:00 2001
From: Bob van der Linden 
Date: Mon, 26 Feb 2024 21:45:41 +0100
Subject: [PATCH 150/164] Fix extraction of name for defaultPackage URLs

---
 src/libexpr/flake/url-name.cc        | 14 ++++++--------
 tests/unit/libexpr/flake/url-name.cc |  1 +
 2 files changed, 7 insertions(+), 8 deletions(-)

diff --git a/src/libexpr/flake/url-name.cc b/src/libexpr/flake/url-name.cc
index 753f197d5..d62b34552 100644
--- a/src/libexpr/flake/url-name.cc
+++ b/src/libexpr/flake/url-name.cc
@@ -5,13 +5,12 @@
 namespace nix {
 
 static const std::string attributeNamePattern("[a-zA-Z0-9_-]+");
-static const std::regex lastAttributeRegex("(?:" + attributeNamePattern + "\\.)*(?!default)(" + attributeNamePattern +")(\\^.*)?");
+static const std::regex lastAttributeRegex("^((?:" + attributeNamePattern + "\\.)*)(" + attributeNamePattern +")(\\^.*)?$");
 static const std::string pathSegmentPattern("[a-zA-Z0-9_-]+");
 static const std::regex lastPathSegmentRegex(".*/(" + pathSegmentPattern +")");
 static const std::regex secondPathSegmentRegex("(?:" + pathSegmentPattern + ")/(" + pathSegmentPattern +")(?:/.*)?");
 static const std::regex gitProviderRegex("github|gitlab|sourcehut");
 static const std::regex gitSchemeRegex("git($|\\+.*)");
-static const std::regex defaultOutputRegex(".*\\.default($|\\^.*)");
 
 std::optional getNameFromURL(const ParsedURL & url)
 {
@@ -22,8 +21,11 @@ std::optional getNameFromURL(const ParsedURL & url)
         return url.query.at("dir");
 
     /* If the fragment isn't a "default" and contains two attribute elements, use the last one */
-    if (std::regex_match(url.fragment, match, lastAttributeRegex))
-        return match.str(1);
+    if (std::regex_match(url.fragment, match, lastAttributeRegex)
+        && match.str(1) != "defaultPackage."
+        && match.str(2) != "default") {
+        return match.str(2);
+    }
 
     /* If this is a github/gitlab/sourcehut flake, use the repo name */
     if (std::regex_match(url.scheme, gitProviderRegex) && std::regex_match(url.path, match, secondPathSegmentRegex))
@@ -33,10 +35,6 @@ std::optional getNameFromURL(const ParsedURL & url)
     if (std::regex_match(url.scheme, gitSchemeRegex) && std::regex_match(url.path, match, lastPathSegmentRegex))
         return match.str(1);
 
-    /* If everything failed but there is a non-default fragment, use it in full */
-    if (!url.fragment.empty() && !std::regex_match(url.fragment, defaultOutputRegex))
-        return url.fragment;
-
     /* If there is no fragment, take the last element of the path */
     if (std::regex_match(url.path, match, lastPathSegmentRegex))
         return match.str(1);
diff --git a/tests/unit/libexpr/flake/url-name.cc b/tests/unit/libexpr/flake/url-name.cc
index 85387b323..15bc6b111 100644
--- a/tests/unit/libexpr/flake/url-name.cc
+++ b/tests/unit/libexpr/flake/url-name.cc
@@ -14,6 +14,7 @@ namespace nix {
         ASSERT_EQ(getNameFromURL(parseURL("path:./repos/myflake#nonStandardAttr.mylaptop")), "mylaptop");
         ASSERT_EQ(getNameFromURL(parseURL("path:./nixpkgs#packages.x86_64-linux.complex^bin,man")), "complex");
         ASSERT_EQ(getNameFromURL(parseURL("path:./myproj#packages.x86_64-linux.default^*")), "myproj");
+        ASSERT_EQ(getNameFromURL(parseURL("path:./myproj#defaultPackage.x86_64-linux")), "myproj");
 
         ASSERT_EQ(getNameFromURL(parseURL("github:NixOS/nixpkgs#packages.x86_64-linux.hello")), "hello");
         ASSERT_EQ(getNameFromURL(parseURL("github:NixOS/nixpkgs#hello")), "hello");

From 04836c73e5589ec10bef08992a7ef815a7f7592c Mon Sep 17 00:00:00 2001
From: John Ericson 
Date: Sun, 21 Jan 2024 14:01:57 -0500
Subject: [PATCH 151/164] Merge `nativeCheckInputs` into `nativeBuildInputs`

They were getting skipped for the test-against checks.
---
 package.nix | 10 ++++------
 1 file changed, 4 insertions(+), 6 deletions(-)

diff --git a/package.nix b/package.nix
index d1d14d10e..1f895e301 100644
--- a/package.nix
+++ b/package.nix
@@ -209,6 +209,10 @@ in {
     (lib.getBin lowdown)
     mdbook
     mdbook-linkcheck
+  ] ++ lib.optionals doInstallCheck [
+    git
+    mercurial
+    openssh
   ] ++ lib.optionals (doInstallCheck || enableManual) [
     jq # Also for custom mdBook preprocessor.
   ] ++ lib.optional stdenv.hostPlatform.isLinux util-linux
@@ -249,12 +253,6 @@ in {
   dontBuild = !attrs.doBuild;
   doCheck = attrs.doCheck;
 
-  nativeCheckInputs = [
-    git
-    mercurial
-    openssh
-  ];
-
   disallowedReferences = [ boost ];
 
   preConfigure = lib.optionalString (doBuild && ! stdenv.hostPlatform.isStatic) (

From 201551c937c3f816a23c4c2f36edba60619e42f9 Mon Sep 17 00:00:00 2001
From: John Ericson 
Date: Mon, 4 Sep 2023 09:51:23 -0400
Subject: [PATCH 152/164] Add Git object hashing to the store layer

Part of RFC 133

Extracted from our old IPFS branches.

Co-Authored-By: Matthew Bauer 
Co-Authored-By: Carlo Nucera 
Co-authored-by: Robert Hensing 
Co-authored-by: Florian Klink 
---
 Makefile                                    |   1 +
 doc/manual/src/protocols/store-path.md      |   9 +-
 perl/lib/Nix/Store.xs                       |   2 +-
 src/libexpr/primops.cc                      |   5 +-
 src/libstore/binary-cache-store.cc          |   7 +-
 src/libstore/binary-cache-store.hh          |   2 +-
 src/libstore/build/local-derivation-goal.cc |  34 ++++--
 src/libstore/build/worker.cc                |   4 +-
 src/libstore/content-address.cc             |  11 ++
 src/libstore/daemon.cc                      |   7 +-
 src/libstore/local-fs-store.hh              |   2 +-
 src/libstore/local-store.cc                 |  85 ++++++++++++---
 src/libstore/optimise-store.cc              |   4 +-
 src/libstore/remote-store.cc                |   1 +
 src/libstore/remote-store.hh                |   2 +-
 src/libstore/store-api.cc                   |  45 +++++++-
 src/libstore/uds-remote-store.hh            |   2 +-
 src/libutil/file-content-address.cc         |  81 +++++++++++---
 src/libutil/file-content-address.hh         | 110 ++++++++++++++++----
 src/nix-store/nix-store.cc                  |   2 +-
 src/nix/add-to-store.cc                     |   1 +
 src/nix/hash.cc                             |  47 +++++++--
 tests/functional/git-hashing/common.sh      |  11 ++
 tests/functional/git-hashing/local.mk       |   7 ++
 tests/functional/git-hashing/simple.sh      |  58 +++++++++++
 tests/unit/libstore/content-address.cc      |   2 +
 tests/unit/libutil/file-content-address.cc  |  28 +++++
 27 files changed, 484 insertions(+), 86 deletions(-)
 create mode 100644 tests/functional/git-hashing/common.sh
 create mode 100644 tests/functional/git-hashing/local.mk
 create mode 100644 tests/functional/git-hashing/simple.sh

diff --git a/Makefile b/Makefile
index f8689c8cf..745e60aa5 100644
--- a/Makefile
+++ b/Makefile
@@ -42,6 +42,7 @@ ifeq ($(ENABLE_FUNCTIONAL_TESTS), yes)
 makefiles += \
   tests/functional/local.mk \
   tests/functional/ca/local.mk \
+  tests/functional/git-hashing/local.mk \
   tests/functional/dyn-drv/local.mk \
   tests/functional/test-libstoreconsumer/local.mk \
   tests/functional/plugins/local.mk
diff --git a/doc/manual/src/protocols/store-path.md b/doc/manual/src/protocols/store-path.md
index fcf8038fc..565c4fa75 100644
--- a/doc/manual/src/protocols/store-path.md
+++ b/doc/manual/src/protocols/store-path.md
@@ -89,15 +89,20 @@ where
 
       - `rec` = one of:
 
+        - ```ebnf
+          | ""
+          ```
+          (empty string) for hashes of the flat (single file) serialization
+
         - ```ebnf
           | "r:"
           ```
           hashes of the for [Nix Archive (NAR)] (arbitrary file system object) serialization
 
         - ```ebnf
-          | ""
+          | "git:"
           ```
-          (empty string) for hashes of the flat (single file) serialization
+          hashes of the [Git blob/tree](https://git-scm.com/book/en/v2/Git-Internals-Git-Objects) [Merkle tree](https://en.wikipedia.org/wiki/Merkle_tree) format
 
       - ```ebnf
         algo = "md5" | "sha1" | "sha256"
diff --git a/perl/lib/Nix/Store.xs b/perl/lib/Nix/Store.xs
index 4a928594b..1c64cc66b 100644
--- a/perl/lib/Nix/Store.xs
+++ b/perl/lib/Nix/Store.xs
@@ -259,7 +259,7 @@ hashPath(char * algo, int base32, char * path)
             auto [accessor, canonPath] = PosixSourceAccessor::createAtRoot(path);
             Hash h = hashPath(
                 accessor, canonPath,
-                FileIngestionMethod::Recursive, parseHashAlgo(algo)).first;
+                FileIngestionMethod::Recursive, parseHashAlgo(algo));
             auto s = h.to_string(base32 ? HashFormat::Nix32 : HashFormat::Base16, false);
             XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
         } catch (Error & e) {
diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc
index 850cc7a45..9ea266cf9 100644
--- a/src/libexpr/primops.cc
+++ b/src/libexpr/primops.cc
@@ -1138,7 +1138,10 @@ drvName, Bindings * attrs, Value & v)
         auto handleHashMode = [&](const std::string_view s) {
             if (s == "recursive") ingestionMethod = FileIngestionMethod::Recursive;
             else if (s == "flat") ingestionMethod = FileIngestionMethod::Flat;
-            else if (s == "text") {
+            else if (s == "git") {
+                experimentalFeatureSettings.require(Xp::GitHashing);
+                ingestionMethod = FileIngestionMethod::Git;
+            } else if (s == "text") {
                 experimentalFeatureSettings.require(Xp::DynamicDerivations);
                 ingestionMethod = TextIngestionMethod {};
             } else
diff --git a/src/libstore/binary-cache-store.cc b/src/libstore/binary-cache-store.cc
index 189d1d305..d6047dd7e 100644
--- a/src/libstore/binary-cache-store.cc
+++ b/src/libstore/binary-cache-store.cc
@@ -324,6 +324,7 @@ StorePath BinaryCacheStore::addToStoreFromDump(
             nar = dump2.s;
             break;
         case FileIngestionMethod::Flat:
+        {
             // The dump is Flat, so we need to convert it to NAR with a
             // single file.
             StringSink s;
@@ -331,6 +332,10 @@ StorePath BinaryCacheStore::addToStoreFromDump(
             nar = std::move(s.s);
             break;
         }
+        case FileIngestionMethod::Git:
+            unsupported("addToStoreFromDump");
+            break;
+        }
     } else {
         // Otherwise, we have to do th same hashing as NAR so our single
         // hash will suffice for both purposes.
@@ -450,7 +455,7 @@ StorePath BinaryCacheStore::addToStore(
        non-recursive+sha256 so we can just use the default
        implementation of this method in terms of addToStoreFromDump. */
 
-    auto h = hashPath(accessor, path, method.getFileIngestionMethod(), hashAlgo, filter).first;
+    auto h = hashPath(accessor, path, method.getFileIngestionMethod(), hashAlgo, filter);
 
     auto source = sinkToSource([&](Sink & sink) {
         accessor.dumpPath(path, sink, filter);
diff --git a/src/libstore/binary-cache-store.hh b/src/libstore/binary-cache-store.hh
index 00ab73905..76de2d11a 100644
--- a/src/libstore/binary-cache-store.hh
+++ b/src/libstore/binary-cache-store.hh
@@ -147,7 +147,7 @@ public:
 
     void narFromPath(const StorePath & path, Sink & sink) override;
 
-    ref getFSAccessor(bool requireValidPath) override;
+    ref getFSAccessor(bool requireValidPath = true) override;
 
     void addSignatures(const StorePath & storePath, const StringSet & sigs) override;
 
diff --git a/src/libstore/build/local-derivation-goal.cc b/src/libstore/build/local-derivation-goal.cc
index b373c74b2..d92966a74 100644
--- a/src/libstore/build/local-derivation-goal.cc
+++ b/src/libstore/build/local-derivation-goal.cc
@@ -8,6 +8,7 @@
 #include "finally.hh"
 #include "util.hh"
 #include "archive.hh"
+#include "git.hh"
 #include "compression.hh"
 #include "daemon.hh"
 #include "topo-sort.hh"
@@ -2457,15 +2458,28 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
             rewriteOutput(outputRewrites);
             /* FIXME optimize and deduplicate with addToStore */
             std::string oldHashPart { scratchPath->hashPart() };
-            auto got = ({
-                HashModuloSink caSink { outputHash.hashAlgo, oldHashPart };
+            auto got = [&]{
                 PosixSourceAccessor accessor;
-                dumpPath(
-                    accessor, CanonPath { actualPath },
-                    caSink,
-                    outputHash.method.getFileIngestionMethod());
-                caSink.finish().first;
-            });
+                auto fim = outputHash.method.getFileIngestionMethod();
+                switch (fim) {
+                case FileIngestionMethod::Flat:
+                case FileIngestionMethod::Recursive:
+                {
+                    HashModuloSink caSink { outputHash.hashAlgo, oldHashPart };
+                    auto fim = outputHash.method.getFileIngestionMethod();
+                    dumpPath(
+                        accessor, CanonPath { actualPath },
+                        caSink,
+                        (FileSerialisationMethod) fim);
+                    return caSink.finish().first;
+                }
+                case FileIngestionMethod::Git: {
+                    return git::dumpHash(
+                        outputHash.hashAlgo, accessor,
+                        CanonPath { tmpDir + "/tmp" }).hash;
+                }
+                }
+            }();
 
             ValidPathInfo newInfo0 {
                 worker.store,
@@ -2491,7 +2505,7 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
                 PosixSourceAccessor accessor;
                 HashResult narHashAndSize = hashPath(
                     accessor, CanonPath { actualPath },
-                    FileIngestionMethod::Recursive, HashAlgorithm::SHA256);
+                    FileSerialisationMethod::Recursive, HashAlgorithm::SHA256);
                 newInfo0.narHash = narHashAndSize.first;
                 newInfo0.narSize = narHashAndSize.second;
             }
@@ -2515,7 +2529,7 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
                 PosixSourceAccessor accessor;
                 HashResult narHashAndSize = hashPath(
                     accessor, CanonPath { actualPath },
-                    FileIngestionMethod::Recursive, HashAlgorithm::SHA256);
+                    FileSerialisationMethod::Recursive, HashAlgorithm::SHA256);
                 ValidPathInfo newInfo0 { requiredFinalPath, narHashAndSize.first };
                 newInfo0.narSize = narHashAndSize.second;
                 auto refs = rewriteRefs();
diff --git a/src/libstore/build/worker.cc b/src/libstore/build/worker.cc
index 3a34f4006..815ded3d5 100644
--- a/src/libstore/build/worker.cc
+++ b/src/libstore/build/worker.cc
@@ -529,11 +529,11 @@ bool Worker::pathContentsGood(const StorePath & path)
     if (!pathExists(store.printStorePath(path)))
         res = false;
     else {
-        HashResult current = hashPath(
+        Hash current = hashPath(
             *store.getFSAccessor(), CanonPath { store.printStorePath(path) },
             FileIngestionMethod::Recursive, info->narHash.algo);
         Hash nullHash(HashAlgorithm::SHA256);
-        res = info->narHash == nullHash || info->narHash == current.first;
+        res = info->narHash == nullHash || info->narHash == current;
     }
     pathContentsGoodCache.insert_or_assign(path, res);
     if (!res)
diff --git a/src/libstore/content-address.cc b/src/libstore/content-address.cc
index 4e3d2f64d..4ed4f2de5 100644
--- a/src/libstore/content-address.cc
+++ b/src/libstore/content-address.cc
@@ -11,6 +11,9 @@ std::string_view makeFileIngestionPrefix(FileIngestionMethod m)
         return "";
     case FileIngestionMethod::Recursive:
         return "r:";
+    case FileIngestionMethod::Git:
+        experimentalFeatureSettings.require(Xp::GitHashing);
+        return "git:";
     default:
         throw Error("impossible, caught both cases");
     }
@@ -51,6 +54,10 @@ ContentAddressMethod ContentAddressMethod::parsePrefix(std::string_view & m)
     if (splitPrefix(m, "r:")) {
         return FileIngestionMethod::Recursive;
     }
+    else if (splitPrefix(m, "git:")) {
+        experimentalFeatureSettings.require(Xp::GitHashing);
+        return FileIngestionMethod::Git;
+    }
     else if (splitPrefix(m, "text:")) {
         return TextIngestionMethod {};
     }
@@ -131,6 +138,10 @@ static std::pair parseContentAddressMethodP
         auto method = FileIngestionMethod::Flat;
         if (splitPrefix(rest, "r:"))
             method = FileIngestionMethod::Recursive;
+        else if (splitPrefix(rest, "git:")) {
+            experimentalFeatureSettings.require(Xp::GitHashing);
+            method = FileIngestionMethod::Git;
+        }
         HashAlgorithm hashAlgo = parseHashAlgorithm_();
         return {
             std::move(method),
diff --git a/src/libstore/daemon.cc b/src/libstore/daemon.cc
index cf5020dfe..873065e14 100644
--- a/src/libstore/daemon.cc
+++ b/src/libstore/daemon.cc
@@ -13,6 +13,7 @@
 #include "archive.hh"
 #include "derivations.hh"
 #include "args.hh"
+#include "git.hh"
 
 namespace nix::daemon {
 
@@ -443,13 +444,17 @@ static void performOp(TunnelLogger * logger, ref store,
                     TeeSource savedNARSource(from, saved);
                     NullFileSystemObjectSink sink; /* just parse the NAR */
                     parseDump(sink, savedNARSource);
-                } else {
+                } else if (method == FileIngestionMethod::Flat) {
                     /* Incrementally parse the NAR file, stripping the
                        metadata, and streaming the sole file we expect into
                        `saved`. */
                     RegularFileSink savedRegular { saved };
                     parseDump(savedRegular, from);
                     if (!savedRegular.regular) throw Error("regular file expected");
+                } else {
+                    /* Should have validated above that no other file ingestion
+                       method was used. */
+                    assert(false);
                 }
             });
             logger->startWork();
diff --git a/src/libstore/local-fs-store.hh b/src/libstore/local-fs-store.hh
index bf855b67e..8fb081200 100644
--- a/src/libstore/local-fs-store.hh
+++ b/src/libstore/local-fs-store.hh
@@ -43,7 +43,7 @@ public:
     LocalFSStore(const Params & params);
 
     void narFromPath(const StorePath & path, Sink & sink) override;
-    ref getFSAccessor(bool requireValidPath) override;
+    ref getFSAccessor(bool requireValidPath = true) override;
 
     /**
      * Creates symlink from the `gcRoot` to the `storePath` and
diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc
index 2c22bfe31..5f35cf3a8 100644
--- a/src/libstore/local-store.cc
+++ b/src/libstore/local-store.cc
@@ -1,5 +1,6 @@
 #include "local-store.hh"
 #include "globals.hh"
+#include "git.hh"
 #include "archive.hh"
 #include "pathlocks.hh"
 #include "worker-protocol.hh"
@@ -1097,19 +1098,29 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
             if (info.ca) {
                 auto & specified = *info.ca;
                 auto actualHash = ({
-                    HashModuloSink caSink {
-                        specified.hash.algo,
-                        std::string { info.path.hashPart() },
-                    };
-                    PosixSourceAccessor accessor;
-                    dumpPath(
-                        *getFSAccessor(false),
-                        CanonPath { printStorePath(info.path) },
-                        caSink,
-                        specified.method.getFileIngestionMethod());
+                    auto accessor = getFSAccessor(false);
+                    CanonPath path { printStorePath(info.path) };
+                    Hash h { HashAlgorithm::SHA256 }; // throwaway def to appease C++
+                    auto fim = specified.method.getFileIngestionMethod();
+                    switch (fim) {
+                    case FileIngestionMethod::Flat:
+                    case FileIngestionMethod::Recursive:
+                    {
+                        HashModuloSink caSink {
+                            specified.hash.algo,
+                            std::string { info.path.hashPart() },
+                        };
+                        dumpPath(*accessor, path, caSink, (FileSerialisationMethod) fim);
+                        h = caSink.finish().first;
+                        break;
+                    }
+                    case FileIngestionMethod::Git:
+                        h = git::dumpHash(specified.hash.algo, *accessor, path).hash;
+                        break;
+                    }
                     ContentAddress {
                         .method = specified.method,
-                        .hash = caSink.finish().first,
+                        .hash = std::move(h),
                     };
                 });
                 if (specified.hash != actualHash.hash) {
@@ -1199,7 +1210,30 @@ StorePath LocalStore::addToStoreFromDump(
         delTempDir = std::make_unique(tempDir);
         tempPath = tempDir + "/x";
 
-        restorePath(tempPath, bothSource, method.getFileIngestionMethod());
+        auto fim = method.getFileIngestionMethod();
+        switch (fim) {
+        case FileIngestionMethod::Flat:
+        case FileIngestionMethod::Recursive:
+            restorePath(tempPath, bothSource, (FileSerialisationMethod) fim);
+            break;
+        case FileIngestionMethod::Git: {
+            RestoreSink sink;
+            sink.dstPath = tempPath;
+            auto accessor = getFSAccessor();
+            git::restore(sink, bothSource, [&](Hash childHash) {
+                return std::pair {
+                    &*accessor,
+                    CanonPath {
+                        printStorePath(this->makeFixedOutputPath("git", FixedOutputInfo {
+                            .method = FileIngestionMethod::Git,
+                            .hash = childHash,
+                        }))
+                    },
+                };
+            });
+            break;
+        }
+        }
 
         dumpBuffer.reset();
         dump = {};
@@ -1238,7 +1272,30 @@ StorePath LocalStore::addToStoreFromDump(
             if (inMemory) {
                 StringSource dumpSource { dump };
                 /* Restore from the buffer in memory. */
-                restorePath(realPath, dumpSource, method.getFileIngestionMethod());
+                auto fim = method.getFileIngestionMethod();
+                switch (fim) {
+                case FileIngestionMethod::Flat:
+                case FileIngestionMethod::Recursive:
+                    restorePath(realPath, dumpSource, (FileSerialisationMethod) fim);
+                    break;
+                case FileIngestionMethod::Git: {
+                    RestoreSink sink;
+                    sink.dstPath = realPath;
+                    auto accessor = getFSAccessor();
+                    git::restore(sink, dumpSource, [&](Hash childHash) {
+                        return std::pair {
+                            &*accessor,
+                            CanonPath {
+                                printStorePath(this->makeFixedOutputPath("git", FixedOutputInfo {
+                                    .method = FileIngestionMethod::Git,
+                                    .hash = childHash,
+                                }))
+                            },
+                        };
+                    });
+                    break;
+                }
+                }
             } else {
                 /* Move the temporary path we restored above. */
                 moveFile(tempPath, realPath);
@@ -1367,7 +1424,7 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
             PosixSourceAccessor accessor;
             std::string hash = hashPath(
                 accessor, CanonPath { linkPath },
-                FileIngestionMethod::Recursive, HashAlgorithm::SHA256).first.to_string(HashFormat::Nix32, false);
+                FileIngestionMethod::Recursive, HashAlgorithm::SHA256).to_string(HashFormat::Nix32, false);
             if (hash != link.name) {
                 printError("link '%s' was modified! expected hash '%s', got '%s'",
                     linkPath, link.name, hash);
diff --git a/src/libstore/optimise-store.cc b/src/libstore/optimise-store.cc
index 78e4f6d86..daaaaf073 100644
--- a/src/libstore/optimise-store.cc
+++ b/src/libstore/optimise-store.cc
@@ -151,7 +151,7 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats,
         PosixSourceAccessor accessor;
         hashPath(
             accessor, CanonPath { path },
-            FileIngestionMethod::Recursive, HashAlgorithm::SHA256).first;
+            FileSerialisationMethod::Recursive, HashAlgorithm::SHA256).first;
     });
     debug("'%1%' has hash '%2%'", path, hash.to_string(HashFormat::Nix32, true));
 
@@ -166,7 +166,7 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats,
                 PosixSourceAccessor accessor;
                 hashPath(
                     accessor, CanonPath { linkPath },
-                    FileIngestionMethod::Recursive, HashAlgorithm::SHA256).first;
+                    FileSerialisationMethod::Recursive, HashAlgorithm::SHA256).first;
            })))
         {
             // XXX: Consider overwriting linkPath with our valid version.
diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc
index fadef45ff..0cae84828 100644
--- a/src/libstore/remote-store.cc
+++ b/src/libstore/remote-store.cc
@@ -13,6 +13,7 @@
 #include "derivations.hh"
 #include "pool.hh"
 #include "finally.hh"
+#include "git.hh"
 #include "logging.hh"
 #include "callback.hh"
 #include "filetransfer.hh"
diff --git a/src/libstore/remote-store.hh b/src/libstore/remote-store.hh
index 87704985b..c51a21375 100644
--- a/src/libstore/remote-store.hh
+++ b/src/libstore/remote-store.hh
@@ -184,7 +184,7 @@ protected:
 
     friend struct ConnectionHandle;
 
-    virtual ref getFSAccessor(bool requireValidPath) override;
+    virtual ref getFSAccessor(bool requireValidPath = true) override;
 
     virtual void narFromPath(const StorePath & path, Sink & sink) override;
 
diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc
index 4238cbbf5..c44612ec5 100644
--- a/src/libstore/store-api.cc
+++ b/src/libstore/store-api.cc
@@ -12,7 +12,9 @@
 #include "references.hh"
 #include "archive.hh"
 #include "callback.hh"
+#include "git.hh"
 #include "remote-store.hh"
+#include "posix-source-accessor.hh"
 // FIXME this should not be here, see TODO below on
 // `addMultipleToStore`.
 #include "worker-protocol.hh"
@@ -119,6 +121,9 @@ static std::string makeType(
 
 StorePath StoreDirConfig::makeFixedOutputPath(std::string_view name, const FixedOutputInfo & info) const
 {
+    if (info.method == FileIngestionMethod::Git && info.hash.algo != HashAlgorithm::SHA1)
+        throw Error("Git file ingestion must use SHA-1 hash");
+
     if (info.hash.algo == HashAlgorithm::SHA256 && info.method == FileIngestionMethod::Recursive) {
         return makeStorePath(makeType(*this, "source", info.references), info.hash, name);
     } else {
@@ -166,7 +171,7 @@ std::pair StoreDirConfig::computeStorePath(
     const StorePathSet & references,
     PathFilter & filter) const
 {
-    auto h = hashPath(accessor, path, method.getFileIngestionMethod(), hashAlgo, filter).first;
+    auto h = hashPath(accessor, path, method.getFileIngestionMethod(), hashAlgo, filter);
     return {
         makeFixedOutputPathFromCA(
             name,
@@ -193,7 +198,37 @@ StorePath Store::addToStore(
     RepairFlag repair)
 {
     auto source = sinkToSource([&](Sink & sink) {
-        dumpPath(accessor, path, sink, method.getFileIngestionMethod(), filter);
+        auto fim = method.getFileIngestionMethod();
+        switch (fim) {
+        case FileIngestionMethod::Flat:
+        case FileIngestionMethod::Recursive:
+        {
+            dumpPath(accessor, path, sink, (FileSerialisationMethod) fim, filter);
+            break;
+        }
+        case FileIngestionMethod::Git:
+        {
+            git::dump(
+                accessor, path,
+                sink,
+                // recursively add to store if path is a directory
+                [&](const CanonPath & path) -> git::TreeEntry {
+                    auto storePath = addToStore("git", accessor, path, method, hashAlgo, references, filter, repair);
+                    auto info = queryPathInfo(storePath);
+                    assert(info->ca);
+                    assert(info->ca->method == FileIngestionMethod::Git);
+                    auto stat = getFSAccessor()->lstat(CanonPath(printStorePath(storePath)));
+                    auto gitModeOpt = git::convertMode(stat.type);
+                    assert(gitModeOpt);
+                    return {
+                        .mode = *gitModeOpt,
+                        .hash = info->ca->hash,
+                    };
+                },
+                filter);
+            break;
+        }
+        }
     });
     return addToStoreFromDump(*source, name, method, hashAlgo, references, repair);
 }
@@ -355,9 +390,7 @@ ValidPathInfo Store::addToStoreSlow(
     NullFileSystemObjectSink blank;
     auto & parseSink = method.getFileIngestionMethod() == FileIngestionMethod::Flat
         ? (FileSystemObjectSink &) fileSink
-        : method.getFileIngestionMethod() == FileIngestionMethod::Recursive
-        ? (FileSystemObjectSink &) blank
-        : (abort(), (FileSystemObjectSink &)*(FileSystemObjectSink *)nullptr); // handled both cases
+        : (FileSystemObjectSink &) blank; // for recursive or git we do recursive
 
     /* The information that flows from tapped (besides being replicated in
        narSink), is now put in parseSink. */
@@ -369,6 +402,8 @@ ValidPathInfo Store::addToStoreSlow(
 
     auto hash = method == FileIngestionMethod::Recursive && hashAlgo == HashAlgorithm::SHA256
         ? narHash
+        : method == FileIngestionMethod::Git
+        ? git::dumpHash(hashAlgo, accessor, srcPath).hash
         : caHashSink.finish().first;
 
     if (expectedCAHash && expectedCAHash != hash)
diff --git a/src/libstore/uds-remote-store.hh b/src/libstore/uds-remote-store.hh
index a5ac9080a..8bce8994a 100644
--- a/src/libstore/uds-remote-store.hh
+++ b/src/libstore/uds-remote-store.hh
@@ -35,7 +35,7 @@ public:
     static std::set uriSchemes()
     { return {"unix"}; }
 
-    ref getFSAccessor(bool requireValidPath) override
+    ref getFSAccessor(bool requireValidPath = true) override
     { return LocalFSStore::getFSAccessor(requireValidPath); }
 
     void narFromPath(const StorePath & path, Sink & sink) override
diff --git a/src/libutil/file-content-address.cc b/src/libutil/file-content-address.cc
index 2339024a2..471bda6a0 100644
--- a/src/libutil/file-content-address.cc
+++ b/src/libutil/file-content-address.cc
@@ -1,16 +1,53 @@
 #include "file-content-address.hh"
 #include "archive.hh"
+#include "git.hh"
 
 namespace nix {
 
-FileIngestionMethod parseFileIngestionMethod(std::string_view input)
+static std::optional parseFileSerialisationMethodOpt(std::string_view input)
 {
     if (input == "flat") {
-        return FileIngestionMethod::Flat;
+        return FileSerialisationMethod::Flat;
     } else if (input == "nar") {
-        return FileIngestionMethod::Recursive;
+        return FileSerialisationMethod::Recursive;
     } else {
-        throw UsageError("Unknown file ingestion method '%s', expect `flat` or `nar`");
+        return std::nullopt;
+    }
+}
+
+FileSerialisationMethod parseFileSerialisationMethod(std::string_view input)
+{
+    auto ret = parseFileSerialisationMethodOpt(input);
+    if (ret)
+        return *ret;
+    else
+        throw UsageError("Unknown file serialiation method '%s', expect `flat` or `nar`");
+}
+
+
+FileIngestionMethod parseFileIngestionMethod(std::string_view input)
+{
+    if (input == "git") {
+        return FileIngestionMethod::Git;
+    } else {
+        auto ret = parseFileSerialisationMethodOpt(input);
+        if (ret)
+            return static_cast(*ret);
+        else
+            throw UsageError("Unknown file ingestion method '%s', expect `flat`, `nar`, or `git`");
+    }
+}
+
+
+std::string_view renderFileSerialisationMethod(FileSerialisationMethod method)
+{
+    switch (method) {
+    case FileSerialisationMethod::Flat:
+        return "flat";
+    case FileSerialisationMethod::Recursive:
+        return "nar";
+    default:
+        assert(false);
     }
 }
 
@@ -19,9 +56,11 @@ std::string_view renderFileIngestionMethod(FileIngestionMethod method)
 {
     switch (method) {
     case FileIngestionMethod::Flat:
-        return "flat";
     case FileIngestionMethod::Recursive:
-        return "nar";
+        return renderFileSerialisationMethod(
+            static_cast(method));
+    case FileIngestionMethod::Git:
+        return "git";
     default:
         abort();
     }
@@ -31,14 +70,14 @@ std::string_view renderFileIngestionMethod(FileIngestionMethod method)
 void dumpPath(
     SourceAccessor & accessor, const CanonPath & path,
     Sink & sink,
-    FileIngestionMethod method,
+    FileSerialisationMethod method,
     PathFilter & filter)
 {
     switch (method) {
-    case FileIngestionMethod::Flat:
+    case FileSerialisationMethod::Flat:
         accessor.readFile(path, sink);
         break;
-    case FileIngestionMethod::Recursive:
+    case FileSerialisationMethod::Recursive:
         accessor.dumpPath(path, sink, filter);
         break;
     }
@@ -48,13 +87,13 @@ void dumpPath(
 void restorePath(
     const Path & path,
     Source & source,
-    FileIngestionMethod method)
+    FileSerialisationMethod method)
 {
     switch (method) {
-    case FileIngestionMethod::Flat:
+    case FileSerialisationMethod::Flat:
         writeFile(path, source);
         break;
-    case FileIngestionMethod::Recursive:
+    case FileSerialisationMethod::Recursive:
         restorePath(path, source);
         break;
     }
@@ -63,7 +102,7 @@ void restorePath(
 
 HashResult hashPath(
     SourceAccessor & accessor, const CanonPath & path,
-    FileIngestionMethod method, HashAlgorithm ha,
+    FileSerialisationMethod method, HashAlgorithm ha,
     PathFilter & filter)
 {
     HashSink sink { ha };
@@ -71,4 +110,20 @@ HashResult hashPath(
     return sink.finish();
 }
 
+
+Hash hashPath(
+    SourceAccessor & accessor, const CanonPath & path,
+    FileIngestionMethod method, HashAlgorithm ht,
+    PathFilter & filter)
+{
+    switch (method) {
+    case FileIngestionMethod::Flat:
+    case FileIngestionMethod::Recursive:
+        return hashPath(accessor, path, (FileSerialisationMethod) method, ht, filter).first;
+    case FileIngestionMethod::Git:
+        return git::dumpHash(ht, accessor, path, filter).hash;
+    }
+
+}
+
 }
diff --git a/src/libutil/file-content-address.hh b/src/libutil/file-content-address.hh
index 9a7dae8c6..b361ab243 100644
--- a/src/libutil/file-content-address.hh
+++ b/src/libutil/file-content-address.hh
@@ -8,37 +8,38 @@
 namespace nix {
 
 /**
- * An enumeration of the main ways we can serialize file system
+ * An enumeration of the ways we can serialize file system
  * objects.
  */
-enum struct FileIngestionMethod : uint8_t {
+enum struct FileSerialisationMethod : uint8_t {
     /**
-     * Flat-file hashing. Directly ingest the contents of a single file
+     * Flat-file. The contents of a single file exactly.
      */
-    Flat = 0,
+    Flat,
+
     /**
-     * Recursive (or NAR) hashing. Serializes the file-system object in
-     * Nix Archive format and ingest that.
+     * Nix Archive. Serializes the file-system object in
+     * Nix Archive format.
      */
-    Recursive = 1,
+    Recursive,
 };
 
 /**
- * Parse a `FileIngestionMethod` by name. Choice of:
+ * Parse a `FileSerialisationMethod` by name. Choice of:
  *
- *  - `flat`: `FileIngestionMethod::Flat`
- *  - `nar`: `FileIngestionMethod::Recursive`
+ *  - `flat`: `FileSerialisationMethod::Flat`
+ *  - `nar`: `FileSerialisationMethod::Recursive`
  *
- * Oppostite of `renderFileIngestionMethod`.
+ * Opposite of `renderFileSerialisationMethod`.
  */
-FileIngestionMethod parseFileIngestionMethod(std::string_view input);
+FileSerialisationMethod parseFileSerialisationMethod(std::string_view input);
 
 /**
- * Render a `FileIngestionMethod` by name.
+ * Render a `FileSerialisationMethod` by name.
  *
- * Oppostite of `parseFileIngestionMethod`.
+ * Opposite of `parseFileSerialisationMethod`.
  */
-std::string_view renderFileIngestionMethod(FileIngestionMethod method);
+std::string_view renderFileSerialisationMethod(FileSerialisationMethod method);
 
 /**
  * Dump a serialization of the given file system object.
@@ -46,26 +47,97 @@ std::string_view renderFileIngestionMethod(FileIngestionMethod method);
 void dumpPath(
     SourceAccessor & accessor, const CanonPath & path,
     Sink & sink,
-    FileIngestionMethod method,
+    FileSerialisationMethod method,
     PathFilter & filter = defaultPathFilter);
 
 /**
- * Restore a serialization of the given file system object.
+ * Restore a serialisation of the given file system object.
  *
  * @TODO use an arbitrary `FileSystemObjectSink`.
  */
 void restorePath(
     const Path & path,
     Source & source,
-    FileIngestionMethod method);
+    FileSerialisationMethod method);
+
 
 /**
  * Compute the hash of the given file system object according to the
  * given method.
  *
- * The hash is defined as (essentially) hashString(ha, dumpPath(path)).
+ * The hash is defined as (in pseudocode):
+ *
+ * ```
+ * hashString(ha, dumpPath(...))
+ * ```
  */
 HashResult hashPath(
+    SourceAccessor & accessor, const CanonPath & path,
+    FileSerialisationMethod method, HashAlgorithm ha,
+    PathFilter & filter = defaultPathFilter);
+
+/**
+ * An enumeration of the ways we can ingest file system
+ * objects, producing a hash or digest.
+ */
+enum struct FileIngestionMethod : uint8_t {
+    /**
+     * Hash `FileSerialisationMethod::Flat` serialisation.
+     */
+    Flat,
+
+    /**
+     * Hash `FileSerialisationMethod::Recursive` serialisation.
+     */
+    Recursive,
+
+    /**
+     * Git hashing. In particular files are hashed as git "blobs", and
+     * directories are hashed as git "trees".
+     *
+     * Unlike `Flat` and `Recursive`, this is not a hash of a single
+     * serialisation but a [Merkle
+     * DAG](https://en.wikipedia.org/wiki/Merkle_tree) of multiple
+     * rounds of serialisation and hashing.
+     *
+     * @note Git's data model is slightly different, in that a plain
+     * file doesn't have an executable bit, directory entries do
+     * instead. We decide to treat a bare file as non-executable by fiat,
+     * as we do with `FileIngestionMethod::Flat` which also lacks this
+     * information. Thus, Git can encode some but not all of Nix's "File
+     * System Objects", and this sort of hashing is likewise partial.
+     */
+    Git,
+};
+
+/**
+ * Parse a `FileIngestionMethod` by name. Choice of:
+ *
+ *  - `flat`: `FileIngestionMethod::Flat`
+ *  - `nar`: `FileIngestionMethod::Recursive`
+ *  - `git`: `FileIngestionMethod::Git`
+ *
+ * Opposite of `renderFileIngestionMethod`.
+ */
+FileIngestionMethod parseFileIngestionMethod(std::string_view input);
+
+/**
+ * Render a `FileIngestionMethod` by name.
+ *
+ * Opposite of `parseFileIngestionMethod`.
+ */
+std::string_view renderFileIngestionMethod(FileIngestionMethod method);
+
+/**
+ * Compute the hash of the given file system object according to the
+ * given method.
+ *
+ * Unlike the other `hashPath`, this works on an arbitrary
+ * `FileIngestionMethod` instead of `FileSerialisationMethod`, but
+ * doesn't return the size, since a size is not both simply and
+ * usefully defined for a Merkle format.
+ */
+Hash hashPath(
     SourceAccessor & accessor, const CanonPath & path,
     FileIngestionMethod method, HashAlgorithm ha,
     PathFilter & filter = defaultPathFilter);
diff --git a/src/nix-store/nix-store.cc b/src/nix-store/nix-store.cc
index 99dbfe6e3..7c8905da6 100644
--- a/src/nix-store/nix-store.cc
+++ b/src/nix-store/nix-store.cc
@@ -555,7 +555,7 @@ static void registerValidity(bool reregister, bool hashGiven, bool canonicalise)
                 HashResult hash = hashPath(
                     *store->getFSAccessor(false), CanonPath { store->printStorePath(info->path) },
 
-                    FileIngestionMethod::Recursive, HashAlgorithm::SHA256);
+                    FileSerialisationMethod::Recursive, HashAlgorithm::SHA256);
                 info->narHash = hash.first;
                 info->narSize = hash.second;
             }
diff --git a/src/nix/add-to-store.cc b/src/nix/add-to-store.cc
index ca2daecab..02154715f 100644
--- a/src/nix/add-to-store.cc
+++ b/src/nix/add-to-store.cc
@@ -2,6 +2,7 @@
 #include "common-args.hh"
 #include "store-api.hh"
 #include "archive.hh"
+#include "git.hh"
 #include "posix-source-accessor.hh"
 #include "misc-store-flags.hh"
 
diff --git a/src/nix/hash.cc b/src/nix/hash.cc
index 98d227f0e..f849bf0cf 100644
--- a/src/nix/hash.cc
+++ b/src/nix/hash.cc
@@ -5,6 +5,7 @@
 #include "shared.hh"
 #include "references.hh"
 #include "archive.hh"
+#include "git.hh"
 #include "posix-source-accessor.hh"
 #include "misc-store-flags.hh"
 
@@ -66,9 +67,11 @@ struct CmdHashBase : Command
     {
         switch (mode) {
         case FileIngestionMethod::Flat:
-            return  "print cryptographic hash of a regular file";
+            return "print cryptographic hash of a regular file";
         case FileIngestionMethod::Recursive:
             return "print cryptographic hash of the NAR serialisation of a path";
+        case FileIngestionMethod::Git:
+            return "print cryptographic hash of the Git serialisation of a path";
         default:
             assert(false);
         };
@@ -77,17 +80,41 @@ struct CmdHashBase : Command
     void run() override
     {
         for (auto path : paths) {
+            auto makeSink = [&]() -> std::unique_ptr {
+                if (modulus)
+                    return std::make_unique(hashAlgo, *modulus);
+                else
+                    return std::make_unique(hashAlgo);
+            };
 
-            std::unique_ptr hashSink;
-            if (modulus)
-                hashSink = std::make_unique(hashAlgo, *modulus);
-            else
-                hashSink = std::make_unique(hashAlgo);
+            auto [accessor_, canonPath] = PosixSourceAccessor::createAtRoot(path);
+            auto & accessor = accessor_;
+            Hash h { HashAlgorithm::SHA256 }; // throwaway def to appease C++
+            switch (mode) {
+            case FileIngestionMethod::Flat:
+            case FileIngestionMethod::Recursive:
+            {
+                auto hashSink = makeSink();
+                dumpPath(accessor, canonPath, *hashSink, (FileSerialisationMethod) mode);
+                h = hashSink->finish().first;
+                break;
+            }
+            case FileIngestionMethod::Git: {
+                std::function hook;
+                hook = [&](const CanonPath & path) -> git::TreeEntry {
+                    auto hashSink = makeSink();
+                    auto mode = dump(accessor, path, *hashSink, hook);
+                    auto hash = hashSink->finish().first;
+                    return {
+                        .mode = mode,
+                        .hash = hash,
+                    };
+                };
+                h = hook(canonPath).hash;
+                break;
+            }
+            }
 
-            auto [accessor, canonPath] = PosixSourceAccessor::createAtRoot(path);
-            dumpPath(accessor, canonPath, *hashSink, mode);
-
-            Hash h = hashSink->finish().first;
             if (truncate && h.hashSize > 20) h = compressHash(h, 20);
             logger->cout(h.to_string(hashFormat, hashFormat == HashFormat::SRI));
         }
diff --git a/tests/functional/git-hashing/common.sh b/tests/functional/git-hashing/common.sh
new file mode 100644
index 000000000..5de96e74f
--- /dev/null
+++ b/tests/functional/git-hashing/common.sh
@@ -0,0 +1,11 @@
+source ../common.sh
+
+clearStore
+clearCache
+
+# Need backend to support git-hashing too
+requireDaemonNewerThan "2.18.0pre20230908"
+
+enableFeatures "git-hashing"
+
+restartDaemon
diff --git a/tests/functional/git-hashing/local.mk b/tests/functional/git-hashing/local.mk
new file mode 100644
index 000000000..ebec01940
--- /dev/null
+++ b/tests/functional/git-hashing/local.mk
@@ -0,0 +1,7 @@
+git-hashing-tests := \
+  $(d)/simple.sh
+
+install-tests-groups += git-hashing
+
+clean-files += \
+  $(d)/config.nix
diff --git a/tests/functional/git-hashing/simple.sh b/tests/functional/git-hashing/simple.sh
new file mode 100644
index 000000000..74b0220f8
--- /dev/null
+++ b/tests/functional/git-hashing/simple.sh
@@ -0,0 +1,58 @@
+source common.sh
+
+repo="$TEST_ROOT/scratch"
+git init "$repo"
+
+git -C "$repo" config user.email "you@example.com"
+git -C "$repo" config user.name "Your Name"
+
+try () {
+    hash=$(nix hash path --mode git --format base16 --algo sha1 $TEST_ROOT/hash-path)
+    [[ "$hash" == "$1" ]]
+
+    git -C "$repo" rm -rf hash-path || true
+    cp -r "$TEST_ROOT/hash-path" "$TEST_ROOT/scratch/hash-path"
+    git -C "$repo" add hash-path
+    git -C "$repo" commit -m "x"
+    git -C "$repo" status
+    hash2=$(git -C "$TEST_ROOT/scratch" rev-parse HEAD:hash-path)
+    [[ "$hash2" = "$1" ]]
+}
+
+# blob
+rm -rf $TEST_ROOT/hash-path
+echo "Hello World" > $TEST_ROOT/hash-path
+try "557db03de997c86a4a028e1ebd3a1ceb225be238"
+
+# tree with children
+rm -rf $TEST_ROOT/hash-path
+mkdir $TEST_ROOT/hash-path
+echo "Hello World" > $TEST_ROOT/hash-path/hello
+echo "Run Hello World" > $TEST_ROOT/hash-path/executable
+chmod +x $TEST_ROOT/hash-path/executable
+try "e5c0a11a556801a5c9dcf330ca9d7e2c572697f4"
+
+rm -rf $TEST_ROOT/dummy1
+echo Hello World! > $TEST_ROOT/dummy1
+path1=$(nix store add --mode git --hash-algo sha1 $TEST_ROOT/dummy1)
+hash1=$(nix-store -q --hash $path1)
+test "$hash1" = "sha256:1brffhvj2c0z6x8qismd43m0iy8dsgfmy10bgg9w11szway2wp9v"
+
+rm -rf $TEST_ROOT/dummy2
+mkdir -p $TEST_ROOT/dummy2
+echo Hello World! > $TEST_ROOT/dummy2/hello
+path2=$(nix store add --mode git --hash-algo sha1 $TEST_ROOT/dummy2)
+hash2=$(nix-store -q --hash $path2)
+test "$hash2" = "sha256:1vhv7zxam7x277q0y0jcypm7hwhccbzss81vkdgf0ww5sm2am4y0"
+
+rm -rf $TEST_ROOT/dummy3
+mkdir -p $TEST_ROOT/dummy3
+mkdir -p $TEST_ROOT/dummy3/dir
+touch $TEST_ROOT/dummy3/dir/file
+echo Hello World! > $TEST_ROOT/dummy3/dir/file
+touch $TEST_ROOT/dummy3/dir/executable
+chmod +x $TEST_ROOT/dummy3/dir/executable
+echo Run Hello World! > $TEST_ROOT/dummy3/dir/executable
+path3=$(nix store add --mode git --hash-algo sha1 $TEST_ROOT/dummy3)
+hash3=$(nix-store -q --hash $path3)
+test "$hash3" = "sha256:08y3nm3mvn9qvskqnf13lfgax5lh73krxz4fcjd5cp202ggpw9nv"
diff --git a/tests/unit/libstore/content-address.cc b/tests/unit/libstore/content-address.cc
index 98c1eace3..cc1c7fcc6 100644
--- a/tests/unit/libstore/content-address.cc
+++ b/tests/unit/libstore/content-address.cc
@@ -13,6 +13,7 @@ TEST(ContentAddressMethod, testRoundTripPrintParse_1) {
         ContentAddressMethod { TextIngestionMethod {} },
         ContentAddressMethod { FileIngestionMethod::Flat },
         ContentAddressMethod { FileIngestionMethod::Recursive },
+        ContentAddressMethod { FileIngestionMethod::Git },
     }) {
         EXPECT_EQ(ContentAddressMethod::parse(cam.render()), cam);
     }
@@ -23,6 +24,7 @@ TEST(ContentAddressMethod, testRoundTripPrintParse_2) {
         "text",
         "flat",
         "nar",
+        "git",
     }) {
         EXPECT_EQ(ContentAddressMethod::parse(camS).render(), camS);
     }
diff --git a/tests/unit/libutil/file-content-address.cc b/tests/unit/libutil/file-content-address.cc
index 2e819ce40..294e39806 100644
--- a/tests/unit/libutil/file-content-address.cc
+++ b/tests/unit/libutil/file-content-address.cc
@@ -4,6 +4,32 @@
 
 namespace nix {
 
+/* ----------------------------------------------------------------------------
+ * parseFileSerialisationMethod, renderFileSerialisationMethod
+ * --------------------------------------------------------------------------*/
+
+TEST(FileSerialisationMethod, testRoundTripPrintParse_1) {
+    for (const FileSerialisationMethod fim : {
+        FileSerialisationMethod::Flat,
+        FileSerialisationMethod::Recursive,
+    }) {
+        EXPECT_EQ(parseFileSerialisationMethod(renderFileSerialisationMethod(fim)), fim);
+    }
+}
+
+TEST(FileSerialisationMethod, testRoundTripPrintParse_2) {
+    for (const std::string_view fimS : {
+        "flat",
+        "nar",
+    }) {
+        EXPECT_EQ(renderFileSerialisationMethod(parseFileSerialisationMethod(fimS)), fimS);
+    }
+}
+
+TEST(FileSerialisationMethod, testParseFileSerialisationMethodOptException) {
+    EXPECT_THROW(parseFileSerialisationMethod("narwhal"), UsageError);
+}
+
 /* ----------------------------------------------------------------------------
  * parseFileIngestionMethod, renderFileIngestionMethod
  * --------------------------------------------------------------------------*/
@@ -12,6 +38,7 @@ TEST(FileIngestionMethod, testRoundTripPrintParse_1) {
     for (const FileIngestionMethod fim : {
         FileIngestionMethod::Flat,
         FileIngestionMethod::Recursive,
+        FileIngestionMethod::Git,
     }) {
         EXPECT_EQ(parseFileIngestionMethod(renderFileIngestionMethod(fim)), fim);
     }
@@ -21,6 +48,7 @@ TEST(FileIngestionMethod, testRoundTripPrintParse_2) {
     for (const std::string_view fimS : {
         "flat",
         "nar",
+        "git",
     }) {
         EXPECT_EQ(renderFileIngestionMethod(parseFileIngestionMethod(fimS)), fimS);
     }

From d4ad1fcf303f6f34ebb30a82ebe6f99c26bef8cb Mon Sep 17 00:00:00 2001
From: John Ericson 
Date: Thu, 18 Jan 2024 23:57:26 -0500
Subject: [PATCH 153/164] Avoid creating temporary store object for git over
 the wire

Instead, serialize as NAR and send that over, then rehash server side.
This is algorithmically simpler, but comes at the cost of a new
parameter to `Store::addToStoreFromDump`.

Co-authored-by: Eelco Dolstra 
---
 src/libexpr/primops.cc                      |  2 +-
 src/libstore/binary-cache-store.cc          | 27 +++++---
 src/libstore/binary-cache-store.hh          |  3 +-
 src/libstore/build/local-derivation-goal.cc |  5 +-
 src/libstore/daemon.cc                      | 59 ++++++++--------
 src/libstore/derivations.cc                 |  2 +-
 src/libstore/dummy-store.cc                 |  3 +-
 src/libstore/legacy-ssh-store.hh            |  3 +-
 src/libstore/local-store.cc                 | 77 ++++++++-------------
 src/libstore/local-store.hh                 |  3 +-
 src/libstore/remote-store.cc                | 20 +++++-
 src/libstore/remote-store.hh                |  3 +-
 src/libstore/store-api.cc                   | 47 ++++---------
 src/libstore/store-api.hh                   | 17 +++--
 src/nix-env/user-env.cc                     |  2 +-
 src/nix/develop.cc                          |  2 +-
 16 files changed, 137 insertions(+), 138 deletions(-)

diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc
index 9ea266cf9..78f7f71ed 100644
--- a/src/libexpr/primops.cc
+++ b/src/libexpr/primops.cc
@@ -2092,7 +2092,7 @@ static void prim_toFile(EvalState & state, const PosIdx pos, Value * * args, Val
         })
         : ({
             StringSource s { contents };
-            state.store->addToStoreFromDump(s, name, TextIngestionMethod {}, HashAlgorithm::SHA256, refs, state.repair);
+            state.store->addToStoreFromDump(s, name, FileSerialisationMethod::Flat, TextIngestionMethod {}, HashAlgorithm::SHA256, refs, state.repair);
         });
 
     /* Note: we don't need to add `context' to the context of the
diff --git a/src/libstore/binary-cache-store.cc b/src/libstore/binary-cache-store.cc
index d6047dd7e..bea2bb370 100644
--- a/src/libstore/binary-cache-store.cc
+++ b/src/libstore/binary-cache-store.cc
@@ -305,7 +305,8 @@ void BinaryCacheStore::addToStore(const ValidPathInfo & info, Source & narSource
 StorePath BinaryCacheStore::addToStoreFromDump(
     Source & dump,
     std::string_view name,
-    ContentAddressMethod method,
+    FileSerialisationMethod dumpMethod,
+    ContentAddressMethod hashMethod,
     HashAlgorithm hashAlgo,
     const StorePathSet & references,
     RepairFlag repair)
@@ -313,17 +314,26 @@ StorePath BinaryCacheStore::addToStoreFromDump(
     std::optional caHash;
     std::string nar;
 
+    // Calculating Git hash from NAR stream not yet implemented. May not
+    // be possible to implement in single-pass if the NAR is in an
+    // inconvenient order. Could fetch after uploading, however.
+    if (hashMethod.getFileIngestionMethod() == FileIngestionMethod::Git)
+        unsupported("addToStoreFromDump");
+
     if (auto * dump2p = dynamic_cast(&dump)) {
         auto & dump2 = *dump2p;
         // Hack, this gives us a "replayable" source so we can compute
         // multiple hashes more easily.
-        caHash = hashString(HashAlgorithm::SHA256, dump2.s);
-        switch (method.getFileIngestionMethod()) {
-        case FileIngestionMethod::Recursive:
+        //
+        // Only calculate if the dump is in the right format, however.
+        if (static_cast(dumpMethod) == hashMethod.getFileIngestionMethod())
+            caHash = hashString(HashAlgorithm::SHA256, dump2.s);
+        switch (dumpMethod) {
+        case FileSerialisationMethod::Recursive:
             // The dump is already NAR in this case, just use it.
             nar = dump2.s;
             break;
-        case FileIngestionMethod::Flat:
+        case FileSerialisationMethod::Flat:
         {
             // The dump is Flat, so we need to convert it to NAR with a
             // single file.
@@ -332,14 +342,11 @@ StorePath BinaryCacheStore::addToStoreFromDump(
             nar = std::move(s.s);
             break;
         }
-        case FileIngestionMethod::Git:
-            unsupported("addToStoreFromDump");
-            break;
         }
     } else {
         // Otherwise, we have to do th same hashing as NAR so our single
         // hash will suffice for both purposes.
-        if (method != FileIngestionMethod::Recursive || hashAlgo != HashAlgorithm::SHA256)
+        if (dumpMethod != FileSerialisationMethod::Recursive || hashAlgo != HashAlgorithm::SHA256)
             unsupported("addToStoreFromDump");
     }
     StringSource narDump { nar };
@@ -354,7 +361,7 @@ StorePath BinaryCacheStore::addToStoreFromDump(
             *this,
             name,
             ContentAddressWithReferences::fromParts(
-                method,
+                hashMethod,
                 caHash ? *caHash : nar.first,
                 {
                     .others = references,
diff --git a/src/libstore/binary-cache-store.hh b/src/libstore/binary-cache-store.hh
index 76de2d11a..7c2828309 100644
--- a/src/libstore/binary-cache-store.hh
+++ b/src/libstore/binary-cache-store.hh
@@ -125,7 +125,8 @@ public:
     StorePath addToStoreFromDump(
         Source & dump,
         std::string_view name,
-        ContentAddressMethod method,
+        FileSerialisationMethod dumpMethod,
+        ContentAddressMethod hashMethod,
         HashAlgorithm hashAlgo,
         const StorePathSet & references,
         RepairFlag repair) override;
diff --git a/src/libstore/build/local-derivation-goal.cc b/src/libstore/build/local-derivation-goal.cc
index d92966a74..a9b8de123 100644
--- a/src/libstore/build/local-derivation-goal.cc
+++ b/src/libstore/build/local-derivation-goal.cc
@@ -1312,12 +1312,13 @@ struct RestrictedStore : public virtual RestrictedStoreConfig, public virtual In
     StorePath addToStoreFromDump(
         Source & dump,
         std::string_view name,
-        ContentAddressMethod method,
+        FileSerialisationMethod dumpMethod,
+        ContentAddressMethod hashMethod,
         HashAlgorithm hashAlgo,
         const StorePathSet & references,
         RepairFlag repair) override
     {
-        auto path = next->addToStoreFromDump(dump, name, method, hashAlgo, references, repair);
+        auto path = next->addToStoreFromDump(dump, name, dumpMethod, hashMethod, hashAlgo, references, repair);
         goal.addDependency(path);
         return path;
     }
diff --git a/src/libstore/daemon.cc b/src/libstore/daemon.cc
index 873065e14..e1337f51d 100644
--- a/src/libstore/daemon.cc
+++ b/src/libstore/daemon.cc
@@ -401,11 +401,23 @@ static void performOp(TunnelLogger * logger, ref store,
             logger->startWork();
             auto pathInfo = [&]() {
                 // NB: FramedSource must be out of scope before logger->stopWork();
-                auto [contentAddressMethod, hashAlgo_] = ContentAddressMethod::parseWithAlgo(camStr);
-                auto hashAlgo = hashAlgo_; // work around clang bug
+                auto [contentAddressMethod, hashAlgo] = ContentAddressMethod::parseWithAlgo(camStr);
                 FramedSource source(from);
+                FileSerialisationMethod dumpMethod;
+                switch (contentAddressMethod.getFileIngestionMethod()) {
+                case FileIngestionMethod::Flat:
+                    dumpMethod = FileSerialisationMethod::Flat;
+                    break;
+                case FileIngestionMethod::Recursive:
+                    dumpMethod = FileSerialisationMethod::Recursive;
+                    break;
+                case FileIngestionMethod::Git:
+                    // Use NAR; Git is not a serialization method
+                    dumpMethod = FileSerialisationMethod::Recursive;
+                    break;
+                }
                 // TODO these two steps are essentially RemoteStore::addCAToStore. Move it up to Store.
-                auto path = store->addToStoreFromDump(source, name, contentAddressMethod, hashAlgo, refs, repair);
+                auto path = store->addToStoreFromDump(source, name, dumpMethod, contentAddressMethod, hashAlgo, refs, repair);
                 return store->queryPathInfo(path);
             }();
             logger->stopWork();
@@ -431,34 +443,23 @@ static void performOp(TunnelLogger * logger, ref store,
                 hashAlgo = parseHashAlgo(hashAlgoRaw);
             }
 
+            // Old protocol always sends NAR, regardless of hashing method
             auto dumpSource = sinkToSource([&](Sink & saved) {
-                if (method == FileIngestionMethod::Recursive) {
-                    /* We parse the NAR dump through into `saved` unmodified,
-                       so why all this extra work? We still parse the NAR so
-                       that we aren't sending arbitrary data to `saved`
-                       unwittingly`, and we know when the NAR ends so we don't
-                       consume the rest of `from` and can't parse another
-                       command. (We don't trust `addToStoreFromDump` to not
-                       eagerly consume the entire stream it's given, past the
-                       length of the Nar. */
-                    TeeSource savedNARSource(from, saved);
-                    NullFileSystemObjectSink sink; /* just parse the NAR */
-                    parseDump(sink, savedNARSource);
-                } else if (method == FileIngestionMethod::Flat) {
-                    /* Incrementally parse the NAR file, stripping the
-                       metadata, and streaming the sole file we expect into
-                       `saved`. */
-                    RegularFileSink savedRegular { saved };
-                    parseDump(savedRegular, from);
-                    if (!savedRegular.regular) throw Error("regular file expected");
-                } else {
-                    /* Should have validated above that no other file ingestion
-                       method was used. */
-                    assert(false);
-                }
+                /* We parse the NAR dump through into `saved` unmodified,
+                   so why all this extra work? We still parse the NAR so
+                   that we aren't sending arbitrary data to `saved`
+                   unwittingly`, and we know when the NAR ends so we don't
+                   consume the rest of `from` and can't parse another
+                   command. (We don't trust `addToStoreFromDump` to not
+                   eagerly consume the entire stream it's given, past the
+                   length of the Nar. */
+                TeeSource savedNARSource(from, saved);
+                NullFileSystemObjectSink sink; /* just parse the NAR */
+                parseDump(sink, savedNARSource);
             });
             logger->startWork();
-            auto path = store->addToStoreFromDump(*dumpSource, baseName, method, hashAlgo);
+            auto path = store->addToStoreFromDump(
+                *dumpSource, baseName, FileSerialisationMethod::Recursive, method, hashAlgo);
             logger->stopWork();
 
             to << store->printStorePath(path);
@@ -490,7 +491,7 @@ static void performOp(TunnelLogger * logger, ref store,
         logger->startWork();
         auto path = ({
             StringSource source { s };
-            store->addToStoreFromDump(source, suffix, TextIngestionMethod {}, HashAlgorithm::SHA256, refs, NoRepair);
+            store->addToStoreFromDump(source, suffix, FileSerialisationMethod::Flat, TextIngestionMethod {}, HashAlgorithm::SHA256, refs, NoRepair);
         });
         logger->stopWork();
         to << store->printStorePath(path);
diff --git a/src/libstore/derivations.cc b/src/libstore/derivations.cc
index 305ed5b42..df14e979f 100644
--- a/src/libstore/derivations.cc
+++ b/src/libstore/derivations.cc
@@ -150,7 +150,7 @@ StorePath writeDerivation(Store & store,
         })
         : ({
             StringSource s { contents };
-            store.addToStoreFromDump(s, suffix, TextIngestionMethod {}, HashAlgorithm::SHA256, references, repair);
+            store.addToStoreFromDump(s, suffix, FileSerialisationMethod::Flat, TextIngestionMethod {}, HashAlgorithm::SHA256, references, repair);
         });
 }
 
diff --git a/src/libstore/dummy-store.cc b/src/libstore/dummy-store.cc
index e4f13b8f4..30f23cff9 100644
--- a/src/libstore/dummy-store.cc
+++ b/src/libstore/dummy-store.cc
@@ -61,7 +61,8 @@ struct DummyStore : public virtual DummyStoreConfig, public virtual Store
     virtual StorePath addToStoreFromDump(
         Source & dump,
         std::string_view name,
-        ContentAddressMethod method = FileIngestionMethod::Recursive,
+        FileSerialisationMethod dumpMethod = FileSerialisationMethod::Recursive,
+        ContentAddressMethod hashMethod = FileIngestionMethod::Recursive,
         HashAlgorithm hashAlgo = HashAlgorithm::SHA256,
         const StorePathSet & references = StorePathSet(),
         RepairFlag repair = NoRepair) override
diff --git a/src/libstore/legacy-ssh-store.hh b/src/libstore/legacy-ssh-store.hh
index ae890177b..ca2f115d2 100644
--- a/src/libstore/legacy-ssh-store.hh
+++ b/src/libstore/legacy-ssh-store.hh
@@ -72,7 +72,8 @@ struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Stor
     virtual StorePath addToStoreFromDump(
         Source & dump,
         std::string_view name,
-        ContentAddressMethod method = FileIngestionMethod::Recursive,
+        FileSerialisationMethod dumpMethod = FileSerialisationMethod::Recursive,
+        ContentAddressMethod hashMethod = FileIngestionMethod::Recursive,
         HashAlgorithm hashAlgo = HashAlgorithm::SHA256,
         const StorePathSet & references = StorePathSet(),
         RepairFlag repair = NoRepair) override
diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc
index 5f35cf3a8..56f8c5dd8 100644
--- a/src/libstore/local-store.cc
+++ b/src/libstore/local-store.cc
@@ -1148,7 +1148,8 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
 StorePath LocalStore::addToStoreFromDump(
     Source & source0,
     std::string_view name,
-    ContentAddressMethod method,
+    FileSerialisationMethod dumpMethod,
+    ContentAddressMethod hashMethod,
     HashAlgorithm hashAlgo,
     const StorePathSet & references,
     RepairFlag repair)
@@ -1201,7 +1202,13 @@ StorePath LocalStore::addToStoreFromDump(
     Path tempDir;
     AutoCloseFD tempDirFd;
 
-    if (!inMemory) {
+    bool methodsMatch = (FileIngestionMethod) dumpMethod == hashMethod;
+
+    /* If the methods don't match, our streaming hash of the dump is the
+       wrong sort, and we need to rehash. */
+    bool inMemoryAndDontNeedRestore = inMemory && methodsMatch;
+
+    if (!inMemoryAndDontNeedRestore) {
         /* Drain what we pulled so far, and then keep on pulling */
         StringSource dumpSource { dump };
         ChainSource bothSource { dumpSource, source };
@@ -1210,40 +1217,23 @@ StorePath LocalStore::addToStoreFromDump(
         delTempDir = std::make_unique(tempDir);
         tempPath = tempDir + "/x";
 
-        auto fim = method.getFileIngestionMethod();
-        switch (fim) {
-        case FileIngestionMethod::Flat:
-        case FileIngestionMethod::Recursive:
-            restorePath(tempPath, bothSource, (FileSerialisationMethod) fim);
-            break;
-        case FileIngestionMethod::Git: {
-            RestoreSink sink;
-            sink.dstPath = tempPath;
-            auto accessor = getFSAccessor();
-            git::restore(sink, bothSource, [&](Hash childHash) {
-                return std::pair {
-                    &*accessor,
-                    CanonPath {
-                        printStorePath(this->makeFixedOutputPath("git", FixedOutputInfo {
-                            .method = FileIngestionMethod::Git,
-                            .hash = childHash,
-                        }))
-                    },
-                };
-            });
-            break;
-        }
-        }
+        restorePath(tempPath, bothSource, dumpMethod);
 
         dumpBuffer.reset();
         dump = {};
     }
 
-    auto [hash, size] = hashSink->finish();
+    auto [dumpHash, size] = hashSink->finish();
+
+    PosixSourceAccessor accessor;
 
     auto desc = ContentAddressWithReferences::fromParts(
-        method,
-        hash,
+        hashMethod,
+        methodsMatch
+            ? dumpHash
+            : hashPath(
+                accessor, CanonPath { tempPath },
+                hashMethod.getFileIngestionMethod(), hashAlgo),
         {
             .others = references,
             // caller is not capable of creating a self-reference, because this is content-addressed without modulus
@@ -1269,32 +1259,19 @@ StorePath LocalStore::addToStoreFromDump(
 
             autoGC();
 
-            if (inMemory) {
+            if (inMemoryAndDontNeedRestore) {
                 StringSource dumpSource { dump };
                 /* Restore from the buffer in memory. */
-                auto fim = method.getFileIngestionMethod();
+                auto fim = hashMethod.getFileIngestionMethod();
                 switch (fim) {
                 case FileIngestionMethod::Flat:
                 case FileIngestionMethod::Recursive:
                     restorePath(realPath, dumpSource, (FileSerialisationMethod) fim);
                     break;
-                case FileIngestionMethod::Git: {
-                    RestoreSink sink;
-                    sink.dstPath = realPath;
-                    auto accessor = getFSAccessor();
-                    git::restore(sink, dumpSource, [&](Hash childHash) {
-                        return std::pair {
-                            &*accessor,
-                            CanonPath {
-                                printStorePath(this->makeFixedOutputPath("git", FixedOutputInfo {
-                                    .method = FileIngestionMethod::Git,
-                                    .hash = childHash,
-                                }))
-                            },
-                        };
-                    });
-                    break;
-                }
+                case FileIngestionMethod::Git:
+                    // doesn't correspond to serialization method, so
+                    // this should be unreachable
+                    assert(false);
                 }
             } else {
                 /* Move the temporary path we restored above. */
@@ -1303,8 +1280,8 @@ StorePath LocalStore::addToStoreFromDump(
 
             /* For computing the nar hash. In recursive SHA-256 mode, this
                is the same as the store hash, so no need to do it again. */
-            auto narHash = std::pair { hash, size };
-            if (method != FileIngestionMethod::Recursive || hashAlgo != HashAlgorithm::SHA256) {
+            auto narHash = std::pair { dumpHash, size };
+            if (dumpMethod != FileSerialisationMethod::Recursive || hashAlgo != HashAlgorithm::SHA256) {
                 HashSink narSink { HashAlgorithm::SHA256 };
                 dumpPath(realPath, narSink);
                 narHash = narSink.finish();
diff --git a/src/libstore/local-store.hh b/src/libstore/local-store.hh
index ba56d3ead..7eff1d690 100644
--- a/src/libstore/local-store.hh
+++ b/src/libstore/local-store.hh
@@ -180,7 +180,8 @@ public:
     StorePath addToStoreFromDump(
         Source & dump,
         std::string_view name,
-        ContentAddressMethod method,
+        FileSerialisationMethod dumpMethod,
+        ContentAddressMethod hashMethod,
         HashAlgorithm hashAlgo,
         const StorePathSet & references,
         RepairFlag repair) override;
diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc
index 0cae84828..8dfe8adda 100644
--- a/src/libstore/remote-store.cc
+++ b/src/libstore/remote-store.cc
@@ -509,12 +509,28 @@ ref RemoteStore::addCAToStore(
 StorePath RemoteStore::addToStoreFromDump(
     Source & dump,
     std::string_view name,
-    ContentAddressMethod method,
+    FileSerialisationMethod dumpMethod,
+    ContentAddressMethod hashMethod,
     HashAlgorithm hashAlgo,
     const StorePathSet & references,
     RepairFlag repair)
 {
-    return addCAToStore(dump, name, method, hashAlgo, references, repair)->path;
+    FileSerialisationMethod fsm;
+    switch (hashMethod.getFileIngestionMethod()) {
+    case FileIngestionMethod::Flat:
+        fsm = FileSerialisationMethod::Flat;
+        break;
+    case FileIngestionMethod::Recursive:
+        fsm = FileSerialisationMethod::Recursive;
+        break;
+    case FileIngestionMethod::Git:
+        // Use NAR; Git is not a serialization method
+        fsm = FileSerialisationMethod::Recursive;
+        break;
+    }
+    if (fsm != dumpMethod)
+        unsupported("RemoteStore::addToStoreFromDump doesn't support this `dumpMethod` `hashMethod` combination");
+    return addCAToStore(dump, name, hashMethod, hashAlgo, references, repair)->path;
 }
 
 
diff --git a/src/libstore/remote-store.hh b/src/libstore/remote-store.hh
index c51a21375..d630adc08 100644
--- a/src/libstore/remote-store.hh
+++ b/src/libstore/remote-store.hh
@@ -87,7 +87,8 @@ public:
     StorePath addToStoreFromDump(
         Source & dump,
         std::string_view name,
-        ContentAddressMethod method = FileIngestionMethod::Recursive,
+        FileSerialisationMethod dumpMethod = FileSerialisationMethod::Recursive,
+        ContentAddressMethod hashMethod = FileIngestionMethod::Recursive,
         HashAlgorithm hashAlgo = HashAlgorithm::SHA256,
         const StorePathSet & references = StorePathSet(),
         RepairFlag repair = NoRepair) override;
diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc
index c44612ec5..4356296d4 100644
--- a/src/libstore/store-api.cc
+++ b/src/libstore/store-api.cc
@@ -197,40 +197,23 @@ StorePath Store::addToStore(
     PathFilter & filter,
     RepairFlag repair)
 {
+    FileSerialisationMethod fsm;
+    switch (method.getFileIngestionMethod()) {
+    case FileIngestionMethod::Flat:
+        fsm = FileSerialisationMethod::Flat;
+        break;
+    case FileIngestionMethod::Recursive:
+        fsm = FileSerialisationMethod::Recursive;
+        break;
+    case FileIngestionMethod::Git:
+        // Use NAR; Git is not a serialization method
+        fsm = FileSerialisationMethod::Recursive;
+        break;
+    }
     auto source = sinkToSource([&](Sink & sink) {
-        auto fim = method.getFileIngestionMethod();
-        switch (fim) {
-        case FileIngestionMethod::Flat:
-        case FileIngestionMethod::Recursive:
-        {
-            dumpPath(accessor, path, sink, (FileSerialisationMethod) fim, filter);
-            break;
-        }
-        case FileIngestionMethod::Git:
-        {
-            git::dump(
-                accessor, path,
-                sink,
-                // recursively add to store if path is a directory
-                [&](const CanonPath & path) -> git::TreeEntry {
-                    auto storePath = addToStore("git", accessor, path, method, hashAlgo, references, filter, repair);
-                    auto info = queryPathInfo(storePath);
-                    assert(info->ca);
-                    assert(info->ca->method == FileIngestionMethod::Git);
-                    auto stat = getFSAccessor()->lstat(CanonPath(printStorePath(storePath)));
-                    auto gitModeOpt = git::convertMode(stat.type);
-                    assert(gitModeOpt);
-                    return {
-                        .mode = *gitModeOpt,
-                        .hash = info->ca->hash,
-                    };
-                },
-                filter);
-            break;
-        }
-        }
+        dumpPath(accessor, path, sink, fsm, filter);
     });
-    return addToStoreFromDump(*source, name, method, hashAlgo, references, repair);
+    return addToStoreFromDump(*source, name, fsm, method, hashAlgo, references, repair);
 }
 
 void Store::addMultipleToStore(
diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh
index 5163070b2..5f683a211 100644
--- a/src/libstore/store-api.hh
+++ b/src/libstore/store-api.hh
@@ -466,14 +466,23 @@ public:
      * in `dump`, which is either a NAR serialisation (if recursive ==
      * true) or simply the contents of a regular file (if recursive ==
      * false).
-     * `dump` may be drained
      *
-     * \todo remove?
+     * `dump` may be drained.
+     *
+     * @param dumpMethod What serialisation format is `dump`, i.e. how
+     * to deserialize it. Must either match hashMethod or be
+     * `FileSerialisationMethod::Recursive`.
+     *
+     * @param hashMethod How to compute the content address. Need not
+     * be the same as `dumpMethod`.
+     *
+     * @todo remove?
      */
     virtual StorePath addToStoreFromDump(
         Source & dump,
         std::string_view name,
-        ContentAddressMethod method = FileIngestionMethod::Recursive,
+        FileSerialisationMethod dumpMethod = FileSerialisationMethod::Recursive,
+        ContentAddressMethod hashMethod = FileIngestionMethod::Recursive,
         HashAlgorithm hashAlgo = HashAlgorithm::SHA256,
         const StorePathSet & references = StorePathSet(),
         RepairFlag repair = NoRepair) = 0;
@@ -772,7 +781,7 @@ protected:
      * Helper for methods that are not unsupported: this is used for
      * default definitions for virtual methods that are meant to be overriden.
      *
-     * \todo Using this should be a last resort. It is better to make
+     * @todo Using this should be a last resort. It is better to make
      * the method "virtual pure" and/or move it to a subclass.
      */
     [[noreturn]] void unsupported(const std::string & op)
diff --git a/src/nix-env/user-env.cc b/src/nix-env/user-env.cc
index 2f9c988d5..8bebe2b9e 100644
--- a/src/nix-env/user-env.cc
+++ b/src/nix-env/user-env.cc
@@ -113,7 +113,7 @@ bool createUserEnv(EvalState & state, PackageInfos & elems,
         std::string str2 = str.str();
         StringSource source { str2 };
         state.store->addToStoreFromDump(
-            source, "env-manifest.nix", TextIngestionMethod {}, HashAlgorithm::SHA256, references);
+            source, "env-manifest.nix", FileSerialisationMethod::Flat, TextIngestionMethod {}, HashAlgorithm::SHA256, references);
     });
 
     /* Get the environment builder expression. */
diff --git a/src/nix/develop.cc b/src/nix/develop.cc
index 403178a5d..c1842f2d5 100644
--- a/src/nix/develop.cc
+++ b/src/nix/develop.cc
@@ -226,7 +226,7 @@ static StorePath getDerivationEnvironment(ref store, ref evalStore
     auto getEnvShPath = ({
         StringSource source { getEnvSh };
         evalStore->addToStoreFromDump(
-            source, "get-env.sh", TextIngestionMethod {}, HashAlgorithm::SHA256, {});
+            source, "get-env.sh", FileSerialisationMethod::Flat, TextIngestionMethod {}, HashAlgorithm::SHA256, {});
     });
 
     drv.args = {store->printStorePath(getEnvShPath)};

From 44f10f000a13fae6baae9c10767c6d300ff689a8 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Th=C3=A9ophane=20Hufschmitt?=
 
Date: Tue, 27 Feb 2024 13:45:18 +0100
Subject: [PATCH 154/164] flake: Update to NixOS 23.11

About time :)

This required disabling `bear` on darwin as it's currently broken (fixed
on master, but not yet on 23.11).
---
 flake.lock  |  8 ++++----
 flake.nix   | 18 +++++-------------
 package.nix |  2 +-
 3 files changed, 10 insertions(+), 18 deletions(-)

diff --git a/flake.lock b/flake.lock
index f0efb4036..a9022dbdc 100644
--- a/flake.lock
+++ b/flake.lock
@@ -34,16 +34,16 @@
     },
     "nixpkgs": {
       "locked": {
-        "lastModified": 1705033721,
-        "narHash": "sha256-K5eJHmL1/kev6WuqyqqbS1cdNnSidIZ3jeqJ7GbrYnQ=",
+        "lastModified": 1708971694,
+        "narHash": "sha256-mBXQ65IrCJbNgTrj0+6xdXpD9/U31AWPKdwGlOufhtI=",
         "owner": "NixOS",
         "repo": "nixpkgs",
-        "rev": "a1982c92d8980a0114372973cbdfe0a307f1bdea",
+        "rev": "4dd376f7943c64b522224a548d9cab5627b4d9d6",
         "type": "github"
       },
       "original": {
         "owner": "NixOS",
-        "ref": "nixos-23.05-small",
+        "ref": "nixos-23.11-small",
         "repo": "nixpkgs",
         "type": "github"
       }
diff --git a/flake.nix b/flake.nix
index 0bc70768e..479ec05c0 100644
--- a/flake.nix
+++ b/flake.nix
@@ -1,7 +1,7 @@
 {
   description = "The purely functional package manager";
 
-  inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-23.05-small";
+  inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-23.11-small";
   inputs.nixpkgs-regression.url = "github:NixOS/nixpkgs/215d4d0fd80ca5163643b03a33fde804a29cc1e2";
   inputs.flake-compat = { url = "github:edolstra/flake-compat"; flake = false; };
   inputs.libgit2 = { url = "github:libgit2/libgit2"; flake = false; };
@@ -10,20 +10,10 @@
 
     let
       inherit (nixpkgs) lib;
-
-      # Experimental fileset library: https://github.com/NixOS/nixpkgs/pull/222981
-      # Not an "idiomatic" flake input because:
-      #  - Propagation to dependent locks: https://github.com/NixOS/nix/issues/7730
-      #  - Subflake would download redundant and huge parent flake
-      #  - No git tree hash support: https://github.com/NixOS/nix/issues/6044
-      inherit (import (builtins.fetchTarball { url = "https://github.com/NixOS/nix/archive/1bdcd7fc8a6a40b2e805bad759b36e64e911036b.tar.gz"; sha256 = "sha256:14ljlpdsp4x7h1fkhbmc4bd3vsqnx8zdql4h3037wh09ad6a0893"; }))
-        fileset;
+      inherit (lib) fileset;
 
       officialRelease = false;
 
-      # Set to true to build the release notes for the next release.
-      buildUnreleasedNotes = false;
-
       version = lib.fileContents ./.version + versionSuffix;
       versionSuffix =
         if officialRelease
@@ -405,7 +395,9 @@
             XDG_DATA_DIRS+=:$out/share
           '';
           nativeBuildInputs = attrs.nativeBuildInputs or []
-            ++ lib.optional stdenv.cc.isClang pkgs.buildPackages.bear
+            # TODO: Remove the darwin check once
+            # https://github.com/NixOS/nixpkgs/pull/291814 is available
+            ++ lib.optional (stdenv.cc.isClang && !stdenv.isDarwin) pkgs.buildPackages.bear
             ++ lib.optional (stdenv.cc.isClang && stdenv.hostPlatform == stdenv.buildPlatform) pkgs.buildPackages.clang-tools;
         });
         in
diff --git a/package.nix b/package.nix
index 1f895e301..20796a386 100644
--- a/package.nix
+++ b/package.nix
@@ -154,7 +154,7 @@ in {
     in
       fileset.toSource {
         root = ./.;
-        fileset = fileset.intersect baseFiles (fileset.unions ([
+        fileset = fileset.intersection baseFiles (fileset.unions ([
           # For configure
           ./.version
           ./configure.ac

From bbef03872b64da2b65fc7dd2040e1e3f5257bea7 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Th=C3=A9ophane=20Hufschmitt?=
 
Date: Tue, 27 Feb 2024 14:40:12 +0100
Subject: [PATCH 155/164] Bump the required daemon version for the impure-env
 test

The required version check was a bit too lenient, and
`nixpkgs#nixUnstable` was considered valid while it didn't have the fix.
---
 tests/functional/impure-env.sh | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tests/functional/impure-env.sh b/tests/functional/impure-env.sh
index d9e4a34a2..cfea4cae9 100644
--- a/tests/functional/impure-env.sh
+++ b/tests/functional/impure-env.sh
@@ -1,7 +1,7 @@
 source common.sh
 
 # Needs the config option 'impure-env' to work
-requireDaemonNewerThan "2.18.0pre20230816"
+requireDaemonNewerThan "2.19.0"
 
 enableFeatures "configurable-impure-env"
 restartDaemon

From a0cb75d96f76a3be48b9319e26d8ad78ef4e4525 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Th=C3=A9ophane=20Hufschmitt?=
 
Date: Tue, 27 Feb 2024 18:45:51 +0100
Subject: [PATCH 156/164] Disable bear on all the things with darwin as
 hostPlatform

Just `stdenv.isDarwin` isn't enough because it doesn't apply to the
build platform, which means that cross packages building from darwin to
another platform will have `isDarwin` set to false.
Replace it with `stdenv.buildPlatform.isDarwin`.
---
 flake.nix | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/flake.nix b/flake.nix
index 479ec05c0..baf81007f 100644
--- a/flake.nix
+++ b/flake.nix
@@ -397,7 +397,7 @@
           nativeBuildInputs = attrs.nativeBuildInputs or []
             # TODO: Remove the darwin check once
             # https://github.com/NixOS/nixpkgs/pull/291814 is available
-            ++ lib.optional (stdenv.cc.isClang && !stdenv.isDarwin) pkgs.buildPackages.bear
+            ++ lib.optional (stdenv.cc.isClang && !stdenv.buildPlatform.isDarwin) pkgs.buildPackages.bear
             ++ lib.optional (stdenv.cc.isClang && stdenv.hostPlatform == stdenv.buildPlatform) pkgs.buildPackages.clang-tools;
         });
         in

From cf3ef060ff7623c006e09ff51ba0f6d4e7bba704 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Th=C3=A9ophane=20Hufschmitt?=
 
Date: Tue, 27 Feb 2024 19:24:31 +0100
Subject: [PATCH 157/164] =?UTF-8?q?Disable=20the=20=E2=80=9Cstatic?=
 =?UTF-8?q?=E2=80=9D=20darwin=20stdenvs?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Don't evaluate, and probably not really useful (if at all)
---
 flake.nix | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/flake.nix b/flake.nix
index baf81007f..ceb572c0b 100644
--- a/flake.nix
+++ b/flake.nix
@@ -409,8 +409,9 @@
               (forAllStdenvs (stdenvName: makeShell pkgs pkgs.${stdenvName}));
           in
             (makeShells "native" nixpkgsFor.${system}.native) //
-            (makeShells "static" nixpkgsFor.${system}.static) //
-            (lib.genAttrs shellCrossSystems (crossSystem: let pkgs = nixpkgsFor.${system}.cross.${crossSystem}; in makeShell pkgs pkgs.stdenv)) //
+            (lib.optionalAttrs (!nixpkgsFor.${system}.native.stdenv.isDarwin)
+              (makeShells "static" nixpkgsFor.${system}.static)) //
+              (lib.genAttrs shellCrossSystems (crossSystem: let pkgs = nixpkgsFor.${system}.cross.${crossSystem}; in makeShell pkgs pkgs.stdenv)) //
             {
               default = self.devShells.${system}.native-stdenvPackages;
             }

From 945940f2efe273084319790f72a61b6b446d2882 Mon Sep 17 00:00:00 2001
From: Robert Hensing 
Date: Wed, 28 Feb 2024 02:31:19 +0100
Subject: [PATCH 158/164] nixpkgs: nixos-23.11-small -> release-23.11
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Flake lock file updates:

• Updated input 'nixpkgs':
    'github:NixOS/nixpkgs/4dd376f7943c64b522224a548d9cab5627b4d9d6' (2024-02-26)
  → 'github:NixOS/nixpkgs/b550fe4b4776908ac2a861124307045f8e717c8e' (2024-02-28)
---
 flake.lock | 9 +++++----
 flake.nix  | 4 +++-
 2 files changed, 8 insertions(+), 5 deletions(-)

diff --git a/flake.lock b/flake.lock
index a9022dbdc..3070b4a45 100644
--- a/flake.lock
+++ b/flake.lock
@@ -34,16 +34,17 @@
     },
     "nixpkgs": {
       "locked": {
-        "lastModified": 1708971694,
-        "narHash": "sha256-mBXQ65IrCJbNgTrj0+6xdXpD9/U31AWPKdwGlOufhtI=",
+        "lastModified": 1709083642,
+        "narHash": "sha256-7kkJQd4rZ+vFrzWu8sTRtta5D1kBG0LSRYAfhtmMlSo=",
         "owner": "NixOS",
         "repo": "nixpkgs",
-        "rev": "4dd376f7943c64b522224a548d9cab5627b4d9d6",
+        "rev": "b550fe4b4776908ac2a861124307045f8e717c8e",
+        "treeHash": "74223e48f0b0e94ecf419d793c67068cdfdf5ea0",
         "type": "github"
       },
       "original": {
         "owner": "NixOS",
-        "ref": "nixos-23.11-small",
+        "ref": "release-23.11",
         "repo": "nixpkgs",
         "type": "github"
       }
diff --git a/flake.nix b/flake.nix
index ceb572c0b..58d17bf00 100644
--- a/flake.nix
+++ b/flake.nix
@@ -1,7 +1,9 @@
 {
   description = "The purely functional package manager";
 
-  inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-23.11-small";
+  # TODO switch to nixos-23.11-small
+  #      https://nixpk.gs/pr-tracker.html?pr=291954
+  inputs.nixpkgs.url = "github:NixOS/nixpkgs/release-23.11";
   inputs.nixpkgs-regression.url = "github:NixOS/nixpkgs/215d4d0fd80ca5163643b03a33fde804a29cc1e2";
   inputs.flake-compat = { url = "github:edolstra/flake-compat"; flake = false; };
   inputs.libgit2 = { url = "github:libgit2/libgit2"; flake = false; };

From 8dc4b41c7f748d1615dbcf2c3438d1a76a5cb295 Mon Sep 17 00:00:00 2001
From: Robert Hensing 
Date: Wed, 28 Feb 2024 02:34:43 +0100
Subject: [PATCH 159/164] flake.lock: Strip out treeHash. Too soon...

I hate this.
We should have it, but for now we can't.
---
 flake.lock | 1 -
 1 file changed, 1 deletion(-)

diff --git a/flake.lock b/flake.lock
index 3070b4a45..bb2e400c0 100644
--- a/flake.lock
+++ b/flake.lock
@@ -39,7 +39,6 @@
         "owner": "NixOS",
         "repo": "nixpkgs",
         "rev": "b550fe4b4776908ac2a861124307045f8e717c8e",
-        "treeHash": "74223e48f0b0e94ecf419d793c67068cdfdf5ea0",
         "type": "github"
       },
       "original": {

From 6147d27afb7200b972338abf1be1523740773df9 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Th=C3=A9ophane=20Hufschmitt?=
 
Date: Wed, 28 Feb 2024 07:10:53 +0100
Subject: [PATCH 160/164] Bump the required daemon version for the git hashing
 tests

The required version check was a bit too lenient, and
`nixpkgs#nixUnstable` was considered valid while it didn't have the fix.
---
 tests/functional/git-hashing/common.sh | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tests/functional/git-hashing/common.sh b/tests/functional/git-hashing/common.sh
index 5de96e74f..572cea438 100644
--- a/tests/functional/git-hashing/common.sh
+++ b/tests/functional/git-hashing/common.sh
@@ -4,7 +4,7 @@ clearStore
 clearCache
 
 # Need backend to support git-hashing too
-requireDaemonNewerThan "2.18.0pre20230908"
+requireDaemonNewerThan "2.19"
 
 enableFeatures "git-hashing"
 

From da90be789d8074880d95405a439b446c60947506 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Th=C3=A9ophane=20Hufschmitt?=
 
Date: Wed, 28 Feb 2024 08:00:17 +0100
Subject: [PATCH 161/164] Fix a too smart implicit cast

Apparently gcc is able to implicitly cast from `FileIngestionMethod` to
`ContentAddressMethod`, but clang isn't. So explicit the cast
---
 src/libstore/local-store.cc | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc
index 56f8c5dd8..1bbeaa912 100644
--- a/src/libstore/local-store.cc
+++ b/src/libstore/local-store.cc
@@ -1202,7 +1202,7 @@ StorePath LocalStore::addToStoreFromDump(
     Path tempDir;
     AutoCloseFD tempDirFd;
 
-    bool methodsMatch = (FileIngestionMethod) dumpMethod == hashMethod;
+    bool methodsMatch = ContentAddressMethod(FileIngestionMethod(dumpMethod)) == hashMethod;
 
     /* If the methods don't match, our streaming hash of the dump is the
        wrong sort, and we need to rehash. */

From f6142cd0d1b248581adddbbc1056df00fe12eb3b Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Th=C3=A9ophane=20Hufschmitt?=
 
Date: Wed, 28 Feb 2024 08:02:49 +0100
Subject: [PATCH 162/164] unset `NIX_HARDENING_ENABLE` in fast build mode

`NIX_HARDENING_ENABLE` causes `_FORTIFY_SOURCE` to be defined.
This isn't compatible with `-O0`, and the compiler will happily remind
us about it at every call, spamming the terminal with warnings and stack
traces.

We don't really care about hardening in that case, so just disable it if
we pass `OPTIMIZE=0`.
---
 Makefile  | 1 +
 flake.nix | 1 +
 2 files changed, 2 insertions(+)

diff --git a/Makefile b/Makefile
index 745e60aa5..c3dc83c77 100644
--- a/Makefile
+++ b/Makefile
@@ -68,6 +68,7 @@ ifeq ($(OPTIMIZE), 1)
   GLOBAL_LDFLAGS += $(CXXLTO)
 else
   GLOBAL_CXXFLAGS += -O0 -U_FORTIFY_SOURCE
+  unexport NIX_HARDENING_ENABLE
 endif
 
 include mk/platform.mk
diff --git a/flake.nix b/flake.nix
index 58d17bf00..42aaace67 100644
--- a/flake.nix
+++ b/flake.nix
@@ -396,6 +396,7 @@
             # Make bash completion work.
             XDG_DATA_DIRS+=:$out/share
           '';
+
           nativeBuildInputs = attrs.nativeBuildInputs or []
             # TODO: Remove the darwin check once
             # https://github.com/NixOS/nixpkgs/pull/291814 is available

From 4d769e7a76bee1e8c967f20d72eb5f3a357577ee Mon Sep 17 00:00:00 2001
From: Robert Hensing 
Date: Wed, 28 Feb 2024 19:54:17 +0100
Subject: [PATCH 163/164] actions docker_push_image: Update nix 2.13.3 ->
 2.20.3

---
 .github/workflows/ci.yml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 38126dd68..2aa3a3300 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -64,7 +64,7 @@ jobs:
     - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
     - uses: cachix/install-nix-action@v25
       with:
-        install_url: https://releases.nixos.org/nix/nix-2.13.3/install
+        install_url: https://releases.nixos.org/nix/nix-2.20.3/install
     - uses: cachix/cachix-action@v14
       with:
         name: '${{ env.CACHIX_NAME }}'
@@ -116,7 +116,7 @@ jobs:
         fetch-depth: 0
     - uses: cachix/install-nix-action@v25
       with:
-        install_url: https://releases.nixos.org/nix/nix-2.13.3/install
+        install_url: https://releases.nixos.org/nix/nix-2.20.3/install
     - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
     - run: echo NIX_VERSION="$(nix --experimental-features 'nix-command flakes' eval .\#default.version | tr -d \")" >> $GITHUB_ENV
     - uses: cachix/cachix-action@v14

From 14adff17113dd2d4c0eb6c540a74308019829866 Mon Sep 17 00:00:00 2001
From: Bob van der Linden 
Date: Mon, 26 Feb 2024 21:09:17 +0100
Subject: [PATCH 164/164] profile install: skip and warn on installing package
 twice

---
 src/nix/profile.cc              | 21 ++++++++++++++++++++-
 tests/functional/nix-profile.sh |  3 +++
 2 files changed, 23 insertions(+), 1 deletion(-)

diff --git a/src/nix/profile.cc b/src/nix/profile.cc
index e04ae008d..d39a24d36 100644
--- a/src/nix/profile.cc
+++ b/src/nix/profile.cc
@@ -395,7 +395,26 @@ struct CmdProfileInstall : InstallablesCommand, MixDefaultProfile
 
             element.updateStorePaths(getEvalStore(), store, res);
 
-            manifest.addElement(std::move(element));
+            auto elementName = getNameFromElement(element);
+
+            // Check if the element already exists.
+            auto existingPair = manifest.elements.find(elementName);
+            if (existingPair != manifest.elements.end()) {
+                auto existingElement = existingPair->second;
+                auto existingSource = existingElement.source;
+                auto elementSource = element.source;
+                if (existingSource
+                    && elementSource
+                    && existingElement.priority == element.priority
+                    && existingSource->originalRef == elementSource->originalRef
+                    && existingSource->attrPath == elementSource->attrPath
+                    ) {
+                    warn("'%s' is already installed", elementName);
+                    continue;
+                }
+            }
+
+            manifest.addElement(elementName, std::move(element));
         }
 
         try {
diff --git a/tests/functional/nix-profile.sh b/tests/functional/nix-profile.sh
index 88b713d53..ee93251e9 100644
--- a/tests/functional/nix-profile.sh
+++ b/tests/functional/nix-profile.sh
@@ -64,6 +64,9 @@ nix profile install $flake1Dir
 [[ $($TEST_HOME/.local/state/nix/profile/bin/hello) = "Hello World" ]]
 unset NIX_CONFIG
 
+# Test conflicting package install.
+nix profile install $flake1Dir 2>&1 | grep "warning: 'flake1' is already installed"
+
 # Test upgrading a package.
 printf NixOS > $flake1Dir/who
 printf 2.0 > $flake1Dir/version