Merge remote-tracking branch 'upstream/master' into functional-tests-on-nixos

Robert Hensing 2024-06-24 18:07:21 +02:00
commit 602c444411
83 changed files with 1388 additions and 401 deletions

View File

@ -302,6 +302,12 @@ Derivations can declare some infrequently used optional attributes.
(associative) arrays. For example, the attribute `hardening.format = true` (associative) arrays. For example, the attribute `hardening.format = true`
ends up as the Bash associative array element `${hardening[format]}`. ends up as the Bash associative array element `${hardening[format]}`.
> **Warning**
>
> If set to `true`, other advanced attributes such as [`allowedReferences`](#adv-attr-allowedReferences), [`allowedRequisites`](#adv-attr-allowedRequisites),
> [`disallowedReferences`](#adv-attr-disallowedReferences), [`disallowedRequisites`](#adv-attr-disallowedRequisites), `maxSize`, and `maxClosureSize`
> will have no effect.
- [`outputChecks`]{#adv-attr-outputChecks}\ - [`outputChecks`]{#adv-attr-outputChecks}\
When using [structured attributes](#adv-attr-structuredAttrs), the `outputChecks` When using [structured attributes](#adv-attr-structuredAttrs), the `outputChecks`
attribute allows defining checks per-output. attribute allows defining checks per-output.

View File

@ -447,7 +447,6 @@
''^tests/unit/libfetchers/public-key\.cc'' ''^tests/unit/libfetchers/public-key\.cc''
''^tests/unit/libstore-support/tests/derived-path\.cc'' ''^tests/unit/libstore-support/tests/derived-path\.cc''
''^tests/unit/libstore-support/tests/derived-path\.hh'' ''^tests/unit/libstore-support/tests/derived-path\.hh''
''^tests/unit/libstore-support/tests/libstore\.hh''
''^tests/unit/libstore-support/tests/nix_api_store\.hh'' ''^tests/unit/libstore-support/tests/nix_api_store\.hh''
''^tests/unit/libstore-support/tests/outputs-spec\.cc'' ''^tests/unit/libstore-support/tests/outputs-spec\.cc''
''^tests/unit/libstore-support/tests/outputs-spec\.hh'' ''^tests/unit/libstore-support/tests/outputs-spec\.hh''
@ -522,6 +521,7 @@
''^tests/functional/ca/repl\.sh$'' ''^tests/functional/ca/repl\.sh$''
''^tests/functional/ca/selfref-gc\.sh$'' ''^tests/functional/ca/selfref-gc\.sh$''
''^tests/functional/ca/why-depends\.sh$'' ''^tests/functional/ca/why-depends\.sh$''
''^tests/functional/characterisation-test-infra\.sh$''
''^tests/functional/check\.sh$'' ''^tests/functional/check\.sh$''
''^tests/functional/common/vars-and-functions\.sh$'' ''^tests/functional/common/vars-and-functions\.sh$''
''^tests/functional/completions\.sh$'' ''^tests/functional/completions\.sh$''
@ -579,9 +579,7 @@
''^tests/functional/impure-env\.sh$'' ''^tests/functional/impure-env\.sh$''
''^tests/functional/impure-eval\.sh$'' ''^tests/functional/impure-eval\.sh$''
''^tests/functional/install-darwin\.sh$'' ''^tests/functional/install-darwin\.sh$''
''^tests/functional/lang-test-infra\.sh$''
''^tests/functional/lang\.sh$'' ''^tests/functional/lang\.sh$''
''^tests/functional/lang/framework\.sh$''
''^tests/functional/legacy-ssh-store\.sh$'' ''^tests/functional/legacy-ssh-store\.sh$''
''^tests/functional/linux-sandbox\.sh$'' ''^tests/functional/linux-sandbox\.sh$''
''^tests/functional/local-overlay-store/add-lower-inner\.sh$'' ''^tests/functional/local-overlay-store/add-lower-inner\.sh$''

View File

@ -240,6 +240,12 @@ EvalState::EvalState(
, sRight(symbols.create("right")) , sRight(symbols.create("right"))
, sWrong(symbols.create("wrong")) , sWrong(symbols.create("wrong"))
, sStructuredAttrs(symbols.create("__structuredAttrs")) , sStructuredAttrs(symbols.create("__structuredAttrs"))
, sAllowedReferences(symbols.create("allowedReferences"))
, sAllowedRequisites(symbols.create("allowedRequisites"))
, sDisallowedReferences(symbols.create("disallowedReferences"))
, sDisallowedRequisites(symbols.create("disallowedRequisites"))
, sMaxSize(symbols.create("maxSize"))
, sMaxClosureSize(symbols.create("maxClosureSize"))
, sBuilder(symbols.create("builder")) , sBuilder(symbols.create("builder"))
, sArgs(symbols.create("args")) , sArgs(symbols.create("args"))
, sContentAddressed(symbols.create("__contentAddressed")) , sContentAddressed(symbols.create("__contentAddressed"))
@ -2297,7 +2303,7 @@ StorePath EvalState::copyPathToStore(NixStringContext & context, const SourcePat
path.resolveSymlinks(), path.resolveSymlinks(),
settings.readOnlyMode ? FetchMode::DryRun : FetchMode::Copy, settings.readOnlyMode ? FetchMode::DryRun : FetchMode::Copy,
path.baseName(), path.baseName(),
FileIngestionMethod::Recursive, ContentAddressMethod::Raw::NixArchive,
nullptr, nullptr,
repair); repair);
allowPath(dstPath); allowPath(dstPath);

View File

@ -168,7 +168,10 @@ public:
const Symbol sWith, sOutPath, sDrvPath, sType, sMeta, sName, sValue, const Symbol sWith, sOutPath, sDrvPath, sType, sMeta, sName, sValue,
sSystem, sOverrides, sOutputs, sOutputName, sIgnoreNulls, sSystem, sOverrides, sOutputs, sOutputName, sIgnoreNulls,
sFile, sLine, sColumn, sFunctor, sToString, sFile, sLine, sColumn, sFunctor, sToString,
sRight, sWrong, sStructuredAttrs, sBuilder, sArgs, sRight, sWrong, sStructuredAttrs,
sAllowedReferences, sAllowedRequisites, sDisallowedReferences, sDisallowedRequisites,
sMaxSize, sMaxClosureSize,
sBuilder, sArgs,
sContentAddressed, sImpure, sContentAddressed, sImpure,
sOutputHash, sOutputHashAlgo, sOutputHashMode, sOutputHash, sOutputHashAlgo, sOutputHashMode,
sRecurseForDerivations, sRecurseForDerivations,

View File

@ -1209,7 +1209,7 @@ static void derivationStrictInternal(
auto handleHashMode = [&](const std::string_view s) { auto handleHashMode = [&](const std::string_view s) {
if (s == "recursive") { if (s == "recursive") {
// back compat, new name is "nar" // back compat, new name is "nar"
ingestionMethod = FileIngestionMethod::Recursive; ingestionMethod = ContentAddressMethod::Raw::NixArchive;
} else try { } else try {
ingestionMethod = ContentAddressMethod::parse(s); ingestionMethod = ContentAddressMethod::parse(s);
} catch (UsageError &) { } catch (UsageError &) {
@ -1217,9 +1217,9 @@ static void derivationStrictInternal(
"invalid value '%s' for 'outputHashMode' attribute", s "invalid value '%s' for 'outputHashMode' attribute", s
).atPos(v).debugThrow(); ).atPos(v).debugThrow();
} }
if (ingestionMethod == TextIngestionMethod {}) if (ingestionMethod == ContentAddressMethod::Raw::Text)
experimentalFeatureSettings.require(Xp::DynamicDerivations); experimentalFeatureSettings.require(Xp::DynamicDerivations);
if (ingestionMethod == FileIngestionMethod::Git) if (ingestionMethod == ContentAddressMethod::Raw::Git)
experimentalFeatureSettings.require(Xp::GitHashing); experimentalFeatureSettings.require(Xp::GitHashing);
}; };
@ -1308,6 +1308,20 @@ static void derivationStrictInternal(
handleOutputs(ss); handleOutputs(ss);
} }
if (i->name == state.sAllowedReferences)
warn("In a derivation named '%s', 'structuredAttrs' disables the effect of the derivation attribute 'allowedReferences'; use 'outputChecks.<output>.allowedReferences' instead", drvName);
if (i->name == state.sAllowedRequisites)
warn("In a derivation named '%s', 'structuredAttrs' disables the effect of the derivation attribute 'allowedRequisites'; use 'outputChecks.<output>.allowedRequisites' instead", drvName);
if (i->name == state.sDisallowedReferences)
warn("In a derivation named '%s', 'structuredAttrs' disables the effect of the derivation attribute 'disallowedReferences'; use 'outputChecks.<output>.disallowedReferences' instead", drvName);
if (i->name == state.sDisallowedRequisites)
warn("In a derivation named '%s', 'structuredAttrs' disables the effect of the derivation attribute 'disallowedRequisites'; use 'outputChecks.<output>.disallowedRequisites' instead", drvName);
if (i->name == state.sMaxSize)
warn("In a derivation named '%s', 'structuredAttrs' disables the effect of the derivation attribute 'maxSize'; use 'outputChecks.<output>.maxSize' instead", drvName);
if (i->name == state.sMaxClosureSize)
warn("In a derivation named '%s', 'structuredAttrs' disables the effect of the derivation attribute 'maxClosureSize'; use 'outputChecks.<output>.maxClosureSize' instead", drvName);
} else { } else {
auto s = state.coerceToString(pos, *i->value, context, context_below, true).toOwned(); auto s = state.coerceToString(pos, *i->value, context, context_below, true).toOwned();
drv.env.emplace(key, s); drv.env.emplace(key, s);
@ -1377,7 +1391,7 @@ static void derivationStrictInternal(
/* Check whether the derivation name is valid. */ /* Check whether the derivation name is valid. */
if (isDerivation(drvName) && if (isDerivation(drvName) &&
!(ingestionMethod == ContentAddressMethod { TextIngestionMethod { } } && !(ingestionMethod == ContentAddressMethod::Raw::Text &&
outputs.size() == 1 && outputs.size() == 1 &&
*(outputs.begin()) == "out")) *(outputs.begin()) == "out"))
{ {
@ -1399,7 +1413,7 @@ static void derivationStrictInternal(
auto h = newHashAllowEmpty(*outputHash, outputHashAlgo); auto h = newHashAllowEmpty(*outputHash, outputHashAlgo);
auto method = ingestionMethod.value_or(FileIngestionMethod::Flat); auto method = ingestionMethod.value_or(ContentAddressMethod::Raw::Flat);
DerivationOutput::CAFixed dof { DerivationOutput::CAFixed dof {
.ca = ContentAddress { .ca = ContentAddress {
@ -1418,7 +1432,7 @@ static void derivationStrictInternal(
.atPos(v).debugThrow(); .atPos(v).debugThrow();
auto ha = outputHashAlgo.value_or(HashAlgorithm::SHA256); auto ha = outputHashAlgo.value_or(HashAlgorithm::SHA256);
auto method = ingestionMethod.value_or(FileIngestionMethod::Recursive); auto method = ingestionMethod.value_or(ContentAddressMethod::Raw::NixArchive);
for (auto & i : outputs) { for (auto & i : outputs) {
drv.env[i] = hashPlaceholder(i); drv.env[i] = hashPlaceholder(i);
@ -2194,7 +2208,7 @@ static void prim_toFile(EvalState & state, const PosIdx pos, Value * * args, Val
}) })
: ({ : ({
StringSource s { contents }; StringSource s { contents };
state.store->addToStoreFromDump(s, name, FileSerialisationMethod::Flat, TextIngestionMethod {}, HashAlgorithm::SHA256, refs, state.repair); state.store->addToStoreFromDump(s, name, FileSerialisationMethod::Flat, ContentAddressMethod::Raw::Text, HashAlgorithm::SHA256, refs, state.repair);
}); });
/* Note: we don't need to add `context' to the context of the /* Note: we don't need to add `context' to the context of the
@ -2377,7 +2391,7 @@ static void prim_filterSource(EvalState & state, const PosIdx pos, Value * * arg
"while evaluating the second argument (the path to filter) passed to 'builtins.filterSource'"); "while evaluating the second argument (the path to filter) passed to 'builtins.filterSource'");
state.forceFunction(*args[0], pos, "while evaluating the first argument passed to builtins.filterSource"); state.forceFunction(*args[0], pos, "while evaluating the first argument passed to builtins.filterSource");
addPath(state, pos, path.baseName(), path, args[0], FileIngestionMethod::Recursive, std::nullopt, v, context); addPath(state, pos, path.baseName(), path, args[0], ContentAddressMethod::Raw::NixArchive, std::nullopt, v, context);
} }
static RegisterPrimOp primop_filterSource({ static RegisterPrimOp primop_filterSource({
@ -2440,7 +2454,7 @@ static void prim_path(EvalState & state, const PosIdx pos, Value * * args, Value
std::optional<SourcePath> path; std::optional<SourcePath> path;
std::string name; std::string name;
Value * filterFun = nullptr; Value * filterFun = nullptr;
ContentAddressMethod method = FileIngestionMethod::Recursive; auto method = ContentAddressMethod::Raw::NixArchive;
std::optional<Hash> expectedHash; std::optional<Hash> expectedHash;
NixStringContext context; NixStringContext context;
@ -2456,8 +2470,8 @@ static void prim_path(EvalState & state, const PosIdx pos, Value * * args, Value
state.forceFunction(*(filterFun = attr.value), attr.pos, "while evaluating the `filter` parameter passed to builtins.path"); state.forceFunction(*(filterFun = attr.value), attr.pos, "while evaluating the `filter` parameter passed to builtins.path");
else if (n == "recursive") else if (n == "recursive")
method = state.forceBool(*attr.value, attr.pos, "while evaluating the `recursive` attribute passed to builtins.path") method = state.forceBool(*attr.value, attr.pos, "while evaluating the `recursive` attribute passed to builtins.path")
? FileIngestionMethod::Recursive ? ContentAddressMethod::Raw::NixArchive
: FileIngestionMethod::Flat; : ContentAddressMethod::Raw::Flat;
else if (n == "sha256") else if (n == "sha256")
expectedHash = newHashAllowEmpty(state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the `sha256` attribute passed to builtins.path"), HashAlgorithm::SHA256); expectedHash = newHashAllowEmpty(state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the `sha256` attribute passed to builtins.path"), HashAlgorithm::SHA256);
else else

View File

@ -468,7 +468,7 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v
auto expectedPath = state.store->makeFixedOutputPath( auto expectedPath = state.store->makeFixedOutputPath(
name, name,
FixedOutputInfo { FixedOutputInfo {
.method = unpack ? FileIngestionMethod::Recursive : FileIngestionMethod::Flat, .method = unpack ? FileIngestionMethod::NixArchive : FileIngestionMethod::Flat,
.hash = *expectedHash, .hash = *expectedHash,
.references = {} .references = {}
}); });

View File

@ -18,7 +18,7 @@ StorePath fetchToStore(
const SourcePath & path, const SourcePath & path,
FetchMode mode, FetchMode mode,
std::string_view name = "source", std::string_view name = "source",
ContentAddressMethod method = FileIngestionMethod::Recursive, ContentAddressMethod method = ContentAddressMethod::Raw::NixArchive,
PathFilter * filter = nullptr, PathFilter * filter = nullptr,
RepairFlag repair = NoRepair); RepairFlag repair = NoRepair);

View File

@ -305,7 +305,7 @@ StorePath Input::computeStorePath(Store & store) const
if (!narHash) if (!narHash)
throw Error("cannot compute store path for unlocked input '%s'", to_string()); throw Error("cannot compute store path for unlocked input '%s'", to_string());
return store.makeFixedOutputPath(getName(), FixedOutputInfo { return store.makeFixedOutputPath(getName(), FixedOutputInfo {
.method = FileIngestionMethod::Recursive, .method = FileIngestionMethod::NixArchive,
.hash = *narHash, .hash = *narHash,
.references = {}, .references = {},
}); });

View File

@ -433,7 +433,7 @@ struct GitLabInputScheme : GitArchiveInputScheme
store->toRealPath( store->toRealPath(
downloadFile(store, url, "source", headers).storePath))); downloadFile(store, url, "source", headers).storePath)));
if (json.is_array() && json.size() == 1 && json[0]["id"] != nullptr) { if (json.is_array() && json.size() >= 1 && json[0]["id"] != nullptr) {
return RefInfo { return RefInfo {
.rev = Hash::parseAny(std::string(json[0]["id"]), HashAlgorithm::SHA1) .rev = Hash::parseAny(std::string(json[0]["id"]), HashAlgorithm::SHA1)
}; };

View File

@ -213,7 +213,7 @@ struct MercurialInputScheme : InputScheme
auto storePath = store->addToStore( auto storePath = store->addToStore(
input.getName(), input.getName(),
{getFSSourceAccessor(), CanonPath(actualPath)}, {getFSSourceAccessor(), CanonPath(actualPath)},
FileIngestionMethod::Recursive, HashAlgorithm::SHA256, {}, ContentAddressMethod::Raw::NixArchive, HashAlgorithm::SHA256, {},
filter); filter);
return storePath; return storePath;

View File

@ -322,7 +322,7 @@ StorePath BinaryCacheStore::addToStoreFromDump(
if (static_cast<FileIngestionMethod>(dumpMethod) == hashMethod.getFileIngestionMethod()) if (static_cast<FileIngestionMethod>(dumpMethod) == hashMethod.getFileIngestionMethod())
caHash = hashString(HashAlgorithm::SHA256, dump2.s); caHash = hashString(HashAlgorithm::SHA256, dump2.s);
switch (dumpMethod) { switch (dumpMethod) {
case FileSerialisationMethod::Recursive: case FileSerialisationMethod::NixArchive:
// The dump is already NAR in this case, just use it. // The dump is already NAR in this case, just use it.
nar = dump2.s; nar = dump2.s;
break; break;
@ -339,7 +339,7 @@ StorePath BinaryCacheStore::addToStoreFromDump(
} else { } else {
// Otherwise, we have to do the same hashing as NAR so our single // Otherwise, we have to do the same hashing as NAR so our single
// hash will suffice for both purposes. // hash will suffice for both purposes.
if (dumpMethod != FileSerialisationMethod::Recursive || hashAlgo != HashAlgorithm::SHA256) if (dumpMethod != FileSerialisationMethod::NixArchive || hashAlgo != HashAlgorithm::SHA256)
unsupported("addToStoreFromDump"); unsupported("addToStoreFromDump");
} }
StringSource narDump { nar }; StringSource narDump { nar };

View File

@ -530,7 +530,7 @@ bool Worker::pathContentsGood(const StorePath & path)
else { else {
auto current = hashPath( auto current = hashPath(
{store.getFSAccessor(), CanonPath(store.printStorePath(path))}, {store.getFSAccessor(), CanonPath(store.printStorePath(path))},
FileIngestionMethod::Recursive, info->narHash.algo).first; FileIngestionMethod::NixArchive, info->narHash.algo).first;
Hash nullHash(HashAlgorithm::SHA256); Hash nullHash(HashAlgorithm::SHA256);
res = info->narHash == nullHash || info->narHash == current; res = info->narHash == nullHash || info->narHash == current;
} }

View File

@ -8,98 +8,136 @@ std::string_view makeFileIngestionPrefix(FileIngestionMethod m)
{ {
switch (m) { switch (m) {
case FileIngestionMethod::Flat: case FileIngestionMethod::Flat:
// Not prefixed for back compat
return ""; return "";
case FileIngestionMethod::Recursive: case FileIngestionMethod::NixArchive:
return "r:"; return "r:";
case FileIngestionMethod::Git: case FileIngestionMethod::Git:
experimentalFeatureSettings.require(Xp::GitHashing); experimentalFeatureSettings.require(Xp::GitHashing);
return "git:"; return "git:";
default: default:
throw Error("impossible, caught both cases"); assert(false);
} }
} }
std::string_view ContentAddressMethod::render() const std::string_view ContentAddressMethod::render() const
{ {
return std::visit(overloaded { switch (raw) {
[](TextIngestionMethod) -> std::string_view { return "text"; }, case ContentAddressMethod::Raw::Text:
[](FileIngestionMethod m2) { return "text";
/* Not prefixed for back compat with things that couldn't produce text before. */ case ContentAddressMethod::Raw::Flat:
return renderFileIngestionMethod(m2); case ContentAddressMethod::Raw::NixArchive:
}, case ContentAddressMethod::Raw::Git:
}, raw); return renderFileIngestionMethod(getFileIngestionMethod());
default:
assert(false);
}
}
/**
* **Not surjective**
*
* This is not exposed because `FileIngestionMethod::Flat` maps to
* `ContentAddressMethod::Raw::Flat` and
* `ContentAddressMethod::Raw::Text` alike. We can thus only safely use
* this when the latter is ruled out (e.g. because it is already
* handled).
*/
static ContentAddressMethod fileIngestionMethodToContentAddressMethod(FileIngestionMethod m)
{
switch (m) {
case FileIngestionMethod::Flat:
return ContentAddressMethod::Raw::Flat;
case FileIngestionMethod::NixArchive:
return ContentAddressMethod::Raw::NixArchive;
case FileIngestionMethod::Git:
return ContentAddressMethod::Raw::Git;
default:
assert(false);
}
} }
ContentAddressMethod ContentAddressMethod::parse(std::string_view m) ContentAddressMethod ContentAddressMethod::parse(std::string_view m)
{ {
if (m == "text") if (m == "text")
return TextIngestionMethod {}; return ContentAddressMethod::Raw::Text;
else else
return parseFileIngestionMethod(m); return fileIngestionMethodToContentAddressMethod(
parseFileIngestionMethod(m));
} }
std::string_view ContentAddressMethod::renderPrefix() const std::string_view ContentAddressMethod::renderPrefix() const
{ {
return std::visit(overloaded { switch (raw) {
[](TextIngestionMethod) -> std::string_view { return "text:"; }, case ContentAddressMethod::Raw::Text:
[](FileIngestionMethod m2) { return "text:";
/* Not prefixed for back compat with things that couldn't produce text before. */ case ContentAddressMethod::Raw::Flat:
return makeFileIngestionPrefix(m2); case ContentAddressMethod::Raw::NixArchive:
}, case ContentAddressMethod::Raw::Git:
}, raw); return makeFileIngestionPrefix(getFileIngestionMethod());
default:
assert(false);
}
} }
ContentAddressMethod ContentAddressMethod::parsePrefix(std::string_view & m) ContentAddressMethod ContentAddressMethod::parsePrefix(std::string_view & m)
{ {
if (splitPrefix(m, "r:")) { if (splitPrefix(m, "r:")) {
return FileIngestionMethod::Recursive; return ContentAddressMethod::Raw::NixArchive;
} }
else if (splitPrefix(m, "git:")) { else if (splitPrefix(m, "git:")) {
experimentalFeatureSettings.require(Xp::GitHashing); experimentalFeatureSettings.require(Xp::GitHashing);
return FileIngestionMethod::Git; return ContentAddressMethod::Raw::Git;
} }
else if (splitPrefix(m, "text:")) { else if (splitPrefix(m, "text:")) {
return TextIngestionMethod {}; return ContentAddressMethod::Raw::Text;
}
return ContentAddressMethod::Raw::Flat;
}
/**
* This is slightly more mindful of forward compat in that it uses `fixed:`
* rather than just doing a raw empty prefix or `r:`, which doesn't "save room"
* for future changes very well.
*/
static std::string renderPrefixModern(const ContentAddressMethod & ca)
{
switch (ca.raw) {
case ContentAddressMethod::Raw::Text:
return "text:";
case ContentAddressMethod::Raw::Flat:
case ContentAddressMethod::Raw::NixArchive:
case ContentAddressMethod::Raw::Git:
return "fixed:" + makeFileIngestionPrefix(ca.getFileIngestionMethod());
default:
assert(false);
} }
return FileIngestionMethod::Flat;
} }
std::string ContentAddressMethod::renderWithAlgo(HashAlgorithm ha) const std::string ContentAddressMethod::renderWithAlgo(HashAlgorithm ha) const
{ {
return std::visit(overloaded { return renderPrefixModern(*this) + printHashAlgo(ha);
[&](const TextIngestionMethod & th) {
return std::string{"text:"} + printHashAlgo(ha);
},
[&](const FileIngestionMethod & fim) {
return "fixed:" + makeFileIngestionPrefix(fim) + printHashAlgo(ha);
}
}, raw);
} }
FileIngestionMethod ContentAddressMethod::getFileIngestionMethod() const FileIngestionMethod ContentAddressMethod::getFileIngestionMethod() const
{ {
return std::visit(overloaded { switch (raw) {
[&](const TextIngestionMethod & th) { case ContentAddressMethod::Raw::Flat:
return FileIngestionMethod::Flat; return FileIngestionMethod::Flat;
}, case ContentAddressMethod::Raw::NixArchive:
[&](const FileIngestionMethod & fim) { return FileIngestionMethod::NixArchive;
return fim; case ContentAddressMethod::Raw::Git:
} return FileIngestionMethod::Git;
}, raw); case ContentAddressMethod::Raw::Text:
return FileIngestionMethod::Flat;
default:
assert(false);
}
} }
std::string ContentAddress::render() const std::string ContentAddress::render() const
{ {
return std::visit(overloaded { return renderPrefixModern(method) + this->hash.to_string(HashFormat::Nix32, true);
[](const TextIngestionMethod &) -> std::string {
return "text:";
},
[](const FileIngestionMethod & method) {
return "fixed:"
+ makeFileIngestionPrefix(method);
},
}, method.raw)
+ this->hash.to_string(HashFormat::Nix32, true);
} }
/** /**
@ -130,17 +168,17 @@ static std::pair<ContentAddressMethod, HashAlgorithm> parseContentAddressMethodP
// No parsing of the ingestion method, "text" only supports flat. // No parsing of the ingestion method, "text" only supports flat.
HashAlgorithm hashAlgo = parseHashAlgorithm_(); HashAlgorithm hashAlgo = parseHashAlgorithm_();
return { return {
TextIngestionMethod {}, ContentAddressMethod::Raw::Text,
std::move(hashAlgo), std::move(hashAlgo),
}; };
} else if (prefix == "fixed") { } else if (prefix == "fixed") {
// Parse method // Parse method
auto method = FileIngestionMethod::Flat; auto method = ContentAddressMethod::Raw::Flat;
if (splitPrefix(rest, "r:")) if (splitPrefix(rest, "r:"))
method = FileIngestionMethod::Recursive; method = ContentAddressMethod::Raw::NixArchive;
else if (splitPrefix(rest, "git:")) { else if (splitPrefix(rest, "git:")) {
experimentalFeatureSettings.require(Xp::GitHashing); experimentalFeatureSettings.require(Xp::GitHashing);
method = FileIngestionMethod::Git; method = ContentAddressMethod::Raw::Git;
} }
HashAlgorithm hashAlgo = parseHashAlgorithm_(); HashAlgorithm hashAlgo = parseHashAlgorithm_();
return { return {
@ -201,57 +239,58 @@ size_t StoreReferences::size() const
ContentAddressWithReferences ContentAddressWithReferences::withoutRefs(const ContentAddress & ca) noexcept ContentAddressWithReferences ContentAddressWithReferences::withoutRefs(const ContentAddress & ca) noexcept
{ {
return std::visit(overloaded { switch (ca.method.raw) {
[&](const TextIngestionMethod &) -> ContentAddressWithReferences { case ContentAddressMethod::Raw::Text:
return TextInfo { return TextInfo {
.hash = ca.hash, .hash = ca.hash,
.references = {}, .references = {},
}; };
}, case ContentAddressMethod::Raw::Flat:
[&](const FileIngestionMethod & method) -> ContentAddressWithReferences { case ContentAddressMethod::Raw::NixArchive:
return FixedOutputInfo { case ContentAddressMethod::Raw::Git:
.method = method, return FixedOutputInfo {
.hash = ca.hash, .method = ca.method.getFileIngestionMethod(),
.references = {}, .hash = ca.hash,
}; .references = {},
}, };
}, ca.method.raw); default:
assert(false);
}
} }
ContentAddressWithReferences ContentAddressWithReferences::fromParts( ContentAddressWithReferences ContentAddressWithReferences::fromParts(
ContentAddressMethod method, Hash hash, StoreReferences refs) ContentAddressMethod method, Hash hash, StoreReferences refs)
{ {
return std::visit(overloaded { switch (method.raw) {
[&](TextIngestionMethod _) -> ContentAddressWithReferences { case ContentAddressMethod::Raw::Text:
if (refs.self) if (refs.self)
throw Error("self-reference not allowed with text hashing"); throw Error("self-reference not allowed with text hashing");
return ContentAddressWithReferences { return TextInfo {
TextInfo { .hash = std::move(hash),
.hash = std::move(hash), .references = std::move(refs.others),
.references = std::move(refs.others), };
} case ContentAddressMethod::Raw::Flat:
}; case ContentAddressMethod::Raw::NixArchive:
}, case ContentAddressMethod::Raw::Git:
[&](FileIngestionMethod m2) -> ContentAddressWithReferences { return FixedOutputInfo {
return ContentAddressWithReferences { .method = method.getFileIngestionMethod(),
FixedOutputInfo { .hash = std::move(hash),
.method = m2, .references = std::move(refs),
.hash = std::move(hash), };
.references = std::move(refs), default:
} assert(false);
}; }
},
}, method.raw);
} }
ContentAddressMethod ContentAddressWithReferences::getMethod() const ContentAddressMethod ContentAddressWithReferences::getMethod() const
{ {
return std::visit(overloaded { return std::visit(overloaded {
[](const TextInfo & th) -> ContentAddressMethod { [](const TextInfo & th) -> ContentAddressMethod {
return TextIngestionMethod {}; return ContentAddressMethod::Raw::Text;
}, },
[](const FixedOutputInfo & fsh) -> ContentAddressMethod { [](const FixedOutputInfo & fsh) -> ContentAddressMethod {
return fsh.method; return fileIngestionMethodToContentAddressMethod(
fsh.method);
}, },
}, raw); }, raw);
} }
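
To make the prefix scheme above easier to see at a glance, here is a small standalone sketch (illustrative only, not Nix code): the `Method` enum and the two helpers mirror the behaviour of `makeFileIngestionPrefix`, `renderPrefix`, and `renderWithAlgo` from this file, producing the legacy per-method prefixes and the `text:`/`fixed:` forms used together with an explicit hash algorithm.

```cpp
// Standalone sketch of the content-address prefix scheme preserved by the
// refactor above; the enum and function names here are illustrative only.
#include <iostream>
#include <string>

enum class Method { Flat, NixArchive, Git, Text };

// Legacy per-method prefix, as in makeFileIngestionPrefix / renderPrefix.
std::string legacyPrefix(Method m)
{
    switch (m) {
    case Method::Flat:       return "";       // unprefixed for back compat
    case Method::NixArchive: return "r:";     // historically called "recursive"
    case Method::Git:        return "git:";   // experimental git-hashing
    case Method::Text:       return "text:";  // derivations, builtins.toFile
    }
    return ""; // unreachable
}

// Prefix with an explicit hash algorithm, as in renderWithAlgo:
// "text:<algo>" for text, "fixed:[r:|git:]<algo>" for the file methods.
std::string withAlgo(Method m, const std::string & algo)
{
    return (m == Method::Text ? std::string("text:")
                              : "fixed:" + legacyPrefix(m)) + algo;
}

int main()
{
    std::cout << withAlgo(Method::NixArchive, "sha256") << "\n"; // fixed:r:sha256
    std::cout << withAlgo(Method::Flat, "sha256") << "\n";       // fixed:sha256
    std::cout << withAlgo(Method::Git, "sha1") << "\n";          // fixed:git:sha1
    std::cout << withAlgo(Method::Text, "sha256") << "\n";       // text:sha256
}
```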

View File

@ -5,7 +5,6 @@
#include "hash.hh" #include "hash.hh"
#include "path.hh" #include "path.hh"
#include "file-content-address.hh" #include "file-content-address.hh"
#include "comparator.hh"
#include "variant-wrapper.hh" #include "variant-wrapper.hh"
namespace nix { namespace nix {
@ -14,24 +13,6 @@ namespace nix {
* Content addressing method * Content addressing method
*/ */
/* We only have one way to hash text with references, so this is a single-value
type, mainly useful with std::variant.
*/
/**
* The single way we can serialize "text" file system objects.
*
* Somewhat obscure, used by \ref Derivation derivations and
* `builtins.toFile` currently.
*
* TextIngestionMethod is identical to FileIngestionMethod::Fixed except that
* the former may not have self-references and is tagged `text:${algo}:${hash}`
* rather than `fixed:${algo}:${hash}`. The contents of the store path are
* ingested and hashed identically, aside from the slightly different tag and
* restriction on self-references.
*/
struct TextIngestionMethod : std::monostate { };
/** /**
* Compute the prefix to the hash algorithm which indicates how the * Compute the prefix to the hash algorithm which indicates how the
* files were ingested. * files were ingested.
@ -48,14 +29,51 @@ std::string_view makeFileIngestionPrefix(FileIngestionMethod m);
*/ */
struct ContentAddressMethod struct ContentAddressMethod
{ {
typedef std::variant< enum struct Raw {
TextIngestionMethod, /**
FileIngestionMethod * Calculate a store path using the `FileIngestionMethod::Flat`
> Raw; * hash of the file system objects, and references.
*
* See `store-object/content-address.md#method-flat` in the
* manual.
*/
Flat,
/**
* Calculate a store path using the
* `FileIngestionMethod::NixArchive` hash of the file system
* objects, and references.
*
* See `store-object/content-address.md#method-nix-archive` in the
* manual.
*/
NixArchive,
/**
* Calculate a store path using the `FileIngestionMethod::Git`
* hash of the file system objects, and references.
*
* Part of `ExperimentalFeature::GitHashing`.
*
* See `store-object/content-address.md#method-git` in the
* manual.
*/
Git,
/**
* Calculate a store path using the `FileIngestionMethod::Flat`
* hash of the file system objects, and references, but in a
* different way than `ContentAddressMethod::Raw::Flat`.
*
* See `store-object/content-address.md#method-text` in the
* manual.
*/
Text,
};
Raw raw; Raw raw;
GENERATE_CMP(ContentAddressMethod, me->raw); auto operator <=>(const ContentAddressMethod &) const = default;
MAKE_WRAPPER_CONSTRUCTOR(ContentAddressMethod); MAKE_WRAPPER_CONSTRUCTOR(ContentAddressMethod);
@ -141,7 +159,7 @@ struct ContentAddress
*/ */
Hash hash; Hash hash;
GENERATE_CMP(ContentAddress, me->method, me->hash); auto operator <=>(const ContentAddress &) const = default;
/** /**
* Compute the content-addressability assertion * Compute the content-addressability assertion
@ -200,7 +218,7 @@ struct StoreReferences
*/ */
size_t size() const; size_t size() const;
GENERATE_CMP(StoreReferences, me->self, me->others); auto operator <=>(const StoreReferences &) const = default;
}; };
// This matches the additional info that we need for makeTextPath // This matches the additional info that we need for makeTextPath
@ -217,7 +235,7 @@ struct TextInfo
*/ */
StorePathSet references; StorePathSet references;
GENERATE_CMP(TextInfo, me->hash, me->references); auto operator <=>(const TextInfo &) const = default;
}; };
struct FixedOutputInfo struct FixedOutputInfo
@ -237,7 +255,7 @@ struct FixedOutputInfo
*/ */
StoreReferences references; StoreReferences references;
GENERATE_CMP(FixedOutputInfo, me->hash, me->references); auto operator <=>(const FixedOutputInfo &) const = default;
}; };
/** /**
@ -254,7 +272,7 @@ struct ContentAddressWithReferences
Raw raw; Raw raw;
GENERATE_CMP(ContentAddressWithReferences, me->raw); auto operator <=>(const ContentAddressWithReferences &) const = default;
MAKE_WRAPPER_CONSTRUCTOR(ContentAddressWithReferences); MAKE_WRAPPER_CONSTRUCTOR(ContentAddressWithReferences);
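
Two smaller changes in this header are worth noting: the comparison operators now come from a defaulted `operator<=>` instead of `GENERATE_CMP`, and the `MAKE_WRAPPER_CONSTRUCTOR` wrapper still allows implicit construction from the new `Raw` enum. A minimal sketch of how that looks from calling code, assuming the headers from this change are on the include path:

```cpp
// Hedged sketch assuming "content-address.hh" from this change is available;
// shows the wrapper constructor and the defaulted three-way comparison.
#include <map>
#include <string>
#include "content-address.hh"

using namespace nix;

int main()
{
    // Implicit construction from the Raw enum via MAKE_WRAPPER_CONSTRUCTOR.
    ContentAddressMethod nar = ContentAddressMethod::Raw::NixArchive;
    ContentAddressMethod text = ContentAddressMethod::Raw::Text;

    // operator<=> is defaulted, so the type is ordered by its Raw value and
    // can be used directly as a map key (GENERATE_CMP is no longer needed).
    std::map<ContentAddressMethod, std::string> names {
        { nar, "nar" },
        { text, "text" },
    };

    // render()/parse() round-trip, per the declarations in this header.
    return ContentAddressMethod::parse(nar.render()) == nar ? 0 : 1;
}
```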

View File

@ -415,12 +415,12 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
case FileIngestionMethod::Flat: case FileIngestionMethod::Flat:
dumpMethod = FileSerialisationMethod::Flat; dumpMethod = FileSerialisationMethod::Flat;
break; break;
case FileIngestionMethod::Recursive: case FileIngestionMethod::NixArchive:
dumpMethod = FileSerialisationMethod::Recursive; dumpMethod = FileSerialisationMethod::NixArchive;
break; break;
case FileIngestionMethod::Git: case FileIngestionMethod::Git:
// Use NAR; Git is not a serialization method // Use NAR; Git is not a serialization method
dumpMethod = FileSerialisationMethod::Recursive; dumpMethod = FileSerialisationMethod::NixArchive;
break; break;
default: default:
assert(false); assert(false);
@ -435,19 +435,21 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
} else { } else {
HashAlgorithm hashAlgo; HashAlgorithm hashAlgo;
std::string baseName; std::string baseName;
FileIngestionMethod method; ContentAddressMethod method;
{ {
bool fixed; bool fixed;
uint8_t recursive; uint8_t recursive;
std::string hashAlgoRaw; std::string hashAlgoRaw;
from >> baseName >> fixed /* obsolete */ >> recursive >> hashAlgoRaw; from >> baseName >> fixed /* obsolete */ >> recursive >> hashAlgoRaw;
if (recursive > (uint8_t) FileIngestionMethod::Recursive) if (recursive > true)
throw Error("unsupported FileIngestionMethod with value of %i; you may need to upgrade nix-daemon", recursive); throw Error("unsupported FileIngestionMethod with value of %i; you may need to upgrade nix-daemon", recursive);
method = FileIngestionMethod { recursive }; method = recursive
? ContentAddressMethod::Raw::NixArchive
: ContentAddressMethod::Raw::Flat;
/* Compatibility hack. */ /* Compatibility hack. */
if (!fixed) { if (!fixed) {
hashAlgoRaw = "sha256"; hashAlgoRaw = "sha256";
method = FileIngestionMethod::Recursive; method = ContentAddressMethod::Raw::NixArchive;
} }
hashAlgo = parseHashAlgo(hashAlgoRaw); hashAlgo = parseHashAlgo(hashAlgoRaw);
} }
@ -468,7 +470,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
}); });
logger->startWork(); logger->startWork();
auto path = store->addToStoreFromDump( auto path = store->addToStoreFromDump(
*dumpSource, baseName, FileSerialisationMethod::Recursive, method, hashAlgo); *dumpSource, baseName, FileSerialisationMethod::NixArchive, method, hashAlgo);
logger->stopWork(); logger->stopWork();
to << store->printStorePath(path); to << store->printStorePath(path);
@ -500,7 +502,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
logger->startWork(); logger->startWork();
auto path = ({ auto path = ({
StringSource source { s }; StringSource source { s };
store->addToStoreFromDump(source, suffix, FileSerialisationMethod::Flat, TextIngestionMethod {}, HashAlgorithm::SHA256, refs, NoRepair); store->addToStoreFromDump(source, suffix, FileSerialisationMethod::Flat, ContentAddressMethod::Raw::Text, HashAlgorithm::SHA256, refs, NoRepair);
}); });
logger->stopWork(); logger->stopWork();
to << store->printStorePath(path); to << store->printStorePath(path);
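
The compatibility handling above decodes the legacy `AddToStore` wire fields (an obsolete `fixed` flag, a `recursive` byte, and a hash algorithm name) into the new `ContentAddressMethod::Raw` values. A standalone sketch of that mapping, with illustrative names only:

```cpp
// Standalone sketch (not the actual daemon code) of how the legacy
// AddToStore fields decoded above map onto the new ingestion methods.
#include <cstdint>
#include <stdexcept>
#include <string>
#include <utility>

enum class Method { Flat, NixArchive };

std::pair<Method, std::string> decodeLegacyAddToStore(
    bool fixed, uint8_t recursive, std::string hashAlgo)
{
    if (recursive > 1)
        throw std::runtime_error(
            "unsupported FileIngestionMethod with value of " + std::to_string(recursive));
    // `recursive` selects flat vs. NAR ingestion...
    Method method = recursive ? Method::NixArchive : Method::Flat;
    // ...and a cleared `fixed` flag (very old clients) forces NAR + SHA-256.
    if (!fixed) {
        hashAlgo = "sha256";
        method = Method::NixArchive;
    }
    return { method, hashAlgo };
}
```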

View File

@ -150,7 +150,7 @@ StorePath writeDerivation(Store & store,
}) })
: ({ : ({
StringSource s { contents }; StringSource s { contents };
store.addToStoreFromDump(s, suffix, FileSerialisationMethod::Flat, TextIngestionMethod {}, HashAlgorithm::SHA256, references, repair); store.addToStoreFromDump(s, suffix, FileSerialisationMethod::Flat, ContentAddressMethod::Raw::Text, HashAlgorithm::SHA256, references, repair);
}); });
} }
@ -274,7 +274,7 @@ static DerivationOutput parseDerivationOutput(
{ {
if (hashAlgoStr != "") { if (hashAlgoStr != "") {
ContentAddressMethod method = ContentAddressMethod::parsePrefix(hashAlgoStr); ContentAddressMethod method = ContentAddressMethod::parsePrefix(hashAlgoStr);
if (method == TextIngestionMethod {}) if (method == ContentAddressMethod::Raw::Text)
xpSettings.require(Xp::DynamicDerivations); xpSettings.require(Xp::DynamicDerivations);
const auto hashAlgo = parseHashAlgo(hashAlgoStr); const auto hashAlgo = parseHashAlgo(hashAlgoStr);
if (hashS == "impure") { if (hashS == "impure") {
@ -1249,7 +1249,7 @@ DerivationOutput DerivationOutput::fromJSON(
auto methodAlgo = [&]() -> std::pair<ContentAddressMethod, HashAlgorithm> { auto methodAlgo = [&]() -> std::pair<ContentAddressMethod, HashAlgorithm> {
auto & method_ = getString(valueAt(json, "method")); auto & method_ = getString(valueAt(json, "method"));
ContentAddressMethod method = ContentAddressMethod::parse(method_); ContentAddressMethod method = ContentAddressMethod::parse(method_);
if (method == TextIngestionMethod {}) if (method == ContentAddressMethod::Raw::Text)
xpSettings.require(Xp::DynamicDerivations); xpSettings.require(Xp::DynamicDerivations);
auto & hashAlgo_ = getString(valueAt(json, "hashAlgo")); auto & hashAlgo_ = getString(valueAt(json, "hashAlgo"));

View File

@ -64,8 +64,8 @@ struct DummyStore : public virtual DummyStoreConfig, public virtual Store
virtual StorePath addToStoreFromDump( virtual StorePath addToStoreFromDump(
Source & dump, Source & dump,
std::string_view name, std::string_view name,
FileSerialisationMethod dumpMethod = FileSerialisationMethod::Recursive, FileSerialisationMethod dumpMethod = FileSerialisationMethod::NixArchive,
ContentAddressMethod hashMethod = FileIngestionMethod::Recursive, ContentAddressMethod hashMethod = FileIngestionMethod::NixArchive,
HashAlgorithm hashAlgo = HashAlgorithm::SHA256, HashAlgorithm hashAlgo = HashAlgorithm::SHA256,
const StorePathSet & references = StorePathSet(), const StorePathSet & references = StorePathSet(),
RepairFlag repair = NoRepair) override RepairFlag repair = NoRepair) override

View File

@ -76,8 +76,8 @@ struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Stor
virtual StorePath addToStoreFromDump( virtual StorePath addToStoreFromDump(
Source & dump, Source & dump,
std::string_view name, std::string_view name,
FileSerialisationMethod dumpMethod = FileSerialisationMethod::Recursive, FileSerialisationMethod dumpMethod = FileSerialisationMethod::NixArchive,
ContentAddressMethod hashMethod = FileIngestionMethod::Recursive, ContentAddressMethod hashMethod = FileIngestionMethod::NixArchive,
HashAlgorithm hashAlgo = HashAlgorithm::SHA256, HashAlgorithm hashAlgo = HashAlgorithm::SHA256,
const StorePathSet & references = StorePathSet(), const StorePathSet & references = StorePathSet(),
RepairFlag repair = NoRepair) override RepairFlag repair = NoRepair) override

View File

@ -1155,7 +1155,7 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
auto fim = specified.method.getFileIngestionMethod(); auto fim = specified.method.getFileIngestionMethod();
switch (fim) { switch (fim) {
case FileIngestionMethod::Flat: case FileIngestionMethod::Flat:
case FileIngestionMethod::Recursive: case FileIngestionMethod::NixArchive:
{ {
HashModuloSink caSink { HashModuloSink caSink {
specified.hash.algo, specified.hash.algo,
@ -1253,7 +1253,7 @@ StorePath LocalStore::addToStoreFromDump(
std::filesystem::path tempDir; std::filesystem::path tempDir;
AutoCloseFD tempDirFd; AutoCloseFD tempDirFd;
bool methodsMatch = ContentAddressMethod(FileIngestionMethod(dumpMethod)) == hashMethod; bool methodsMatch = static_cast<FileIngestionMethod>(dumpMethod) == hashMethod.getFileIngestionMethod();
/* If the methods don't match, our streaming hash of the dump is the /* If the methods don't match, our streaming hash of the dump is the
wrong sort, and we need to rehash. */ wrong sort, and we need to rehash. */
@ -1314,7 +1314,7 @@ StorePath LocalStore::addToStoreFromDump(
auto fim = hashMethod.getFileIngestionMethod(); auto fim = hashMethod.getFileIngestionMethod();
switch (fim) { switch (fim) {
case FileIngestionMethod::Flat: case FileIngestionMethod::Flat:
case FileIngestionMethod::Recursive: case FileIngestionMethod::NixArchive:
restorePath(realPath, dumpSource, (FileSerialisationMethod) fim); restorePath(realPath, dumpSource, (FileSerialisationMethod) fim);
break; break;
case FileIngestionMethod::Git: case FileIngestionMethod::Git:
@ -1330,7 +1330,7 @@ StorePath LocalStore::addToStoreFromDump(
/* For computing the nar hash. In recursive SHA-256 mode, this /* For computing the nar hash. In recursive SHA-256 mode, this
is the same as the store hash, so no need to do it again. */ is the same as the store hash, so no need to do it again. */
auto narHash = std::pair { dumpHash, size }; auto narHash = std::pair { dumpHash, size };
if (dumpMethod != FileSerialisationMethod::Recursive || hashAlgo != HashAlgorithm::SHA256) { if (dumpMethod != FileSerialisationMethod::NixArchive || hashAlgo != HashAlgorithm::SHA256) {
HashSink narSink { HashAlgorithm::SHA256 }; HashSink narSink { HashAlgorithm::SHA256 };
dumpPath(realPath, narSink); dumpPath(realPath, narSink);
narHash = narSink.finish(); narHash = narSink.finish();
@ -1423,7 +1423,7 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
PosixSourceAccessor accessor; PosixSourceAccessor accessor;
std::string hash = hashPath( std::string hash = hashPath(
PosixSourceAccessor::createAtRoot(link.path()), PosixSourceAccessor::createAtRoot(link.path()),
FileIngestionMethod::Recursive, HashAlgorithm::SHA256).first.to_string(HashFormat::Nix32, false); FileIngestionMethod::NixArchive, HashAlgorithm::SHA256).first.to_string(HashFormat::Nix32, false);
if (hash != name.string()) { if (hash != name.string()) {
printError("link '%s' was modified! expected hash '%s', got '%s'", printError("link '%s' was modified! expected hash '%s', got '%s'",
link.path(), name, hash); link.path(), name, hash);

View File

@ -52,7 +52,7 @@ std::map<StorePath, StorePath> makeContentAddressed(
dstStore, dstStore,
path.name(), path.name(),
FixedOutputInfo { FixedOutputInfo {
.method = FileIngestionMethod::Recursive, .method = FileIngestionMethod::NixArchive,
.hash = narModuloHash, .hash = narModuloHash,
.references = std::move(refs), .references = std::move(refs),
}, },

View File

@ -151,7 +151,7 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats,
Hash hash = ({ Hash hash = ({
hashPath( hashPath(
{make_ref<PosixSourceAccessor>(), CanonPath(path)}, {make_ref<PosixSourceAccessor>(), CanonPath(path)},
FileSerialisationMethod::Recursive, HashAlgorithm::SHA256).first; FileSerialisationMethod::NixArchive, HashAlgorithm::SHA256).first;
}); });
debug("'%1%' has hash '%2%'", path, hash.to_string(HashFormat::Nix32, true)); debug("'%1%' has hash '%2%'", path, hash.to_string(HashFormat::Nix32, true));
@ -165,7 +165,7 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats,
|| (repair && hash != ({ || (repair && hash != ({
hashPath( hashPath(
PosixSourceAccessor::createAtRoot(linkPath), PosixSourceAccessor::createAtRoot(linkPath),
FileSerialisationMethod::Recursive, HashAlgorithm::SHA256).first; FileSerialisationMethod::NixArchive, HashAlgorithm::SHA256).first;
}))) })))
{ {
// XXX: Consider overwriting linkPath with our valid version. // XXX: Consider overwriting linkPath with our valid version.

View File

@ -48,15 +48,21 @@ std::optional<ContentAddressWithReferences> ValidPathInfo::contentAddressWithRef
if (! ca) if (! ca)
return std::nullopt; return std::nullopt;
return std::visit(overloaded { switch (ca->method.raw) {
[&](const TextIngestionMethod &) -> ContentAddressWithReferences { case ContentAddressMethod::Raw::Text:
{
assert(references.count(path) == 0); assert(references.count(path) == 0);
return TextInfo { return TextInfo {
.hash = ca->hash, .hash = ca->hash,
.references = references, .references = references,
}; };
}, }
[&](const FileIngestionMethod & m2) -> ContentAddressWithReferences {
case ContentAddressMethod::Raw::Flat:
case ContentAddressMethod::Raw::NixArchive:
case ContentAddressMethod::Raw::Git:
default:
{
auto refs = references; auto refs = references;
bool hasSelfReference = false; bool hasSelfReference = false;
if (refs.count(path)) { if (refs.count(path)) {
@ -64,15 +70,15 @@ std::optional<ContentAddressWithReferences> ValidPathInfo::contentAddressWithRef
refs.erase(path); refs.erase(path);
} }
return FixedOutputInfo { return FixedOutputInfo {
.method = m2, .method = ca->method.getFileIngestionMethod(),
.hash = ca->hash, .hash = ca->hash,
.references = { .references = {
.others = std::move(refs), .others = std::move(refs),
.self = hasSelfReference, .self = hasSelfReference,
}, },
}; };
}, }
}, ca->method.raw); }
} }
bool ValidPathInfo::isContentAddressed(const Store & store) const bool ValidPathInfo::isContentAddressed(const Store & store) const
@ -127,22 +133,18 @@ ValidPathInfo::ValidPathInfo(
: UnkeyedValidPathInfo(narHash) : UnkeyedValidPathInfo(narHash)
, path(store.makeFixedOutputPathFromCA(name, ca)) , path(store.makeFixedOutputPathFromCA(name, ca))
{ {
this->ca = ContentAddress {
.method = ca.getMethod(),
.hash = ca.getHash(),
};
std::visit(overloaded { std::visit(overloaded {
[this](TextInfo && ti) { [this](TextInfo && ti) {
this->references = std::move(ti.references); this->references = std::move(ti.references);
this->ca = ContentAddress {
.method = TextIngestionMethod {},
.hash = std::move(ti.hash),
};
}, },
[this](FixedOutputInfo && foi) { [this](FixedOutputInfo && foi) {
this->references = std::move(foi.references.others); this->references = std::move(foi.references.others);
if (foi.references.self) if (foi.references.self)
this->references.insert(path); this->references.insert(path);
this->ca = ContentAddress {
.method = std::move(foi.method),
.hash = std::move(foi.hash),
};
}, },
}, std::move(ca).raw); }, std::move(ca).raw);
} }

View File

@ -392,8 +392,9 @@ ref<const ValidPathInfo> RemoteStore::addCAToStore(
else { else {
if (repair) throw Error("repairing is not supported when building through the Nix daemon protocol < 1.25"); if (repair) throw Error("repairing is not supported when building through the Nix daemon protocol < 1.25");
std::visit(overloaded { switch (caMethod.raw) {
[&](const TextIngestionMethod & thm) -> void { case ContentAddressMethod::Raw::Text:
{
if (hashAlgo != HashAlgorithm::SHA256) if (hashAlgo != HashAlgorithm::SHA256)
throw UnimplementedError("When adding text-hashed data called '%s', only SHA-256 is supported but '%s' was given", throw UnimplementedError("When adding text-hashed data called '%s', only SHA-256 is supported but '%s' was given",
name, printHashAlgo(hashAlgo)); name, printHashAlgo(hashAlgo));
@ -401,13 +402,19 @@ ref<const ValidPathInfo> RemoteStore::addCAToStore(
conn->to << WorkerProto::Op::AddTextToStore << name << s; conn->to << WorkerProto::Op::AddTextToStore << name << s;
WorkerProto::write(*this, *conn, references); WorkerProto::write(*this, *conn, references);
conn.processStderr(); conn.processStderr();
}, break;
[&](const FileIngestionMethod & fim) -> void { }
case ContentAddressMethod::Raw::Flat:
case ContentAddressMethod::Raw::NixArchive:
case ContentAddressMethod::Raw::Git:
default:
{
auto fim = caMethod.getFileIngestionMethod();
conn->to conn->to
<< WorkerProto::Op::AddToStore << WorkerProto::Op::AddToStore
<< name << name
<< ((hashAlgo == HashAlgorithm::SHA256 && fim == FileIngestionMethod::Recursive) ? 0 : 1) /* backwards compatibility hack */ << ((hashAlgo == HashAlgorithm::SHA256 && fim == FileIngestionMethod::NixArchive) ? 0 : 1) /* backwards compatibility hack */
<< (fim == FileIngestionMethod::Recursive ? 1 : 0) << (fim == FileIngestionMethod::NixArchive ? 1 : 0)
<< printHashAlgo(hashAlgo); << printHashAlgo(hashAlgo);
try { try {
@ -415,7 +422,7 @@ ref<const ValidPathInfo> RemoteStore::addCAToStore(
connections->incCapacity(); connections->incCapacity();
{ {
Finally cleanup([&]() { connections->decCapacity(); }); Finally cleanup([&]() { connections->decCapacity(); });
if (fim == FileIngestionMethod::Recursive) { if (fim == FileIngestionMethod::NixArchive) {
dump.drainInto(conn->to); dump.drainInto(conn->to);
} else { } else {
std::string contents = dump.drain(); std::string contents = dump.drain();
@ -432,9 +439,9 @@ ref<const ValidPathInfo> RemoteStore::addCAToStore(
} catch (EndOfFile & e) { } } catch (EndOfFile & e) { }
throw; throw;
} }
break;
} }
}, caMethod.raw); }
auto path = parseStorePath(readString(conn->from)); auto path = parseStorePath(readString(conn->from));
// Release our connection to prevent a deadlock in queryPathInfo(). // Release our connection to prevent a deadlock in queryPathInfo().
conn_.reset(); conn_.reset();
@ -457,12 +464,12 @@ StorePath RemoteStore::addToStoreFromDump(
case FileIngestionMethod::Flat: case FileIngestionMethod::Flat:
fsm = FileSerialisationMethod::Flat; fsm = FileSerialisationMethod::Flat;
break; break;
case FileIngestionMethod::Recursive: case FileIngestionMethod::NixArchive:
fsm = FileSerialisationMethod::Recursive; fsm = FileSerialisationMethod::NixArchive;
break; break;
case FileIngestionMethod::Git: case FileIngestionMethod::Git:
// Use NAR; Git is not a serialization method // Use NAR; Git is not a serialization method
fsm = FileSerialisationMethod::Recursive; fsm = FileSerialisationMethod::NixArchive;
break; break;
default: default:
assert(false); assert(false);

View File

@ -87,8 +87,8 @@ public:
StorePath addToStoreFromDump( StorePath addToStoreFromDump(
Source & dump, Source & dump,
std::string_view name, std::string_view name,
FileSerialisationMethod dumpMethod = FileSerialisationMethod::Recursive, FileSerialisationMethod dumpMethod = FileSerialisationMethod::NixArchive,
ContentAddressMethod hashMethod = FileIngestionMethod::Recursive, ContentAddressMethod hashMethod = FileIngestionMethod::NixArchive,
HashAlgorithm hashAlgo = HashAlgorithm::SHA256, HashAlgorithm hashAlgo = HashAlgorithm::SHA256,
const StorePathSet & references = StorePathSet(), const StorePathSet & references = StorePathSet(),
RepairFlag repair = NoRepair) override; RepairFlag repair = NoRepair) override;

View File

@ -19,6 +19,7 @@
#include "signals.hh" #include "signals.hh"
#include "users.hh" #include "users.hh"
#include <filesystem>
#include <nlohmann/json.hpp> #include <nlohmann/json.hpp>
using json = nlohmann::json; using json = nlohmann::json;
@ -121,7 +122,7 @@ StorePath StoreDirConfig::makeFixedOutputPath(std::string_view name, const Fixed
if (info.method == FileIngestionMethod::Git && info.hash.algo != HashAlgorithm::SHA1) if (info.method == FileIngestionMethod::Git && info.hash.algo != HashAlgorithm::SHA1)
throw Error("Git file ingestion must use SHA-1 hash"); throw Error("Git file ingestion must use SHA-1 hash");
if (info.hash.algo == HashAlgorithm::SHA256 && info.method == FileIngestionMethod::Recursive) { if (info.hash.algo == HashAlgorithm::SHA256 && info.method == FileIngestionMethod::NixArchive) {
return makeStorePath(makeType(*this, "source", info.references), info.hash, name); return makeStorePath(makeType(*this, "source", info.references), info.hash, name);
} else { } else {
if (!info.references.empty()) { if (!info.references.empty()) {
@ -199,12 +200,12 @@ StorePath Store::addToStore(
case FileIngestionMethod::Flat: case FileIngestionMethod::Flat:
fsm = FileSerialisationMethod::Flat; fsm = FileSerialisationMethod::Flat;
break; break;
case FileIngestionMethod::Recursive: case FileIngestionMethod::NixArchive:
fsm = FileSerialisationMethod::Recursive; fsm = FileSerialisationMethod::NixArchive;
break; break;
case FileIngestionMethod::Git: case FileIngestionMethod::Git:
// Use NAR; Git is not a serialization method // Use NAR; Git is not a serialization method
fsm = FileSerialisationMethod::Recursive; fsm = FileSerialisationMethod::NixArchive;
break; break;
} }
auto source = sinkToSource([&](Sink & sink) { auto source = sinkToSource([&](Sink & sink) {
@ -355,7 +356,7 @@ ValidPathInfo Store::addToStoreSlow(
RegularFileSink fileSink { caHashSink }; RegularFileSink fileSink { caHashSink };
TeeSink unusualHashTee { narHashSink, caHashSink }; TeeSink unusualHashTee { narHashSink, caHashSink };
auto & narSink = method == FileIngestionMethod::Recursive && hashAlgo != HashAlgorithm::SHA256 auto & narSink = method == ContentAddressMethod::Raw::NixArchive && hashAlgo != HashAlgorithm::SHA256
? static_cast<Sink &>(unusualHashTee) ? static_cast<Sink &>(unusualHashTee)
: narHashSink; : narHashSink;
@ -383,9 +384,9 @@ ValidPathInfo Store::addToStoreSlow(
finish. */ finish. */
auto [narHash, narSize] = narHashSink.finish(); auto [narHash, narSize] = narHashSink.finish();
auto hash = method == FileIngestionMethod::Recursive && hashAlgo == HashAlgorithm::SHA256 auto hash = method == ContentAddressMethod::Raw::NixArchive && hashAlgo == HashAlgorithm::SHA256
? narHash ? narHash
: method == FileIngestionMethod::Git : method == ContentAddressMethod::Raw::Git
? git::dumpHash(hashAlgo, srcPath).hash ? git::dumpHash(hashAlgo, srcPath).hash
: caHashSink.finish().first; : caHashSink.finish().first;
@ -1303,7 +1304,7 @@ ref<Store> openStore(StoreReference && storeURI)
if (!pathExists(chrootStore)) { if (!pathExists(chrootStore)) {
try { try {
createDirs(chrootStore); createDirs(chrootStore);
} catch (Error & e) { } catch (SystemError & e) {
return std::make_shared<LocalStore>(params); return std::make_shared<LocalStore>(params);
} }
warn("'%s' does not exist, so Nix will use '%s' as a chroot store", stateDir, chrootStore); warn("'%s' does not exist, so Nix will use '%s' as a chroot store", stateDir, chrootStore);

View File

@ -441,7 +441,7 @@ public:
virtual StorePath addToStore( virtual StorePath addToStore(
std::string_view name, std::string_view name,
const SourcePath & path, const SourcePath & path,
ContentAddressMethod method = FileIngestionMethod::Recursive, ContentAddressMethod method = ContentAddressMethod::Raw::NixArchive,
HashAlgorithm hashAlgo = HashAlgorithm::SHA256, HashAlgorithm hashAlgo = HashAlgorithm::SHA256,
const StorePathSet & references = StorePathSet(), const StorePathSet & references = StorePathSet(),
PathFilter & filter = defaultPathFilter, PathFilter & filter = defaultPathFilter,
@ -455,7 +455,7 @@ public:
ValidPathInfo addToStoreSlow( ValidPathInfo addToStoreSlow(
std::string_view name, std::string_view name,
const SourcePath & path, const SourcePath & path,
ContentAddressMethod method = FileIngestionMethod::Recursive, ContentAddressMethod method = ContentAddressMethod::Raw::NixArchive,
HashAlgorithm hashAlgo = HashAlgorithm::SHA256, HashAlgorithm hashAlgo = HashAlgorithm::SHA256,
const StorePathSet & references = StorePathSet(), const StorePathSet & references = StorePathSet(),
std::optional<Hash> expectedCAHash = {}); std::optional<Hash> expectedCAHash = {});
@ -470,7 +470,7 @@ public:
* *
* @param dumpMethod What serialisation format is `dump`, i.e. how * @param dumpMethod What serialisation format is `dump`, i.e. how
* to deserialize it. Must either match hashMethod or be * to deserialize it. Must either match hashMethod or be
* `FileSerialisationMethod::Recursive`. * `FileSerialisationMethod::NixArchive`.
* *
* @param hashMethod How to content-address? Need not be the * @param hashMethod How to content-address? Need not be the
* same as `dumpMethod`. * same as `dumpMethod`.
@ -480,8 +480,8 @@ public:
virtual StorePath addToStoreFromDump( virtual StorePath addToStoreFromDump(
Source & dump, Source & dump,
std::string_view name, std::string_view name,
FileSerialisationMethod dumpMethod = FileSerialisationMethod::Recursive, FileSerialisationMethod dumpMethod = FileSerialisationMethod::NixArchive,
ContentAddressMethod hashMethod = FileIngestionMethod::Recursive, ContentAddressMethod hashMethod = ContentAddressMethod::Raw::NixArchive,
HashAlgorithm hashAlgo = HashAlgorithm::SHA256, HashAlgorithm hashAlgo = HashAlgorithm::SHA256,
const StorePathSet & references = StorePathSet(), const StorePathSet & references = StorePathSet(),
RepairFlag repair = NoRepair) = 0; RepairFlag repair = NoRepair) = 0;
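
The doc comment above ties `dumpMethod` (how the `dump` stream is serialised) to `hashMethod` (how the store path is computed): they must either agree, or the dump must be a NAR. A hedged usage sketch, assuming the Nix headers from this change and mirroring call sites elsewhere in this commit (`writeDerivation`, `prim_toFile`), which pair a flat string dump with `Text` addressing; the helper name and contents are illustrative only.

```cpp
// Hedged sketch assuming the Nix headers from this change; the helper name
// and the literal contents are illustrative only.
#include <string>
#include "serialise.hh"
#include "store-api.hh"

using namespace nix;

StorePath addTextExample(Store & store, std::string contents)
{
    StringSource source { contents };
    return store.addToStoreFromDump(
        source,
        "example-name",
        FileSerialisationMethod::Flat,    // how `source` is serialised
        ContentAddressMethod::Raw::Text,  // how the store path is computed
        HashAlgorithm::SHA256);
}
```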

View File

@ -97,7 +97,7 @@ struct StoreDirConfig : public Config
std::pair<StorePath, Hash> computeStorePath( std::pair<StorePath, Hash> computeStorePath(
std::string_view name, std::string_view name,
const SourcePath & path, const SourcePath & path,
ContentAddressMethod method = FileIngestionMethod::Recursive, ContentAddressMethod method = FileIngestionMethod::NixArchive,
HashAlgorithm hashAlgo = HashAlgorithm::SHA256, HashAlgorithm hashAlgo = HashAlgorithm::SHA256,
const StorePathSet & references = {}, const StorePathSet & references = {},
PathFilter & filter = defaultPathFilter) const; PathFilter & filter = defaultPathFilter) const;

View File

@ -2489,7 +2489,7 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
auto fim = outputHash.method.getFileIngestionMethod(); auto fim = outputHash.method.getFileIngestionMethod();
switch (fim) { switch (fim) {
case FileIngestionMethod::Flat: case FileIngestionMethod::Flat:
case FileIngestionMethod::Recursive: case FileIngestionMethod::NixArchive:
{ {
HashModuloSink caSink { outputHash.hashAlgo, oldHashPart }; HashModuloSink caSink { outputHash.hashAlgo, oldHashPart };
auto fim = outputHash.method.getFileIngestionMethod(); auto fim = outputHash.method.getFileIngestionMethod();
@ -2531,7 +2531,7 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
{ {
HashResult narHashAndSize = hashPath( HashResult narHashAndSize = hashPath(
{getFSSourceAccessor(), CanonPath(actualPath)}, {getFSSourceAccessor(), CanonPath(actualPath)},
FileSerialisationMethod::Recursive, HashAlgorithm::SHA256); FileSerialisationMethod::NixArchive, HashAlgorithm::SHA256);
newInfo0.narHash = narHashAndSize.first; newInfo0.narHash = narHashAndSize.first;
newInfo0.narSize = narHashAndSize.second; newInfo0.narSize = narHashAndSize.second;
} }
@ -2554,7 +2554,7 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
rewriteOutput(outputRewrites); rewriteOutput(outputRewrites);
HashResult narHashAndSize = hashPath( HashResult narHashAndSize = hashPath(
{getFSSourceAccessor(), CanonPath(actualPath)}, {getFSSourceAccessor(), CanonPath(actualPath)},
FileSerialisationMethod::Recursive, HashAlgorithm::SHA256); FileSerialisationMethod::NixArchive, HashAlgorithm::SHA256);
ValidPathInfo newInfo0 { requiredFinalPath, narHashAndSize.first }; ValidPathInfo newInfo0 { requiredFinalPath, narHashAndSize.first };
newInfo0.narSize = narHashAndSize.second; newInfo0.narSize = narHashAndSize.second;
auto refs = rewriteRefs(); auto refs = rewriteRefs();
@ -2904,6 +2904,24 @@ void LocalDerivationGoal::checkOutputs(const std::map<std::string, ValidPathInfo
}; };
if (auto structuredAttrs = parsedDrv->getStructuredAttrs()) { if (auto structuredAttrs = parsedDrv->getStructuredAttrs()) {
if (get(*structuredAttrs, "allowedReferences")){
warn("'structuredAttrs' disables the effect of the top-level attribute 'allowedReferences'; use 'outputChecks' instead");
}
if (get(*structuredAttrs, "allowedRequisites")){
warn("'structuredAttrs' disables the effect of the top-level attribute 'allowedRequisites'; use 'outputChecks' instead");
}
if (get(*structuredAttrs, "disallowedRequisites")){
warn("'structuredAttrs' disables the effect of the top-level attribute 'disallowedRequisites'; use 'outputChecks' instead");
}
if (get(*structuredAttrs, "disallowedReferences")){
warn("'structuredAttrs' disables the effect of the top-level attribute 'disallowedReferences'; use 'outputChecks' instead");
}
if (get(*structuredAttrs, "maxSize")){
warn("'structuredAttrs' disables the effect of the top-level attribute 'maxSize'; use 'outputChecks' instead");
}
if (get(*structuredAttrs, "maxClosureSize")){
warn("'structuredAttrs' disables the effect of the top-level attribute 'maxClosureSize'; use 'outputChecks' instead");
}
if (auto outputChecks = get(*structuredAttrs, "outputChecks")) { if (auto outputChecks = get(*structuredAttrs, "outputChecks")) {
if (auto output = get(*outputChecks, outputName)) { if (auto output = get(*outputChecks, outputName)) {
Checks checks; Checks checks;

View File

@ -10,7 +10,7 @@ static std::optional<FileSerialisationMethod> parseFileSerialisationMethodOpt(st
if (input == "flat") { if (input == "flat") {
return FileSerialisationMethod::Flat; return FileSerialisationMethod::Flat;
} else if (input == "nar") { } else if (input == "nar") {
return FileSerialisationMethod::Recursive; return FileSerialisationMethod::NixArchive;
} else { } else {
return std::nullopt; return std::nullopt;
} }
@ -45,7 +45,7 @@ std::string_view renderFileSerialisationMethod(FileSerialisationMethod method)
switch (method) { switch (method) {
case FileSerialisationMethod::Flat: case FileSerialisationMethod::Flat:
return "flat"; return "flat";
case FileSerialisationMethod::Recursive: case FileSerialisationMethod::NixArchive:
return "nar"; return "nar";
default: default:
assert(false); assert(false);
@ -57,7 +57,7 @@ std::string_view renderFileIngestionMethod(FileIngestionMethod method)
{ {
switch (method) { switch (method) {
case FileIngestionMethod::Flat: case FileIngestionMethod::Flat:
case FileIngestionMethod::Recursive: case FileIngestionMethod::NixArchive:
return renderFileSerialisationMethod( return renderFileSerialisationMethod(
static_cast<FileSerialisationMethod>(method)); static_cast<FileSerialisationMethod>(method));
case FileIngestionMethod::Git: case FileIngestionMethod::Git:
@ -78,7 +78,7 @@ void dumpPath(
case FileSerialisationMethod::Flat: case FileSerialisationMethod::Flat:
path.readFile(sink); path.readFile(sink);
break; break;
case FileSerialisationMethod::Recursive: case FileSerialisationMethod::NixArchive:
path.dumpPath(sink, filter); path.dumpPath(sink, filter);
break; break;
} }
@ -94,7 +94,7 @@ void restorePath(
case FileSerialisationMethod::Flat: case FileSerialisationMethod::Flat:
writeFile(path, source); writeFile(path, source);
break; break;
case FileSerialisationMethod::Recursive: case FileSerialisationMethod::NixArchive:
restorePath(path, source); restorePath(path, source);
break; break;
} }
@ -119,7 +119,7 @@ std::pair<Hash, std::optional<uint64_t>> hashPath(
{ {
switch (method) { switch (method) {
case FileIngestionMethod::Flat: case FileIngestionMethod::Flat:
case FileIngestionMethod::Recursive: { case FileIngestionMethod::NixArchive: {
auto res = hashPath(path, (FileSerialisationMethod) method, ht, filter); auto res = hashPath(path, (FileSerialisationMethod) method, ht, filter);
return {res.first, {res.second}}; return {res.first, {res.second}};
} }

View File

@ -35,14 +35,14 @@ enum struct FileSerialisationMethod : uint8_t {
* See `file-system-object/content-address.md#serial-nix-archive` in * See `file-system-object/content-address.md#serial-nix-archive` in
* the manual. * the manual.
*/ */
Recursive, NixArchive,
}; };
/** /**
* Parse a `FileSerialisationMethod` by name. Choice of: * Parse a `FileSerialisationMethod` by name. Choice of:
* *
* - `flat`: `FileSerialisationMethod::Flat` * - `flat`: `FileSerialisationMethod::Flat`
* - `nar`: `FileSerialisationMethod::Recursive` * - `nar`: `FileSerialisationMethod::NixArchive`
* *
* Opposite of `renderFileSerialisationMethod`. * Opposite of `renderFileSerialisationMethod`.
*/ */
@ -107,16 +107,18 @@ enum struct FileIngestionMethod : uint8_t {
Flat, Flat,
/** /**
* Hash `FileSerialisationMethod::Recursive` serialisation. * Hash `FileSerialisationMethod::NixArchive` serialisation.
* *
* See `file-system-object/content-address.md#serial-nix-archive` in the * See `file-system-object/content-address.md#serial-nix-archive` in the
* manual. * manual.
*/ */
Recursive, NixArchive,
/** /**
* Git hashing. * Git hashing.
* *
* Part of `ExperimentalFeature::GitHashing`.
*
* See `file-system-object/content-address.md#serial-git` in the * See `file-system-object/content-address.md#serial-git` in the
* manual. * manual.
*/ */
@ -127,7 +129,7 @@ enum struct FileIngestionMethod : uint8_t {
* Parse a `FileIngestionMethod` by name. Choice of: * Parse a `FileIngestionMethod` by name. Choice of:
* *
* - `flat`: `FileIngestionMethod::Flat` * - `flat`: `FileIngestionMethod::Flat`
* - `nar`: `FileIngestionMethod::Recursive` * - `nar`: `FileIngestionMethod::NixArchive`
* - `git`: `FileIngestionMethod::Git` * - `git`: `FileIngestionMethod::Git`
* *
* Opposite of `renderFileIngestionMethod`. * Opposite of `renderFileIngestionMethod`.
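
A small sketch of the round-trip property these renamed values are expected to keep (it mirrors the unit test updated later in this diff):

    for (ContentAddressMethod cam : {
             ContentAddressMethod::Raw::Text,
             ContentAddressMethod::Raw::Flat,
             ContentAddressMethod::Raw::NixArchive,
             ContentAddressMethod::Raw::Git,       // gated behind the git-hashing experimental feature at use sites
         })
        assert(ContentAddressMethod::parse(cam.render()) == cam);   // renders as "text", "flat", "nar", "git"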

View File

@ -413,30 +413,13 @@ void deletePath(const fs::path & path)
} }
Paths createDirs(const Path & path) void createDirs(const Path & path)
{ {
Paths created; try {
if (path == "/") return created; fs::create_directories(path);
} catch (fs::filesystem_error & e) {
struct stat st; throw SysError("creating directory '%1%'", path);
if (STAT(path.c_str(), &st) == -1) {
created = createDirs(dirOf(path));
if (mkdir(path.c_str()
#ifndef _WIN32 // TODO abstract mkdir perms for Windows
, 0777
#endif
) == -1 && errno != EEXIST)
throw SysError("creating directory '%1%'", path);
st = STAT(path);
created.push_back(path);
} }
if (S_ISLNK(st.st_mode) && stat(path.c_str(), &st) == -1)
throw SysError("statting symlink '%1%'", path);
if (!S_ISDIR(st.st_mode)) throw Error("'%1%' is not a directory", path);
return created;
} }

View File

@ -148,11 +148,10 @@ void deletePath(const std::filesystem::path & path);
void deletePath(const std::filesystem::path & path, uint64_t & bytesFreed); void deletePath(const std::filesystem::path & path, uint64_t & bytesFreed);
/** /**
* Create a directory and all its parents, if necessary. Returns the * Create a directory and all its parents, if necessary.
* list of created directories, in order of creation.
*/ */
Paths createDirs(const Path & path); void createDirs(const Path & path);
inline Paths createDirs(PathView path) inline void createDirs(PathView path)
{ {
return createDirs(Path(path)); return createDirs(Path(path));
} }
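
A brief sketch of the new contract, under the assumption that callers which relied on the returned list have already been updated:

    createDirs(dirOf(path));   // creates any missing parent directories; throws SysError on failure
    // Before this change the call returned `Paths created`; that return value is gone,
    // so idempotent callers simply no longer learn whether anything was actually created.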

View File

@ -3,6 +3,7 @@
#include "types.hh" #include "types.hh"
#include "error.hh" #include "error.hh"
#include "file-descriptor.hh"
#include "logging.hh" #include "logging.hh"
#include "ansicolor.hh" #include "ansicolor.hh"
@ -23,26 +24,36 @@ namespace nix {
struct Sink; struct Sink;
struct Source; struct Source;
#ifndef _WIN32
class Pid class Pid
{ {
#ifndef _WIN32
pid_t pid = -1; pid_t pid = -1;
bool separatePG = false; bool separatePG = false;
int killSignal = SIGKILL; int killSignal = SIGKILL;
#else
AutoCloseFD pid = INVALID_DESCRIPTOR;
#endif
public: public:
Pid(); Pid();
#ifndef _WIN32
Pid(pid_t pid); Pid(pid_t pid);
~Pid();
void operator =(pid_t pid); void operator =(pid_t pid);
operator pid_t(); operator pid_t();
#else
Pid(AutoCloseFD pid);
void operator =(AutoCloseFD pid);
#endif
~Pid();
int kill(); int kill();
int wait(); int wait();
// TODO: Implement for Windows
#ifndef _WIN32
void setSeparatePG(bool separatePG); void setSeparatePG(bool separatePG);
void setKillSignal(int signal); void setKillSignal(int signal);
pid_t release(); pid_t release();
};
#endif #endif
};
#ifndef _WIN32 #ifndef _WIN32

View File

@ -1,9 +1,15 @@
#include "current-process.hh" #include "current-process.hh"
#include "environment-variables.hh" #include "environment-variables.hh"
#include "error.hh"
#include "file-descriptor.hh"
#include "file-path.hh"
#include "signals.hh" #include "signals.hh"
#include "processes.hh" #include "processes.hh"
#include "finally.hh" #include "finally.hh"
#include "serialise.hh" #include "serialise.hh"
#include "file-system.hh"
#include "util.hh"
#include "windows-error.hh"
#include <cerrno> #include <cerrno>
#include <cstdlib> #include <cstdlib>
@ -16,25 +22,347 @@
#include <sys/types.h> #include <sys/types.h>
#include <unistd.h> #include <unistd.h>
#define WIN32_LEAN_AND_MEAN
#include <windows.h>
namespace nix { namespace nix {
std::string runProgram(Path program, bool lookupPath, const Strings & args, using namespace nix::windows;
const std::optional<std::string> & input, bool isInteractive)
Pid::Pid() {}
Pid::Pid(AutoCloseFD pid)
: pid(std::move(pid))
{ {
throw UnimplementedError("Cannot shell out to git on Windows yet");
} }
Pid::~Pid()
{
if (pid.get() != INVALID_DESCRIPTOR)
kill();
}
void Pid::operator=(AutoCloseFD pid)
{
if (this->pid.get() != INVALID_DESCRIPTOR && this->pid.get() != pid.get())
kill();
this->pid = std::move(pid);
}
// TODO: Implement (not needed for process spawning yet)
int Pid::kill()
{
assert(pid.get() != INVALID_DESCRIPTOR);
debug("killing process %1%", pid.get());
throw UnimplementedError("Pid::kill unimplemented");
}
int Pid::wait()
{
// https://github.com/nix-windows/nix/blob/windows-meson/src/libutil/util.cc#L1938
assert(pid.get() != INVALID_DESCRIPTOR);
DWORD status = WaitForSingleObject(pid.get(), INFINITE);
if (status != WAIT_OBJECT_0) {
debug("WaitForSingleObject returned %1%", status);
}
DWORD exitCode = 0;
if (GetExitCodeProcess(pid.get(), &exitCode) == FALSE) {
debug("GetExitCodeProcess failed on pid %1%", pid.get());
}
pid.close();
return exitCode;
}
// TODO: Merge this with Unix's runProgram since it's identical logic.
std::string runProgram(
Path program, bool lookupPath, const Strings & args, const std::optional<std::string> & input, bool isInteractive)
{
auto res = runProgram(RunOptions{
.program = program, .lookupPath = lookupPath, .args = args, .input = input, .isInteractive = isInteractive});
if (!statusOk(res.first))
throw ExecError(res.first, "program '%1%' %2%", program, statusToString(res.first));
return res.second;
}
std::optional<Path> getProgramInterpreter(const Path & program)
{
// These extensions are automatically handled by Windows and don't require an interpreter.
static constexpr const char * exts[] = {".exe", ".cmd", ".bat"};
for (const auto ext : exts) {
if (hasSuffix(program, ext)) {
return {};
}
}
// TODO: Open file and read the shebang
throw UnimplementedError("getProgramInterpreter unimplemented");
}
// TODO: Not sure if this is needed in the unix version but it might be useful as a member func
void setFDInheritable(AutoCloseFD & fd, bool inherit)
{
if (fd.get() != INVALID_DESCRIPTOR) {
if (!SetHandleInformation(fd.get(), HANDLE_FLAG_INHERIT, inherit ? HANDLE_FLAG_INHERIT : 0)) {
throw WinError("Couldn't disable inheriting of handle");
}
}
}
AutoCloseFD nullFD()
{
// Create null handle to discard reads / writes
// https://stackoverflow.com/a/25609668
// https://github.com/nix-windows/nix/blob/windows-meson/src/libutil/util.cc#L2228
AutoCloseFD nul = CreateFileW(
L"NUL",
GENERIC_READ | GENERIC_WRITE,
// We don't care who reads / writes / deletes this file since it's NUL anyways
FILE_SHARE_READ | FILE_SHARE_WRITE | FILE_SHARE_DELETE,
NULL,
OPEN_EXISTING,
0,
NULL);
if (!nul.get()) {
throw WinError("Couldn't open NUL device");
}
// Let this handle be inheritable by child processes
setFDInheritable(nul, true);
return nul;
}
// Adapted from
// https://blogs.msdn.microsoft.com/twistylittlepassagesallalike/2011/04/23/everyone-quotes-command-line-arguments-the-wrong-way/
std::string windowsEscape(const std::string & str, bool cmd)
{
// TODO: This doesn't handle cmd.exe escaping.
if (cmd) {
throw UnimplementedError("cmd.exe escaping is not implemented");
}
if (str.find_first_of(" \t\n\v\"") == str.npos && !str.empty()) {
// No need to escape this one, the nonempty contents don't have a special character
return str;
}
std::string buffer;
// Add the opening quote
buffer += '"';
for (auto iter = str.begin();; ++iter) {
size_t backslashes = 0;
while (iter != str.end() && *iter == '\\') {
++iter;
++backslashes;
}
// We only escape backslashes if:
// - They come immediately before the closing quote
// - They come immediately before a quote in the middle of the string
// Both of these cases break the escaping if not handled. Otherwise backslashes are fine as-is
if (iter == str.end()) {
// Need to escape each backslash
buffer.append(backslashes * 2, '\\');
// Exit since we've reached the end of the string
break;
} else if (*iter == '"') {
// Need to escape each backslash and the intermediate quote character
buffer.append(backslashes * 2, '\\');
buffer += "\\\"";
} else {
// Don't escape the backslashes since they won't break the delimiter
buffer.append(backslashes, '\\');
buffer += *iter;
}
}
// Add the closing quote
return buffer + '"';
}
Pid spawnProcess(const Path & realProgram, const RunOptions & options, Pipe & out, Pipe & in)
{
// Setup pipes.
if (options.standardOut) {
// Don't inherit the read end of the output pipe
setFDInheritable(out.readSide, false);
} else {
out.writeSide = nullFD();
}
if (options.standardIn) {
// Don't inherit the write end of the input pipe
setFDInheritable(in.writeSide, false);
} else {
in.readSide = nullFD();
}
STARTUPINFOW startInfo = {0};
startInfo.cb = sizeof(startInfo);
startInfo.dwFlags = STARTF_USESTDHANDLES;
startInfo.hStdInput = in.readSide.get();
startInfo.hStdOutput = out.writeSide.get();
startInfo.hStdError = out.writeSide.get();
std::string envline;
// Retain the current process's environment variables.
for (const auto & envVar : getEnv()) {
envline += (envVar.first + '=' + envVar.second + '\0');
}
// Also add new ones specified in options.
if (options.environment) {
for (const auto & envVar : *options.environment) {
envline += (envVar.first + '=' + envVar.second + '\0');
}
}
std::string cmdline = windowsEscape(realProgram, false);
for (const auto & arg : options.args) {
// TODO: This isn't the right way to escape windows command
// See https://learn.microsoft.com/en-us/windows/win32/api/shellapi/nf-shellapi-commandlinetoargvw
cmdline += ' ' + windowsEscape(arg, false);
}
PROCESS_INFORMATION procInfo = {0};
if (CreateProcessW(
// EXE path is provided in the cmdline
NULL,
string_to_os_string(cmdline).data(),
NULL,
NULL,
TRUE,
CREATE_UNICODE_ENVIRONMENT | CREATE_SUSPENDED,
string_to_os_string(envline).data(),
options.chdir.has_value() ? string_to_os_string(*options.chdir).data() : NULL,
&startInfo,
&procInfo)
== 0) {
throw WinError("CreateProcessW failed (%1%)", cmdline);
}
// Convert these to use RAII
AutoCloseFD process = procInfo.hProcess;
AutoCloseFD thread = procInfo.hThread;
// Add current process and child to job object so child terminates when parent terminates
// TODO: This spawns one job per child process. We can probably keep this as a global, and
// add children to a single job so we don't use so many jobs at once.
Descriptor job = CreateJobObjectW(NULL, NULL);
if (job == NULL) {
TerminateProcess(procInfo.hProcess, 0);
throw WinError("Couldn't create job object for child process");
}
if (AssignProcessToJobObject(job, procInfo.hProcess) == FALSE) {
TerminateProcess(procInfo.hProcess, 0);
throw WinError("Couldn't assign child process to job object");
}
if (ResumeThread(procInfo.hThread) == (DWORD) -1) {
TerminateProcess(procInfo.hProcess, 0);
throw WinError("Couldn't resume child process thread");
}
return process;
}
// TODO: Merge this with Unix's runProgram since it's identical logic.
// Output = error code + "standard out" output stream // Output = error code + "standard out" output stream
std::pair<int, std::string> runProgram(RunOptions && options) std::pair<int, std::string> runProgram(RunOptions && options)
{ {
throw UnimplementedError("Cannot shell out to git on Windows yet"); StringSink sink;
} options.standardOut = &sink;
int status = 0;
try {
runProgram2(options);
} catch (ExecError & e) {
status = e.status;
}
return {status, std::move(sink.s)};
}
void runProgram2(const RunOptions & options) void runProgram2(const RunOptions & options)
{ {
throw UnimplementedError("Cannot shell out to git on Windows yet"); checkInterrupt();
assert(!(options.standardIn && options.input));
std::unique_ptr<Source> source_;
Source * source = options.standardIn;
if (options.input) {
source_ = std::make_unique<StringSource>(*options.input);
source = source_.get();
}
/* Create a pipe. */
Pipe out, in;
// TODO: I copied this from unix but this is handled again in spawnProcess, so might be weird to split it up like
// this
if (options.standardOut)
out.create();
if (source)
in.create();
Path realProgram = options.program;
// TODO: Implement shebang / program interpreter lookup on Windows
auto interpreter = getProgramInterpreter(realProgram);
std::optional<Finally<std::function<void()>>> resumeLoggerDefer;
if (options.isInteractive) {
logger->pause();
resumeLoggerDefer.emplace([]() { logger->resume(); });
}
Pid pid = spawnProcess(interpreter.has_value() ? *interpreter : realProgram, options, out, in);
// TODO: This is identical to unix, deduplicate?
out.writeSide.close();
std::thread writerThread;
std::promise<void> promise;
Finally doJoin([&] {
if (writerThread.joinable())
writerThread.join();
});
if (source) {
in.readSide.close();
writerThread = std::thread([&] {
try {
std::vector<char> buf(8 * 1024);
while (true) {
size_t n;
try {
n = source->read(buf.data(), buf.size());
} catch (EndOfFile &) {
break;
}
writeFull(in.writeSide.get(), {buf.data(), n});
}
promise.set_value();
} catch (...) {
promise.set_exception(std::current_exception());
}
in.writeSide.close();
});
}
if (options.standardOut)
drainFD(out.readSide.get(), *options.standardOut);
/* Wait for the child to finish. */
int status = pid.wait();
/* Wait for the writer thread to finish. */
if (source)
promise.get_future().get();
if (status)
throw ExecError(status, "program '%1%' %2%", options.program, statusToString(status));
} }
std::string statusToString(int status) std::string statusToString(int status)
@ -45,10 +373,8 @@ std::string statusToString(int status)
return "succeeded"; return "succeeded";
} }
bool statusOk(int status) bool statusOk(int status)
{ {
return status == 0; return status == 0;
} }
} }
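
To make the quoting rules of the `windowsEscape` helper added above concrete, here is a hedged sketch of its expected behaviour for `cmd == false` (illustrative assertions, not part of the patch):

    assert(windowsEscape("simple", false) == "simple");                    // no special characters, returned as-is
    assert(windowsEscape("has space", false) == "\"has space\"");          // wrapped in quotes
    assert(windowsEscape("trailing\\", false) == "\"trailing\\\\\"");      // backslashes before the closing quote are doubled
    assert(windowsEscape("say \"hi\"", false) == "\"say \\\"hi\\\"\"");    // embedded quotes become \" (backslashes right before them are doubled)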

View File

@ -115,7 +115,7 @@ bool createUserEnv(EvalState & state, PackageInfos & elems,
std::string str2 = str.str(); std::string str2 = str.str();
StringSource source { str2 }; StringSource source { str2 };
state.store->addToStoreFromDump( state.store->addToStoreFromDump(
source, "env-manifest.nix", FileSerialisationMethod::Flat, TextIngestionMethod {}, HashAlgorithm::SHA256, references); source, "env-manifest.nix", FileSerialisationMethod::Flat, ContentAddressMethod::Raw::Text, HashAlgorithm::SHA256, references);
}); });
/* Get the environment builder expression. */ /* Get the environment builder expression. */

View File

@ -194,10 +194,10 @@ static void opAdd(Strings opFlags, Strings opArgs)
store. */ store. */
static void opAddFixed(Strings opFlags, Strings opArgs) static void opAddFixed(Strings opFlags, Strings opArgs)
{ {
auto method = FileIngestionMethod::Flat; ContentAddressMethod method = ContentAddressMethod::Raw::Flat;
for (auto & i : opFlags) for (auto & i : opFlags)
if (i == "--recursive") method = FileIngestionMethod::Recursive; if (i == "--recursive") method = ContentAddressMethod::Raw::NixArchive;
else throw UsageError("unknown flag '%1%'", i); else throw UsageError("unknown flag '%1%'", i);
if (opArgs.empty()) if (opArgs.empty())
@ -223,7 +223,7 @@ static void opPrintFixedPath(Strings opFlags, Strings opArgs)
auto method = FileIngestionMethod::Flat; auto method = FileIngestionMethod::Flat;
for (auto i : opFlags) for (auto i : opFlags)
if (i == "--recursive") method = FileIngestionMethod::Recursive; if (i == "--recursive") method = FileIngestionMethod::NixArchive;
else throw UsageError("unknown flag '%1%'", i); else throw UsageError("unknown flag '%1%'", i);
if (opArgs.size() != 3) if (opArgs.size() != 3)
@ -563,7 +563,7 @@ static void registerValidity(bool reregister, bool hashGiven, bool canonicalise)
if (!hashGiven) { if (!hashGiven) {
HashResult hash = hashPath( HashResult hash = hashPath(
{store->getFSAccessor(false), CanonPath { store->printStorePath(info->path) }}, {store->getFSAccessor(false), CanonPath { store->printStorePath(info->path) }},
FileSerialisationMethod::Recursive, HashAlgorithm::SHA256); FileSerialisationMethod::NixArchive, HashAlgorithm::SHA256);
info->narHash = hash.first; info->narHash = hash.first;
info->narSize = hash.second; info->narSize = hash.second;
} }

View File

@ -12,7 +12,7 @@ struct CmdAddToStore : MixDryRun, StoreCommand
{ {
Path path; Path path;
std::optional<std::string> namePart; std::optional<std::string> namePart;
ContentAddressMethod caMethod = FileIngestionMethod::Recursive; ContentAddressMethod caMethod = ContentAddressMethod::Raw::NixArchive;
HashAlgorithm hashAlgo = HashAlgorithm::SHA256; HashAlgorithm hashAlgo = HashAlgorithm::SHA256;
CmdAddToStore() CmdAddToStore()
@ -68,7 +68,7 @@ struct CmdAddFile : CmdAddToStore
{ {
CmdAddFile() CmdAddFile()
{ {
caMethod = FileIngestionMethod::Flat; caMethod = ContentAddressMethod::Raw::Flat;
} }
std::string description() override std::string description() override

View File

@ -238,7 +238,7 @@ static StorePath getDerivationEnvironment(ref<Store> store, ref<Store> evalStore
auto getEnvShPath = ({ auto getEnvShPath = ({
StringSource source { getEnvSh }; StringSource source { getEnvSh };
evalStore->addToStoreFromDump( evalStore->addToStoreFromDump(
source, "get-env.sh", FileSerialisationMethod::Flat, TextIngestionMethod {}, HashAlgorithm::SHA256, {}); source, "get-env.sh", FileSerialisationMethod::Flat, ContentAddressMethod::Raw::Text, HashAlgorithm::SHA256, {});
}); });
drv.args = {store->printStorePath(getEnvShPath)}; drv.args = {store->printStorePath(getEnvShPath)};

View File

@ -68,7 +68,7 @@ struct CmdHashBase : Command
switch (mode) { switch (mode) {
case FileIngestionMethod::Flat: case FileIngestionMethod::Flat:
return "print cryptographic hash of a regular file"; return "print cryptographic hash of a regular file";
case FileIngestionMethod::Recursive: case FileIngestionMethod::NixArchive:
return "print cryptographic hash of the NAR serialisation of a path"; return "print cryptographic hash of the NAR serialisation of a path";
case FileIngestionMethod::Git: case FileIngestionMethod::Git:
return "print cryptographic hash of the Git serialisation of a path"; return "print cryptographic hash of the Git serialisation of a path";
@ -91,7 +91,7 @@ struct CmdHashBase : Command
Hash h { HashAlgorithm::SHA256 }; // throwaway def to appease C++ Hash h { HashAlgorithm::SHA256 }; // throwaway def to appease C++
switch (mode) { switch (mode) {
case FileIngestionMethod::Flat: case FileIngestionMethod::Flat:
case FileIngestionMethod::Recursive: case FileIngestionMethod::NixArchive:
{ {
auto hashSink = makeSink(); auto hashSink = makeSink();
dumpPath(path2, *hashSink, (FileSerialisationMethod) mode); dumpPath(path2, *hashSink, (FileSerialisationMethod) mode);
@ -126,7 +126,7 @@ struct CmdHashBase : Command
struct CmdHashPath : CmdHashBase struct CmdHashPath : CmdHashBase
{ {
CmdHashPath() CmdHashPath()
: CmdHashBase(FileIngestionMethod::Recursive) : CmdHashBase(FileIngestionMethod::NixArchive)
{ {
addFlag(flag::hashAlgo("algo", &hashAlgo)); addFlag(flag::hashAlgo("algo", &hashAlgo));
addFlag(flag::fileIngestionMethod(&mode)); addFlag(flag::fileIngestionMethod(&mode));
@ -311,7 +311,7 @@ static int compatNixHash(int argc, char * * argv)
}); });
if (op == opHash) { if (op == opHash) {
CmdHashBase cmd(flat ? FileIngestionMethod::Flat : FileIngestionMethod::Recursive); CmdHashBase cmd(flat ? FileIngestionMethod::Flat : FileIngestionMethod::NixArchive);
if (!hashAlgo.has_value()) hashAlgo = HashAlgorithm::MD5; if (!hashAlgo.has_value()) hashAlgo = HashAlgorithm::MD5;
cmd.hashAlgo = hashAlgo.value(); cmd.hashAlgo = hashAlgo.value();
cmd.hashFormat = hashFormat; cmd.hashFormat = hashFormat;

View File

@ -57,7 +57,9 @@ std::tuple<StorePath, Hash> prefetchFile(
bool unpack, bool unpack,
bool executable) bool executable)
{ {
auto ingestionMethod = unpack || executable ? FileIngestionMethod::Recursive : FileIngestionMethod::Flat; ContentAddressMethod method = unpack || executable
? ContentAddressMethod::Raw::NixArchive
: ContentAddressMethod::Raw::Flat;
/* Figure out a name in the Nix store. */ /* Figure out a name in the Nix store. */
if (!name) { if (!name) {
@ -73,11 +75,10 @@ std::tuple<StorePath, Hash> prefetchFile(
the store. */ the store. */
if (expectedHash) { if (expectedHash) {
hashAlgo = expectedHash->algo; hashAlgo = expectedHash->algo;
storePath = store->makeFixedOutputPath(*name, FixedOutputInfo { storePath = store->makeFixedOutputPathFromCA(*name, ContentAddressWithReferences::fromParts(
.method = ingestionMethod, method,
.hash = *expectedHash, *expectedHash,
.references = {}, {}));
});
if (store->isValidPath(*storePath)) if (store->isValidPath(*storePath))
hash = expectedHash; hash = expectedHash;
else else
@ -128,7 +129,7 @@ std::tuple<StorePath, Hash> prefetchFile(
auto info = store->addToStoreSlow( auto info = store->addToStoreSlow(
*name, PosixSourceAccessor::createAtRoot(tmpFile), *name, PosixSourceAccessor::createAtRoot(tmpFile),
ingestionMethod, hashAlgo, {}, expectedHash); method, hashAlgo, {}, expectedHash);
storePath = info.path; storePath = info.path;
assert(info.ca); assert(info.ca);
hash = info.ca->hash; hash = info.ca->hash;
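
A minimal sketch of the replacement pattern used in the hunk above, assuming the same `store`, `name`, `method`, and `expectedHash` variables:

    auto expectedPath = store->makeFixedOutputPathFromCA(
        *name,
        ContentAddressWithReferences::fromParts(
            method,            // e.g. ContentAddressMethod::Raw::NixArchive or ::Flat
            *expectedHash,
            {}));              // no references
    // `fromParts` takes over the role of the old FixedOutputInfo literal, presumably so that
    // all ContentAddressMethod variants can share the same code path.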

View File

@ -258,7 +258,7 @@ struct ProfileManifest
*store, *store,
"profile", "profile",
FixedOutputInfo { FixedOutputInfo {
.method = FileIngestionMethod::Recursive, .method = FileIngestionMethod::NixArchive,
.hash = narHash, .hash = narHash,
.references = { .references = {
.others = std::move(references), .others = std::move(references),

View File

@ -259,7 +259,7 @@ hashPath(char * algo, int base32, char * path)
try { try {
Hash h = hashPath( Hash h = hashPath(
PosixSourceAccessor::createAtRoot(path), PosixSourceAccessor::createAtRoot(path),
FileIngestionMethod::Recursive, parseHashAlgo(algo)).first; FileIngestionMethod::NixArchive, parseHashAlgo(algo)).first;
auto s = h.to_string(base32 ? HashFormat::Nix32 : HashFormat::Base16, false); auto s = h.to_string(base32 ? HashFormat::Nix32 : HashFormat::Base16, false);
XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0))); XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
} catch (Error & e) { } catch (Error & e) {
@ -335,7 +335,7 @@ SV *
StoreWrapper::addToStore(char * srcPath, int recursive, char * algo) StoreWrapper::addToStore(char * srcPath, int recursive, char * algo)
PPCODE: PPCODE:
try { try {
auto method = recursive ? FileIngestionMethod::Recursive : FileIngestionMethod::Flat; auto method = recursive ? ContentAddressMethod::Raw::NixArchive : ContentAddressMethod::Raw::Flat;
auto path = THIS->store->addToStore( auto path = THIS->store->addToStore(
std::string(baseNameOf(srcPath)), std::string(baseNameOf(srcPath)),
PosixSourceAccessor::createAtRoot(srcPath), PosixSourceAccessor::createAtRoot(srcPath),
@ -351,7 +351,7 @@ StoreWrapper::makeFixedOutputPath(int recursive, char * algo, char * hash, char
PPCODE: PPCODE:
try { try {
auto h = Hash::parseAny(hash, parseHashAlgo(algo)); auto h = Hash::parseAny(hash, parseHashAlgo(algo));
auto method = recursive ? FileIngestionMethod::Recursive : FileIngestionMethod::Flat; auto method = recursive ? FileIngestionMethod::NixArchive : FileIngestionMethod::Flat;
auto path = THIS->store->makeFixedOutputPath(name, FixedOutputInfo { auto path = THIS->store->makeFixedOutputPath(name, FixedOutputInfo {
.method = method, .method = method,
.hash = h, .hash = h,

View File

@ -3,7 +3,7 @@
# Test the function for lang.sh # Test the function for lang.sh
source common.sh source common.sh
source lang/framework.sh source characterisation/framework.sh
# We are testing this, so don't want outside world to affect us. # We are testing this, so don't want outside world to affect us.
unset _NIX_TEST_ACCEPT unset _NIX_TEST_ACCEPT

View File

@ -0,0 +1,77 @@
# shellcheck shell=bash
# Golden test support
#
# Test that the output of the given test matches what is expected. If
# `_NIX_TEST_ACCEPT` is non-empty also update the expected output so
# that next time the test succeeds.
function diffAndAcceptInner() {
local -r testName=$1
local -r got="$2"
local -r expected="$3"
# Absence of expected file indicates empty output expected.
if test -e "$expected"; then
local -r expectedOrEmpty="$expected"
else
local -r expectedOrEmpty=characterisation/empty
fi
# Diff so we get a nice message
if ! diff --color=always --unified "$expectedOrEmpty" "$got"; then
echo "FAIL: evaluation result of $testName not as expected"
# shellcheck disable=SC2034
badDiff=1
fi
# Update expected if `_NIX_TEST_ACCEPT` is non-empty.
if test -n "${_NIX_TEST_ACCEPT-}"; then
cp "$got" "$expected"
# Delete empty expected files to avoid bloating the repo with
# empty files.
if ! test -s "$expected"; then
rm "$expected"
fi
fi
}
function characterisationTestExit() {
# Make sure shellcheck knows all these will be defined by the caller
: "${badDiff?} ${badExitCode?}"
if test -n "${_NIX_TEST_ACCEPT-}"; then
if (( "$badDiff" )); then
set +x
echo 'Output did not match, but accepted output as the persisted expected output.'
echo 'That means the next time the tests are run, they should pass.'
set -x
else
set +x
echo 'NOTE: Environment variable _NIX_TEST_ACCEPT is defined,'
echo 'indicating the unexpected output should be accepted as the expected output going forward,'
echo 'but no tests had unexpected output so there was no expected output to update.'
set -x
fi
if (( "$badExitCode" )); then
exit "$badExitCode"
else
skipTest "regenerating golden masters"
fi
else
if (( "$badDiff" )); then
set +x
echo ''
echo 'You can rerun this test with:'
echo ''
echo " _NIX_TEST_ACCEPT=1 make tests/functional/${TEST_NAME}.test"
echo ''
echo 'to regenerate the files containing the expected output,'
echo 'and then view the git diff to decide whether a change is'
echo 'good/intentional or bad/unintentional.'
echo 'If the diff contains arbitrary or impure information,'
echo 'please improve the normalization that the test applies to the output.'
set -x
fi
exit $(( "$badExitCode" + "$badDiff" ))
fi
}

View File

@ -0,0 +1,20 @@
# shellcheck shell=bash
commonDir="$(readlink -f "$(dirname "${BASH_SOURCE[0]-$0}")")"
# Since this is a generated file
# shellcheck disable=SC1091
source "$commonDir/subst-vars.sh"
# Make sure shellcheck knows this will be defined by the above generated snippet
: "${bindir?}"
export PATH="$bindir:$PATH"
if [[ -n "${NIX_CLIENT_PACKAGE:-}" ]]; then
export PATH="$NIX_CLIENT_PACKAGE/bin":$PATH
fi
DAEMON_PATH="$PATH"
if [[ -n "${NIX_DAEMON_PACKAGE:-}" ]]; then
DAEMON_PATH="${NIX_DAEMON_PACKAGE}/bin:$DAEMON_PATH"
fi

View File

@ -1,10 +1,10 @@
# NOTE: instances of @variable@ are substituted as defined in /mk/templates.mk # NOTE: instances of @variable@ are substituted as defined in /mk/templates.mk
if ! isTestOnNixOS; then if [[ -z "${COMMON_SUBST_VARS_SH_SOURCED-}" ]]; then
export SHELL="@bash@"
export PATH=@bindir@:$PATH
fi
COMMON_SUBST_VARS_SH_SOURCED=1
bindir=@bindir@
export coreutils=@coreutils@ export coreutils=@coreutils@
#lsof=@lsof@ #lsof=@lsof@
@ -16,3 +16,10 @@ export version=@PACKAGE_VERSION@
export system=@system@ export system=@system@
export BUILD_SHARED_LIBS=@BUILD_SHARED_LIBS@ export BUILD_SHARED_LIBS=@BUILD_SHARED_LIBS@
if ! isTestOnNixOS; then
export SHELL="@bash@"
export PATH=@bindir@:$PATH
fi
fi

View File

@ -0,0 +1,4 @@
# shellcheck shell=bash
TEST_ROOT=$(realpath "${TMPDIR:-/tmp}/nix-test")/${TEST_NAME:-default/tests\/functional//}
export TEST_ROOT

View File

@ -21,9 +21,11 @@ commonDir="$(readlink -f "$(dirname "${BASH_SOURCE[0]-$0}")")"
source "$commonDir/subst-vars.sh" source "$commonDir/subst-vars.sh"
# Make sure shellcheck knows all these will be defined by the above generated snippet # Make sure shellcheck knows all these will be defined by the above generated snippet
: "${PATH?} ${coreutils?} ${dot?} ${SHELL?} ${PAGER?} ${busybox?} ${version?} ${system?} ${BUILD_SHARED_LIBS?}" : "${bindir?} ${coreutils?} ${dot?} ${SHELL?} ${PAGER?} ${busybox?} ${version?} ${system?} ${BUILD_SHARED_LIBS?}"
source "$commonDir/paths.sh"
source "$commonDir/test-root.sh"
export TEST_ROOT=$(realpath ${TMPDIR:-/tmp}/nix-test)/${TEST_NAME:-default/tests\/functional//}
test_nix_conf_dir=$TEST_ROOT/etc test_nix_conf_dir=$TEST_ROOT/etc
test_nix_conf=$test_nix_conf_dir/nix.conf test_nix_conf=$test_nix_conf_dir/nix.conf
@ -60,14 +62,6 @@ unset XDG_CONFIG_HOME
unset XDG_CONFIG_DIRS unset XDG_CONFIG_DIRS
unset XDG_CACHE_HOME unset XDG_CACHE_HOME
if [[ -n "${NIX_CLIENT_PACKAGE:-}" ]]; then
export PATH="$NIX_CLIENT_PACKAGE/bin":$PATH
fi
DAEMON_PATH="$PATH"
if [[ -n "${NIX_DAEMON_PACKAGE:-}" ]]; then
DAEMON_PATH="${NIX_DAEMON_PACKAGE}/bin:$DAEMON_PATH"
fi
export IMPURE_VAR1=foo export IMPURE_VAR1=foo
export IMPURE_VAR2=bar export IMPURE_VAR2=bar

View File

@ -0,0 +1,23 @@
#!/usr/bin/env bash
source common/test-root.sh
source common/paths.sh
set -o pipefail
source characterisation/framework.sh
badDiff=0
badExitCode=0
store="$TEST_ROOT/store"
for nixFile in derivation/*.nix; do
drvPath=$(nix-instantiate --store "$store" --pure-eval --expr "$(< "$nixFile")")
testName=$(basename "$nixFile" .nix)
got="${store}${drvPath}"
expected="derivation/$testName.drv"
diffAndAcceptInner "$testName" "$got" "$expected"
done
characterisationTestExit

View File

@ -0,0 +1 @@
Derive([("out","/nix/store/1qsc7svv43m4dw2prh6mvyf7cai5czji-advanced-attributes-defaults","","")],[],[],"my-system","/bin/bash",["-c","echo hello > $out"],[("builder","/bin/bash"),("name","advanced-attributes-defaults"),("out","/nix/store/1qsc7svv43m4dw2prh6mvyf7cai5czji-advanced-attributes-defaults"),("system","my-system")])

View File

@ -0,0 +1,6 @@
derivation {
name = "advanced-attributes-defaults";
system = "my-system";
builder = "/bin/bash";
args = [ "-c" "echo hello > $out" ];
}

View File

@ -0,0 +1 @@
Derive([("dev","/nix/store/8bazivnbipbyi569623skw5zm91z6kc2-advanced-attributes-structured-attrs-defaults-dev","",""),("out","/nix/store/f8f8nvnx32bxvyxyx2ff7akbvwhwd9dw-advanced-attributes-structured-attrs-defaults","","")],[],[],"my-system","/bin/bash",["-c","echo hello > $out"],[("__json","{\"builder\":\"/bin/bash\",\"name\":\"advanced-attributes-structured-attrs-defaults\",\"outputs\":[\"out\",\"dev\"],\"system\":\"my-system\"}"),("dev","/nix/store/8bazivnbipbyi569623skw5zm91z6kc2-advanced-attributes-structured-attrs-defaults-dev"),("out","/nix/store/f8f8nvnx32bxvyxyx2ff7akbvwhwd9dw-advanced-attributes-structured-attrs-defaults")])

View File

@ -0,0 +1,8 @@
derivation {
name = "advanced-attributes-structured-attrs-defaults";
system = "my-system";
builder = "/bin/bash";
args = [ "-c" "echo hello > $out" ];
outputs = [ "out" "dev" ];
__structuredAttrs = true;
}

View File

@ -0,0 +1 @@
Derive([("bin","/nix/store/pbzb48v0ycf80jgligcp4n8z0rblna4n-advanced-attributes-structured-attrs-bin","",""),("dev","/nix/store/7xapi8jv7flcz1qq8jhw55ar8ag8hldh-advanced-attributes-structured-attrs-dev","",""),("out","/nix/store/mpq3l1l1qc2yr50q520g08kprprwv79f-advanced-attributes-structured-attrs","","")],[("/nix/store/4xm4wccqsvagz9gjksn24s7rip2fdy7v-foo.drv",["out"]),("/nix/store/plsq5jbr5nhgqwcgb2qxw7jchc09dnl8-bar.drv",["out"])],[],"my-system","/bin/bash",["-c","echo hello > $out"],[("__json","{\"__darwinAllowLocalNetworking\":true,\"__impureHostDeps\":[\"/usr/bin/ditto\"],\"__noChroot\":true,\"__sandboxProfile\":\"sandcastle\",\"allowSubstitutes\":false,\"builder\":\"/bin/bash\",\"impureEnvVars\":[\"UNICORN\"],\"name\":\"advanced-attributes-structured-attrs\",\"outputChecks\":{\"bin\":{\"disallowedReferences\":[\"/nix/store/7rhsm8i393hm1wcsmph782awg1hi2f7x-bar\"],\"disallowedRequisites\":[\"/nix/store/7rhsm8i393hm1wcsmph782awg1hi2f7x-bar\"]},\"dev\":{\"maxClosureSize\":5909,\"maxSize\":789},\"out\":{\"allowedReferences\":[\"/nix/store/3c08bzb71z4wiag719ipjxr277653ynp-foo\"],\"allowedRequisites\":[\"/nix/store/3c08bzb71z4wiag719ipjxr277653ynp-foo\"]}},\"outputs\":[\"out\",\"bin\",\"dev\"],\"preferLocalBuild\":true,\"requiredSystemFeatures\":[\"rainbow\",\"uid-range\"],\"system\":\"my-system\"}"),("bin","/nix/store/pbzb48v0ycf80jgligcp4n8z0rblna4n-advanced-attributes-structured-attrs-bin"),("dev","/nix/store/7xapi8jv7flcz1qq8jhw55ar8ag8hldh-advanced-attributes-structured-attrs-dev"),("out","/nix/store/mpq3l1l1qc2yr50q520g08kprprwv79f-advanced-attributes-structured-attrs")])

View File

@ -0,0 +1,45 @@
let
system = "my-system";
foo = derivation {
inherit system;
name = "foo";
builder = "/bin/bash";
args = ["-c" "echo foo > $out"];
};
bar = derivation {
inherit system;
name = "bar";
builder = "/bin/bash";
args = ["-c" "echo bar > $out"];
};
in
derivation {
inherit system;
name = "advanced-attributes-structured-attrs";
builder = "/bin/bash";
args = [ "-c" "echo hello > $out" ];
__sandboxProfile = "sandcastle";
__noChroot = true;
__impureHostDeps = ["/usr/bin/ditto"];
impureEnvVars = ["UNICORN"];
__darwinAllowLocalNetworking = true;
outputs = [ "out" "bin" "dev" ];
__structuredAttrs = true;
outputChecks = {
out = {
allowedReferences = [foo];
allowedRequisites = [foo];
};
bin = {
disallowedReferences = [bar];
disallowedRequisites = [bar];
};
dev = {
maxSize = 789;
maxClosureSize = 5909;
};
};
requiredSystemFeatures = ["rainbow" "uid-range"];
preferLocalBuild = true;
allowSubstitutes = false;
}

View File

@ -0,0 +1 @@
Derive([("out","/nix/store/33a6fdmn8q9ih9d7npbnrxn2q56a4l8q-advanced-attributes","","")],[("/nix/store/4xm4wccqsvagz9gjksn24s7rip2fdy7v-foo.drv",["out"]),("/nix/store/plsq5jbr5nhgqwcgb2qxw7jchc09dnl8-bar.drv",["out"])],[],"my-system","/bin/bash",["-c","echo hello > $out"],[("__darwinAllowLocalNetworking","1"),("__impureHostDeps","/usr/bin/ditto"),("__noChroot","1"),("__sandboxProfile","sandcastle"),("allowSubstitutes",""),("allowedReferences","/nix/store/3c08bzb71z4wiag719ipjxr277653ynp-foo"),("allowedRequisites","/nix/store/3c08bzb71z4wiag719ipjxr277653ynp-foo"),("builder","/bin/bash"),("disallowedReferences","/nix/store/7rhsm8i393hm1wcsmph782awg1hi2f7x-bar"),("disallowedRequisites","/nix/store/7rhsm8i393hm1wcsmph782awg1hi2f7x-bar"),("impureEnvVars","UNICORN"),("name","advanced-attributes"),("out","/nix/store/33a6fdmn8q9ih9d7npbnrxn2q56a4l8q-advanced-attributes"),("preferLocalBuild","1"),("requiredSystemFeatures","rainbow uid-range"),("system","my-system")])

View File

@ -0,0 +1,33 @@
let
system = "my-system";
foo = derivation {
inherit system;
name = "foo";
builder = "/bin/bash";
args = ["-c" "echo foo > $out"];
};
bar = derivation {
inherit system;
name = "bar";
builder = "/bin/bash";
args = ["-c" "echo bar > $out"];
};
in
derivation {
inherit system;
name = "advanced-attributes";
builder = "/bin/bash";
args = [ "-c" "echo hello > $out" ];
__sandboxProfile = "sandcastle";
__noChroot = true;
__impureHostDeps = ["/usr/bin/ditto"];
impureEnvVars = ["UNICORN"];
__darwinAllowLocalNetworking = true;
allowedReferences = [foo];
allowedRequisites = [foo];
disallowedReferences = [bar];
disallowedRequisites = [bar];
requiredSystemFeatures = ["rainbow" "uid-range"];
preferLocalBuild = true;
allowSubstitutes = false;
}

View File

@ -4,7 +4,7 @@ source common.sh
set -o pipefail set -o pipefail
source lang/framework.sh source characterisation/framework.sh
# specialize function a bit # specialize function a bit
function diffAndAccept() { function diffAndAccept() {
@ -138,32 +138,4 @@ for i in lang/eval-okay-*.nix; do
fi fi
done done
if test -n "${_NIX_TEST_ACCEPT-}"; then characterisationTestExit
if (( "$badDiff" )); then
echo 'Output did mot match, but accepted output as the persisted expected output.'
echo 'That means the next time the tests are run, they should pass.'
else
echo 'NOTE: Environment variable _NIX_TEST_ACCEPT is defined,'
echo 'indicating the unexpected output should be accepted as the expected output going forward,'
echo 'but no tests had unexpected output so there was no expected output to update.'
fi
if (( "$badExitCode" )); then
exit "$badExitCode"
else
skipTest "regenerating golden masters"
fi
else
if (( "$badDiff" )); then
echo ''
echo 'You can rerun this test with:'
echo ''
echo ' _NIX_TEST_ACCEPT=1 make tests/functional/lang.sh.test'
echo ''
echo 'to regenerate the files containing the expected output,'
echo 'and then view the git diff to decide whether a change is'
echo 'good/intentional or bad/unintentional.'
echo 'If the diff contains arbitrary or impure information,'
echo 'please improve the normalization that the test applies to the output.'
fi
exit $(( "$badExitCode" + "$badDiff" ))
fi

View File

@ -0,0 +1,6 @@
warning: In a derivation named 'eval-okay-derivation-legacy', 'structuredAttrs' disables the effect of the derivation attribute 'allowedReferences'; use 'outputChecks.<output>.allowedReferences' instead
warning: In a derivation named 'eval-okay-derivation-legacy', 'structuredAttrs' disables the effect of the derivation attribute 'allowedRequisites'; use 'outputChecks.<output>.allowedRequisites' instead
warning: In a derivation named 'eval-okay-derivation-legacy', 'structuredAttrs' disables the effect of the derivation attribute 'disallowedReferences'; use 'outputChecks.<output>.disallowedReferences' instead
warning: In a derivation named 'eval-okay-derivation-legacy', 'structuredAttrs' disables the effect of the derivation attribute 'disallowedRequisites'; use 'outputChecks.<output>.disallowedRequisites' instead
warning: In a derivation named 'eval-okay-derivation-legacy', 'structuredAttrs' disables the effect of the derivation attribute 'maxClosureSize'; use 'outputChecks.<output>.maxClosureSize' instead
warning: In a derivation named 'eval-okay-derivation-legacy', 'structuredAttrs' disables the effect of the derivation attribute 'maxSize'; use 'outputChecks.<output>.maxSize' instead

View File

@ -0,0 +1 @@
"/nix/store/mzgwvrjjir216ra58mwwizi8wj6y9ddr-eval-okay-derivation-legacy"

View File

@ -0,0 +1,12 @@
(builtins.derivationStrict {
name = "eval-okay-derivation-legacy";
system = "x86_64-linux";
builder = "/dontcare";
__structuredAttrs = true;
allowedReferences = [ ];
disallowedReferences = [ ];
allowedRequisites = [ ];
disallowedRequisites = [ ];
maxSize = 1234;
maxClosureSize = 12345;
}).out

View File

@ -1,33 +0,0 @@
# Golden test support
#
# Test that the output of the given test matches what is expected. If
# `_NIX_TEST_ACCEPT` is non-empty also update the expected output so
# that next time the test succeeds.
function diffAndAcceptInner() {
local -r testName=$1
local -r got="$2"
local -r expected="$3"
# Absence of expected file indicates empty output expected.
if test -e "$expected"; then
local -r expectedOrEmpty="$expected"
else
local -r expectedOrEmpty=lang/empty.exp
fi
# Diff so we get a nice message
if ! diff --color=always --unified "$expectedOrEmpty" "$got"; then
echo "FAIL: evaluation result of $testName not as expected"
badDiff=1
fi
# Update expected if `_NIX_TEST_ACCEPT` is non-empty.
if test -n "${_NIX_TEST_ACCEPT-}"; then
cp "$got" "$expected"
# Delete empty expected files to avoid bloating the repo with
# empty files.
if ! test -s "$expected"; then
rm "$expected"
fi
fi
}

View File

@ -23,7 +23,7 @@ nix_tests = \
remote-store.sh \ remote-store.sh \
legacy-ssh-store.sh \ legacy-ssh-store.sh \
lang.sh \ lang.sh \
lang-test-infra.sh \ characterisation-test-infra.sh \
experimental-features.sh \ experimental-features.sh \
fetchMercurial.sh \ fetchMercurial.sh \
gc-auto.sh \ gc-auto.sh \
@ -106,6 +106,7 @@ nix_tests = \
eval-store.sh \ eval-store.sh \
why-depends.sh \ why-depends.sh \
derivation-json.sh \ derivation-json.sh \
derivation-advanced-attributes.sh \
import-derivation.sh \ import-derivation.sh \
nix_path.sh \ nix_path.sh \
case-hack.sh \ case-hack.sh \

View File

@ -8,19 +8,27 @@
namespace nix { namespace nix {
class LibStoreTest : public virtual ::testing::Test { class LibStoreTest : public virtual ::testing::Test
public: {
static void SetUpTestSuite() { public:
initLibStore(false); static void SetUpTestSuite()
} {
initLibStore(false);
}
protected: protected:
LibStoreTest() LibStoreTest()
: store(openStore("dummy://")) : store(openStore({
{ } .variant =
StoreReference::Specified{
.scheme = "dummy",
},
.params = {},
}))
{
}
ref<Store> store; ref<Store> store;
}; };
} /* namespace nix */ } /* namespace nix */
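
For orientation, a hedged sketch of the two ways of opening the dummy test store touched by this hunk (assuming the string overload of `openStore` remains available and that the brace-initialised argument is a `StoreReference`):

    ref<Store> byUri = openStore("dummy://");                          // previous fixture form
    ref<Store> byRef = openStore({
        .variant = StoreReference::Specified { .scheme = "dummy" },
        .params = {},
    });                                                                // form now used by LibStoreTest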

View File

@ -83,15 +83,15 @@ CHARACTERIZATION_TEST(
"content-address", "content-address",
(std::tuple<ContentAddress, ContentAddress, ContentAddress> { (std::tuple<ContentAddress, ContentAddress, ContentAddress> {
ContentAddress { ContentAddress {
.method = TextIngestionMethod {}, .method = ContentAddressMethod::Raw::Text,
.hash = hashString(HashAlgorithm::SHA256, "Derive(...)"), .hash = hashString(HashAlgorithm::SHA256, "Derive(...)"),
}, },
ContentAddress { ContentAddress {
.method = FileIngestionMethod::Flat, .method = ContentAddressMethod::Raw::Flat,
.hash = hashString(HashAlgorithm::SHA1, "blob blob..."), .hash = hashString(HashAlgorithm::SHA1, "blob blob..."),
}, },
ContentAddress { ContentAddress {
.method = FileIngestionMethod::Recursive, .method = ContentAddressMethod::Raw::NixArchive,
.hash = hashString(HashAlgorithm::SHA256, "(...)"), .hash = hashString(HashAlgorithm::SHA256, "(...)"),
}, },
})) }))
@ -178,7 +178,7 @@ CHARACTERIZATION_TEST(
std::nullopt, std::nullopt,
std::optional { std::optional {
ContentAddress { ContentAddress {
.method = FileIngestionMethod::Flat, .method = ContentAddressMethod::Raw::Flat,
.hash = hashString(HashAlgorithm::SHA1, "blob blob..."), .hash = hashString(HashAlgorithm::SHA1, "blob blob..."),
}, },
}, },

View File

@ -9,11 +9,11 @@ namespace nix {
* --------------------------------------------------------------------------*/ * --------------------------------------------------------------------------*/
TEST(ContentAddressMethod, testRoundTripPrintParse_1) { TEST(ContentAddressMethod, testRoundTripPrintParse_1) {
for (const ContentAddressMethod & cam : { for (ContentAddressMethod cam : {
ContentAddressMethod { TextIngestionMethod {} }, ContentAddressMethod::Raw::Text,
ContentAddressMethod { FileIngestionMethod::Flat }, ContentAddressMethod::Raw::Flat,
ContentAddressMethod { FileIngestionMethod::Recursive }, ContentAddressMethod::Raw::NixArchive,
ContentAddressMethod { FileIngestionMethod::Git }, ContentAddressMethod::Raw::Git,
}) { }) {
EXPECT_EQ(ContentAddressMethod::parse(cam.render()), cam); EXPECT_EQ(ContentAddressMethod::parse(cam.render()), cam);
} }

View File

@ -0,0 +1 @@
../../../../functional/derivation/advanced-attributes-defaults.drv

View File

@ -0,0 +1,22 @@
{
"args": [
"-c",
"echo hello > $out"
],
"builder": "/bin/bash",
"env": {
"builder": "/bin/bash",
"name": "advanced-attributes-defaults",
"out": "/nix/store/1qsc7svv43m4dw2prh6mvyf7cai5czji-advanced-attributes-defaults",
"system": "my-system"
},
"inputDrvs": {},
"inputSrcs": [],
"name": "advanced-attributes-defaults",
"outputs": {
"out": {
"path": "/nix/store/1qsc7svv43m4dw2prh6mvyf7cai5czji-advanced-attributes-defaults"
}
},
"system": "my-system"
}

View File

@ -0,0 +1 @@
../../../../functional/derivation/advanced-attributes-structured-attrs-defaults.drv

View File

@ -0,0 +1,24 @@
{
"args": [
"-c",
"echo hello > $out"
],
"builder": "/bin/bash",
"env": {
"__json": "{\"builder\":\"/bin/bash\",\"name\":\"advanced-attributes-structured-attrs-defaults\",\"outputs\":[\"out\",\"dev\"],\"system\":\"my-system\"}",
"dev": "/nix/store/8bazivnbipbyi569623skw5zm91z6kc2-advanced-attributes-structured-attrs-defaults-dev",
"out": "/nix/store/f8f8nvnx32bxvyxyx2ff7akbvwhwd9dw-advanced-attributes-structured-attrs-defaults"
},
"inputDrvs": {},
"inputSrcs": [],
"name": "advanced-attributes-structured-attrs-defaults",
"outputs": {
"dev": {
"path": "/nix/store/8bazivnbipbyi569623skw5zm91z6kc2-advanced-attributes-structured-attrs-defaults-dev"
},
"out": {
"path": "/nix/store/f8f8nvnx32bxvyxyx2ff7akbvwhwd9dw-advanced-attributes-structured-attrs-defaults"
}
},
"system": "my-system"
}

View File

@ -0,0 +1 @@
../../../../functional/derivation/advanced-attributes-structured-attrs.drv

View File

@ -0,0 +1,41 @@
{
"args": [
"-c",
"echo hello > $out"
],
"builder": "/bin/bash",
"env": {
"__json": "{\"__darwinAllowLocalNetworking\":true,\"__impureHostDeps\":[\"/usr/bin/ditto\"],\"__noChroot\":true,\"__sandboxProfile\":\"sandcastle\",\"allowSubstitutes\":false,\"builder\":\"/bin/bash\",\"impureEnvVars\":[\"UNICORN\"],\"name\":\"advanced-attributes-structured-attrs\",\"outputChecks\":{\"bin\":{\"disallowedReferences\":[\"/nix/store/7rhsm8i393hm1wcsmph782awg1hi2f7x-bar\"],\"disallowedRequisites\":[\"/nix/store/7rhsm8i393hm1wcsmph782awg1hi2f7x-bar\"]},\"dev\":{\"maxClosureSize\":5909,\"maxSize\":789},\"out\":{\"allowedReferences\":[\"/nix/store/3c08bzb71z4wiag719ipjxr277653ynp-foo\"],\"allowedRequisites\":[\"/nix/store/3c08bzb71z4wiag719ipjxr277653ynp-foo\"]}},\"outputs\":[\"out\",\"bin\",\"dev\"],\"preferLocalBuild\":true,\"requiredSystemFeatures\":[\"rainbow\",\"uid-range\"],\"system\":\"my-system\"}",
"bin": "/nix/store/pbzb48v0ycf80jgligcp4n8z0rblna4n-advanced-attributes-structured-attrs-bin",
"dev": "/nix/store/7xapi8jv7flcz1qq8jhw55ar8ag8hldh-advanced-attributes-structured-attrs-dev",
"out": "/nix/store/mpq3l1l1qc2yr50q520g08kprprwv79f-advanced-attributes-structured-attrs"
},
"inputDrvs": {
"/nix/store/4xm4wccqsvagz9gjksn24s7rip2fdy7v-foo.drv": {
"dynamicOutputs": {},
"outputs": [
"out"
]
},
"/nix/store/plsq5jbr5nhgqwcgb2qxw7jchc09dnl8-bar.drv": {
"dynamicOutputs": {},
"outputs": [
"out"
]
}
},
"inputSrcs": [],
"name": "advanced-attributes-structured-attrs",
"outputs": {
"bin": {
"path": "/nix/store/pbzb48v0ycf80jgligcp4n8z0rblna4n-advanced-attributes-structured-attrs-bin"
},
"dev": {
"path": "/nix/store/7xapi8jv7flcz1qq8jhw55ar8ag8hldh-advanced-attributes-structured-attrs-dev"
},
"out": {
"path": "/nix/store/mpq3l1l1qc2yr50q520g08kprprwv79f-advanced-attributes-structured-attrs"
}
},
"system": "my-system"
}

View File

@ -0,0 +1 @@
../../../../functional/derivation/advanced-attributes.drv

View File

@ -0,0 +1,234 @@
#include <nlohmann/json.hpp>
#include <gtest/gtest.h>
#include <optional>
#include "experimental-features.hh"
#include "derivations.hh"
#include "tests/libstore.hh"
#include "tests/characterization.hh"
#include "parsed-derivations.hh"
#include "types.hh"
namespace nix {
using nlohmann::json;
class DerivationAdvancedAttrsTest : public CharacterizationTest, public LibStoreTest
{
Path unitTestData = getUnitTestData() + "/derivation";
public:
Path goldenMaster(std::string_view testStem) const override
{
return unitTestData + "/" + testStem;
}
};
#define TEST_ATERM_JSON(STEM, NAME) \
TEST_F(DerivationAdvancedAttrsTest, Derivation_##STEM##_from_json) \
{ \
readTest(NAME ".json", [&](const auto & encoded_) { \
auto encoded = json::parse(encoded_); \
/* Use DRV file instead of C++ literal as source of truth. */ \
auto aterm = readFile(goldenMaster(NAME ".drv")); \
auto expected = parseDerivation(*store, std::move(aterm), NAME); \
Derivation got = Derivation::fromJSON(*store, encoded); \
EXPECT_EQ(got, expected); \
}); \
} \
\
TEST_F(DerivationAdvancedAttrsTest, Derivation_##STEM##_to_json) \
{ \
writeTest( \
NAME ".json", \
[&]() -> json { \
/* Use DRV file instead of C++ literal as source of truth. */ \
auto aterm = readFile(goldenMaster(NAME ".drv")); \
return parseDerivation(*store, std::move(aterm), NAME).toJSON(*store); \
}, \
[](const auto & file) { return json::parse(readFile(file)); }, \
[](const auto & file, const auto & got) { return writeFile(file, got.dump(2) + "\n"); }); \
} \
\
TEST_F(DerivationAdvancedAttrsTest, Derivation_##STEM##_from_aterm) \
{ \
readTest(NAME ".drv", [&](auto encoded) { \
/* Use JSON file instead of C++ literal as source of truth. */ \
auto json = json::parse(readFile(goldenMaster(NAME ".json"))); \
auto expected = Derivation::fromJSON(*store, json); \
auto got = parseDerivation(*store, std::move(encoded), NAME); \
EXPECT_EQ(got.toJSON(*store), expected.toJSON(*store)); \
EXPECT_EQ(got, expected); \
}); \
} \
\
/* No corresponding write test, because we need to read the drv to write the json file */
TEST_ATERM_JSON(advancedAttributes_defaults, "advanced-attributes-defaults");
TEST_ATERM_JSON(advancedAttributes, "advanced-attributes-defaults");
TEST_ATERM_JSON(advancedAttributes_structuredAttrs_defaults, "advanced-attributes-structured-attrs");
TEST_ATERM_JSON(advancedAttributes_structuredAttrs, "advanced-attributes-structured-attrs-defaults");
#undef TEST_ATERM_JSON
TEST_F(DerivationAdvancedAttrsTest, Derivation_advancedAttributes_defaults)
{
readTest("advanced-attributes-defaults.drv", [&](auto encoded) {
auto got = parseDerivation(*store, std::move(encoded), "foo");
auto drvPath = writeDerivation(*store, got, NoRepair, true);
ParsedDerivation parsedDrv(drvPath, got);
EXPECT_EQ(parsedDrv.getStringAttr("__sandboxProfile").value_or(""), "");
EXPECT_EQ(parsedDrv.getBoolAttr("__noChroot"), false);
EXPECT_EQ(parsedDrv.getStringsAttr("__impureHostDeps").value_or(Strings()), Strings());
EXPECT_EQ(parsedDrv.getStringsAttr("impureEnvVars").value_or(Strings()), Strings());
EXPECT_EQ(parsedDrv.getBoolAttr("__darwinAllowLocalNetworking"), false);
EXPECT_EQ(parsedDrv.getStringsAttr("allowedReferences"), std::nullopt);
EXPECT_EQ(parsedDrv.getStringsAttr("allowedRequisites"), std::nullopt);
EXPECT_EQ(parsedDrv.getStringsAttr("disallowedReferences"), std::nullopt);
EXPECT_EQ(parsedDrv.getStringsAttr("disallowedRequisites"), std::nullopt);
EXPECT_EQ(parsedDrv.getRequiredSystemFeatures(), StringSet());
EXPECT_EQ(parsedDrv.canBuildLocally(*store), false);
EXPECT_EQ(parsedDrv.willBuildLocally(*store), false);
EXPECT_EQ(parsedDrv.substitutesAllowed(), true);
EXPECT_EQ(parsedDrv.useUidRange(), false);
});
};
TEST_F(DerivationAdvancedAttrsTest, Derivation_advancedAttributes)
{
readTest("advanced-attributes.drv", [&](auto encoded) {
auto got = parseDerivation(*store, std::move(encoded), "foo");
auto drvPath = writeDerivation(*store, got, NoRepair, true);
ParsedDerivation parsedDrv(drvPath, got);
StringSet systemFeatures{"rainbow", "uid-range"};
EXPECT_EQ(parsedDrv.getStringAttr("__sandboxProfile").value_or(""), "sandcastle");
EXPECT_EQ(parsedDrv.getBoolAttr("__noChroot"), true);
EXPECT_EQ(parsedDrv.getStringsAttr("__impureHostDeps").value_or(Strings()), Strings{"/usr/bin/ditto"});
EXPECT_EQ(parsedDrv.getStringsAttr("impureEnvVars").value_or(Strings()), Strings{"UNICORN"});
EXPECT_EQ(parsedDrv.getBoolAttr("__darwinAllowLocalNetworking"), true);
EXPECT_EQ(
parsedDrv.getStringsAttr("allowedReferences"), Strings{"/nix/store/3c08bzb71z4wiag719ipjxr277653ynp-foo"});
EXPECT_EQ(
parsedDrv.getStringsAttr("allowedRequisites"), Strings{"/nix/store/3c08bzb71z4wiag719ipjxr277653ynp-foo"});
EXPECT_EQ(
parsedDrv.getStringsAttr("disallowedReferences"),
Strings{"/nix/store/7rhsm8i393hm1wcsmph782awg1hi2f7x-bar"});
EXPECT_EQ(
parsedDrv.getStringsAttr("disallowedRequisites"),
Strings{"/nix/store/7rhsm8i393hm1wcsmph782awg1hi2f7x-bar"});
EXPECT_EQ(parsedDrv.getRequiredSystemFeatures(), systemFeatures);
EXPECT_EQ(parsedDrv.canBuildLocally(*store), false);
EXPECT_EQ(parsedDrv.willBuildLocally(*store), false);
EXPECT_EQ(parsedDrv.substitutesAllowed(), false);
EXPECT_EQ(parsedDrv.useUidRange(), true);
});
};
TEST_F(DerivationAdvancedAttrsTest, Derivation_advancedAttributes_structuredAttrs_defaults)
{
readTest("advanced-attributes-structured-attrs-defaults.drv", [&](auto encoded) {
auto got = parseDerivation(*store, std::move(encoded), "foo");
auto drvPath = writeDerivation(*store, got, NoRepair, true);
ParsedDerivation parsedDrv(drvPath, got);
EXPECT_EQ(parsedDrv.getStringAttr("__sandboxProfile").value_or(""), "");
EXPECT_EQ(parsedDrv.getBoolAttr("__noChroot"), false);
EXPECT_EQ(parsedDrv.getStringsAttr("__impureHostDeps").value_or(Strings()), Strings());
EXPECT_EQ(parsedDrv.getStringsAttr("impureEnvVars").value_or(Strings()), Strings());
EXPECT_EQ(parsedDrv.getBoolAttr("__darwinAllowLocalNetworking"), false);
{
auto structuredAttrs_ = parsedDrv.getStructuredAttrs();
ASSERT_TRUE(structuredAttrs_);
auto & structuredAttrs = *structuredAttrs_;
auto outputChecks_ = get(structuredAttrs, "outputChecks");
ASSERT_FALSE(outputChecks_);
}
EXPECT_EQ(parsedDrv.getRequiredSystemFeatures(), StringSet());
EXPECT_EQ(parsedDrv.canBuildLocally(*store), false);
EXPECT_EQ(parsedDrv.willBuildLocally(*store), false);
EXPECT_EQ(parsedDrv.substitutesAllowed(), true);
EXPECT_EQ(parsedDrv.useUidRange(), false);
});
};
TEST_F(DerivationAdvancedAttrsTest, Derivation_advancedAttributes_structuredAttrs)
{
readTest("advanced-attributes-structured-attrs.drv", [&](auto encoded) {
auto got = parseDerivation(*store, std::move(encoded), "foo");
auto drvPath = writeDerivation(*store, got, NoRepair, true);
ParsedDerivation parsedDrv(drvPath, got);
StringSet systemFeatures{"rainbow", "uid-range"};
EXPECT_EQ(parsedDrv.getStringAttr("__sandboxProfile").value_or(""), "sandcastle");
EXPECT_EQ(parsedDrv.getBoolAttr("__noChroot"), true);
EXPECT_EQ(parsedDrv.getStringsAttr("__impureHostDeps").value_or(Strings()), Strings{"/usr/bin/ditto"});
EXPECT_EQ(parsedDrv.getStringsAttr("impureEnvVars").value_or(Strings()), Strings{"UNICORN"});
EXPECT_EQ(parsedDrv.getBoolAttr("__darwinAllowLocalNetworking"), true);
{
auto structuredAttrs_ = parsedDrv.getStructuredAttrs();
ASSERT_TRUE(structuredAttrs_);
auto & structuredAttrs = *structuredAttrs_;
auto outputChecks_ = get(structuredAttrs, "outputChecks");
ASSERT_TRUE(outputChecks_);
auto & outputChecks = *outputChecks_;
{
auto output_ = get(outputChecks, "out");
ASSERT_TRUE(output_);
auto & output = *output_;
EXPECT_EQ(
get(output, "allowedReferences")->get<Strings>(),
Strings{"/nix/store/3c08bzb71z4wiag719ipjxr277653ynp-foo"});
EXPECT_EQ(
get(output, "allowedRequisites")->get<Strings>(),
Strings{"/nix/store/3c08bzb71z4wiag719ipjxr277653ynp-foo"});
}
{
auto output_ = get(outputChecks, "bin");
ASSERT_TRUE(output_);
auto & output = *output_;
EXPECT_EQ(
get(output, "disallowedReferences")->get<Strings>(),
Strings{"/nix/store/7rhsm8i393hm1wcsmph782awg1hi2f7x-bar"});
EXPECT_EQ(
get(output, "disallowedRequisites")->get<Strings>(),
Strings{"/nix/store/7rhsm8i393hm1wcsmph782awg1hi2f7x-bar"});
}
{
auto output_ = get(outputChecks, "dev");
ASSERT_TRUE(output_);
auto & output = *output_;
EXPECT_EQ(get(output, "maxSize")->get<uint64_t>(), 789);
EXPECT_EQ(get(output, "maxClosureSize")->get<uint64_t>(), 5909);
}
}
EXPECT_EQ(parsedDrv.getRequiredSystemFeatures(), systemFeatures);
EXPECT_EQ(parsedDrv.canBuildLocally(*store), false);
EXPECT_EQ(parsedDrv.willBuildLocally(*store), false);
EXPECT_EQ(parsedDrv.substitutesAllowed(), false);
EXPECT_EQ(parsedDrv.useUidRange(), true);
});
};
}

View File

@ -108,7 +108,7 @@ TEST_JSON(DerivationTest, inputAddressed,
TEST_JSON(DerivationTest, caFixedFlat,
    (DerivationOutput::CAFixed {
        .ca = {
-           .method = FileIngestionMethod::Flat,
+           .method = ContentAddressMethod::Raw::Flat,
            .hash = Hash::parseAnyPrefixed("sha256-iUUXyRY8iW7DGirb0zwGgf1fRbLA7wimTJKgP7l/OQ8="),
        },
    }),
@ -117,7 +117,7 @@ TEST_JSON(DerivationTest, caFixedFlat,
TEST_JSON(DerivationTest, caFixedNAR,
    (DerivationOutput::CAFixed {
        .ca = {
-           .method = FileIngestionMethod::Recursive,
+           .method = ContentAddressMethod::Raw::NixArchive,
            .hash = Hash::parseAnyPrefixed("sha256-iUUXyRY8iW7DGirb0zwGgf1fRbLA7wimTJKgP7l/OQ8="),
        },
    }),
@ -126,6 +126,7 @@ TEST_JSON(DerivationTest, caFixedNAR,
TEST_JSON(DynDerivationTest, caFixedText,
    (DerivationOutput::CAFixed {
        .ca = {
+           .method = ContentAddressMethod::Raw::Text,
            .hash = Hash::parseAnyPrefixed("sha256-iUUXyRY8iW7DGirb0zwGgf1fRbLA7wimTJKgP7l/OQ8="),
        },
    }),
@ -133,7 +134,7 @@ TEST_JSON(DynDerivationTest, caFixedText,
TEST_JSON(CaDerivationTest, caFloating,
    (DerivationOutput::CAFloating {
-       .method = FileIngestionMethod::Recursive,
+       .method = ContentAddressMethod::Raw::NixArchive,
        .hashAlgo = HashAlgorithm::SHA256,
    }),
    "drv-name", "output-name")
@ -144,7 +145,7 @@ TEST_JSON(DerivationTest, deferred,
TEST_JSON(ImpureDerivationTest, impure,
    (DerivationOutput::Impure {
-       .method = FileIngestionMethod::Recursive,
+       .method = ContentAddressMethod::Raw::NixArchive,
        .hashAlgo = HashAlgorithm::SHA256,
    }),
    "drv-name", "output-name")

View File

@ -25,7 +25,7 @@ static NarInfo makeNarInfo(const Store & store, bool includeImpureInfo) {
        store,
        "foo",
        FixedOutputInfo {
-           .method = FileIngestionMethod::Recursive,
+           .method = FileIngestionMethod::NixArchive,
            .hash = hashString(HashAlgorithm::SHA256, "(...)"),
            .references = {

View File

@ -32,7 +32,7 @@ static UnkeyedValidPathInfo makeFull(const Store & store, bool includeImpureInfo
        store,
        "foo",
        FixedOutputInfo {
-           .method = FileIngestionMethod::Recursive,
+           .method = FileIngestionMethod::NixArchive,
            .hash = hashString(HashAlgorithm::SHA256, "(...)"),
            .references = {

View File

@ -55,15 +55,15 @@ VERSIONED_CHARACTERIZATION_TEST(
    defaultVersion,
    (std::tuple<ContentAddress, ContentAddress, ContentAddress> {
        ContentAddress {
-           .method = TextIngestionMethod {},
+           .method = ContentAddressMethod::Raw::Text,
            .hash = hashString(HashAlgorithm::SHA256, "Derive(...)"),
        },
        ContentAddress {
-           .method = FileIngestionMethod::Flat,
+           .method = ContentAddressMethod::Raw::Flat,
            .hash = hashString(HashAlgorithm::SHA1, "blob blob..."),
        },
        ContentAddress {
-           .method = FileIngestionMethod::Recursive,
+           .method = ContentAddressMethod::Raw::NixArchive,
            .hash = hashString(HashAlgorithm::SHA256, "(...)"),
        },
    }))
@ -280,7 +280,7 @@ VERSIONED_CHARACTERIZATION_TEST(
        *LibStoreTest::store,
        "foo",
        FixedOutputInfo {
-           .method = FileIngestionMethod::Recursive,
+           .method = FileIngestionMethod::NixArchive,
            .hash = hashString(HashAlgorithm::SHA256, "(...)"),
            .references = {
                .others = {
@ -398,7 +398,7 @@ VERSIONED_CHARACTERIZATION_TEST(
            std::nullopt,
            std::optional {
                ContentAddress {
-                   .method = FileIngestionMethod::Flat,
+                   .method = ContentAddressMethod::Raw::Flat,
                    .hash = hashString(HashAlgorithm::SHA1, "blob blob..."),
                },
            },

View File

@ -56,15 +56,15 @@ VERSIONED_CHARACTERIZATION_TEST(
    defaultVersion,
    (std::tuple<ContentAddress, ContentAddress, ContentAddress> {
        ContentAddress {
-           .method = TextIngestionMethod {},
+           .method = ContentAddressMethod::Raw::Text,
            .hash = hashString(HashAlgorithm::SHA256, "Derive(...)"),
        },
        ContentAddress {
-           .method = FileIngestionMethod::Flat,
+           .method = ContentAddressMethod::Raw::Flat,
            .hash = hashString(HashAlgorithm::SHA1, "blob blob..."),
        },
        ContentAddress {
-           .method = FileIngestionMethod::Recursive,
+           .method = ContentAddressMethod::Raw::NixArchive,
            .hash = hashString(HashAlgorithm::SHA256, "(...)"),
        },
    }))
@ -512,7 +512,7 @@ VERSIONED_CHARACTERIZATION_TEST(
        *LibStoreTest::store,
        "foo",
        FixedOutputInfo {
-           .method = FileIngestionMethod::Recursive,
+           .method = FileIngestionMethod::NixArchive,
            .hash = hashString(HashAlgorithm::SHA256, "(...)"),
            .references = {
                .others = {
@ -598,7 +598,7 @@ VERSIONED_CHARACTERIZATION_TEST(
            std::nullopt,
            std::optional {
                ContentAddress {
-                   .method = FileIngestionMethod::Flat,
+                   .method = ContentAddressMethod::Raw::Flat,
                    .hash = hashString(HashAlgorithm::SHA1, "blob blob..."),
                },
            },

View File

@ -5,6 +5,7 @@
#include "types.hh" #include "types.hh"
#include "environment-variables.hh" #include "environment-variables.hh"
#include "file-system.hh"
namespace nix { namespace nix {

View File

@ -11,7 +11,7 @@ namespace nix {
TEST(FileSerialisationMethod, testRoundTripPrintParse_1) {
    for (const FileSerialisationMethod fim : {
        FileSerialisationMethod::Flat,
-       FileSerialisationMethod::Recursive,
+       FileSerialisationMethod::NixArchive,
    }) {
        EXPECT_EQ(parseFileSerialisationMethod(renderFileSerialisationMethod(fim)), fim);
    }
@ -37,7 +37,7 @@ TEST(FileSerialisationMethod, testParseFileSerialisationMethodOptException) {
TEST(FileIngestionMethod, testRoundTripPrintParse_1) {
    for (const FileIngestionMethod fim : {
        FileIngestionMethod::Flat,
-       FileIngestionMethod::Recursive,
+       FileIngestionMethod::NixArchive,
        FileIngestionMethod::Git,
    }) {
        EXPECT_EQ(parseFileIngestionMethod(renderFileIngestionMethod(fim)), fim);

View File

@ -0,0 +1,36 @@
#include <gtest/gtest.h>
#include "processes.hh"
namespace nix {
#ifdef _WIN32
TEST(SpawnTest, spawnEcho)
{
auto output = runProgram(RunOptions{.program = "cmd.exe", .args = {"/C", "echo", "hello world"}});
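// cmd.exe's `echo` prints its argument list as it received it; since "hello world"
// contains a space it gets quoted when the command line is assembled, so the quotes
// (and a CRLF line ending) appear in the captured output.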
ASSERT_EQ(output.first, 0);
ASSERT_EQ(output.second, "\"hello world\"\r\n");
}
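// windowsEscape is not declared in a public header, so forward-declare it here to
// let the test call it directly.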
std::string windowsEscape(const std::string & str, bool cmd);
TEST(SpawnTest, windowsEscape)
{
auto empty = windowsEscape("", false);
ASSERT_EQ(empty, R"("")");
// There are no quotes in this argument, so the input should equal the output.
auto backslashStr = R"(\\\\)";
auto backslashes = windowsEscape(backslashStr, false);
ASSERT_EQ(backslashes, backslashStr);
auto nestedQuotes = windowsEscape(R"(he said: "hello there")", false);
ASSERT_EQ(nestedQuotes, R"("he said: \"hello there\"")");
auto middleQuote = windowsEscape(R"( \\\" )", false);
ASSERT_EQ(middleQuote, R"(" \\\\\\\" ")");
auto space = windowsEscape("hello world", false);
ASSERT_EQ(space, R"("hello world")");
}
#endif
}
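
For readers unfamiliar with the quoting rules that the windowsEscape assertions above encode: the expected strings follow the usual Windows CRT argument convention — quote an argument that is empty or contains whitespace or quotes, double any backslashes that immediately precede a quote, and backslash-escape embedded quotes. The sketch below is illustrative only (it is not the actual windowsEscape implementation in Nix, the function name is made up, and it ignores the cmd-specific second parameter), but it reproduces the outputs asserted in that test:

#include <string>

// Illustrative escaper: quotes `arg` so that a Windows CRT-style parser recovers it verbatim.
std::string escapeWindowsArgSketch(const std::string & arg)
{
    // Arguments that are non-empty and contain no whitespace or quotes pass through unchanged.
    if (!arg.empty() && arg.find_first_of(" \t\"") == std::string::npos)
        return arg;

    std::string out = "\"";
    size_t backslashes = 0;
    for (char c : arg) {
        if (c == '\\') {
            ++backslashes;
        } else if (c == '"') {
            // Backslashes directly before a quote must be doubled, and the quote itself escaped.
            out.append(backslashes * 2 + 1, '\\');
            out += '"';
            backslashes = 0;
        } else {
            out.append(backslashes, '\\');
            out += c;
            backslashes = 0;
        }
    }
    // Backslashes before the closing quote must also be doubled.
    out.append(backslashes * 2, '\\');
    out += '"';
    return out;
}

Applied to each argument and joined with spaces, an escaper of this shape yields the quoted "hello world" seen in the spawnEcho output above.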