Mirror of https://github.com/NixOS/nix.git (synced 2024-11-25 16:23:02 +00:00)
treewide: Reference HashFormat members with scope
Base* -> HashFormat::Base*
This commit is contained in: parent 5043e6cf4e, commit e026f3e1ae
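The change is mechanical throughout: every bare enumerator such as Base32 is rewritten as the scope-qualified HashFormat::Base32. The sketch below illustrates why both spellings compile today but only the qualified one survives a later switch to a scoped enum; the cut-down HashFormat definition and the render() helper are illustrative assumptions, not the actual libutil declarations.

// Illustrative sketch only: a cut-down stand-in for the HashFormat enum,
// not the real libutil header.
#include <string>

enum HashFormat : int { Base64, Base32, Base16, SRI };

std::string render(HashFormat hashFormat)
{
    // Unscoped enums inject their enumerators into the enclosing scope, so
    // `hashFormat == Base32` compiles today. The qualified form used by this
    // commit also compiles now, and keeps compiling if HashFormat is later
    // tightened to `enum class HashFormat`.
    return hashFormat == HashFormat::Base32 ? "base-32" : "other";
}

int main()
{
    return render(HashFormat::Base32) == "base-32" ? 0 : 1;
}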
@@ -78,7 +78,7 @@ SV * queryReferences(char * path)
 SV * queryPathHash(char * path)
 PPCODE:
 try {
-auto s = store()->queryPathInfo(store()->parseStorePath(path))->narHash.to_string(Base32, true);
+auto s = store()->queryPathInfo(store()->parseStorePath(path))->narHash.to_string(HashFormat::Base32, true);
 XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
 } catch (Error & e) {
 croak("%s", e.what());

@@ -104,7 +104,7 @@ SV * queryPathInfo(char * path, int base32)
 XPUSHs(&PL_sv_undef);
 else
 XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(*info->deriver).c_str(), 0)));
-auto s = info->narHash.to_string(base32 ? Base32 : Base16, true);
+auto s = info->narHash.to_string(base32 ? HashFormat::Base32 : HashFormat::Base16, true);
 XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
 mXPUSHi(info->registrationTime);
 mXPUSHi(info->narSize);

@@ -206,7 +206,7 @@ SV * hashPath(char * algo, int base32, char * path)
 PPCODE:
 try {
 Hash h = hashPath(parseHashType(algo), path).first;
-auto s = h.to_string(base32 ? Base32 : Base16, false);
+auto s = h.to_string(base32 ? HashFormat::Base32 : HashFormat::Base16, false);
 XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
 } catch (Error & e) {
 croak("%s", e.what());

@@ -217,7 +217,7 @@ SV * hashFile(char * algo, int base32, char * path)
 PPCODE:
 try {
 Hash h = hashFile(parseHashType(algo), path);
-auto s = h.to_string(base32 ? Base32 : Base16, false);
+auto s = h.to_string(base32 ? HashFormat::Base32 : HashFormat::Base16, false);
 XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
 } catch (Error & e) {
 croak("%s", e.what());

@@ -228,7 +228,7 @@ SV * hashString(char * algo, int base32, char * s)
 PPCODE:
 try {
 Hash h = hashString(parseHashType(algo), s);
-auto s = h.to_string(base32 ? Base32 : Base16, false);
+auto s = h.to_string(base32 ? HashFormat::Base32 : HashFormat::Base16, false);
 XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
 } catch (Error & e) {
 croak("%s", e.what());

@@ -239,7 +239,7 @@ SV * convertHash(char * algo, char * s, int toBase32)
 PPCODE:
 try {
 auto h = Hash::parseAny(s, parseHashType(algo));
-auto s = h.to_string(toBase32 ? Base32 : Base16, false);
+auto s = h.to_string(toBase32 ? HashFormat::Base32 : HashFormat::Base16, false);
 XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
 } catch (Error & e) {
 croak("%s", e.what());
@@ -50,7 +50,7 @@ struct AttrDb
 Path cacheDir = getCacheDir() + "/nix/eval-cache-v5";
 createDirs(cacheDir);

-Path dbPath = cacheDir + "/" + fingerprint.to_string(Base16, false) + ".sqlite";
+Path dbPath = cacheDir + "/" + fingerprint.to_string(HashFormat::Base16, false) + ".sqlite";

 state->db = SQLite(dbPath);
 state->db.isCache();
@@ -1758,7 +1758,7 @@ static void prim_hashFile(EvalState & state, const PosIdx pos, Value * * args, V

 auto path = realisePath(state, pos, *args[1]);

-v.mkString(hashString(*ht, path.readFile()).to_string(Base16, false));
+v.mkString(hashString(*ht, path.readFile()).to_string(HashFormat::Base16, false));
 }

 static RegisterPrimOp primop_hashFile({

@@ -3760,7 +3760,7 @@ static void prim_hashString(EvalState & state, const PosIdx pos, Value * * args,
 NixStringContext context; // discarded
 auto s = state.forceString(*args[1], context, pos, "while evaluating the second argument passed to builtins.hashString");

-v.mkString(hashString(*ht, s).to_string(Base16, false));
+v.mkString(hashString(*ht, s).to_string(HashFormat::Base16, false));
 }

 static RegisterPrimOp primop_hashString({
@@ -31,7 +31,7 @@ void emitTreeAttrs(

 auto narHash = input.getNarHash();
 assert(narHash);
-attrs.alloc("narHash").mkString(narHash->to_string(SRI, true));
+attrs.alloc("narHash").mkString(narHash->to_string(HashFormat::SRI, true));

 if (input.getType() == "git")
 attrs.alloc("submodules").mkBool(

@@ -297,7 +297,7 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v
 : hashFile(htSHA256, state.store->toRealPath(storePath));
 if (hash != *expectedHash)
 state.debugThrowLastTrace(EvalError((unsigned int) 102, "hash mismatch in file downloaded from '%s':\n specified: %s\n got: %s",
-*url, expectedHash->to_string(Base32, true), hash.to_string(Base32, true)));
+*url, expectedHash->to_string(HashFormat::Base32, true), hash.to_string(HashFormat::Base32, true)));
 }

 state.allowAndSetStorePathString(storePath, v);
@@ -147,12 +147,12 @@ std::pair<Tree, Input> Input::fetch(ref<Store> store) const
 };

 auto narHash = store->queryPathInfo(tree.storePath)->narHash;
-input.attrs.insert_or_assign("narHash", narHash.to_string(SRI, true));
+input.attrs.insert_or_assign("narHash", narHash.to_string(HashFormat::SRI, true));

 if (auto prevNarHash = getNarHash()) {
 if (narHash != *prevNarHash)
 throw Error((unsigned int) 102, "NAR hash mismatch in input '%s' (%s), expected '%s', got '%s'",
-to_string(), tree.actualPath, prevNarHash->to_string(SRI, true), narHash.to_string(SRI, true));
+to_string(), tree.actualPath, prevNarHash->to_string(HashFormat::SRI, true), narHash.to_string(HashFormat::SRI, true));
 }

 if (auto prevLastModified = getLastModified()) {
@@ -46,7 +46,7 @@ bool touchCacheFile(const Path & path, time_t touch_time)
 Path getCachePath(std::string_view key)
 {
 return getCacheDir() + "/nix/gitv3/" +
-hashString(htSHA256, key).to_string(Base32, false);
+hashString(htSHA256, key).to_string(HashFormat::Base32, false);
 }

 // Returns the name of the HEAD branch.

@@ -417,7 +417,7 @@ struct GitInputScheme : InputScheme
 auto checkHashType = [&](const std::optional<Hash> & hash)
 {
 if (hash.has_value() && !(hash->type == htSHA1 || hash->type == htSHA256))
-throw Error("Hash '%s' is not supported by Git. Supported types are sha1 and sha256.", hash->to_string(Base16, true));
+throw Error("Hash '%s' is not supported by Git. Supported types are sha1 and sha256.", hash->to_string(HashFormat::Base16, true));
 };

 auto getLockedAttrs = [&]()
@@ -125,7 +125,7 @@ struct GitArchiveInputScheme : InputScheme
 auto path = owner + "/" + repo;
 assert(!(ref && rev));
 if (ref) path += "/" + *ref;
-if (rev) path += "/" + rev->to_string(Base16, false);
+if (rev) path += "/" + rev->to_string(HashFormat::Base16, false);
 return ParsedURL {
 .scheme = type(),
 .path = path,

@@ -296,7 +296,7 @@ struct GitHubInputScheme : GitArchiveInputScheme
 : "https://api.%s/repos/%s/%s/tarball/%s";

 const auto url = fmt(urlFmt, host, getOwner(input), getRepo(input),
-input.getRev()->to_string(Base16, false));
+input.getRev()->to_string(HashFormat::Base16, false));

 return DownloadUrl { url, headers };
 }

@@ -362,7 +362,7 @@ struct GitLabInputScheme : GitArchiveInputScheme
 auto host = maybeGetStrAttr(input.attrs, "host").value_or("gitlab.com");
 auto url = fmt("https://%s/api/v4/projects/%s%%2F%s/repository/archive.tar.gz?sha=%s",
 host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"),
-input.getRev()->to_string(Base16, false));
+input.getRev()->to_string(HashFormat::Base16, false));

 Headers headers = makeHeadersWithAuthTokens(host);
 return DownloadUrl { url, headers };

@@ -449,7 +449,7 @@ struct SourceHutInputScheme : GitArchiveInputScheme
 auto host = maybeGetStrAttr(input.attrs, "host").value_or("git.sr.ht");
 auto url = fmt("https://%s/%s/%s/archive/%s.tar.gz",
 host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"),
-input.getRev()->to_string(Base16, false));
+input.getRev()->to_string(HashFormat::Base16, false));

 Headers headers = makeHeadersWithAuthTokens(host);
 return DownloadUrl { url, headers };
@@ -206,7 +206,7 @@ struct MercurialInputScheme : InputScheme
 auto checkHashType = [&](const std::optional<Hash> & hash)
 {
 if (hash.has_value() && hash->type != htSHA1)
-throw Error("Hash '%s' is not supported by Mercurial. Only sha1 is supported.", hash->to_string(Base16, true));
+throw Error("Hash '%s' is not supported by Mercurial. Only sha1 is supported.", hash->to_string(HashFormat::Base16, true));
 };


@@ -252,7 +252,7 @@ struct MercurialInputScheme : InputScheme
 }
 }

-Path cacheDir = fmt("%s/nix/hg/%s", getCacheDir(), hashString(htSHA256, actualUrl).to_string(Base32, false));
+Path cacheDir = fmt("%s/nix/hg/%s", getCacheDir(), hashString(htSHA256, actualUrl).to_string(HashFormat::Base32, false));

 /* If this is a commit hash that we already have, we don't
 have to pull again. */
@@ -250,7 +250,7 @@ struct CurlInputScheme : InputScheme
 // NAR hashes are preferred over file hashes since tar/zip
 // files don't have a canonical representation.
 if (auto narHash = input.getNarHash())
-url.query.insert_or_assign("narHash", narHash->to_string(SRI, true));
+url.query.insert_or_assign("narHash", narHash->to_string(HashFormat::SRI, true));
 return url;
 }

@@ -164,7 +164,7 @@ ref<const ValidPathInfo> BinaryCacheStore::addToStoreCommon(
 auto [fileHash, fileSize] = fileHashSink.finish();
 narInfo->fileHash = fileHash;
 narInfo->fileSize = fileSize;
-narInfo->url = "nar/" + narInfo->fileHash->to_string(Base32, false) + ".nar"
+narInfo->url = "nar/" + narInfo->fileHash->to_string(HashFormat::Base32, false) + ".nar"
 + (compression == "xz" ? ".xz" :
 compression == "bzip2" ? ".bz2" :
 compression == "zstd" ? ".zst" :
@@ -1066,7 +1066,7 @@ void LocalDerivationGoal::initTmpDir() {
 env[i.first] = i.second;
 } else {
 auto hash = hashString(htSHA256, i.first);
-std::string fn = ".attr-" + hash.to_string(Base32, false);
+std::string fn = ".attr-" + hash.to_string(HashFormat::Base32, false);
 Path p = tmpDir + "/" + fn;
 writeFile(p, rewriteStrings(i.second, inputRewrites));
 chownToBuilder(p);

@@ -2583,8 +2583,8 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
 delayedException = std::make_exception_ptr(
 BuildError("hash mismatch in fixed-output derivation '%s':\n specified: %s\n got: %s",
 worker.store.printStorePath(drvPath),
-wanted.to_string(SRI, true),
-got.to_string(SRI, true)));
+wanted.to_string(HashFormat::SRI, true),
+got.to_string(HashFormat::SRI, true)));
 }
 if (!newInfo0.references.empty())
 delayedException = std::make_exception_ptr(
@@ -65,7 +65,7 @@ void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData)
 if (!hasSuffix(hashedMirror, "/")) hashedMirror += '/';
 std::optional<HashType> ht = parseHashTypeOpt(getAttr("outputHashAlgo"));
 Hash h = newHashAllowEmpty(getAttr("outputHash"), ht);
-fetch(hashedMirror + printHashType(h.type) + "/" + h.to_string(Base16, false));
+fetch(hashedMirror + printHashType(h.type) + "/" + h.to_string(HashFormat::Base16, false));
 return;
 } catch (Error & e) {
 debug(e.what());
@@ -60,7 +60,7 @@ std::string ContentAddress::render() const
 + makeFileIngestionPrefix(method);
 },
 }, method.raw)
-+ this->hash.to_string(Base32, true);
++ this->hash.to_string(HashFormat::Base32, true);
 }

 /**
@@ -334,7 +334,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
 logger->startWork();
 auto hash = store->queryPathInfo(path)->narHash;
 logger->stopWork();
-to << hash.to_string(Base16, false);
+to << hash.to_string(HashFormat::Base16, false);
 break;
 }

@@ -542,7 +542,7 @@ std::string Derivation::unparse(const Store & store, bool maskOutputs,
 [&](const DerivationOutput::CAFixed & dof) {
 s += ','; printUnquotedString(s, maskOutputs ? "" : store.printStorePath(dof.path(store, name, i.first)));
 s += ','; printUnquotedString(s, dof.ca.printMethodAlgo());
-s += ','; printUnquotedString(s, dof.ca.hash.to_string(Base16, false));
+s += ','; printUnquotedString(s, dof.ca.hash.to_string(HashFormat::Base16, false));
 },
 [&](const DerivationOutput::CAFloating & dof) {
 s += ','; printUnquotedString(s, "");

@@ -775,7 +775,7 @@ DrvHash hashDerivationModulo(Store & store, const Derivation & drv, bool maskOut
 auto & dof = std::get<DerivationOutput::CAFixed>(i.second.raw);
 auto hash = hashString(htSHA256, "fixed:out:"
 + dof.ca.printMethodAlgo() + ":"
-+ dof.ca.hash.to_string(Base16, false) + ":"
++ dof.ca.hash.to_string(HashFormat::Base16, false) + ":"
 + store.printStorePath(dof.path(store, drv.name, i.first)));
 outputHashes.insert_or_assign(i.first, std::move(hash));
 }

@@ -820,7 +820,7 @@ DrvHash hashDerivationModulo(Store & store, const Derivation & drv, bool maskOut
 const auto h = get(res.hashes, outputName);
 if (!h)
 throw Error("no hash for output '%s' of derivation '%s'", outputName, drv.name);
-inputs2[h->to_string(Base16, false)].value.insert(outputName);
+inputs2[h->to_string(HashFormat::Base16, false)].value.insert(outputName);
 }
 }

@@ -925,7 +925,7 @@ void writeDerivation(Sink & out, const Store & store, const BasicDerivation & dr
 [&](const DerivationOutput::CAFixed & dof) {
 out << store.printStorePath(dof.path(store, drv.name, i.first))
 << dof.ca.printMethodAlgo()
-<< dof.ca.hash.to_string(Base16, false);
+<< dof.ca.hash.to_string(HashFormat::Base16, false);
 },
 [&](const DerivationOutput::CAFloating & dof) {
 out << ""

@@ -957,7 +957,7 @@ void writeDerivation(Sink & out, const Store & store, const BasicDerivation & dr
 std::string hashPlaceholder(const OutputNameView outputName)
 {
 // FIXME: memoize?
-return "/" + hashString(htSHA256, concatStrings("nix-output:", outputName)).to_string(Base32, false);
+return "/" + hashString(htSHA256, concatStrings("nix-output:", outputName)).to_string(HashFormat::Base32, false);
 }


@@ -1162,7 +1162,7 @@ nlohmann::json DerivationOutput::toJSON(
 [&](const DerivationOutput::CAFixed & dof) {
 res["path"] = store.printStorePath(dof.path(store, drvName, outputName));
 res["hashAlgo"] = dof.ca.printMethodAlgo();
-res["hash"] = dof.ca.hash.to_string(Base16, false);
+res["hash"] = dof.ca.hash.to_string(HashFormat::Base16, false);
 // FIXME print refs?
 },
 [&](const DerivationOutput::CAFloating & dof) {
@@ -5,7 +5,7 @@ namespace nix {

 std::string DownstreamPlaceholder::render() const
 {
-return "/" + hash.to_string(Base32, false);
+return "/" + hash.to_string(HashFormat::Base32, false);
 }


@@ -31,7 +31,7 @@ DownstreamPlaceholder DownstreamPlaceholder::unknownDerivation(
 xpSettings.require(Xp::DynamicDerivations);
 auto compressed = compressHash(placeholder.hash, 20);
 auto clearText = "nix-computed-output:"
-+ compressed.to_string(Base32, false)
++ compressed.to_string(HashFormat::Base32, false)
 + ":" + std::string { outputName };
 return DownstreamPlaceholder {
 hashString(htSHA256, clearText)
@@ -41,7 +41,7 @@ void Store::exportPath(const StorePath & path, Sink & sink)
 Hash hash = hashSink.currentHash().first;
 if (hash != info->narHash && info->narHash != Hash(info->narHash.type))
 throw Error("hash of path '%s' has changed from '%s' to '%s'!",
-printStorePath(path), info->narHash.to_string(Base32, true), hash.to_string(Base32, true));
+printStorePath(path), info->narHash.to_string(HashFormat::Base32, true), hash.to_string(HashFormat::Base32, true));

 teeSink
 << exportMagic
@@ -43,7 +43,7 @@ static void makeSymlink(const Path & link, const Path & target)

 void LocalStore::addIndirectRoot(const Path & path)
 {
-std::string hash = hashString(htSHA1, path).to_string(Base32, false);
+std::string hash = hashString(htSHA1, path).to_string(HashFormat::Base32, false);
 Path realRoot = canonPath(fmt("%1%/%2%/auto/%3%", stateDir, gcRootsDir, hash));
 makeSymlink(realRoot, path);
 }
@@ -209,7 +209,7 @@ struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Stor
 << ServeProto::Command::AddToStoreNar
 << printStorePath(info.path)
 << (info.deriver ? printStorePath(*info.deriver) : "")
-<< info.narHash.to_string(Base16, false);
+<< info.narHash.to_string(HashFormat::Base16, false);
 ServeProto::write(*this, *conn, info.references);
 conn->to
 << info.registrationTime
@@ -826,7 +826,7 @@ uint64_t LocalStore::addValidPath(State & state,

 state.stmts->RegisterValidPath.use()
 (printStorePath(info.path))
-(info.narHash.to_string(Base16, true))
+(info.narHash.to_string(HashFormat::Base16, true))
 (info.registrationTime == 0 ? time(0) : info.registrationTime)
 (info.deriver ? printStorePath(*info.deriver) : "", (bool) info.deriver)
 (info.narSize, info.narSize != 0)

@@ -933,7 +933,7 @@ void LocalStore::updatePathInfo(State & state, const ValidPathInfo & info)
 {
 state.stmts->UpdatePathInfo.use()
 (info.narSize, info.narSize != 0)
-(info.narHash.to_string(Base16, true))
+(info.narHash.to_string(HashFormat::Base16, true))
 (info.ultimate ? 1 : 0, info.ultimate)
 (concatStringsSep(" ", info.sigs), !info.sigs.empty())
 (renderContentAddress(info.ca), (bool) info.ca)

@@ -1236,7 +1236,7 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,

 if (hashResult.first != info.narHash)
 throw Error("hash mismatch importing path '%s';\n specified: %s\n got: %s",
-printStorePath(info.path), info.narHash.to_string(Base32, true), hashResult.first.to_string(Base32, true));
+printStorePath(info.path), info.narHash.to_string(HashFormat::Base32, true), hashResult.first.to_string(HashFormat::Base32, true));

 if (hashResult.second != info.narSize)
 throw Error("size mismatch importing path '%s';\n specified: %s\n got: %s",

@@ -1252,8 +1252,8 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
 if (specified.hash != actualHash.hash) {
 throw Error("ca hash mismatch importing path '%s';\n specified: %s\n got: %s",
 printStorePath(info.path),
-specified.hash.to_string(Base32, true),
-actualHash.hash.to_string(Base32, true));
+specified.hash.to_string(HashFormat::Base32, true),
+actualHash.hash.to_string(HashFormat::Base32, true));
 }
 }

@@ -1545,7 +1545,7 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
 for (auto & link : readDirectory(linksDir)) {
 printMsg(lvlTalkative, "checking contents of '%s'", link.name);
 Path linkPath = linksDir + "/" + link.name;
-std::string hash = hashPath(htSHA256, linkPath).first.to_string(Base32, false);
+std::string hash = hashPath(htSHA256, linkPath).first.to_string(HashFormat::Base32, false);
 if (hash != link.name) {
 printError("link '%s' was modified! expected hash '%s', got '%s'",
 linkPath, link.name, hash);

@@ -1578,7 +1578,7 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)

 if (info->narHash != nullHash && info->narHash != current.first) {
 printError("path '%s' was modified! expected hash '%s', got '%s'",
-printStorePath(i), info->narHash.to_string(Base32, true), current.first.to_string(Base32, true));
+printStorePath(i), info->narHash.to_string(HashFormat::Base32, true), current.first.to_string(HashFormat::Base32, true));
 if (repair) repairPath(i); else errors = true;
 } else {

@@ -332,9 +332,9 @@ public:
 (std::string(info->path.name()))
 (narInfo ? narInfo->url : "", narInfo != 0)
 (narInfo ? narInfo->compression : "", narInfo != 0)
-(narInfo && narInfo->fileHash ? narInfo->fileHash->to_string(Base32, true) : "", narInfo && narInfo->fileHash)
+(narInfo && narInfo->fileHash ? narInfo->fileHash->to_string(HashFormat::Base32, true) : "", narInfo && narInfo->fileHash)
 (narInfo ? narInfo->fileSize : 0, narInfo != 0 && narInfo->fileSize)
-(info->narHash.to_string(Base32, true))
+(info->narHash.to_string(HashFormat::Base32, true))
 (info->narSize)
 (concatStringsSep(" ", info->shortRefs()))
 (info->deriver ? std::string(info->deriver->to_string()) : "", (bool) info->deriver)
@@ -105,10 +105,10 @@ std::string NarInfo::to_string(const Store & store) const
 assert(compression != "");
 res += "Compression: " + compression + "\n";
 assert(fileHash && fileHash->type == htSHA256);
-res += "FileHash: " + fileHash->to_string(Base32, true) + "\n";
+res += "FileHash: " + fileHash->to_string(HashFormat::Base32, true) + "\n";
 res += "FileSize: " + std::to_string(fileSize) + "\n";
 assert(narHash.type == htSHA256);
-res += "NarHash: " + narHash.to_string(Base32, true) + "\n";
+res += "NarHash: " + narHash.to_string(HashFormat::Base32, true) + "\n";
 res += "NarSize: " + std::to_string(narSize) + "\n";

 res += "References: " + concatStringsSep(" ", shortRefs()) + "\n";
@@ -146,10 +146,10 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats,
 contents of the symlink (i.e. the result of readlink()), not
 the contents of the target (which may not even exist). */
 Hash hash = hashPath(htSHA256, path).first;
-debug("'%1%' has hash '%2%'", path, hash.to_string(Base32, true));
+debug("'%1%' has hash '%2%'", path, hash.to_string(HashFormat::Base32, true));

 /* Check if this is a known hash. */
-Path linkPath = linksDir + "/" + hash.to_string(Base32, false);
+Path linkPath = linksDir + "/" + hash.to_string(HashFormat::Base32, false);

 /* Maybe delete the link, if it has been corrupted. */
 if (pathExists(linkPath)) {
@@ -12,7 +12,7 @@ std::string ValidPathInfo::fingerprint(const Store & store) const
 store.printStorePath(path));
 return
 "1;" + store.printStorePath(path) + ";"
-+ narHash.to_string(Base32, true) + ";"
++ narHash.to_string(HashFormat::Base32, true) + ";"
 + std::to_string(narSize) + ";"
 + concatStringsSep(",", store.printStorePathSet(references));
 }

@@ -161,7 +161,7 @@ void ValidPathInfo::write(
 if (includePath)
 sink << store.printStorePath(path);
 sink << (deriver ? store.printStorePath(*deriver) : "")
-<< narHash.to_string(Base16, false);
+<< narHash.to_string(HashFormat::Base16, false);
 WorkerProto::write(store,
 WorkerProto::WriteConn { .to = sink },
 references);
@@ -35,7 +35,7 @@ StorePath::StorePath(std::string_view _baseName)
 }

 StorePath::StorePath(const Hash & hash, std::string_view _name)
-: baseName((hash.to_string(Base32, false) + "-").append(std::string(_name)))
+: baseName((hash.to_string(HashFormat::Base32, false) + "-").append(std::string(_name)))
 {
 checkName(baseName, name());
 }
@@ -39,7 +39,7 @@ struct DrvOutput {
 std::string to_string() const;

 std::string strHash() const
-{ return drvHash.to_string(Base16, true); }
+{ return drvHash.to_string(HashFormat::Base16, true); }

 static DrvOutput parse(const std::string &);

@@ -541,7 +541,7 @@ void RemoteStore::addToStore(const ValidPathInfo & info, Source & source,
 conn->to << WorkerProto::Op::AddToStoreNar
 << printStorePath(info.path)
 << (info.deriver ? printStorePath(*info.deriver) : "")
-<< info.narHash.to_string(Base16, false);
+<< info.narHash.to_string(HashFormat::Base16, false);
 WorkerProto::write(*this, *conn, info.references);
 conn->to << info.registrationTime << info.narSize
 << info.ultimate << info.sigs << renderContentAddress(info.ca)
@@ -154,7 +154,7 @@ StorePath Store::makeStorePath(std::string_view type,
 StorePath Store::makeStorePath(std::string_view type,
 const Hash & hash, std::string_view name) const
 {
-return makeStorePath(type, hash.to_string(Base16, true), name);
+return makeStorePath(type, hash.to_string(HashFormat::Base16, true), name);
 }


@@ -192,7 +192,7 @@ StorePath Store::makeFixedOutputPath(std::string_view name, const FixedOutputInf
 hashString(htSHA256,
 "fixed:out:"
 + makeFileIngestionPrefix(info.method)
-+ info.hash.to_string(Base16, true) + ":"),
++ info.hash.to_string(HashFormat::Base16, true) + ":"),
 name);
 }
 }

@@ -884,7 +884,7 @@ std::string Store::makeValidityRegistration(const StorePathSet & paths,
 auto info = queryPathInfo(i);

 if (showHash) {
-s += info->narHash.to_string(Base16, false) + "\n";
+s += info->narHash.to_string(HashFormat::Base16, false) + "\n";
 s += fmt("%1%\n", info->narSize);
 }

@@ -676,7 +676,7 @@ public:
 */
 nlohmann::json pathInfoToJSON(const StorePathSet & storePaths,
 bool includeImpureInfo, bool showClosureSize,
-HashFormat hashFormat = Base32,
+HashFormat hashFormat = HashFormat::Base32,
 AllowInvalidFlag allowInvalid = DisallowInvalid);

 /**
@@ -111,26 +111,26 @@ static std::string printHash32(const Hash & hash)
 std::string printHash16or32(const Hash & hash)
 {
 assert(hash.type);
-return hash.to_string(hash.type == htMD5 ? Base16 : Base32, false);
+return hash.to_string(hash.type == htMD5 ? HashFormat::Base16 : HashFormat::Base32, false);
 }


 std::string Hash::to_string(HashFormat hashFormat, bool includeType) const
 {
 std::string s;
-if (hashFormat == SRI || includeType) {
+if (hashFormat == HashFormat::SRI || includeType) {
 s += printHashType(type);
-s += hashFormat == SRI ? '-' : ':';
+s += hashFormat == HashFormat::SRI ? '-' : ':';
 }
 switch (hashFormat) {
-case Base16:
+case HashFormat::Base16:
 s += printHash16(*this);
 break;
-case Base32:
+case HashFormat::Base32:
 s += printHash32(*this);
 break;
-case Base64:
-case SRI:
+case HashFormat::Base64:
+case HashFormat::SRI:
 s += base64Encode(std::string_view((const char *) hash, hashSize));
 break;
 }

@@ -267,7 +267,7 @@ Hash newHashAllowEmpty(std::string_view hashStr, std::optional<HashType> ht)
 if (!ht)
 throw BadHash("empty hash requires explicit hash type");
 Hash h(*ht);
-warn("found empty hash, assuming '%s'", h.to_string(SRI, true));
+warn("found empty hash, assuming '%s'", h.to_string(HashFormat::SRI, true));
 return h;
 } else
 return Hash::parseAny(hashStr, ht);
@@ -132,12 +132,12 @@ public:

 std::string gitRev() const
 {
-return to_string(Base16, false);
+return to_string(HashFormat::Base16, false);
 }

 std::string gitShortRev() const
 {
-return std::string(to_string(Base16, false), 0, 7);
+return std::string(to_string(HashFormat::Base16, false), 0, 7);
 }

 static Hash dummy;
@@ -406,7 +406,7 @@ static void opQuery(Strings opFlags, Strings opArgs)
 auto info = store->queryPathInfo(j);
 if (query == qHash) {
 assert(info->narHash.type == htSHA256);
-cout << fmt("%s\n", info->narHash.to_string(Base32, true));
+cout << fmt("%s\n", info->narHash.to_string(HashFormat::Base32, true));
 } else if (query == qSize)
 cout << fmt("%d\n", info->narSize);
 }

@@ -769,8 +769,8 @@ static void opVerifyPath(Strings opFlags, Strings opArgs)
 if (current.first != info->narHash) {
 printError("path '%s' was modified! expected hash '%s', got '%s'",
 store->printStorePath(path),
-info->narHash.to_string(Base32, true),
-current.first.to_string(Base32, true));
+info->narHash.to_string(HashFormat::Base32, true),
+current.first.to_string(HashFormat::Base32, true));
 status = 1;
 }
 }

@@ -892,7 +892,7 @@ static void opServe(Strings opFlags, Strings opArgs)
 out << info->narSize // downloadSize
 << info->narSize;
 if (GET_PROTOCOL_MINOR(clientVersion) >= 4)
-out << info->narHash.to_string(Base32, true)
+out << info->narHash.to_string(HashFormat::Base32, true)
 << renderContentAddress(info->ca)
 << info->sigs;
 } catch (InvalidPath &) {
@@ -179,7 +179,7 @@ struct CmdFlakeMetadata : FlakeCommand, MixJSON
 j["url"] = flake.lockedRef.to_string(); // FIXME: rename to lockedUrl
 j["locked"] = fetchers::attrsToJSON(flake.lockedRef.toAttrs());
 if (auto rev = flake.lockedRef.input.getRev())
-j["revision"] = rev->to_string(Base16, false);
+j["revision"] = rev->to_string(HashFormat::Base16, false);
 if (auto dirtyRev = fetchers::maybeGetStrAttr(flake.lockedRef.toAttrs(), "dirtyRev"))
 j["dirtyRevision"] = *dirtyRev;
 if (auto revCount = flake.lockedRef.input.getRevCount())

@@ -206,7 +206,7 @@ struct CmdFlakeMetadata : FlakeCommand, MixJSON
 if (auto rev = flake.lockedRef.input.getRev())
 logger->cout(
 ANSI_BOLD "Revision:" ANSI_NORMAL " %s",
-rev->to_string(Base16, false));
+rev->to_string(HashFormat::Base16, false));
 if (auto dirtyRev = fetchers::maybeGetStrAttr(flake.lockedRef.toAttrs(), "dirtyRev"))
 logger->cout(
 ANSI_BOLD "Revision:" ANSI_NORMAL " %s",

@@ -1345,7 +1345,7 @@ struct CmdFlakePrefetch : FlakeCommand, MixJSON
 if (json) {
 auto res = nlohmann::json::object();
 res["storePath"] = store->printStorePath(tree.storePath);
-res["hash"] = hash.to_string(SRI, true);
+res["hash"] = hash.to_string(HashFormat::SRI, true);
 res["original"] = fetchers::attrsToJSON(resolvedRef.toAttrs());
 res["locked"] = fetchers::attrsToJSON(lockedRef.toAttrs());
 logger->cout(res.dump());

@@ -1353,7 +1353,7 @@ struct CmdFlakePrefetch : FlakeCommand, MixJSON
 notice("Downloaded '%s' to '%s' (hash '%s').",
 lockedRef.to_string(),
 store->printStorePath(tree.storePath),
-hash.to_string(SRI, true));
+hash.to_string(HashFormat::SRI, true));
 }
 }
 };
@@ -11,7 +11,7 @@ using namespace nix;
 struct CmdHashBase : Command
 {
 FileIngestionMethod mode;
-HashFormat hashFormat = SRI;
+HashFormat hashFormat = HashFormat::SRI;
 bool truncate = false;
 HashType ht = htSHA256;
 std::vector<std::string> paths;

@@ -22,25 +22,25 @@ struct CmdHashBase : Command
 addFlag({
 .longName = "sri",
 .description = "Print the hash in SRI format.",
-.handler = {&hashFormat, SRI},
+.handler = {&hashFormat, HashFormat::SRI},
 });

 addFlag({
 .longName = "base64",
 .description = "Print the hash in base-64 format.",
-.handler = {&hashFormat, Base64},
+.handler = {&hashFormat, HashFormat::Base64},
 });

 addFlag({
 .longName = "base32",
 .description = "Print the hash in base-32 (Nix-specific) format.",
-.handler = {&hashFormat, Base32},
+.handler = {&hashFormat, HashFormat::Base32},
 });

 addFlag({
 .longName = "base16",
 .description = "Print the hash in base-16 format.",
-.handler = {&hashFormat, Base16},
+.handler = {&hashFormat, HashFormat::Base16},
 });

 addFlag(Flag::mkHashTypeFlag("type", &ht));

@@ -94,7 +94,7 @@ struct CmdHashBase : Command

 Hash h = hashSink->finish().first;
 if (truncate && h.hashSize > 20) h = compressHash(h, 20);
-logger->cout(h.to_string(hashFormat, hashFormat == SRI));
+logger->cout(h.to_string(hashFormat, hashFormat == HashFormat::SRI));
 }
 }
 };

@@ -114,16 +114,16 @@ struct CmdToBase : Command
 std::string description() override
 {
 return fmt("convert a hash to %s representation",
-hashFormat == Base16 ? "base-16" :
-hashFormat == Base32 ? "base-32" :
-hashFormat == Base64 ? "base-64" :
+hashFormat == HashFormat::Base16 ? "base-16" :
+hashFormat == HashFormat::Base32 ? "base-32" :
+hashFormat == HashFormat::Base64 ? "base-64" :
 "SRI");
 }

 void run() override
 {
 for (auto s : args)
-logger->cout(Hash::parseAny(s, ht).to_string(hashFormat, hashFormat == SRI));
+logger->cout(Hash::parseAny(s, ht).to_string(hashFormat, hashFormat == HashFormat::SRI));
 }
 };

@@ -133,10 +133,10 @@ struct CmdHash : NixMultiCommand
 : MultiCommand({
 {"file", []() { return make_ref<CmdHashBase>(FileIngestionMethod::Flat);; }},
 {"path", []() { return make_ref<CmdHashBase>(FileIngestionMethod::Recursive); }},
-{"to-base16", []() { return make_ref<CmdToBase>(Base16); }},
-{"to-base32", []() { return make_ref<CmdToBase>(Base32); }},
-{"to-base64", []() { return make_ref<CmdToBase>(Base64); }},
-{"to-sri", []() { return make_ref<CmdToBase>(SRI); }},
+{"to-base16", []() { return make_ref<CmdToBase>(HashFormat::Base16); }},
+{"to-base32", []() { return make_ref<CmdToBase>(HashFormat::Base32); }},
+{"to-base64", []() { return make_ref<CmdToBase>(HashFormat::Base64); }},
+{"to-sri", []() { return make_ref<CmdToBase>(HashFormat::SRI); }},
 })
 { }

@@ -162,7 +162,7 @@ static int compatNixHash(int argc, char * * argv)
 {
 std::optional<HashType> ht;
 bool flat = false;
-HashFormat hashFormat = Base16;
+HashFormat hashFormat = HashFormat::Base16;
 bool truncate = false;
 enum { opHash, opTo } op = opHash;
 std::vector<std::string> ss;

@@ -173,10 +173,10 @@ static int compatNixHash(int argc, char * * argv)
 else if (*arg == "--version")
 printVersion("nix-hash");
 else if (*arg == "--flat") flat = true;
-else if (*arg == "--base16") hashFormat = Base16;
-else if (*arg == "--base32") hashFormat = Base32;
-else if (*arg == "--base64") hashFormat = Base64;
-else if (*arg == "--sri") hashFormat = SRI;
+else if (*arg == "--base16") hashFormat = HashFormat::Base16;
+else if (*arg == "--base32") hashFormat = HashFormat::Base32;
+else if (*arg == "--base64") hashFormat = HashFormat::Base64;
+else if (*arg == "--sri") hashFormat = HashFormat::SRI;
 else if (*arg == "--truncate") truncate = true;
 else if (*arg == "--type") {
 std::string s = getArg(*arg, arg, end);

@@ -184,19 +184,19 @@ static int compatNixHash(int argc, char * * argv)
 }
 else if (*arg == "--to-base16") {
 op = opTo;
-hashFormat = Base16;
+hashFormat = HashFormat::Base16;
 }
 else if (*arg == "--to-base32") {
 op = opTo;
-hashFormat = Base32;
+hashFormat = HashFormat::Base32;
 }
 else if (*arg == "--to-base64") {
 op = opTo;
-hashFormat = Base64;
+hashFormat = HashFormat::Base64;
 }
 else if (*arg == "--to-sri") {
 op = opTo;
-hashFormat = SRI;
+hashFormat = HashFormat::SRI;
 }
 else if (*arg != "" && arg->at(0) == '-')
 return false;
@@ -90,7 +90,7 @@ struct CmdPathInfo : StorePathsCommand, MixJSON
 std::cout << store->pathInfoToJSON(
 // FIXME: preserve order?
 StorePathSet(storePaths.begin(), storePaths.end()),
-true, showClosureSize, SRI, AllowInvalid).dump();
+true, showClosureSize, HashFormat::SRI, AllowInvalid).dump();
 }

 else {
@@ -310,13 +310,13 @@ struct CmdStorePrefetchFile : StoreCommand, MixJSON
 if (json) {
 auto res = nlohmann::json::object();
 res["storePath"] = store->printStorePath(storePath);
-res["hash"] = hash.to_string(SRI, true);
+res["hash"] = hash.to_string(HashFormat::SRI, true);
 logger->cout(res.dump());
 } else {
 notice("Downloaded '%s' to '%s' (hash '%s').",
 url,
 store->printStorePath(storePath),
-hash.to_string(SRI, true));
+hash.to_string(HashFormat::SRI, true));
 }
 }
 };
@@ -108,8 +108,8 @@ struct CmdVerify : StorePathsCommand
 act2.result(resCorruptedPath, store->printStorePath(info->path));
 printError("path '%s' was modified! expected hash '%s', got '%s'",
 store->printStorePath(info->path),
-info->narHash.to_string(Base32, true),
-hash.first.to_string(Base32, true));
+info->narHash.to_string(HashFormat::Base32, true),
+hash.first.to_string(HashFormat::Base32, true));
 }
 }