Merge pull request #9452 from kolloch/feature/nix-hash-convert

Add nix hash convert
tomberek 2023-12-06 19:47:53 -05:00 committed by GitHub
commit 82449a455f
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
74 changed files with 889 additions and 530 deletions

View file

@ -0,0 +1,22 @@
synopsis: Rename hash format `base32` to `nix32`
prs: #9452
description: {
The hash format `base32` was renamed to `nix32` because it uses a Nix-specific character set rather than the standard
[Base32](https://en.wikipedia.org/wiki/Base32) alphabet.
## Deprecation: Use `nix32` instead of `base32` as `toHashFormat`
For the builtin `convertHash`, the `toHashFormat` parameter now accepts the same hash formats as the `--to`/`--from`
parameters of the `nix hash convert` command: `"base16"`, `"nix32"`, `"base64"`, and `"sri"`. The former `"base32"` value
remains available as a deprecated alias for `"nix32"`. Please convert your code from:
```nix
builtins.convertHash { inherit hash hashAlgo; toHashFormat = "base32";}
```
to
```nix
builtins.convertHash { inherit hash hashAlgo; toHashFormat = "nix32";}
```
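The conversion above can also be exercised from the command line. This is a minimal sketch, assuming `nix eval --expr` is available and reusing the sha256 example hash from the `nix hash convert` notes below:
```bash
# Convert an SRI sha256 hash to the nix32 format via the builtin.
# hashAlgo can be omitted here because the SRI string already names the algorithm.
nix eval --expr '
  builtins.convertHash {
    hash = "sha256-ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0=";
    toHashFormat = "nix32";
  }
'
# Expected output (the nix32 digest): "1b8m03r63zqhnjf7l5wnldhh7c134ap5vpj0850ymkq1iyzicy5s"
```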

View file

@ -0,0 +1,47 @@
synopsis: Add `nix hash convert`
prs: #9452
description: {
The new [`nix hash convert`](https://github.com/NixOS/nix/issues/8876) subcommand is on a fast track
to stabilization! Examples:
- Convert the hash to `nix32`.
```bash
$ nix hash convert --algo "sha1" --to nix32 "800d59cfcd3c05e900cb4e214be48f6b886a08df"
vw46m23bizj4n8afrc0fj19wrp7mj3c0
```
`nix32` is a Base32 encoding with a Nix-specific character set.
The hashing algorithm must be specified explicitly (it is optional for SRI hashes), while the input hash format is
detected from the length of the input hash.
- Convert the hash to the `sri` format that includes an algorithm specification:
```bash
$ nix hash convert --algo "sha1" "800d59cfcd3c05e900cb4e214be48f6b886a08df"
sha1-gA1Zz808BekAy04hS+SPa4hqCN8=
```
or with an explicit `--to` format:
```bash
$ nix hash convert --algo "sha1" --to sri "800d59cfcd3c05e900cb4e214be48f6b886a08df"
sha1-gA1Zz808BekAy04hS+SPa4hqCN8=
```
- Assert the input format of the hash:
```bash
$ nix hash convert --algo "sha256" --from nix32 "ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0="
error: input hash 'ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0=' does not have the expected format '--from nix32'
$ nix hash convert --algo "sha256" --from nix32 "1b8m03r63zqhnjf7l5wnldhh7c134ap5vpj0850ymkq1iyzicy5s"
sha256-ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0=
```
The `--to`/`--from`/`--algo` parameters have context-sensitive auto-completion.
## Related Deprecations
The following commands are still available but will emit a deprecation warning. Please convert your code to
`nix hash convert` (a side-by-side sketch follows the list):
- `nix hash to-base16 $hash1 $hash2`: Use `nix hash convert --to base16 $hash1 $hash2` instead.
- `nix hash to-base32 $hash1 $hash2`: Use `nix hash convert --to nix32 $hash1 $hash2` instead.
- `nix hash to-base64 $hash1 $hash2`: Use `nix hash convert --to base64 $hash1 $hash2` instead.
- `nix hash to-sri $hash1 $hash2`: Use `nix hash convert --to sri $hash1 $hash2`
or even just `nix hash convert $hash1 $hash2` instead.
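As a quick sanity check, the deprecated and replacement commands can be run side by side. This is a minimal sketch, reusing the sha1 example hash from above and assuming the legacy command's `--type` flag:
```bash
# Deprecated form: still works, but emits a deprecation warning.
nix hash to-base32 --type sha1 "800d59cfcd3c05e900cb4e214be48f6b886a08df"
# Replacement form:
nix hash convert --algo sha1 --to nix32 "800d59cfcd3c05e900cb4e214be48f6b886a08df"
# Both should print: vw46m23bizj4n8afrc0fj19wrp7mj3c0
```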
}

View file

@ -78,7 +78,7 @@ SV * queryReferences(char * path)
SV * queryPathHash(char * path)
PPCODE:
try {
auto s = store()->queryPathInfo(store()->parseStorePath(path))->narHash.to_string(HashFormat::Base32, true);
auto s = store()->queryPathInfo(store()->parseStorePath(path))->narHash.to_string(HashFormat::Nix32, true);
XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
} catch (Error & e) {
croak("%s", e.what());
@ -104,7 +104,7 @@ SV * queryPathInfo(char * path, int base32)
XPUSHs(&PL_sv_undef);
else
XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(*info->deriver).c_str(), 0)));
auto s = info->narHash.to_string(base32 ? HashFormat::Base32 : HashFormat::Base16, true);
auto s = info->narHash.to_string(base32 ? HashFormat::Nix32 : HashFormat::Base16, true);
XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
mXPUSHi(info->registrationTime);
mXPUSHi(info->narSize);
@ -205,8 +205,8 @@ void importPaths(int fd, int dontCheckSigs)
SV * hashPath(char * algo, int base32, char * path)
PPCODE:
try {
Hash h = hashPath(parseHashType(algo), path).first;
auto s = h.to_string(base32 ? HashFormat::Base32 : HashFormat::Base16, false);
Hash h = hashPath(parseHashAlgo(algo), path).first;
auto s = h.to_string(base32 ? HashFormat::Nix32 : HashFormat::Base16, false);
XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
} catch (Error & e) {
croak("%s", e.what());
@ -216,8 +216,8 @@ SV * hashPath(char * algo, int base32, char * path)
SV * hashFile(char * algo, int base32, char * path)
PPCODE:
try {
Hash h = hashFile(parseHashType(algo), path);
auto s = h.to_string(base32 ? HashFormat::Base32 : HashFormat::Base16, false);
Hash h = hashFile(parseHashAlgo(algo), path);
auto s = h.to_string(base32 ? HashFormat::Nix32 : HashFormat::Base16, false);
XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
} catch (Error & e) {
croak("%s", e.what());
@ -227,8 +227,8 @@ SV * hashFile(char * algo, int base32, char * path)
SV * hashString(char * algo, int base32, char * s)
PPCODE:
try {
Hash h = hashString(parseHashType(algo), s);
auto s = h.to_string(base32 ? HashFormat::Base32 : HashFormat::Base16, false);
Hash h = hashString(parseHashAlgo(algo), s);
auto s = h.to_string(base32 ? HashFormat::Nix32 : HashFormat::Base16, false);
XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
} catch (Error & e) {
croak("%s", e.what());
@ -238,8 +238,8 @@ SV * hashString(char * algo, int base32, char * s)
SV * convertHash(char * algo, char * s, int toBase32)
PPCODE:
try {
auto h = Hash::parseAny(s, parseHashType(algo));
auto s = h.to_string(toBase32 ? HashFormat::Base32 : HashFormat::Base16, false);
auto h = Hash::parseAny(s, parseHashAlgo(algo));
auto s = h.to_string(toBase32 ? HashFormat::Nix32 : HashFormat::Base16, false);
XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
} catch (Error & e) {
croak("%s", e.what());
@ -281,7 +281,7 @@ SV * addToStore(char * srcPath, int recursive, char * algo)
PPCODE:
try {
auto method = recursive ? FileIngestionMethod::Recursive : FileIngestionMethod::Flat;
auto path = store()->addToStore(std::string(baseNameOf(srcPath)), srcPath, method, parseHashType(algo));
auto path = store()->addToStore(std::string(baseNameOf(srcPath)), srcPath, method, parseHashAlgo(algo));
XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(path).c_str(), 0)));
} catch (Error & e) {
croak("%s", e.what());
@ -291,7 +291,7 @@ SV * addToStore(char * srcPath, int recursive, char * algo)
SV * makeFixedOutputPath(int recursive, char * algo, char * hash, char * name)
PPCODE:
try {
auto h = Hash::parseAny(hash, parseHashType(algo));
auto h = Hash::parseAny(hash, parseHashAlgo(algo));
auto method = recursive ? FileIngestionMethod::Recursive : FileIngestionMethod::Flat;
auto path = store()->makeFixedOutputPath(name, FixedOutputInfo {
.method = method,

View file

@ -904,7 +904,7 @@ Fingerprint LockedFlake::getFingerprint() const
// FIXME: as an optimization, if the flake contains a lock file
// and we haven't changed it, then it's sufficient to use
// flake.sourceInfo.storePath for the fingerprint.
return hashString(htSHA256,
return hashString(HashAlgorithm::SHA256,
fmt("%s;%s;%d;%d;%s",
flake.storePath.to_string(),
flake.lockedRef.subdir,

View file

@ -1317,7 +1317,7 @@ drvName, Bindings * attrs, Value & v)
.errPos = state.positions[noPos]
}));
auto h = newHashAllowEmpty(*outputHash, parseHashTypeOpt(outputHashAlgo));
auto h = newHashAllowEmpty(*outputHash, parseHashAlgoOpt(outputHashAlgo));
auto method = ingestionMethod.value_or(FileIngestionMethod::Flat);
@ -1339,7 +1339,7 @@ drvName, Bindings * attrs, Value & v)
.errPos = state.positions[noPos]
});
auto ht = parseHashTypeOpt(outputHashAlgo).value_or(htSHA256);
auto ha = parseHashAlgoOpt(outputHashAlgo).value_or(HashAlgorithm::SHA256);
auto method = ingestionMethod.value_or(FileIngestionMethod::Recursive);
for (auto & i : outputs) {
@ -1348,13 +1348,13 @@ drvName, Bindings * attrs, Value & v)
drv.outputs.insert_or_assign(i,
DerivationOutput::Impure {
.method = method,
.hashType = ht,
.hashAlgo = ha,
});
else
drv.outputs.insert_or_assign(i,
DerivationOutput::CAFloating {
.method = method,
.hashType = ht,
.hashAlgo = ha,
});
}
}
@ -1754,17 +1754,17 @@ static RegisterPrimOp primop_findFile(PrimOp {
/* Return the cryptographic hash of a file in base-16. */
static void prim_hashFile(EvalState & state, const PosIdx pos, Value * * args, Value & v)
{
auto type = state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.hashFile");
std::optional<HashType> ht = parseHashType(type);
if (!ht)
auto algo = state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.hashFile");
std::optional<HashAlgorithm> ha = parseHashAlgo(algo);
if (!ha)
state.debugThrowLastTrace(Error({
.msg = hintfmt("unknown hash type '%1%'", type),
.msg = hintfmt("unknown hash algo '%1%'", algo),
.errPos = state.positions[pos]
}));
auto path = realisePath(state, pos, *args[1]);
v.mkString(hashString(*ht, path.readFile()).to_string(HashFormat::Base16, false));
v.mkString(hashString(*ha, path.readFile()).to_string(HashFormat::Base16, false));
}
static RegisterPrimOp primop_hashFile({
@ -2341,7 +2341,7 @@ static void prim_path(EvalState & state, const PosIdx pos, Value * * args, Value
else if (n == "recursive")
method = FileIngestionMethod { state.forceBool(*attr.value, attr.pos, "while evaluating the `recursive` attribute passed to builtins.path") };
else if (n == "sha256")
expectedHash = newHashAllowEmpty(state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the `sha256` attribute passed to builtins.path"), htSHA256);
expectedHash = newHashAllowEmpty(state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the `sha256` attribute passed to builtins.path"), HashAlgorithm::SHA256);
else
state.debugThrowLastTrace(EvalError({
.msg = hintfmt("unsupported argument '%1%' to 'addPath'", state.symbols[attr.name]),
@ -3766,18 +3766,18 @@ static RegisterPrimOp primop_stringLength({
/* Return the cryptographic hash of a string in base-16. */
static void prim_hashString(EvalState & state, const PosIdx pos, Value * * args, Value & v)
{
auto type = state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.hashString");
std::optional<HashType> ht = parseHashType(type);
if (!ht)
auto algo = state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.hashString");
std::optional<HashAlgorithm> ha = parseHashAlgo(algo);
if (!ha)
state.debugThrowLastTrace(Error({
.msg = hintfmt("unknown hash type '%1%'", type),
.msg = hintfmt("unknown hash algo '%1%'", algo),
.errPos = state.positions[pos]
}));
NixStringContext context; // discarded
auto s = state.forceString(*args[1], context, pos, "while evaluating the second argument passed to builtins.hashString");
v.mkString(hashString(*ht, s).to_string(HashFormat::Base16, false));
v.mkString(hashString(*ha, s).to_string(HashFormat::Base16, false));
}
static RegisterPrimOp primop_hashString({
@ -3800,15 +3800,15 @@ static void prim_convertHash(EvalState & state, const PosIdx pos, Value * * args
auto hash = state.forceStringNoCtx(*iteratorHash->value, pos, "while evaluating the attribute 'hash'");
Bindings::iterator iteratorHashAlgo = inputAttrs->find(state.symbols.create("hashAlgo"));
std::optional<HashType> ht = std::nullopt;
std::optional<HashAlgorithm> ha = std::nullopt;
if (iteratorHashAlgo != inputAttrs->end()) {
ht = parseHashType(state.forceStringNoCtx(*iteratorHashAlgo->value, pos, "while evaluating the attribute 'hashAlgo'"));
ha = parseHashAlgo(state.forceStringNoCtx(*iteratorHashAlgo->value, pos, "while evaluating the attribute 'hashAlgo'"));
}
Bindings::iterator iteratorToHashFormat = getAttr(state, state.symbols.create("toHashFormat"), args[0]->attrs, "while locating the attribute 'toHashFormat'");
HashFormat hf = parseHashFormat(state.forceStringNoCtx(*iteratorToHashFormat->value, pos, "while evaluating the attribute 'toHashFormat'"));
v.mkString(Hash::parseAny(hash, ht).to_string(hf, hf == HashFormat::SRI));
v.mkString(Hash::parseAny(hash, ha).to_string(hf, hf == HashFormat::SRI));
}
static RegisterPrimOp primop_convertHash({
@ -3837,7 +3837,8 @@ static RegisterPrimOp primop_convertHash({
The format of the resulting hash. Must be one of
- `"base16"`
- `"base32"`
- `"nix32"`
- `"base32"` (deprecated alias for `"nix32"`)
- `"base64"`
- `"sri"`

View file

@ -31,7 +31,7 @@ static void prim_fetchMercurial(EvalState & state, const PosIdx pos, Value * * a
// be both a revision or a branch/tag name.
auto value = state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the `rev` attribute passed to builtins.fetchMercurial");
if (std::regex_match(value.begin(), value.end(), revRegex))
rev = Hash::parseAny(value, htSHA1);
rev = Hash::parseAny(value, HashAlgorithm::SHA1);
else
ref = value;
}
@ -79,7 +79,7 @@ static void prim_fetchMercurial(EvalState & state, const PosIdx pos, Value * * a
attrs2.alloc("branch").mkString(*input2.getRef());
// Backward compatibility: set 'rev' to
// 0000000000000000000000000000000000000000 for a dirty tree.
auto rev2 = input2.getRev().value_or(Hash(htSHA1));
auto rev2 = input2.getRev().value_or(Hash(HashAlgorithm::SHA1));
attrs2.alloc("rev").mkString(rev2.gitRev());
attrs2.alloc("shortRev").mkString(rev2.gitRev().substr(0, 12));
if (auto revCount = input2.getRevCount())

View file

@ -46,7 +46,7 @@ void emitTreeAttrs(
attrs.alloc("shortRev").mkString(rev->gitShortRev());
} else if (emptyRevFallback) {
// Backwards compat for `builtins.fetchGit`: dirty repos return an empty sha1 as rev
auto emptyHash = Hash(htSHA1);
auto emptyHash = Hash(HashAlgorithm::SHA1);
attrs.alloc("rev").mkString(emptyHash.gitRev());
attrs.alloc("shortRev").mkString(emptyHash.gitShortRev());
}
@ -246,7 +246,7 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v
if (n == "url")
url = state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the url we should fetch");
else if (n == "sha256")
expectedHash = newHashAllowEmpty(state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the sha256 of the content we should fetch"), htSHA256);
expectedHash = newHashAllowEmpty(state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the sha256 of the content we should fetch"), HashAlgorithm::SHA256);
else if (n == "name")
name = state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the name of the content we should fetch");
else
@ -276,7 +276,7 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v
state.debugThrowLastTrace(EvalError("in pure evaluation mode, '%s' requires a 'sha256' argument", who));
// early exit if pinned and already in the store
if (expectedHash && expectedHash->type == htSHA256) {
if (expectedHash && expectedHash->algo == HashAlgorithm::SHA256) {
auto expectedPath = state.store->makeFixedOutputPath(
name,
FixedOutputInfo {
@ -301,10 +301,10 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v
if (expectedHash) {
auto hash = unpack
? state.store->queryPathInfo(storePath)->narHash
: hashFile(htSHA256, state.store->toRealPath(storePath));
: hashFile(HashAlgorithm::SHA256, state.store->toRealPath(storePath));
if (hash != *expectedHash)
state.debugThrowLastTrace(EvalError((unsigned int) 102, "hash mismatch in file downloaded from '%s':\n specified: %s\n got: %s",
*url, expectedHash->to_string(HashFormat::Base32, true), hash.to_string(HashFormat::Base32, true)));
*url, expectedHash->to_string(HashFormat::Nix32, true), hash.to_string(HashFormat::Nix32, true)));
}
state.allowAndSetStorePathString(storePath, v);

View file

@ -289,8 +289,8 @@ std::string Input::getType() const
std::optional<Hash> Input::getNarHash() const
{
if (auto s = maybeGetStrAttr(attrs, "narHash")) {
auto hash = s->empty() ? Hash(htSHA256) : Hash::parseSRI(*s);
if (hash.type != htSHA256)
auto hash = s->empty() ? Hash(HashAlgorithm::SHA256) : Hash::parseSRI(*s);
if (hash.algo != HashAlgorithm::SHA256)
throw UsageError("narHash must use SHA-256");
return hash;
}
@ -314,7 +314,7 @@ std::optional<Hash> Input::getRev() const
} catch (BadHash &e) {
// Default to sha1 for backwards compatibility with existing
// usages (e.g. `builtins.fetchTree` calls or flake inputs).
hash = Hash::parseAny(*s, htSHA1);
hash = Hash::parseAny(*s, HashAlgorithm::SHA1);
}
}

View file

@ -91,7 +91,7 @@ Hash toHash(const git_oid & oid)
#ifdef GIT_EXPERIMENTAL_SHA256
assert(oid.type == GIT_OID_SHA1);
#endif
Hash hash(htSHA1);
Hash hash(HashAlgorithm::SHA1);
memcpy(hash.hash, oid.id, hash.hashSize);
return hash;
}
@ -439,7 +439,7 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this<GitRepoImpl>
std::string re = R"(Good "git" signature for \* with .* key SHA256:[)";
for (const fetchers::PublicKey & k : publicKeys){
// Calculate sha256 fingerprint from public key and escape the regex symbol '+' to match the key literally
auto fingerprint = trim(hashString(htSHA256, base64Decode(k.key)).to_string(nix::HashFormat::Base64, false), "=");
auto fingerprint = trim(hashString(HashAlgorithm::SHA256, base64Decode(k.key)).to_string(nix::HashFormat::Base64, false), "=");
auto escaped_fingerprint = std::regex_replace(fingerprint, std::regex("\\+"), "\\+" );
re += "(" + escaped_fingerprint + ")";
}

View file

@ -52,7 +52,7 @@ bool touchCacheFile(const Path & path, time_t touch_time)
Path getCachePath(std::string_view key)
{
return getCacheDir() + "/nix/gitv3/" +
hashString(htSHA256, key).to_string(HashFormat::Base32, false);
hashString(HashAlgorithm::SHA256, key).to_string(HashFormat::Nix32, false);
}
// Returns the name of the HEAD branch.
@ -369,7 +369,7 @@ struct GitInputScheme : InputScheme
{
auto checkHashType = [&](const std::optional<Hash> & hash)
{
if (hash.has_value() && !(hash->type == htSHA1 || hash->type == htSHA256))
if (hash.has_value() && !(hash->algo == HashAlgorithm::SHA1 || hash->algo == HashAlgorithm::SHA256))
throw Error("Hash '%s' is not supported by Git. Supported types are sha1 and sha256.", hash->to_string(HashFormat::Base16, true));
};
@ -559,7 +559,7 @@ struct GitInputScheme : InputScheme
repoInfo.url
);
} else
input.attrs.insert_or_assign("rev", Hash::parseAny(chomp(readFile(localRefFile)), htSHA1).gitRev());
input.attrs.insert_or_assign("rev", Hash::parseAny(chomp(readFile(localRefFile)), HashAlgorithm::SHA1).gitRev());
// cache dir lock is removed at scope end; we will only use read-only operations on specific revisions in the remainder
}

View file

@ -42,7 +42,7 @@ struct GitArchiveInputScheme : InputScheme
auto size = path.size();
if (size == 3) {
if (std::regex_match(path[2], revRegex))
rev = Hash::parseAny(path[2], htSHA1);
rev = Hash::parseAny(path[2], HashAlgorithm::SHA1);
else if (std::regex_match(path[2], refRegex))
ref = path[2];
else
@ -68,7 +68,7 @@ struct GitArchiveInputScheme : InputScheme
if (name == "rev") {
if (rev)
throw BadURL("URL '%s' contains multiple commit hashes", url.url);
rev = Hash::parseAny(value, htSHA1);
rev = Hash::parseAny(value, HashAlgorithm::SHA1);
}
else if (name == "ref") {
if (!std::regex_match(value, refRegex))
@ -284,7 +284,7 @@ struct GitHubInputScheme : GitArchiveInputScheme
readFile(
store->toRealPath(
downloadFile(store, url, "source", false, headers).storePath)));
auto rev = Hash::parseAny(std::string { json["sha"] }, htSHA1);
auto rev = Hash::parseAny(std::string { json["sha"] }, HashAlgorithm::SHA1);
debug("HEAD revision for '%s' is %s", url, rev.gitRev());
return rev;
}
@ -356,7 +356,7 @@ struct GitLabInputScheme : GitArchiveInputScheme
readFile(
store->toRealPath(
downloadFile(store, url, "source", false, headers).storePath)));
auto rev = Hash::parseAny(std::string(json[0]["id"]), htSHA1);
auto rev = Hash::parseAny(std::string(json[0]["id"]), HashAlgorithm::SHA1);
debug("HEAD revision for '%s' is %s", url, rev.gitRev());
return rev;
}
@ -448,7 +448,7 @@ struct SourceHutInputScheme : GitArchiveInputScheme
if(!id)
throw BadURL("in '%d', couldn't find ref '%d'", input.to_string(), ref);
auto rev = Hash::parseAny(*id, htSHA1);
auto rev = Hash::parseAny(*id, HashAlgorithm::SHA1);
debug("HEAD revision for '%s' is %s", fmt("%s/%s", base_url, ref), rev.gitRev());
return rev;
}

View file

@ -20,7 +20,7 @@ struct IndirectInputScheme : InputScheme
if (path.size() == 1) {
} else if (path.size() == 2) {
if (std::regex_match(path[1], revRegex))
rev = Hash::parseAny(path[1], htSHA1);
rev = Hash::parseAny(path[1], HashAlgorithm::SHA1);
else if (std::regex_match(path[1], refRegex))
ref = path[1];
else
@ -31,7 +31,7 @@ struct IndirectInputScheme : InputScheme
ref = path[1];
if (!std::regex_match(path[2], revRegex))
throw BadURL("in flake URL '%s', '%s' is not a commit hash", url.url, path[2]);
rev = Hash::parseAny(path[2], htSHA1);
rev = Hash::parseAny(path[2], HashAlgorithm::SHA1);
} else
throw BadURL("GitHub URL '%s' is invalid", url.url);

View file

@ -44,8 +44,8 @@ StorePath InputAccessor::fetchToStore(
auto storePath =
settings.readOnlyMode
? store->computeStorePathFromDump(*source, name, method, htSHA256).first
: store->addToStoreFromDump(*source, name, method, htSHA256, repair);
? store->computeStorePathFromDump(*source, name, method, HashAlgorithm::SHA256).first
: store->addToStoreFromDump(*source, name, method, HashAlgorithm::SHA256, repair);
if (cacheKey)
fetchers::getCache()->add(store, *cacheKey, {}, storePath, true);

View file

@ -210,7 +210,7 @@ struct MercurialInputScheme : InputScheme
return files.count(file);
};
auto storePath = store->addToStore(input.getName(), actualPath, FileIngestionMethod::Recursive, htSHA256, filter);
auto storePath = store->addToStore(input.getName(), actualPath, FileIngestionMethod::Recursive, HashAlgorithm::SHA256, filter);
return {std::move(storePath), input};
}
@ -220,7 +220,7 @@ struct MercurialInputScheme : InputScheme
auto checkHashType = [&](const std::optional<Hash> & hash)
{
if (hash.has_value() && hash->type != htSHA1)
if (hash.has_value() && hash->algo != HashAlgorithm::SHA1)
throw Error("Hash '%s' is not supported by Mercurial. Only sha1 is supported.", hash->to_string(HashFormat::Base16, true));
};
@ -260,14 +260,14 @@ struct MercurialInputScheme : InputScheme
});
if (auto res = getCache()->lookup(store, unlockedAttrs)) {
auto rev2 = Hash::parseAny(getStrAttr(res->first, "rev"), htSHA1);
auto rev2 = Hash::parseAny(getStrAttr(res->first, "rev"), HashAlgorithm::SHA1);
if (!input.getRev() || input.getRev() == rev2) {
input.attrs.insert_or_assign("rev", rev2.gitRev());
return makeResult(res->first, std::move(res->second));
}
}
Path cacheDir = fmt("%s/nix/hg/%s", getCacheDir(), hashString(htSHA256, actualUrl).to_string(HashFormat::Base32, false));
Path cacheDir = fmt("%s/nix/hg/%s", getCacheDir(), hashString(HashAlgorithm::SHA256, actualUrl).to_string(HashFormat::Nix32, false));
/* If this is a commit hash that we already have, we don't
have to pull again. */
@ -301,7 +301,7 @@ struct MercurialInputScheme : InputScheme
runHg({ "log", "-R", cacheDir, "-r", revOrRef, "--template", "{node} {rev} {branch}" }));
assert(tokens.size() == 3);
input.attrs.insert_or_assign("rev", Hash::parseAny(tokens[0], htSHA1).gitRev());
input.attrs.insert_or_assign("rev", Hash::parseAny(tokens[0], HashAlgorithm::SHA1).gitRev());
auto revCount = std::stoull(tokens[1]);
input.attrs.insert_or_assign("ref", tokens[2]);

View file

@ -73,7 +73,7 @@ DownloadFileResult downloadFile(
} else {
StringSink sink;
dumpString(res.data, sink);
auto hash = hashString(htSHA256, res.data);
auto hash = hashString(HashAlgorithm::SHA256, res.data);
ValidPathInfo info {
*store,
name,
@ -82,7 +82,7 @@ DownloadFileResult downloadFile(
.hash = hash,
.references = {},
},
hashString(htSHA256, sink.s),
hashString(HashAlgorithm::SHA256, sink.s),
};
info.narSize = sink.s.size();
auto source = StringSource { sink.s };
@ -156,7 +156,7 @@ DownloadTarballResult downloadTarball(
throw nix::Error("tarball '%s' contains an unexpected number of top-level files", url);
auto topDir = tmpDir + "/" + members.begin()->name;
lastModified = lstat(topDir).st_mtime;
unpackedStorePath = store->addToStore(name, topDir, FileIngestionMethod::Recursive, htSHA256, defaultPathFilter, NoRepair);
unpackedStorePath = store->addToStore(name, topDir, FileIngestionMethod::Recursive, HashAlgorithm::SHA256, defaultPathFilter, NoRepair);
}
Attrs infoAttrs({

View file

@ -143,9 +143,9 @@ ref<const ValidPathInfo> BinaryCacheStore::addToStoreCommon(
/* Read the NAR simultaneously into a CompressionSink+FileSink (to
write the compressed NAR to disk), into a HashSink (to get the
NAR hash), and into a NarAccessor (to get the NAR listing). */
HashSink fileHashSink { htSHA256 };
HashSink fileHashSink { HashAlgorithm::SHA256 };
std::shared_ptr<SourceAccessor> narAccessor;
HashSink narHashSink { htSHA256 };
HashSink narHashSink { HashAlgorithm::SHA256 };
{
FdSink fileSink(fdTemp.get());
TeeSink teeSinkCompressed { fileSink, fileHashSink };
@ -165,8 +165,8 @@ ref<const ValidPathInfo> BinaryCacheStore::addToStoreCommon(
auto [fileHash, fileSize] = fileHashSink.finish();
narInfo->fileHash = fileHash;
narInfo->fileSize = fileSize;
narInfo->url = "nar/" + narInfo->fileHash->to_string(HashFormat::Base32, false) + ".nar"
+ (compression == "xz" ? ".xz" :
narInfo->url = "nar/" + narInfo->fileHash->to_string(HashFormat::Nix32, false) + ".nar"
+ (compression == "xz" ? ".xz" :
compression == "bzip2" ? ".bz2" :
compression == "zstd" ? ".zst" :
compression == "lzip" ? ".lzip" :
@ -301,9 +301,9 @@ void BinaryCacheStore::addToStore(const ValidPathInfo & info, Source & narSource
}
StorePath BinaryCacheStore::addToStoreFromDump(Source & dump, std::string_view name,
FileIngestionMethod method, HashType hashAlgo, RepairFlag repair, const StorePathSet & references)
FileIngestionMethod method, HashAlgorithm hashAlgo, RepairFlag repair, const StorePathSet & references)
{
if (method != FileIngestionMethod::Recursive || hashAlgo != htSHA256)
if (method != FileIngestionMethod::Recursive || hashAlgo != HashAlgorithm::SHA256)
unsupported("addToStoreFromDump");
return addToStoreCommon(dump, repair, CheckSigs, [&](HashResult nar) {
ValidPathInfo info {
@ -399,13 +399,13 @@ void BinaryCacheStore::queryPathInfoUncached(const StorePath & storePath,
}
StorePath BinaryCacheStore::addToStore(
std::string_view name,
const Path & srcPath,
FileIngestionMethod method,
HashType hashAlgo,
PathFilter & filter,
RepairFlag repair,
const StorePathSet & references)
std::string_view name,
const Path & srcPath,
FileIngestionMethod method,
HashAlgorithm hashAlgo,
PathFilter & filter,
RepairFlag repair,
const StorePathSet & references)
{
/* FIXME: Make BinaryCacheStore::addToStoreCommon support
non-recursive+sha256 so we can just use the default
@ -448,7 +448,7 @@ StorePath BinaryCacheStore::addTextToStore(
const StorePathSet & references,
RepairFlag repair)
{
auto textHash = hashString(htSHA256, s);
auto textHash = hashString(HashAlgorithm::SHA256, s);
auto path = makeTextPath(name, TextInfo { { textHash }, references });
if (!repair && isValidPath(path))

View file

@ -124,16 +124,16 @@ public:
RepairFlag repair, CheckSigsFlag checkSigs) override;
StorePath addToStoreFromDump(Source & dump, std::string_view name,
FileIngestionMethod method, HashType hashAlgo, RepairFlag repair, const StorePathSet & references) override;
FileIngestionMethod method, HashAlgorithm hashAlgo, RepairFlag repair, const StorePathSet & references) override;
StorePath addToStore(
std::string_view name,
const Path & srcPath,
FileIngestionMethod method,
HashType hashAlgo,
PathFilter & filter,
RepairFlag repair,
const StorePathSet & references) override;
std::string_view name,
const Path & srcPath,
FileIngestionMethod method,
HashAlgorithm hashAlgo,
PathFilter & filter,
RepairFlag repair,
const StorePathSet & references) override;
StorePath addTextToStore(
std::string_view name,

View file

@ -1066,8 +1066,8 @@ void LocalDerivationGoal::initTmpDir() {
if (passAsFile.find(i.first) == passAsFile.end()) {
env[i.first] = i.second;
} else {
auto hash = hashString(htSHA256, i.first);
std::string fn = ".attr-" + hash.to_string(HashFormat::Base32, false);
auto hash = hashString(HashAlgorithm::SHA256, i.first);
std::string fn = ".attr-" + hash.to_string(HashFormat::Nix32, false);
Path p = tmpDir + "/" + fn;
writeFile(p, rewriteStrings(i.second, inputRewrites));
chownToBuilder(p);
@ -1290,13 +1290,13 @@ struct RestrictedStore : public virtual RestrictedStoreConfig, public virtual In
{ throw Error("queryPathFromHashPart"); }
StorePath addToStore(
std::string_view name,
const Path & srcPath,
FileIngestionMethod method,
HashType hashAlgo,
PathFilter & filter,
RepairFlag repair,
const StorePathSet & references) override
std::string_view name,
const Path & srcPath,
FileIngestionMethod method,
HashAlgorithm hashAlgo,
PathFilter & filter,
RepairFlag repair,
const StorePathSet & references) override
{ throw Error("addToStore"); }
void addToStore(const ValidPathInfo & info, Source & narSource,
@ -1318,12 +1318,12 @@ struct RestrictedStore : public virtual RestrictedStoreConfig, public virtual In
}
StorePath addToStoreFromDump(
Source & dump,
std::string_view name,
FileIngestionMethod method,
HashType hashAlgo,
RepairFlag repair,
const StorePathSet & references) override
Source & dump,
std::string_view name,
FileIngestionMethod method,
HashAlgorithm hashAlgo,
RepairFlag repair,
const StorePathSet & references) override
{
auto path = next->addToStoreFromDump(dump, name, method, hashAlgo, repair, references);
goal.addDependency(path);
@ -2466,7 +2466,7 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
rewriteOutput(outputRewrites);
/* FIXME optimize and deduplicate with addToStore */
std::string oldHashPart { scratchPath->hashPart() };
HashModuloSink caSink { outputHash.hashType, oldHashPart };
HashModuloSink caSink {outputHash.hashAlgo, oldHashPart };
std::visit(overloaded {
[&](const TextIngestionMethod &) {
readFile(actualPath, caSink);
@ -2511,7 +2511,7 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
std::string(newInfo0.path.hashPart())}});
}
HashResult narHashAndSize = hashPath(htSHA256, actualPath);
HashResult narHashAndSize = hashPath(HashAlgorithm::SHA256, actualPath);
newInfo0.narHash = narHashAndSize.first;
newInfo0.narSize = narHashAndSize.second;
@ -2531,7 +2531,7 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
std::string { scratchPath->hashPart() },
std::string { requiredFinalPath.hashPart() });
rewriteOutput(outputRewrites);
auto narHashAndSize = hashPath(htSHA256, actualPath);
auto narHashAndSize = hashPath(HashAlgorithm::SHA256, actualPath);
ValidPathInfo newInfo0 { requiredFinalPath, narHashAndSize.first };
newInfo0.narSize = narHashAndSize.second;
auto refs = rewriteRefs();
@ -2546,7 +2546,7 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
auto newInfo0 = newInfoFromCA(DerivationOutput::CAFloating {
.method = dof.ca.method,
.hashType = wanted.type,
.hashAlgo = wanted.algo,
});
/* Check wanted hash */
@ -2583,7 +2583,7 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
[&](const DerivationOutput::Impure & doi) {
return newInfoFromCA(DerivationOutput::CAFloating {
.method = doi.method,
.hashType = doi.hashType,
.hashAlgo = doi.hashAlgo,
});
},
@ -2945,7 +2945,7 @@ StorePath LocalDerivationGoal::makeFallbackPath(OutputNameView outputName)
{
return worker.store.makeStorePath(
"rewrite:" + std::string(drvPath.to_string()) + ":name:" + std::string(outputName),
Hash(htSHA256), outputPathName(drv->name, outputName));
Hash(HashAlgorithm::SHA256), outputPathName(drv->name, outputName));
}
@ -2953,7 +2953,7 @@ StorePath LocalDerivationGoal::makeFallbackPath(const StorePath & path)
{
return worker.store.makeStorePath(
"rewrite:" + std::string(drvPath.to_string()) + ":" + std::string(path.to_string()),
Hash(htSHA256), path.name());
Hash(HashAlgorithm::SHA256), path.name());
}

View file

@ -519,8 +519,8 @@ bool Worker::pathContentsGood(const StorePath & path)
if (!pathExists(store.printStorePath(path)))
res = false;
else {
HashResult current = hashPath(info->narHash.type, store.printStorePath(path));
Hash nullHash(htSHA256);
HashResult current = hashPath(info->narHash.algo, store.printStorePath(path));
Hash nullHash(HashAlgorithm::SHA256);
res = info->narHash == nullHash || info->narHash == current.first;
}
pathContentsGoodCache.insert_or_assign(path, res);

View file

@ -63,9 +63,9 @@ void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData)
for (auto hashedMirror : settings.hashedMirrors.get())
try {
if (!hasSuffix(hashedMirror, "/")) hashedMirror += '/';
std::optional<HashType> ht = parseHashTypeOpt(getAttr("outputHashAlgo"));
std::optional<HashAlgorithm> ht = parseHashAlgoOpt(getAttr("outputHashAlgo"));
Hash h = newHashAllowEmpty(getAttr("outputHash"), ht);
fetch(hashedMirror + printHashType(h.type) + "/" + h.to_string(HashFormat::Base16, false));
fetch(hashedMirror + printHashAlgo(h.algo) + "/" + h.to_string(HashFormat::Base16, false));
return;
} catch (Error & e) {
debug(e.what());

View file

@ -38,14 +38,14 @@ ContentAddressMethod ContentAddressMethod::parsePrefix(std::string_view & m)
return FileIngestionMethod::Flat;
}
std::string ContentAddressMethod::render(HashType ht) const
std::string ContentAddressMethod::render(HashAlgorithm ha) const
{
return std::visit(overloaded {
[&](const TextIngestionMethod & th) {
return std::string{"text:"} + printHashType(ht);
return std::string{"text:"} + printHashAlgo(ha);
},
[&](const FileIngestionMethod & fim) {
return "fixed:" + makeFileIngestionPrefix(fim) + printHashType(ht);
return "fixed:" + makeFileIngestionPrefix(fim) + printHashAlgo(ha);
}
}, raw);
}
@ -61,13 +61,13 @@ std::string ContentAddress::render() const
+ makeFileIngestionPrefix(method);
},
}, method.raw)
+ this->hash.to_string(HashFormat::Base32, true);
+ this->hash.to_string(HashFormat::Nix32, true);
}
/**
* Parses content address strings up to the hash.
*/
static std::pair<ContentAddressMethod, HashType> parseContentAddressMethodPrefix(std::string_view & rest)
static std::pair<ContentAddressMethod, HashAlgorithm> parseContentAddressMethodPrefix(std::string_view & rest)
{
std::string_view wholeInput { rest };
@ -83,27 +83,27 @@ static std::pair<ContentAddressMethod, HashType> parseContentAddressMethodPrefix
auto hashTypeRaw = splitPrefixTo(rest, ':');
if (!hashTypeRaw)
throw UsageError("content address hash must be in form '<algo>:<hash>', but found: %s", wholeInput);
HashType hashType = parseHashType(*hashTypeRaw);
return hashType;
HashAlgorithm hashAlgo = parseHashAlgo(*hashTypeRaw);
return hashAlgo;
};
// Switch on prefix
if (prefix == "text") {
// No parsing of the ingestion method, "text" only support flat.
HashType hashType = parseHashType_();
HashAlgorithm hashAlgo = parseHashType_();
return {
TextIngestionMethod {},
std::move(hashType),
std::move(hashAlgo),
};
} else if (prefix == "fixed") {
// Parse method
auto method = FileIngestionMethod::Flat;
if (splitPrefix(rest, "r:"))
method = FileIngestionMethod::Recursive;
HashType hashType = parseHashType_();
HashAlgorithm hashAlgo = parseHashType_();
return {
std::move(method),
std::move(hashType),
std::move(hashAlgo),
};
} else
throw UsageError("content address prefix '%s' is unrecognized. Recogonized prefixes are 'text' or 'fixed'", prefix);
@ -113,15 +113,15 @@ ContentAddress ContentAddress::parse(std::string_view rawCa)
{
auto rest = rawCa;
auto [caMethod, hashType] = parseContentAddressMethodPrefix(rest);
auto [caMethod, hashAlgo] = parseContentAddressMethodPrefix(rest);
return ContentAddress {
.method = std::move(caMethod),
.hash = Hash::parseNonSRIUnprefixed(rest, hashType),
.hash = Hash::parseNonSRIUnprefixed(rest, hashAlgo),
};
}
std::pair<ContentAddressMethod, HashType> ContentAddressMethod::parse(std::string_view caMethod)
std::pair<ContentAddressMethod, HashAlgorithm> ContentAddressMethod::parse(std::string_view caMethod)
{
std::string asPrefix = std::string{caMethod} + ":";
// parseContentAddressMethodPrefix takes its argument by reference
@ -144,7 +144,7 @@ std::string renderContentAddress(std::optional<ContentAddress> ca)
std::string ContentAddress::printMethodAlgo() const
{
return method.renderPrefix()
+ printHashType(hash.type);
+ printHashAlgo(hash.algo);
}
bool StoreReferences::empty() const

View file

@ -94,7 +94,7 @@ struct ContentAddressMethod
/**
* Parse a content addressing method and hash type.
*/
static std::pair<ContentAddressMethod, HashType> parse(std::string_view rawCaMethod);
static std::pair<ContentAddressMethod, HashAlgorithm> parse(std::string_view rawCaMethod);
/**
* Render a content addressing method and hash type in a
@ -102,7 +102,7 @@ struct ContentAddressMethod
*
* The rough inverse of `parse()`.
*/
std::string render(HashType ht) const;
std::string render(HashAlgorithm ha) const;
};

View file

@ -400,22 +400,22 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
logger->startWork();
auto pathInfo = [&]() {
// NB: FramedSource must be out of scope before logger->stopWork();
auto [contentAddressMethod, hashType_] = ContentAddressMethod::parse(camStr);
auto hashType = hashType_; // work around clang bug
auto [contentAddressMethod, hashAlgo_] = ContentAddressMethod::parse(camStr);
auto hashAlgo = hashAlgo_; // work around clang bug
FramedSource source(from);
// TODO this is essentially RemoteStore::addCAToStore. Move it up to Store.
return std::visit(overloaded {
[&](const TextIngestionMethod &) {
if (hashType != htSHA256)
if (hashAlgo != HashAlgorithm::SHA256)
throw UnimplementedError("When adding text-hashed data called '%s', only SHA-256 is supported but '%s' was given",
name, printHashType(hashType));
name, printHashAlgo(hashAlgo));
// We could stream this by changing Store
std::string contents = source.drain();
auto path = store->addTextToStore(name, contents, refs, repair);
return store->queryPathInfo(path);
},
[&](const FileIngestionMethod & fim) {
auto path = store->addToStoreFromDump(source, name, fim, hashType, repair, refs);
auto path = store->addToStoreFromDump(source, name, fim, hashAlgo, repair, refs);
return store->queryPathInfo(path);
},
}, contentAddressMethod.raw);
@ -424,7 +424,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
WorkerProto::Serialise<ValidPathInfo>::write(*store, wconn, *pathInfo);
} else {
HashType hashAlgo;
HashAlgorithm hashAlgo;
std::string baseName;
FileIngestionMethod method;
{
@ -440,7 +440,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
hashAlgoRaw = "sha256";
method = FileIngestionMethod::Recursive;
}
hashAlgo = parseHashType(hashAlgoRaw);
hashAlgo = parseHashAlgo(hashAlgoRaw);
}
auto dumpSource = sinkToSource([&](Sink & saved) {
@ -883,7 +883,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
bool repair, dontCheckSigs;
auto path = store->parseStorePath(readString(from));
auto deriver = readString(from);
auto narHash = Hash::parseAny(readString(from), htSHA256);
auto narHash = Hash::parseAny(readString(from), HashAlgorithm::SHA256);
ValidPathInfo info { path, narHash };
if (deriver != "")
info.deriver = store->parseStorePath(deriver);

View file

@ -215,25 +215,25 @@ static StringSet parseStrings(std::istream & str, bool arePaths)
static DerivationOutput parseDerivationOutput(
const StoreDirConfig & store,
std::string_view pathS, std::string_view hashAlgo, std::string_view hashS,
std::string_view pathS, std::string_view hashAlgoStr, std::string_view hashS,
const ExperimentalFeatureSettings & xpSettings)
{
if (hashAlgo != "") {
ContentAddressMethod method = ContentAddressMethod::parsePrefix(hashAlgo);
if (hashAlgoStr != "") {
ContentAddressMethod method = ContentAddressMethod::parsePrefix(hashAlgoStr);
if (method == TextIngestionMethod {})
xpSettings.require(Xp::DynamicDerivations);
const auto hashType = parseHashType(hashAlgo);
const auto hashAlgo = parseHashAlgo(hashAlgoStr);
if (hashS == "impure") {
xpSettings.require(Xp::ImpureDerivations);
if (pathS != "")
throw FormatError("impure derivation output should not specify output path");
return DerivationOutput::Impure {
.method = std::move(method),
.hashType = std::move(hashType),
.hashAlgo = std::move(hashAlgo),
};
} else if (hashS != "") {
validatePath(pathS);
auto hash = Hash::parseNonSRIUnprefixed(hashS, hashType);
auto hash = Hash::parseNonSRIUnprefixed(hashS, hashAlgo);
return DerivationOutput::CAFixed {
.ca = ContentAddress {
.method = std::move(method),
@ -246,7 +246,7 @@ static DerivationOutput parseDerivationOutput(
throw FormatError("content-addressed derivation output should not specify output path");
return DerivationOutput::CAFloating {
.method = std::move(method),
.hashType = std::move(hashType),
.hashAlgo = std::move(hashAlgo),
};
}
} else {
@ -547,7 +547,7 @@ std::string Derivation::unparse(const StoreDirConfig & store, bool maskOutputs,
},
[&](const DerivationOutput::CAFloating & dof) {
s += ','; printUnquotedString(s, "");
s += ','; printUnquotedString(s, dof.method.renderPrefix() + printHashType(dof.hashType));
s += ','; printUnquotedString(s, dof.method.renderPrefix() + printHashAlgo(dof.hashAlgo));
s += ','; printUnquotedString(s, "");
},
[&](const DerivationOutput::Deferred &) {
@ -558,7 +558,7 @@ std::string Derivation::unparse(const StoreDirConfig & store, bool maskOutputs,
[&](const DerivationOutput::Impure & doi) {
// FIXME
s += ','; printUnquotedString(s, "");
s += ','; printUnquotedString(s, doi.method.renderPrefix() + printHashType(doi.hashType));
s += ','; printUnquotedString(s, doi.method.renderPrefix() + printHashAlgo(doi.hashAlgo));
s += ','; printUnquotedString(s, "impure");
}
}, i.second.raw);
@ -631,7 +631,7 @@ DerivationType BasicDerivation::type() const
floatingCAOutputs,
deferredIAOutputs,
impureOutputs;
std::optional<HashType> floatingHashType;
std::optional<HashAlgorithm> floatingHashAlgo;
for (auto & i : outputs) {
std::visit(overloaded {
@ -643,10 +643,10 @@ DerivationType BasicDerivation::type() const
},
[&](const DerivationOutput::CAFloating & dof) {
floatingCAOutputs.insert(i.first);
if (!floatingHashType) {
floatingHashType = dof.hashType;
if (!floatingHashAlgo) {
floatingHashAlgo = dof.hashAlgo;
} else {
if (*floatingHashType != dof.hashType)
if (*floatingHashAlgo != dof.hashAlgo)
throw Error("all floating outputs must use the same hash type");
}
},
@ -774,7 +774,7 @@ DrvHash hashDerivationModulo(Store & store, const Derivation & drv, bool maskOut
std::map<std::string, Hash> outputHashes;
for (const auto & i : drv.outputs) {
auto & dof = std::get<DerivationOutput::CAFixed>(i.second.raw);
auto hash = hashString(htSHA256, "fixed:out:"
auto hash = hashString(HashAlgorithm::SHA256, "fixed:out:"
+ dof.ca.printMethodAlgo() + ":"
+ dof.ca.hash.to_string(HashFormat::Base16, false) + ":"
+ store.printStorePath(dof.path(store, drv.name, i.first)));
@ -825,7 +825,7 @@ DrvHash hashDerivationModulo(Store & store, const Derivation & drv, bool maskOut
}
}
auto hash = hashString(htSHA256, drv.unparse(store, maskOutputs, &inputs2));
auto hash = hashString(HashAlgorithm::SHA256, drv.unparse(store, maskOutputs, &inputs2));
std::map<std::string, Hash> outputHashes;
for (const auto & [outputName, _] : drv.outputs) {
@ -930,7 +930,7 @@ void writeDerivation(Sink & out, const StoreDirConfig & store, const BasicDeriva
},
[&](const DerivationOutput::CAFloating & dof) {
out << ""
<< (dof.method.renderPrefix() + printHashType(dof.hashType))
<< (dof.method.renderPrefix() + printHashAlgo(dof.hashAlgo))
<< "";
},
[&](const DerivationOutput::Deferred &) {
@ -940,7 +940,7 @@ void writeDerivation(Sink & out, const StoreDirConfig & store, const BasicDeriva
},
[&](const DerivationOutput::Impure & doi) {
out << ""
<< (doi.method.renderPrefix() + printHashType(doi.hashType))
<< (doi.method.renderPrefix() + printHashAlgo(doi.hashAlgo))
<< "impure";
},
}, i.second.raw);
@ -958,7 +958,7 @@ void writeDerivation(Sink & out, const StoreDirConfig & store, const BasicDeriva
std::string hashPlaceholder(const OutputNameView outputName)
{
// FIXME: memoize?
return "/" + hashString(htSHA256, concatStrings("nix-output:", outputName)).to_string(HashFormat::Base32, false);
return "/" + hashString(HashAlgorithm::SHA256, concatStrings("nix-output:", outputName)).to_string(HashFormat::Nix32, false);
}
@ -1150,7 +1150,7 @@ void Derivation::checkInvariants(Store & store, const StorePath & drvPath) const
}
const Hash impureOutputHash = hashString(htSHA256, "impure");
const Hash impureOutputHash = hashString(HashAlgorithm::SHA256, "impure");
nlohmann::json DerivationOutput::toJSON(
const StoreDirConfig & store, std::string_view drvName, OutputNameView outputName) const
@ -1167,11 +1167,11 @@ nlohmann::json DerivationOutput::toJSON(
// FIXME print refs?
},
[&](const DerivationOutput::CAFloating & dof) {
res["hashAlgo"] = dof.method.renderPrefix() + printHashType(dof.hashType);
res["hashAlgo"] = dof.method.renderPrefix() + printHashAlgo(dof.hashAlgo);
},
[&](const DerivationOutput::Deferred &) {},
[&](const DerivationOutput::Impure & doi) {
res["hashAlgo"] = doi.method.renderPrefix() + printHashType(doi.hashType);
res["hashAlgo"] = doi.method.renderPrefix() + printHashAlgo(doi.hashAlgo);
res["impure"] = true;
},
}, raw);
@ -1191,15 +1191,15 @@ DerivationOutput DerivationOutput::fromJSON(
for (const auto & [key, _] : json)
keys.insert(key);
auto methodAlgo = [&]() -> std::pair<ContentAddressMethod, HashType> {
std::string hashAlgo = json["hashAlgo"];
auto methodAlgo = [&]() -> std::pair<ContentAddressMethod, HashAlgorithm> {
std::string hashAlgoStr = json["hashAlgo"];
// remaining to parse, will be mutated by parsers
std::string_view s = hashAlgo;
std::string_view s = hashAlgoStr;
ContentAddressMethod method = ContentAddressMethod::parsePrefix(s);
if (method == TextIngestionMethod {})
xpSettings.require(Xp::DynamicDerivations);
auto hashType = parseHashType(s);
return { std::move(method), std::move(hashType) };
auto hashAlgo = parseHashAlgo(s);
return { std::move(method), std::move(hashAlgo) };
};
if (keys == (std::set<std::string_view> { "path" })) {
@ -1209,11 +1209,11 @@ DerivationOutput DerivationOutput::fromJSON(
}
else if (keys == (std::set<std::string_view> { "path", "hashAlgo", "hash" })) {
auto [method, hashType] = methodAlgo();
auto [method, hashAlgo] = methodAlgo();
auto dof = DerivationOutput::CAFixed {
.ca = ContentAddress {
.method = std::move(method),
.hash = Hash::parseNonSRIUnprefixed((std::string) json["hash"], hashType),
.hash = Hash::parseNonSRIUnprefixed((std::string) json["hash"], hashAlgo),
},
};
if (dof.path(store, drvName, outputName) != store.parseStorePath((std::string) json["path"]))
@ -1223,10 +1223,10 @@ DerivationOutput DerivationOutput::fromJSON(
else if (keys == (std::set<std::string_view> { "hashAlgo" })) {
xpSettings.require(Xp::CaDerivations);
auto [method, hashType] = methodAlgo();
auto [method, hashAlgo] = methodAlgo();
return DerivationOutput::CAFloating {
.method = std::move(method),
.hashType = std::move(hashType),
.hashAlgo = std::move(hashAlgo),
};
}
@ -1236,10 +1236,10 @@ DerivationOutput DerivationOutput::fromJSON(
else if (keys == (std::set<std::string_view> { "hashAlgo", "impure" })) {
xpSettings.require(Xp::ImpureDerivations);
auto [method, hashType] = methodAlgo();
auto [method, hashAlgo] = methodAlgo();
return DerivationOutput::Impure {
.method = std::move(method),
.hashType = hashType,
.hashAlgo = hashAlgo,
};
}

View file

@ -75,9 +75,9 @@ struct DerivationOutput
/**
* How the serialization will be hashed
*/
HashType hashType;
HashAlgorithm hashAlgo;
GENERATE_CMP(CAFloating, me->method, me->hashType);
GENERATE_CMP(CAFloating, me->method, me->hashAlgo);
};
/**
@ -102,9 +102,9 @@ struct DerivationOutput
/**
* How the serialization will be hashed
*/
HashType hashType;
HashAlgorithm hashAlgo;
GENERATE_CMP(Impure, me->method, me->hashType);
GENERATE_CMP(Impure, me->method, me->hashAlgo);
};
typedef std::variant<

View file

@ -5,7 +5,7 @@ namespace nix {
std::string DownstreamPlaceholder::render() const
{
return "/" + hash.to_string(HashFormat::Base32, false);
return "/" + hash.to_string(HashFormat::Nix32, false);
}
@ -19,7 +19,7 @@ DownstreamPlaceholder DownstreamPlaceholder::unknownCaOutput(
auto drvName = drvNameWithExtension.substr(0, drvNameWithExtension.size() - 4);
auto clearText = "nix-upstream-output:" + std::string { drvPath.hashPart() } + ":" + outputPathName(drvName, outputName);
return DownstreamPlaceholder {
hashString(htSHA256, clearText)
hashString(HashAlgorithm::SHA256, clearText)
};
}
@ -31,10 +31,10 @@ DownstreamPlaceholder DownstreamPlaceholder::unknownDerivation(
xpSettings.require(Xp::DynamicDerivations);
auto compressed = compressHash(placeholder.hash, 20);
auto clearText = "nix-computed-output:"
+ compressed.to_string(HashFormat::Base32, false)
+ compressed.to_string(HashFormat::Nix32, false)
+ ":" + std::string { outputName };
return DownstreamPlaceholder {
hashString(htSHA256, clearText)
hashString(HashAlgorithm::SHA256, clearText)
};
}

View file

@ -30,7 +30,7 @@ void Store::exportPath(const StorePath & path, Sink & sink)
{
auto info = queryPathInfo(path);
HashSink hashSink(htSHA256);
HashSink hashSink(HashAlgorithm::SHA256);
TeeSink teeSink(sink, hashSink);
narFromPath(path, teeSink);
@ -39,9 +39,9 @@ void Store::exportPath(const StorePath & path, Sink & sink)
filesystem corruption from spreading to other machines.
Don't complain if the stored hash is zero (unknown). */
Hash hash = hashSink.currentHash().first;
if (hash != info->narHash && info->narHash != Hash(info->narHash.type))
if (hash != info->narHash && info->narHash != Hash(info->narHash.algo))
throw Error("hash of path '%s' has changed from '%s' to '%s'!",
printStorePath(path), info->narHash.to_string(HashFormat::Base32, true), hash.to_string(HashFormat::Base32, true));
printStorePath(path), info->narHash.to_string(HashFormat::Nix32, true), hash.to_string(HashFormat::Nix32, true));
teeSink
<< exportMagic
@ -79,7 +79,7 @@ StorePaths Store::importPaths(Source & source, CheckSigsFlag checkSigs)
auto references = CommonProto::Serialise<StorePathSet>::read(*this,
CommonProto::ReadConn { .from = source });
auto deriver = readString(source);
auto narHash = hashString(htSHA256, saved.s);
auto narHash = hashString(HashAlgorithm::SHA256, saved.s);
ValidPathInfo info { path, narHash };
if (deriver != "")

View file

@ -50,7 +50,7 @@ static void makeSymlink(const Path & link, const Path & target)
void LocalStore::addIndirectRoot(const Path & path)
{
std::string hash = hashString(htSHA1, path).to_string(HashFormat::Base32, false);
std::string hash = hashString(HashAlgorithm::SHA1, path).to_string(HashFormat::Nix32, false);
Path realRoot = canonPath(fmt("%1%/%2%/auto/%3%", stateDir, gcRootsDir, hash));
makeSymlink(realRoot, path);
}

View file

@ -267,13 +267,13 @@ struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Stor
{ unsupported("queryPathFromHashPart"); }
StorePath addToStore(
std::string_view name,
const Path & srcPath,
FileIngestionMethod method,
HashType hashAlgo,
PathFilter & filter,
RepairFlag repair,
const StorePathSet & references) override
std::string_view name,
const Path & srcPath,
FileIngestionMethod method,
HashAlgorithm hashAlgo,
PathFilter & filter,
RepairFlag repair,
const StorePathSet & references) override
{ unsupported("addToStore"); }
StorePath addTextToStore(

View file

@ -955,7 +955,7 @@ void LocalStore::registerValidPaths(const ValidPathInfos & infos)
StorePathSet paths;
for (auto & [_, i] : infos) {
assert(i.narHash.type == htSHA256);
assert(i.narHash.algo == HashAlgorithm::SHA256);
if (isValidPath_(*state, i.path))
updatePathInfo(*state, i);
else
@ -1069,7 +1069,7 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
/* While restoring the path from the NAR, compute the hash
of the NAR. */
HashSink hashSink(htSHA256);
HashSink hashSink(HashAlgorithm::SHA256);
TeeSource wrapperSource { source, hashSink };
@ -1080,7 +1080,7 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
if (hashResult.first != info.narHash)
throw Error("hash mismatch importing path '%s';\n specified: %s\n got: %s",
printStorePath(info.path), info.narHash.to_string(HashFormat::Base32, true), hashResult.first.to_string(HashFormat::Base32, true));
printStorePath(info.path), info.narHash.to_string(HashFormat::Nix32, true), hashResult.first.to_string(HashFormat::Nix32, true));
if (hashResult.second != info.narSize)
throw Error("size mismatch importing path '%s';\n specified: %s\n got: %s",
@ -1090,14 +1090,14 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
auto & specified = *info.ca;
auto actualHash = hashCAPath(
specified.method,
specified.hash.type,
specified.hash.algo,
info.path
);
if (specified.hash != actualHash.hash) {
throw Error("ca hash mismatch importing path '%s';\n specified: %s\n got: %s",
printStorePath(info.path),
specified.hash.to_string(HashFormat::Base32, true),
actualHash.hash.to_string(HashFormat::Base32, true));
specified.hash.to_string(HashFormat::Nix32, true),
actualHash.hash.to_string(HashFormat::Nix32, true));
}
}
@ -1116,7 +1116,7 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
StorePath LocalStore::addToStoreFromDump(Source & source0, std::string_view name,
FileIngestionMethod method, HashType hashAlgo, RepairFlag repair, const StorePathSet & references)
FileIngestionMethod method, HashAlgorithm hashAlgo, RepairFlag repair, const StorePathSet & references)
{
/* For computing the store path. */
auto hashSink = std::make_unique<HashSink>(hashAlgo);
@ -1220,8 +1220,8 @@ StorePath LocalStore::addToStoreFromDump(Source & source0, std::string_view name
/* For computing the nar hash. In recursive SHA-256 mode, this
is the same as the store hash, so no need to do it again. */
auto narHash = std::pair { hash, size };
if (method != FileIngestionMethod::Recursive || hashAlgo != htSHA256) {
HashSink narSink { htSHA256 };
if (method != FileIngestionMethod::Recursive || hashAlgo != HashAlgorithm::SHA256) {
HashSink narSink { HashAlgorithm::SHA256 };
dumpPath(realPath, narSink);
narHash = narSink.finish();
}
@ -1252,7 +1252,7 @@ StorePath LocalStore::addTextToStore(
std::string_view s,
const StorePathSet & references, RepairFlag repair)
{
auto hash = hashString(htSHA256, s);
auto hash = hashString(HashAlgorithm::SHA256, s);
auto dstPath = makeTextPath(name, TextInfo {
.hash = hash,
.references = references,
@ -1278,7 +1278,7 @@ StorePath LocalStore::addTextToStore(
StringSink sink;
dumpString(s, sink);
auto narHash = hashString(htSHA256, sink.s);
auto narHash = hashString(HashAlgorithm::SHA256, sink.s);
optimisePath(realPath, repair);
@ -1389,7 +1389,7 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
for (auto & link : readDirectory(linksDir)) {
printMsg(lvlTalkative, "checking contents of '%s'", link.name);
Path linkPath = linksDir + "/" + link.name;
std::string hash = hashPath(htSHA256, linkPath).first.to_string(HashFormat::Base32, false);
std::string hash = hashPath(HashAlgorithm::SHA256, linkPath).first.to_string(HashFormat::Nix32, false);
if (hash != link.name) {
printError("link '%s' was modified! expected hash '%s', got '%s'",
linkPath, link.name, hash);
@ -1406,7 +1406,7 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
printInfo("checking store hashes...");
Hash nullHash(htSHA256);
Hash nullHash(HashAlgorithm::SHA256);
for (auto & i : validPaths) {
try {
@ -1415,14 +1415,14 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
/* Check the content hash (optionally - slow). */
printMsg(lvlTalkative, "checking contents of '%s'", printStorePath(i));
auto hashSink = HashSink(info->narHash.type);
auto hashSink = HashSink(info->narHash.algo);
dumpPath(Store::toRealPath(i), hashSink);
auto current = hashSink.finish();
if (info->narHash != nullHash && info->narHash != current.first) {
printError("path '%s' was modified! expected hash '%s', got '%s'",
printStorePath(i), info->narHash.to_string(HashFormat::Base32, true), current.first.to_string(HashFormat::Base32, true));
printStorePath(i), info->narHash.to_string(HashFormat::Nix32, true), current.first.to_string(HashFormat::Nix32, true));
if (repair) repairPath(i); else errors = true;
} else {
@ -1697,20 +1697,20 @@ void LocalStore::queryRealisationUncached(const DrvOutput & id,
}
ContentAddress LocalStore::hashCAPath(
const ContentAddressMethod & method, const HashType & hashType,
const ContentAddressMethod & method, const HashAlgorithm & hashAlgo,
const StorePath & path)
{
return hashCAPath(method, hashType, Store::toRealPath(path), path.hashPart());
return hashCAPath(method, hashAlgo, Store::toRealPath(path), path.hashPart());
}
ContentAddress LocalStore::hashCAPath(
const ContentAddressMethod & method,
const HashType & hashType,
const HashAlgorithm & hashAlgo,
const Path & path,
const std::string_view pathHash
)
{
HashModuloSink caSink ( hashType, std::string(pathHash) );
HashModuloSink caSink ( hashAlgo, std::string(pathHash) );
std::visit(overloaded {
[&](const TextIngestionMethod &) {
readFile(path, caSink);

View file

@ -178,7 +178,7 @@ public:
RepairFlag repair, CheckSigsFlag checkSigs) override;
StorePath addToStoreFromDump(Source & dump, std::string_view name,
FileIngestionMethod method, HashType hashAlgo, RepairFlag repair, const StorePathSet & references) override;
FileIngestionMethod method, HashAlgorithm hashAlgo, RepairFlag repair, const StorePathSet & references) override;
StorePath addTextToStore(
std::string_view name,
@ -353,12 +353,12 @@ private:
// XXX: Make a generic `Store` method
ContentAddress hashCAPath(
const ContentAddressMethod & method,
const HashType & hashType,
const HashAlgorithm & hashAlgo,
const StorePath & path);
ContentAddress hashCAPath(
const ContentAddressMethod & method,
const HashType & hashType,
const HashAlgorithm & hashAlgo,
const Path & path,
const std::string_view pathHash
);

View file

@ -43,7 +43,7 @@ std::map<StorePath, StorePath> makeContentAddressed(
sink.s = rewriteStrings(sink.s, rewrites);
HashModuloSink hashModuloSink(htSHA256, oldHashPart);
HashModuloSink hashModuloSink(HashAlgorithm::SHA256, oldHashPart);
hashModuloSink(sink.s);
auto narModuloHash = hashModuloSink.finish().first;
@ -66,7 +66,7 @@ std::map<StorePath, StorePath> makeContentAddressed(
rsink2(sink.s);
rsink2.flush();
info.narHash = hashString(htSHA256, sink2.s);
info.narHash = hashString(HashAlgorithm::SHA256, sink2.s);
info.narSize = sink.s.size();
StringSource source(sink2.s);

View file

@ -333,9 +333,9 @@ public:
(std::string(info->path.name()))
(narInfo ? narInfo->url : "", narInfo != 0)
(narInfo ? narInfo->compression : "", narInfo != 0)
(narInfo && narInfo->fileHash ? narInfo->fileHash->to_string(HashFormat::Base32, true) : "", narInfo && narInfo->fileHash)
(narInfo && narInfo->fileHash ? narInfo->fileHash->to_string(HashFormat::Nix32, true) : "", narInfo && narInfo->fileHash)
(narInfo ? narInfo->fileSize : 0, narInfo != 0 && narInfo->fileSize)
(info->narHash.to_string(HashFormat::Base32, true))
(info->narHash.to_string(HashFormat::Nix32, true))
(info->narSize)
(concatStringsSep(" ", info->shortRefs()))
(info->deriver ? std::string(info->deriver->to_string()) : "", (bool) info->deriver)

View file

@ -113,11 +113,11 @@ std::string NarInfo::to_string(const Store & store) const
res += "URL: " + url + "\n";
assert(compression != "");
res += "Compression: " + compression + "\n";
assert(fileHash && fileHash->type == htSHA256);
res += "FileHash: " + fileHash->to_string(HashFormat::Base32, true) + "\n";
assert(fileHash && fileHash->algo == HashAlgorithm::SHA256);
res += "FileHash: " + fileHash->to_string(HashFormat::Nix32, true) + "\n";
res += "FileSize: " + std::to_string(fileSize) + "\n";
assert(narHash.type == htSHA256);
res += "NarHash: " + narHash.to_string(HashFormat::Base32, true) + "\n";
assert(narHash.algo == HashAlgorithm::SHA256);
res += "NarHash: " + narHash.to_string(HashFormat::Nix32, true) + "\n";
res += "NarSize: " + std::to_string(narSize) + "\n";
res += "References: " + concatStringsSep(" ", shortRefs()) + "\n";

View file

@ -146,17 +146,17 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats,
Also note that if `path' is a symlink, then we're hashing the
contents of the symlink (i.e. the result of readlink()), not
the contents of the target (which may not even exist). */
Hash hash = hashPath(htSHA256, path).first;
debug("'%1%' has hash '%2%'", path, hash.to_string(HashFormat::Base32, true));
Hash hash = hashPath(HashAlgorithm::SHA256, path).first;
debug("'%1%' has hash '%2%'", path, hash.to_string(HashFormat::Nix32, true));
/* Check if this is a known hash. */
Path linkPath = linksDir + "/" + hash.to_string(HashFormat::Base32, false);
Path linkPath = linksDir + "/" + hash.to_string(HashFormat::Nix32, false);
/* Maybe delete the link, if it has been corrupted. */
if (pathExists(linkPath)) {
auto stLink = lstat(linkPath);
if (st.st_size != stLink.st_size
|| (repair && hash != hashPath(htSHA256, linkPath).first))
|| (repair && hash != hashPath(HashAlgorithm::SHA256, linkPath).first))
{
// XXX: Consider overwriting linkPath with our valid version.
warn("removing corrupted link '%s'", linkPath);

View file

@ -146,7 +146,7 @@ static nlohmann::json pathInfoToJSON(
auto info = store.queryPathInfo(storePath);
auto & jsonPath = jsonList.emplace_back(
info->toJSON(store, false, HashFormat::Base32));
info->toJSON(store, false, HashFormat::Nix32));
// Add the path to the object whose metadata we are including.
jsonPath["path"] = store.printStorePath(storePath);

View file

@ -31,9 +31,9 @@ std::string ValidPathInfo::fingerprint(const Store & store) const
throw Error("cannot calculate fingerprint of path '%s' because its size is not known",
store.printStorePath(path));
return
"1;" + store.printStorePath(path) + ";"
+ narHash.to_string(HashFormat::Base32, true) + ";"
+ std::to_string(narSize) + ";"
"1;" + store.printStorePath(path) + ";"
+ narHash.to_string(HashFormat::Nix32, true) + ";"
+ std::to_string(narSize) + ";"
+ concatStringsSep(",", store.printStorePathSet(references));
}

View file

@ -49,7 +49,7 @@ std::pair<StorePathSet, HashResult> scanForReferences(
const std::string & path,
const StorePathSet & refs)
{
HashSink hashSink { htSHA256 };
HashSink hashSink { HashAlgorithm::SHA256 };
auto found = scanForReferences(hashSink, path, refs);
auto hash = hashSink.finish();
return std::pair<StorePathSet, HashResult>(found, hash);

View file

@ -35,7 +35,7 @@ StorePath::StorePath(std::string_view _baseName)
}
StorePath::StorePath(const Hash & hash, std::string_view _name)
: baseName((hash.to_string(HashFormat::Base32, false) + "-").append(std::string(_name)))
: baseName((hash.to_string(HashFormat::Nix32, false) + "-").append(std::string(_name)))
{
checkName(baseName, name());
}
@ -49,7 +49,7 @@ StorePath StorePath::dummy("ffffffffffffffffffffffffffffffff-x");
StorePath StorePath::random(std::string_view name)
{
Hash hash(htSHA1);
Hash hash(HashAlgorithm::SHA1);
randombytes_buf(hash.hash, hash.hashSize);
return StorePath(hash, name);
}

View file

@ -417,12 +417,12 @@ std::optional<StorePath> RemoteStore::queryPathFromHashPart(const std::string &
ref<const ValidPathInfo> RemoteStore::addCAToStore(
Source & dump,
std::string_view name,
ContentAddressMethod caMethod,
HashType hashType,
const StorePathSet & references,
RepairFlag repair)
Source & dump,
std::string_view name,
ContentAddressMethod caMethod,
HashAlgorithm hashAlgo,
const StorePathSet & references,
RepairFlag repair)
{
std::optional<ConnectionHandle> conn_(getConnection());
auto & conn = *conn_;
@ -432,7 +432,7 @@ ref<const ValidPathInfo> RemoteStore::addCAToStore(
conn->to
<< WorkerProto::Op::AddToStore
<< name
<< caMethod.render(hashType);
<< caMethod.render(hashAlgo);
WorkerProto::write(*this, *conn, references);
conn->to << repair;
@ -453,9 +453,9 @@ ref<const ValidPathInfo> RemoteStore::addCAToStore(
std::visit(overloaded {
[&](const TextIngestionMethod & thm) -> void {
if (hashType != htSHA256)
if (hashAlgo != HashAlgorithm::SHA256)
throw UnimplementedError("When adding text-hashed data called '%s', only SHA-256 is supported but '%s' was given",
name, printHashType(hashType));
name, printHashAlgo(hashAlgo));
std::string s = dump.drain();
conn->to << WorkerProto::Op::AddTextToStore << name << s;
WorkerProto::write(*this, *conn, references);
@ -465,9 +465,9 @@ ref<const ValidPathInfo> RemoteStore::addCAToStore(
conn->to
<< WorkerProto::Op::AddToStore
<< name
<< ((hashType == htSHA256 && fim == FileIngestionMethod::Recursive) ? 0 : 1) /* backwards compatibility hack */
<< ((hashAlgo == HashAlgorithm::SHA256 && fim == FileIngestionMethod::Recursive) ? 0 : 1) /* backwards compatibility hack */
<< (fim == FileIngestionMethod::Recursive ? 1 : 0)
<< printHashType(hashType);
<< printHashAlgo(hashAlgo);
try {
conn->to.written = 0;
@ -503,9 +503,9 @@ ref<const ValidPathInfo> RemoteStore::addCAToStore(
StorePath RemoteStore::addToStoreFromDump(Source & dump, std::string_view name,
FileIngestionMethod method, HashType hashType, RepairFlag repair, const StorePathSet & references)
FileIngestionMethod method, HashAlgorithm hashAlgo, RepairFlag repair, const StorePathSet & references)
{
return addCAToStore(dump, name, method, hashType, references, repair)->path;
return addCAToStore(dump, name, method, hashAlgo, references, repair)->path;
}
@ -610,7 +610,7 @@ StorePath RemoteStore::addTextToStore(
RepairFlag repair)
{
StringSource source(s);
return addCAToStore(source, name, TextIngestionMethod {}, htSHA256, references, repair)->path;
return addCAToStore(source, name, TextIngestionMethod {}, HashAlgorithm::SHA256, references, repair)->path;
}
void RemoteStore::registerDrvOutput(const Realisation & info)

View file

@ -74,18 +74,18 @@ public:
* Add a content-addressable store path. `dump` will be drained.
*/
ref<const ValidPathInfo> addCAToStore(
Source & dump,
std::string_view name,
ContentAddressMethod caMethod,
HashType hashType,
const StorePathSet & references,
RepairFlag repair);
Source & dump,
std::string_view name,
ContentAddressMethod caMethod,
HashAlgorithm hashAlgo,
const StorePathSet & references,
RepairFlag repair);
/**
* Add a content-addressable store path. Does not support references. `dump` will be drained.
*/
StorePath addToStoreFromDump(Source & dump, std::string_view name,
FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256, RepairFlag repair = NoRepair, const StorePathSet & references = StorePathSet()) override;
FileIngestionMethod method = FileIngestionMethod::Recursive, HashAlgorithm hashAlgo = HashAlgorithm::SHA256, RepairFlag repair = NoRepair, const StorePathSet & references = StorePathSet()) override;
void addToStore(const ValidPathInfo & info, Source & nar,
RepairFlag repair, CheckSigsFlag checkSigs) override;

View file

@ -153,7 +153,7 @@ StorePath StoreDirConfig::makeStorePath(std::string_view type,
/* e.g., "source:sha256:1abc...:/nix/store:foo.tar.gz" */
auto s = std::string(type) + ":" + std::string(hash)
+ ":" + storeDir + ":" + std::string(name);
auto h = compressHash(hashString(htSHA256, s), 20);
auto h = compressHash(hashString(HashAlgorithm::SHA256, s), 20);
return StorePath(h, name);
}
@ -191,12 +191,12 @@ static std::string makeType(
StorePath StoreDirConfig::makeFixedOutputPath(std::string_view name, const FixedOutputInfo & info) const
{
if (info.hash.type == htSHA256 && info.method == FileIngestionMethod::Recursive) {
if (info.hash.algo == HashAlgorithm::SHA256 && info.method == FileIngestionMethod::Recursive) {
return makeStorePath(makeType(*this, "source", info.references), info.hash, name);
} else {
assert(info.references.size() == 0);
return makeStorePath("output:out",
hashString(htSHA256,
hashString(HashAlgorithm::SHA256,
"fixed:out:"
+ makeFileIngestionPrefix(info.method)
+ info.hash.to_string(HashFormat::Base16, true) + ":"),
@ -207,7 +207,7 @@ StorePath StoreDirConfig::makeFixedOutputPath(std::string_view name, const Fixed
StorePath StoreDirConfig::makeTextPath(std::string_view name, const TextInfo & info) const
{
assert(info.hash.type == htSHA256);
assert(info.hash.algo == HashAlgorithm::SHA256);
return makeStorePath(
makeType(*this, "text", StoreReferences {
.others = info.references,
@ -233,11 +233,11 @@ StorePath StoreDirConfig::makeFixedOutputPathFromCA(std::string_view name, const
std::pair<StorePath, Hash> StoreDirConfig::computeStorePathFromDump(
Source & dump,
std::string_view name,
FileIngestionMethod method,
HashType hashAlgo,
const StorePathSet & references) const
Source & dump,
std::string_view name,
FileIngestionMethod method,
HashAlgorithm hashAlgo,
const StorePathSet & references) const
{
HashSink sink(hashAlgo);
dump.drainInto(sink);
@ -257,20 +257,20 @@ StorePath StoreDirConfig::computeStorePathForText(
const StorePathSet & references) const
{
return makeTextPath(name, TextInfo {
.hash = hashString(htSHA256, s),
.hash = hashString(HashAlgorithm::SHA256, s),
.references = references,
});
}
StorePath Store::addToStore(
std::string_view name,
const Path & _srcPath,
FileIngestionMethod method,
HashType hashAlgo,
PathFilter & filter,
RepairFlag repair,
const StorePathSet & references)
std::string_view name,
const Path & _srcPath,
FileIngestionMethod method,
HashAlgorithm hashAlgo,
PathFilter & filter,
RepairFlag repair,
const StorePathSet & references)
{
Path srcPath(absPath(_srcPath));
auto source = sinkToSource([&](Sink & sink) {
@ -405,10 +405,10 @@ digraph graphname {
}
*/
ValidPathInfo Store::addToStoreSlow(std::string_view name, const Path & srcPath,
FileIngestionMethod method, HashType hashAlgo,
std::optional<Hash> expectedCAHash)
FileIngestionMethod method, HashAlgorithm hashAlgo,
std::optional<Hash> expectedCAHash)
{
HashSink narHashSink { htSHA256 };
HashSink narHashSink { HashAlgorithm::SHA256 };
HashSink caHashSink { hashAlgo };
/* Note that fileSink and unusualHashTee must be mutually exclusive, since
@ -417,7 +417,7 @@ ValidPathInfo Store::addToStoreSlow(std::string_view name, const Path & srcPath,
RegularFileSink fileSink { caHashSink };
TeeSink unusualHashTee { narHashSink, caHashSink };
auto & narSink = method == FileIngestionMethod::Recursive && hashAlgo != htSHA256
auto & narSink = method == FileIngestionMethod::Recursive && hashAlgo != HashAlgorithm::SHA256
? static_cast<Sink &>(unusualHashTee)
: narHashSink;
@ -445,7 +445,7 @@ ValidPathInfo Store::addToStoreSlow(std::string_view name, const Path & srcPath,
finish. */
auto [narHash, narSize] = narHashSink.finish();
auto hash = method == FileIngestionMethod::Recursive && hashAlgo == htSHA256
auto hash = method == FileIngestionMethod::Recursive && hashAlgo == HashAlgorithm::SHA256
? narHash
: caHashSink.finish().first;
@ -1205,7 +1205,7 @@ std::optional<ValidPathInfo> decodeValidPathInfo(const Store & store, std::istre
if (!hashGiven) {
std::string s;
getline(str, s);
auto narHash = Hash::parseAny(s, htSHA256);
auto narHash = Hash::parseAny(s, HashAlgorithm::SHA256);
getline(str, s);
auto narSize = string2Int<uint64_t>(s);
if (!narSize) throw Error("number expected");

View file

@ -427,13 +427,13 @@ public:
* libutil/archive.hh).
*/
virtual StorePath addToStore(
std::string_view name,
const Path & srcPath,
FileIngestionMethod method = FileIngestionMethod::Recursive,
HashType hashAlgo = htSHA256,
PathFilter & filter = defaultPathFilter,
RepairFlag repair = NoRepair,
const StorePathSet & references = StorePathSet());
std::string_view name,
const Path & srcPath,
FileIngestionMethod method = FileIngestionMethod::Recursive,
HashAlgorithm hashAlgo = HashAlgorithm::SHA256,
PathFilter & filter = defaultPathFilter,
RepairFlag repair = NoRepair,
const StorePathSet & references = StorePathSet());
/**
* Copy the contents of a path to the store and register the
@ -441,8 +441,8 @@ public:
* memory.
*/
ValidPathInfo addToStoreSlow(std::string_view name, const Path & srcPath,
FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256,
std::optional<Hash> expectedCAHash = {});
FileIngestionMethod method = FileIngestionMethod::Recursive, HashAlgorithm hashAlgo = HashAlgorithm::SHA256,
std::optional<Hash> expectedCAHash = {});
/**
* Like addToStore(), but the contents of the path are contained
@ -454,8 +454,8 @@ public:
* \todo remove?
*/
virtual StorePath addToStoreFromDump(Source & dump, std::string_view name,
FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256, RepairFlag repair = NoRepair,
const StorePathSet & references = StorePathSet())
FileIngestionMethod method = FileIngestionMethod::Recursive, HashAlgorithm hashAlgo = HashAlgorithm::SHA256, RepairFlag repair = NoRepair,
const StorePathSet & references = StorePathSet())
{ unsupported("addToStoreFromDump"); }
/**

View file

@ -98,7 +98,7 @@ struct StoreDirConfig : public Config
Source & dump,
std::string_view name,
FileIngestionMethod method = FileIngestionMethod::Recursive,
HashType hashAlgo = htSHA256,
HashAlgorithm hashAlgo = HashAlgorithm::SHA256,
const StorePathSet & references = {}) const;
/**

View file

@ -160,7 +160,7 @@ void WorkerProto::Serialise<ValidPathInfo>::write(const StoreDirConfig & store,
UnkeyedValidPathInfo WorkerProto::Serialise<UnkeyedValidPathInfo>::read(const StoreDirConfig & store, ReadConn conn)
{
auto deriver = readString(conn.from);
auto narHash = Hash::parseAny(readString(conn.from), htSHA256);
auto narHash = Hash::parseAny(readString(conn.from), HashAlgorithm::SHA256);
UnkeyedValidPathInfo info(narHash);
if (deriver != "") info.deriver = store.parseStorePath(deriver);
info.references = WorkerProto::Serialise<StorePathSet>::read(store, conn);

View file

@ -544,36 +544,70 @@ nlohmann::json Args::toJSON()
return res;
}
static void hashTypeCompleter(AddCompletions & completions, size_t index, std::string_view prefix)
static void hashFormatCompleter(AddCompletions & completions, size_t index, std::string_view prefix)
{
for (auto & type : hashTypes)
if (hasPrefix(type, prefix))
completions.add(type);
for (auto & format : hashFormats) {
if (hasPrefix(format, prefix)) {
completions.add(format);
}
}
}
Args::Flag Args::Flag::mkHashTypeFlag(std::string && longName, HashType * ht)
{
return Flag {
.longName = std::move(longName),
.description = "hash algorithm ('md5', 'sha1', 'sha256', or 'sha512')",
.labels = {"hash-algo"},
.handler = {[ht](std::string s) {
*ht = parseHashType(s);
}},
.completer = hashTypeCompleter,
Args::Flag Args::Flag::mkHashFormatFlagWithDefault(std::string &&longName, HashFormat * hf) {
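// Callers must pass a flag already initialized to SRI, keeping the "Default: 'sri'" text in the description accurate.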
assert(*hf == nix::HashFormat::SRI);
return Flag{
.longName = std::move(longName),
.description = "hash format ('base16', 'nix32', 'base64', 'sri'). Default: 'sri'",
.labels = {"hash-format"},
.handler = {[hf](std::string s) {
*hf = parseHashFormat(s);
}},
.completer = hashFormatCompleter,
};
}
Args::Flag Args::Flag::mkHashTypeOptFlag(std::string && longName, std::optional<HashType> * oht)
Args::Flag Args::Flag::mkHashFormatOptFlag(std::string && longName, std::optional<HashFormat> * ohf) {
return Flag{
.longName = std::move(longName),
.description = "hash format ('base16', 'nix32', 'base64', 'sri').",
.labels = {"hash-format"},
.handler = {[ohf](std::string s) {
*ohf = std::optional<HashFormat>{parseHashFormat(s)};
}},
.completer = hashFormatCompleter,
};
}
static void hashAlgoCompleter(AddCompletions & completions, size_t index, std::string_view prefix)
{
return Flag {
.longName = std::move(longName),
.description = "hash algorithm ('md5', 'sha1', 'sha256', or 'sha512'). Optional as can also be gotten from SRI hash itself.",
.labels = {"hash-algo"},
.handler = {[oht](std::string s) {
*oht = std::optional<HashType> { parseHashType(s) };
}},
.completer = hashTypeCompleter,
for (auto & algo : hashAlgorithms)
if (hasPrefix(algo, prefix))
completions.add(algo);
}
Args::Flag Args::Flag::mkHashAlgoFlag(std::string && longName, HashAlgorithm * ha)
{
return Flag{
.longName = std::move(longName),
.description = "hash algorithm ('md5', 'sha1', 'sha256', or 'sha512')",
.labels = {"hash-algo"},
.handler = {[ha](std::string s) {
*ha = parseHashAlgo(s);
}},
.completer = hashAlgoCompleter,
};
}
Args::Flag Args::Flag::mkHashAlgoOptFlag(std::string && longName, std::optional<HashAlgorithm> * oha)
{
return Flag{
.longName = std::move(longName),
.description = "hash algorithm ('md5', 'sha1', 'sha256', or 'sha512'). Optional as can also be gotten from SRI hash itself.",
.labels = {"hash-algo"},
.handler = {[oha](std::string s) {
*oha = std::optional<HashAlgorithm>{parseHashAlgo(s)};
}},
.completer = hashAlgoCompleter,
};
}

View file

@ -14,7 +14,8 @@
namespace nix {
enum HashType : char;
enum struct HashAlgorithm : char;
enum struct HashFormat : int;
class MultiCommand;
@ -175,8 +176,10 @@ protected:
std::optional<ExperimentalFeature> experimentalFeature;
static Flag mkHashTypeFlag(std::string && longName, HashType * ht);
static Flag mkHashTypeOptFlag(std::string && longName, std::optional<HashType> * oht);
static Flag mkHashAlgoFlag(std::string && longName, HashAlgorithm * ha);
static Flag mkHashAlgoOptFlag(std::string && longName, std::optional<HashAlgorithm> * oha);
static Flag mkHashFormatFlagWithDefault(std::string && longName, HashFormat * hf);
static Flag mkHashFormatOptFlag(std::string && longName, std::optional<HashFormat> * ohf);
};
/**

View file

@ -106,7 +106,7 @@ void parse(
std::string hashs = getString(source, 20);
left -= 20;
Hash hash(htSHA1);
Hash hash(HashAlgorithm::SHA1);
std::copy(hashs.begin(), hashs.end(), hash.hash);
hook(name, TreeEntry {
@ -241,12 +241,12 @@ Mode dump(
TreeEntry dumpHash(
HashType ht,
SourceAccessor & accessor, const CanonPath & path, PathFilter & filter)
HashAlgorithm ha,
SourceAccessor & accessor, const CanonPath & path, PathFilter & filter)
{
std::function<DumpHook> hook;
hook = [&](const CanonPath & path) -> TreeEntry {
auto hashSink = HashSink(ht);
auto hashSink = HashSink(ha);
auto mode = dump(accessor, path, hashSink, hook, filter);
auto hash = hashSink.finish().first;
return {

View file

@ -123,9 +123,9 @@ Mode dump(
* A smaller wrapper around `dump`.
*/
TreeEntry dumpHash(
HashType ht,
SourceAccessor & accessor, const CanonPath & path,
PathFilter & filter = defaultPathFilter);
HashAlgorithm ha,
SourceAccessor & accessor, const CanonPath & path,
PathFilter & filter = defaultPathFilter);
/**
* A line from the output of `git ls-remote --symref`.

View file

@ -16,23 +16,24 @@
namespace nix {
static size_t regularHashSize(HashType type) {
static size_t regularHashSize(HashAlgorithm type) {
switch (type) {
case htMD5: return md5HashSize;
case htSHA1: return sha1HashSize;
case htSHA256: return sha256HashSize;
case htSHA512: return sha512HashSize;
case HashAlgorithm::MD5: return md5HashSize;
case HashAlgorithm::SHA1: return sha1HashSize;
case HashAlgorithm::SHA256: return sha256HashSize;
case HashAlgorithm::SHA512: return sha512HashSize;
}
abort();
}
std::set<std::string> hashTypes = { "md5", "sha1", "sha256", "sha512" };
const std::set<std::string> hashAlgorithms = {"md5", "sha1", "sha256", "sha512" };
const std::set<std::string> hashFormats = {"base64", "nix32", "base16", "sri" };
Hash::Hash(HashType type) : type(type)
Hash::Hash(HashAlgorithm algo) : algo(algo)
{
hashSize = regularHashSize(type);
hashSize = regularHashSize(algo);
assert(hashSize <= maxHashSize);
memset(hash, 0, maxHashSize);
}
@ -81,7 +82,7 @@ static std::string printHash16(const Hash & hash)
// omitted: E O U T
const std::string base32Chars = "0123456789abcdfghijklmnpqrsvwxyz";
const std::string nix32Chars = "0123456789abcdfghijklmnpqrsvwxyz";
static std::string printHash32(const Hash & hash)
@ -100,7 +101,7 @@ static std::string printHash32(const Hash & hash)
unsigned char c =
(hash.hash[i] >> j)
| (i >= hash.hashSize - 1 ? 0 : hash.hash[i + 1] << (8 - j));
s.push_back(base32Chars[c & 0x1f]);
s.push_back(nix32Chars[c & 0x1f]);
}
return s;
@ -109,23 +110,23 @@ static std::string printHash32(const Hash & hash)
std::string printHash16or32(const Hash & hash)
{
assert(hash.type);
return hash.to_string(hash.type == htMD5 ? HashFormat::Base16 : HashFormat::Base32, false);
assert(static_cast<char>(hash.algo));
return hash.to_string(hash.algo == HashAlgorithm::MD5 ? HashFormat::Base16 : HashFormat::Nix32, false);
}
std::string Hash::to_string(HashFormat hashFormat, bool includeType) const
std::string Hash::to_string(HashFormat hashFormat, bool includeAlgo) const
{
std::string s;
if (hashFormat == HashFormat::SRI || includeType) {
s += printHashType(type);
if (hashFormat == HashFormat::SRI || includeAlgo) {
s += printHashAlgo(algo);
s += hashFormat == HashFormat::SRI ? '-' : ':';
}
switch (hashFormat) {
case HashFormat::Base16:
s += printHash16(*this);
break;
case HashFormat::Base32:
case HashFormat::Nix32:
s += printHash32(*this);
break;
case HashFormat::Base64:
@ -136,7 +137,7 @@ std::string Hash::to_string(HashFormat hashFormat, bool includeType) const
return s;
}
Hash Hash::dummy(htSHA256);
Hash Hash::dummy(HashAlgorithm::SHA256);
Hash Hash::parseSRI(std::string_view original) {
auto rest = original;
@ -145,18 +146,18 @@ Hash Hash::parseSRI(std::string_view original) {
auto hashRaw = splitPrefixTo(rest, '-');
if (!hashRaw)
throw BadHash("hash '%s' is not SRI", original);
HashType parsedType = parseHashType(*hashRaw);
HashAlgorithm parsedType = parseHashAlgo(*hashRaw);
return Hash(rest, parsedType, true);
}
// Mutates the string to eliminate the prefixes when found
static std::pair<std::optional<HashType>, bool> getParsedTypeAndSRI(std::string_view & rest)
static std::pair<std::optional<HashAlgorithm>, bool> getParsedTypeAndSRI(std::string_view & rest)
{
bool isSRI = false;
// Parse the hash type before the separator, if there was one.
std::optional<HashType> optParsedType;
std::optional<HashAlgorithm> optParsedType;
{
auto hashRaw = splitPrefixTo(rest, ':');
@ -166,7 +167,7 @@ static std::pair<std::optional<HashType>, bool> getParsedTypeAndSRI(std::string_
isSRI = true;
}
if (hashRaw)
optParsedType = parseHashType(*hashRaw);
optParsedType = parseHashAlgo(*hashRaw);
}
return {optParsedType, isSRI};
@ -185,29 +186,29 @@ Hash Hash::parseAnyPrefixed(std::string_view original)
return Hash(rest, *optParsedType, isSRI);
}
Hash Hash::parseAny(std::string_view original, std::optional<HashType> optType)
Hash Hash::parseAny(std::string_view original, std::optional<HashAlgorithm> optAlgo)
{
auto rest = original;
auto [optParsedType, isSRI] = getParsedTypeAndSRI(rest);
// Either the string or user must provide the type, if they both do they
// must agree.
if (!optParsedType && !optType)
if (!optParsedType && !optAlgo)
throw BadHash("hash '%s' does not include a type, nor is the type otherwise known from context", rest);
else if (optParsedType && optType && *optParsedType != *optType)
throw BadHash("hash '%s' should have type '%s'", original, printHashType(*optType));
else if (optParsedType && optAlgo && *optParsedType != *optAlgo)
throw BadHash("hash '%s' should have type '%s'", original, printHashAlgo(*optAlgo));
HashType hashType = optParsedType ? *optParsedType : *optType;
return Hash(rest, hashType, isSRI);
HashAlgorithm hashAlgo = optParsedType ? *optParsedType : *optAlgo;
return Hash(rest, hashAlgo, isSRI);
}
Hash Hash::parseNonSRIUnprefixed(std::string_view s, HashType type)
Hash Hash::parseNonSRIUnprefixed(std::string_view s, HashAlgorithm algo)
{
return Hash(s, type, false);
return Hash(s, algo, false);
}
Hash::Hash(std::string_view rest, HashType type, bool isSRI)
: Hash(type)
Hash::Hash(std::string_view rest, HashAlgorithm algo, bool isSRI)
: Hash(algo)
{
if (!isSRI && rest.size() == base16Len()) {
@ -230,8 +231,8 @@ Hash::Hash(std::string_view rest, HashType type, bool isSRI)
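// nix32 decoding: characters are consumed from the end of the string, each contributing
// 5 bits starting at bit position n * 5 of the resulting hash.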
for (unsigned int n = 0; n < rest.size(); ++n) {
char c = rest[rest.size() - n - 1];
unsigned char digit;
for (digit = 0; digit < base32Chars.size(); ++digit) /* !!! slow */
if (base32Chars[digit] == c) break;
for (digit = 0; digit < nix32Chars.size(); ++digit) /* !!! slow */
if (nix32Chars[digit] == c) break;
if (digit >= 32)
throw BadHash("invalid base-32 hash '%s'", rest);
unsigned int b = n * 5;
@ -257,19 +258,19 @@ Hash::Hash(std::string_view rest, HashType type, bool isSRI)
}
else
throw BadHash("hash '%s' has wrong length for hash type '%s'", rest, printHashType(this->type));
throw BadHash("hash '%s' has wrong length for hash algorithm '%s'", rest, printHashAlgo(this->algo));
}
Hash newHashAllowEmpty(std::string_view hashStr, std::optional<HashType> ht)
Hash newHashAllowEmpty(std::string_view hashStr, std::optional<HashAlgorithm> ha)
{
if (hashStr.empty()) {
if (!ht)
if (!ha)
throw BadHash("empty hash requires explicit hash type");
Hash h(*ht);
Hash h(*ha);
warn("found empty hash, assuming '%s'", h.to_string(HashFormat::SRI, true));
return h;
} else
return Hash::parseAny(hashStr, ht);
return Hash::parseAny(hashStr, ha);
}
@ -282,58 +283,58 @@ union Ctx
};
static void start(HashType ht, Ctx & ctx)
static void start(HashAlgorithm ha, Ctx & ctx)
{
if (ht == htMD5) MD5_Init(&ctx.md5);
else if (ht == htSHA1) SHA1_Init(&ctx.sha1);
else if (ht == htSHA256) SHA256_Init(&ctx.sha256);
else if (ht == htSHA512) SHA512_Init(&ctx.sha512);
if (ha == HashAlgorithm::MD5) MD5_Init(&ctx.md5);
else if (ha == HashAlgorithm::SHA1) SHA1_Init(&ctx.sha1);
else if (ha == HashAlgorithm::SHA256) SHA256_Init(&ctx.sha256);
else if (ha == HashAlgorithm::SHA512) SHA512_Init(&ctx.sha512);
}
static void update(HashType ht, Ctx & ctx,
std::string_view data)
static void update(HashAlgorithm ha, Ctx & ctx,
std::string_view data)
{
if (ht == htMD5) MD5_Update(&ctx.md5, data.data(), data.size());
else if (ht == htSHA1) SHA1_Update(&ctx.sha1, data.data(), data.size());
else if (ht == htSHA256) SHA256_Update(&ctx.sha256, data.data(), data.size());
else if (ht == htSHA512) SHA512_Update(&ctx.sha512, data.data(), data.size());
if (ha == HashAlgorithm::MD5) MD5_Update(&ctx.md5, data.data(), data.size());
else if (ha == HashAlgorithm::SHA1) SHA1_Update(&ctx.sha1, data.data(), data.size());
else if (ha == HashAlgorithm::SHA256) SHA256_Update(&ctx.sha256, data.data(), data.size());
else if (ha == HashAlgorithm::SHA512) SHA512_Update(&ctx.sha512, data.data(), data.size());
}
static void finish(HashType ht, Ctx & ctx, unsigned char * hash)
static void finish(HashAlgorithm ha, Ctx & ctx, unsigned char * hash)
{
if (ht == htMD5) MD5_Final(hash, &ctx.md5);
else if (ht == htSHA1) SHA1_Final(hash, &ctx.sha1);
else if (ht == htSHA256) SHA256_Final(hash, &ctx.sha256);
else if (ht == htSHA512) SHA512_Final(hash, &ctx.sha512);
if (ha == HashAlgorithm::MD5) MD5_Final(hash, &ctx.md5);
else if (ha == HashAlgorithm::SHA1) SHA1_Final(hash, &ctx.sha1);
else if (ha == HashAlgorithm::SHA256) SHA256_Final(hash, &ctx.sha256);
else if (ha == HashAlgorithm::SHA512) SHA512_Final(hash, &ctx.sha512);
}
Hash hashString(HashType ht, std::string_view s)
Hash hashString(HashAlgorithm ha, std::string_view s)
{
Ctx ctx;
Hash hash(ht);
start(ht, ctx);
update(ht, ctx, s);
finish(ht, ctx, hash.hash);
Hash hash(ha);
start(ha, ctx);
update(ha, ctx, s);
finish(ha, ctx, hash.hash);
return hash;
}
Hash hashFile(HashType ht, const Path & path)
Hash hashFile(HashAlgorithm ha, const Path & path)
{
HashSink sink(ht);
HashSink sink(ha);
readFile(path, sink);
return sink.finish().first;
}
HashSink::HashSink(HashType ht) : ht(ht)
HashSink::HashSink(HashAlgorithm ha) : ha(ha)
{
ctx = new Ctx;
bytes = 0;
start(ht, *ctx);
start(ha, *ctx);
}
HashSink::~HashSink()
@ -345,14 +346,14 @@ HashSink::~HashSink()
void HashSink::writeUnbuffered(std::string_view data)
{
bytes += data.size();
update(ht, *ctx, data);
update(ha, *ctx, data);
}
HashResult HashSink::finish()
{
flush();
Hash hash(ht);
nix::finish(ht, *ctx, hash.hash);
Hash hash(ha);
nix::finish(ha, *ctx, hash.hash);
return HashResult(hash, bytes);
}
@ -360,16 +361,16 @@ HashResult HashSink::currentHash()
{
flush();
Ctx ctx2 = *ctx;
Hash hash(ht);
nix::finish(ht, ctx2, hash.hash);
Hash hash(ha);
nix::finish(ha, ctx2, hash.hash);
return HashResult(hash, bytes);
}
HashResult hashPath(
HashType ht, const Path & path, PathFilter & filter)
HashAlgorithm ha, const Path & path, PathFilter & filter)
{
HashSink sink(ht);
HashSink sink(ha);
dumpPath(path, sink, filter);
return sink.finish();
}
@ -377,7 +378,7 @@ HashResult hashPath(
Hash compressHash(const Hash & hash, unsigned int newSize)
{
Hash h(hash.type);
Hash h(hash.algo);
h.hashSize = newSize;
for (unsigned int i = 0; i < hash.hashSize; ++i)
h.hash[i % newSize] ^= hash.hash[i];
@ -388,7 +389,11 @@ Hash compressHash(const Hash & hash, unsigned int newSize)
std::optional<HashFormat> parseHashFormatOpt(std::string_view hashFormatName)
{
if (hashFormatName == "base16") return HashFormat::Base16;
if (hashFormatName == "base32") return HashFormat::Base32;
if (hashFormatName == "nix32") return HashFormat::Nix32;
if (hashFormatName == "base32") {
warn(R"("base32" is a deprecated alias for hash format "nix32".)");
return HashFormat::Nix32;
}
if (hashFormatName == "base64") return HashFormat::Base64;
if (hashFormatName == "sri") return HashFormat::SRI;
return std::nullopt;
@ -407,8 +412,8 @@ std::string_view printHashFormat(HashFormat HashFormat)
switch (HashFormat) {
case HashFormat::Base64:
return "base64";
case HashFormat::Base32:
return "base32";
case HashFormat::Nix32:
return "nix32";
case HashFormat::Base16:
return "base16";
case HashFormat::SRI:
@ -420,31 +425,31 @@ std::string_view printHashFormat(HashFormat HashFormat)
}
}
std::optional<HashType> parseHashTypeOpt(std::string_view s)
std::optional<HashAlgorithm> parseHashAlgoOpt(std::string_view s)
{
if (s == "md5") return htMD5;
if (s == "sha1") return htSHA1;
if (s == "sha256") return htSHA256;
if (s == "sha512") return htSHA512;
if (s == "md5") return HashAlgorithm::MD5;
if (s == "sha1") return HashAlgorithm::SHA1;
if (s == "sha256") return HashAlgorithm::SHA256;
if (s == "sha512") return HashAlgorithm::SHA512;
return std::nullopt;
}
HashType parseHashType(std::string_view s)
HashAlgorithm parseHashAlgo(std::string_view s)
{
auto opt_h = parseHashTypeOpt(s);
auto opt_h = parseHashAlgoOpt(s);
if (opt_h)
return *opt_h;
else
throw UsageError("unknown hash algorithm '%1%', expect 'md5', 'sha1', 'sha256', or 'sha512'", s);
}
std::string_view printHashType(HashType ht)
std::string_view printHashAlgo(HashAlgorithm ha)
{
switch (ht) {
case htMD5: return "md5";
case htSHA1: return "sha1";
case htSHA256: return "sha256";
case htSHA512: return "sha512";
switch (ha) {
case HashAlgorithm::MD5: return "md5";
case HashAlgorithm::SHA1: return "sha1";
case HashAlgorithm::SHA256: return "sha256";
case HashAlgorithm::SHA512: return "sha512";
default:
// illegal hash type enum value internally, as opposed to external input
// which should be validated with a nice error message.

View file

@ -12,7 +12,7 @@ namespace nix {
MakeError(BadHash, Error);
enum HashType : char { htMD5 = 42, htSHA1, htSHA256, htSHA512 };
enum struct HashAlgorithm : char { MD5 = 42, SHA1, SHA256, SHA512 };
const int md5HashSize = 16;
@ -20,9 +20,9 @@ const int sha1HashSize = 20;
const int sha256HashSize = 32;
const int sha512HashSize = 64;
extern std::set<std::string> hashTypes;
extern const std::set<std::string> hashAlgorithms;
extern const std::string base32Chars;
extern const std::string nix32Chars;
/**
* @brief Enumeration representing the hash formats.
@ -31,8 +31,8 @@ enum struct HashFormat : int {
/// @brief Base 64 encoding.
/// @see [IETF RFC 4648, section 4](https://datatracker.ietf.org/doc/html/rfc4648#section-4).
Base64,
/// @brief Nix-specific base-32 encoding. @see base32Chars
Base32,
/// @brief Nix-specific base-32 encoding. @see nix32Chars
Nix32,
/// @brief Lowercase hexadecimal encoding. @see base16Chars
Base16,
/// @brief "<hash algo>:<Base 64 hash>", format of the SRI integrity attribute.
@ -40,6 +40,7 @@ enum struct HashFormat : int {
SRI
};
extern const std::set<std::string> hashFormats;
struct Hash
{
@ -47,12 +48,12 @@ struct Hash
size_t hashSize = 0;
uint8_t hash[maxHashSize] = {};
HashType type;
HashAlgorithm algo;
/**
* Create a zero-filled hash object.
*/
Hash(HashType type);
explicit Hash(HashAlgorithm algo);
/**
* Parse the hash from a string representation in the format
@ -61,7 +62,7 @@ struct Hash
* is not present, then the hash type must be specified in the
* string.
*/
static Hash parseAny(std::string_view s, std::optional<HashType> type);
static Hash parseAny(std::string_view s, std::optional<HashAlgorithm> optAlgo);
/**
* Parse a hash from a string representation like the above, except the
@ -73,7 +74,7 @@ struct Hash
* Parse a plain hash that must not have any prefix indicating the type.
* The type is passed in to disambiguate.
*/
static Hash parseNonSRIUnprefixed(std::string_view s, HashType type);
static Hash parseNonSRIUnprefixed(std::string_view s, HashAlgorithm algo);
static Hash parseSRI(std::string_view original);
@ -82,7 +83,7 @@ private:
* The type must be provided; the string view must not include the <type>
* prefix. `isSRI` helps disambiguate the various base-* encodings.
*/
Hash(std::string_view s, HashType type, bool isSRI);
Hash(std::string_view s, HashAlgorithm algo, bool isSRI);
public:
/**
@ -103,7 +104,7 @@ public:
/**
* Returns the length of a base-16 representation of this hash.
*/
size_t base16Len() const
[[nodiscard]] size_t base16Len() const
{
return hashSize * 2;
}
@ -111,7 +112,7 @@ public:
/**
* Returns the length of a base-32 representation of this hash.
*/
size_t base32Len() const
[[nodiscard]] size_t base32Len() const
{
return (hashSize * 8 - 1) / 5 + 1;
}
@ -119,24 +120,24 @@ public:
/**
* Returns the length of a base-64 representation of this hash.
*/
size_t base64Len() const
[[nodiscard]] size_t base64Len() const
{
return ((4 * hashSize / 3) + 3) & ~3;
}
/**
* Return a string representation of the hash, in base-16, base-32
* or base-64. By default, this is prefixed by the hash type
* or base-64. By default, this is prefixed by the hash algo
* (e.g. "sha256:").
*/
std::string to_string(HashFormat hashFormat, bool includeType) const;
[[nodiscard]] std::string to_string(HashFormat hashFormat, bool includeAlgo) const;
std::string gitRev() const
[[nodiscard]] std::string gitRev() const
{
return to_string(HashFormat::Base16, false);
}
std::string gitShortRev() const
[[nodiscard]] std::string gitShortRev() const
{
return std::string(to_string(HashFormat::Base16, false), 0, 7);
}
@ -147,7 +148,7 @@ public:
/**
* Helper that defaults empty hashes to the 0 hash.
*/
Hash newHashAllowEmpty(std::string_view hashStr, std::optional<HashType> ht);
Hash newHashAllowEmpty(std::string_view hashStr, std::optional<HashAlgorithm> ha);
/**
* Print a hash in base-16 if it's MD5, or base-32 otherwise.
@ -157,14 +158,14 @@ std::string printHash16or32(const Hash & hash);
/**
* Compute the hash of the given string.
*/
Hash hashString(HashType ht, std::string_view s);
Hash hashString(HashAlgorithm ha, std::string_view s);
/**
* Compute the hash of the given file, hashing its contents directly.
*
* (Metadata, such as the executable permission bit, is ignored.)
*/
Hash hashFile(HashType ht, const Path & path);
Hash hashFile(HashAlgorithm ha, const Path & path);
/**
* Compute the hash of the given path, serializing as a Nix Archive and
@ -173,8 +174,8 @@ Hash hashFile(HashType ht, const Path & path);
* The hash is defined as (essentially) hashString(ht, dumpPath(path)).
*/
typedef std::pair<Hash, uint64_t> HashResult;
HashResult hashPath(HashType ht, const Path & path,
PathFilter & filter = defaultPathFilter);
HashResult hashPath(HashAlgorithm ha, const Path & path,
PathFilter & filter = defaultPathFilter);
/**
* Compress a hash to the specified number of bytes by cyclically
@ -200,17 +201,17 @@ std::string_view printHashFormat(HashFormat hashFormat);
/**
* Parse a string representing a hash type.
*/
HashType parseHashType(std::string_view s);
HashAlgorithm parseHashAlgo(std::string_view s);
/**
* Will return nothing on parse error
*/
std::optional<HashType> parseHashTypeOpt(std::string_view s);
std::optional<HashAlgorithm> parseHashAlgoOpt(std::string_view s);
/**
* And the reverse.
*/
std::string_view printHashType(HashType ht);
std::string_view printHashAlgo(HashAlgorithm ha);
union Ctx;
@ -223,12 +224,12 @@ struct AbstractHashSink : virtual Sink
class HashSink : public BufferedSink, public AbstractHashSink
{
private:
HashType ht;
HashAlgorithm ha;
Ctx * ctx;
uint64_t bytes;
public:
HashSink(HashType ht);
HashSink(HashAlgorithm ha);
HashSink(const HashSink & h);
~HashSink();
void writeUnbuffered(std::string_view data) override;

View file

@ -23,8 +23,8 @@ static void search(
static bool isBase32[256];
std::call_once(initialised, [](){
for (unsigned int i = 0; i < 256; ++i) isBase32[i] = false;
for (unsigned int i = 0; i < base32Chars.size(); ++i)
isBase32[(unsigned char) base32Chars[i]] = true;
for (unsigned int i = 0; i < nix32Chars.size(); ++i)
isBase32[(unsigned char) nix32Chars[i]] = true;
});
for (size_t i = 0; i + refLength <= s.size(); ) {
@ -110,8 +110,8 @@ void RewritingSink::flush()
prev.clear();
}
HashModuloSink::HashModuloSink(HashType ht, const std::string & modulus)
: hashSink(ht)
HashModuloSink::HashModuloSink(HashAlgorithm ha, const std::string & modulus)
: hashSink(ha)
, rewritingSink(modulus, std::string(modulus.size(), 0), hashSink)
{
}

View file

@ -46,7 +46,7 @@ struct HashModuloSink : AbstractHashSink
HashSink hashSink;
RewritingSink rewritingSink;
HashModuloSink(HashType ht, const std::string & modulus);
HashModuloSink(HashAlgorithm ha, const std::string & modulus);
void operator () (std::string_view data) override;

View file

@ -39,11 +39,11 @@ void SourceAccessor::readFile(
}
Hash SourceAccessor::hashPath(
const CanonPath & path,
PathFilter & filter,
HashType ht)
const CanonPath & path,
PathFilter & filter,
HashAlgorithm ha)
{
HashSink sink(ht);
HashSink sink(ha);
dumpPath(path, sink, filter);
return sink.finish().first;
}

View file

@ -97,9 +97,9 @@ struct SourceAccessor
PathFilter & filter = defaultPathFilter);
Hash hashPath(
const CanonPath & path,
PathFilter & filter = defaultPathFilter,
HashType ht = htSHA256);
const CanonPath & path,
PathFilter & filter = defaultPathFilter,
HashAlgorithm ha = HashAlgorithm::SHA256);
/**
* Return a corresponding path in the root filesystem, if

View file

@ -193,7 +193,7 @@ static void opAddFixed(Strings opFlags, Strings opArgs)
if (opArgs.empty())
throw UsageError("first argument must be hash algorithm");
HashType hashAlgo = parseHashType(opArgs.front());
HashAlgorithm hashAlgo = parseHashAlgo(opArgs.front());
opArgs.pop_front();
for (auto & i : opArgs)
@ -214,7 +214,7 @@ static void opPrintFixedPath(Strings opFlags, Strings opArgs)
throw UsageError("'--print-fixed-path' requires three arguments");
Strings::iterator i = opArgs.begin();
HashType hashAlgo = parseHashType(*i++);
HashAlgorithm hashAlgo = parseHashAlgo(*i++);
std::string hash = *i++;
std::string name = *i++;
@ -405,8 +405,8 @@ static void opQuery(Strings opFlags, Strings opArgs)
for (auto & j : maybeUseOutputs(store->followLinksToStorePath(i), useOutput, forceRealise)) {
auto info = store->queryPathInfo(j);
if (query == qHash) {
assert(info->narHash.type == htSHA256);
cout << fmt("%s\n", info->narHash.to_string(HashFormat::Base32, true));
assert(info->narHash.algo == HashAlgorithm::SHA256);
cout << fmt("%s\n", info->narHash.to_string(HashFormat::Nix32, true));
} else if (query == qSize)
cout << fmt("%d\n", info->narSize);
}
@ -541,7 +541,7 @@ static void registerValidity(bool reregister, bool hashGiven, bool canonicalise)
if (canonicalise)
canonicalisePathMetaData(store->printStorePath(info->path), {});
if (!hashGiven) {
HashResult hash = hashPath(htSHA256, store->printStorePath(info->path));
HashResult hash = hashPath(HashAlgorithm::SHA256, store->printStorePath(info->path));
info->narHash = hash.first;
info->narSize = hash.second;
}
@ -763,14 +763,14 @@ static void opVerifyPath(Strings opFlags, Strings opArgs)
auto path = store->followLinksToStorePath(i);
printMsg(lvlTalkative, "checking path '%s'...", store->printStorePath(path));
auto info = store->queryPathInfo(path);
HashSink sink(info->narHash.type);
HashSink sink(info->narHash.algo);
store->narFromPath(path, sink);
auto current = sink.finish();
if (current.first != info->narHash) {
printError("path '%s' was modified! expected hash '%s', got '%s'",
store->printStorePath(path),
info->narHash.to_string(HashFormat::Base32, true),
current.first.to_string(HashFormat::Base32, true));
info->narHash.to_string(HashFormat::Nix32, true),
current.first.to_string(HashFormat::Nix32, true));
status = 1;
}
}
@ -898,7 +898,7 @@ static void opServe(Strings opFlags, Strings opArgs)
out << info->narSize // downloadSize
<< info->narSize;
if (GET_PROTOCOL_MINOR(clientVersion) >= 4)
out << info->narHash.to_string(HashFormat::Base32, true)
out << info->narHash.to_string(HashFormat::Nix32, true)
<< renderContentAddress(info->ca)
<< info->sigs;
} catch (InvalidPath &) {
@ -979,7 +979,7 @@ static void opServe(Strings opFlags, Strings opArgs)
auto deriver = readString(in);
ValidPathInfo info {
store->parseStorePath(path),
Hash::parseAny(readString(in), htSHA256),
Hash::parseAny(readString(in), HashAlgorithm::SHA256),
};
if (deriver != "")
info.deriver = store->parseStorePath(deriver);

View file

@ -60,11 +60,11 @@ struct CmdAddToStore : MixDryRun, StoreCommand
StringSink sink;
dumpPath(path, sink);
auto narHash = hashString(htSHA256, sink.s);
auto narHash = hashString(HashAlgorithm::SHA256, sink.s);
Hash hash = narHash;
if (ingestionMethod == FileIngestionMethod::Flat) {
HashSink hsink(htSHA256);
HashSink hsink(HashAlgorithm::SHA256);
readFile(path, hsink);
hash = hsink.finish().first;
}

View file

@ -8,16 +8,21 @@
using namespace nix;
/**
* Base for `nix hash file` (deprecated), `nix hash path` and `nix-hash` (legacy).
*
* Deprecation Issue: https://github.com/NixOS/nix/issues/8876
*/
struct CmdHashBase : Command
{
FileIngestionMethod mode;
HashFormat hashFormat = HashFormat::SRI;
bool truncate = false;
HashType ht = htSHA256;
HashAlgorithm ha = HashAlgorithm::SHA256;
std::vector<std::string> paths;
std::optional<std::string> modulus;
CmdHashBase(FileIngestionMethod mode) : mode(mode)
explicit CmdHashBase(FileIngestionMethod mode) : mode(mode)
{
addFlag({
.longName = "sri",
@ -34,7 +39,7 @@ struct CmdHashBase : Command
addFlag({
.longName = "base32",
.description = "Print the hash in base-32 (Nix-specific) format.",
.handler = {&hashFormat, HashFormat::Base32},
.handler = {&hashFormat, HashFormat::Nix32},
});
addFlag({
@ -43,7 +48,7 @@ struct CmdHashBase : Command
.handler = {&hashFormat, HashFormat::Base16},
});
addFlag(Flag::mkHashTypeFlag("type", &ht));
addFlag(Flag::mkHashAlgoFlag("type", &ha));
#if 0
addFlag({
@ -79,9 +84,9 @@ struct CmdHashBase : Command
std::unique_ptr<AbstractHashSink> hashSink;
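// With a modulus, hash as if every occurrence of the modulus string were zeroed out (used for self-references).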
if (modulus)
hashSink = std::make_unique<HashModuloSink>(ht, *modulus);
hashSink = std::make_unique<HashModuloSink>(ha, *modulus);
else
hashSink = std::make_unique<HashSink>(ht);
hashSink = std::make_unique<HashSink>(ha);
switch (mode) {
case FileIngestionMethod::Flat:
@ -102,41 +107,90 @@ struct CmdHashBase : Command
struct CmdToBase : Command
{
HashFormat hashFormat;
std::optional<HashType> ht;
std::optional<HashAlgorithm> ht;
std::vector<std::string> args;
CmdToBase(HashFormat hashFormat) : hashFormat(hashFormat)
{
addFlag(Flag::mkHashTypeOptFlag("type", &ht));
addFlag(Flag::mkHashAlgoOptFlag("type", &ht));
expectArgs("strings", &args);
}
std::string description() override
{
return fmt("convert a hash to %s representation",
return fmt("convert a hash to %s representation (deprecated, use `nix hash convert` instead)",
hashFormat == HashFormat::Base16 ? "base-16" :
hashFormat == HashFormat::Base32 ? "base-32" :
hashFormat == HashFormat::Nix32 ? "base-32" :
hashFormat == HashFormat::Base64 ? "base-64" :
"SRI");
}
void run() override
{
warn("The old format conversion sub commands of `nix hash` where deprecated in favor of `nix hash convert`.");
for (auto s : args)
logger->cout(Hash::parseAny(s, ht).to_string(hashFormat, hashFormat == HashFormat::SRI));
}
};
/**
* `nix hash convert`
*/
struct CmdHashConvert : Command
{
std::optional<HashFormat> from;
HashFormat to;
std::optional<HashAlgorithm> algo;
std::vector<std::string> hashStrings;
CmdHashConvert(): to(HashFormat::SRI) {
addFlag(Args::Flag::mkHashFormatOptFlag("from", &from));
addFlag(Args::Flag::mkHashFormatFlagWithDefault("to", &to));
addFlag(Args::Flag::mkHashAlgoOptFlag("algo", &algo));
expectArgs({
.label = "hashes",
.handler = {&hashStrings},
});
}
std::string description() override
{
std::string descr( "convert between different hash formats. Choose from: ");
auto iter = hashFormats.begin();
assert(iter != hashFormats.end());
descr += *iter++;
while (iter != hashFormats.end()) {
descr += ", " + *iter++;
}
return descr;
}
Category category() override { return catUtility; }
void run() override {
for (const auto& s: hashStrings) {
Hash h = Hash::parseAny(s, algo);
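// If --from was given, validate it by re-rendering the parsed hash in that format;
// a mismatch with the input string means the hash was not in the claimed format.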
if (from && h.to_string(*from, from == HashFormat::SRI) != s) {
auto from_as_string = printHashFormat(*from);
throw BadHash("input hash '%s' does not have the expected format '--from %s'", s, from_as_string);
}
logger->cout(h.to_string(to, to == HashFormat::SRI));
}
}
};
struct CmdHash : NixMultiCommand
{
CmdHash()
: NixMultiCommand(
"hash",
{
{"convert", []() { return make_ref<CmdHashConvert>();}},
{"file", []() { return make_ref<CmdHashBase>(FileIngestionMethod::Flat);; }},
{"path", []() { return make_ref<CmdHashBase>(FileIngestionMethod::Recursive); }},
{"to-base16", []() { return make_ref<CmdToBase>(HashFormat::Base16); }},
{"to-base32", []() { return make_ref<CmdToBase>(HashFormat::Base32); }},
{"to-base32", []() { return make_ref<CmdToBase>(HashFormat::Nix32); }},
{"to-base64", []() { return make_ref<CmdToBase>(HashFormat::Base64); }},
{"to-sri", []() { return make_ref<CmdToBase>(HashFormat::SRI); }},
})
@ -155,7 +209,10 @@ static auto rCmdHash = registerCommand<CmdHash>("hash");
/* Legacy nix-hash command. */
static int compatNixHash(int argc, char * * argv)
{
std::optional<HashType> ht;
// TODO: Enable this warning once `nix hash convert` is no longer hidden behind experimental flags.
// warn("`nix-hash` has been deprecated in favor of `nix hash convert`.");
std::optional<HashAlgorithm> ha;
bool flat = false;
HashFormat hashFormat = HashFormat::Base16;
bool truncate = false;
@ -169,13 +226,13 @@ static int compatNixHash(int argc, char * * argv)
printVersion("nix-hash");
else if (*arg == "--flat") flat = true;
else if (*arg == "--base16") hashFormat = HashFormat::Base16;
else if (*arg == "--base32") hashFormat = HashFormat::Base32;
else if (*arg == "--base32") hashFormat = HashFormat::Nix32;
else if (*arg == "--base64") hashFormat = HashFormat::Base64;
else if (*arg == "--sri") hashFormat = HashFormat::SRI;
else if (*arg == "--truncate") truncate = true;
else if (*arg == "--type") {
std::string s = getArg(*arg, arg, end);
ht = parseHashType(s);
ha = parseHashAlgo(s);
}
else if (*arg == "--to-base16") {
op = opTo;
@ -183,7 +240,7 @@ static int compatNixHash(int argc, char * * argv)
}
else if (*arg == "--to-base32") {
op = opTo;
hashFormat = HashFormat::Base32;
hashFormat = HashFormat::Nix32;
}
else if (*arg == "--to-base64") {
op = opTo;
@ -202,8 +259,8 @@ static int compatNixHash(int argc, char * * argv)
if (op == opHash) {
CmdHashBase cmd(flat ? FileIngestionMethod::Flat : FileIngestionMethod::Recursive);
if (!ht.has_value()) ht = htMD5;
cmd.ht = ht.value();
if (!ha.has_value()) ha = HashAlgorithm::MD5;
cmd.ha = ha.value();
cmd.hashFormat = hashFormat;
cmd.truncate = truncate;
cmd.paths = ss;
@ -213,7 +270,7 @@ static int compatNixHash(int argc, char * * argv)
else {
CmdToBase cmd(hashFormat);
cmd.args = ss;
if (ht.has_value()) cmd.ht = ht;
if (ha.has_value()) cmd.ht = ha;
cmd.run();
}

View file

@ -46,13 +46,13 @@ std::string resolveMirrorUrl(EvalState & state, const std::string & url)
}
std::tuple<StorePath, Hash> prefetchFile(
ref<Store> store,
std::string_view url,
std::optional<std::string> name,
HashType hashType,
std::optional<Hash> expectedHash,
bool unpack,
bool executable)
ref<Store> store,
std::string_view url,
std::optional<std::string> name,
HashAlgorithm hashAlgo,
std::optional<Hash> expectedHash,
bool unpack,
bool executable)
{
auto ingestionMethod = unpack || executable ? FileIngestionMethod::Recursive : FileIngestionMethod::Flat;
@ -69,7 +69,7 @@ std::tuple<StorePath, Hash> prefetchFile(
/* If an expected hash is given, the file may already exist in
the store. */
if (expectedHash) {
hashType = expectedHash->type;
hashAlgo = expectedHash->algo;
storePath = store->makeFixedOutputPath(*name, FixedOutputInfo {
.method = ingestionMethod,
.hash = *expectedHash,
@ -122,7 +122,7 @@ std::tuple<StorePath, Hash> prefetchFile(
Activity act(*logger, lvlChatty, actUnknown,
fmt("adding '%s' to the store", url));
auto info = store->addToStoreSlow(*name, tmpFile, ingestionMethod, hashType, expectedHash);
auto info = store->addToStoreSlow(*name, tmpFile, ingestionMethod, hashAlgo, expectedHash);
storePath = info.path;
assert(info.ca);
hash = info.ca->hash;
@ -134,7 +134,7 @@ std::tuple<StorePath, Hash> prefetchFile(
static int main_nix_prefetch_url(int argc, char * * argv)
{
{
HashType ht = htSHA256;
HashAlgorithm ha = HashAlgorithm::SHA256;
std::vector<std::string> args;
bool printPath = getEnv("PRINT_PATH") == "1";
bool fromExpr = false;
@ -155,7 +155,7 @@ static int main_nix_prefetch_url(int argc, char * * argv)
printVersion("nix-prefetch-url");
else if (*arg == "--type") {
auto s = getArg(*arg, arg, end);
ht = parseHashType(s);
ha = parseHashAlgo(s);
}
else if (*arg == "--print-path")
printPath = true;
@ -233,10 +233,10 @@ static int main_nix_prefetch_url(int argc, char * * argv)
std::optional<Hash> expectedHash;
if (args.size() == 2)
expectedHash = Hash::parseAny(args[1], ht);
expectedHash = Hash::parseAny(args[1], ha);
auto [storePath, hash] = prefetchFile(
store, resolveMirrorUrl(*state, url), name, ht, expectedHash, unpack, executable);
store, resolveMirrorUrl(*state, url), name, ha, expectedHash, unpack, executable);
stopProgressBar();
@ -258,7 +258,7 @@ struct CmdStorePrefetchFile : StoreCommand, MixJSON
std::string url;
bool executable = false;
std::optional<std::string> name;
HashType hashType = htSHA256;
HashAlgorithm hashAlgo = HashAlgorithm::SHA256;
std::optional<Hash> expectedHash;
CmdStorePrefetchFile()
@ -275,11 +275,11 @@ struct CmdStorePrefetchFile : StoreCommand, MixJSON
.description = "The expected hash of the file.",
.labels = {"hash"},
.handler = {[&](std::string s) {
expectedHash = Hash::parseAny(s, hashType);
expectedHash = Hash::parseAny(s, hashAlgo);
}}
});
addFlag(Flag::mkHashTypeFlag("hash-type", &hashType));
addFlag(Flag::mkHashAlgoFlag("hash-type", &hashAlgo));
addFlag({
.longName = "executable",
@ -305,7 +305,7 @@ struct CmdStorePrefetchFile : StoreCommand, MixJSON
}
void run(ref<Store> store) override
{
auto [storePath, hash] = prefetchFile(store, url, name, hashType, expectedHash, false, executable);
auto [storePath, hash] = prefetchFile(store, url, name, hashAlgo, expectedHash, false, executable);
if (json) {
auto res = nlohmann::json::object();

View file

@ -216,7 +216,7 @@ struct ProfileManifest
StringSink sink;
dumpPath(tempDir, sink);
auto narHash = hashString(htSHA256, sink.s);
auto narHash = hashString(HashAlgorithm::SHA256, sink.s);
ValidPathInfo info {
*store,

View file

@ -98,7 +98,7 @@ struct CmdVerify : StorePathsCommand
if (!noContents) {
auto hashSink = HashSink(info->narHash.type);
auto hashSink = HashSink(info->narHash.algo);
store->narFromPath(info->path, hashSink);
@ -109,8 +109,8 @@ struct CmdVerify : StorePathsCommand
act2.result(resCorruptedPath, store->printStorePath(info->path));
printError("path '%s' was modified! expected hash '%s', got '%s'",
store->printStorePath(info->path),
info->narHash.to_string(HashFormat::Base32, true),
hash.first.to_string(HashFormat::Base32, true));
info->narHash.to_string(HashFormat::Nix32, true),
hash.first.to_string(HashFormat::Nix32, true));
}
}

View file

@ -81,27 +81,106 @@ rm $TEST_ROOT/hash-path/hello
ln -s x $TEST_ROOT/hash-path/hello
try2 md5 "f78b733a68f5edbdf9413899339eaa4a"
# Conversion.
# Conversion with `nix hash`, `nix-hash`, and `nix hash convert`
try3() {
# $1 = hash algo
# $2 = expected hash in base16
# $3 = expected hash in base32
# $4 = expected hash in base64
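# Each block below performs the same conversion with `nix hash convert`, the legacy
# `nix-hash` flags, and the deprecated `nix hash to-*` commands, and checks every
# result against the expected value.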
h64=$(nix hash convert --algo "$1" --to base64 "$2")
[ "$h64" = "$4" ]
h64=$(nix-hash --type "$1" --to-base64 "$2")
[ "$h64" = "$4" ]
# Deprecated experimental interface (`nix hash to-*`)
h64=$(nix hash to-base64 --type "$1" "$2")
[ "$h64" = "$4" ]
sri=$(nix hash convert --algo "$1" --to sri "$2")
[ "$sri" = "$1-$4" ]
sri=$(nix-hash --type "$1" --to-sri "$2")
[ "$sri" = "$1-$4" ]
sri=$(nix hash to-sri --type "$1" "$2")
[ "$sri" = "$1-$4" ]
h32=$(nix hash convert --algo "$1" --to base32 "$2")
[ "$h32" = "$3" ]
h32=$(nix-hash --type "$1" --to-base32 "$2")
[ "$h32" = "$3" ]
h32=$(nix hash to-base32 --type "$1" "$2")
[ "$h32" = "$3" ]
h16=$(nix-hash --type "$1" --to-base16 "$h32")
[ "$h16" = "$2" ]
h16=$(nix hash convert --algo "$1" --to base16 "$h64")
[ "$h16" = "$2" ]
h16=$(nix hash to-base16 --type "$1" "$h64")
[ "$h16" = "$2" ]
h16=$(nix hash convert --to base16 "$sri")
[ "$h16" = "$2" ]
h16=$(nix hash to-base16 "$sri")
[ "$h16" = "$2" ]
#
# Converting from SRI
#
# Input hash algo auto-detected from SRI and output defaults to SRI as well.
sri=$(nix hash convert "$1-$4")
[ "$sri" = "$1-$4" ]
sri=$(nix hash convert --from sri "$1-$4")
[ "$sri" = "$1-$4" ]
sri=$(nix hash convert --to sri "$1-$4")
[ "$sri" = "$1-$4" ]
sri=$(nix hash convert --from sri --to sri "$1-$4")
[ "$sri" = "$1-$4" ]
sri=$(nix hash convert --to base64 "$1-$4")
[ "$sri" = "$4" ]
#
# Auto-detecting the input from algo and length.
#
sri=$(nix hash convert --algo "$1" "$2")
[ "$sri" = "$1-$4" ]
sri=$(nix hash convert --algo "$1" "$3")
[ "$sri" = "$1-$4" ]
sri=$(nix hash convert --algo "$1" "$4")
[ "$sri" = "$1-$4" ]
sri=$(nix hash convert --algo "$1" "$2")
[ "$sri" = "$1-$4" ]
sri=$(nix hash convert --algo "$1" "$3")
[ "$sri" = "$1-$4" ]
sri=$(nix hash convert --algo "$1" "$4")
[ "$sri" = "$1-$4" ]
#
# Asserting input format succeeds.
#
sri=$(nix hash convert --algo "$1" --from base16 "$2")
[ "$sri" = "$1-$4" ]
sri=$(nix hash convert --algo "$1" --from nix32 "$3")
[ "$sri" = "$1-$4" ]
sri=$(nix hash convert --algo "$1" --from base64 "$4")
[ "$sri" = "$1-$4" ]
#
# Asserting input format fails.
#
fail=$(nix hash convert --algo "$1" --from nix32 "$2" 2>&1 || echo "exit: $?")
[[ "$fail" == *"error: input hash"*"exit: 1" ]]
fail=$(nix hash convert --algo "$1" --from base16 "$3" 2>&1 || echo "exit: $?")
[[ "$fail" == *"error: input hash"*"exit: 1" ]]
fail=$(nix hash convert --algo "$1" --from nix32 "$4" 2>&1 || echo "exit: $?")
[[ "$fail" == *"error: input hash"*"exit: 1" ]]
}
try3 sha1 "800d59cfcd3c05e900cb4e214be48f6b886a08df" "vw46m23bizj4n8afrc0fj19wrp7mj3c0" "gA1Zz808BekAy04hS+SPa4hqCN8="
try3 sha256 "ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad" "1b8m03r63zqhnjf7l5wnldhh7c134ap5vpj0850ymkq1iyzicy5s" "ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0="
try3 sha512 "204a8fc6dda82f0a0ced7beb8e08a41657c16ef468b228a8279be331a703c33596fd15c13b1b07f9aa1d3bea57789ca031ad85c7a71dd70354ec631238ca3445" "12k9jiq29iyqm03swfsgiw5mlqs173qazm3n7daz43infy12pyrcdf30fkk3qwv4yl2ick8yipc2mqnlh48xsvvxl60lbx8vp38yji0" "IEqPxt2oLwoM7XvrjgikFlfBbvRosiioJ5vjMacDwzWW/RXBOxsH+aodO+pXeJygMa2Fx6cd1wNU7GMSOMo0RQ=="
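The `--from` failure assertions inside `try3` above rely on a small shell idiom: stderr is merged into the captured output and the exit status is appended to it, so a single `[[ ... ]]` pattern match checks both the error message and the non-zero exit. A minimal standalone sketch of that idiom, reusing the sha1 vector from the `try3` call above:
```bash
# Merge stderr into the captured output and append the exit status, so one
# pattern match asserts both the message and the failure.
fail=$(nix hash convert --algo sha1 --from nix32 \
       "800d59cfcd3c05e900cb4e214be48f6b886a08df" 2>&1 || echo "exit: $?")
# The input is base16, not nix32, so the format assertion fails with exit code 1.
[[ "$fail" == *"error: input hash"*"exit: 1" ]]
```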

View file

@ -0,0 +1,108 @@
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".
warning: "base32" is a deprecated alias for hash format "nix32".

View file

@ -1 +1 @@
{ hashesBase16 = [ "d41d8cd98f00b204e9800998ecf8427e" "6c69ee7f211c640419d5366cc076ae46" "bb3438fbabd460ea6dbd27d153e2233b" "da39a3ee5e6b4b0d3255bfef95601890afd80709" "cd54e8568c1b37cf1e5badb0779bcbf382212189" "6d12e10b1d331dad210e47fd25d4f260802b7e77" "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855" "900a4469df00ccbfd0c145c6d1e4b7953dd0afafadd7534e3a4019e8d38fc663" "ad0387b3bd8652f730ca46d25f9c170af0fd589f42e7f23f5a9e6412d97d7e56" "cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e" "9d0886f8c6b389398a16257bc79780fab9831c7fc11c8ab07fa732cb7b348feade382f92617c9c5305fefba0af02ab5fd39a587d330997ff5bd0db19f7666653" "21644b72aa259e5a588cd3afbafb1d4310f4889680f6c83b9d531596a5a284f34dbebff409d23bcc86aee6bad10c891606f075c6f4755cb536da27db5693f3a7" ]; hashesBase32 = [ "3y8bwfr609h3lh9ch0izcqq7fl" "26mrvc0v1nslch8r0w45zywsbc" "1v4gi57l97pmnylq6lmgxkhd5v" "143xibwh31h9bvxzalr0sjvbbvpa6ffs" "i4hj30pkrfdpgc5dbcgcydqviibfhm6d" "fxz2p030yba2bza71qhss79k3l5y24kd" "0mdqa9w1p6cmli6976v4wi0sw9r4p5prkj7lzfd1877wk11c9c73" "0qy6iz9yh6a079757mxdmypx0gcmnzjd3ij5q78bzk00vxll82lh" "0mkygpci4r4yb8zz5rs2kxcgvw0a2yf5zlj6r8qgfll6pnrqf0xd" "0zdl9zrg8r3i9c1g90lgg9ip5ijzv3yhz91i0zzn3r8ap9ws784gkp9dk9j3aglhgf1amqb0pj21mh7h1nxcl18akqvvf7ggqsy30yg" "19ncrpp37dx0nzzjw4k6zaqkb9mzaq2myhgpzh5aff7qqcj5wwdxslg6ixwncm7gyq8l761gwf87fgsh2bwfyr52s53k2dkqvw8c24x" "2kz74snvckxldmmbisz9ikmy031d28cs6xfdbl6rhxx42glpyz4vww4lajrc5akklxwixl0js4g84233pxvmbykiic5m7i5m9r4nr11" ]; hashesBase64 = [ "1B2M2Y8AsgTpgAmY7PhCfg==" "bGnufyEcZAQZ1TZswHauRg==" "uzQ4+6vUYOptvSfRU+IjOw==" "2jmj7l5rSw0yVb/vlWAYkK/YBwk=" "zVToVowbN88eW62wd5vL84IhIYk=" "bRLhCx0zHa0hDkf9JdTyYIArfnc=" "47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU=" "kApEad8AzL/QwUXG0eS3lT3Qr6+t11NOOkAZ6NOPxmM=" "rQOHs72GUvcwykbSX5wXCvD9WJ9C5/I/Wp5kEtl9flY=" "z4PhNX7vuL3xVChQ1m2AB9Yg5AULVxXcg/SpIdNs6c5H0NE8XYXysP+DGNKHfuwvY7kxvUdBeoGlODJ6+SfaPg==" "nQiG+MaziTmKFiV7x5eA+rmDHH/BHIqwf6cyy3s0j+reOC+SYXycUwX++6CvAqtf05pYfTMJl/9b0NsZ92ZmUw==" "IWRLcqolnlpYjNOvuvsdQxD0iJaA9sg7nVMVlqWihPNNvr/0CdI7zIau5rrRDIkWBvB1xvR1XLU22ifbVpPzpw==" ]; hashesSRI = [ "md5-1B2M2Y8AsgTpgAmY7PhCfg==" "md5-bGnufyEcZAQZ1TZswHauRg==" "md5-uzQ4+6vUYOptvSfRU+IjOw==" "sha1-2jmj7l5rSw0yVb/vlWAYkK/YBwk=" "sha1-zVToVowbN88eW62wd5vL84IhIYk=" "sha1-bRLhCx0zHa0hDkf9JdTyYIArfnc=" "sha256-47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU=" "sha256-kApEad8AzL/QwUXG0eS3lT3Qr6+t11NOOkAZ6NOPxmM=" "sha256-rQOHs72GUvcwykbSX5wXCvD9WJ9C5/I/Wp5kEtl9flY=" "sha512-z4PhNX7vuL3xVChQ1m2AB9Yg5AULVxXcg/SpIdNs6c5H0NE8XYXysP+DGNKHfuwvY7kxvUdBeoGlODJ6+SfaPg==" "sha512-nQiG+MaziTmKFiV7x5eA+rmDHH/BHIqwf6cyy3s0j+reOC+SYXycUwX++6CvAqtf05pYfTMJl/9b0NsZ92ZmUw==" "sha512-IWRLcqolnlpYjNOvuvsdQxD0iJaA9sg7nVMVlqWihPNNvr/0CdI7zIau5rrRDIkWBvB1xvR1XLU22ifbVpPzpw==" ]; }
{ hashesBase16 = [ "d41d8cd98f00b204e9800998ecf8427e" "6c69ee7f211c640419d5366cc076ae46" "bb3438fbabd460ea6dbd27d153e2233b" "da39a3ee5e6b4b0d3255bfef95601890afd80709" "cd54e8568c1b37cf1e5badb0779bcbf382212189" "6d12e10b1d331dad210e47fd25d4f260802b7e77" "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855" "900a4469df00ccbfd0c145c6d1e4b7953dd0afafadd7534e3a4019e8d38fc663" "ad0387b3bd8652f730ca46d25f9c170af0fd589f42e7f23f5a9e6412d97d7e56" "cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e" "9d0886f8c6b389398a16257bc79780fab9831c7fc11c8ab07fa732cb7b348feade382f92617c9c5305fefba0af02ab5fd39a587d330997ff5bd0db19f7666653" "21644b72aa259e5a588cd3afbafb1d4310f4889680f6c83b9d531596a5a284f34dbebff409d23bcc86aee6bad10c891606f075c6f4755cb536da27db5693f3a7" ]; hashesBase32 = [ "3y8bwfr609h3lh9ch0izcqq7fl" "26mrvc0v1nslch8r0w45zywsbc" "1v4gi57l97pmnylq6lmgxkhd5v" "143xibwh31h9bvxzalr0sjvbbvpa6ffs" "i4hj30pkrfdpgc5dbcgcydqviibfhm6d" "fxz2p030yba2bza71qhss79k3l5y24kd" "0mdqa9w1p6cmli6976v4wi0sw9r4p5prkj7lzfd1877wk11c9c73" "0qy6iz9yh6a079757mxdmypx0gcmnzjd3ij5q78bzk00vxll82lh" "0mkygpci4r4yb8zz5rs2kxcgvw0a2yf5zlj6r8qgfll6pnrqf0xd" "0zdl9zrg8r3i9c1g90lgg9ip5ijzv3yhz91i0zzn3r8ap9ws784gkp9dk9j3aglhgf1amqb0pj21mh7h1nxcl18akqvvf7ggqsy30yg" "19ncrpp37dx0nzzjw4k6zaqkb9mzaq2myhgpzh5aff7qqcj5wwdxslg6ixwncm7gyq8l761gwf87fgsh2bwfyr52s53k2dkqvw8c24x" "2kz74snvckxldmmbisz9ikmy031d28cs6xfdbl6rhxx42glpyz4vww4lajrc5akklxwixl0js4g84233pxvmbykiic5m7i5m9r4nr11" ]; hashesBase64 = [ "1B2M2Y8AsgTpgAmY7PhCfg==" "bGnufyEcZAQZ1TZswHauRg==" "uzQ4+6vUYOptvSfRU+IjOw==" "2jmj7l5rSw0yVb/vlWAYkK/YBwk=" "zVToVowbN88eW62wd5vL84IhIYk=" "bRLhCx0zHa0hDkf9JdTyYIArfnc=" "47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU=" "kApEad8AzL/QwUXG0eS3lT3Qr6+t11NOOkAZ6NOPxmM=" "rQOHs72GUvcwykbSX5wXCvD9WJ9C5/I/Wp5kEtl9flY=" "z4PhNX7vuL3xVChQ1m2AB9Yg5AULVxXcg/SpIdNs6c5H0NE8XYXysP+DGNKHfuwvY7kxvUdBeoGlODJ6+SfaPg==" "nQiG+MaziTmKFiV7x5eA+rmDHH/BHIqwf6cyy3s0j+reOC+SYXycUwX++6CvAqtf05pYfTMJl/9b0NsZ92ZmUw==" "IWRLcqolnlpYjNOvuvsdQxD0iJaA9sg7nVMVlqWihPNNvr/0CdI7zIau5rrRDIkWBvB1xvR1XLU22ifbVpPzpw==" ]; hashesNix32 = [ "3y8bwfr609h3lh9ch0izcqq7fl" "26mrvc0v1nslch8r0w45zywsbc" "1v4gi57l97pmnylq6lmgxkhd5v" "143xibwh31h9bvxzalr0sjvbbvpa6ffs" "i4hj30pkrfdpgc5dbcgcydqviibfhm6d" "fxz2p030yba2bza71qhss79k3l5y24kd" "0mdqa9w1p6cmli6976v4wi0sw9r4p5prkj7lzfd1877wk11c9c73" "0qy6iz9yh6a079757mxdmypx0gcmnzjd3ij5q78bzk00vxll82lh" "0mkygpci4r4yb8zz5rs2kxcgvw0a2yf5zlj6r8qgfll6pnrqf0xd" "0zdl9zrg8r3i9c1g90lgg9ip5ijzv3yhz91i0zzn3r8ap9ws784gkp9dk9j3aglhgf1amqb0pj21mh7h1nxcl18akqvvf7ggqsy30yg" "19ncrpp37dx0nzzjw4k6zaqkb9mzaq2myhgpzh5aff7qqcj5wwdxslg6ixwncm7gyq8l761gwf87fgsh2bwfyr52s53k2dkqvw8c24x" "2kz74snvckxldmmbisz9ikmy031d28cs6xfdbl6rhxx42glpyz4vww4lajrc5akklxwixl0js4g84233pxvmbykiic5m7i5m9r4nr11" ]; hashesSRI = [ "md5-1B2M2Y8AsgTpgAmY7PhCfg==" "md5-bGnufyEcZAQZ1TZswHauRg==" "md5-uzQ4+6vUYOptvSfRU+IjOw==" "sha1-2jmj7l5rSw0yVb/vlWAYkK/YBwk=" "sha1-zVToVowbN88eW62wd5vL84IhIYk=" "sha1-bRLhCx0zHa0hDkf9JdTyYIArfnc=" "sha256-47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU=" "sha256-kApEad8AzL/QwUXG0eS3lT3Qr6+t11NOOkAZ6NOPxmM=" "sha256-rQOHs72GUvcwykbSX5wXCvD9WJ9C5/I/Wp5kEtl9flY=" "sha512-z4PhNX7vuL3xVChQ1m2AB9Yg5AULVxXcg/SpIdNs6c5H0NE8XYXysP+DGNKHfuwvY7kxvUdBeoGlODJ6+SfaPg==" "sha512-nQiG+MaziTmKFiV7x5eA+rmDHH/BHIqwf6cyy3s0j+reOC+SYXycUwX++6CvAqtf05pYfTMJl/9b0NsZ92ZmUw==" "sha512-IWRLcqolnlpYjNOvuvsdQxD0iJaA9sg7nVMVlqWihPNNvr/0CdI7zIau5rrRDIkWBvB1xvR1XLU22ifbVpPzpw==" ]; }

View file

@ -5,12 +5,14 @@ let
map2' = f: fsts: snds: map2 f { inherit fsts snds; };
getOutputHashes = hashes: {
hashesBase16 = map2' (hashAlgo: hash: builtins.convertHash { inherit hash hashAlgo; toHashFormat = "base16";}) hashAlgos hashes;
hashesNix32 = map2' (hashAlgo: hash: builtins.convertHash { inherit hash hashAlgo; toHashFormat = "nix32";}) hashAlgos hashes;
hashesBase32 = map2' (hashAlgo: hash: builtins.convertHash { inherit hash hashAlgo; toHashFormat = "base32";}) hashAlgos hashes;
hashesBase64 = map2' (hashAlgo: hash: builtins.convertHash { inherit hash hashAlgo; toHashFormat = "base64";}) hashAlgos hashes;
hashesSRI = map2' (hashAlgo: hash: builtins.convertHash { inherit hash hashAlgo; toHashFormat = "sri" ;}) hashAlgos hashes;
};
getOutputHashesColon = hashes: {
hashesBase16 = map2' (hashAlgo: hashBody: builtins.convertHash { hash = hashAlgo + ":" + hashBody; toHashFormat = "base16";}) hashAlgos hashes;
hashesNix32 = map2' (hashAlgo: hashBody: builtins.convertHash { hash = hashAlgo + ":" + hashBody; toHashFormat = "nix32";}) hashAlgos hashes;
hashesBase32 = map2' (hashAlgo: hashBody: builtins.convertHash { hash = hashAlgo + ":" + hashBody; toHashFormat = "base32";}) hashAlgos hashes;
hashesBase64 = map2' (hashAlgo: hashBody: builtins.convertHash { hash = hashAlgo + ":" + hashBody; toHashFormat = "base64";}) hashAlgos hashes;
hashesSRI = map2' (hashAlgo: hashBody: builtins.convertHash { hash = hashAlgo + ":" + hashBody; toHashFormat = "sri" ;}) hashAlgos hashes;
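Two details of this test expression are easy to miss: the colon-prefixed form in `getOutputHashesColon` lets the hash string itself carry the algorithm, so no separate `hashAlgo` attribute is needed, and the `"base32"` alias used for the `hashesBase32` attributes is what emits the deprecation warnings recorded in the expected-output file above. A minimal standalone sketch (hypothetical one-off invocation via `nix eval`, reusing the sha1 vector from `hash.sh`):
```bash
# The "sha1:" prefix carries the algorithm; "nix32" is the non-deprecated
# format name, while "base32" would additionally print the warning shown above.
nix eval --raw --expr \
  'builtins.convertHash { hash = "sha1:800d59cfcd3c05e900cb4e214be48f6b886a08df"; toHashFormat = "nix32"; }'
# prints: vw46m23bizj4n8afrc0fj19wrp7mj3c0
```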

View file

@ -84,15 +84,15 @@ CHARACTERIZATION_TEST(
(std::tuple<ContentAddress, ContentAddress, ContentAddress> {
ContentAddress {
.method = TextIngestionMethod {},
.hash = hashString(HashType::htSHA256, "Derive(...)"),
.hash = hashString(HashAlgorithm::SHA256, "Derive(...)"),
},
ContentAddress {
.method = FileIngestionMethod::Flat,
.hash = hashString(HashType::htSHA1, "blob blob..."),
.hash = hashString(HashAlgorithm::SHA1, "blob blob..."),
},
ContentAddress {
.method = FileIngestionMethod::Recursive,
.hash = hashString(HashType::htSHA256, "(...)"),
.hash = hashString(HashAlgorithm::SHA256, "(...)"),
},
}))
@ -179,7 +179,7 @@ CHARACTERIZATION_TEST(
std::optional {
ContentAddress {
.method = FileIngestionMethod::Flat,
.hash = hashString(HashType::htSHA1, "blob blob..."),
.hash = hashString(HashAlgorithm::SHA1, "blob blob..."),
},
},
}))

View file

@ -134,7 +134,7 @@ TEST_JSON(DynDerivationTest, caFixedText,
TEST_JSON(CaDerivationTest, caFloating,
(DerivationOutput::CAFloating {
.method = FileIngestionMethod::Recursive,
.hashType = htSHA256,
.hashAlgo = HashAlgorithm::SHA256,
}),
"drv-name", "output-name")
@ -145,7 +145,7 @@ TEST_JSON(DerivationTest, deferred,
TEST_JSON(ImpureDerivationTest, impure,
(DerivationOutput::Impure {
.method = FileIngestionMethod::Recursive,
.hashType = htSHA256,
.hashAlgo = HashAlgorithm::SHA256,
}),
"drv-name", "output-name")

View file

@ -26,7 +26,7 @@ static NarInfo makeNarInfo(const Store & store, bool includeImpureInfo) {
"foo",
FixedOutputInfo {
.method = FileIngestionMethod::Recursive,
.hash = hashString(HashType::htSHA256, "(...)"),
.hash = hashString(HashAlgorithm::SHA256, "(...)"),
.references = {
.others = {

View file

@ -25,7 +25,7 @@ static UnkeyedValidPathInfo makePathInfo(const Store & store, bool includeImpure
"foo",
FixedOutputInfo {
.method = FileIngestionMethod::Recursive,
.hash = hashString(HashType::htSHA256, "(...)"),
.hash = hashString(HashAlgorithm::SHA256, "(...)"),
.references = {
.others = {

View file

@ -53,15 +53,15 @@ VERSIONED_CHARACTERIZATION_TEST(
(std::tuple<ContentAddress, ContentAddress, ContentAddress> {
ContentAddress {
.method = TextIngestionMethod {},
.hash = hashString(HashType::htSHA256, "Derive(...)"),
.hash = hashString(HashAlgorithm::SHA256, "Derive(...)"),
},
ContentAddress {
.method = FileIngestionMethod::Flat,
.hash = hashString(HashType::htSHA1, "blob blob..."),
.hash = hashString(HashAlgorithm::SHA1, "blob blob..."),
},
ContentAddress {
.method = FileIngestionMethod::Recursive,
.hash = hashString(HashType::htSHA256, "(...)"),
.hash = hashString(HashAlgorithm::SHA256, "(...)"),
},
}))
@ -271,7 +271,7 @@ VERSIONED_CHARACTERIZATION_TEST(
std::optional {
ContentAddress {
.method = FileIngestionMethod::Flat,
.hash = hashString(HashType::htSHA1, "blob blob..."),
.hash = hashString(HashAlgorithm::SHA1, "blob blob..."),
},
},
}))

View file

@ -55,15 +55,15 @@ VERSIONED_CHARACTERIZATION_TEST(
(std::tuple<ContentAddress, ContentAddress, ContentAddress> {
ContentAddress {
.method = TextIngestionMethod {},
.hash = hashString(HashType::htSHA256, "Derive(...)"),
.hash = hashString(HashAlgorithm::SHA256, "Derive(...)"),
},
ContentAddress {
.method = FileIngestionMethod::Flat,
.hash = hashString(HashType::htSHA1, "blob blob..."),
.hash = hashString(HashAlgorithm::SHA1, "blob blob..."),
},
ContentAddress {
.method = FileIngestionMethod::Recursive,
.hash = hashString(HashType::htSHA256, "(...)"),
.hash = hashString(HashAlgorithm::SHA256, "(...)"),
},
}))
@ -464,7 +464,7 @@ VERSIONED_CHARACTERIZATION_TEST(
"foo",
FixedOutputInfo {
.method = FileIngestionMethod::Recursive,
.hash = hashString(HashType::htSHA256, "(...)"),
.hash = hashString(HashAlgorithm::SHA256, "(...)"),
.references = {
.others = {
StorePath {
@ -539,7 +539,7 @@ VERSIONED_CHARACTERIZATION_TEST(
std::optional {
ContentAddress {
.method = FileIngestionMethod::Flat,
.hash = hashString(HashType::htSHA1, "blob blob..."),
.hash = hashString(HashAlgorithm::SHA1, "blob blob..."),
},
},
}))

View file

@ -11,7 +11,7 @@ using namespace nix;
Gen<Hash> Arbitrary<Hash>::arbitrary()
{
Hash hash(htSHA1);
Hash hash(HashAlgorithm::SHA1);
for (size_t i = 0; i < hash.hashSize; ++i)
hash.hash[i] = *gen::arbitrary<uint8_t>();
return gen::just(hash);

View file

@ -95,7 +95,7 @@ const static Tree tree = {
{
.mode = Mode::Regular,
// hello world with special chars from above
.hash = Hash::parseAny("63ddb340119baf8492d2da53af47e8c7cfcd5eb2", htSHA1),
.hash = Hash::parseAny("63ddb340119baf8492d2da53af47e8c7cfcd5eb2", HashAlgorithm::SHA1),
},
},
{
@ -103,7 +103,7 @@ const static Tree tree = {
{
.mode = Mode::Executable,
// ditto
.hash = Hash::parseAny("63ddb340119baf8492d2da53af47e8c7cfcd5eb2", htSHA1),
.hash = Hash::parseAny("63ddb340119baf8492d2da53af47e8c7cfcd5eb2", HashAlgorithm::SHA1),
},
},
{
@ -111,7 +111,7 @@ const static Tree tree = {
{
.mode = Mode::Directory,
// Empty directory hash
.hash = Hash::parseAny("4b825dc642cb6eb9a060e54bf8d69288fbee4904", htSHA1),
.hash = Hash::parseAny("4b825dc642cb6eb9a060e54bf8d69288fbee4904", HashAlgorithm::SHA1),
},
},
};
@ -174,7 +174,7 @@ TEST_F(GitTest, both_roundrip) {
std::function<DumpHook> dumpHook;
dumpHook = [&](const CanonPath & path) {
StringSink s;
HashSink hashSink { htSHA1 };
HashSink hashSink { HashAlgorithm::SHA1 };
TeeSink s2 { s, hashSink };
auto mode = dump(
files, path, s2, dumpHook,

View file

@ -13,28 +13,28 @@ namespace nix {
TEST(hashString, testKnownMD5Hashes1) {
// values taken from: https://tools.ietf.org/html/rfc1321
auto s1 = "";
auto hash = hashString(HashType::htMD5, s1);
auto hash = hashString(HashAlgorithm::MD5, s1);
ASSERT_EQ(hash.to_string(HashFormat::Base16, true), "md5:d41d8cd98f00b204e9800998ecf8427e");
}
TEST(hashString, testKnownMD5Hashes2) {
// values taken from: https://tools.ietf.org/html/rfc1321
auto s2 = "abc";
auto hash = hashString(HashType::htMD5, s2);
auto hash = hashString(HashAlgorithm::MD5, s2);
ASSERT_EQ(hash.to_string(HashFormat::Base16, true), "md5:900150983cd24fb0d6963f7d28e17f72");
}
TEST(hashString, testKnownSHA1Hashes1) {
// values taken from: https://tools.ietf.org/html/rfc3174
auto s = "abc";
auto hash = hashString(HashType::htSHA1, s);
auto hash = hashString(HashAlgorithm::SHA1, s);
ASSERT_EQ(hash.to_string(HashFormat::Base16, true),"sha1:a9993e364706816aba3e25717850c26c9cd0d89d");
}
TEST(hashString, testKnownSHA1Hashes2) {
// values taken from: https://tools.ietf.org/html/rfc3174
auto s = "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq";
auto hash = hashString(HashType::htSHA1, s);
auto hash = hashString(HashAlgorithm::SHA1, s);
ASSERT_EQ(hash.to_string(HashFormat::Base16, true),"sha1:84983e441c3bd26ebaae4aa1f95129e5e54670f1");
}
@ -42,7 +42,7 @@ namespace nix {
// values taken from: https://tools.ietf.org/html/rfc4634
auto s = "abc";
auto hash = hashString(HashType::htSHA256, s);
auto hash = hashString(HashAlgorithm::SHA256, s);
ASSERT_EQ(hash.to_string(HashFormat::Base16, true),
"sha256:ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad");
}
@ -50,7 +50,7 @@ namespace nix {
TEST(hashString, testKnownSHA256Hashes2) {
// values taken from: https://tools.ietf.org/html/rfc4634
auto s = "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq";
auto hash = hashString(HashType::htSHA256, s);
auto hash = hashString(HashAlgorithm::SHA256, s);
ASSERT_EQ(hash.to_string(HashFormat::Base16, true),
"sha256:248d6a61d20638b8e5c026930c3e6039a33ce45964ff2167f6ecedd419db06c1");
}
@ -58,7 +58,7 @@ namespace nix {
TEST(hashString, testKnownSHA512Hashes1) {
// values taken from: https://tools.ietf.org/html/rfc4634
auto s = "abc";
auto hash = hashString(HashType::htSHA512, s);
auto hash = hashString(HashAlgorithm::SHA512, s);
ASSERT_EQ(hash.to_string(HashFormat::Base16, true),
"sha512:ddaf35a193617abacc417349ae20413112e6fa4e89a9"
"7ea20a9eeee64b55d39a2192992a274fc1a836ba3c23a3feebbd"
@ -68,7 +68,7 @@ namespace nix {
// values taken from: https://tools.ietf.org/html/rfc4634
auto s = "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu";
auto hash = hashString(HashType::htSHA512, s);
auto hash = hashString(HashAlgorithm::SHA512, s);
ASSERT_EQ(hash.to_string(HashFormat::Base16, true),
"sha512:8e959b75dae313da8cf4f72814fc143f8f7779c6eb9f7fa1"
"7299aeadb6889018501d289e4900f7e4331b99dec4b5433a"
@ -80,7 +80,7 @@ namespace nix {
* --------------------------------------------------------------------------*/
TEST(hashFormat, testRoundTripPrintParse) {
for (const HashFormat hashFormat: { HashFormat::Base64, HashFormat::Base32, HashFormat::Base16, HashFormat::SRI}) {
for (const HashFormat hashFormat: { HashFormat::Base64, HashFormat::Nix32, HashFormat::Base16, HashFormat::SRI}) {
ASSERT_EQ(parseHashFormat(printHashFormat(hashFormat)), hashFormat);
ASSERT_EQ(*parseHashFormatOpt(printHashFormat(hashFormat)), hashFormat);
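The print/parse round trip checked here at the `HashFormat` level has a CLI counterpart in `hash.sh`. A minimal sketch (illustrative only, assuming `--to nix32` is accepted just like `--from nix32` in the assertions above), reusing the RFC 4634 sha256 digest of `"abc"` that also appears as a `try3` vector:
```bash
# base16 -> nix32 -> base16 round trip for sha256("abc"); expected values match
# the try3 vectors in hash.sh.
h32=$(nix hash convert --algo sha256 --to nix32 \
      "ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad")
[ "$h32" = "1b8m03r63zqhnjf7l5wnldhh7c134ap5vpj0850ymkq1iyzicy5s" ]
h16=$(nix hash convert --algo sha256 --to base16 "$h32")
[ "$h16" = "ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad" ]
```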
}