Refactor downloadCached() interface

This commit is contained in:
Eelco Dolstra 2019-05-22 23:36:29 +02:00
parent 66f1d7ee95
commit df3f5a78d5
7 changed files with 73 additions and 54 deletions

View file

@@ -61,9 +61,11 @@ Bindings * MixEvalArgs::getAutoArgs(EvalState & state)
Path lookupFileArg(EvalState & state, string s) Path lookupFileArg(EvalState & state, string s)
{ {
if (isUri(s)) if (isUri(s)) {
return getDownloader()->downloadCached(state.store, s, true).path; CachedDownloadRequest request(s);
else if (s.size() > 2 && s.at(0) == '<' && s.at(s.size() - 1) == '>') { request.unpack = true;
return getDownloader()->downloadCached(state.store, request).path;
} else if (s.size() > 2 && s.at(0) == '<' && s.at(s.size() - 1) == '>') {
Path p = s.substr(1, s.size() - 2); Path p = s.substr(1, s.size() - 2);
return state.findFile(p); return state.findFile(p);
} else } else

View file

@@ -657,7 +657,9 @@ std::pair<bool, std::string> EvalState::resolveSearchPathElem(const SearchPathEl
if (isUri(elem.second)) { if (isUri(elem.second)) {
try { try {
res = { true, getDownloader()->downloadCached(store, elem.second, true).path }; CachedDownloadRequest request(elem.second);
request.unpack = true;
res = { true, getDownloader()->downloadCached(store, request).path };
} catch (DownloadError & e) { } catch (DownloadError & e) {
printError(format("warning: Nix search path entry '%1%' cannot be downloaded, ignoring") % elem.second); printError(format("warning: Nix search path entry '%1%' cannot be downloaded, ignoring") % elem.second);
res = { false, "" }; res = { false, "" };

View file

@@ -2050,9 +2050,9 @@ static void prim_splitVersion(EvalState & state, const Pos & pos, Value * * args
void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v, void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v,
const string & who, bool unpack, const std::string & defaultName) const string & who, bool unpack, const std::string & defaultName)
{ {
string url; CachedDownloadRequest request("");
Hash expectedHash; request.unpack = unpack;
string name = defaultName; request.name = defaultName;
state.forceValue(*args[0]); state.forceValue(*args[0]);
@@ -2063,27 +2063,27 @@ void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v,
for (auto & attr : *args[0]->attrs) { for (auto & attr : *args[0]->attrs) {
string n(attr.name); string n(attr.name);
if (n == "url") if (n == "url")
url = state.forceStringNoCtx(*attr.value, *attr.pos); request.uri = state.forceStringNoCtx(*attr.value, *attr.pos);
else if (n == "sha256") else if (n == "sha256")
expectedHash = Hash(state.forceStringNoCtx(*attr.value, *attr.pos), htSHA256); request.expectedHash = Hash(state.forceStringNoCtx(*attr.value, *attr.pos), htSHA256);
else if (n == "name") else if (n == "name")
name = state.forceStringNoCtx(*attr.value, *attr.pos); request.name = state.forceStringNoCtx(*attr.value, *attr.pos);
else else
throw EvalError(format("unsupported argument '%1%' to '%2%', at %3%") % attr.name % who % attr.pos); throw EvalError(format("unsupported argument '%1%' to '%2%', at %3%") % attr.name % who % attr.pos);
} }
if (url.empty()) if (request.uri.empty())
throw EvalError(format("'url' argument required, at %1%") % pos); throw EvalError(format("'url' argument required, at %1%") % pos);
} else } else
url = state.forceStringNoCtx(*args[0], pos); request.uri = state.forceStringNoCtx(*args[0], pos);
state.checkURI(url); state.checkURI(request.uri);
if (evalSettings.pureEval && !expectedHash) if (evalSettings.pureEval && !request.expectedHash)
throw Error("in pure evaluation mode, '%s' requires a 'sha256' argument", who); throw Error("in pure evaluation mode, '%s' requires a 'sha256' argument", who);
Path res = getDownloader()->downloadCached(state.store, url, unpack, name, expectedHash).path; Path res = getDownloader()->downloadCached(state.store, request).path;
if (state.allowedPaths) if (state.allowedPaths)
state.allowedPaths->insert(res); state.allowedPaths->insert(res);

View file

@@ -136,9 +136,11 @@ std::shared_ptr<FlakeRegistry> EvalState::getGlobalFlakeRegistry()
std::call_once(_globalFlakeRegistryInit, [&]() { std::call_once(_globalFlakeRegistryInit, [&]() {
auto path = evalSettings.flakeRegistry; auto path = evalSettings.flakeRegistry;
if (!hasPrefix(path, "/")) if (!hasPrefix(path, "/")) {
path = getDownloader()->downloadCached(store, CachedDownloadRequest request(evalSettings.flakeRegistry);
evalSettings.flakeRegistry, false, "registry").path; request.name = "flake-registry.json";
path = getDownloader()->downloadCached(store, request).path;
}
_globalFlakeRegistry = readRegistry(path); _globalFlakeRegistry = readRegistry(path);
}); });
@@ -244,8 +246,11 @@ static SourceInfo fetchFlake(EvalState & state, const FlakeRef & flakeRef, bool
if (accessToken != "") if (accessToken != "")
url += "?access_token=" + accessToken; url += "?access_token=" + accessToken;
auto result = getDownloader()->downloadCached(state.store, url, true, "source", CachedDownloadRequest request(url);
Hash(), nullptr, resolvedRef.rev ? 1000000000 : settings.tarballTtl); request.unpack = true;
request.name = "source";
request.ttl = resolvedRef.rev ? 1000000000 : settings.tarballTtl;
auto result = getDownloader()->downloadCached(state.store, request);
if (!result.etag) if (!result.etag)
throw Error("did not receive an ETag header from '%s'", url); throw Error("did not receive an ETag header from '%s'", url);

View file

@@ -319,10 +319,10 @@ struct CurlDownloader : public Downloader
long httpStatus = 0; long httpStatus = 0;
curl_easy_getinfo(req, CURLINFO_RESPONSE_CODE, &httpStatus); curl_easy_getinfo(req, CURLINFO_RESPONSE_CODE, &httpStatus);
char * effectiveUrlCStr; char * effectiveUriCStr;
curl_easy_getinfo(req, CURLINFO_EFFECTIVE_URL, &effectiveUrlCStr); curl_easy_getinfo(req, CURLINFO_EFFECTIVE_URL, &effectiveUriCStr);
if (effectiveUrlCStr) if (effectiveUriCStr)
result.effectiveUrl = effectiveUrlCStr; result.effectiveUri = effectiveUriCStr;
debug("finished %s of '%s'; curl status = %d, HTTP status = %d, body = %d bytes", debug("finished %s of '%s'; curl status = %d, HTTP status = %d, body = %d bytes",
request.verb(), request.uri, code, httpStatus, result.bodySize); request.verb(), request.uri, code, httpStatus, result.bodySize);
@@ -790,18 +790,20 @@ void Downloader::download(DownloadRequest && request, Sink & sink)
} }
} }
CachedDownloadResult Downloader::downloadCached(ref<Store> store, const string & url_, bool unpack, string name, const Hash & expectedHash, string * effectiveUrl, int ttl) CachedDownloadResult Downloader::downloadCached(
ref<Store> store, const CachedDownloadRequest & request)
{ {
auto url = resolveUri(url_); auto url = resolveUri(request.uri);
auto name = request.name;
if (name == "") { if (name == "") {
auto p = url.rfind('/'); auto p = url.rfind('/');
if (p != string::npos) name = string(url, p + 1); if (p != string::npos) name = string(url, p + 1);
} }
Path expectedStorePath; Path expectedStorePath;
if (expectedHash) { if (request.expectedHash) {
expectedStorePath = store->makeFixedOutputPath(unpack, expectedHash, name); expectedStorePath = store->makeFixedOutputPath(request.unpack, request.expectedHash, name);
if (store->isValidPath(expectedStorePath)) { if (store->isValidPath(expectedStorePath)) {
CachedDownloadResult result; CachedDownloadResult result;
result.storePath = expectedStorePath; result.storePath = expectedStorePath;
@@ -835,10 +837,9 @@ CachedDownloadResult Downloader::downloadCached(ref<Store> store, const string &
auto ss = tokenizeString<vector<string>>(readFile(dataFile), "\n"); auto ss = tokenizeString<vector<string>>(readFile(dataFile), "\n");
if (ss.size() >= 3 && ss[0] == url) { if (ss.size() >= 3 && ss[0] == url) {
time_t lastChecked; time_t lastChecked;
if (string2Int(ss[2], lastChecked) && lastChecked + ttl >= time(0)) { if (string2Int(ss[2], lastChecked) && lastChecked + request.ttl >= time(0)) {
skip = true; skip = true;
if (effectiveUrl) result.effectiveUri = request.uri;
*effectiveUrl = url_;
result.etag = ss[1]; result.etag = ss[1];
} else if (!ss[1].empty()) { } else if (!ss[1].empty()) {
debug(format("verifying previous ETag '%1%'") % ss[1]); debug(format("verifying previous ETag '%1%'") % ss[1]);
@@ -852,18 +853,17 @@ CachedDownloadResult Downloader::downloadCached(ref<Store> store, const string &
if (!skip) { if (!skip) {
try { try {
DownloadRequest request(url); DownloadRequest request2(url);
request.expectedETag = expectedETag; request2.expectedETag = expectedETag;
auto res = download(request); auto res = download(request2);
if (effectiveUrl) result.effectiveUri = res.effectiveUri;
*effectiveUrl = res.effectiveUrl;
result.etag = res.etag; result.etag = res.etag;
if (!res.cached) { if (!res.cached) {
ValidPathInfo info; ValidPathInfo info;
StringSink sink; StringSink sink;
dumpString(*res.data, sink); dumpString(*res.data, sink);
Hash hash = hashString(expectedHash ? expectedHash.type : htSHA256, *res.data); Hash hash = hashString(request.expectedHash ? request.expectedHash.type : htSHA256, *res.data);
info.path = store->makeFixedOutputPath(false, hash, name); info.path = store->makeFixedOutputPath(false, hash, name);
info.narHash = hashString(htSHA256, *sink.s); info.narHash = hashString(htSHA256, *sink.s);
info.narSize = sink.s->size(); info.narSize = sink.s->size();
@@ -883,7 +883,7 @@ CachedDownloadResult Downloader::downloadCached(ref<Store> store, const string &
} }
} }
if (unpack) { if (request.unpack) {
Path unpackedLink = cacheDir + "/" + baseNameOf(storePath) + "-unpacked"; Path unpackedLink = cacheDir + "/" + baseNameOf(storePath) + "-unpacked";
PathLocks lock2({unpackedLink}, fmt("waiting for lock on '%1%'...", unpackedLink)); PathLocks lock2({unpackedLink}, fmt("waiting for lock on '%1%'...", unpackedLink));
Path unpackedStorePath; Path unpackedStorePath;
@@ -906,11 +906,11 @@ CachedDownloadResult Downloader::downloadCached(ref<Store> store, const string &
} }
if (expectedStorePath != "" && storePath != expectedStorePath) { if (expectedStorePath != "" && storePath != expectedStorePath) {
Hash gotHash = unpack Hash gotHash = request.unpack
? hashPath(expectedHash.type, store->toRealPath(storePath)).first ? hashPath(request.expectedHash.type, store->toRealPath(storePath)).first
: hashFile(expectedHash.type, store->toRealPath(storePath)); : hashFile(request.expectedHash.type, store->toRealPath(storePath));
throw nix::Error("hash mismatch in file downloaded from '%s':\n wanted: %s\n got: %s", throw nix::Error("hash mismatch in file downloaded from '%s':\n wanted: %s\n got: %s",
url, expectedHash.to_string(), gotHash.to_string()); url, request.expectedHash.to_string(), gotHash.to_string());
} }
result.storePath = storePath; result.storePath = storePath;

View file

@@ -36,11 +36,23 @@ struct DownloadResult
{ {
bool cached = false; bool cached = false;
std::string etag; std::string etag;
std::string effectiveUrl; std::string effectiveUri;
std::shared_ptr<std::string> data; std::shared_ptr<std::string> data;
uint64_t bodySize = 0; uint64_t bodySize = 0;
}; };
struct CachedDownloadRequest
{
std::string uri;
bool unpack = false;
std::string name;
Hash expectedHash;
unsigned int ttl = settings.tarballTtl;
CachedDownloadRequest(const std::string & uri)
: uri(uri) { }
};
struct CachedDownloadResult struct CachedDownloadResult
{ {
// Note: 'storePath' may be different from 'path' when using a // Note: 'storePath' may be different from 'path' when using a
@@ -48,6 +60,7 @@ struct CachedDownloadResult
Path storePath; Path storePath;
Path path; Path path;
std::optional<std::string> etag; std::optional<std::string> etag;
std::string effectiveUri;
}; };
class Store; class Store;
@@ -73,10 +86,7 @@ struct Downloader
and is more recent than tarball-ttl seconds. Otherwise, and is more recent than tarball-ttl seconds. Otherwise,
use the recorded ETag to verify if the server has a more use the recorded ETag to verify if the server has a more
recent version, and if so, download it to the Nix store. */ recent version, and if so, download it to the Nix store. */
CachedDownloadResult downloadCached( CachedDownloadResult downloadCached(ref<Store> store, const CachedDownloadRequest & request);
ref<Store> store, const string & uri, bool unpack, string name = "",
const Hash & expectedHash = Hash(), string * effectiveUri = nullptr,
int ttl = settings.tarballTtl);
enum Error { NotFound, Forbidden, Misc, Transient, Interrupted }; enum Error { NotFound, Forbidden, Misc, Transient, Interrupted };
}; };

View file

@@ -86,10 +86,12 @@ static void update(const StringSet & channelNames)
// We want to download the url to a file to see if it's a tarball while also checking if we // We want to download the url to a file to see if it's a tarball while also checking if we
// got redirected in the process, so that we can grab the various parts of a nix channel // got redirected in the process, so that we can grab the various parts of a nix channel
// definition from a consistent location if the redirect changes mid-download. // definition from a consistent location if the redirect changes mid-download.
std::string effectiveUrl; CachedDownloadRequest request(url);
request.ttl = 0;
auto dl = getDownloader(); auto dl = getDownloader();
auto filename = dl->downloadCached(store, url, false, "", Hash(), &effectiveUrl, 0).path; auto result = dl->downloadCached(store, request);
url = chomp(std::move(effectiveUrl)); auto filename = result.path;
url = chomp(result.effectiveUri);
// If the URL contains a version number, append it to the name // If the URL contains a version number, append it to the name
// attribute (so that "nix-env -q" on the channels profile // attribute (so that "nix-env -q" on the channels profile
@@ -121,12 +123,10 @@ static void update(const StringSet & channelNames)
} }
// Download the channel tarball. // Download the channel tarball.
auto fullURL = url + "/nixexprs.tar.xz";
try { try {
filename = dl->downloadCached(store, fullURL, false).path; filename = dl->downloadCached(store, CachedDownloadRequest(url + "/nixexprs.tar.xz")).path;
} catch (DownloadError & e) { } catch (DownloadError & e) {
fullURL = url + "/nixexprs.tar.bz2"; filename = dl->downloadCached(store, CachedDownloadRequest(url + "/nixexprs.tar.bz2")).path;
filename = dl->downloadCached(store, fullURL, false).path;
} }
chomp(filename); chomp(filename);
} }