author     Eelco Dolstra <edolstra@gmail.com>   2019-05-22 21:36 +0200
committer  Eelco Dolstra <edolstra@gmail.com>   2019-06-24 20:12 +0200
commit     f8b30338ac231262bdf19844f044f0572c460048 (patch)
tree       709d86e57a0fa53da7a52422ea9247e1d4034b9d /src
parent     7b9c68766d513260d5262d5782b46384834cdb33 (diff)
Refactor downloadCached() interface
(cherry picked from commit df3f5a78d5ab0a1f2dc9d288b271b38a9b8b33b5)
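This commit collapses downloadCached()'s growing list of positional parameters (uri, unpack, name, expectedHash, an effectiveUrl out-parameter, ttl) into a single CachedDownloadRequest struct, and moves the effective URI into CachedDownloadResult. A minimal sketch of the calling convention before and after, using only names that appear in the diff below (the surrounding variables are illustrative):

    // Before: positional arguments, optional values passed in order.
    //   auto res = getDownloader()->downloadCached(store, url, /* unpack */ true);

    // After: a request object with named fields; only non-defaults are set.
    CachedDownloadRequest request(url);
    request.unpack = true;
    auto res = getDownloader()->downloadCached(store, request);
    Path path = res.path;                 // unpacked (or plain) path in the store
    std::string uri = res.effectiveUri;   // final URI after any redirects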
Diffstat (limited to 'src')
-rw-r--r--  src/libexpr/common-eval-args.cc    8
-rw-r--r--  src/libexpr/parser.y               4
-rw-r--r--  src/libexpr/primops.cc            22
-rw-r--r--  src/libstore/download.cc          44
-rw-r--r--  src/libstore/download.hh          20
-rwxr-xr-x  src/nix-channel/nix-channel.cc    14
6 files changed, 63 insertions, 49 deletions
diff --git a/src/libexpr/common-eval-args.cc b/src/libexpr/common-eval-args.cc
index 37c74a94b2..13950ab8d1 100644
--- a/src/libexpr/common-eval-args.cc
+++ b/src/libexpr/common-eval-args.cc
@@ -45,9 +45,11 @@ Bindings * MixEvalArgs::getAutoArgs(EvalState & state)
 
 Path lookupFileArg(EvalState & state, string s)
 {
-    if (isUri(s))
-        return getDownloader()->downloadCached(state.store, s, true).path;
-    else if (s.size() > 2 && s.at(0) == '<' && s.at(s.size() - 1) == '>') {
+    if (isUri(s)) {
+        CachedDownloadRequest request(s);
+        request.unpack = true;
+        return getDownloader()->downloadCached(state.store, request).path;
+    } else if (s.size() > 2 && s.at(0) == '<' && s.at(s.size() - 1) == '>') {
         Path p = s.substr(1, s.size() - 2);
         return state.findFile(p);
     } else
diff --git a/src/libexpr/parser.y b/src/libexpr/parser.y
index 29860fb057..967c88d9bc 100644
--- a/src/libexpr/parser.y
+++ b/src/libexpr/parser.y
@@ -677,7 +677,9 @@ std::pair<bool, std::string> EvalState::resolveSearchPathElem(const SearchPathEl
 
     if (isUri(elem.second)) {
         try {
-            res = { true, getDownloader()->downloadCached(store, elem.second, true).path };
+            CachedDownloadRequest request(elem.second);
+            request.unpack = true;
+            res = { true, getDownloader()->downloadCached(store, request).path };
         } catch (DownloadError & e) {
             printError(format("warning: Nix search path entry '%1%' cannot be downloaded, ignoring") % elem.second);
             res = { false, "" };
diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc
index 55a1bde117..070e72f3a9 100644
--- a/src/libexpr/primops.cc
+++ b/src/libexpr/primops.cc
@@ -2050,9 +2050,9 @@ static void prim_splitVersion(EvalState & state, const Pos & pos, Value * * args
 void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v,
     const string & who, bool unpack, const std::string & defaultName)
 {
-    string url;
-    Hash expectedHash;
-    string name = defaultName;
+    CachedDownloadRequest request("");
+    request.unpack = unpack;
+    request.name = defaultName;
 
     state.forceValue(*args[0]);
 
@@ -2063,27 +2063,27 @@ void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v,
         for (auto & attr : *args[0]->attrs) {
             string n(attr.name);
             if (n == "url")
-                url = state.forceStringNoCtx(*attr.value, *attr.pos);
+                request.uri = state.forceStringNoCtx(*attr.value, *attr.pos);
             else if (n == "sha256")
-                expectedHash = Hash(state.forceStringNoCtx(*attr.value, *attr.pos), htSHA256);
+                request.expectedHash = Hash(state.forceStringNoCtx(*attr.value, *attr.pos), htSHA256);
             else if (n == "name")
-                name = state.forceStringNoCtx(*attr.value, *attr.pos);
+                request.name = state.forceStringNoCtx(*attr.value, *attr.pos);
             else
                 throw EvalError(format("unsupported argument '%1%' to '%2%', at %3%") % attr.name % who % attr.pos);
         }
 
-        if (url.empty())
+        if (request.uri.empty())
             throw EvalError(format("'url' argument required, at %1%") % pos);
 
     } else
-        url = state.forceStringNoCtx(*args[0], pos);
+        request.uri = state.forceStringNoCtx(*args[0], pos);
 
-    state.checkURI(url);
+    state.checkURI(request.uri);
 
-    if (evalSettings.pureEval && !expectedHash)
+    if (evalSettings.pureEval && !request.expectedHash)
         throw Error("in pure evaluation mode, '%s' requires a 'sha256' argument", who);
 
-    Path res = getDownloader()->downloadCached(state.store, url, unpack, name, expectedHash).path;
+    Path res = getDownloader()->downloadCached(state.store, request).path;
 
     if (state.allowedPaths)
         state.allowedPaths->insert(res);
diff --git a/src/libstore/download.cc b/src/libstore/download.cc
index 342a8aa214..892c1b21dc 100644
--- a/src/libstore/download.cc
+++ b/src/libstore/download.cc
@@ -293,10 +293,10 @@ struct CurlDownloader : public Downloader
             long httpStatus = 0;
             curl_easy_getinfo(req, CURLINFO_RESPONSE_CODE, &httpStatus);
 
-            char * effectiveUrlCStr;
-            curl_easy_getinfo(req, CURLINFO_EFFECTIVE_URL, &effectiveUrlCStr);
-            if (effectiveUrlCStr)
-                result.effectiveUrl = effectiveUrlCStr;
+            char * effectiveUriCStr;
+            curl_easy_getinfo(req, CURLINFO_EFFECTIVE_URL, &effectiveUriCStr);
+            if (effectiveUriCStr)
+                result.effectiveUri = effectiveUriCStr;
 
             debug("finished %s of '%s'; curl status = %d, HTTP status = %d, body = %d bytes",
                 request.verb(), request.uri, code, httpStatus, result.bodySize);
@@ -737,18 +737,20 @@ void Downloader::download(DownloadRequest && request, Sink & sink)
     }
 }
 
-CachedDownloadResult Downloader::downloadCached(ref<Store> store, const string & url_, bool unpack, string name, const Hash & expectedHash, string * effectiveUrl, int ttl)
+CachedDownloadResult Downloader::downloadCached(
+    ref<Store> store, const CachedDownloadRequest & request)
 {
-    auto url = resolveUri(url_);
+    auto url = resolveUri(request.uri);
 
+    auto name = request.name;
     if (name == "") {
         auto p = url.rfind('/');
         if (p != string::npos) name = string(url, p + 1);
     }
 
     Path expectedStorePath;
-    if (expectedHash) {
-        expectedStorePath = store->makeFixedOutputPath(unpack, expectedHash, name);
+    if (request.expectedHash) {
+        expectedStorePath = store->makeFixedOutputPath(request.unpack, request.expectedHash, name);
         if (store->isValidPath(expectedStorePath)) {
             CachedDownloadResult result;
             result.storePath = expectedStorePath;
@@ -782,10 +784,9 @@ CachedDownloadResult Downloader::downloadCached(ref<Store> store, const string &
             auto ss = tokenizeString<vector<string>>(readFile(dataFile), "\n");
             if (ss.size() >= 3 && ss[0] == url) {
                 time_t lastChecked;
-                if (string2Int(ss[2], lastChecked) && lastChecked + ttl >= time(0)) {
+                if (string2Int(ss[2], lastChecked) && lastChecked + request.ttl >= time(0)) {
                     skip = true;
-                    if (effectiveUrl)
-                        *effectiveUrl = url_;
+                    result.effectiveUri = request.uri;
                     result.etag = ss[1];
                 } else if (!ss[1].empty()) {
                     debug(format("verifying previous ETag '%1%'") % ss[1]);
@@ -799,18 +800,17 @@ CachedDownloadResult Downloader::downloadCached(ref<Store> store, const string &
     if (!skip) {
 
         try {
-            DownloadRequest request(url);
-            request.expectedETag = expectedETag;
-            auto res = download(request);
-            if (effectiveUrl)
-                *effectiveUrl = res.effectiveUrl;
+            DownloadRequest request2(url);
+            request2.expectedETag = expectedETag;
+            auto res = download(request2);
+            result.effectiveUri = res.effectiveUri;
             result.etag = res.etag;
 
             if (!res.cached) {
                 ValidPathInfo info;
                 StringSink sink;
                 dumpString(*res.data, sink);
-                Hash hash = hashString(expectedHash ? expectedHash.type : htSHA256, *res.data);
+                Hash hash = hashString(request.expectedHash ? request.expectedHash.type : htSHA256, *res.data);
                 info.path = store->makeFixedOutputPath(false, hash, name);
                 info.narHash = hashString(htSHA256, *sink.s);
                 info.narSize = sink.s->size();
@@ -830,7 +830,7 @@ CachedDownloadResult Downloader::downloadCached(ref<Store> store, const string &
         }
     }
 
-    if (unpack) {
+    if (request.unpack) {
         Path unpackedLink = cacheDir + "/" + baseNameOf(storePath) + "-unpacked";
         PathLocks lock2({unpackedLink}, fmt("waiting for lock on '%1%'...", unpackedLink));
         Path unpackedStorePath;
@@ -853,11 +853,11 @@ CachedDownloadResult Downloader::downloadCached(ref<Store> store, const string &
     }
 
     if (expectedStorePath != "" && storePath != expectedStorePath) {
-        Hash gotHash = unpack
-            ? hashPath(expectedHash.type, store->toRealPath(storePath)).first
-            : hashFile(expectedHash.type, store->toRealPath(storePath));
+        Hash gotHash = request.unpack
+            ? hashPath(request.expectedHash.type, store->toRealPath(storePath)).first
+            : hashFile(request.expectedHash.type, store->toRealPath(storePath));
         throw nix::Error("hash mismatch in file downloaded from '%s':\n  wanted: %s\n  got:    %s",
-            url, expectedHash.to_string(), gotHash.to_string());
+            url, request.expectedHash.to_string(), gotHash.to_string());
     }
 
     result.storePath = storePath;
diff --git a/src/libstore/download.hh b/src/libstore/download.hh
index dae082ab9a..9e965b506d 100644
--- a/src/libstore/download.hh
+++ b/src/libstore/download.hh
@@ -57,11 +57,23 @@ struct DownloadResult
 {
     bool cached = false;
     std::string etag;
-    std::string effectiveUrl;
+    std::string effectiveUri;
     std::shared_ptr<std::string> data;
     uint64_t bodySize = 0;
 };
 
+struct CachedDownloadRequest
+{
+    std::string uri;
+    bool unpack = false;
+    std::string name;
+    Hash expectedHash;
+    unsigned int ttl = settings.tarballTtl;
+
+    CachedDownloadRequest(const std::string & uri)
+        : uri(uri) { }
+};
+
 struct CachedDownloadResult
 {
     // Note: 'storePath' may be different from 'path' when using a
@@ -69,6 +81,7 @@ struct CachedDownloadResult
     Path storePath;
     Path path;
     std::optional<std::string> etag;
+    std::string effectiveUri;
 };
 
 class Store;
@@ -96,10 +109,7 @@ struct Downloader
        and is more recent than ‘tarball-ttl’ seconds. Otherwise,
        use the recorded ETag to verify if the server has a more
        recent version, and if so, download it to the Nix store. */
-    CachedDownloadResult downloadCached(
-        ref<Store> store, const string & uri, bool unpack, string name = "",
-        const Hash & expectedHash = Hash(), string * effectiveUri = nullptr,
-        int ttl = settings.tarballTtl);
+    CachedDownloadResult downloadCached(ref<Store> store, const CachedDownloadRequest & request);
 
     enum Error { NotFound, Forbidden, Misc, Transient, Interrupted };
 };
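For illustration only (not part of the commit): a hedged sketch of how a caller might pin a fixed-output download with the new request struct, assuming only the fields declared in CachedDownloadRequest above; the URL, name, and hash string are placeholders:

    CachedDownloadRequest request("https://example.org/source.tar.xz");  // placeholder URL
    request.unpack = true;                              // unpack the tarball into the store
    request.name = "source";                            // store name; defaults to the URL's basename
    request.expectedHash = Hash(sha256Str, htSHA256);   // placeholder hash string; if the fixed-output
                                                        // path is already valid, no download happens
    request.ttl = 0;                                    // revalidate with the server instead of
                                                        // trusting a copy younger than tarball-ttl
    auto result = getDownloader()->downloadCached(store, request);
    // result.path is the (possibly unpacked) path; result.etag and result.effectiveUri
    // carry the server's ETag and the post-redirect URI for the caller to record.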
diff --git a/src/nix-channel/nix-channel.cc b/src/nix-channel/nix-channel.cc
index 7b23088a2c..bd1371dbae 100755
--- a/src/nix-channel/nix-channel.cc
+++ b/src/nix-channel/nix-channel.cc
@@ -86,10 +86,12 @@ static void update(const StringSet & channelNames)
         // We want to download the url to a file to see if it's a tarball while also checking if we
         // got redirected in the process, so that we can grab the various parts of a nix channel
         // definition from a consistent location if the redirect changes mid-download.
-        std::string effectiveUrl;
+        CachedDownloadRequest request(url);
+        request.ttl = 0;
         auto dl = getDownloader();
-        auto filename = dl->downloadCached(store, url, false, "", Hash(), &effectiveUrl, 0).path;
-        url = chomp(std::move(effectiveUrl));
+        auto result = dl->downloadCached(store, request);
+        auto filename = result.path;
+        url = chomp(result.effectiveUri);
 
         // If the URL contains a version number, append it to the name
         // attribute (so that "nix-env -q" on the channels profile
@@ -121,12 +123,10 @@ static void update(const StringSet & channelNames)
             }
 
             // Download the channel tarball.
-            auto fullURL = url + "/nixexprs.tar.xz";
             try {
-                filename = dl->downloadCached(store, fullURL, false).path;
+                filename = dl->downloadCached(store, CachedDownloadRequest(url + "/nixexprs.tar.xz")).path;
             } catch (DownloadError & e) {
-                fullURL = url + "/nixexprs.tar.bz2";
-                filename = dl->downloadCached(store, fullURL, false).path;
+                filename = dl->downloadCached(store, CachedDownloadRequest(url + "/nixexprs.tar.bz2")).path;
             }
             chomp(filename);
         }
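As the comment at the top of update() explains, nix-channel needs to observe redirects so it can fetch the channel's pieces from a consistent base URL. Under the old interface the redirect target came back through a string * effectiveUrl out-parameter; with the new interface it travels in the result. A hedged sketch of that pattern, with the channel URL as a placeholder:

    CachedDownloadRequest request(url);                  // a channel URL from the profile
    request.ttl = 0;                                     // skip the cached copy so the redirect is seen
    auto result = getDownloader()->downloadCached(store, request);
    url = chomp(result.effectiveUri);                    // continue with the post-redirect base URL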