Diffstat (limited to 'src/libstore')
-rw-r--r--  src/libstore/binary-cache-store.cc        |  18
-rw-r--r--  src/libstore/build.cc                     | 100
-rw-r--r--  src/libstore/builtins/fetchurl.cc         |   3
-rw-r--r--  src/libstore/download.cc                  | 160
-rw-r--r--  src/libstore/download.hh                  |  61
-rw-r--r--  src/libstore/globals.hh                   |   2
-rw-r--r--  src/libstore/http-binary-cache-store.cc   |  58
-rw-r--r--  src/libstore/store-api.cc                 | 107
-rw-r--r--  src/libstore/store-api.hh                 |   3
9 files changed, 319 insertions, 193 deletions
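The hunks below replace the downloader's internal embargo/retry queue with calls to retry(), retrySleepTime(), downloadSettings.tries and DownloadError::isTransient() from a new retry.hh header. That header lives outside src/libstore, so it is not part of this diffstat. The following is only a rough sketch of the interface these call sites appear to assume (exponential backoff with jitter, retrying only transient errors), not the upstream implementation; the BaseError base class and its virtual isTransient() are likewise assumptions inferred from the 'override' added in download.hh.

// retry.hh -- sketch of the helper assumed by the call sites below; not part of this diff.
#pragma once

#include "logging.hh"

#include <chrono>
#include <cmath>
#include <functional>
#include <random>
#include <thread>

namespace nix {

/* Backoff delay in milliseconds for the given 1-based attempt:
   250 ms doubled per attempt, with up to 50% random jitter. */
inline unsigned int retrySleepTime(unsigned int attempt)
{
    std::random_device rd;
    std::mt19937 mt19937(rd());
    return 250.0f * std::pow(2.0f,
        attempt - 1 + std::uniform_real_distribution<>(0.0, 0.5)(mt19937));
}

/* Run 'f', retrying up to 'attempts' times (sleeping in between) as
   long as the thrown error reports itself as transient. */
template<typename C>
C retry(unsigned int attempts, std::function<C()> && f)
{
    unsigned int attempt = 0;
    while (true) {
        try {
            return f();
        } catch (BaseError & e) { // assumed error base class providing isTransient()
            ++attempt;
            if (attempt >= attempts || !e.isTransient())
                throw;
            auto ms = retrySleepTime(attempt);
            warn("%s; retrying in %d ms", e.what(), ms);
            std::this_thread::sleep_for(std::chrono::milliseconds(ms));
        }
    }
}

}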
diff --git a/src/libstore/binary-cache-store.cc b/src/libstore/binary-cache-store.cc
index 4527ee6ba660..8b736056e01d 100644
--- a/src/libstore/binary-cache-store.cc
+++ b/src/libstore/binary-cache-store.cc
@@ -10,6 +10,8 @@
 #include "nar-info-disk-cache.hh"
 #include "nar-accessor.hh"
 #include "json.hh"
+#include "retry.hh"
+#include "download.hh"
 
 #include <chrono>
 
@@ -79,13 +81,15 @@ void BinaryCacheStore::getFile(const std::string & path, Sink & sink)
 
 std::shared_ptr<std::string> BinaryCacheStore::getFile(const std::string & path)
 {
-    StringSink sink;
-    try {
-        getFile(path, sink);
-    } catch (NoSuchBinaryCacheFile &) {
-        return nullptr;
-    }
-    return sink.s;
+    return retry<std::shared_ptr<std::string>>(downloadSettings.tries, [&]() -> std::shared_ptr<std::string> {
+        StringSink sink;
+        try {
+            getFile(path, sink);
+        } catch (NoSuchBinaryCacheFile &) {
+            return nullptr;
+        }
+        return sink.s;
+    });
 }
 
 Path BinaryCacheStore::narInfoFileFor(const Path & storePath)
diff --git a/src/libstore/build.cc b/src/libstore/build.cc
index 0bd7388097c6..350ac4092854 100644
--- a/src/libstore/build.cc
+++ b/src/libstore/build.cc
@@ -38,6 +38,7 @@
 #include <unistd.h>
 #include <errno.h>
 #include <cstring>
+#include <termios.h>
 
 #include <pwd.h>
 #include <grp.h>
@@ -265,6 +266,12 @@ public:
     /* Set if at least one derivation had a timeout. */
     bool timedOut;
 
+    /* Set if at least one derivation fails with a hash mismatch. */
+    bool hashMismatch;
+
+    /* Set if at least one derivation is not deterministic in check mode. */
+    bool checkMismatch;
+
     LocalStore & store;
 
     std::unique_ptr<HookInstance> hook;
@@ -1558,8 +1565,8 @@ void DerivationGoal::buildDone()
     if (hook) {
         hook->builderOut.readSide = -1;
         hook->fromHook.readSide = -1;
-    }
-    else builderOut.readSide = -1;
+    } else
+        builderOut.readSide = -1;
 
     /* Close the log file. */
     closeLogFile();
@@ -2181,7 +2188,48 @@ void DerivationGoal::startBuilder()
     Path logFile = openLogFile();
 
     /* Create a pipe to get the output of the builder. */
-    builderOut.create();
+    //builderOut.create();
+
+    builderOut.readSide = posix_openpt(O_RDWR | O_NOCTTY);
+    if (!builderOut.readSide)
+        throw SysError("opening pseudoterminal master");
+
+    std::string slaveName(ptsname(builderOut.readSide.get()));
+
+    if (buildUser) {
+        if (chmod(slaveName.c_str(), 0600))
+            throw SysError("changing mode of pseudoterminal slave");
+
+        if (chown(slaveName.c_str(), buildUser->getUID(), 0))
+            throw SysError("changing owner of pseudoterminal slave");
+    } else {
+        if (grantpt(builderOut.readSide.get()))
+            throw SysError("granting access to pseudoterminal slave");
+    }
+
+    #if 0
+    // Mount the pt in the sandbox so that the "tty" command works.
+    // FIXME: this doesn't work with the new devpts in the sandbox.
+    if (useChroot)
+        dirsInChroot[slaveName] = {slaveName, false};
+    #endif
+
+    if (unlockpt(builderOut.readSide.get()))
+        throw SysError("unlocking pseudoterminal");
+
+    builderOut.writeSide = open(slaveName.c_str(), O_RDWR | O_NOCTTY);
+    if (!builderOut.writeSide)
+        throw SysError("opening pseudoterminal slave");
+
+    // Put the pt into raw mode to prevent \n -> \r\n translation.
+    struct termios term;
+    if (tcgetattr(builderOut.writeSide.get(), &term))
+        throw SysError("getting pseudoterminal attributes");
+
+    cfmakeraw(&term);
+
+    if (tcsetattr(builderOut.writeSide.get(), TCSANOW, &term))
+        throw SysError("putting pseudoterminal into raw mode");
 
     result.startTime = time(0);
 
@@ -2406,6 +2454,9 @@ void DerivationGoal::initEnv()
        may change that in the future. So tell the builder which file
        descriptor to use for that. */
     env["NIX_LOG_FD"] = "2";
+
+    /* Trigger colored output in various tools. */
+    env["TERM"] = "xterm-256color";
 }
 
 
@@ -3168,6 +3219,7 @@ void DerivationGoal::registerOutputs()
 
                 /* Throw an error after registering the path as
                    valid. */
+                worker.hashMismatch = true;
                 delayedException = std::make_exception_ptr(
                     BuildError("hash mismatch in fixed-output derivation '%s':\n wanted: %s\n got: %s",
                         dest, h.to_string(), h2.to_string()));
@@ -3210,6 +3262,7 @@ void DerivationGoal::registerOutputs()
             if (!worker.store.isValidPath(path)) continue;
             auto info = *worker.store.queryPathInfo(path);
             if (hash.first != info.narHash) {
+                worker.checkMismatch = true;
                 if (settings.runDiffHook || settings.keepFailed) {
                     Path dst = worker.store.toRealPath(path + checkSuffix);
                     deletePath(dst);
@@ -3221,10 +3274,10 @@ void DerivationGoal::registerOutputs()
                         buildUser ? buildUser->getGID() : getgid(),
                         path, dst, drvPath, tmpDir);
 
-                    throw Error(format("derivation '%1%' may not be deterministic: output '%2%' differs from '%3%'")
+                    throw NotDeterministic(format("derivation '%1%' may not be deterministic: output '%2%' differs from '%3%'")
                         % drvPath % path % dst);
                 } else
-                    throw Error(format("derivation '%1%' may not be deterministic: output '%2%' differs")
+                    throw NotDeterministic(format("derivation '%1%' may not be deterministic: output '%2%' differs")
                         % drvPath % path);
             }
 
@@ -4056,6 +4109,8 @@ Worker::Worker(LocalStore & store)
    lastWokenUp = steady_time_point::min();
     permanentFailure = false;
     timedOut = false;
+    hashMismatch = false;
+    checkMismatch = false;
 }
 
 
@@ -4361,14 +4416,15 @@ void Worker::waitForInput()
         for (auto & k : fds2) {
             if (FD_ISSET(k, &fds)) {
                 ssize_t rd = read(k, buffer.data(), buffer.size());
-                if (rd == -1) {
-                    if (errno != EINTR)
-                        throw SysError(format("reading from %1%")
-                            % goal->getName());
-                } else if (rd == 0) {
+                // FIXME: is there a cleaner way to handle pt close
+                // than EIO? Is this even standard?
+                if (rd == 0 || (rd == -1 && errno == EIO)) {
                     debug(format("%1%: got EOF") % goal->getName());
                     goal->handleEOF(k);
                     j->fds.erase(k);
+                } else if (rd == -1) {
+                    if (errno != EINTR)
+                        throw SysError("%s: read failed", goal->getName());
                 } else {
                     printMsg(lvlVomit, format("%1%: read %2% bytes")
                         % goal->getName() % rd);
@@ -4415,7 +4471,29 @@ void Worker::waitForInput()
 
 unsigned int Worker::exitStatus()
 {
-    return timedOut ? 101 : (permanentFailure ? 100 : 1);
+    /*
+     * 1100100
+     *    ^^^^
+     *    |||`- timeout
+     *    ||`-- output hash mismatch
+     *    |`--- build failure
+     *    `---- not deterministic
+     */
+    unsigned int mask = 0;
+    bool buildFailure = permanentFailure || timedOut || hashMismatch;
+    if (buildFailure)
+        mask |= 0x04;  // 100
+    if (timedOut)
+        mask |= 0x01;  // 101
+    if (hashMismatch)
+        mask |= 0x02;  // 102
+    if (checkMismatch) {
+        mask |= 0x08;  // 104
+    }
+
+    if (mask)
+        mask |= 0x60;
+    return mask ? mask : 1;
 }
 
diff --git a/src/libstore/builtins/fetchurl.cc b/src/libstore/builtins/fetchurl.cc
index 92aec63a0379..b1af3b4fc316 100644
--- a/src/libstore/builtins/fetchurl.cc
+++ b/src/libstore/builtins/fetchurl.cc
@@ -64,7 +64,8 @@ void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData)
         try {
             if (!hasSuffix(hashedMirror, "/")) hashedMirror += '/';
             auto ht = parseHashType(getAttr("outputHashAlgo"));
-            fetch(hashedMirror + printHashType(ht) + "/" + Hash(getAttr("outputHash"), ht).to_string(Base16, false));
+            auto h = Hash(getAttr("outputHash"), ht);
+            fetch(hashedMirror + printHashType(h.type) + "/" + h.to_string(Base16, false));
             return;
         } catch (Error & e) {
             debug(e.what());
diff --git a/src/libstore/download.cc b/src/libstore/download.cc
index 22382ab1d6e8..7a2af237ee8f 100644
--- a/src/libstore/download.cc
+++ b/src/libstore/download.cc
@@ -8,6 +8,7 @@
 #include "compression.hh"
 #include "pathlocks.hh"
 #include "finally.hh"
+#include "retry.hh"
 
 #ifdef ENABLE_S3
 #include <aws/core/client/ClientConfiguration.h>
@@ -19,34 +20,16 @@
 #include <curl/curl.h>
 
 #include <algorithm>
-#include <cmath>
 #include <cstring>
 #include <iostream>
 #include <queue>
-#include <random>
 #include <thread>
 
 using namespace std::string_literals;
 
 namespace nix {
 
-struct DownloadSettings : Config
-{
-    Setting<bool> enableHttp2{this, true, "http2",
-        "Whether to enable HTTP/2 support."};
-
-    Setting<std::string> userAgentSuffix{this, "", "user-agent-suffix",
-        "String appended to the user agent in HTTP requests."};
-
-    Setting<size_t> httpConnections{this, 25, "http-connections",
-        "Number of parallel HTTP connections.",
-        {"binary-caches-parallel-connections"}};
-
-    Setting<unsigned long> connectTimeout{this, 0, "connect-timeout",
-        "Timeout for connecting to servers during downloads. 0 means use curl's builtin default."};
-};
-
-static DownloadSettings downloadSettings;
+DownloadSettings downloadSettings;
 
 static GlobalConfig::Register r1(&downloadSettings);
 
@@ -62,9 +45,6 @@ struct CurlDownloader : public Downloader
 {
     CURLM * curlm = 0;
 
-    std::random_device rd;
-    std::mt19937 mt19937;
-
     struct DownloadItem : public std::enable_shared_from_this<DownloadItem>
     {
         CurlDownloader & downloader;
@@ -77,12 +57,6 @@ struct CurlDownloader : public Downloader
         bool active = false; // whether the handle has been added to the multi object
         std::string status;
 
-        unsigned int attempt = 0;
-
-        /* Don't start this download until the specified time point
-           has been reached. */
-        std::chrono::steady_clock::time_point embargo;
-
         struct curl_slist * requestHeaders = 0;
 
         std::string encoding;
@@ -270,6 +244,8 @@ struct CurlDownloader : public Downloader
             #if LIBCURL_VERSION_NUM >= 0x072f00
             if (downloadSettings.enableHttp2)
                 curl_easy_setopt(req, CURLOPT_HTTP_VERSION, CURL_HTTP_VERSION_2TLS);
+            else
+                curl_easy_setopt(req, CURLOPT_HTTP_VERSION, CURL_HTTP_VERSION_1_1);
             #endif
             curl_easy_setopt(req, CURLOPT_WRITEFUNCTION, DownloadItem::writeCallbackWrapper);
             curl_easy_setopt(req, CURLOPT_WRITEDATA, this);
@@ -319,16 +295,21 @@ struct CurlDownloader : public Downloader
             long httpStatus = 0;
             curl_easy_getinfo(req, CURLINFO_RESPONSE_CODE, &httpStatus);
 
-            char * effectiveUrlCStr;
-            curl_easy_getinfo(req, CURLINFO_EFFECTIVE_URL, &effectiveUrlCStr);
-            if (effectiveUrlCStr)
-                result.effectiveUrl = effectiveUrlCStr;
+            char * effectiveUriCStr;
+            curl_easy_getinfo(req, CURLINFO_EFFECTIVE_URL, &effectiveUriCStr);
+            if (effectiveUriCStr)
+                result.effectiveUri = effectiveUriCStr;
 
             debug("finished %s of '%s'; curl status = %d, HTTP status = %d, body = %d bytes",
                 request.verb(), request.uri, code, httpStatus, result.bodySize);
 
-            if (decompressionSink)
-                decompressionSink->finish();
+            if (decompressionSink) {
+                try {
+                    decompressionSink->finish();
+                } catch (...) {
+                    writeException = std::current_exception();
+                }
+            }
 
             if (code == CURLE_WRITE_ERROR && result.etag == request.expectedETag) {
                 code = CURLE_OK;
@@ -396,9 +377,7 @@ struct CurlDownloader : public Downloader
                 }
             }
 
-            attempt++;
-
-            auto exc =
+            fail(
                 code == CURLE_ABORTED_BY_CALLBACK && _isInterrupted
                 ? DownloadError(Interrupted, fmt("%s of '%s' was interrupted", request.verb(), request.uri))
                 : httpStatus != 0
@@ -409,31 +388,15 @@ struct CurlDownloader : public Downloader
                     )
                 : DownloadError(err,
                     fmt("unable to %s '%s': %s (%d)",
-                        request.verb(), request.uri, curl_easy_strerror(code), code));
-
-            /* If this is a transient error, then maybe retry the
-               download after a while. */
-            if (err == Transient && attempt < request.tries) {
-                int ms = request.baseRetryTimeMs * std::pow(2.0f, attempt - 1 + std::uniform_real_distribution<>(0.0, 0.5)(downloader.mt19937));
-                printError(format("warning: %s; retrying in %d ms") % exc.what() % ms);
-                embargo = std::chrono::steady_clock::now() + std::chrono::milliseconds(ms);
-                downloader.enqueueItem(shared_from_this());
-            }
-            else
-                fail(exc);
+                        request.verb(), request.uri, curl_easy_strerror(code), code)));
         }
    };
 
    struct State
    {
-        struct EmbargoComparator {
-            bool operator() (const std::shared_ptr<DownloadItem> & i1, const std::shared_ptr<DownloadItem> & i2) {
-                return i1->embargo > i2->embargo;
-            }
-        };
        bool quit = false;
-        std::priority_queue<std::shared_ptr<DownloadItem>, std::vector<std::shared_ptr<DownloadItem>>, EmbargoComparator> incoming;
+        std::vector<std::shared_ptr<DownloadItem>> incoming;
    };
 
    Sync<State> state_;
@@ -446,7 +409,6 @@ struct CurlDownloader : public Downloader
    std::thread workerThread;
 
    CurlDownloader()
-        : mt19937(rd())
    {
        static std::once_flag globalInit;
        std::call_once(globalInit, curl_global_init, CURL_GLOBAL_ALL);
@@ -540,9 +502,7 @@ struct CurlDownloader : public Downloader
 
            nextWakeup = std::chrono::steady_clock::time_point();
 
-            /* Add new curl requests from the incoming requests queue,
-               except for requests that are embargoed (waiting for a
-               retry timeout to expire). */
+            /* Add new curl requests from the incoming requests queue. */
            if (extraFDs[0].revents & CURL_WAIT_POLLIN) {
                char buf[1024];
                auto res = read(extraFDs[0].fd, buf, sizeof(buf));
@@ -551,22 +511,9 @@ struct CurlDownloader : public Downloader
            }
 
            std::vector<std::shared_ptr<DownloadItem>> incoming;
-            auto now = std::chrono::steady_clock::now();
-
            {
                auto state(state_.lock());
-                while (!state->incoming.empty()) {
-                    auto item = state->incoming.top();
-                    if (item->embargo <= now) {
-                        incoming.push_back(item);
-                        state->incoming.pop();
-                    } else {
-                        if (nextWakeup == std::chrono::steady_clock::time_point()
-                            || item->embargo < nextWakeup)
-                            nextWakeup = item->embargo;
-                        break;
-                    }
-                }
+                std::swap(state->incoming, incoming);
                quit = state->quit;
            }
 
@@ -593,7 +540,7 @@ struct CurlDownloader : public Downloader
 
        {
            auto state(state_.lock());
-            while (!state->incoming.empty()) state->incoming.pop();
+            state->incoming.clear();
            state->quit = true;
        }
    }
@@ -609,7 +556,7 @@ struct CurlDownloader : public Downloader
            auto state(state_.lock());
            if (state->quit)
                throw nix::Error("cannot enqueue download request because the download thread is shutting down");
-            state->incoming.push(item);
+            state->incoming.push_back(item);
        }
        writeFull(wakeupPipe.writeSide.get(), " ");
    }
@@ -692,7 +639,9 @@ std::future<DownloadResult> Downloader::enqueueDownload(const DownloadRequest &
 
 DownloadResult Downloader::download(const DownloadRequest & request)
 {
-    return enqueueDownload(request).get();
+    return retry<DownloadResult>(request.tries, [&]() {
+        return enqueueDownload(request).get();
+    });
 }
 
 void Downloader::download(DownloadRequest && request, Sink & sink)
@@ -790,20 +739,26 @@ void Downloader::download(DownloadRequest && request, Sink & sink)
    }
 }
 
-Path Downloader::downloadCached(ref<Store> store, const string & url_, bool unpack, string name, const Hash & expectedHash, string * effectiveUrl, int ttl)
+CachedDownloadResult Downloader::downloadCached(
+    ref<Store> store, const CachedDownloadRequest & request)
 {
-    auto url = resolveUri(url_);
+    auto url = resolveUri(request.uri);
 
+    auto name = request.name;
    if (name == "") {
        auto p = url.rfind('/');
        if (p != string::npos) name = string(url, p + 1);
    }
 
    Path expectedStorePath;
-    if (expectedHash) {
-        expectedStorePath = store->makeFixedOutputPath(unpack, expectedHash, name);
-        if (store->isValidPath(expectedStorePath))
-            return store->toRealPath(expectedStorePath);
+    if (request.expectedHash) {
+        expectedStorePath = store->makeFixedOutputPath(request.unpack, request.expectedHash, name);
+        if (store->isValidPath(expectedStorePath)) {
+            CachedDownloadResult result;
+            result.storePath = expectedStorePath;
+            result.path = store->toRealPath(expectedStorePath);
+            return result;
+        }
    }
 
    Path cacheDir = getCacheDir() + "/nix/tarballs";
@@ -822,6 +777,8 @@
 
    bool skip = false;
 
+    CachedDownloadResult result;
+
    if (pathExists(fileLink) && pathExists(dataFile)) {
        storePath = readLink(fileLink);
        store->addTempRoot(storePath);
@@ -829,10 +786,10 @@
            auto ss = tokenizeString<vector<string>>(readFile(dataFile), "\n");
            if (ss.size() >= 3 && ss[0] == url) {
                time_t lastChecked;
-                if (string2Int(ss[2], lastChecked) && lastChecked + ttl >= time(0)) {
+                if (string2Int(ss[2], lastChecked) && (uint64_t) lastChecked + request.ttl >= (uint64_t) time(0)) {
                    skip = true;
-                    if (effectiveUrl)
-                        *effectiveUrl = url_;
+                    result.effectiveUri = request.uri;
+                    result.etag = ss[1];
                } else if (!ss[1].empty()) {
                    debug(format("verifying previous ETag '%1%'") % ss[1]);
                    expectedETag = ss[1];
@@ -845,17 +802,17 @@
    if (!skip) {
 
        try {
-            DownloadRequest request(url);
-            request.expectedETag = expectedETag;
-            auto res = download(request);
-            if (effectiveUrl)
-                *effectiveUrl = res.effectiveUrl;
+            DownloadRequest request2(url);
+            request2.expectedETag = expectedETag;
+            auto res = download(request2);
+            result.effectiveUri = res.effectiveUri;
+            result.etag = res.etag;
 
            if (!res.cached) {
                ValidPathInfo info;
                StringSink sink;
                dumpString(*res.data, sink);
-                Hash hash = hashString(expectedHash ? expectedHash.type : htSHA256, *res.data);
+                Hash hash = hashString(request.expectedHash ? request.expectedHash.type : htSHA256, *res.data);
                info.path = store->makeFixedOutputPath(false, hash, name);
                info.narHash = hashString(htSHA256, *sink.s);
                info.narSize = sink.s->size();
@@ -870,11 +827,12 @@
            writeFile(dataFile, url + "\n" + res.etag + "\n" + std::to_string(time(0)) + "\n");
        } catch (DownloadError & e) {
            if (storePath.empty()) throw;
-            printError(format("warning: %1%; using cached result") % e.msg());
+            warn("%s; using cached result", e.msg());
+            result.etag = expectedETag;
        }
    }
 
-    if (unpack) {
+    if (request.unpack) {
        Path unpackedLink = cacheDir + "/" + baseNameOf(storePath) + "-unpacked";
        PathLocks lock2({unpackedLink}, fmt("waiting for lock on '%1%'...", unpackedLink));
        Path unpackedStorePath;
@@ -897,14 +855,17 @@
    }
 
    if (expectedStorePath != "" && storePath != expectedStorePath) {
-        Hash gotHash = unpack
-            ? hashPath(expectedHash.type, store->toRealPath(storePath)).first
-            : hashFile(expectedHash.type, store->toRealPath(storePath));
-        throw nix::Error("hash mismatch in file downloaded from '%s':\n  wanted: %s\n  got:    %s",
-            url, expectedHash.to_string(), gotHash.to_string());
+        unsigned int statusCode = 102;
+        Hash gotHash = request.unpack
+            ? hashPath(request.expectedHash.type, store->toRealPath(storePath)).first
+            : hashFile(request.expectedHash.type, store->toRealPath(storePath));
+        throw nix::Error(statusCode, "hash mismatch in file downloaded from '%s':\n  wanted: %s\n  got:    %s",
+            url, request.expectedHash.to_string(), gotHash.to_string());
    }
 
-    return store->toRealPath(storePath);
+    result.storePath = storePath;
+    result.path = store->toRealPath(storePath);
+    return result;
 }
 
 
@@ -917,5 +878,4 @@ bool isUri(const string & s)
    return scheme == "http" || scheme == "https" || scheme == "file" || scheme == "channel"
        || scheme == "git" || scheme == "s3" || scheme == "ssh";
 }
-
 }
diff --git a/src/libstore/download.hh b/src/libstore/download.hh
index f0228f7d053a..9e965b506d0a 100644
--- a/src/libstore/download.hh
+++ b/src/libstore/download.hh
@@ -9,13 +9,34 @@
 
 namespace nix {
 
+struct DownloadSettings : Config
+{
+    Setting<bool> enableHttp2{this, true, "http2",
+        "Whether to enable HTTP/2 support."};
+
+    Setting<std::string> userAgentSuffix{this, "", "user-agent-suffix",
+        "String appended to the user agent in HTTP requests."};
+
+    Setting<size_t> httpConnections{this, 25, "http-connections",
+        "Number of parallel HTTP connections.",
+        {"binary-caches-parallel-connections"}};
+
+    Setting<unsigned long> connectTimeout{this, 0, "connect-timeout",
+        "Timeout for connecting to servers during downloads. 0 means use curl's builtin default."};
+
+    Setting<unsigned int> tries{this, 5, "download-attempts",
+        "How often Nix will attempt to download a file before giving up."};
+};
+
+extern DownloadSettings downloadSettings;
+
 struct DownloadRequest
 {
     std::string uri;
     std::string expectedETag;
     bool verifyTLS = true;
     bool head = false;
-    size_t tries = 5;
+    size_t tries = downloadSettings.tries;
     unsigned int baseRetryTimeMs = 250;
     ActivityId parentAct;
     bool decompress = true;
@@ -36,11 +57,33 @@ struct DownloadResult
 {
     bool cached = false;
     std::string etag;
-    std::string effectiveUrl;
+    std::string effectiveUri;
     std::shared_ptr<std::string> data;
     uint64_t bodySize = 0;
 };
 
+struct CachedDownloadRequest
+{
+    std::string uri;
+    bool unpack = false;
+    std::string name;
+    Hash expectedHash;
+    unsigned int ttl = settings.tarballTtl;
+
+    CachedDownloadRequest(const std::string & uri)
+        : uri(uri) { }
+};
+
+struct CachedDownloadResult
+{
+    // Note: 'storePath' may be different from 'path' when using a
+    // chroot store.
+    Path storePath;
+    Path path;
+    std::optional<std::string> etag;
+    std::string effectiveUri;
+};
+
 class Store;
 
 struct Downloader
@@ -53,19 +96,20 @@ struct Downloader
 
     std::future<DownloadResult> enqueueDownload(const DownloadRequest & request);
 
-    /* Synchronously download a file. */
+    /* Synchronously download a file. The request will be retried in
+       case of transient failures. */
     DownloadResult download(const DownloadRequest & request);
 
     /* Download a file, writing its data to a sink. The sink will be
-       invoked on the thread of the caller. */
+       invoked on the thread of the caller. The request will not be
+       retried in case of transient failures. */
     void download(DownloadRequest && request, Sink & sink);
 
     /* Check if the specified file is already in ~/.cache/nix/tarballs
       and is more recent than ‘tarball-ttl’ seconds. Otherwise,
      use the recorded ETag to verify if the server has a more
      recent version, and if so, download it to the Nix store.
    */
-    Path downloadCached(ref<Store> store, const string & uri, bool unpack, string name = "",
-        const Hash & expectedHash = Hash(), string * effectiveUri = nullptr, int ttl = settings.tarballTtl);
+    CachedDownloadResult downloadCached(ref<Store> store, const CachedDownloadRequest & request);
 
     enum Error { NotFound, Forbidden, Misc, Transient, Interrupted };
 };
@@ -84,6 +128,11 @@ public:
     DownloadError(Downloader::Error error, const FormatOrString & fs)
         : Error(fs), error(error)
     { }
+
+    bool isTransient() override
+    {
+        return error == Downloader::Error::Transient;
+    }
 };
 
 bool isUri(const string & s);
diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh
index 53efc6a90fb6..0af8215d1fd8 100644
--- a/src/libstore/globals.hh
+++ b/src/libstore/globals.hh
@@ -255,7 +255,7 @@ public:
         "Secret keys with which to sign local builds."};
 
     Setting<unsigned int> tarballTtl{this, 60 * 60, "tarball-ttl",
-        "How soon to expire files fetched by builtins.fetchTarball and builtins.fetchurl."};
+        "How long downloaded files are considered up-to-date."};
 
     Setting<bool> requireSigs{this, true, "require-sigs",
         "Whether to check that any non-content-addressed path added to the "
diff --git a/src/libstore/http-binary-cache-store.cc b/src/libstore/http-binary-cache-store.cc
index 8da0e2f9d82a..5633b4355d25 100644
--- a/src/libstore/http-binary-cache-store.cc
+++ b/src/libstore/http-binary-cache-store.cc
@@ -2,6 +2,7 @@
 #include "download.hh"
 #include "globals.hh"
 #include "nar-info-disk-cache.hh"
+#include "retry.hh"
 
 namespace nix {
 
@@ -84,7 +85,6 @@ protected:
         try {
             DownloadRequest request(cacheUri + "/" + path);
             request.head = true;
-            request.tries = 5;
             getDownloader()->download(request);
             return true;
         } catch (DownloadError & e) {
@@ -114,7 +114,6 @@ protected:
     DownloadRequest makeRequest(const std::string & path)
     {
         DownloadRequest request(cacheUri + "/" + path);
-        request.tries = 8;
         return request;
     }
 
@@ -137,21 +136,46 @@ protected:
     {
         checkEnabled();
 
-        auto request(makeRequest(path));
-
-        getDownloader()->enqueueDownload(request,
-            {[callback, this](std::future<DownloadResult> result) {
-                try {
-                    callback(result.get().data);
-                } catch (DownloadError & e) {
-                    if (e.error == Downloader::NotFound || e.error == Downloader::Forbidden)
-                        return callback(std::shared_ptr<std::string>());
-                    maybeDisable();
-                    callback.rethrow();
-                } catch (...) {
-                    callback.rethrow();
-                }
-            }});
+        struct State
+        {
+            DownloadRequest request;
+            std::function<void()> tryDownload;
+            unsigned int attempt = 0;
+            State(DownloadRequest && request) : request(request) {}
+        };
+
+        auto state = std::make_shared<State>(makeRequest(path));
+
+        state->tryDownload = [callback, state, this]() {
+            getDownloader()->enqueueDownload(state->request,
+                {[callback, state, this](std::future<DownloadResult> result) {
+                    try {
+                        callback(result.get().data);
+                    } catch (DownloadError & e) {
+                        if (e.error == Downloader::NotFound || e.error == Downloader::Forbidden)
+                            return callback(std::shared_ptr<std::string>());
+                        ++state->attempt;
+                        if (state->attempt < state->request.tries && e.isTransient()) {
+                            auto ms = retrySleepTime(state->attempt);
+                            warn("%s; retrying in %d ms", e.what(), ms);
+                            /* We can't sleep here because that would
+                               block the download thread. So use a
+                               separate thread for sleeping. */
+                            std::thread([state, ms]() {
+                                std::this_thread::sleep_for(std::chrono::milliseconds(ms));
+                                state->tryDownload();
+                            }).detach();
+                        } else {
+                            maybeDisable();
+                            callback.rethrow();
+                        }
+                    } catch (...) {
+                        callback.rethrow();
+                    }
+                }});
+        };
+
+        state->tryDownload();
     }
 };
 
diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc
index c13ff11564ec..92f01fd2e0c7 100644
--- a/src/libstore/store-api.cc
+++ b/src/libstore/store-api.cc
@@ -6,10 +6,11 @@
 #include "thread-pool.hh"
 #include "json.hh"
 #include "derivations.hh"
+#include "retry.hh"
+#include "download.hh"
 
 #include <future>
 
-
 namespace nix {
 
 
@@ -85,18 +86,25 @@ string storePathToHash(const Path & path)
 void checkStoreName(const string & name)
 {
     string validChars = "+-._?=";
+
+    auto baseError = format("The path name '%2%' is invalid: %3%. "
+        "Path names are alphanumeric and can include the symbols %1% "
+        "and must not begin with a period. "
+        "Note: If '%2%' is a source file and you cannot rename it on "
+        "disk, builtins.path { name = ... } can be used to give it an "
+        "alternative name.") % validChars % name;
+
     /* Disallow names starting with a dot for possible security
        reasons (e.g., "." and ".."). */
     if (string(name, 0, 1) == ".")
-        throw Error(format("illegal name: '%1%'") % name);
+        throw Error(baseError % "it is illegal to start the name with a period");
     for (auto & i : name)
         if (!((i >= 'A' && i <= 'Z') ||
              (i >= 'a' && i <= 'z') ||
              (i >= '0' && i <= '9') ||
              validChars.find(i) != string::npos))
        {
-            throw Error(format("invalid character '%1%' in name '%2%'")
-                % i % name);
+            throw Error(baseError % (format("the '%1%' character is invalid") % i));
        }
 }
 
@@ -572,54 +580,57 @@ void Store::buildPaths(const PathSet & paths, BuildMode buildMode)
 
 
 void copyStorePath(ref<Store> srcStore, ref<Store> dstStore,
     const Path & storePath, RepairFlag repair, CheckSigsFlag checkSigs)
 {
-    auto srcUri = srcStore->getUri();
-    auto dstUri = dstStore->getUri();
-
-    Activity act(*logger, lvlInfo, actCopyPath,
-        srcUri == "local" || srcUri == "daemon"
-        ? fmt("copying path '%s' to '%s'", storePath, dstUri)
-        : dstUri == "local" || dstUri == "daemon"
-        ? fmt("copying path '%s' from '%s'", storePath, srcUri)
-        : fmt("copying path '%s' from '%s' to '%s'", storePath, srcUri, dstUri),
-        {storePath, srcUri, dstUri});
-    PushActivity pact(act.id);
-
-    auto info = srcStore->queryPathInfo(storePath);
-
-    uint64_t total = 0;
-
-    if (!info->narHash) {
-        StringSink sink;
-        srcStore->narFromPath({storePath}, sink);
-        auto info2 = make_ref<ValidPathInfo>(*info);
-        info2->narHash = hashString(htSHA256, *sink.s);
-        if (!info->narSize) info2->narSize = sink.s->size();
-        if (info->ultimate) info2->ultimate = false;
-        info = info2;
-
-        StringSource source(*sink.s);
-        dstStore->addToStore(*info, source, repair, checkSigs);
-        return;
-    }
+    retry<void>(downloadSettings.tries, [&]() {
 
-    if (info->ultimate) {
-        auto info2 = make_ref<ValidPathInfo>(*info);
-        info2->ultimate = false;
-        info = info2;
-    }
+
+        auto srcUri = srcStore->getUri();
+        auto dstUri = dstStore->getUri();
+
+        Activity act(*logger, lvlInfo, actCopyPath,
+            srcUri == "local" || srcUri == "daemon"
+            ? fmt("copying path '%s' to '%s'", storePath, dstUri)
+            : dstUri == "local" || dstUri == "daemon"
+            ? fmt("copying path '%s' from '%s'", storePath, srcUri)
+            : fmt("copying path '%s' from '%s' to '%s'", storePath, srcUri, dstUri),
+            {storePath, srcUri, dstUri});
+        PushActivity pact(act.id);
+
+        auto info = srcStore->queryPathInfo(storePath);
+
+        uint64_t total = 0;
 
-    auto source = sinkToSource([&](Sink & sink) {
-        LambdaSink wrapperSink([&](const unsigned char * data, size_t len) {
-            sink(data, len);
-            total += len;
-            act.progress(total, info->narSize);
+        if (!info->narHash) {
+            StringSink sink;
+            srcStore->narFromPath({storePath}, sink);
+            auto info2 = make_ref<ValidPathInfo>(*info);
+            info2->narHash = hashString(htSHA256, *sink.s);
+            if (!info->narSize) info2->narSize = sink.s->size();
+            if (info->ultimate) info2->ultimate = false;
+            info = info2;
+
+            StringSource source(*sink.s);
+            dstStore->addToStore(*info, source, repair, checkSigs);
+            return;
+        }
+
+        if (info->ultimate) {
+            auto info2 = make_ref<ValidPathInfo>(*info);
+            info2->ultimate = false;
+            info = info2;
+        }
+
+        auto source = sinkToSource([&](Sink & sink) {
+            LambdaSink wrapperSink([&](const unsigned char * data, size_t len) {
+                sink(data, len);
+                total += len;
+                act.progress(total, info->narSize);
+            });
+            srcStore->narFromPath({storePath}, wrapperSink);
+        }, [&]() {
+            throw EndOfFile("NAR for '%s' fetched from '%s' is incomplete", storePath, srcStore->getUri());
        });
-        srcStore->narFromPath({storePath}, wrapperSink);
-    }, [&]() {
-        throw EndOfFile("NAR for '%s' fetched from '%s' is incomplete", storePath, srcStore->getUri());
-    });
 
-    dstStore->addToStore(*info, *source, repair, checkSigs);
+        dstStore->addToStore(*info, *source, repair, checkSigs);
+    });
 }
diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh
index 7a1b31d0ff59..59967737670d 100644
--- a/src/libstore/store-api.hh
+++ b/src/libstore/store-api.hh
@@ -766,8 +766,7 @@
 StoreType getStoreType(const std::string & uri = settings.storeUri.get(),
     const std::string & stateDir = settings.nixStateDir);
 
 /* Return the default substituter stores, defined by the
-   ‘substituters’ option and various legacy options like
-   ‘binary-caches’. */
+   ‘substituters’ option and various legacy options. */
 std::list<ref<Store>> getDefaultSubstituters();