Diffstat (limited to 'src/libstore')
-rw-r--r--  src/libstore/build.cc                    24
-rw-r--r--  src/libstore/download.cc                 42
-rw-r--r--  src/libstore/download.hh                  2
-rw-r--r--  src/libstore/globals.hh                   5
-rw-r--r--  src/libstore/http-binary-cache-store.cc  11
-rw-r--r--  src/libstore/local-store.hh               2
6 files changed, 71 insertions(+), 15 deletions(-)
diff --git a/src/libstore/build.cc b/src/libstore/build.cc
index 523d737d9bf8..cca51f17ee26 100644
--- a/src/libstore/build.cc
+++ b/src/libstore/build.cc
@@ -6,6 +6,7 @@
 #include "archive.hh"
 #include "affinity.hh"
 #include "builtins.hh"
+#include "download.hh"
 #include "finally.hh"
 #include "compression.hh"
 #include "json.hh"
@@ -1777,6 +1778,19 @@ PathSet exportReferences(Store & store, PathSet storePaths)
     return paths;
 }
 
+static std::once_flag dns_resolve_flag;
+
+static void preloadNSS() {
+    /* builtin:fetchurl can trigger a DNS lookup, which with glibc can trigger a dynamic library load of
+       one of the glibc NSS libraries in a sandboxed child, which will fail unless the library's already
+       been loaded in the parent. So we force a download of an invalid URL to force the NSS machinery to
+       load its lookup libraries in the parent before any child gets a chance to. */
+    std::call_once(dns_resolve_flag, []() {
+        DownloadRequest request("http://this.pre-initializes.the.dns.resolvers.invalid");
+        request.tries = 1; // We only need to do it once, and this also suppresses an annoying warning
+        try { getDownloader()->download(request); } catch (...) {}
+    });
+}
 
 void DerivationGoal::startBuilder()
 {
@@ -1787,6 +1801,9 @@ void DerivationGoal::startBuilder()
             % drv->platform % settings.thisSystem % drvPath);
     }
 
+    if (drv->isBuiltin())
+        preloadNSS();
+
 #if __APPLE__
     additionalSandboxProfile = get(drv->env, "__sandboxProfile");
 #endif
@@ -2933,8 +2950,13 @@ void DerivationGoal::runChild()
         if (drv->isBuiltin()) {
             try {
                 logger = makeJSONLogger(*logger);
+
+                BasicDerivation drv2(*drv);
+                for (auto & e : drv2.env)
+                    e.second = rewriteStrings(e.second, inputRewrites);
+
                 if (drv->builder == "builtin:fetchurl")
-                    builtinFetchurl(*drv, netrcData);
+                    builtinFetchurl(drv2, netrcData);
                 else
                     throw Error(format("unsupported builtin function '%1%'") % string(drv->builder, 8));
                 _exit(0);
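
The build.cc hunks above add preloadNSS(): before a builtin builder such as builtin:fetchurl runs, the parent issues a throwaway DNS lookup so that glibc dlopen()s its NSS backends (libnss_dns.so and friends) while it still can; a sandboxed child that resolves a name later would otherwise fail to load those libraries. A minimal sketch of the same trick, assuming plain glibc and getaddrinfo() instead of Nix's downloader:

    #include <mutex>
    #include <netdb.h>

    static std::once_flag nssPreloadFlag;

    /* Resolve any name once so that glibc loads its NSS shared objects
       (libnss_dns.so, libnss_files.so, ...) into this process; a later
       sandboxed/chrooted child then no longer has to load them from disk. */
    void preloadNSSOnce()
    {
        std::call_once(nssPreloadFlag, [] {
            addrinfo * res = nullptr;
            /* The result does not matter; only the side effect of loading
               the NSS plugins does. */
            if (getaddrinfo("this.pre-initializes.the.dns.resolvers.invalid",
                            nullptr, nullptr, &res) == 0)
                freeaddrinfo(res);
        });
    }
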
diff --git a/src/libstore/download.cc b/src/libstore/download.cc
index 4474dfd4b968..258d7937cc39 100644
--- a/src/libstore/download.cc
+++ b/src/libstore/download.cc
@@ -17,11 +17,13 @@
 
 #include <curl/curl.h>
 
-#include <queue>
-#include <iostream>
-#include <thread>
+#include <algorithm>
 #include <cmath>
+#include <cstring>
+#include <iostream>
+#include <queue>
 #include <random>
+#include <thread>
 
 using namespace std::string_literals;
 
@@ -91,6 +93,8 @@ struct CurlDownloader : public Downloader
         {
             if (!request.expectedETag.empty())
                 requestHeaders = curl_slist_append(requestHeaders, ("If-None-Match: " + request.expectedETag).c_str());
+            if (!request.mimeType.empty())
+                requestHeaders = curl_slist_append(requestHeaders, ("Content-Type: " + request.mimeType).c_str());
         }
 
         ~DownloadItem()
@@ -185,6 +189,22 @@ struct CurlDownloader : public Downloader
             return 0;
         }
 
+        size_t readOffset = 0;
+        int readCallback(char *buffer, size_t size, size_t nitems)
+        {
+            if (readOffset == request.data->length())
+                return 0;
+            auto count = std::min(size * nitems, request.data->length() - readOffset);
+            memcpy(buffer, request.data->data() + readOffset, count);
+            readOffset += count;
+            return count;
+        }
+
+        static int readCallbackWrapper(char *buffer, size_t size, size_t nitems, void * userp)
+        {
+            return ((DownloadItem *) userp)->readCallback(buffer, size, nitems);
+        }
+
         long lowSpeedTimeout = 300;
 
         void init()
@@ -225,6 +245,13 @@ struct CurlDownloader : public Downloader
             if (request.head)
                 curl_easy_setopt(req, CURLOPT_NOBODY, 1);
 
+            if (request.data) {
+                curl_easy_setopt(req, CURLOPT_UPLOAD, 1L);
+                curl_easy_setopt(req, CURLOPT_READFUNCTION, readCallbackWrapper);
+                curl_easy_setopt(req, CURLOPT_READDATA, this);
+                curl_easy_setopt(req, CURLOPT_INFILESIZE_LARGE, (curl_off_t) request.data->length());
+            }
+
             if (request.verifyTLS) {
                 if (settings.caFile != "")
                     curl_easy_setopt(req, CURLOPT_CAINFO, settings.caFile.c_str());
@@ -265,7 +292,7 @@ struct CurlDownloader : public Downloader
             }
 
             if (code == CURLE_OK &&
-                (httpStatus == 200 || httpStatus == 304 || httpStatus == 226 /* FTP */ || httpStatus == 0 /* other protocol */))
+                (httpStatus == 200 || httpStatus == 201 || httpStatus == 204 || httpStatus == 304 || httpStatus == 226 /* FTP */ || httpStatus == 0 /* other protocol */))
             {
                 result.cached = httpStatus == 304;
                 done = true;
@@ -303,6 +330,7 @@ struct CurlDownloader : public Downloader
                     // Don't bother retrying on certain cURL errors either
                     switch (code) {
                         case CURLE_FAILED_INIT:
+                        case CURLE_URL_MALFORMAT:
                         case CURLE_NOT_BUILT_IN:
                         case CURLE_REMOTE_ACCESS_DENIED:
                         case CURLE_FILE_COULDNT_READ_FILE:
@@ -311,10 +339,10 @@ struct CurlDownloader : public Downloader
                         case CURLE_BAD_FUNCTION_ARGUMENT:
                         case CURLE_INTERFACE_FAILED:
                         case CURLE_UNKNOWN_OPTION:
-                        err = Misc;
-                        break;
+                            err = Misc;
+                            break;
                         default: // Shut up warnings
-                        break;
+                            break;
                     }
                 }
 
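
The download.cc changes teach CurlDownloader to upload as well as download: when a request carries a data buffer, the easy handle is switched to CURLOPT_UPLOAD (an HTTP PUT) and the body is fed to curl through a read callback, with 201 and 204 now accepted as success codes. A standalone sketch of that libcurl pattern, assuming a bare easy handle rather than Nix's DownloadItem:

    #include <curl/curl.h>
    #include <algorithm>
    #include <cstring>
    #include <string>

    struct UploadState { const std::string * data; size_t offset; };

    /* curl calls this repeatedly to obtain the request body; returning 0
       signals end of data. */
    static size_t readCb(char * buffer, size_t size, size_t nitems, void * userp)
    {
        auto state = static_cast<UploadState *>(userp);
        size_t count = std::min(size * nitems, state->data->size() - state->offset);
        memcpy(buffer, state->data->data() + state->offset, count);
        state->offset += count;
        return count;
    }

    CURLcode uploadString(CURL * req, const std::string & url, const std::string & body)
    {
        UploadState state{&body, 0};
        curl_easy_setopt(req, CURLOPT_URL, url.c_str());
        curl_easy_setopt(req, CURLOPT_UPLOAD, 1L);             /* issue a PUT */
        curl_easy_setopt(req, CURLOPT_READFUNCTION, readCb);   /* body source */
        curl_easy_setopt(req, CURLOPT_READDATA, &state);
        curl_easy_setopt(req, CURLOPT_INFILESIZE_LARGE, (curl_off_t) body.size());
        return curl_easy_perform(req);
    }
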
diff --git a/src/libstore/download.hh b/src/libstore/download.hh
index f2d65ad8d61d..d9d525d4e65f 100644
--- a/src/libstore/download.hh
+++ b/src/libstore/download.hh
@@ -18,6 +18,8 @@ struct DownloadRequest
     unsigned int baseRetryTimeMs = 250;
     ActivityId parentAct;
     bool decompress = true;
+    std::shared_ptr<std::string> data;
+    std::string mimeType;
 
     DownloadRequest(const std::string & uri)
         : uri(uri), parentAct(curActivity) { }
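
download.hh grows two fields on DownloadRequest: an optional data buffer and a MIME type. A hypothetical caller (URL and narinfo contents made up here) would fill them like this to turn the request into an upload; leaving data null keeps the old download behaviour:

    DownloadRequest req("https://cache.example.org/example.narinfo"); /* made-up URL */
    req.data = std::make_shared<std::string>("StorePath: /nix/store/...\n");
    req.mimeType = "text/x-nix-narinfo";  /* sent as the Content-Type header */
    getDownloader()->download(req);       /* performs a PUT despite the name */
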
diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh
index 1e50e2d13e93..20ac8fe4e9ae 100644
--- a/src/libstore/globals.hh
+++ b/src/libstore/globals.hh
@@ -287,10 +287,7 @@ public:
     Setting<unsigned int> tarballTtl{this, 60 * 60, "tarball-ttl",
         "How soon to expire files fetched by builtins.fetchTarball and builtins.fetchurl."};
 
-    Setting<std::string> signedBinaryCaches{this, "*", "signed-binary-caches",
-        "Obsolete."};
-
-    Setting<bool> requireSigs{this, signedBinaryCaches == "*", "require-sigs",
+    Setting<bool> requireSigs{this, true, "require-sigs",
         "Whether to check that any non-content-addressed path added to the "
         "Nix store has a valid signature (that is, one signed using a key "
         "listed in 'trusted-public-keys'."};
diff --git a/src/libstore/http-binary-cache-store.cc b/src/libstore/http-binary-cache-store.cc
index 057337685791..b9e9cd5daba5 100644
--- a/src/libstore/http-binary-cache-store.cc
+++ b/src/libstore/http-binary-cache-store.cc
@@ -38,7 +38,7 @@ public:
             try {
                 BinaryCacheStore::init();
             } catch (UploadToHTTP &) {
-                throw Error(format("'%s' does not appear to be a binary cache") % cacheUri);
+                throw Error("'%s' does not appear to be a binary cache", cacheUri);
             }
             diskCache->createCache(cacheUri, storeDir, wantMassQuery_, priority);
         }
@@ -67,7 +67,14 @@ protected:
         const std::string & data,
         const std::string & mimeType) override
     {
-        throw UploadToHTTP("uploading to an HTTP binary cache is not supported");
+        auto req = DownloadRequest(cacheUri + "/" + path);
+        req.data = std::make_shared<string>(data); // FIXME: inefficient
+        req.mimeType = mimeType;
+        try {
+            getDownloader()->download(req);
+        } catch (DownloadError & e) {
+            throw UploadToHTTP(format("uploading to HTTP binary cache at %1% not supported: %2%") % cacheUri % e.msg());
+        }
     }
 
     void getFile(const std::string & path,
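
With upsertFile() implemented, an HTTP binary cache becomes writable: every narinfo and NAR that BinaryCacheStore wants to store is PUT to the cache URL via the new DownloadRequest upload path, instead of unconditionally raising UploadToHTTP. A hedged sketch of how that path might be exercised through the store API; the cache URL is made up, and copyPaths() from store-api.hh is an assumption about the surrounding code:

    #include "store-api.hh"

    void pushToHttpCache(nix::ref<nix::Store> srcStore, const nix::Path & storePath)
    {
        /* Opening an http:// URL yields the HTTP binary cache store
           implemented in this file. */
        auto dstStore = nix::openStore("http://cache.example.org");
        /* Copying a path makes BinaryCacheStore call upsertFile() for the
           .narinfo and the compressed NAR, which now become HTTP PUTs. */
        nix::copyPaths(srcStore, dstStore, nix::PathSet{storePath});
    }
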
diff --git a/src/libstore/local-store.hh b/src/libstore/local-store.hh
index d35cd1a949eb..30bef3a799d4 100644
--- a/src/libstore/local-store.hh
+++ b/src/libstore/local-store.hh
@@ -19,7 +19,7 @@ namespace nix {
 /* Nix store and database schema version.  Version 1 (or 0) was Nix <=
    0.7.  Version 2 was Nix 0.8 and 0.9.  Version 3 is Nix 0.10.
    Version 4 is Nix 0.11.  Version 5 is Nix 0.12-0.16.  Version 6 is
-   Nix 1.0.  Version 7 is Nix 1.3. Version 10 is 1.12. */
+   Nix 1.0.  Version 7 is Nix 1.3. Version 10 is 2.0. */
 const int nixSchemaVersion = 10;