Diffstat (limited to 'src/libstore/download.cc')
-rw-r--r--  src/libstore/download.cc  37
1 file changed, 31 insertions(+), 6 deletions(-)
diff --git a/src/libstore/download.cc b/src/libstore/download.cc
index 75c00d85d344..22bde086e6a2 100644
--- a/src/libstore/download.cc
+++ b/src/libstore/download.cc
@@ -5,6 +5,11 @@
 #include "store-api.hh"
 #include "archive.hh"
 #include "s3.hh"
+#include "compression.hh"
+
+#ifdef ENABLE_S3
+#include <aws/core/client/ClientConfiguration.h>
+#endif
 
 #include <unistd.h>
 #include <fcntl.h>
@@ -34,6 +39,16 @@ std::string resolveUri(const std::string & uri)
         return uri;
 }
 
+ref<std::string> decodeContent(const std::string & encoding, ref<std::string> data)
+{
+    if (encoding == "")
+        return data;
+    else if (encoding == "br")
+        return decompress(encoding, *data);
+    else
+        throw Error("unsupported Content-Encoding ‘%s’", encoding);
+}
+
 struct CurlDownloader : public Downloader
 {
     CURLM * curlm = 0;
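
The hunk above adds a small helper, decodeContent(), that returns the body unchanged when no Content-Encoding was sent and otherwise hands the "br" case to decompress() from compression.hh. A minimal usage sketch follows, assuming the helper is also declared in download.hh and using nix's internal ref<T> wrapper (both assumptions about the surrounding tree, not shown in this hunk):

    #include "download.hh"   // decodeContent(), assumed to be declared here
    #include <iostream>

    // Hypothetical caller: decode a brotli-compressed response body, or get
    // the data back unchanged when the encoding string is empty.
    void printDecodedSize(ref<std::string> body)
    {
        auto plain = decodeContent("br", body);   // throws Error on unknown encodings
        std::cout << "decoded " << plain->size() << " bytes" << std::endl;
    }
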
@@ -67,6 +82,8 @@ struct CurlDownloader : public Downloader
 
         struct curl_slist * requestHeaders = 0;
 
+        std::string encoding;
+
         DownloadItem(CurlDownloader & downloader, const DownloadRequest & request)
             : downloader(downloader), request(request)
         {
@@ -124,6 +141,7 @@ struct CurlDownloader : public Downloader
                 auto ss = tokenizeString<vector<string>>(line, " ");
                 status = ss.size() >= 2 ? ss[1] : "";
                 result.data = std::make_shared<std::string>();
+                encoding = "";
             } else {
                 auto i = line.find(':');
                 if (i != string::npos) {
@@ -139,7 +157,8 @@ struct CurlDownloader : public Downloader
                             debug(format("shutting down on 200 HTTP response with expected ETag"));
                             return 0;
                         }
-                    }
+                    } else if (name == "content-encoding")
+                        encoding = trim(string(line, i + 1));
                 }
             }
             return realSize;
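
This hunk resets the recorded encoding at the start of each response and captures the Content-Encoding header as it arrives. For illustration only, here is the name/value split applied to a raw header line, using trim() and toLower() from util.hh; the member variables and curl callback plumbing are omitted:

    #include "util.hh"   // trim(), toLower()
    #include <cassert>

    void parseHeaderLineExample()
    {
        std::string line = "Content-Encoding: br";
        auto i = line.find(':');
        std::string name  = toLower(trim(std::string(line, 0, i)));   // "content-encoding"
        std::string value = trim(std::string(line, i + 1));           // "br"
        assert(name == "content-encoding" && value == "br");
    }
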
@@ -224,8 +243,7 @@ struct CurlDownloader : public Downloader
                 curl_easy_setopt(req, CURLOPT_NOBODY, 1);
 
             if (request.verifyTLS)
-                curl_easy_setopt(req, CURLOPT_CAINFO,
-                    getEnv("NIX_SSL_CERT_FILE", getEnv("SSL_CERT_FILE", "/etc/ssl/certs/ca-certificates.crt")).c_str());
+                curl_easy_setopt(req, CURLOPT_CAINFO, settings.caFile.c_str());
             else {
                 curl_easy_setopt(req, CURLOPT_SSL_VERIFYPEER, 0);
                 curl_easy_setopt(req, CURLOPT_SSL_VERIFYHOST, 0);
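
Here the inline environment lookup is replaced by settings.caFile. A sketch of the equivalent fallback chain, on the assumption that the default for settings.caFile now lives in the global settings rather than here:

    #include "util.hh"   // getEnv()

    // Assumed default, mirroring the getEnv() chain removed above.
    std::string defaultCaFile()
    {
        return getEnv("NIX_SSL_CERT_FILE",
            getEnv("SSL_CERT_FILE", "/etc/ssl/certs/ca-certificates.crt"));
    }
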
@@ -266,7 +284,14 @@ struct CurlDownloader : public Downloader
             {
                 result.cached = httpStatus == 304;
                 done = true;
-                callSuccess(success, failure, const_cast<const DownloadResult &>(result));
+
+                try {
+                    result.data = decodeContent(encoding, ref<std::string>(result.data));
+                    callSuccess(success, failure, const_cast<const DownloadResult &>(result));
+                } catch (...) {
+                    done = true;
+                    callFailure(failure, std::current_exception());
+                }
             } else {
                 Error err =
                     (httpStatus == 404 || code == CURLE_FILE_COULDNT_READ_FILE) ? NotFound :
@@ -496,7 +521,7 @@ struct CurlDownloader : public Downloader
             // FIXME: do this on a worker thread
             sync2async<DownloadResult>(success, failure, [&]() -> DownloadResult {
 #ifdef ENABLE_S3
-                S3Helper s3Helper;
+                S3Helper s3Helper(Aws::Region::US_EAST_1); // FIXME: make configurable
                 auto slash = request.uri.find('/', 5);
                 if (slash == std::string::npos)
                     throw nix::Error("bad S3 URI ‘%s’", request.uri);
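
S3Helper now takes an explicit region, hard-coded to us-east-1 for the moment. A sketch of what that parameter plausibly becomes inside the helper, using the AWS C++ SDK's ClientConfiguration; this is an assumption about S3Helper's internals, which are not part of this diff:

    #ifdef ENABLE_S3
    #include <aws/core/Region.h>
    #include <aws/core/client/ClientConfiguration.h>

    Aws::Client::ClientConfiguration regionConfig()
    {
        Aws::Client::ClientConfiguration config;
        config.region = Aws::Region::US_EAST_1;   // see the FIXME above: make configurable
        return config;
    }
    #endif
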
@@ -646,7 +671,7 @@ Path Downloader::downloadCached(ref<Store> store, const string & url_, bool unpa
             Path tmpDir = createTempDir();
             AutoDelete autoDelete(tmpDir, true);
             // FIXME: this requires GNU tar for decompression.
-            runProgram("tar", true, {"xf", storePath, "-C", tmpDir, "--strip-components", "1"}, "");
+            runProgram("tar", true, {"xf", storePath, "-C", tmpDir, "--strip-components", "1"});
             unpackedStorePath = store->addToStore(name, tmpDir, true, htSHA256, defaultPathFilter, false);
         }
         replaceSymlink(unpackedStorePath, unpackedLink);
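
The trailing "" dropped in the last hunk is redundant because runProgram()'s input parameter already defaults to the empty string; the assumed declaration from util.hh (not part of this diff):

    // Assumed signature; the final parameter is the process's stdin.
    string runProgram(Path program, bool searchPath = false,
        const Strings & args = Strings(), const string & input = "");
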