Diffstat (limited to 'src')
-rw-r--r--  src/libexpr/eval.cc                    12
-rw-r--r--  src/libexpr/lexer.l                     8
-rw-r--r--  src/libexpr/nixexpr.hh                  1
-rw-r--r--  src/libexpr/parser.y                    9
-rw-r--r--  src/libstore/local.mk                   2
-rw-r--r--  src/libstore/s3-binary-cache-store.cc  80
6 files changed, 82 insertions(+), 30 deletions(-)
diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc
index a2cce162b90c..353097f89713 100644
--- a/src/libexpr/eval.cc
+++ b/src/libexpr/eval.cc
@@ -317,10 +317,20 @@ EvalState::EvalState(const Strings & _searchPath, ref<Store> store)
 
     if (settings.restrictEval || settings.pureEval) {
         allowedPaths = PathSet();
+
         for (auto & i : searchPath) {
             auto r = resolveSearchPathElem(i);
             if (!r.first) continue;
-            allowedPaths->insert(r.second);
+
+            auto path = r.second;
+
+            if (store->isInStore(r.second)) {
+                PathSet closure;
+                store->computeFSClosure(store->toStorePath(r.second), closure);
+                for (auto & path : closure)
+                    allowedPaths->insert(path);
+            } else
+                allowedPaths->insert(r.second);
         }
     }
 
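Note: the eval.cc hunk above can be read as the following free-standing helper. This is an illustrative restatement only, using the same Store calls that appear in the hunk; allowWithClosure is a hypothetical name and no such function is added by the patch. The effect is that a search-path entry resolving to a Nix store path whitelists its entire runtime closure under restrict-eval/pure-eval, not just the path itself.

    /* Illustrative restatement of the hunk above; not part of the patch. */
    static void allowWithClosure(ref<Store> store, PathSet & allowedPaths,
                                 const Path & resolved)
    {
        if (store->isInStore(resolved)) {
            /* A store path drags its whole runtime closure with it, so
               whitelist every path in that closure. */
            PathSet closure;
            store->computeFSClosure(store->toStorePath(resolved), closure);
            for (auto & p : closure)
                allowedPaths.insert(p);
        } else
            /* Non-store paths are whitelisted as-is. */
            allowedPaths.insert(resolved);
    }
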
diff --git a/src/libexpr/lexer.l b/src/libexpr/lexer.l
index 1e9c29afa133..29ca327c1e4e 100644
--- a/src/libexpr/lexer.l
+++ b/src/libexpr/lexer.l
@@ -209,11 +209,13 @@ or          { return OR_KW; }
 \#[^\r\n]*    /* single-line comments */
 \/\*([^*]|\*+[^*/])*\*+\/  /* long comments */
 
-{ANY}           return yytext[0];
+{ANY}       {
+              /* Don't return a negative number, as this will cause
+                 Bison to stop parsing without an error. */
+              return (unsigned char) yytext[0];
+            }
 
 }
 
-<<EOF>> { data->atEnd = true; return 0; }
-
 %%
 
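Note: the cast to unsigned char in the lexer hunk matters on platforms where plain char is signed. An input byte >= 0x80 would otherwise sign-extend to a negative int, and a negative return value makes the Bison-generated parser stop without reporting an error. A minimal, self-contained illustration (not part of the patch; the signedness of plain char is implementation-defined):

    #include <cstdio>

    int main()
    {
        char c = '\xc3';                    /* e.g. first byte of a UTF-8 sequence */
        int oldToken = c;                   /* sign-extends to -61 where char is signed */
        int newToken = (unsigned char) c;   /* always 195, a valid Bison token value */
        printf("%d %d\n", oldToken, newToken);
        return 0;
    }
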
diff --git a/src/libexpr/nixexpr.hh b/src/libexpr/nixexpr.hh
index b486595f07ab..665a42987dc1 100644
--- a/src/libexpr/nixexpr.hh
+++ b/src/libexpr/nixexpr.hh
@@ -11,7 +11,6 @@ namespace nix {
 
 MakeError(EvalError, Error)
 MakeError(ParseError, Error)
-MakeError(IncompleteParseError, ParseError)
 MakeError(AssertionError, EvalError)
 MakeError(ThrownError, AssertionError)
 MakeError(Abort, EvalError)
diff --git a/src/libexpr/parser.y b/src/libexpr/parser.y
index e3f4521844e8..eee48887dc22 100644
--- a/src/libexpr/parser.y
+++ b/src/libexpr/parser.y
@@ -31,12 +31,10 @@ namespace nix {
         Path basePath;
         Symbol path;
         string error;
-        bool atEnd;
         Symbol sLetBody;
         ParseData(EvalState & state)
             : state(state)
             , symbols(state.symbols)
-            , atEnd(false)
             , sLetBody(symbols.create("<let-body>"))
             { };
     };
@@ -541,12 +539,7 @@ Expr * EvalState::parse(const char * text,
     int res = yyparse(scanner, &data);
     yylex_destroy(scanner);
 
-    if (res) {
-      if (data.atEnd)
-        throw IncompleteParseError(data.error);
-      else
-        throw ParseError(data.error);
-    }
+    if (res) throw ParseError(data.error);
 
     data.result->bindVars(staticEnv);
 
diff --git a/src/libstore/local.mk b/src/libstore/local.mk
index a7279aa3939f..3799257f83ff 100644
--- a/src/libstore/local.mk
+++ b/src/libstore/local.mk
@@ -18,7 +18,7 @@ libstore_FILES = sandbox-defaults.sb sandbox-minimal.sb sandbox-network.sb
 $(foreach file,$(libstore_FILES),$(eval $(call install-data-in,$(d)/$(file),$(datadir)/nix/sandbox)))
 
 ifeq ($(ENABLE_S3), 1)
-	libstore_LDFLAGS += -laws-cpp-sdk-s3 -laws-cpp-sdk-core
+	libstore_LDFLAGS += -laws-cpp-sdk-transfer -laws-cpp-sdk-s3 -laws-cpp-sdk-core
 endif
 
 ifeq ($(OS), SunOS)
diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc
index 23af452094cf..103f141a1a11 100644
--- a/src/libstore/s3-binary-cache-store.cc
+++ b/src/libstore/s3-binary-cache-store.cc
@@ -17,6 +17,7 @@
 #include <aws/core/client/DefaultRetryStrategy.h>
 #include <aws/core/utils/logging/FormattedLogSystem.h>
 #include <aws/core/utils/logging/LogMacros.h>
+#include <aws/core/utils/threading/Executor.h>
 #include <aws/s3/S3Client.h>
 #include <aws/s3/model/CreateBucketRequest.h>
 #include <aws/s3/model/GetBucketLocationRequest.h>
@@ -24,6 +25,9 @@
 #include <aws/s3/model/HeadObjectRequest.h>
 #include <aws/s3/model/ListObjectsRequest.h>
 #include <aws/s3/model/PutObjectRequest.h>
+#include <aws/transfer/TransferManager.h>
+
+using namespace Aws::Transfer;
 
 namespace nix {
 
@@ -169,6 +173,8 @@ struct S3BinaryCacheStoreImpl : public S3BinaryCacheStore
     const Setting<std::string> narinfoCompression{this, "", "narinfo-compression", "compression method for .narinfo files"};
     const Setting<std::string> lsCompression{this, "", "ls-compression", "compression method for .ls files"};
     const Setting<std::string> logCompression{this, "", "log-compression", "compression method for log/* files"};
+    const Setting<uint64_t> bufferSize{
+        this, 5 * 1024 * 1024, "buffer-size", "size (in bytes) of each part in multi-part uploads"};
 
     std::string bucketName;
 
@@ -271,34 +277,76 @@ struct S3BinaryCacheStoreImpl : public S3BinaryCacheStore
         const std::string & mimeType,
         const std::string & contentEncoding)
     {
-        auto request =
-            Aws::S3::Model::PutObjectRequest()
-            .WithBucket(bucketName)
-            .WithKey(path);
+        auto stream = std::make_shared<istringstream_nocopy>(data);
 
-        request.SetContentType(mimeType);
+        auto maxThreads = std::thread::hardware_concurrency();
 
-        if (contentEncoding != "")
-            request.SetContentEncoding(contentEncoding);
+        static std::shared_ptr<Aws::Utils::Threading::PooledThreadExecutor>
+            executor = std::make_shared<Aws::Utils::Threading::PooledThreadExecutor>(maxThreads);
 
-        auto stream = std::make_shared<istringstream_nocopy>(data);
+        TransferManagerConfiguration transferConfig(executor.get());
 
-        request.SetBody(stream);
+        transferConfig.s3Client = s3Helper.client;
+        transferConfig.bufferSize = bufferSize;
 
-        stats.put++;
-        stats.putBytes += data.size();
+        if (contentEncoding != "")
+            transferConfig.createMultipartUploadTemplate.SetContentEncoding(
+                contentEncoding);
+
+        transferConfig.uploadProgressCallback =
+            [&](const TransferManager *transferManager,
+                const std::shared_ptr<const TransferHandle>
+                    &transferHandle) {
+              //FIXME: find a way to properly abort the multipart upload.
+              checkInterrupt();
+              printTalkative("upload progress ('%s'): '%d' of '%d' bytes",
+                             path,
+                             transferHandle->GetBytesTransferred(),
+                             transferHandle->GetBytesTotalSize());
+            };
+
+        transferConfig.transferStatusUpdatedCallback =
+            [&](const TransferManager *,
+                const std::shared_ptr<const TransferHandle>
+                    &transferHandle) {
+              switch (transferHandle->GetStatus()) {
+                  case TransferStatus::COMPLETED:
+                      printTalkative("upload of '%s' completed", path);
+                      stats.put++;
+                      stats.putBytes += data.size();
+                      break;
+                  case TransferStatus::IN_PROGRESS:
+                      break;
+                  case TransferStatus::FAILED:
+                      throw Error("AWS error: failed to upload 's3://%s/%s'",
+                                  bucketName, path);
+                      break;
+                  default:
+                      throw Error("AWS error: transfer status of 's3://%s/%s' "
+                                  "in unexpected state",
+                                  bucketName, path);
+              };
+            };
+
+        std::shared_ptr<TransferManager> transferManager =
+            TransferManager::Create(transferConfig);
 
         auto now1 = std::chrono::steady_clock::now();
 
-        auto result = checkAws(format("AWS error uploading '%s'") % path,
-            s3Helper.client->PutObject(request));
+        std::shared_ptr<TransferHandle> transferHandle =
+            transferManager->UploadFile(stream, bucketName, path, mimeType,
+                                        Aws::Map<Aws::String, Aws::String>());
+
+        transferHandle->WaitUntilFinished();
 
         auto now2 = std::chrono::steady_clock::now();
 
-        auto duration = std::chrono::duration_cast<std::chrono::milliseconds>(now2 - now1).count();
+        auto duration =
+            std::chrono::duration_cast<std::chrono::milliseconds>(now2 - now1)
+                .count();
 
-        printInfo(format("uploaded 's3://%1%/%2%' (%3% bytes) in %4% ms")
-            % bucketName % path % data.size() % duration);
+        printInfo(format("uploaded 's3://%1%/%2%' (%3% bytes) in %4% ms") %
+                  bucketName % path % data.size() % duration);
 
         stats.putTimeMs += duration;
     }