diff options
author | Eelco Dolstra <edolstra@gmail.com> | 2017-03-14T14·03+0100 |
---|---|---|
committer | Eelco Dolstra <edolstra@gmail.com> | 2017-03-15T15·49+0100 |
commit | 8b1d65bebe5af8960ba813e1233f2596a3ffebb7 (patch) | |
tree | 34855f335e462c878f512ddd8a881f6360d41f79 /src/libstore/s3-binary-cache-store.cc | |
parent | 2691498b5c68a9c8908da296bf867bfc7f9a068f (diff) |
S3BinaryCacheStore: Support compression of narinfo and log files
You can now set the store parameter "text-compression=br" to compress textual files in the binary cache (i.e. narinfo and logs) using Brotli. This sets the Content-Encoding header; the extension of compressed files is unchanged. You can separately specify the compression of log files using "log-compression=br". This is useful when you don't want to compress narinfo files for backward compatibility.
Diffstat (limited to 'src/libstore/s3-binary-cache-store.cc')
-rw-r--r-- | src/libstore/s3-binary-cache-store.cc | 28 |
1 file changed, 25 insertions, 3 deletions
diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc index 5134dd175261..1d44e68321f7 100644 --- a/src/libstore/s3-binary-cache-store.cc +++ b/src/libstore/s3-binary-cache-store.cc @@ -5,6 +5,8 @@ #include "nar-info.hh" #include "nar-info-disk-cache.hh" #include "globals.hh" +#include "compression.hh" +#include "download.hh" #include <aws/core/Aws.h> #include <aws/core/client/ClientConfiguration.h> @@ -104,8 +106,10 @@ S3Helper::DownloadResult S3Helper::getObject( auto result = checkAws(fmt("AWS error fetching ‘%s’", key), client->GetObject(request)); - res.data = std::make_shared<std::string>( - dynamic_cast<std::stringstream &>(result.GetBody()).str()); + res.data = decodeContent( + result.GetContentEncoding(), + make_ref<std::string>( + dynamic_cast<std::stringstream &>(result.GetBody()).str())); } catch (S3Error & e) { if (e.err != Aws::S3::S3Errors::NO_SUCH_KEY) throw; @@ -137,11 +141,15 @@ struct S3BinaryCacheStoreImpl : public S3BinaryCacheStore S3Helper s3Helper; + std::string textCompression, logCompression; + S3BinaryCacheStoreImpl( const Params & params, const std::string & bucketName) : S3BinaryCacheStore(params) , bucketName(bucketName) , s3Helper(get(params, "aws-region", Aws::Region::US_EAST_1)) + , textCompression(get(params, "text-compression", "gzip")) + , logCompression(get(params, "log-compression", textCompression)) { diskCache = getNarInfoDiskCache(); } @@ -220,13 +228,17 @@ struct S3BinaryCacheStoreImpl : public S3BinaryCacheStore return true; } - void upsertFile(const std::string & path, const std::string & data) override + void uploadFile(const std::string & path, const std::string & data, + const std::string & contentEncoding) { auto request = Aws::S3::Model::PutObjectRequest() .WithBucket(bucketName) .WithKey(path); + if (contentEncoding != "") + request.SetContentEncoding(contentEncoding); + auto stream = std::make_shared<istringstream_nocopy>(data); request.SetBody(stream); @@ -249,6 +261,16 @@ 
struct S3BinaryCacheStoreImpl : public S3BinaryCacheStore stats.putTimeMs += duration; } + void upsertFile(const std::string & path, const std::string & data) override + { + if (path.find(".narinfo") != std::string::npos) + uploadFile(path, *compress(textCompression, data), textCompression); + else if (path.find("/log") != std::string::npos) + uploadFile(path, *compress(logCompression, data), logCompression); + else + uploadFile(path, data, ""); + } + void getFile(const std::string & path, std::function<void(std::shared_ptr<std::string>)> success, std::function<void(std::exception_ptr exc)> failure) override |