#include "nix/store/s3-binary-cache-store.hh"
#include "nix/store/http-binary-cache-store.hh"
#include "nix/store/store-registration.hh"
+ #include "nix/util/error.hh"
+ #include "nix/util/logging.hh"
+ #include "nix/util/serialise.hh"
+ #include "nix/util/util.hh"

#include <cassert>
#include <ranges>

namespace nix {

+ MakeError(UploadToS3, Error);
+
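+ // AWS caps a single PutObject request (and each multipart part) at 5 GiB.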
+ static constexpr uint64_t AWS_MAX_PART_SIZE = 5ULL * 1024 * 1024 * 1024; // 5GiB
+
class S3BinaryCacheStore : public virtual HttpBinaryCacheStore
{
public:
@@ -26,6 +34,26 @@ class S3BinaryCacheStore : public virtual HttpBinaryCacheStore
private:
    ref<S3BinaryCacheStoreConfig> s3Config;

+     /**
+      * Uploads a file to S3 using a regular (non-multipart) upload.
+      *
+      * This method is suitable for files up to 5GiB in size. For larger files,
+      * multipart upload should be used instead.
+      *
+      * @see https://docs.aws.amazon.com/AmazonS3/latest/API/API_PutObject.html
+      */
+     void upload(
+         std::string_view path,
+         RestartableSource & source,
+         uint64_t sizeHint,
+         std::string_view mimeType,
+         std::optional<std::string_view> contentEncoding);
+
+     /**
+      * Uploads a file to S3 (CompressedSource overload). The compressed size
+      * and content encoding are taken from the source.
+      */
+     void upload(std::string_view path, CompressedSource & source, std::string_view mimeType);
+
    /**
     * Creates a multipart upload for large objects to S3.
     *
@@ -69,7 +97,40 @@ class S3BinaryCacheStore : public virtual HttpBinaryCacheStore
void S3BinaryCacheStore::upsertFile(
    const std::string & path, RestartableSource & source, const std::string & mimeType, uint64_t sizeHint)
{
-     HttpBinaryCacheStore::upsertFile(path, source, mimeType, sizeHint);
+     if (auto compressionMethod = getCompressionMethod(path)) {
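+         // Compressing first lets the upload use the exact compressed size instead of sizeHint.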
+         CompressedSource compressed(source, *compressionMethod);
+         upload(path, compressed, mimeType);
+     } else {
+         upload(path, source, sizeHint, mimeType, std::nullopt);
+     }
+ }
+
+ void S3BinaryCacheStore::upload(
+     std::string_view path,
+     RestartableSource & source,
+     uint64_t sizeHint,
+     std::string_view mimeType,
+     std::optional<std::string_view> contentEncoding)
+ {
+     debug("using S3 regular upload for '%s' (%d bytes)", path, sizeHint);
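+     // A single PutObject request is limited to AWS_MAX_PART_SIZE; anything larger needs multipart upload.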
+     if (sizeHint > AWS_MAX_PART_SIZE)
+         throw Error(
+             "file too large for S3 upload without multipart: %s would exceed maximum size of %s. Consider enabling multipart-upload.",
+             renderSize(sizeHint),
+             renderSize(AWS_MAX_PART_SIZE));
+
+     try {
+         HttpBinaryCacheStore::upload(path, source, sizeHint, mimeType, contentEncoding);
+     } catch (FileTransferError & e) {
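+         // Rewrap transfer failures as UploadToS3, recording the cache URI in the error trace.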
+         UploadToS3 err(e.message());
+         err.addTrace({}, "while uploading to S3 binary cache at '%s'", config->cacheUri.to_string());
+         throw err;
+     }
+ }
+
+ void S3BinaryCacheStore::upload(std::string_view path, CompressedSource & source, std::string_view mimeType)
+ {
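+     // Delegate to the general overload, passing the compressed size and the compression method as the content encoding.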
+     upload(path, static_cast<RestartableSource &>(source), source.size(), mimeType, source.getCompressionMethod());
}

std::string S3BinaryCacheStore::createMultipartUpload(