@aztec/stdlib 3.0.0-nightly.20251005 → 3.0.0-nightly.20251007

This diff compares the contents of two publicly released versions of the package, as published to a supported registry. It is provided for informational purposes only and reflects the package versions exactly as they appear in their public registry.
@@ -14,9 +14,15 @@ export type FileStoreSaveOptions = {
  };
  /** Simple file store. */
  export interface FileStore extends ReadOnlyFileStore {
- /** Saves contents to the given path. Returns an URI that can be used later to `read` the file. */
+ /**
+ * Saves contents to the given path. Returns an URI that can be used later to `read` the file.
+ * Default: `compress` is false unless explicitly set.
+ */
  save(path: string, data: Buffer, opts?: FileStoreSaveOptions): Promise<string>;
- /** Uploads contents from a local file. Returns an URI that can be used later to `read` the file. */
+ /**
+ * Uploads contents from a local file. Returns an URI that can be used later to `read` the file.
+ * Default: `compress` is true unless explicitly set to false.
+ */
  upload(destPath: string, srcPath: string, opts?: FileStoreSaveOptions): Promise<string>;
  }
  //# sourceMappingURL=interface.d.ts.map
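
The net behavioral change documented here: `save` no longer compresses unless asked, while `upload` keeps compression on by default. A minimal usage sketch under those semantics; the `store` variable, object paths, and helper function are illustrative stand-ins for a concrete `FileStore` implementation:

import type { FileStore } from '@aztec/stdlib/file-store';

// Hypothetical caller exercising both defaults.
async function publishArtifacts(store: FileStore, index: unknown, dbFile: string) {
  // save(): compress defaults to false, so the JSON is stored as plain bytes.
  const indexUrl = await store.save('snapshots/index.json', Buffer.from(JSON.stringify(index)));
  // upload(): compress defaults to true; opt out explicitly for raw bytes.
  const gzUrl = await store.upload('snapshots/archive.db', dbFile);
  const rawUrl = await store.upload('snapshots/archive.raw.db', dbFile, { compress: false });
  return { indexUrl, gzUrl, rawUrl };
}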
@@ -1 +1 @@
- {"version":3,"file":"interface.d.ts","sourceRoot":"","sources":["../../src/file-store/interface.ts"],"names":[],"mappings":"AAAA,mCAAmC;AACnC,MAAM,WAAW,iBAAiB;IAChC,iGAAiG;IACjG,IAAI,CAAC,SAAS,EAAE,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC,CAAC;IACzC,wGAAwG;IACxG,QAAQ,CAAC,YAAY,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC,CAAC;IAChE,8DAA8D;IAC9D,MAAM,CAAC,SAAS,EAAE,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC,CAAC;CAC7C;AAED,MAAM,MAAM,oBAAoB,GAAG;IAAE,MAAM,CAAC,EAAE,OAAO,CAAC;IAAC,QAAQ,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IAAC,QAAQ,CAAC,EAAE,OAAO,CAAA;CAAE,CAAC;AAE/G,yBAAyB;AACzB,MAAM,WAAW,SAAU,SAAQ,iBAAiB;IAClD,kGAAkG;IAClG,IAAI,CAAC,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,CAAC,EAAE,oBAAoB,GAAG,OAAO,CAAC,MAAM,CAAC,CAAC;IAC/E,oGAAoG;IACpG,MAAM,CAAC,QAAQ,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,EAAE,IAAI,CAAC,EAAE,oBAAoB,GAAG,OAAO,CAAC,MAAM,CAAC,CAAC;CACzF"}
+ {"version":3,"file":"interface.d.ts","sourceRoot":"","sources":["../../src/file-store/interface.ts"],"names":[],"mappings":"AAAA,mCAAmC;AACnC,MAAM,WAAW,iBAAiB;IAChC,iGAAiG;IACjG,IAAI,CAAC,SAAS,EAAE,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC,CAAC;IACzC,wGAAwG;IACxG,QAAQ,CAAC,YAAY,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC,CAAC;IAChE,8DAA8D;IAC9D,MAAM,CAAC,SAAS,EAAE,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC,CAAC;CAC7C;AAED,MAAM,MAAM,oBAAoB,GAAG;IAAE,MAAM,CAAC,EAAE,OAAO,CAAC;IAAC,QAAQ,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IAAC,QAAQ,CAAC,EAAE,OAAO,CAAA;CAAE,CAAC;AAE/G,yBAAyB;AACzB,MAAM,WAAW,SAAU,SAAQ,iBAAiB;IAClD;;;OAGG;IACH,IAAI,CAAC,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,CAAC,EAAE,oBAAoB,GAAG,OAAO,CAAC,MAAM,CAAC,CAAC;IAC/E;;;OAGG;IACH,MAAM,CAAC,QAAQ,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,EAAE,IAAI,CAAC,EAAE,oBAAoB,GAAG,OAAO,CAAC,MAAM,CAAC,CAAC;CACzF"}
@@ -18,6 +18,7 @@ export declare class S3FileStore implements FileStore {
  download(pathOrUrlStr: string, destPath: string): Promise<void>;
  exists(pathOrUrlStr: string): Promise<boolean>;
  private extractUserMetadata;
+ private detectContentType;
  private buildReturnedUrl;
  private getBucketAndKey;
  private getFullPath;
@@ -1 +1 @@
- {"version":3,"file":"s3.d.ts","sourceRoot":"","sources":["../../src/file-store/s3.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,MAAM,EAAgB,MAAM,uBAAuB,CAAC;AAgBlE,OAAO,KAAK,EAAE,SAAS,EAAE,oBAAoB,EAAE,MAAM,gBAAgB,CAAC;AAMtE,qBAAa,WAAY,YAAW,SAAS;IAOzC,OAAO,CAAC,QAAQ,CAAC,UAAU;IAC3B,OAAO,CAAC,QAAQ,CAAC,QAAQ;IAEzB,OAAO,CAAC,QAAQ,CAAC,GAAG;IATtB,OAAO,CAAC,QAAQ,CAAC,EAAE,CAAW;IAC9B,OAAO,CAAC,QAAQ,CAAC,MAAM,CAAS;IAChC,OAAO,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAS;IACnC,OAAO,CAAC,QAAQ,CAAC,aAAa,CAAC,CAAS;gBAGrB,UAAU,EAAE,MAAM,EAClB,QAAQ,EAAE,MAAM,EACjC,IAAI,EAAE;QAAE,QAAQ,CAAC,EAAE,MAAM,CAAC;QAAC,aAAa,CAAC,EAAE,MAAM,CAAA;KAAE,EAClC,GAAG,GAAE,MAA6C;IAiBxD,IAAI,CAAC,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,GAAE,oBAAyB,GAAG,OAAO,CAAC,MAAM,CAAC;IAgBlF,MAAM,CAAC,QAAQ,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,EAAE,IAAI,GAAE,oBAAyB,GAAG,OAAO,CAAC,MAAM,CAAC;IAoB3F,IAAI,CAAC,YAAY,EAAE,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC;IAW3C,QAAQ,CAAC,YAAY,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;IAQ/D,MAAM,CAAC,YAAY,EAAE,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC;IAe3D,OAAO,CAAC,mBAAmB;IAQ3B,OAAO,CAAC,gBAAgB;IA0BxB,OAAO,CAAC,eAAe;IA2BvB,OAAO,CAAC,WAAW;CAKpB"}
+ {"version":3,"file":"s3.d.ts","sourceRoot":"","sources":["../../src/file-store/s3.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,MAAM,EAAgB,MAAM,uBAAuB,CAAC;AAiBlE,OAAO,KAAK,EAAE,SAAS,EAAE,oBAAoB,EAAE,MAAM,gBAAgB,CAAC;AAMtE,qBAAa,WAAY,YAAW,SAAS;IAOzC,OAAO,CAAC,QAAQ,CAAC,UAAU;IAC3B,OAAO,CAAC,QAAQ,CAAC,QAAQ;IAEzB,OAAO,CAAC,QAAQ,CAAC,GAAG;IATtB,OAAO,CAAC,QAAQ,CAAC,EAAE,CAAW;IAC9B,OAAO,CAAC,QAAQ,CAAC,MAAM,CAAS;IAChC,OAAO,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAS;IACnC,OAAO,CAAC,QAAQ,CAAC,aAAa,CAAC,CAAS;gBAGrB,UAAU,EAAE,MAAM,EAClB,QAAQ,EAAE,MAAM,EACjC,IAAI,EAAE;QAAE,QAAQ,CAAC,EAAE,MAAM,CAAC;QAAC,aAAa,CAAC,EAAE,MAAM,CAAA;KAAE,EAClC,GAAG,GAAE,MAA6C;IAiBxD,IAAI,CAAC,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,GAAE,oBAAyB,GAAG,OAAO,CAAC,MAAM,CAAC;IAoBlF,MAAM,CAAC,QAAQ,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,EAAE,IAAI,GAAE,oBAAyB,GAAG,OAAO,CAAC,MAAM,CAAC;IA6D3F,IAAI,CAAC,YAAY,EAAE,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC;IAW3C,QAAQ,CAAC,YAAY,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;IAQ/D,MAAM,CAAC,YAAY,EAAE,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC;IAe3D,OAAO,CAAC,mBAAmB;IAQ3B,OAAO,CAAC,iBAAiB;IAsBzB,OAAO,CAAC,gBAAgB;IA0BxB,OAAO,CAAC,eAAe;IA2BvB,OAAO,CAAC,WAAW;CAKpB"}
@@ -1,8 +1,9 @@
  import { createLogger } from '@aztec/foundation/log';
  import { GetObjectCommand, HeadObjectCommand, PutObjectCommand, S3Client } from '@aws-sdk/client-s3';
  import { createReadStream, createWriteStream } from 'fs';
- import { mkdir } from 'fs/promises';
- import { dirname, join } from 'path';
+ import { mkdir, mkdtemp, stat, unlink } from 'fs/promises';
+ import { tmpdir } from 'os';
+ import { basename, dirname, join } from 'path';
  import { finished } from 'stream/promises';
  import { createGzip } from 'zlib';
  function normalizeBasePath(path) {
@@ -35,15 +36,18 @@ export class S3FileStore {
  }
  async save(path, data, opts = {}) {
  const key = this.getFullPath(path);
- const shouldCompress = !opts.compress;
+ const shouldCompress = !!opts.compress;
  const body = shouldCompress ? (await import('zlib')).gzipSync(data) : data;
+ const contentLength = body.length;
+ const contentType = this.detectContentType(key, shouldCompress);
  const put = new PutObjectCommand({
  Bucket: this.bucketName,
  Key: key,
  Body: body,
- ContentEncoding: shouldCompress ? 'gzip' : undefined,
+ ContentType: contentType,
  CacheControl: opts.metadata?.['Cache-control'],
- Metadata: this.extractUserMetadata(opts.metadata)
+ Metadata: this.extractUserMetadata(opts.metadata),
+ ContentLength: contentLength
  });
  await this.s3.send(put);
  return this.buildReturnedUrl(key, !!opts.public);
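
Two fixes land in save(): the compression flag was previously inverted (`!opts.compress` compressed exactly when the caller did not ask for it), and the object now ships an explicit ContentType and ContentLength instead of a Content-Encoding header. A self-contained sketch of the resulting buffered-put pattern; the function and bucket wiring are illustrative, while PutObjectCommand, ContentLength, and gzipSync are real @aws-sdk/client-s3 and zlib APIs:

import { PutObjectCommand, S3Client } from '@aws-sdk/client-s3';
import { gzipSync } from 'zlib';

// Illustrative: buffer-based put with an exact ContentLength, mirroring the new save().
async function putBuffer(s3: S3Client, bucket: string, key: string, data: Buffer, compress: boolean) {
  const body = compress ? gzipSync(data) : data; // note: !!compress semantics, not the old inverted check
  await s3.send(
    new PutObjectCommand({
      Bucket: bucket,
      Key: key,
      Body: body,
      // The gzipped bytes are stored as-is (no Content-Encoding), so the length is just what we send.
      ContentLength: body.length,
    }),
  );
}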
@@ -54,17 +58,57 @@ export class S3FileStore {
  await mkdir(dirname(srcPath), {
  recursive: true
  }).catch(()=>undefined);
- const source = createReadStream(srcPath);
- const bodyStream = shouldCompress ? source.pipe(createGzip()) : source;
- const put = new PutObjectCommand({
- Bucket: this.bucketName,
- Key: key,
- Body: bodyStream,
- ContentEncoding: shouldCompress ? 'gzip' : undefined,
- CacheControl: opts.metadata?.['Cache-control'],
- Metadata: this.extractUserMetadata(opts.metadata)
- });
- await this.s3.send(put);
+ let contentLength;
+ let bodyPath = srcPath;
+ // We don't set Content-Encoding and we avoid SigV4 streaming (aws-chunked).
+ // With AWS SigV4 streaming uploads (Content-Encoding: aws-chunked[,gzip]), servers require
+ // x-amz-decoded-content-length (the size of the decoded payload) and an exact Content-Length
+ // that includes chunk metadata. For on-the-fly compression, providing
+ // those values without buffering or a pre-pass is impractical. Instead, we pre-gzip to a temp file
+ // to know ContentLength up-front and upload the gzipped bytes as-is, omitting Content-Encoding.
+ // Reference: AWS SigV4 streaming (chunked upload) requirements —
+ // https://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-streaming.html
+ if (shouldCompress) {
+ // Pre-gzip to a temp file so we know the exact length for R2/S3 headers
+ const tmpDir = await mkdtemp(join(tmpdir(), 's3-upload-'));
+ const gzPath = join(tmpDir, `${basename(srcPath)}.gz`);
+ const source = createReadStream(srcPath);
+ const gz = createGzip();
+ const out = createWriteStream(gzPath);
+ try {
+ await finished(source.pipe(gz).pipe(out));
+ const st = await stat(gzPath);
+ contentLength = st.size;
+ bodyPath = gzPath;
+ } catch (err) {
+ // Ensure temp file is removed on failure
+ await unlink(gzPath).catch(()=>undefined);
+ throw err;
+ }
+ } else {
+ const st = await stat(srcPath);
+ contentLength = st.size;
+ bodyPath = srcPath;
+ }
+ const bodyStream = createReadStream(bodyPath);
+ const contentType = this.detectContentType(key, shouldCompress);
+ try {
+ const put = new PutObjectCommand({
+ Bucket: this.bucketName,
+ Key: key,
+ Body: bodyStream,
+ ContentType: contentType,
+ CacheControl: opts.metadata?.['Cache-control'],
+ Metadata: this.extractUserMetadata(opts.metadata),
+ // Explicitly set ContentLength so R2 can compute x-amz-decoded-content-length correctly
+ ContentLength: contentLength
+ });
+ await this.s3.send(put);
+ } finally{
+ if (shouldCompress && bodyPath !== srcPath) {
+ await unlink(bodyPath).catch(()=>undefined);
+ }
+ }
  return this.buildReturnedUrl(key, !!opts.public);
  }
  async read(pathOrUrlStr) {
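
The comment block above is the heart of the change: with SigV4 streaming (aws-chunked), the server wants the decoded payload size before the body starts, which an on-the-fly gzip stream cannot supply. The workaround is to materialize the compressed bytes first. A standalone sketch of that pre-pass, using only standard Node APIs (the helper name and temp-dir prefix are illustrative):

import { createReadStream, createWriteStream } from 'fs';
import { mkdtemp, rm, stat } from 'fs/promises';
import { tmpdir } from 'os';
import { basename, join } from 'path';
import { pipeline } from 'stream/promises';
import { createGzip } from 'zlib';

// Illustrative: gzip srcPath into a temp file so the compressed byte count is
// known before the upload begins.
async function gzipToTemp(srcPath: string): Promise<{ gzPath: string; size: number }> {
  const tmpDir = await mkdtemp(join(tmpdir(), 'pre-gzip-'));
  const gzPath = join(tmpDir, `${basename(srcPath)}.gz`);
  try {
    await pipeline(createReadStream(srcPath), createGzip(), createWriteStream(gzPath));
    const { size } = await stat(gzPath); // exact ContentLength for the PUT
    return { gzPath, size };
  } catch (err) {
    await rm(tmpDir, { recursive: true, force: true }); // drop the temp dir on failure
    throw err;
  }
}

The caller then uploads the temp file with ContentLength set to size and removes it in a finally block, as the compiled upload() above does.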
@@ -116,6 +160,27 @@ export class S3FileStore {
  const { ['Cache-control']: _ignored, ...rest } = meta;
  return Object.keys(rest).length ? rest : undefined;
  }
+ detectContentType(key, isCompressed) {
+ // Basic content type inference
+ const lower = key.toLowerCase();
+ if (lower.endsWith('.json') || lower.endsWith('.json.gz')) {
+ return 'application/json';
+ }
+ if (lower.endsWith('.txt') || lower.endsWith('.log') || lower.endsWith('.csv') || lower.endsWith('.md')) {
+ return 'text/plain; charset=utf-8';
+ }
+ if (lower.endsWith('.db') || lower.endsWith('.sqlite') || lower.endsWith('.bin')) {
+ return 'application/octet-stream';
+ }
+ if (lower.endsWith('.wasm') || lower.endsWith('.wasm.gz')) {
+ return 'application/wasm';
+ }
+ // If compressed, prefer octet-stream unless known
+ if (isCompressed) {
+ return 'application/octet-stream';
+ }
+ return undefined;
+ }
  buildReturnedUrl(key, makePublic) {
  if (!makePublic) {
  return `s3://${this.bucketName}/${key}`;
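
The inference is extension-first, with isCompressed only as a last-resort fallback, so index.json.gz still maps to application/json while an unknown compressed key degrades to application/octet-stream. An abridged standalone copy of the rules to make the precedence concrete (not the package's private method):

// Abridged copy of the inference order shown above, for illustration only.
function inferContentType(key: string, isCompressed: boolean): string | undefined {
  const lower = key.toLowerCase();
  if (lower.endsWith('.json') || lower.endsWith('.json.gz')) return 'application/json';
  if (lower.endsWith('.wasm') || lower.endsWith('.wasm.gz')) return 'application/wasm';
  return isCompressed ? 'application/octet-stream' : undefined;
}

inferContentType('snapshots/index.json', false); // 'application/json'
inferContentType('archive-data.db.gz', true); // 'application/octet-stream' (unknown extension, compressed)
inferContentType('notes/readme', false); // undefined (let the SDK/server decide)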
@@ -1 +1 @@
- {"version":3,"file":"download.d.ts","sourceRoot":"","sources":["../../src/snapshots/download.ts"],"names":[],"mappings":"AAEA,OAAO,KAAK,EAAE,iBAAiB,EAAE,MAAM,0BAA0B,CAAC;AAElE,OAAO,EACL,gBAAgB,EAChB,KAAK,gBAAgB,EACrB,KAAK,gBAAgB,EACrB,KAAK,cAAc,EACnB,KAAK,sBAAsB,EAE5B,MAAM,YAAY,CAAC;AAEpB,wBAAsB,gBAAgB,CACpC,QAAQ,EAAE,sBAAsB,EAChC,KAAK,EAAE,iBAAiB,GACvB,OAAO,CAAC,cAAc,GAAG,SAAS,CAAC,CAarC;AAED,wBAAsB,yBAAyB,CAC7C,QAAQ,EAAE,sBAAsB,EAChC,KAAK,EAAE,iBAAiB,GACvB,OAAO,CAAC,gBAAgB,GAAG,SAAS,CAAC,CAGvC;AAED,wBAAgB,WAAW,CAAC,QAAQ,EAAE,sBAAsB,GAAG,MAAM,CAEpE;AAED,wBAAgB,oBAAoB,CAAC,QAAQ,EAAE,sBAAsB,GAAG,MAAM,CAE7E;AAED,wBAAgB,iBAAiB,CAAC,OAAO,EAAE,MAAM,GAAG,gBAAgB,CAGnE;AAED,wBAAsB,gBAAgB,CACpC,QAAQ,EAAE,IAAI,CAAC,gBAAgB,EAAE,UAAU,CAAC,EAC5C,UAAU,EAAE,MAAM,CAAC,gBAAgB,EAAE,MAAM,CAAC,EAC5C,KAAK,EAAE,iBAAiB,GACvB,OAAO,CAAC,IAAI,CAAC,CAEf"}
+ {"version":3,"file":"download.d.ts","sourceRoot":"","sources":["../../src/snapshots/download.ts"],"names":[],"mappings":"AAEA,OAAO,KAAK,EAAE,iBAAiB,EAAE,MAAM,0BAA0B,CAAC;AAQlE,OAAO,EACL,gBAAgB,EAChB,KAAK,gBAAgB,EACrB,KAAK,gBAAgB,EACrB,KAAK,cAAc,EACnB,KAAK,sBAAsB,EAE5B,MAAM,YAAY,CAAC;AAEpB,wBAAsB,gBAAgB,CACpC,QAAQ,EAAE,sBAAsB,EAChC,KAAK,EAAE,iBAAiB,GACvB,OAAO,CAAC,cAAc,GAAG,SAAS,CAAC,CAcrC;AAED,wBAAsB,yBAAyB,CAC7C,QAAQ,EAAE,sBAAsB,EAChC,KAAK,EAAE,iBAAiB,GACvB,OAAO,CAAC,gBAAgB,GAAG,SAAS,CAAC,CAGvC;AAED,wBAAgB,WAAW,CAAC,QAAQ,EAAE,sBAAsB,GAAG,MAAM,CAEpE;AAED,wBAAgB,oBAAoB,CAAC,QAAQ,EAAE,sBAAsB,GAAG,MAAM,CAE7E;AAED,wBAAgB,iBAAiB,CAAC,OAAO,EAAE,MAAM,GAAG,gBAAgB,CAGnE;AAoCD,wBAAsB,gBAAgB,CACpC,QAAQ,EAAE,IAAI,CAAC,gBAAgB,EAAE,UAAU,CAAC,EAC5C,UAAU,EAAE,MAAM,CAAC,gBAAgB,EAAE,MAAM,CAAC,EAC5C,KAAK,EAAE,iBAAiB,GACvB,OAAO,CAAC,IAAI,CAAC,CAyBf"}
@@ -1,5 +1,10 @@
  import { fromEntries, getEntries, maxBy } from '@aztec/foundation/collection';
  import { jsonParseWithSchema } from '@aztec/foundation/json-rpc';
+ import { createReadStream, createWriteStream } from 'fs';
+ import fs from 'fs/promises';
+ import pathMod from 'path';
+ import { pipeline } from 'stream/promises';
+ import { createGunzip, gunzipSync } from 'zlib';
  import { SnapshotDataKeys, SnapshotsIndexSchema } from './types.js';
  export async function getSnapshotIndex(metadata, store) {
  const basePath = getBasePath(metadata);
@@ -7,7 +12,8 @@ export async function getSnapshotIndex(metadata, store) {
  try {
  if (await store.exists(snapshotIndexPath)) {
  const snapshotIndexData = await store.read(snapshotIndexPath);
- return jsonParseWithSchema(snapshotIndexData.toString(), SnapshotsIndexSchema);
+ const buf = maybeGunzip(snapshotIndexData);
+ return jsonParseWithSchema(buf.toString('utf-8'), SnapshotsIndexSchema);
  } else {
  return undefined;
  }
@@ -32,6 +38,56 @@ export function makeSnapshotPaths(baseDir) {
  `${baseDir}/${key}.db`
  ]));
  }
+ function isGzipMagic(data) {
+ return data.length >= 2 && data[0] === 0x1f && data[1] === 0x8b;
+ }
+ function maybeGunzip(data) {
+ const magicNumberIndicatesGzip = isGzipMagic(data);
+ if (magicNumberIndicatesGzip) {
+ try {
+ const out = gunzipSync(data);
+ return out;
+ } catch (err) {
+ throw new Error(`Decompression of gzipped data failed: ${err.message}`);
+ }
+ }
+ return data;
+ }
+ async function detectGzip(localFilePathToPeek) {
+ // Peek the actual bytes we downloaded.
+ try {
+ const fd = await fs.open(localFilePathToPeek, 'r');
+ try {
+ const header = Buffer.alloc(2);
+ const { bytesRead } = await fd.read(header, 0, 2, 0);
+ return bytesRead >= 2 && isGzipMagic(header);
+ } finally{
+ await fd.close();
+ }
+ } catch {
+ return false;
+ }
+ }
  export async function downloadSnapshot(snapshot, localPaths, store) {
- await Promise.all(getEntries(localPaths).map(([key, path])=>store.download(snapshot.dataUrls[key], path)));
+ await Promise.all(getEntries(localPaths).map(async ([key, path])=>{
+ await fs.mkdir(pathMod.dirname(path), {
+ recursive: true
+ });
+ const tmpPath = `${path}.download`;
+ try {
+ const url = snapshot.dataUrls[key];
+ await store.download(url, tmpPath);
+ const isGzip = await detectGzip(tmpPath);
+ const read = createReadStream(tmpPath);
+ const write = createWriteStream(path);
+ if (isGzip) {
+ const gunzip = createGunzip();
+ await pipeline(read, gunzip, write);
+ } else {
+ await pipeline(read, write);
+ }
+ } finally{
+ await fs.unlink(tmpPath).catch(()=>undefined);
+ }
+ }));
  }
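
Because objects may now land either gzipped or raw depending on which writer produced them, the download path sniffs the gzip magic number (0x1f 0x8b) from the file itself instead of trusting extensions or metadata. A compact sketch of that idea with standard Node APIs; the function name is illustrative:

import { createReadStream, createWriteStream } from 'fs';
import { open } from 'fs/promises';
import { pipeline } from 'stream/promises';
import { createGunzip } from 'zlib';

// Illustrative: copy src to dest, transparently gunzipping when the first two
// bytes carry the gzip magic number.
async function copyMaybeGunzip(src: string, dest: string): Promise<void> {
  const fd = await open(src, 'r');
  const header = Buffer.alloc(2);
  try {
    await fd.read(header, 0, 2, 0); // peek without consuming anything
  } finally {
    await fd.close();
  }
  if (header[0] === 0x1f && header[1] === 0x8b) {
    await pipeline(createReadStream(src), createGunzip(), createWriteStream(dest));
  } else {
    await pipeline(createReadStream(src), createWriteStream(dest));
  }
}

maybeGunzip() above is the in-memory analogue for buffers returned by store.read().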
@@ -1 +1 @@
- {"version":3,"file":"upload.d.ts","sourceRoot":"","sources":["../../src/snapshots/upload.ts"],"names":[],"mappings":"AAGA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,0BAA0B,CAAC;AAG1D,OAAO,KAAK,EAAE,gBAAgB,EAAE,gBAAgB,EAAkB,sBAAsB,EAAE,MAAM,YAAY,CAAC;AAE7G,wBAAsB,kBAAkB,CACtC,UAAU,EAAE,MAAM,CAAC,gBAAgB,EAAE,MAAM,CAAC,EAC5C,cAAc,EAAE,gBAAgB,CAAC,gBAAgB,CAAC,EAClD,QAAQ,EAAE,sBAAsB,EAChC,KAAK,EAAE,SAAS,EAChB,IAAI,GAAE;IAAE,OAAO,CAAC,EAAE,CAAC,GAAG,EAAE,gBAAgB,KAAK,MAAM,CAAC;IAAC,OAAO,CAAC,EAAE,OAAO,CAAA;CAAO,GAC5E,OAAO,CAAC,gBAAgB,CAAC,CAsB3B;AAED,wBAAsB,qBAAqB,CACzC,UAAU,EAAE,MAAM,CAAC,gBAAgB,EAAE,MAAM,CAAC,EAC5C,cAAc,EAAE,gBAAgB,CAAC,gBAAgB,CAAC,EAClD,QAAQ,EAAE,sBAAsB,EAChC,KAAK,EAAE,SAAS,GACf,OAAO,CAAC,gBAAgB,CAAC,CAU3B"}
+ {"version":3,"file":"upload.d.ts","sourceRoot":"","sources":["../../src/snapshots/upload.ts"],"names":[],"mappings":"AAGA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,0BAA0B,CAAC;AAG1D,OAAO,KAAK,EAAE,gBAAgB,EAAE,gBAAgB,EAAkB,sBAAsB,EAAE,MAAM,YAAY,CAAC;AAE7G,wBAAsB,kBAAkB,CACtC,UAAU,EAAE,MAAM,CAAC,gBAAgB,EAAE,MAAM,CAAC,EAC5C,cAAc,EAAE,gBAAgB,CAAC,gBAAgB,CAAC,EAClD,QAAQ,EAAE,sBAAsB,EAChC,KAAK,EAAE,SAAS,EAChB,IAAI,GAAE;IAAE,OAAO,CAAC,EAAE,CAAC,GAAG,EAAE,gBAAgB,KAAK,MAAM,CAAC;IAAC,OAAO,CAAC,EAAE,OAAO,CAAA;CAAO,GAC5E,OAAO,CAAC,gBAAgB,CAAC,CAsB3B;AAED,wBAAsB,qBAAqB,CACzC,UAAU,EAAE,MAAM,CAAC,gBAAgB,EAAE,MAAM,CAAC,EAC5C,cAAc,EAAE,gBAAgB,CAAC,gBAAgB,CAAC,EAClD,QAAQ,EAAE,sBAAsB,EAChC,KAAK,EAAE,SAAS,GACf,OAAO,CAAC,gBAAgB,CAAC,CAW3B"}
@@ -27,6 +27,7 @@ export async function uploadSnapshotToIndex(localPaths, schemaVersions, metadata
  snapshotsIndex.snapshots.unshift(newSnapshotMetadata);
  await store.save(getSnapshotIndexPath(metadata), Buffer.from(jsonStringify(snapshotsIndex, true)), {
  public: true,
+ compress: false,
  metadata: {
  ['Cache-control']: 'no-store'
  }
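
Passing compress: false here is belt-and-braces: the new save() default already skips compression, but being explicit keeps the publicly served index from ever being stored gzipped, while getSnapshotIndex() still tolerates legacy gzipped indices via the magic-byte check. A minimal sketch of that tolerant read path (the function is a stand-in, not the package's export):

import { gunzipSync } from 'zlib';

// Illustrative: accept both plain and legacy-gzipped index bytes.
function parseIndexBytes(data: Buffer): unknown {
  const isGzip = data.length >= 2 && data[0] === 0x1f && data[1] === 0x8b;
  const buf = isGzip ? gunzipSync(data) : data;
  return JSON.parse(buf.toString('utf-8'));
}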
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@aztec/stdlib",
- "version": "3.0.0-nightly.20251005",
+ "version": "3.0.0-nightly.20251007",
  "type": "module",
  "inherits": [
  "../package.common.json",
@@ -70,13 +70,13 @@
  },
  "dependencies": {
  "@aws-sdk/client-s3": "^3.892.0",
- "@aztec/bb.js": "3.0.0-nightly.20251005",
- "@aztec/blob-lib": "3.0.0-nightly.20251005",
- "@aztec/constants": "3.0.0-nightly.20251005",
- "@aztec/ethereum": "3.0.0-nightly.20251005",
- "@aztec/foundation": "3.0.0-nightly.20251005",
- "@aztec/l1-artifacts": "3.0.0-nightly.20251005",
- "@aztec/noir-noirc_abi": "3.0.0-nightly.20251005",
+ "@aztec/bb.js": "3.0.0-nightly.20251007",
+ "@aztec/blob-lib": "3.0.0-nightly.20251007",
+ "@aztec/constants": "3.0.0-nightly.20251007",
+ "@aztec/ethereum": "3.0.0-nightly.20251007",
+ "@aztec/foundation": "3.0.0-nightly.20251007",
+ "@aztec/l1-artifacts": "3.0.0-nightly.20251007",
+ "@aztec/noir-noirc_abi": "3.0.0-nightly.20251007",
  "@google-cloud/storage": "^7.15.0",
  "axios": "^1.12.0",
  "json-stringify-deterministic": "1.0.12",
@@ -12,8 +12,14 @@ export type FileStoreSaveOptions = { public?: boolean; metadata?: Record<string,

  /** Simple file store. */
  export interface FileStore extends ReadOnlyFileStore {
- /** Saves contents to the given path. Returns an URI that can be used later to `read` the file. */
+ /**
+ * Saves contents to the given path. Returns an URI that can be used later to `read` the file.
+ * Default: `compress` is false unless explicitly set.
+ */
  save(path: string, data: Buffer, opts?: FileStoreSaveOptions): Promise<string>;
- /** Uploads contents from a local file. Returns an URI that can be used later to `read` the file. */
+ /**
+ * Uploads contents from a local file. Returns an URI that can be used later to `read` the file.
+ * Default: `compress` is true unless explicitly set to false.
+ */
  upload(destPath: string, srcPath: string, opts?: FileStoreSaveOptions): Promise<string>;
  }
@@ -8,8 +8,9 @@ import {
  S3Client,
  } from '@aws-sdk/client-s3';
  import { createReadStream, createWriteStream } from 'fs';
- import { mkdir } from 'fs/promises';
- import { dirname, join } from 'path';
+ import { mkdir, mkdtemp, stat, unlink } from 'fs/promises';
+ import { tmpdir } from 'os';
+ import { basename, dirname, join } from 'path';
  import { Readable } from 'stream';
  import { finished } from 'stream/promises';
  import { createGzip } from 'zlib';
@@ -49,15 +50,19 @@ export class S3FileStore implements FileStore {

  public async save(path: string, data: Buffer, opts: FileStoreSaveOptions = {}): Promise<string> {
  const key = this.getFullPath(path);
- const shouldCompress = !opts.compress;
+ const shouldCompress = !!opts.compress;
+
  const body = shouldCompress ? (await import('zlib')).gzipSync(data) : data;
+ const contentLength = body.length;
+ const contentType = this.detectContentType(key, shouldCompress);
  const put = new PutObjectCommand({
  Bucket: this.bucketName,
  Key: key,
  Body: body,
- ContentEncoding: shouldCompress ? 'gzip' : undefined,
+ ContentType: contentType,
  CacheControl: opts.metadata?.['Cache-control'],
  Metadata: this.extractUserMetadata(opts.metadata),
+ ContentLength: contentLength,
  });
  await this.s3.send(put);
  return this.buildReturnedUrl(key, !!opts.public);
@@ -68,18 +73,59 @@ export class S3FileStore implements FileStore {
  const shouldCompress = opts.compress !== false; // default true like GCS impl

  await mkdir(dirname(srcPath), { recursive: true }).catch(() => undefined);
+ let contentLength: number | undefined;
+ let bodyPath = srcPath;
+
+ // We don't set Content-Encoding and we avoid SigV4 streaming (aws-chunked).
+ // With AWS SigV4 streaming uploads (Content-Encoding: aws-chunked[,gzip]), servers require
+ // x-amz-decoded-content-length (the size of the decoded payload) and an exact Content-Length
+ // that includes chunk metadata. For on-the-fly compression, providing
+ // those values without buffering or a pre-pass is impractical. Instead, we pre-gzip to a temp file
+ // to know ContentLength up-front and upload the gzipped bytes as-is, omitting Content-Encoding.
+ // Reference: AWS SigV4 streaming (chunked upload) requirements —
+ // https://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-streaming.html
+ if (shouldCompress) {
+ // Pre-gzip to a temp file so we know the exact length for R2/S3 headers
+ const tmpDir = await mkdtemp(join(tmpdir(), 's3-upload-'));
+ const gzPath = join(tmpDir, `${basename(srcPath)}.gz`);
+ const source = createReadStream(srcPath);
+ const gz = createGzip();
+ const out = createWriteStream(gzPath);
+ try {
+ await finished(source.pipe(gz).pipe(out));
+ const st = await stat(gzPath);
+ contentLength = st.size;
+ bodyPath = gzPath;
+ } catch (err) {
+ // Ensure temp file is removed on failure
+ await unlink(gzPath).catch(() => undefined);
+ throw err;
+ }
+ } else {
+ const st = await stat(srcPath);
+ contentLength = st.size;
+ bodyPath = srcPath;
+ }

- const source = createReadStream(srcPath);
- const bodyStream = shouldCompress ? source.pipe(createGzip()) : source;
- const put = new PutObjectCommand({
- Bucket: this.bucketName,
- Key: key,
- Body: bodyStream as any,
- ContentEncoding: shouldCompress ? 'gzip' : undefined,
- CacheControl: opts.metadata?.['Cache-control'],
- Metadata: this.extractUserMetadata(opts.metadata),
- });
- await this.s3.send(put);
+ const bodyStream = createReadStream(bodyPath);
+ const contentType = this.detectContentType(key, shouldCompress);
+ try {
+ const put = new PutObjectCommand({
+ Bucket: this.bucketName,
+ Key: key,
+ Body: bodyStream as any,
+ ContentType: contentType,
+ CacheControl: opts.metadata?.['Cache-control'],
+ Metadata: this.extractUserMetadata(opts.metadata),
+ // Explicitly set ContentLength so R2 can compute x-amz-decoded-content-length correctly
+ ContentLength: contentLength,
+ } as any);
+ await this.s3.send(put);
+ } finally {
+ if (shouldCompress && bodyPath !== srcPath) {
+ await unlink(bodyPath).catch(() => undefined);
+ }
+ }
  return this.buildReturnedUrl(key, !!opts.public);
  }

@@ -125,6 +171,28 @@ export class S3FileStore implements FileStore {
  return Object.keys(rest).length ? rest : undefined;
  }

+ private detectContentType(key: string, isCompressed: boolean | undefined): string | undefined {
+ // Basic content type inference
+ const lower = key.toLowerCase();
+ if (lower.endsWith('.json') || lower.endsWith('.json.gz')) {
+ return 'application/json';
+ }
+ if (lower.endsWith('.txt') || lower.endsWith('.log') || lower.endsWith('.csv') || lower.endsWith('.md')) {
+ return 'text/plain; charset=utf-8';
+ }
+ if (lower.endsWith('.db') || lower.endsWith('.sqlite') || lower.endsWith('.bin')) {
+ return 'application/octet-stream';
+ }
+ if (lower.endsWith('.wasm') || lower.endsWith('.wasm.gz')) {
+ return 'application/wasm';
+ }
+ // If compressed, prefer octet-stream unless known
+ if (isCompressed) {
+ return 'application/octet-stream';
+ }
+ return undefined;
+ }
+
  private buildReturnedUrl(key: string, makePublic: boolean): string {
  if (!makePublic) {
  return `s3://${this.bucketName}/${key}`;
@@ -2,6 +2,12 @@ import { fromEntries, getEntries, maxBy } from '@aztec/foundation/collection';
  import { jsonParseWithSchema } from '@aztec/foundation/json-rpc';
  import type { ReadOnlyFileStore } from '@aztec/stdlib/file-store';

+ import { createReadStream, createWriteStream } from 'fs';
+ import fs from 'fs/promises';
+ import pathMod from 'path';
+ import { pipeline } from 'stream/promises';
+ import { createGunzip, gunzipSync } from 'zlib';
+
  import {
  SnapshotDataKeys,
  type SnapshotDataUrls,
@@ -20,7 +26,8 @@ export async function getSnapshotIndex(
  try {
  if (await store.exists(snapshotIndexPath)) {
  const snapshotIndexData = await store.read(snapshotIndexPath);
- return jsonParseWithSchema(snapshotIndexData.toString(), SnapshotsIndexSchema);
+ const buf = maybeGunzip(snapshotIndexData);
+ return jsonParseWithSchema(buf.toString('utf-8'), SnapshotsIndexSchema);
  } else {
  return undefined;
  }
@@ -50,10 +57,67 @@ export function makeSnapshotPaths(baseDir: string): SnapshotDataUrls {
  return fromEntries(SnapshotDataKeys.map(key => [key, `${baseDir}/${key}.db`]));
  }

+ function isGzipMagic(data: Buffer): boolean {
+ return data.length >= 2 && data[0] === 0x1f && data[1] === 0x8b;
+ }
+
+ function maybeGunzip(data: Buffer): Buffer {
+ const magicNumberIndicatesGzip = isGzipMagic(data);
+
+ if (magicNumberIndicatesGzip) {
+ try {
+ const out = gunzipSync(data);
+ return out;
+ } catch (err) {
+ throw new Error(`Decompression of gzipped data failed: ${(err as Error).message}`);
+ }
+ }
+ return data;
+ }
+
+ async function detectGzip(localFilePathToPeek: string): Promise<boolean> {
+ // Peek the actual bytes we downloaded.
+ try {
+ const fd = await fs.open(localFilePathToPeek, 'r');
+ try {
+ const header = Buffer.alloc(2);
+ const { bytesRead } = await fd.read(header, 0, 2, 0);
+ return bytesRead >= 2 && isGzipMagic(header);
+ } finally {
+ await fd.close();
+ }
+ } catch {
+ return false;
+ }
+ }
+
  export async function downloadSnapshot(
  snapshot: Pick<SnapshotMetadata, 'dataUrls'>,
  localPaths: Record<SnapshotDataKeys, string>,
  store: ReadOnlyFileStore,
  ): Promise<void> {
- await Promise.all(getEntries(localPaths).map(([key, path]) => store.download(snapshot.dataUrls[key], path)));
+ await Promise.all(
+ getEntries(localPaths).map(async ([key, path]) => {
+ await fs.mkdir(pathMod.dirname(path), { recursive: true });
+
+ const tmpPath = `${path}.download`;
+ try {
+ const url = snapshot.dataUrls[key];
+ await store.download(url, tmpPath);
+
+ const isGzip = await detectGzip(tmpPath);
+
+ const read = createReadStream(tmpPath);
+ const write = createWriteStream(path);
+ if (isGzip) {
+ const gunzip = createGunzip();
+ await pipeline(read, gunzip, write);
+ } else {
+ await pipeline(read, write);
+ }
+ } finally {
+ await fs.unlink(tmpPath).catch(() => undefined);
+ }
+ }),
+ );
  }
@@ -48,6 +48,7 @@ export async function uploadSnapshotToIndex(

  await store.save(getSnapshotIndexPath(metadata), Buffer.from(jsonStringify(snapshotsIndex, true)), {
  public: true, // Make the index publicly accessible
+ compress: false, // Ensure index.json is not gzipped
  metadata: { ['Cache-control']: 'no-store' }, // Do not cache object versions
  });
  return newSnapshotMetadata;