@aztec/stdlib 2.1.2-rc.2 → 2.1.2-rc.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dest/file-store/http.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"http.d.ts","sourceRoot":"","sources":["../../src/file-store/http.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,MAAM,EAAgB,MAAM,uBAAuB,CAAC;AAUlE,OAAO,KAAK,EAAE,iBAAiB,EAAE,MAAM,gBAAgB,CAAC;AAExD,qBAAa,aAAc,YAAW,iBAAiB;IAKnD,OAAO,CAAC,QAAQ,CAAC,OAAO;IACxB,OAAO,CAAC,QAAQ,CAAC,GAAG;IALtB,OAAO,CAAC,QAAQ,CAAC,aAAa,CAAgB;IAC9C,OAAO,CAAC,QAAQ,CAAC,KAAK,CAA+D;gBAGlE,OAAO,EAAE,MAAM,EACf,GAAG,GAAE,MAA+C;IAc1D,IAAI,CAAC,SAAS,EAAE,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC;IAUxC,QAAQ,CAAC,SAAS,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;
+{"version":3,"file":"http.d.ts","sourceRoot":"","sources":["../../src/file-store/http.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,MAAM,EAAgB,MAAM,uBAAuB,CAAC;AAUlE,OAAO,KAAK,EAAE,iBAAiB,EAAE,MAAM,gBAAgB,CAAC;AAExD,qBAAa,aAAc,YAAW,iBAAiB;IAKnD,OAAO,CAAC,QAAQ,CAAC,OAAO;IACxB,OAAO,CAAC,QAAQ,CAAC,GAAG;IALtB,OAAO,CAAC,QAAQ,CAAC,aAAa,CAAgB;IAC9C,OAAO,CAAC,QAAQ,CAAC,KAAK,CAA+D;gBAGlE,OAAO,EAAE,MAAM,EACf,GAAG,GAAE,MAA+C;IAc1D,IAAI,CAAC,SAAS,EAAE,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC;IAUxC,QAAQ,CAAC,SAAS,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;IAc5D,MAAM,CAAC,SAAS,EAAE,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC;IAUxD,OAAO,CAAC,MAAM;CAGf"}
package/dest/file-store/http.js
CHANGED
@@ -4,7 +4,7 @@ import axios from 'axios';
 import { createWriteStream } from 'fs';
 import { mkdir } from 'fs/promises';
 import { dirname } from 'path';
-import {
+import { pipeline } from 'stream/promises';
 export class HttpFileStore {
     baseUrl;
     log;
@@ -38,17 +38,22 @@ export class HttpFileStore {
     async download(pathOrUrl, destPath) {
         const url = this.getUrl(pathOrUrl);
         try {
+            this.log.debug(`Downloading file from ${url} to ${destPath}`);
             const response = await this.fetch({
                 url,
                 method: 'GET',
                 responseType: 'stream'
             });
+            this.log.debug(`Response ${response.status} (${response.statusText}) from ${url}, writing to ${destPath}`);
             await mkdir(dirname(destPath), {
                 recursive: true
             });
-            await
+            await pipeline(response.data, createWriteStream(destPath));
+            this.log.debug(`Download of ${url} to ${destPath} complete`);
         } catch (error) {
-            throw new Error(`Error fetching file from ${url}
+            throw new Error(`Error fetching file from ${url}`, {
+                cause: error
+            });
         }
     }
     async exists(pathOrUrl) {
package/dest/file-store/s3.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"s3.d.ts","sourceRoot":"","sources":["../../src/file-store/s3.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,MAAM,EAAgB,MAAM,uBAAuB,CAAC;AAiBlE,OAAO,KAAK,EAAE,SAAS,EAAE,oBAAoB,EAAE,MAAM,gBAAgB,CAAC;AAMtE,qBAAa,WAAY,YAAW,SAAS;IAOzC,OAAO,CAAC,QAAQ,CAAC,UAAU;IAC3B,OAAO,CAAC,QAAQ,CAAC,QAAQ;IAEzB,OAAO,CAAC,QAAQ,CAAC,GAAG;IATtB,OAAO,CAAC,QAAQ,CAAC,EAAE,CAAW;IAC9B,OAAO,CAAC,QAAQ,CAAC,MAAM,CAAS;IAChC,OAAO,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAS;IACnC,OAAO,CAAC,QAAQ,CAAC,aAAa,CAAC,CAAS;gBAGrB,UAAU,EAAE,MAAM,EAClB,QAAQ,EAAE,MAAM,EACjC,IAAI,EAAE;QAAE,QAAQ,CAAC,EAAE,MAAM,CAAC;QAAC,aAAa,CAAC,EAAE,MAAM,CAAA;KAAE,EAClC,GAAG,GAAE,MAA6C;IAiBxD,IAAI,CAAC,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,GAAE,oBAAyB,GAAG,OAAO,CAAC,MAAM,CAAC;IAoBlF,MAAM,CAAC,QAAQ,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,EAAE,IAAI,GAAE,oBAAyB,GAAG,OAAO,CAAC,MAAM,CAAC;
+{"version":3,"file":"s3.d.ts","sourceRoot":"","sources":["../../src/file-store/s3.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,MAAM,EAAgB,MAAM,uBAAuB,CAAC;AAiBlE,OAAO,KAAK,EAAE,SAAS,EAAE,oBAAoB,EAAE,MAAM,gBAAgB,CAAC;AAMtE,qBAAa,WAAY,YAAW,SAAS;IAOzC,OAAO,CAAC,QAAQ,CAAC,UAAU;IAC3B,OAAO,CAAC,QAAQ,CAAC,QAAQ;IAEzB,OAAO,CAAC,QAAQ,CAAC,GAAG;IATtB,OAAO,CAAC,QAAQ,CAAC,EAAE,CAAW;IAC9B,OAAO,CAAC,QAAQ,CAAC,MAAM,CAAS;IAChC,OAAO,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAS;IACnC,OAAO,CAAC,QAAQ,CAAC,aAAa,CAAC,CAAS;gBAGrB,UAAU,EAAE,MAAM,EAClB,QAAQ,EAAE,MAAM,EACjC,IAAI,EAAE;QAAE,QAAQ,CAAC,EAAE,MAAM,CAAC;QAAC,aAAa,CAAC,EAAE,MAAM,CAAA;KAAE,EAClC,GAAG,GAAE,MAA6C;IAiBxD,IAAI,CAAC,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,GAAE,oBAAyB,GAAG,OAAO,CAAC,MAAM,CAAC;IAoBlF,MAAM,CAAC,QAAQ,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,EAAE,IAAI,GAAE,oBAAyB,GAAG,OAAO,CAAC,MAAM,CAAC;IA0D3F,IAAI,CAAC,YAAY,EAAE,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC;IAW3C,QAAQ,CAAC,YAAY,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;IAO/D,MAAM,CAAC,YAAY,EAAE,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC;IAe3D,OAAO,CAAC,mBAAmB;IAQ3B,OAAO,CAAC,iBAAiB;IAsBzB,OAAO,CAAC,gBAAgB;IA0BxB,OAAO,CAAC,eAAe;IA2BvB,OAAO,CAAC,WAAW;CAKpB"}
package/dest/file-store/s3.js
CHANGED
@@ -4,7 +4,7 @@ import { createReadStream, createWriteStream } from 'fs';
 import { mkdir, mkdtemp, stat, unlink } from 'fs/promises';
 import { tmpdir } from 'os';
 import { basename, dirname, join } from 'path';
-import {
+import { pipeline } from 'stream/promises';
 import { createGzip } from 'zlib';
 function normalizeBasePath(path) {
     return path?.replace(/^\/+|\/+$/g, '') ?? '';
@@ -72,11 +72,8 @@ export class S3FileStore {
         // Pre-gzip to a temp file so we know the exact length for R2/S3 headers
         const tmpDir = await mkdtemp(join(tmpdir(), 's3-upload-'));
         const gzPath = join(tmpDir, `${basename(srcPath)}.gz`);
-        const source = createReadStream(srcPath);
-        const gz = createGzip();
-        const out = createWriteStream(gzPath);
         try {
-            await
+            await pipeline(createReadStream(srcPath), createGzip(), createWriteStream(gzPath));
             const st = await stat(gzPath);
             contentLength = st.size;
             bodyPath = gzPath;
@@ -133,8 +130,7 @@ export class S3FileStore {
         await mkdir(dirname(destPath), {
             recursive: true
         });
-
-        await finished(out.Body.pipe(write));
+        await pipeline(out.Body, createWriteStream(destPath));
     }
     async exists(pathOrUrlStr) {
         try {
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@aztec/stdlib",
-  "version": "2.1.2-rc.
+  "version": "2.1.2-rc.5",
   "type": "module",
   "inherits": [
     "../package.common.json",
@@ -70,13 +70,13 @@
   },
   "dependencies": {
     "@aws-sdk/client-s3": "^3.892.0",
-    "@aztec/bb.js": "2.1.2-rc.
-    "@aztec/blob-lib": "2.1.2-rc.
-    "@aztec/constants": "2.1.2-rc.
-    "@aztec/ethereum": "2.1.2-rc.
-    "@aztec/foundation": "2.1.2-rc.
-    "@aztec/l1-artifacts": "2.1.2-rc.
-    "@aztec/noir-noirc_abi": "2.1.2-rc.
+    "@aztec/bb.js": "2.1.2-rc.5",
+    "@aztec/blob-lib": "2.1.2-rc.5",
+    "@aztec/constants": "2.1.2-rc.5",
+    "@aztec/ethereum": "2.1.2-rc.5",
+    "@aztec/foundation": "2.1.2-rc.5",
+    "@aztec/l1-artifacts": "2.1.2-rc.5",
+    "@aztec/noir-noirc_abi": "2.1.2-rc.5",
     "@google-cloud/storage": "^7.15.0",
     "axios": "^1.12.0",
     "json-stringify-deterministic": "1.0.12",
package/src/file-store/http.ts
CHANGED
@@ -6,7 +6,7 @@ import { createWriteStream } from 'fs';
 import { mkdir } from 'fs/promises';
 import { dirname } from 'path';
 import { Readable } from 'stream';
-import {
+import { pipeline } from 'stream/promises';
 
 import type { ReadOnlyFileStore } from './interface.js';
 
@@ -43,11 +43,14 @@ export class HttpFileStore implements ReadOnlyFileStore {
   public async download(pathOrUrl: string, destPath: string): Promise<void> {
     const url = this.getUrl(pathOrUrl);
     try {
+      this.log.debug(`Downloading file from ${url} to ${destPath}`);
       const response = await this.fetch<Readable>({ url, method: 'GET', responseType: 'stream' });
+      this.log.debug(`Response ${response.status} (${response.statusText}) from ${url}, writing to ${destPath}`);
       await mkdir(dirname(destPath), { recursive: true });
-      await
+      await pipeline(response.data, createWriteStream(destPath));
+      this.log.debug(`Download of ${url} to ${destPath} complete`);
     } catch (error) {
-      throw new Error(`Error fetching file from ${url}
+      throw new Error(`Error fetching file from ${url}`, { cause: error });
     }
   }
 
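For context on the download change above: the old manual write is replaced by `pipeline` from `stream/promises`, and the caught error is rethrown with a `cause`. Below is a minimal standalone sketch of the same pattern; the `downloadToFile` helper is hypothetical and calls axios directly instead of the class's `this.fetch`.

import axios from 'axios';
import { createWriteStream } from 'fs';
import { mkdir } from 'fs/promises';
import { dirname } from 'path';
import type { Readable } from 'stream';
import { pipeline } from 'stream/promises';

// Hypothetical helper illustrating the pattern from the diff: stream the HTTP
// response body straight to disk and let pipeline() handle backpressure,
// error propagation, and cleanup of both streams.
async function downloadToFile(url: string, destPath: string): Promise<void> {
  try {
    const response = await axios.request<Readable>({ url, method: 'GET', responseType: 'stream' });
    await mkdir(dirname(destPath), { recursive: true });
    await pipeline(response.data, createWriteStream(destPath));
  } catch (error) {
    // Error's `cause` option keeps the original failure attached for callers to inspect.
    throw new Error(`Error fetching file from ${url}`, { cause: error });
  }
}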
package/src/file-store/s3.ts
CHANGED
@@ -12,7 +12,7 @@ import { mkdir, mkdtemp, stat, unlink } from 'fs/promises';
 import { tmpdir } from 'os';
 import { basename, dirname, join } from 'path';
 import { Readable } from 'stream';
-import {
+import { pipeline } from 'stream/promises';
 import { createGzip } from 'zlib';
 
 import type { FileStore, FileStoreSaveOptions } from './interface.js';
 
@@ -88,11 +88,8 @@ export class S3FileStore implements FileStore {
     // Pre-gzip to a temp file so we know the exact length for R2/S3 headers
     const tmpDir = await mkdtemp(join(tmpdir(), 's3-upload-'));
     const gzPath = join(tmpDir, `${basename(srcPath)}.gz`);
-    const source = createReadStream(srcPath);
-    const gz = createGzip();
-    const out = createWriteStream(gzPath);
     try {
-      await
+      await pipeline(createReadStream(srcPath), createGzip(), createWriteStream(gzPath));
       const st = await stat(gzPath);
       contentLength = st.size;
       bodyPath = gzPath;
@@ -144,8 +141,7 @@ export class S3FileStore implements FileStore {
     const { bucket, key } = this.getBucketAndKey(pathOrUrlStr);
     const out: GetObjectCommandOutput = await this.s3.send(new GetObjectCommand({ Bucket: bucket, Key: key }));
     await mkdir(dirname(destPath), { recursive: true });
-
-    await finished((out.Body as Readable).pipe(write));
+    await pipeline(out.Body as Readable, createWriteStream(destPath));
   }
 
   public async exists(pathOrUrlStr: string): Promise<boolean> {
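For context on the upload change: the gzip output is first written to a temp file via `pipeline` so its exact byte size is known before the S3/R2 upload (which needs a Content-Length). A minimal sketch of that step under the same assumptions; the `gzipToTempFile` helper is hypothetical and not part of the package's API.

import { createReadStream, createWriteStream } from 'fs';
import { mkdtemp, stat } from 'fs/promises';
import { tmpdir } from 'os';
import { basename, join } from 'path';
import { pipeline } from 'stream/promises';
import { createGzip } from 'zlib';

// Hypothetical helper illustrating the pre-gzip step from the diff: compress
// the source file into a temp directory, then stat the result so the caller
// can set an exact Content-Length on the upload request.
async function gzipToTempFile(srcPath: string): Promise<{ gzPath: string; contentLength: number }> {
  const tmpDir = await mkdtemp(join(tmpdir(), 's3-upload-'));
  const gzPath = join(tmpDir, `${basename(srcPath)}.gz`);
  await pipeline(createReadStream(srcPath), createGzip(), createWriteStream(gzPath));
  const { size: contentLength } = await stat(gzPath);
  return { gzPath, contentLength };
}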