lean-s3 0.1.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/src/S3File.js ADDED
@@ -0,0 +1,293 @@
1
+ // @ts-check
2
+
3
+ import S3Error from "./S3Error.js";
4
+ import S3Stat from "./S3Stat.js";
5
+ import { write, stream } from "./S3Client.js";
6
+ import { sha256 } from "./sign.js";
7
+ import { Readable } from "node:stream";
8
+
9
+ /** @typedef {import("./S3Client.js").default} S3Client */
10
+ /** @typedef {import("./index.d.ts").S3ClientOptions} S3ClientOptions */
11
+ /** @typedef {import("./index.d.ts").PresignableHttpMethod} PresignableHttpMethod */
12
+ /** @typedef {import("./index.d.ts").StorageClass} StorageClass */
13
+ /** @typedef {import("./index.d.ts").Acl} Acl */
14
+ /** @typedef {import("./index.d.ts").S3FilePresignOptions} S3FilePresignOptions */
15
+ /** @typedef {import("./index.d.ts").S3StatOptions} S3StatOptions */
16
+ /** @typedef {import("./index.d.ts").S3FileExistsOptions} S3FileExistsOptions */
17
+ /** @typedef {import("./index.d.ts").S3FileDeleteOptions} S3FileDeleteOptions */
18
+ /** @typedef {import("./index.d.ts").ByteSource} ByteSource */
19
+
20
+ // TODO: If we want to hack around, we can use this to access the private implementation of the "get stream" algorithm used by Node.js's blob internally
21
+ // We probably have to do this some day if the fetch implementation is moved to internals.
22
+ // If this happens, fetch will probably use `[kHandle].getReader()` instead of .stream() to read the Blob
23
+ // This would break our use-case of passing an S3File as a body
24
+ // Using this hack would also make `.text()`, `.bytes()` etc. "just work" in every case, since these use `[kHandle]` internally as well.
25
+ // We now resort back into overriding text/bytes/etc. But as soon as another internal Node.js API uses this functionality, this would probably also use `[kHandle]` and bypass our data.
26
+ // const kHandle = Object.getOwnPropertySymbols(new Blob).find(s => s.toString() === 'Symbol(kHandle)');
27
export default class S3File {
  /** @type {S3Client} Client that performs all network operations for this file. */
  #client;
  /** @type {string} Object key within the client's bucket. */
  #path;
  /** @type {number | undefined} Absolute inclusive byte offset where this view starts. */
  #start;
  /** @type {number | undefined} Absolute exclusive byte offset where this view ends. */
  #end;
  /** @type {string} MIME type reported for this file. */
  #contentType;

  /**
   * @internal Create instances via `S3Client#file()`.
   * @param {S3Client} client
   * @param {string} path
   * @param {number | undefined} start
   * @param {number | undefined} end
   * @param {string | undefined} contentType Defaults to `"application/octet-stream"`.
   * @throws {Error} When `start` is negative, or `end` is negative or before `start`.
   */
  constructor(client, path, start, end, contentType) {
    if (typeof start === "number" && start < 0) {
      throw new Error("Invalid slice `start`.");
    }
    if (
      typeof end === "number" &&
      (end < 0 || (typeof start === "number" && end < start))
    ) {
      throw new Error("Invalid slice `end`.");
    }

    this.#client = client;
    this.#path = path;
    this.#start = start;
    this.#end = end;
    this.#contentType = contentType ?? "application/octet-stream";
  }

  // TODO: slice overloads
  /**
   * Return a new view onto a byte range of this file, mirroring
   * `Blob.prototype.slice`.
   *
   * Fix: `start`/`end` are now interpreted relative to the CURRENT view and
   * clamped to its end, so slicing an already-sliced file composes correctly.
   * Previously the offsets were forwarded as absolute positions, which made
   * nested slices read the wrong byte range of the underlying object.
   *
   * @param {number | undefined} [start] Start offset, relative to this view.
   * @param {number | undefined} [end] Exclusive end offset, relative to this view.
   * @param {string | undefined} [contentType] Content type of the new view; inherited when omitted.
   * @returns {S3File}
   */
  slice(start, end, contentType) {
    const base = this.#start ?? 0;
    let absoluteStart = start == null ? this.#start : base + start;
    let absoluteEnd = end == null ? this.#end : base + end;
    if (this.#end !== undefined) {
      // A sub-slice must never extend past the window it was created from.
      if (absoluteStart !== undefined && absoluteStart > this.#end) {
        absoluteStart = this.#end;
      }
      if (absoluteEnd !== undefined && absoluteEnd > this.#end) {
        absoluteEnd = this.#end;
      }
    }
    return new S3File(
      this.#client,
      this.#path,
      absoluteStart,
      absoluteEnd,
      contentType ?? this.#contentType,
    );
  }

  /**
   * Get the stat of a file in the bucket. Uses a `HEAD` request.
   *
   * @throws {S3Error} `NoSuchKey` when the object does not exist, `Unknown` for other failures.
   * @throws {Error} When the server's HEAD response is missing required headers.
   * @param {Partial<S3StatOptions>} [options]
   * @returns {Promise<S3Stat>}
   */
  async stat({ signal } = {}) {
    // TODO: Support all options

    // TODO: Don't use presign here
    const url = this.#client.presign(this.#path, { method: "HEAD" });
    const response = await fetch(url, { method: "HEAD", signal }); // TODO: Use undici

    if (!response.ok) {
      switch (response.status) {
        case 404:
          // TODO: Process response body
          throw new S3Error("NoSuchKey", this.#path);
        default:
          // TODO: Process response body
          throw new S3Error("Unknown", this.#path);
      }
    }

    const result = S3Stat.tryParseFromHeaders(response.headers);
    if (!result) {
      throw new Error("S3 server returned an invalid response for HEAD");
    }
    return result;
  }

  /**
   * Check if a file exists in the bucket. Uses a `HEAD` request; any non-2xx
   * status (including auth errors) is reported as "does not exist".
   * @param {Partial<S3FileExistsOptions>} [options]
   * @returns {Promise<boolean>}
   */
  async exists({ signal } = {}) {
    // TODO: Support all options

    // TODO: Don't use presign here
    const url = this.#client.presign(this.#path, { method: "HEAD" });
    const res = await fetch(url, { method: "HEAD", signal }); // TODO: Use undici
    return res.ok;
  }

  /**
   * Delete a file from the bucket.
   * @param {Partial<S3FileDeleteOptions>} [options]
   * @throws {S3Error} `NoSuchKey` on 404, `Unknown` for any other error status.
   * @returns {Promise<void>}
   *
   * @example
   * ```js
   * // Simple delete
   * await client.unlink("old-file.txt");
   *
   * // With error handling
   * try {
   *   await client.unlink("file.dat");
   *   console.log("File deleted");
   * } catch (err) {
   *   console.error("Delete failed:", err);
   * }
   * ```
   */
  async delete({ signal } = {}) {
    // TODO: Support all options

    // TODO: Don't use presign here
    const url = this.#client.presign(this.#path, { method: "DELETE" });
    const response = await fetch(url, { method: "DELETE", signal }); // TODO: Use undici
    if (!response.ok) {
      switch (response.status) {
        case 404:
          // TODO: Process response body
          throw new S3Error("NoSuchKey", this.#path);
        default:
          // TODO: Process response body
          throw new S3Error("Unknown", this.#path);
      }
    }
  }

  toString() {
    return `S3File { path: "${this.#path}" }`;
  }

  /**
   * Download and JSON-parse the file's contents.
   * @returns {Promise<unknown>}
   */
  json() {
    // Not using JSON.parse(await this.text()), so the env can parse json while loading
    // Also, see TODO note above this class
    return new Response(this.stream()).json();
  }
  // TODO: bytes() — blocked on Response#bytes() availability
  /** @returns {Promise<ArrayBuffer>} The file's contents as a single buffer. */
  arrayBuffer() {
    return new Response(this.stream()).arrayBuffer();
  }
  /** @returns {Promise<string>} The file's contents decoded as text. */
  text() {
    return new Response(this.stream()).text();
  }
  /** @returns {Promise<Blob>} The file's contents as an in-memory Blob. */
  blob() {
    return new Response(this.stream()).blob();
  }

  /**
   * Open a download stream for this view's byte range.
   * @returns {ReadableStream<Uint8Array>}
   */
  stream() {
    // This function is called for every operation on the blob
    return this.#client[stream](this.#path, undefined, this.#start, this.#end);
  }

  /**
   * Normalize a write source into something undici can send, plus (when cheap)
   * its byte length and SHA-256 hash for signing.
   *
   * @param {ByteSource} data
   * @returns {Promise<[
   *   buffer: import("./index.d.ts").UndiciBodyInit,
   *   size: number | undefined,
   *   hash: Buffer | undefined,
   * ]>}
   */
  async #transformData(data) {
    if (typeof data === "string") {
      const binary = new TextEncoder();
      const bytes = binary.encode(data);
      return [
        bytes,
        bytes.byteLength,
        sha256(bytes), // TODO: Maybe use some streaming to compute hash while encoding?
      ];
    }

    if (data instanceof Blob) {
      const bytes = await data.bytes();
      return [
        bytes,
        bytes.byteLength,
        sha256(bytes), // TODO: Maybe use some streaming to compute hash while encoding?
      ];
    }

    if (data instanceof Readable) {
      // Length and hash are unknown for a stream; the write path must cope.
      return [data, undefined, undefined];
    }

    if (
      data instanceof ArrayBuffer ||
      data instanceof SharedArrayBuffer ||
      ArrayBuffer.isView(data)
    ) {
      // TODO: Support hashing
      return [
        data,
        data.byteLength,
        undefined, // TODO: Compute hash some day
      ];
    }

    assertNever(data);
  }

  /**
   * Upload `data` to this file's key.
   *
   * NOTE(review): `#start`/`#end` are forwarded to the client's write —
   * presumably to support ranged/partial writes; verify against S3Client.
   *
   * @param {ByteSource} data
   * @returns {Promise<void>}
   */
  async write(data) {
    /** @type {AbortSignal | undefined} */
    const signal = undefined; // TODO: Take this as param

    // TODO: Support S3File as input and maybe use CopyObject
    // TODO: Support Request and Response as input?
    const [bytes, length, hash] = await this.#transformData(data);
    return await this.#client[write](
      this.#path,
      bytes,
      this.#contentType,
      length,
      hash,
      this.#start,
      this.#end,
      signal,
    );
  }

  // Future API (not implemented): writer(), setTags(), getTags()
}
286
+
287
/**
 * Exhaustiveness guard: under `// @ts-check` a call only type-checks once `v`
 * has been narrowed to `never`, i.e. every possible case was handled. Reaching
 * it at runtime means an unexpected value slipped through.
 *
 * @param {never} v - The value that should have been unreachable.
 * @returns {never}
 */
function assertNever(v) {
  const message = `Expected value not to have type ${typeof v}`;
  throw new TypeError(message);
}
package/src/S3Stat.js ADDED
@@ -0,0 +1,76 @@
1
+ // @ts-check
2
+
3
+ import { inspect } from "node:util";
4
+
5
export default class S3Stat {
  /**
   * Entity tag exactly as returned by the server (usually quoted).
   * @type {string}
   * @readonly
   */
  etag;
  /**
   * Parsed `Last-Modified` timestamp.
   * @type {Date}
   * @readonly
   */
  lastModified;
  /**
   * Object size in bytes (from `Content-Length`).
   * @type {number}
   * @readonly
   */
  size;
  /**
   * MIME type (from `Content-Type`).
   * @type {string}
   * @readonly
   */
  type;

  /**
   * @param {string} etag
   * @param {Date} lastModified
   * @param {number} size
   * @param {string} type
   */
  constructor(etag, lastModified, size, type) {
    this.etag = etag;
    this.lastModified = lastModified;
    this.size = size;
    this.type = type;
  }

  /**
   * Build an {@link S3Stat} from the headers of a HEAD response.
   *
   * Returns `undefined` (instead of throwing) when any required header is
   * missing or unparsable, so callers can treat a malformed response as
   * "no stat available".
   *
   * @param {Headers} headers
   * @returns {S3Stat | undefined}
   */
  static tryParseFromHeaders(headers) {
    const lastModifiedHeader = headers.get("last-modified");
    if (lastModifiedHeader === null) {
      return undefined;
    }

    const etag = headers.get("etag");
    if (etag === null) {
      return undefined;
    }

    const contentLength = headers.get("content-length");
    if (contentLength === null) {
      return undefined;
    }

    const size = Number(contentLength);
    // Reject NaN, fractions, unsafe integers and (fix) negative sizes.
    if (!Number.isSafeInteger(size) || size < 0) {
      return undefined;
    }

    const type = headers.get("content-type");
    if (type === null) {
      return undefined;
    }

    const lastModified = new Date(lastModifiedHeader);
    // Fix: an unparsable date previously produced a stat carrying "Invalid Date".
    if (Number.isNaN(lastModified.getTime())) {
      return undefined;
    }

    return new S3Stat(etag, lastModified, size, type);
  }

  toString() {
    // Fix: label previously said "S3Stats", inconsistent with the class name
    // and with S3File's `S3File { ... }` format.
    return `S3Stat {\n\tlastModified: ${inspect(this.lastModified)},\n\tsize: ${inspect(this.size)},\n\ttype: ${inspect(this.type)},\n\tetag: ${inspect(this.etag)}\n}`;
  }
}
package/src/index.d.ts ADDED
@@ -0,0 +1,80 @@
1
+ import type { Readable } from "node:stream";
2
+
3
+ export { default as S3File } from "./S3File.js";
4
+ export { default as S3Client } from "./S3Client.js";
5
+ export { default as S3Error } from "./S3Error.js";
6
+ export { default as S3Stat } from "./S3Stat.js";
7
+
8
+ export interface S3ClientOptions {
9
+ bucket: string;
10
+ region: string;
11
+ endpoint: string;
12
+ accessKeyId: string;
13
+ secretAccessKey: string;
14
+ sessionToken?: string;
15
+ }
16
+
17
+ export type Acl =
18
+ | "private"
19
+ | "public-read"
20
+ | "public-read-write"
21
+ | "aws-exec-read"
22
+ | "authenticated-read"
23
+ | "bucket-owner-read"
24
+ | "bucket-owner-full-control"
25
+ | "log-delivery-write";
26
+
27
+ export type StorageClass =
28
+ | "STANDARD"
29
+ | "DEEP_ARCHIVE"
30
+ | "EXPRESS_ONEZONE"
31
+ | "GLACIER"
32
+ | "GLACIER_IR"
33
+ | "INTELLIGENT_TIERING"
34
+ | "ONEZONE_IA"
35
+ | "OUTPOSTS"
36
+ | "REDUCED_REDUNDANCY"
37
+ | "SNOW"
38
+ | "STANDARD_IA";
39
+
40
+ export type PresignableHttpMethod = "GET" | "DELETE" | "PUT" | "HEAD";
41
+
42
+ export interface S3FilePresignOptions {
43
+ contentHash: Buffer;
44
+ /** Seconds. */
45
+ expiresIn: number; // TODO: Maybe support Temporal.Duration once major support arrives
46
+ method: PresignableHttpMethod;
47
+ storageClass: StorageClass;
48
+ acl: Acl;
49
+ }
50
+
51
+ export type OverridableS3ClientOptions = Pick<
52
+ S3ClientOptions,
53
+ "region" | "bucket" | "endpoint"
54
+ >;
55
+
56
+ export interface S3StatOptions extends OverridableS3ClientOptions {
57
+ signal: AbortSignal;
58
+ }
59
+ export interface S3FileExistsOptions extends OverridableS3ClientOptions {
60
+ signal: AbortSignal;
61
+ }
62
+ export interface S3FileDeleteOptions extends OverridableS3ClientOptions {
63
+ signal: AbortSignal;
64
+ }
65
+
66
+ // biome-ignore lint/complexity/noBannedTypes: TODO
67
+ export type CreateFileInstanceOptions = {}; // TODO
68
+
69
+ /** Body values supported by undici. */
70
+ export type UndiciBodyInit = string | Buffer | Uint8Array | Readable;
71
+
72
+ export type ByteSource = UndiciBodyInit | Blob;
73
+ // TODO
74
+ // | ArrayBufferView
75
+ // | ArrayBuffer
76
+ // | SharedArrayBuffer
77
+ // | Request
78
+ // | Response
79
+ // | S3File
80
+ // | ReadableStream<Uint8Array>
package/src/index.js ADDED
@@ -0,0 +1,4 @@
1
+ export { default as S3Client } from "./S3Client.js";
2
+ export { default as S3File } from "./S3File.js";
3
+ export { default as S3Error } from "./S3Error.js";
4
+ export { default as S3Stat } from "./S3Stat.js";
package/src/sign.js ADDED
@@ -0,0 +1,136 @@
1
+ // @ts-check
2
+ import { createHmac, createHash } from "node:crypto";
3
+
4
+ // Spec:
5
+ // https://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-query-string-auth.html
6
+
7
/**
 * Derive the AWS Signature Version 4 signing key for a date/region pair.
 * The service component is fixed to `"s3"`.
 *
 * Implements the standard HMAC chain:
 * `HMAC(HMAC(HMAC(HMAC("AWS4" + secret, date), region), "s3"), "aws4_request")`
 *
 * @param {string} date - Calendar date in `YYYYMMDD` form.
 * @param {string} region - Region the request is signed for.
 * @param {string} secretAccessKey
 * @returns {Buffer} The 32-byte signing key.
 */
export function deriveSigningKey(date, region, secretAccessKey) {
  const hmac = (key, data) => createHmac("sha256", key).update(data).digest();
  const seed = `AWS4${secretAccessKey}`;
  return [date, region, "s3", "aws4_request"].reduce(hmac, seed);
}
30
+
31
/**
 * Produce the final SigV4 signature (hex) for a canonical-request hash.
 *
 * Builds the "string to sign" described by the SigV4 spec and HMACs it with
 * the derived signing key. HMAC over the concatenated string is identical to
 * the original chained `.update()` calls.
 *
 * @param {Buffer} signingKey - Key from {@link deriveSigningKey}.
 * @param {string} canonicalDataHash - Hex SHA-256 of the canonical request.
 * @param {import("./AmzDate.js").AmzDate} date
 * @param {string} region
 * @returns {string} Hex-encoded signature.
 */
export function signCanonicalDataHash(
  signingKey,
  canonicalDataHash,
  date,
  region,
) {
  const stringToSign = `AWS4-HMAC-SHA256\n${date.dateTime}\n${date.date}/${region}/s3/aws4_request\n${canonicalDataHash}`;
  return createHmac("sha256", signingKey).update(stringToSign).digest("hex");
}
56
+
57
export const unsignedPayload = "UNSIGNED-PAYLOAD";

/**
 * Same as {@see createCanonicalDataDigest}, but hard-codes `host` as the only
 * signed header and `UNSIGNED-PAYLOAD` as the content hash.
 *
 * Used for pre-signing only: pre-signed URLs cannot carry content hashes and
 * sign only the `host` header, so this optimized form suffices.
 *
 * @param {import("./index.js").PresignableHttpMethod} method
 * @param {string} path - Canonical URI path.
 * @param {string} query - Canonical (sorted, encoded) query string.
 * @param {string} host - Value of the `Host` header.
 * @returns {string} Hex SHA-256 of the canonical request.
 */
export function createCanonicalDataDigestHostOnly(method, path, query, host) {
  const canonicalRequest = `${method}\n${path}\n${query}\nhost:${host}\n\nhost\n${unsignedPayload}`;
  return createHash("sha256").update(canonicalRequest).digest("hex");
}
87
+
88
/**
 * Hash the full SigV4 canonical request:
 * method, path, query, the canonical header lines, the signed-header list
 * (`;`-joined), and the payload hash.
 *
 * @param {import("./index.js").PresignableHttpMethod} method
 * @param {string} path - Canonical URI path.
 * @param {string} query - Canonical (sorted, encoded) query string.
 * @param {Record<string, string>} sortedHeaders - Headers already lowercase and key-sorted.
 * @param {string} contentHashStr - Hex payload hash, or "UNSIGNED-PAYLOAD".
 * @returns {string} Hex SHA-256 of the canonical request.
 */
export function createCanonicalDataDigest(
  method,
  path,
  query,
  sortedHeaders,
  contentHashStr,
) {
  const names = Object.keys(sortedHeaders);
  // One "name:value\n" line per header; empty when there are no headers.
  const headerBlock = names
    .map((name) => `${name}:${sortedHeaders[name]}\n`)
    .join("");
  const signedHeaders = names.join(";");
  const canonicalRequest = `${method}\n${path}\n${query}\n${headerBlock}\n${signedHeaders}\n${contentHashStr}`;
  return createHash("sha256").update(canonicalRequest).digest("hex");
}
129
+
130
/**
 * Compute a one-shot SHA-256 digest of `data`.
 *
 * @param {import("node:crypto").BinaryLike} data
 * @returns {Buffer} The raw 32-byte digest.
 */
export function sha256(data) {
  const hasher = createHash("sha256");
  hasher.update(data);
  return hasher.digest();
}
@@ -0,0 +1,94 @@
1
+ /**
2
+ * @module Used by integration tests and unit tests.
3
+ */
4
+
5
+ // @ts-check
6
+ import { test } from "node:test";
7
+ import { expect } from "expect";
8
+
9
+ import { S3Client } from "./index.js";
10
+
11
/**
 * Register the shared test suite against a live S3-compatible endpoint.
 *
 * NOTE(review): `forcePathStyle` is accepted but never used — `S3ClientOptions`
 * has no such field; confirm whether it should be forwarded to the client.
 *
 * @param {number} runId - Unique prefix so concurrent runs do not collide.
 * @param {string} endpoint
 * @param {boolean} forcePathStyle
 * @param {string} accessKeyId
 * @param {string} secretAccessKey
 * @param {string} region
 * @param {string} bucket
 */
export function runTests(
  runId,
  endpoint,
  forcePathStyle,
  accessKeyId,
  secretAccessKey,
  region,
  bucket,
) {
  const client = new S3Client({
    endpoint,
    accessKeyId,
    secretAccessKey,
    region,
    bucket,
  });

  test("presign-put", async () => {
    const testId = crypto.randomUUID();
    const expected = { hello: testId };

    // Upload through a pre-signed URL…
    const url = client.presign(`${runId}/presign-test.json`, { method: "PUT" });
    const uploadResponse = await fetch(url, {
      method: "PUT",
      body: JSON.stringify(expected),
      headers: {
        accept: "application/json",
      },
    });
    expect(uploadResponse.ok).toBe(true);

    // …then read it back through the client API and clean up.
    const file = client.file(`${runId}/presign-test.json`);
    try {
      expect(await file.json()).toStrictEqual(expected);
    } finally {
      await file.delete();
    }
  });

  test("roundtrip", async () => {
    const testId = crypto.randomUUID();
    const file = client.file(`${runId}/roundtrip.txt`);
    await file.write(testId);
    try {
      const stat = await file.stat();
      expect(stat).toEqual(
        expect.objectContaining({
          size: testId.length,
          type: "application/octet-stream",
        }),
      );
      expect(await file.text()).toStrictEqual(testId);
    } finally {
      await file.delete();
    }
  });

  test("slicing", async () => {
    const testId = crypto.randomUUID();
    const file = client.file(`${runId}/slicing.txt`);
    await file.write(testId);
    try {
      const sliced = file.slice(10, 20);
      expect(await sliced.text()).toEqual(testId.substring(10, 20));
    } finally {
      await file.delete();
    }
  });
}