lean-s3 0.1.6 → 0.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/S3File.js ADDED
@@ -0,0 +1,206 @@
1
+ import { Readable } from "node:stream";
2
+ import S3Error from "./S3Error.js";
3
+ import S3Stat from "./S3Stat.js";
4
+ import { write, stream } from "./S3Client.js";
5
+ import { sha256 } from "./sign.js";
6
+ // TODO: If we want to hack around, we can use this to access the private implementation of the "get stream" algorithm used by Node.js's blob internally
7
+ // We probably have to do this some day if the fetch implementation is moved to internals.
8
+ // If this happens, fetch will probably use `[kHandle].getReader()` instead of .stream() to read the Blob
9
+ // This would break our use-case of passing an S3File as a body
10
+ // Using this hack would also make `.text()`, `.bytes()` etc. "just work" in every case, since these use `[kHandle]` internally as well.
11
+ // We now resort back into overriding text/bytes/etc. But as soon as another internal Node.js API uses this functionality, this would probably also use `[kHandle]` and bypass our data.
12
+ // const kHandle = Object.getOwnPropertySymbols(new Blob).find(s => s.toString() === 'Symbol(kHandle)');
13
+ export default class S3File {
14
+ #client;
15
+ #path;
16
+ #start;
17
+ #end;
18
+ #contentType;
19
+ /**
20
+ * @internal
21
+ */
22
+ constructor(client, path, start, end, contentType) {
23
+ if (typeof start === "number" && start < 0) {
24
+ throw new Error("Invalid slice `start`.");
25
+ }
26
+ if (typeof end === "number" &&
27
+ (end < 0 || (typeof start === "number" && end < start))) {
28
+ throw new Error("Invalid slice `end`.");
29
+ }
30
+ this.#client = client;
31
+ this.#path = path;
32
+ this.#start = start;
33
+ this.#end = end;
34
+ this.#contentType = contentType ?? "application/octet-stream";
35
+ }
36
+ // TODO: slice overloads
37
+ slice(start, end, contentType) {
38
+ return new S3File(this.#client, this.#path, start ?? undefined, end ?? undefined, contentType ?? this.#contentType);
39
+ }
40
+ /**
41
+ * Get the stat of a file in the bucket. Uses `HEAD` request to check existence.
42
+ *
43
+ * @throws {Error} If the file does not exist.
44
+ * @param {Partial<S3StatOptions>} [options]
45
+ * @returns {Promise<S3Stat>}
46
+ */
47
+ async stat({ signal } = {}) {
48
+ // TODO: Support all options
49
+ // TODO: Don't use presign here
50
+ const url = this.#client.presign(this.#path, { method: "HEAD" });
51
+ const response = await fetch(url, { method: "HEAD", signal }); // TODO: Use undici
52
+ if (!response.ok) {
53
+ switch (response.status) {
54
+ case 404:
55
+ // TODO: Process response body
56
+ throw new S3Error("NoSuchKey", this.#path);
57
+ default:
58
+ // TODO: Process response body
59
+ throw new S3Error("Unknown", this.#path);
60
+ }
61
+ }
62
+ const result = S3Stat.tryParseFromHeaders(response.headers);
63
+ if (!result) {
64
+ throw new Error("S3 server returned an invalid response for HEAD");
65
+ }
66
+ return result;
67
+ }
68
+ /**
69
+ * Check if a file exists in the bucket. Uses `HEAD` request to check existence.
70
+ * @param {Partial<S3FileExistsOptions>} [options]
71
+ * @returns {Promise<boolean>}
72
+ */
73
+ async exists({ signal, } = {}) {
74
+ // TODO: Support all options
75
+ // TODO: Don't use presign here
76
+ const url = this.#client.presign(this.#path, { method: "HEAD" });
77
+ const res = await fetch(url, { method: "HEAD", signal }); // TODO: Use undici
78
+ return res.ok;
79
+ }
80
+ /**
81
+ * Delete a file from the bucket.
82
+ * @param {Partial<S3FileDeleteOptions>} [options]
83
+ * @returns {Promise<void>}
84
+ *
85
+ * @example
86
+ * ```js
87
+ * // Simple delete
88
+ * await client.unlink("old-file.txt");
89
+ *
90
+ * // With error handling
91
+ * try {
92
+ * await client.unlink("file.dat");
93
+ * console.log("File deleted");
94
+ * } catch (err) {
95
+ * console.error("Delete failed:", err);
96
+ * }
97
+ * ```
98
+ */
99
+ async delete({ signal } = {}) {
100
+ // TODO: Support all options
101
+ // TODO: Don't use presign here
102
+ const url = this.#client.presign(this.#path, { method: "DELETE" });
103
+ const response = await fetch(url, { method: "DELETE", signal }); // TODO: Use undici
104
+ if (!response.ok) {
105
+ switch (response.status) {
106
+ case 404:
107
+ // TODO: Process response body
108
+ throw new S3Error("NoSuchKey", this.#path);
109
+ default:
110
+ // TODO: Process response body
111
+ throw new S3Error("Unknown", this.#path);
112
+ }
113
+ }
114
+ }
115
+ toString() {
116
+ return `S3File { path: "${this.#path}" }`;
117
+ }
118
+ /** @returns {Promise<unknown>} */
119
+ json() {
120
+ // Not using JSON.parse(await this.text()), so the env can parse json while loading
121
+ // Also, see TODO note above this class
122
+ return new Response(this.stream()).json();
123
+ }
124
+ // TODO
125
+ // /** @returns {Promise<Uint8Array>} */
126
+ // bytes() {
127
+ // return new Response(this.stream()).bytes(); // TODO: Does this exist?
128
+ // }
129
+ /** @returns {Promise<ArrayBuffer>} */
130
+ arrayBuffer() {
131
+ return new Response(this.stream()).arrayBuffer();
132
+ }
133
+ /** @returns {Promise<string>} */
134
+ text() {
135
+ return new Response(this.stream()).text();
136
+ }
137
+ /** @returns {Promise<Blob>} */
138
+ blob() {
139
+ return new Response(this.stream()).blob();
140
+ }
141
+ /** @returns {ReadableStream<Uint8Array>} */
142
+ stream() {
143
+ // This function is called for every operation on the blob
144
+ return this.#client[stream](this.#path, undefined, this.#start, this.#end);
145
+ }
146
+ /**
147
+ * @param {ByteSource} data
148
+ * @returns {Promise<[
149
+ * buffer: import("./index.d.ts").UndiciBodyInit,
150
+ * size: number | undefined,
151
+ * hash: Buffer | undefined,
152
+ * ]>}
153
+ */
154
+ async #transformData(data) {
155
+ if (typeof data === "string") {
156
+ const binary = new TextEncoder();
157
+ const bytes = binary.encode(data);
158
+ return [
159
+ bytes,
160
+ bytes.byteLength,
161
+ sha256(bytes), // TODO: Maybe use some streaming to compute hash while encoding?
162
+ ];
163
+ }
164
+ if (data instanceof Blob) {
165
+ const bytes = await data.bytes();
166
+ return [
167
+ bytes,
168
+ bytes.byteLength,
169
+ sha256(bytes), // TODO: Maybe use some streaming to compute hash while encoding?
170
+ ];
171
+ }
172
+ if (data instanceof Readable) {
173
+ return [data, undefined, undefined];
174
+ }
175
+ if (data instanceof ArrayBuffer ||
176
+ data instanceof SharedArrayBuffer ||
177
+ ArrayBuffer.isView(data)) {
178
+ // TODO: Support hashing
179
+ return [
180
+ data,
181
+ data.byteLength,
182
+ undefined, // TODO: Compute hash some day
183
+ ];
184
+ }
185
+ assertNever(data);
186
+ }
187
+ /**
188
+ * @param {ByteSource} data
189
+ * @returns {Promise<void>}
190
+ */
191
+ async write(data) {
192
+ /** @type {AbortSignal | undefined} */
193
+ const signal = undefined; // TODO: Take this as param
194
+ // TODO: Support S3File as input and maybe use CopyObject
195
+ // TODO: Support Request and Response as input?
196
+ const [bytes, length, hash] = await this.#transformData(data);
197
+ return await this.#client[write](this.#path, bytes, this.#contentType, length, hash, this.#start, this.#end, signal);
198
+ }
199
+ }
200
+ /**
201
+ * @param {never} v
202
+ * @returns {never}
203
+ */
204
+ function assertNever(v) {
205
+ throw new TypeError(`Expected value not to have type ${typeof v}`);
206
+ }
@@ -0,0 +1,9 @@
1
+ import type { Headers } from "undici-types";
2
+ export default class S3Stat {
3
+ readonly etag: string;
4
+ readonly lastModified: Date;
5
+ readonly size: number;
6
+ readonly type: string;
7
+ constructor(etag: string, lastModified: Date, size: number, type: string);
8
+ static tryParseFromHeaders(headers: Headers): S3Stat | undefined;
9
+ }
package/dist/S3Stat.js ADDED
@@ -0,0 +1,35 @@
1
+ export default class S3Stat {
2
+ etag;
3
+ lastModified;
4
+ size;
5
+ type;
6
+ constructor(etag, lastModified, size, type) {
7
+ this.etag = etag;
8
+ this.lastModified = lastModified;
9
+ this.size = size;
10
+ this.type = type;
11
+ }
12
+ static tryParseFromHeaders(headers) {
13
+ const lm = headers.get("last-modified");
14
+ if (lm === null) {
15
+ return undefined;
16
+ }
17
+ const etag = headers.get("etag");
18
+ if (etag === null) {
19
+ return undefined;
20
+ }
21
+ const cl = headers.get("content-length");
22
+ if (cl === null) {
23
+ return undefined;
24
+ }
25
+ const size = Number(cl);
26
+ if (!Number.isSafeInteger(size)) {
27
+ return undefined;
28
+ }
29
+ const ct = headers.get("content-type");
30
+ if (ct === null) {
31
+ return undefined;
32
+ }
33
+ return new S3Stat(etag, new Date(lm), size, ct);
34
+ }
35
+ }
@@ -0,0 +1,15 @@
1
+ import type { Readable } from "node:stream";
2
+ export { default as S3File, type S3FileDeleteOptions, type S3FileExistsOptions, type S3StatOptions, } from "./S3File.ts";
3
+ export { default as S3Client, type ListObjectsResponse, type CreateFileInstanceOptions, type OverridableS3ClientOptions, type S3ClientOptions, type S3FilePresignOptions, } from "./S3Client.ts";
4
+ export { default as S3Error, type S3ErrorOptions } from "./S3Error.ts";
5
+ export { default as S3Stat } from "./S3Stat.ts";
6
+ export { default as S3BucketEntry } from "./S3BucketEntry.ts";
7
+ export type Acl = "private" | "public-read" | "public-read-write" | "aws-exec-read" | "authenticated-read" | "bucket-owner-read" | "bucket-owner-full-control" | "log-delivery-write";
8
+ export type StorageClass = "STANDARD" | "DEEP_ARCHIVE" | "EXPRESS_ONEZONE" | "GLACIER" | "GLACIER_IR" | "INTELLIGENT_TIERING" | "ONEZONE_IA" | "OUTPOSTS" | "REDUCED_REDUNDANCY" | "SNOW" | "STANDARD_IA";
9
+ export type ChecksumAlgorithm = "CRC32" | "CRC32C" | "CRC64NVME" | "SHA1" | "SHA256";
10
+ export type ChecksumType = "COMPOSITE" | "FULL_OBJECT";
11
+ export type PresignableHttpMethod = "GET" | "DELETE" | "PUT" | "HEAD";
12
+ export type HttpMethod = PresignableHttpMethod | "POST";
13
+ /** Body values supported by undici. */
14
+ export type UndiciBodyInit = string | Buffer | Uint8Array | Readable;
15
+ export type ByteSource = UndiciBodyInit | Blob;
package/dist/index.js ADDED
@@ -0,0 +1,13 @@
1
+ export { default as S3File, } from "./S3File.js";
2
+ export { default as S3Client, } from "./S3Client.js";
3
+ export { default as S3Error } from "./S3Error.js";
4
+ export { default as S3Stat } from "./S3Stat.js";
5
+ export { default as S3BucketEntry } from "./S3BucketEntry.js";
6
+ // TODO
7
+ // | ArrayBufferView
8
+ // | ArrayBuffer
9
+ // | SharedArrayBuffer
10
+ // | Request
11
+ // | Response
12
+ // | S3File
13
+ // | ReadableStream<Uint8Array>
@@ -0,0 +1 @@
1
+ export {};
package/dist/sign.d.ts ADDED
@@ -0,0 +1,16 @@
1
+ import { type BinaryLike } from "node:crypto";
2
+ import type { AmzDate } from "./AmzDate.ts";
3
+ import type { HttpMethod, PresignableHttpMethod } from "./index.ts";
4
+ export declare function deriveSigningKey(date: string, region: string, secretAccessKey: string): Buffer;
5
+ export declare function signCanonicalDataHash(signinKey: Buffer, canonicalDataHash: string, date: AmzDate, region: string): string;
6
+ export declare const unsignedPayload = "UNSIGNED-PAYLOAD";
7
+ /**
8
+ * Same as {@see createCanonicalDataDigest}, but only sets the `host` header and the content hash to `UNSIGNED-PAYLOAD`.
9
+ *
10
+ * Used for pre-signing only. Pre-signed URLs [cannot contain content hashes](https://github.com/aws/aws-sdk-js/blob/966fa6c316dbb11ca9277564ff7120e6b16467f4/lib/signers/v4.js#L182-L183)
11
+ * and the only header that is signed is `host`. So we can use an optimized version for that.
12
+ */
13
+ export declare function createCanonicalDataDigestHostOnly(method: PresignableHttpMethod, path: string, query: string, host: string): string;
14
+ export declare function createCanonicalDataDigest(method: HttpMethod, path: string, query: string, sortedHeaders: Record<string, string>, contentHashStr: string): string;
15
+ export declare function sha256(data: BinaryLike): Buffer;
16
+ export declare function md5Hex(data: BinaryLike): string;
package/dist/sign.js ADDED
@@ -0,0 +1,77 @@
1
+ import { createHmac, createHash } from "node:crypto";
2
+ // Spec:
3
+ // https://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-query-string-auth.html
4
+ export function deriveSigningKey(date, region, secretAccessKey) {
5
+ const key = `AWS4${secretAccessKey}`;
6
+ const signedDate = createHmac("sha256", key).update(date).digest();
7
+ const signedDateRegion = createHmac("sha256", signedDate)
8
+ .update(region)
9
+ .digest();
10
+ const signedDateRegionService = createHmac("sha256", signedDateRegion)
11
+ .update("s3")
12
+ .digest();
13
+ return createHmac("sha256", signedDateRegionService)
14
+ .update("aws4_request")
15
+ .digest();
16
+ }
17
+ export function signCanonicalDataHash(signinKey, canonicalDataHash, date, region) {
18
+ // it is actually faster to pass a single large string instead of doing multiple .update() chains with the parameters
19
+ // see `benchmark-operations.js`
20
+ return createHmac("sha256", signinKey)
21
+ .update(`AWS4-HMAC-SHA256\n${date.dateTime}\n${date.date}/${region}/s3/aws4_request\n${canonicalDataHash}`)
22
+ .digest("hex");
23
+ }
24
+ export const unsignedPayload = "UNSIGNED-PAYLOAD";
25
+ /**
26
+ * Same as {@see createCanonicalDataDigest}, but only sets the `host` header and the content hash to `UNSIGNED-PAYLOAD`.
27
+ *
28
+ * Used for pre-signing only. Pre-signed URLs [cannot contain content hashes](https://github.com/aws/aws-sdk-js/blob/966fa6c316dbb11ca9277564ff7120e6b16467f4/lib/signers/v4.js#L182-L183)
29
+ * and the only header that is signed is `host`. So we can use an optimized version for that.
30
+ */
31
+ export function createCanonicalDataDigestHostOnly(method, path, query, host) {
32
+ // it is actually faster to pass a single large string instead of doing multiple .update() chains with the parameters
33
+ // see `benchmark-operations.js`
34
+ return createHash("sha256")
35
+ .update(`${method}\n${path}\n${query}\nhost:${host}\n\nhost\nUNSIGNED-PAYLOAD`)
36
+ .digest("hex");
37
+ }
38
+ export function createCanonicalDataDigest(method, path, query, sortedHeaders, contentHashStr) {
39
+ // Use this for debugging
40
+ /*
41
+ const xHash = {
42
+ h: createHash("sha256"),
43
+ m: "",
44
+ update(v) {
45
+ this.m += v;
46
+ this.h.update(v);
47
+ return this;
48
+ },
49
+ digest(v) {
50
+ if (this.m.includes("continuation-token")) console.log(this.m);
51
+ return this.h.digest(v);
52
+ },
53
+ };
54
+ */
55
+ const sortedHeaderNames = Object.keys(sortedHeaders);
56
+ // it is actually faster to pass a single large string instead of doing multiple .update() chains with the parameters
57
+ // see `benchmark-operations.js`
58
+ let canonData = `${method}\n${path}\n${query}\n`;
59
+ for (const header of sortedHeaderNames) {
60
+ canonData += `${header}:${sortedHeaders[header]}\n`;
61
+ }
62
+ canonData += "\n";
63
+ // emit the first header without ";", so we can avoid branching inside the loop for the other headers
64
+ // this is just a version of `sortedHeaderList.join(";")` that seems about 2x faster (see `benchmark-operations.js`)
65
+ canonData += sortedHeaderNames.length > 0 ? sortedHeaderNames[0] : "";
66
+ for (let i = 1; i < sortedHeaderNames.length; ++i) {
67
+ canonData += `;${sortedHeaderNames[i]}`;
68
+ }
69
+ canonData += `\n${contentHashStr}`;
70
+ return createHash("sha256").update(canonData).digest("hex");
71
+ }
72
+ export function sha256(data) {
73
+ return createHash("sha256").update(data).digest();
74
+ }
75
+ export function md5Hex(data) {
76
+ return createHash("md5").update(data).digest("hex");
77
+ }
@@ -0,0 +1 @@
1
+ export {};
@@ -0,0 +1,4 @@
1
+ /**
2
+ * @module Used by integration tests and unit tests.
3
+ */
4
+ export declare function runTests(runId: number, endpoint: string, forcePathStyle: boolean, accessKeyId: string, secretAccessKey: string, region: string, bucket: string): void;
@@ -0,0 +1 @@
1
+ export {};
@@ -0,0 +1,19 @@
1
+ // @ts-check
2
+ import { describe } from "node:test";
3
+ import { runTests } from "./test-common.js";
4
+ const env = process.env;
5
+ const runId = Date.now();
6
+ for (const provider of ["hetzner", "aws", "cloudflare"]) {
7
+ describe(`integration with ${provider}@runId:${runId}`, () => {
8
+ const p = provider.toUpperCase();
9
+ const endpoint = env[`${p}_S3_ENDPOINT`];
10
+ const region = env[`${p}_S3_REGION`];
11
+ const bucket = env[`${p}_S3_BUCKET`];
12
+ const accessKeyId = env[`${p}_S3_ACCESS_KEY_ID`];
13
+ const secretAccessKey = env[`${p}_S3_SECRET_KEY`];
14
+ if (!endpoint || !region || !bucket || !accessKeyId || !secretAccessKey) {
15
+ throw new Error("Invalid config");
16
+ }
17
+ runTests(runId, endpoint, false, accessKeyId, secretAccessKey, region, bucket);
18
+ });
19
+ }
package/dist/url.d.ts ADDED
@@ -0,0 +1,9 @@
1
+ export declare function buildRequestUrl(endpoint: string, bucket: string, region: string, path: string): URL;
2
+ /**
3
+ * Sorts headers alphabetically. Removes headers that are undefined/null.
4
+ *
5
+ * `http.request` doesn't allow passing `undefined` as header values (despite the types allowing it),
6
+ * so we have to filter afterwards.
7
+ */
8
+ export declare function prepareHeadersForSigning(unfilteredHeadersUnsorted: Record<string, string | undefined>): Record<string, string>;
9
+ export declare function getRangeHeader(start: number | undefined, endExclusive: number | undefined): string | undefined;
package/dist/url.js ADDED
@@ -0,0 +1,52 @@
1
+ export function buildRequestUrl(endpoint, bucket, region, path) {
2
+ const normalizedBucket = normalizePath(bucket);
3
+ const [endpointWithBucketAndRegion, replacedBucket] = replaceDomainPlaceholders(endpoint, normalizedBucket, region);
4
+ const result = new URL(endpointWithBucketAndRegion);
5
+ const pathPrefix = result.pathname.endsWith("/")
6
+ ? result.pathname
7
+ : `${result.pathname}/`;
8
+ const pathSuffix = replacedBucket
9
+ ? normalizePath(path)
10
+ : `${normalizedBucket}/${normalizePath(path)}`;
11
+ result.pathname = pathPrefix + pathSuffix;
12
+ return result;
13
+ }
14
+ function replaceDomainPlaceholders(endpoint, bucket, region) {
15
+ const replacedBucket = endpoint.includes("{bucket}");
16
+ return [
17
+ endpoint.replaceAll("{bucket}", bucket).replaceAll("{region}", region),
18
+ replacedBucket,
19
+ ];
20
+ }
21
+ /**
22
+ * Removes trailing and leading slash.
23
+ */
24
+ function normalizePath(path) {
25
+ const start = path[0] === "/" ? 1 : 0;
26
+ const end = path[path.length - 1] === "/" ? path.length - 1 : path.length;
27
+ return path.substring(start, end);
28
+ }
29
+ /**
30
+ * Sorts headers alphabetically. Removes headers that are undefined/null.
31
+ *
32
+ * `http.request` doesn't allow passing `undefined` as header values (despite the types allowing it),
33
+ * so we have to filter afterwards.
34
+ */
35
+ export function prepareHeadersForSigning(unfilteredHeadersUnsorted) {
36
+ const result = {};
37
+ // TODO: `Object.keys(headersUnsorted).sort()` is constant in our case,
38
+ // maybe we want to move this somewhere else to avoid sorting every time
39
+ for (const header of Object.keys(unfilteredHeadersUnsorted).sort()) {
40
+ const v = unfilteredHeadersUnsorted[header];
41
+ if (v !== undefined && v !== null) {
42
+ result[header] = v;
43
+ }
44
+ }
45
+ return result;
46
+ }
47
+ export function getRangeHeader(start, endExclusive) {
48
+ return typeof start === "number" || typeof endExclusive === "number"
49
+ ? // HTTP ranges are end-inclusive, we are exclusive in our slice
50
+ `bytes=${start ?? 0}-${typeof endExclusive === "number" ? endExclusive - 1 : ""}`
51
+ : undefined;
52
+ }
@@ -0,0 +1 @@
1
+ export {};
package/package.json CHANGED
@@ -2,7 +2,7 @@
2
2
  "name": "lean-s3",
3
3
  "author": "Niklas Mollenhauer",
4
4
  "license": "MIT",
5
- "version": "0.1.6",
5
+ "version": "0.2.0",
6
6
  "description": "A server-side S3 API for the regular user.",
7
7
  "keywords": [
8
8
  "s3",
@@ -13,34 +13,39 @@
13
13
  "url": "https://github.com/nikeee/lean-s3"
14
14
  },
15
15
  "exports": {
16
- "types": "./src/index.d.ts",
17
- "default": "./src/index.js"
16
+ "types": "./dist/index.d.ts",
17
+ "default": "./dist/index.js"
18
18
  },
19
- "types": "./src/index.d.ts",
19
+ "types": "./dist/index.d.ts",
20
20
  "type": "module",
21
21
  "scripts": {
22
- "test": "node --test src/*.test.js",
23
- "test:integration": "node --test integration/*.test.js",
22
+ "build": "tsc",
23
+ "clean": "rimraf dist",
24
+ "test": "tsc && node --test dist/*.test.js",
25
+ "test:integration": "tsc && node --test dist/test.integration.js",
24
26
  "ci": "biome ci ./src",
25
27
  "docs": "typedoc",
26
28
  "lint": "biome lint ./src",
27
- "format": "biome format --write ./src && biome lint --write ./src && biome check --write ./src"
29
+ "format": "biome format --write ./src && biome lint --write ./src && biome check --write ./src",
30
+ "prepublishOnly": "npm run clean && npm run build"
28
31
  },
29
32
  "devDependencies": {
30
- "@aws-sdk/client-s3": "^3.821.0",
33
+ "@aws-sdk/client-s3": "^3.828.0",
31
34
  "@biomejs/biome": "^1.9.4",
32
- "@testcontainers/localstack": "^11.0.0",
33
- "@testcontainers/minio": "^11.0.0",
34
- "@types/node": "^22.15.29",
35
- "expect": "^29.7.0",
35
+ "@testcontainers/localstack": "^11.0.3",
36
+ "@testcontainers/minio": "^11.0.3",
37
+ "@types/node": "^24.0.1",
38
+ "expect": "^30.0.0",
36
39
  "lefthook": "^1.11.13",
37
- "typedoc": "^0.28.5"
40
+ "rimraf": "^6.0.1",
41
+ "typedoc": "^0.28.5",
42
+ "typescript": "^5.8.3"
38
43
  },
39
44
  "engines": {
40
45
  "node": "^20.19.0 || ^22.14.0 || ^24.0.0"
41
46
  },
42
47
  "dependencies": {
43
- "fast-xml-parser": "^5.2.3",
48
+ "fast-xml-parser": "^5.2.5",
44
49
  "undici": "^7.10.0"
45
50
  }
46
51
  }
package/src/AmzDate.js DELETED
@@ -1,56 +0,0 @@
1
- const ONE_DAY = 1000 * 60 * 60 * 24;
2
-
3
- /**
4
- * @typedef {{
5
- * numericDayStart: number;
6
- * date: string;
7
- * dateTime: string;
8
- * }} AmzDate
9
- */
10
-
11
- /**
12
- * @param {Date} dateTime
13
- * @return {AmzDate}
14
- */
15
- export function getAmzDate(dateTime) {
16
- const date =
17
- pad4(dateTime.getUTCFullYear()) +
18
- pad2(dateTime.getUTCMonth() + 1) +
19
- pad2(dateTime.getUTCDate());
20
-
21
- const time =
22
- pad2(dateTime.getUTCHours()) +
23
- pad2(dateTime.getUTCMinutes()) +
24
- pad2(dateTime.getUTCSeconds()); // it seems that we dont support milliseconds
25
-
26
- return {
27
- numericDayStart: (dateTime.getTime() / ONE_DAY) | 0,
28
- date,
29
- dateTime: `${date}T${time}Z`,
30
- };
31
- }
32
- export function now() {
33
- return getAmzDate(new Date());
34
- }
35
-
36
- /**
37
- * @param {number} v
38
- * @returns {string}
39
- */
40
- function pad4(v) {
41
- return v < 10
42
- ? `000${v}`
43
- : v < 100
44
- ? `00${v}`
45
- : v < 1000
46
- ? `0${v}`
47
- : v.toString();
48
- }
49
-
50
- /**
51
- * @param {number} v
52
- * @returns {string}
53
- */
54
- function pad2(v) {
55
- return v < 10 ? `0${v}` : v.toString();
56
- }
package/src/KeyCache.js DELETED
@@ -1,36 +0,0 @@
1
- import * as sign from "./sign.js";
2
-
3
- /**@typedef {import("./AmzDate.js").AmzDate} AmzDate */
4
-
5
- export default class KeyCache {
6
- /** @type {number} */
7
- #lastNumericDay = -1;
8
- /** @type {Map<string, Buffer>} */
9
- #keys = new Map();
10
-
11
- /**
12
- * @param {AmzDate} date
13
- * @param {string} region
14
- * @param {string} accessKeyId
15
- * @param {string} secretAccessKey
16
- * @returns {Buffer}
17
- */
18
- computeIfAbsent(date, region, accessKeyId, secretAccessKey) {
19
- if (date.numericDayStart !== this.#lastNumericDay) {
20
- this.#keys.clear();
21
- this.#lastNumericDay = date.numericDayStart;
22
- // TODO: Add mechanism to clear the cache after some time
23
- }
24
-
25
- // using accessKeyId to prevent keeping the secretAccessKey somewhere
26
- const cacheKey = `${date.date}:${region}:${accessKeyId}`;
27
- const key = this.#keys.get(cacheKey);
28
- if (key) {
29
- return key;
30
- }
31
-
32
- const newKey = sign.deriveSigningKey(date.date, region, secretAccessKey);
33
- this.#keys.set(cacheKey, newKey);
34
- return newKey;
35
- }
36
- }