lean-s3 0.2.0 → 0.2.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -1,6 +1,6 @@
1
1
  # lean-s3 [![npm badge](https://img.shields.io/npm/v/lean-s3)](https://www.npmjs.com/package/lean-s3)
2
2
 
3
- A server-side S3 API for the regular user. lean-s3 tries to provide the 80% of S3 that most people use. It is heavily inspired by [Bun's S3 API](https://bun.sh/docs/api/s3). Requires a Node.js version that supports `fetch`.
3
+ A server-side S3 API for the regular user. lean-s3 tries to provide the 80% of S3 that most people use. It is heavily inspired by [Bun's S3 API](https://bun.sh/docs/api/s3). Requires a supported Node.js version.
4
4
 
5
5
  ## Elevator Pitch
6
6
  ```js
@@ -77,7 +77,7 @@ $ du -sh node_modules
77
77
  `lean-s3` is _so_ lean that it is ~1.8MB just to do a couple of HTTP requests <img src="https://cdn.frankerfacez.com/emoticon/480839/1" width="20" height="20">
78
78
  BUT...
79
79
 
80
- Due to its scalability, portability and AWS integrations, pre-signing URLs is `async` and performs poorly in high-performance scenarios. By taking different trade-offs, lean-s3 can presign URLs much faster. I promise! This is the reason you cannot use lean-s3 in the browser.
80
+ Due to the scalability, portability and AWS integrations of @aws-sdk/client-s3, pre-signing URLs is `async` and performs poorly in high-performance scenarios. By taking different trade-offs, lean-s3 can presign URLs much faster. I promise! This is the reason you cannot use lean-s3 in the browser.
81
81
 
82
82
  lean-s3 is currently about 30x faster than AWS SDK when it comes to pre-signing URLs[^1]:
83
83
  ```
@@ -121,6 +121,20 @@ We try to keep this library small. If you happen to need something that is not s
121
121
 
122
122
  See [DESIGN_DECISIONS.md](./DESIGN_DECISIONS.md) to read about why this library is the way it is.
123
123
 
124
+ ## Supported Operations
125
+
126
+ ### Bucket Operations
127
+ - ✅ [`CreateBucket`](https://docs.aws.amazon.com/AmazonS3/latest/API/API_CreateBucket.html) via `.createBucket`
128
+ - ✅ [`DeleteBucket`](https://docs.aws.amazon.com/AmazonS3/latest/API/API_DeleteBucket.html) via `.deleteBucket`
129
+ - ✅ [`HeadBucket`](https://docs.aws.amazon.com/AmazonS3/latest/API/API_HeadBucket.html) via `.bucketExists`
130
+
131
+ ### Object Operations
132
+ - ✅ [`ListObjectsV2`](https://docs.aws.amazon.com/AmazonS3/latest/API/API_ListObjectsV2.html) via `.list`/`.listIterating`
133
+ - ✅ [`DeleteObjects`](https://docs.aws.amazon.com/AmazonS3/latest/API/API_DeleteObjects.html) via `.deleteObjects`
134
+ - ✅ [`DeleteObject`](https://docs.aws.amazon.com/AmazonS3/latest/API/API_DeleteObject.html) via `S3File.delete`
135
+ - ✅ [`PutObject`](https://docs.aws.amazon.com/AmazonS3/latest/API/API_PutObject.html) via `S3File.write`
136
+ - ✅ [`HeadObject`](https://docs.aws.amazon.com/AmazonS3/latest/API/API_HeadObject.html) via `S3File.exists`/`S3File.stat`
137
+
124
138
  ## Example Configurations
125
139
  ### Hetzner Object Storage
126
140
  ```js
@@ -14,5 +14,6 @@ export default class S3BucketEntry {
14
14
  /**
15
15
  * @internal
16
16
  */
17
+ // biome-ignore lint/suspicious/noExplicitAny: internal use only, any is ok here
17
18
  static parse(source: any): S3BucketEntry;
18
19
  }
@@ -1,7 +1,8 @@
1
+ import { type Dispatcher } from "undici";
1
2
  import S3File from "./S3File.ts";
2
3
  import S3BucketEntry from "./S3BucketEntry.ts";
3
4
  import * as amzDate from "./AmzDate.ts";
4
- import type { Acl, PresignableHttpMethod, StorageClass, UndiciBodyInit } from "./index.ts";
5
+ import type { Acl, BucketInfo, BucketLocationInfo, HttpMethod, PresignableHttpMethod, StorageClass, UndiciBodyInit } from "./index.ts";
5
6
  export declare const write: unique symbol;
6
7
  export declare const stream: unique symbol;
7
8
  export interface S3ClientOptions {
@@ -13,15 +14,34 @@ export interface S3ClientOptions {
13
14
  sessionToken?: string;
14
15
  }
15
16
  export type OverridableS3ClientOptions = Pick<S3ClientOptions, "region" | "bucket" | "endpoint">;
16
- export type CreateFileInstanceOptions = {};
17
+ // biome-ignore lint/complexity/noBannedTypes: TODO
18
+ export type CreateFileInstanceOptions = {}; // TODO
19
+ export type DeleteObjectsOptions = {
20
+ signal?: AbortSignal;
21
+ };
17
22
  export interface S3FilePresignOptions {
18
23
  contentHash: Buffer;
19
24
  /** Seconds. */
20
- expiresIn: number;
25
+ expiresIn: number; // TODO: Maybe support Temporal.Duration once major support arrives
21
26
  method: PresignableHttpMethod;
22
27
  storageClass: StorageClass;
23
28
  acl: Acl;
24
29
  }
30
+ export type ListObjectsOptions = {
31
+ bucket?: string;
32
+ prefix?: string;
33
+ maxKeys?: number;
34
+ startAfter?: string;
35
+ continuationToken?: string;
36
+ signal?: AbortSignal;
37
+ };
38
+ export type ListObjectsIteratingOptions = {
39
+ bucket?: string;
40
+ prefix?: string;
41
+ startAfter?: string;
42
+ signal?: AbortSignal;
43
+ internalPageSize?: number;
44
+ };
25
45
  export type ListObjectsResponse = {
26
46
  name: string;
27
47
  prefix: string | undefined;
@@ -33,6 +53,18 @@ export type ListObjectsResponse = {
33
53
  nextContinuationToken: string | undefined;
34
54
  contents: readonly S3BucketEntry[];
35
55
  };
56
+ export type BucketCreationOptions = {
57
+ locationConstraint?: string;
58
+ location?: BucketLocationInfo;
59
+ info?: BucketInfo;
60
+ signal?: AbortSignal;
61
+ };
62
+ export type BucketDeletionOptions = {
63
+ signal?: AbortSignal;
64
+ };
65
+ export type BucketExistsOptions = {
66
+ signal?: AbortSignal;
67
+ };
36
68
  /**
37
69
  * A configured S3 bucket instance for managing files.
38
70
  *
@@ -60,9 +92,27 @@ export default class S3Client {
60
92
  /**
61
93
  * Creates an S3File instance for the given path.
62
94
  *
63
- * @param {string} path
95
+ * @param {string} path The path to the object in the bucket. Also known as [object key](https://docs.aws.amazon.com/AmazonS3/latest/userguide/object-keys.html).
96
+ * We recommend not using the following characters in a key name because of significant special character handling, which isn't consistent across all applications (see [AWS docs](https://docs.aws.amazon.com/AmazonS3/latest/userguide/object-keys.html)):
97
+ * - Backslash (`\\`)
98
+ * - Left brace (`{`)
99
+ * - Non-printable ASCII characters (128–255 decimal characters)
100
+ * - Caret or circumflex (`^`)
101
+ * - Right brace (`}`)
102
+ * - Percent character (`%`)
103
+ * - Grave accent or backtick (`\``)
104
+ * - Right bracket (`]`)
105
+ * - Quotation mark (`"`)
106
+ * - Greater than sign (`>`)
107
+ * - Left bracket (`[`)
108
+ * - Tilde (`~`)
109
+ * - Less than sign (`<`)
110
+ * - Pound sign (`#`)
111
+ * - Vertical bar or pipe (`|`)
112
+ *
113
+ * lean-s3 does not enforce these restrictions.
114
+ *
64
115
  * @param {Partial<CreateFileInstanceOptions>} [options] TODO
65
- * @returns {S3File}
66
116
  * @example
67
117
  * ```js
68
118
  * const file = client.file("image.jpg");
@@ -88,43 +138,73 @@ export default class S3Client {
88
138
  * ```
89
139
  */
90
140
  presign(path: string, { method, expiresIn, // TODO: Maybe rename this to expiresInSeconds
91
- storageClass, acl, region: regionOverride, bucket: bucketOverride, endpoint: endpointOverride, }?: Partial<S3FilePresignOptions & OverridableS3ClientOptions>): string;
92
- deleteObjects(objects: readonly S3BucketEntry[] | readonly string[], options: unknown): Promise<void>;
141
+ storageClass, acl, region: regionOverride, bucket: bucketOverride, endpoint: endpointOverride }?: Partial<S3FilePresignOptions & OverridableS3ClientOptions>): string;
142
+ /**
143
+ * Uses [`DeleteObjects`](https://docs.aws.amazon.com/AmazonS3/latest/API/API_DeleteObjects.html) to delete multiple objects in a single request.
144
+ */
145
+ deleteObjects(objects: readonly S3BucketEntry[] | readonly string[], options?: DeleteObjectsOptions): Promise<{
146
+ errors: {
147
+ code: any;
148
+ key: any;
149
+ message: any;
150
+ versionId: any;
151
+ }[];
152
+ } | null>;
93
153
  /**
94
- * Uses `ListObjectsV2` to iterate over all keys. Pagination and continuation is handled internally.
154
+ * Creates a new bucket on the S3 server.
155
+ *
156
+ * @param name The name of the bucket to create. AWS restricts the name according to [some rules](https://docs.aws.amazon.com/AmazonS3/latest/userguide/bucketnamingrules.html). The most important ones are:
157
+ * - Bucket names must be between `3` (min) and `63` (max) characters long.
158
+ * - Bucket names can consist only of lowercase letters, numbers, periods (`.`), and hyphens (`-`).
159
+ * - Bucket names must begin and end with a letter or number.
160
+ * - Bucket names must not contain two adjacent periods.
161
+ * - Bucket names must not be formatted as an IP address (for example, `192.168.5.4`).
162
+ *
163
+ * @throws {Error} If the bucket name is invalid.
164
+ * @throws {S3Error} If the bucket could not be created, e.g. if it already exists.
165
+ * @remarks Uses [`CreateBucket`](https://docs.aws.amazon.com/AmazonS3/latest/API/API_CreateBucket.html)
95
166
  */
96
- listIterating(options: {
97
- prefix?: string;
98
- startAfter?: string;
99
- signal?: AbortSignal;
100
- internalPageSize?: number;
101
- }): AsyncGenerator<S3BucketEntry>;
102
- list(options?: {
103
- prefix?: string;
104
- maxKeys?: number;
105
- startAfter?: string;
106
- continuationToken?: string;
107
- signal?: AbortSignal;
108
- }): Promise<ListObjectsResponse>;
167
+ createBucket(name: string, options?: BucketCreationOptions): Promise<void>;
168
+ /**
169
+ * Deletes a bucket from the S3 server.
170
+ * @param name The name of the bucket to delete. Same restrictions as in {@link S3Client#createBucket}.
171
+ * @throws {Error} If the bucket name is invalid.
172
+ * @throws {S3Error} If the bucket could not be deleted, e.g. if it is not empty.
173
+ * @remarks Uses [`DeleteBucket`](https://docs.aws.amazon.com/AmazonS3/latest/API/API_DeleteBucket.html).
174
+ */
175
+ deleteBucket(name: string, options?: BucketDeletionOptions): Promise<void>;
176
+ /**
177
+ * Checks if a bucket exists.
178
+ * @param name The name of the bucket to check. Same restrictions as in {@link S3Client#createBucket}.
179
+ * @throws {Error} If the bucket name is invalid.
180
+ * @remarks Uses [`HeadBucket`](https://docs.aws.amazon.com/AmazonS3/latest/API/API_HeadBucket.html).
181
+ */
182
+ bucketExists(name: string, options?: BucketExistsOptions): Promise<boolean>;
183
+ //#region list
184
+ /**
185
+ * Uses [`ListObjectsV2`](https://docs.aws.amazon.com/AmazonS3/latest/API/API_ListObjectsV2.html) to iterate over all keys. Pagination and continuation is handled internally.
186
+ */
187
+ listIterating(options: ListObjectsIteratingOptions): AsyncGenerator<S3BucketEntry>;
188
+ /**
189
+ * Implements [`ListObjectsV2`](https://docs.aws.amazon.com/AmazonS3/latest/API/API_ListObjectsV2.html) to iterate over all keys.
190
+ */
191
+ list(options?: ListObjectsOptions): Promise<ListObjectsResponse>;
192
+ //#endregion
193
+ /**
194
+ * Do not use this. This is an internal method.
195
+ * TODO: Maybe move this into a separate free function?
196
+ * @internal
197
+ */
198
+ _signedRequest(method: HttpMethod, pathWithoutBucket: string, query: string | undefined, body: UndiciBodyInit | undefined, additionalSignedHeaders: Record<string, string> | undefined, additionalUnsignedHeaders: Record<string, string> | undefined, contentHash: Buffer | undefined, bucket: string | undefined, signal?: AbortSignal | undefined): Promise<Dispatcher.ResponseData<null>>;
109
199
  /**
110
200
  * @internal
111
201
  * @param {import("./index.d.ts").UndiciBodyInit} data TODO
112
202
  */
113
203
  [write](path: string, data: UndiciBodyInit, contentType: string, contentLength: number | undefined, contentHash: Buffer | undefined, rageStart: number | undefined, rangeEndExclusive: number | undefined, signal?: AbortSignal | undefined): Promise<void>;
204
+ // TODO: Support abortSignal
114
205
  /**
115
206
  * @internal
116
207
  */
117
208
  [stream](path: string, contentHash: Buffer | undefined, rageStart: number | undefined, rangeEndExclusive: number | undefined): import("stream/web").ReadableStream<Uint8Array<ArrayBufferLike>>;
118
209
  }
119
- /**
120
- * @param {string} amzCredential
121
- * @param {import("./AmzDate.ts").AmzDate} date
122
- * @param {number} expiresIn
123
- * @param {string} headerList
124
- * @param {StorageClass | null | undefined} storageClass
125
- * @param {string | null | undefined} sessionToken
126
- * @param {Acl | null | undefined} acl
127
- * @param {string | null | undefined} contentHashStr
128
- * @returns {string}
129
- */
130
210
  export declare function buildSearchParams(amzCredential: string, date: amzDate.AmzDate, expiresIn: number, headerList: string, contentHashStr: string | null | undefined, storageClass: StorageClass | null | undefined, sessionToken: string | null | undefined, acl: Acl | null | undefined): string;
package/dist/S3Client.js CHANGED
@@ -7,10 +7,14 @@ import KeyCache from "./KeyCache.js";
7
7
  import * as amzDate from "./AmzDate.js";
8
8
  import * as sign from "./sign.js";
9
9
  import { buildRequestUrl, getRangeHeader, prepareHeadersForSigning, } from "./url.js";
10
+ import { getResponseError } from "./error.js";
10
11
  export const write = Symbol("write");
11
12
  export const stream = Symbol("stream");
12
13
  const xmlParser = new XMLParser();
13
- const xmlBuilder = new XMLBuilder();
14
+ const xmlBuilder = new XMLBuilder({
15
+ attributeNamePrefix: "$",
16
+ ignoreAttributes: false,
17
+ });
14
18
  /**
15
19
  * A configured S3 bucket instance for managing files.
16
20
  *
@@ -28,11 +32,9 @@ const xmlBuilder = new XMLBuilder();
28
32
  * ```
29
33
  */
30
34
  export default class S3Client {
31
- /** @type {Readonly<S3ClientOptions>} */
32
35
  #options;
33
36
  #keyCache = new KeyCache();
34
- // TODO: pass options to this in client
35
- /** @type {Dispatcher} */
37
+ // TODO: pass options to this in client? Do we want to expose the internal use of undici?
36
38
  #dispatcher = new Agent();
37
39
  /**
38
40
  * Create a new instance of an S3 bucket so that credentials can be managed from a single instance instead of being passed to every method.
@@ -71,9 +73,27 @@ export default class S3Client {
71
73
  /**
72
74
  * Creates an S3File instance for the given path.
73
75
  *
74
- * @param {string} path
76
+ * @param {string} path The path to the object in the bucket. Also known as [object key](https://docs.aws.amazon.com/AmazonS3/latest/userguide/object-keys.html).
77
+ * We recommend not using the following characters in a key name because of significant special character handling, which isn't consistent across all applications (see [AWS docs](https://docs.aws.amazon.com/AmazonS3/latest/userguide/object-keys.html)):
78
+ * - Backslash (`\\`)
79
+ * - Left brace (`{`)
80
+ * - Non-printable ASCII characters (128–255 decimal characters)
81
+ * - Caret or circumflex (`^`)
82
+ * - Right brace (`}`)
83
+ * - Percent character (`%`)
84
+ * - Grave accent or backtick (`\``)
85
+ * - Right bracket (`]`)
86
+ * - Quotation mark (`"`)
87
+ * - Greater than sign (`>`)
88
+ * - Left bracket (`[`)
89
+ * - Tilde (`~`)
90
+ * - Less than sign (`<`)
91
+ * - Pound sign (`#`)
92
+ * - Vertical bar or pipe (`|`)
93
+ *
94
+ * lean-s3 does not enforce these restrictions.
95
+ *
75
96
  * @param {Partial<CreateFileInstanceOptions>} [options] TODO
76
- * @returns {S3File}
77
97
  * @example
78
98
  * ```js
79
99
  * const file = client.file("image.jpg");
@@ -86,6 +106,7 @@ export default class S3Client {
86
106
  * ```
87
107
  */
88
108
  file(path, options) {
109
+ // TODO: Check max path length in bytes
89
110
  return new S3File(this, path, undefined, undefined, undefined);
90
111
  }
91
112
  /**
@@ -117,12 +138,155 @@ export default class S3Client {
117
138
  res.search = `${query}&X-Amz-Signature=${signature}`;
118
139
  return res.toString();
119
140
  }
120
- async deleteObjects(objects, options) {
121
- throw new Error("Not implemented");
141
+ /**
142
+ * Uses [`DeleteObjects`](https://docs.aws.amazon.com/AmazonS3/latest/API/API_DeleteObjects.html) to delete multiple objects in a single request.
143
+ */
144
+ async deleteObjects(objects, options = {}) {
145
+ const body = xmlBuilder.build({
146
+ Delete: {
147
+ Quiet: true,
148
+ Object: objects.map(o => ({
149
+ Key: typeof o === "string" ? o : o.key,
150
+ })),
151
+ },
152
+ });
153
+ const response = await this._signedRequest("POST", "", "delete=", // "=" is needed by minio for some reason
154
+ body, {
155
+ "content-md5": sign.md5Base64(body),
156
+ }, undefined, undefined, this.#options.bucket, options.signal);
157
+ if (response.statusCode === 200) {
158
+ const text = await response.body.text();
159
+ let res = undefined;
160
+ try {
161
+ // Quiet mode omits all deleted elements, so it will be parsed as "", which we need to coalesce to null/undefined
162
+ res = (xmlParser.parse(text)?.DeleteResult || undefined)?.Error ?? [];
163
+ }
164
+ catch (cause) {
165
+ // Possible according to AWS docs
166
+ throw new S3Error("Unknown", "", {
167
+ message: "S3 service responded with invalid XML.",
168
+ cause,
169
+ });
170
+ }
171
+ if (!res || !Array.isArray(res)) {
172
+ throw new S3Error("Unknown", "", {
173
+ message: "Could not process response.",
174
+ });
175
+ }
176
+ const errors = res.map(e => ({
177
+ code: e.Code,
178
+ key: e.Key,
179
+ message: e.Message,
180
+ versionId: e.VersionId,
181
+ }));
182
+ return errors.length > 0 ? { errors } : null;
183
+ }
184
+ if (400 <= response.statusCode && response.statusCode < 500) {
185
+ throw await getResponseError(response, "");
186
+ }
187
+ response.body.dump(); // undici docs state that we should dump the body if not used
188
+ throw new Error(`Response code not implemented yet: ${response.statusCode}`);
189
+ }
190
+ /**
191
+ * Creates a new bucket on the S3 server.
192
+ *
193
+ * @param name The name of the bucket to create. AWS restricts the name according to [some rules](https://docs.aws.amazon.com/AmazonS3/latest/userguide/bucketnamingrules.html). The most important ones are:
194
+ * - Bucket names must be between `3` (min) and `63` (max) characters long.
195
+ * - Bucket names can consist only of lowercase letters, numbers, periods (`.`), and hyphens (`-`).
196
+ * - Bucket names must begin and end with a letter or number.
197
+ * - Bucket names must not contain two adjacent periods.
198
+ * - Bucket names must not be formatted as an IP address (for example, `192.168.5.4`).
199
+ *
200
+ * @throws {Error} If the bucket name is invalid.
201
+ * @throws {S3Error} If the bucket could not be created, e.g. if it already exists.
202
+ * @remarks Uses [`CreateBucket`](https://docs.aws.amazon.com/AmazonS3/latest/API/API_CreateBucket.html)
203
+ */
204
+ async createBucket(name, options) {
205
+ ensureValidBucketName(name);
206
+ let body = undefined;
207
+ if (options) {
208
+ const location = options.location && (options.location.name || options.location.type)
209
+ ? {
210
+ Name: options.location.name ?? undefined,
211
+ Type: options.location.type ?? undefined,
212
+ }
213
+ : undefined;
214
+ const bucket = options.info && (options.info.dataRedundancy || options.info.type)
215
+ ? {
216
+ DataRedundancy: options.info.dataRedundancy ?? undefined,
217
+ Type: options.info.type ?? undefined,
218
+ }
219
+ : undefined;
220
+ body =
221
+ location || bucket || options.locationConstraint
222
+ ? xmlBuilder.build({
223
+ CreateBucketConfiguration: {
224
+ $xmlns: "http://s3.amazonaws.com/doc/2006-03-01/",
225
+ LocationConstraint: options.locationConstraint ?? undefined,
226
+ Location: location,
227
+ Bucket: bucket,
228
+ },
229
+ })
230
+ : undefined;
231
+ }
232
+ const additionalSignedHeaders = body
233
+ ? { "content-md5": sign.md5Base64(body) }
234
+ : undefined;
235
+ const response = await this._signedRequest("PUT", "", undefined, body, additionalSignedHeaders, undefined, undefined, name, options?.signal);
236
+ if (400 <= response.statusCode && response.statusCode < 500) {
237
+ throw await getResponseError(response, "");
238
+ }
239
+ await response.body.dump(); // undici docs state that we should dump the body if not used
240
+ if (response.statusCode === 200) {
241
+ return;
242
+ }
243
+ throw new Error(`Response code not supported: ${response.statusCode}`);
244
+ }
245
+ /**
246
+ * Deletes a bucket from the S3 server.
247
+ * @param name The name of the bucket to delete. Same restrictions as in {@link S3Client#createBucket}.
248
+ * @throws {Error} If the bucket name is invalid.
249
+ * @throws {S3Error} If the bucket could not be deleted, e.g. if it is not empty.
250
+ * @remarks Uses [`DeleteBucket`](https://docs.aws.amazon.com/AmazonS3/latest/API/API_DeleteBucket.html).
251
+ */
252
+ async deleteBucket(name, options) {
253
+ ensureValidBucketName(name);
254
+ const response = await this._signedRequest("DELETE", "", undefined, undefined, undefined, undefined, undefined, name, options?.signal);
255
+ if (400 <= response.statusCode && response.statusCode < 500) {
256
+ throw await getResponseError(response, "");
257
+ }
258
+ await response.body.dump(); // undici docs state that we should dump the body if not used
259
+ if (response.statusCode === 204) {
260
+ return;
261
+ }
262
+ throw new Error(`Response code not supported: ${response.statusCode}`);
263
+ }
264
+ /**
265
+ * Checks if a bucket exists.
266
+ * @param name The name of the bucket to check. Same restrictions as in {@link S3Client#createBucket}.
267
+ * @throws {Error} If the bucket name is invalid.
268
+ * @remarks Uses [`HeadBucket`](https://docs.aws.amazon.com/AmazonS3/latest/API/API_HeadBucket.html).
269
+ */
270
+ async bucketExists(name, options) {
271
+ ensureValidBucketName(name);
272
+ const response = await this._signedRequest("HEAD", "", undefined, undefined, undefined, undefined, undefined, name, options?.signal);
273
+ if (response.statusCode !== 404 &&
274
+ 400 <= response.statusCode &&
275
+ response.statusCode < 500) {
276
+ throw await getResponseError(response, "");
277
+ }
278
+ await response.body.dump(); // undici docs state that we should dump the body if not used
279
+ if (response.statusCode === 200) {
280
+ return true;
281
+ }
282
+ if (response.statusCode === 404) {
283
+ return false;
284
+ }
285
+ throw new Error(`Response code not supported: ${response.statusCode}`);
122
286
  }
123
287
  //#region list
124
288
  /**
125
- * Uses `ListObjectsV2` to iterate over all keys. Pagination and continuation is handled internally.
289
+ * Uses [`ListObjectsV2`](https://docs.aws.amazon.com/AmazonS3/latest/API/API_ListObjectsV2.html) to iterate over all keys. Pagination and continuation is handled internally.
126
290
  */
127
291
  async *listIterating(options) {
128
292
  // only used to get smaller pages, so we can test this properly
@@ -142,6 +306,9 @@ export default class S3Client {
142
306
  continuationToken = res.nextContinuationToken;
143
307
  } while (continuationToken);
144
308
  }
309
+ /**
310
+ * Implements [`ListObjectsV2`](https://docs.aws.amazon.com/AmazonS3/latest/API/API_ListObjectsV2.html) to iterate over all keys.
311
+ */
145
312
  async list(options = {}) {
146
313
  // See `benchmark-operations.js` on why we don't use URLSearchParams but string concat
147
314
  // tldr: This is faster and we know the params exactly, so we can focus our encoding
@@ -174,7 +341,7 @@ export default class S3Client {
174
341
  }
175
342
  query += `&start-after=${encodeURIComponent(options.startAfter)}`;
176
343
  }
177
- const response = await this.#signedRequest("GET", "", query, undefined, undefined, undefined, undefined, options.signal);
344
+ const response = await this._signedRequest("GET", "", query, undefined, undefined, undefined, undefined, options.bucket ?? this.#options.bucket, options.signal);
178
345
  if (response.statusCode === 200) {
179
346
  const text = await response.body.text();
180
347
  let res = undefined;
@@ -211,16 +378,20 @@ export default class S3Client {
211
378
  contents,
212
379
  };
213
380
  }
214
- // undici docs state that we shoul dump the body if not used
215
- response.body.dump();
381
+ response.body.dump(); // undici docs state that we should dump the body if not used
216
382
  throw new Error(`Response code not implemented yet: ${response.statusCode}`);
217
383
  }
218
384
  //#endregion
219
- async #signedRequest(method, pathWithoutBucket, query, body, additionalSignedHeaders, additionalUnsignedHeaders, contentHash, signal = undefined) {
220
- const bucket = this.#options.bucket;
385
+ /**
386
+ * Do not use this. This is an internal method.
387
+ * TODO: Maybe move this into a separate free function?
388
+ * @internal
389
+ */
390
+ async _signedRequest(method, pathWithoutBucket, query, body, additionalSignedHeaders, additionalUnsignedHeaders, contentHash, bucket, signal = undefined) {
221
391
  const endpoint = this.#options.endpoint;
222
392
  const region = this.#options.region;
223
- const url = buildRequestUrl(endpoint, bucket, region, pathWithoutBucket);
393
+ const effectiveBucket = bucket ?? this.#options.bucket;
394
+ const url = buildRequestUrl(endpoint, effectiveBucket, region, pathWithoutBucket);
224
395
  if (query) {
225
396
  url.search = query;
226
397
  }
@@ -304,34 +475,7 @@ export default class S3Client {
304
475
  // everything seemed to work, no need to process response body
305
476
  return;
306
477
  }
307
- let body = undefined;
308
- try {
309
- body = await response.body.text();
310
- }
311
- catch (cause) {
312
- throw new S3Error("Unknown", path, {
313
- message: "Could not read response body.",
314
- cause,
315
- });
316
- }
317
- if (response.headers["content-type"] === "application/xml") {
318
- let error = undefined;
319
- try {
320
- error = xmlParser.parse(body);
321
- }
322
- catch (cause) {
323
- throw new S3Error("Unknown", path, {
324
- message: "Could not parse XML error response.",
325
- cause,
326
- });
327
- }
328
- throw new S3Error(error.Code || "Unknown", path, {
329
- message: error.Message || undefined, // Message might be "",
330
- });
331
- }
332
- throw new S3Error("Unknown", path, {
333
- message: "Unknown error during S3 request.",
334
- });
478
+ throw await getResponseError(response, path);
335
479
  }
336
480
  // TODO: Support abortSignal
337
481
  /**
@@ -445,17 +589,6 @@ export default class S3Client {
445
589
  return `AWS4-HMAC-SHA256 Credential=${credentialSpec}, SignedHeaders=${signedHeadersSpec}, Signature=${signature}`;
446
590
  }
447
591
  }
448
- /**
449
- * @param {string} amzCredential
450
- * @param {import("./AmzDate.ts").AmzDate} date
451
- * @param {number} expiresIn
452
- * @param {string} headerList
453
- * @param {StorageClass | null | undefined} storageClass
454
- * @param {string | null | undefined} sessionToken
455
- * @param {Acl | null | undefined} acl
456
- * @param {string | null | undefined} contentHashStr
457
- * @returns {string}
458
- */
459
592
  export function buildSearchParams(amzCredential, date, expiresIn, headerList, contentHashStr, storageClass, sessionToken, acl) {
460
593
  // We tried to make these query params entirely lower-cased, just like the headers
461
594
  // but Cloudflare R2 requires them to have this exact casing
@@ -483,3 +616,17 @@ export function buildSearchParams(amzCredential, date, expiresIn, headerList, co
483
616
  }
484
617
  return res;
485
618
  }
619
+ function ensureValidBucketName(name) {
620
+ if (name.length < 3 || name.length > 63) {
621
+ throw new Error("`name` must be between 3 and 63 characters long.");
622
+ }
623
+ if (name.startsWith(".") || name.endsWith(".")) {
624
+ throw new Error("`name` must not start or end with a period (.)");
625
+ }
626
+ if (!/^[a-z0-9.-]+$/.test(name)) {
627
+ throw new Error("`name` can only contain lowercase letters, numbers, periods (.), and hyphens (-).");
628
+ }
629
+ if (name.includes("..")) {
630
+ throw new Error("`name` must not contain two adjacent periods (..)");
631
+ }
632
+ }
package/dist/S3Error.d.ts CHANGED
@@ -4,7 +4,7 @@ export default class S3Error extends Error {
4
4
  readonly message: string;
5
5
  readonly requestId: string | undefined;
6
6
  readonly hostId: string | undefined;
7
- constructor(code: string, path: string, { message, requestId, hostId, cause, }?: S3ErrorOptions);
7
+ constructor(code: string, path: string, { message, requestId, hostId, cause }?: S3ErrorOptions);
8
8
  }
9
9
  export type S3ErrorOptions = {
10
10
  message?: string | undefined;
package/dist/S3File.d.ts CHANGED
@@ -1,32 +1,43 @@
1
- import S3Stat from "./S3Stat.ts";
2
1
  import type S3Client from "./S3Client.ts";
3
- import { type OverridableS3ClientOptions } from "./S3Client.ts";
4
2
  import type { ByteSource } from "./index.ts";
3
+ import S3Stat from "./S3Stat.ts";
4
+ import { type OverridableS3ClientOptions } from "./S3Client.ts";
5
+ // TODO: If we want to hack around, we can use this to access the private implementation of the "get stream" algorithm used by Node.js's blob internally
6
+ // We probably have to do this some day if the fetch implementation is moved to internals.
7
+ // If this happens, fetch will probably use `[kHandle].getReader()` instead of .stream() to read the Blob
8
+ // This would break our use-case of passing an S3File as a body
9
+ // Using this hack would also make `.text()`, `.bytes()` etc. "just work" in every case, since these use `[kHandle]` internally as well.
10
+ // We now resort back into overriding text/bytes/etc. But as soon as another internal Node.js API uses this functionality, this would probably also use `[kHandle]` and bypass our data.
11
+ // const kHandle = Object.getOwnPropertySymbols(new Blob).find(s => s.toString() === 'Symbol(kHandle)');
5
12
  export default class S3File {
6
13
  #private;
7
14
  /**
8
15
  * @internal
9
16
  */
10
17
  constructor(client: S3Client, path: string, start: number | undefined, end: number | undefined, contentType: string | undefined);
18
+ // TODO: slice overloads
11
19
  slice(start?: number | undefined, end?: number | undefined, contentType?: string | undefined): S3File;
12
20
  /**
13
21
  * Get the stat of a file in the bucket. Uses `HEAD` request to check existence.
14
22
  *
15
- * @throws {Error} If the file does not exist.
16
- * @param {Partial<S3StatOptions>} [options]
17
- * @returns {Promise<S3Stat>}
23
+ * @remarks Uses [`HeadObject`](https://docs.aws.amazon.com/AmazonS3/latest/API/API_HeadObject.html).
24
+ * @throws {S3Error} If the file does not exist or the server has some other issues.
25
+ * @throws {Error} If the server returns an invalid response.
18
26
  */
19
27
  stat({ signal }?: Partial<S3StatOptions>): Promise<S3Stat>;
20
28
  /**
21
29
  * Check if a file exists in the bucket. Uses `HEAD` request to check existence.
22
- * @param {Partial<S3FileExistsOptions>} [options]
23
- * @returns {Promise<boolean>}
30
+ *
31
+ * @remarks Uses [`HeadObject`](https://docs.aws.amazon.com/AmazonS3/latest/API/API_HeadObject.html).
24
32
  */
25
- exists({ signal, }?: Partial<S3FileExistsOptions>): Promise<boolean>;
33
+ exists({ signal }?: Partial<S3FileExistsOptions>): Promise<boolean>;
26
34
  /**
27
35
  * Delete a file from the bucket.
36
+ *
37
+ * @remarks Uses [`DeleteObject`](https://docs.aws.amazon.com/AmazonS3/latest/API/API_DeleteObject.html).
38
+ * @remarks `versionId` not supported.
39
+ *
28
40
  * @param {Partial<S3FileDeleteOptions>} [options]
29
- * @returns {Promise<void>}
30
41
  *
31
42
  * @example
32
43
  * ```js
@@ -46,6 +57,11 @@ export default class S3File {
46
57
  toString(): string;
47
58
  /** @returns {Promise<unknown>} */
48
59
  json(): Promise<unknown>;
60
+ // TODO
61
+ // /** @returns {Promise<Uint8Array>} */
62
+ // bytes() {
63
+ // return new Response(this.stream()).bytes(); // TODO: Does this exist?
64
+ // }
49
65
  /** @returns {Promise<ArrayBuffer>} */
50
66
  arrayBuffer(): Promise<ArrayBuffer>;
51
67
  /** @returns {Promise<string>} */
package/dist/S3File.js CHANGED
@@ -3,6 +3,7 @@ import S3Error from "./S3Error.js";
3
3
  import S3Stat from "./S3Stat.js";
4
4
  import { write, stream } from "./S3Client.js";
5
5
  import { sha256 } from "./sign.js";
6
+ import { fromStatusCode, getResponseError } from "./error.js";
6
7
  // TODO: If we want to hack around, we can use this to access the private implementation of the "get stream" algorithm used by Node.js's blob internally
7
8
  // We probably have to do this some day if the fetch implementation is moved to internals.
8
9
  // If this happens, fetch will probably use `[kHandle].getReader()` instead of .stream() to read the Blob
@@ -40,47 +41,51 @@ export default class S3File {
40
41
  /**
41
42
  * Get the stat of a file in the bucket. Uses `HEAD` request to check existence.
42
43
  *
43
- * @throws {Error} If the file does not exist.
44
- * @param {Partial<S3StatOptions>} [options]
45
- * @returns {Promise<S3Stat>}
44
+ * @remarks Uses [`HeadObject`](https://docs.aws.amazon.com/AmazonS3/latest/API/API_HeadObject.html).
45
+ * @throws {S3Error} If the file does not exist or the server has some other issues.
46
+ * @throws {Error} If the server returns an invalid response.
46
47
  */
47
48
  async stat({ signal } = {}) {
48
49
  // TODO: Support all options
49
- // TODO: Don't use presign here
50
- const url = this.#client.presign(this.#path, { method: "HEAD" });
51
- const response = await fetch(url, { method: "HEAD", signal }); // TODO: Use undici
52
- if (!response.ok) {
53
- switch (response.status) {
54
- case 404:
55
- // TODO: Process response body
56
- throw new S3Error("NoSuchKey", this.#path);
57
- default:
58
- // TODO: Process response body
59
- throw new S3Error("Unknown", this.#path);
50
+ const response = await this.#client._signedRequest("HEAD", this.#path, undefined, undefined, undefined, undefined, undefined, undefined, signal);
51
+ // Heads don't have a body, but we still need to consume it to avoid leaks
52
+ await response.body.dump();
53
+ if (200 <= response.statusCode && response.statusCode < 300) {
54
+ const result = S3Stat.tryParseFromHeaders(response.headers);
55
+ if (!result) {
56
+ throw new Error("S3 server returned an invalid response for `HeadObject`");
60
57
  }
58
+ return result;
61
59
  }
62
- const result = S3Stat.tryParseFromHeaders(response.headers);
63
- if (!result) {
64
- throw new Error("S3 server returned an invalid response for HEAD");
65
- }
66
- return result;
60
+ throw (fromStatusCode(response.statusCode, this.#path) ??
61
+ new Error(`S3 server returned an unsupported status code for \`HeadObject\`: ${response.statusCode}`));
67
62
  }
68
63
  /**
69
64
  * Check if a file exists in the bucket. Uses `HEAD` request to check existence.
70
- * @param {Partial<S3FileExistsOptions>} [options]
71
- * @returns {Promise<boolean>}
65
+ *
66
+ * @remarks Uses [`HeadObject`](https://docs.aws.amazon.com/AmazonS3/latest/API/API_HeadObject.html).
72
67
  */
73
68
  async exists({ signal, } = {}) {
74
69
  // TODO: Support all options
75
- // TODO: Don't use presign here
76
- const url = this.#client.presign(this.#path, { method: "HEAD" });
77
- const res = await fetch(url, { method: "HEAD", signal }); // TODO: Use undici
78
- return res.ok;
70
+ const response = await this.#client._signedRequest("HEAD", this.#path, undefined, undefined, undefined, undefined, undefined, undefined, signal);
71
+ // Heads don't have a body, but we still need to consume it to avoid leaks
72
+ await response.body.dump();
73
+ if (200 <= response.statusCode && response.statusCode < 300) {
74
+ return true;
75
+ }
76
+ if (response.statusCode === 404) {
77
+ return false;
78
+ }
79
+ throw (fromStatusCode(response.statusCode, this.#path) ??
80
+ new Error(`S3 server returned an unsupported status code for \`HeadObject\`: ${response.statusCode}`));
79
81
  }
80
82
  /**
81
83
  * Delete a file from the bucket.
84
+ *
85
+ * @remarks Uses [`DeleteObject`](https://docs.aws.amazon.com/AmazonS3/latest/API/API_DeleteObject.html).
86
+ * @remarks `versionId` not supported.
87
+ *
82
88
  * @param {Partial<S3FileDeleteOptions>} [options]
83
- * @returns {Promise<void>}
84
89
  *
85
90
  * @example
86
91
  * ```js
@@ -98,19 +103,12 @@ export default class S3File {
98
103
  */
99
104
  async delete({ signal } = {}) {
100
105
  // TODO: Support all options
101
- // TODO: Don't use presign here
102
- const url = this.#client.presign(this.#path, { method: "DELETE" });
103
- const response = await fetch(url, { method: "DELETE", signal }); // TODO: Use undici
104
- if (!response.ok) {
105
- switch (response.status) {
106
- case 404:
107
- // TODO: Process response body
108
- throw new S3Error("NoSuchKey", this.#path);
109
- default:
110
- // TODO: Process response body
111
- throw new S3Error("Unknown", this.#path);
112
- }
106
+ const response = await this.#client._signedRequest("DELETE", this.#path, undefined, undefined, undefined, undefined, undefined, undefined, signal);
107
+ if (response.statusCode === 204) {
108
+ await response.body.dump(); // Consume the body to avoid leaks
109
+ return;
113
110
  }
111
+ throw await getResponseError(response, this.#path);
114
112
  }
115
113
  toString() {
116
114
  return `S3File { path: "${this.#path}" }`;
@@ -143,14 +141,6 @@ export default class S3File {
143
141
  // This function is called for every operation on the blob
144
142
  return this.#client[stream](this.#path, undefined, this.#start, this.#end);
145
143
  }
146
- /**
147
- * @param {ByteSource} data
148
- * @returns {Promise<[
149
- * buffer: import("./index.d.ts").UndiciBodyInit,
150
- * size: number | undefined,
151
- * hash: Buffer | undefined,
152
- * ]>}
153
- */
154
144
  async #transformData(data) {
155
145
  if (typeof data === "string") {
156
146
  const binary = new TextEncoder();
@@ -197,10 +187,6 @@ export default class S3File {
197
187
  return await this.#client[write](this.#path, bytes, this.#contentType, length, hash, this.#start, this.#end, signal);
198
188
  }
199
189
  }
200
- /**
201
- * @param {never} v
202
- * @returns {never}
203
- */
204
190
  function assertNever(v) {
205
191
  throw new TypeError(`Expected value not to have type ${typeof v}`);
206
192
  }
package/dist/S3Stat.d.ts CHANGED
@@ -1,9 +1,8 @@
1
- import type { Headers } from "undici-types";
2
1
  export default class S3Stat {
3
2
  readonly etag: string;
4
3
  readonly lastModified: Date;
5
4
  readonly size: number;
6
5
  readonly type: string;
7
6
  constructor(etag: string, lastModified: Date, size: number, type: string);
8
- static tryParseFromHeaders(headers: Headers): S3Stat | undefined;
7
+ static tryParseFromHeaders(headers: Record<string, string | string[] | undefined>): S3Stat | undefined;
9
8
  }
package/dist/S3Stat.js CHANGED
@@ -10,15 +10,15 @@ export default class S3Stat {
10
10
  this.type = type;
11
11
  }
12
12
  static tryParseFromHeaders(headers) {
13
- const lm = headers.get("last-modified");
14
- if (lm === null) {
13
+ const lm = headers["last-modified"];
14
+ if (lm === null || typeof lm !== "string") {
15
15
  return undefined;
16
16
  }
17
- const etag = headers.get("etag");
18
- if (etag === null) {
17
+ const etag = headers.etag;
18
+ if (etag === null || typeof etag !== "string") {
19
19
  return undefined;
20
20
  }
21
- const cl = headers.get("content-length");
21
+ const cl = headers["content-length"];
22
22
  if (cl === null) {
23
23
  return undefined;
24
24
  }
@@ -26,8 +26,8 @@ export default class S3Stat {
26
26
  if (!Number.isSafeInteger(size)) {
27
27
  return undefined;
28
28
  }
29
- const ct = headers.get("content-type");
30
- if (ct === null) {
29
+ const ct = headers["content-type"];
30
+ if (ct === null || typeof ct !== "string") {
31
31
  return undefined;
32
32
  }
33
33
  return new S3Stat(etag, new Date(lm), size, ct);
@@ -0,0 +1,4 @@
1
+ import type { Dispatcher } from "undici";
2
+ import S3Error from "./S3Error.ts";
3
+ export declare function getResponseError(response: Dispatcher.ResponseData<unknown>, path: string): Promise<S3Error>;
4
+ export declare function fromStatusCode(code: number, path: string): S3Error | undefined;
package/dist/error.js ADDED
@@ -0,0 +1,57 @@
1
+ import { XMLParser } from "fast-xml-parser";
2
+ import S3Error from "./S3Error.js";
3
+ const xmlParser = new XMLParser();
4
+ export async function getResponseError(response, path) {
5
+ let body = undefined;
6
+ try {
7
+ body = await response.body.text();
8
+ }
9
+ catch (cause) {
10
+ return new S3Error("Unknown", path, {
11
+ message: "Could not read response body.",
12
+ cause,
13
+ });
14
+ }
15
+ if (response.headers["content-type"] === "application/xml") {
16
+ return parseAndGetXmlError(body, path);
17
+ }
18
+ return new S3Error("Unknown", path, {
19
+ message: "Unknown error during S3 request.",
20
+ });
21
+ }
22
+ export function fromStatusCode(code, path) {
23
+ switch (code) {
24
+ case 404:
25
+ return new S3Error("NoSuchKey", path, {
26
+ message: "The specified key does not exist.",
27
+ });
28
+ case 403:
29
+ return new S3Error("AccessDenied", path, {
30
+ message: "Access denied to the key.",
31
+ });
32
+ // TODO: Add more status codes as needed
33
+ default:
34
+ return undefined;
35
+ }
36
+ }
37
+ function parseAndGetXmlError(body, path) {
38
+ let error = undefined;
39
+ try {
40
+ error = xmlParser.parse(body);
41
+ }
42
+ catch (cause) {
43
+ return new S3Error("Unknown", path, {
44
+ message: "Could not parse XML error response.",
45
+ cause,
46
+ });
47
+ }
48
+ if (error.Error) {
49
+ const e = error.Error;
50
+ return new S3Error(e.Code || "Unknown", path, {
51
+ message: e.Message || undefined, // Message might be "",
52
+ });
53
+ }
54
+ return new S3Error(error.Code || "Unknown", path, {
55
+ message: error.Message || undefined, // Message might be "",
56
+ });
57
+ }
package/dist/index.d.ts CHANGED
@@ -1,6 +1,6 @@
1
1
  import type { Readable } from "node:stream";
2
2
  export { default as S3File, type S3FileDeleteOptions, type S3FileExistsOptions, type S3StatOptions, } from "./S3File.ts";
3
- export { default as S3Client, type ListObjectsResponse, type CreateFileInstanceOptions, type OverridableS3ClientOptions, type S3ClientOptions, type S3FilePresignOptions, } from "./S3Client.ts";
3
+ export { default as S3Client, type ListObjectsOptions, type ListObjectsIteratingOptions, type ListObjectsResponse, type CreateFileInstanceOptions, type OverridableS3ClientOptions, type S3ClientOptions, type S3FilePresignOptions, type BucketCreationOptions, type DeleteObjectsOptions, } from "./S3Client.ts";
4
4
  export { default as S3Error, type S3ErrorOptions } from "./S3Error.ts";
5
5
  export { default as S3Stat } from "./S3Stat.ts";
6
6
  export { default as S3BucketEntry } from "./S3BucketEntry.ts";
@@ -9,7 +9,29 @@ export type StorageClass = "STANDARD" | "DEEP_ARCHIVE" | "EXPRESS_ONEZONE" | "GL
9
9
  export type ChecksumAlgorithm = "CRC32" | "CRC32C" | "CRC64NVME" | "SHA1" | "SHA256";
10
10
  export type ChecksumType = "COMPOSITE" | "FULL_OBJECT";
11
11
  export type PresignableHttpMethod = "GET" | "DELETE" | "PUT" | "HEAD";
12
- export type HttpMethod = PresignableHttpMethod | "POST";
12
+ export type HttpMethod = PresignableHttpMethod | "POST"; // There are also others, but we don't want to support them yet
13
13
  /** Body values supported by undici. */
14
14
  export type UndiciBodyInit = string | Buffer | Uint8Array | Readable;
15
15
  export type ByteSource = UndiciBodyInit | Blob;
16
+ // TODO
17
+ // | ArrayBufferView
18
+ // | ArrayBuffer
19
+ // | SharedArrayBuffer
20
+ // | Request
21
+ // | Response
22
+ // | S3File
23
+ // | ReadableStream<Uint8Array>
24
+ /**
25
+ * Implements [LocationInfo](https://docs.aws.amazon.com/AmazonS3/latest/API/API_LocationInfo.html)
26
+ */
27
+ export type BucketLocationInfo = {
28
+ name?: string;
29
+ type?: string;
30
+ };
31
+ /**
32
+ * Implements [BucketInfo](https://docs.aws.amazon.com/AmazonS3/latest/API/API_BucketInfo.html)
33
+ */
34
+ export type BucketInfo = {
35
+ dataRedundancy?: string;
36
+ type?: string;
37
+ };
package/dist/index.js CHANGED
@@ -3,11 +3,3 @@ export { default as S3Client, } from "./S3Client.js";
3
3
  export { default as S3Error } from "./S3Error.js";
4
4
  export { default as S3Stat } from "./S3Stat.js";
5
5
  export { default as S3BucketEntry } from "./S3BucketEntry.js";
6
- // TODO
7
- // | ArrayBufferView
8
- // | ArrayBuffer
9
- // | SharedArrayBuffer
10
- // | Request
11
- // | Response
12
- // | S3File
13
- // | ReadableStream<Uint8Array>
package/dist/sign.d.ts CHANGED
@@ -1,6 +1,8 @@
1
1
  import { type BinaryLike } from "node:crypto";
2
2
  import type { AmzDate } from "./AmzDate.ts";
3
3
  import type { HttpMethod, PresignableHttpMethod } from "./index.ts";
4
+ // Spec:
5
+ // https://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-query-string-auth.html
4
6
  export declare function deriveSigningKey(date: string, region: string, secretAccessKey: string): Buffer;
5
7
  export declare function signCanonicalDataHash(signinKey: Buffer, canonicalDataHash: string, date: AmzDate, region: string): string;
6
8
  export declare const unsignedPayload = "UNSIGNED-PAYLOAD";
@@ -13,4 +15,4 @@ export declare const unsignedPayload = "UNSIGNED-PAYLOAD";
13
15
  export declare function createCanonicalDataDigestHostOnly(method: PresignableHttpMethod, path: string, query: string, host: string): string;
14
16
  export declare function createCanonicalDataDigest(method: HttpMethod, path: string, query: string, sortedHeaders: Record<string, string>, contentHashStr: string): string;
15
17
  export declare function sha256(data: BinaryLike): Buffer;
16
- export declare function md5Hex(data: BinaryLike): string;
18
+ export declare function md5Base64(data: BinaryLike): string;
package/dist/sign.js CHANGED
@@ -72,6 +72,6 @@ export function createCanonicalDataDigest(method, path, query, sortedHeaders, co
72
72
  export function sha256(data) {
73
73
  return createHash("sha256").update(data).digest();
74
74
  }
75
- export function md5Hex(data) {
76
- return createHash("md5").update(data).digest("hex");
75
+ export function md5Base64(data) {
76
+ return createHash("md5").update(data).digest("base64");
77
77
  }
@@ -1,4 +1 @@
1
- /**
2
- * @module Used by integration tests and unit tests.
3
- */
4
- export declare function runTests(runId: number, endpoint: string, forcePathStyle: boolean, accessKeyId: string, secretAccessKey: string, region: string, bucket: string): void;
1
+ export declare function runTests(runId: number, endpoint: string, accessKeyId: string, secretAccessKey: string, region: string, bucket: string): void;
@@ -1,6 +1,8 @@
1
1
  // @ts-check
2
- import { describe } from "node:test";
2
+ import { describe, before, after } from "node:test";
3
+ import { expect } from "expect";
3
4
  import { runTests } from "./test-common.js";
5
+ import { S3Client } from "./index.js";
4
6
  const env = process.env;
5
7
  const runId = Date.now();
6
8
  for (const provider of ["hetzner", "aws", "cloudflare"]) {
@@ -14,6 +16,29 @@ for (const provider of ["hetzner", "aws", "cloudflare"]) {
14
16
  if (!endpoint || !region || !bucket || !accessKeyId || !secretAccessKey) {
15
17
  throw new Error("Invalid config");
16
18
  }
17
- runTests(runId, endpoint, false, accessKeyId, secretAccessKey, region, bucket);
19
+ {
20
+ const client = new S3Client({
21
+ endpoint,
22
+ accessKeyId,
23
+ secretAccessKey,
24
+ region,
25
+ bucket,
26
+ });
27
+ before(async () => {
28
+ expect(await client.bucketExists(bucket)).toBe(true);
29
+ const objects = (await client.list({ prefix: `${runId}/` })).contents;
30
+ expect(objects.length).toBe(0);
31
+ });
32
+ after(async () => {
33
+ expect(await client.bucketExists(bucket)).toBe(true);
34
+ const objects = (await client.list({ prefix: `${runId}/`, maxKeys: 1000 })).contents;
35
+ // clean up after all tests, but we want to fail because there are still objects
36
+ if (objects.length > 0) {
37
+ await client.deleteObjects(objects);
38
+ }
39
+ expect(objects.length).toBe(0);
40
+ });
41
+ }
42
+ runTests(runId, endpoint, accessKeyId, secretAccessKey, region, bucket);
18
43
  });
19
44
  }
package/package.json CHANGED
@@ -2,7 +2,7 @@
2
2
  "name": "lean-s3",
3
3
  "author": "Niklas Mollenhauer",
4
4
  "license": "MIT",
5
- "version": "0.2.0",
5
+ "version": "0.2.2",
6
6
  "description": "A server-side S3 API for the regular user.",
7
7
  "keywords": [
8
8
  "s3",
@@ -19,10 +19,10 @@
19
19
  "types": "./dist/index.d.ts",
20
20
  "type": "module",
21
21
  "scripts": {
22
- "build": "tsc",
22
+ "build": "tsgo",
23
23
  "clean": "rimraf dist",
24
- "test": "tsc && node --test dist/*.test.js",
25
- "test:integration": "tsc && node --test dist/test.integration.js",
24
+ "test": "tsgo && node --test dist/*.test.js",
25
+ "test:integration": "tsgo && node --test dist/test.integration.js",
26
26
  "ci": "biome ci ./src",
27
27
  "docs": "typedoc",
28
28
  "lint": "biome lint ./src",
@@ -30,11 +30,11 @@
30
30
  "prepublishOnly": "npm run clean && npm run build"
31
31
  },
32
32
  "devDependencies": {
33
- "@aws-sdk/client-s3": "^3.828.0",
34
33
  "@biomejs/biome": "^1.9.4",
35
34
  "@testcontainers/localstack": "^11.0.3",
36
35
  "@testcontainers/minio": "^11.0.3",
37
36
  "@types/node": "^24.0.1",
37
+ "@typescript/native-preview": "^7.0.0-dev.20250613.1",
38
38
  "expect": "^30.0.0",
39
39
  "lefthook": "^1.11.13",
40
40
  "rimraf": "^6.0.1",