lean-s3 0.2.2 → 0.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.ts +330 -24
- package/dist/index.js +1257 -5
- package/package.json +5 -7
- package/dist/AmzDate.d.ts +0 -7
- package/dist/AmzDate.js +0 -29
- package/dist/KeyCache.d.ts +0 -5
- package/dist/KeyCache.js +0 -21
- package/dist/S3BucketEntry.d.ts +0 -19
- package/dist/S3BucketEntry.js +0 -29
- package/dist/S3Client.d.ts +0 -210
- package/dist/S3Client.js +0 -632
- package/dist/S3Error.d.ts +0 -14
- package/dist/S3Error.js +0 -15
- package/dist/S3File.d.ts +0 -87
- package/dist/S3File.js +0 -192
- package/dist/S3Stat.d.ts +0 -8
- package/dist/S3Stat.js +0 -35
- package/dist/error.d.ts +0 -4
- package/dist/error.js +0 -57
- package/dist/index.test.d.ts +0 -1
- package/dist/sign.d.ts +0 -18
- package/dist/sign.js +0 -77
- package/dist/sign.test.d.ts +0 -1
- package/dist/test-common.d.ts +0 -1
- package/dist/test.integration.d.ts +0 -1
- package/dist/test.integration.js +0 -44
- package/dist/url.d.ts +0 -9
- package/dist/url.js +0 -52
- package/dist/url.test.d.ts +0 -1
package/dist/index.js
CHANGED
|
@@ -1,5 +1,1257 @@
|
|
|
1
|
-
|
|
2
|
-
|
|
3
|
-
|
|
4
|
-
|
|
5
|
-
|
|
1
|
+
// src/S3File.ts
|
|
2
|
+
import { Readable } from "stream";
|
|
3
|
+
|
|
4
|
+
// src/S3Error.ts
|
|
5
|
+
/**
 * Error raised for failed S3 operations.
 *
 * Carries the S3 error `code` (e.g. "NoSuchKey"), the object `path` the
 * request targeted, and — when the service provided them — the
 * `requestId`/`hostId` debugging identifiers.
 */
var S3Error = class extends Error {
  code;
  path;
  message;
  requestId;
  hostId;
  /**
   * @param code S3 error code, e.g. "NoSuchKey" or "Unknown".
   * @param path Object key / path the failed request referred to.
   * @param options Optional details; `cause` is forwarded to `Error`.
   */
  constructor(code, path, {
    message = void 0,
    requestId = void 0,
    hostId = void 0,
    cause = void 0
  } = {}) {
    super(message, { cause });
    // Fix: name the error class so stack traces read "S3Error: ..."
    // instead of the generic "Error: ...".
    this.name = "S3Error";
    this.code = code;
    this.path = path;
    this.message = message ?? "Some unknown error occurred.";
    this.requestId = requestId;
    this.hostId = hostId;
  }
};
|
|
25
|
+
|
|
26
|
+
// src/S3Stat.ts
|
|
27
|
+
/**
 * Immutable result of a HEAD-object request: etag, last-modified date,
 * byte size and content type.
 */
var S3Stat = class _S3Stat {
  etag;
  lastModified;
  size;
  type;
  constructor(etag, lastModified, size, type) {
    this.etag = etag;
    this.lastModified = lastModified;
    this.size = size;
    this.type = type;
  }
  /**
   * Builds an S3Stat from a response header map.
   * Returns `undefined` when any required header is absent or malformed.
   */
  static tryParseFromHeaders(headers) {
    const lastModified = headers["last-modified"];
    if (lastModified === null || typeof lastModified !== "string") {
      return void 0;
    }
    const etag = headers.etag;
    if (etag === null || typeof etag !== "string") {
      return void 0;
    }
    const contentLength = headers["content-length"];
    if (contentLength === null) {
      return void 0;
    }
    // Reject anything that does not parse to an exact integer byte count.
    const size = Number(contentLength);
    if (!Number.isSafeInteger(size)) {
      return void 0;
    }
    const contentType = headers["content-type"];
    if (contentType === null || typeof contentType !== "string") {
      return void 0;
    }
    return new _S3Stat(etag, new Date(lastModified), size, contentType);
  }
};
|
|
62
|
+
|
|
63
|
+
// src/S3Client.ts
|
|
64
|
+
import { request, Agent } from "undici";
|
|
65
|
+
import { XMLParser as XMLParser2, XMLBuilder } from "fast-xml-parser";
|
|
66
|
+
|
|
67
|
+
// src/S3BucketEntry.ts
|
|
68
|
+
/**
 * A single object entry as returned by ListObjectsV2.
 */
var S3BucketEntry = class _S3BucketEntry {
  key;
  size;
  lastModified;
  etag;
  storageClass;
  checksumAlgorithm;
  checksumType;
  constructor(key, size, lastModified, etag, storageClass, checksumAlgorithm, checksumType) {
    this.key = key;
    this.size = size;
    this.lastModified = lastModified;
    this.etag = etag;
    this.storageClass = storageClass;
    this.checksumAlgorithm = checksumAlgorithm;
    this.checksumType = checksumType;
  }
  /**
   * Maps one parsed `<Contents>` XML node onto an entry.
   * @internal
   */
  // biome-ignore lint/suspicious/noExplicitAny: internal use only, any is ok here
  static parse(source) {
    const { Key, Size, LastModified, ETag, StorageClass, ChecksumAlgorithm, ChecksumType } = source;
    return new _S3BucketEntry(
      Key,
      Size,
      new Date(LastModified),
      ETag,
      StorageClass,
      ChecksumAlgorithm,
      ChecksumType
    );
  }
};
|
|
101
|
+
|
|
102
|
+
// src/sign.ts
|
|
103
|
+
import { createHmac, createHash } from "crypto";
|
|
104
|
+
/**
 * Derives the SigV4 signing key by chaining HMAC-SHA256 over the date,
 * region, service ("s3") and the fixed "aws4_request" terminator.
 * @returns {Buffer} The derived signing key.
 */
function deriveSigningKey(date, region, secretAccessKey) {
  const hmac = (key, data) => createHmac("sha256", key).update(data).digest();
  let derived = hmac(`AWS4${secretAccessKey}`, date);
  derived = hmac(derived, region);
  derived = hmac(derived, "s3");
  return hmac(derived, "aws4_request");
}
|
|
111
|
+
/**
 * Signs a canonical-request hash, producing the final hex signature for
 * the "AWS4-HMAC-SHA256" string-to-sign.
 */
function signCanonicalDataHash(signinKey, canonicalDataHash, date, region) {
  const stringToSign = [
    "AWS4-HMAC-SHA256",
    date.dateTime,
    `${date.date}/${region}/s3/aws4_request`,
    canonicalDataHash
  ].join("\n");
  return createHmac("sha256", signinKey).update(stringToSign).digest("hex");
}
|
|
119
|
+
// Sentinel used in place of a payload hash when the body is not signed.
var unsignedPayload = "UNSIGNED-PAYLOAD";
/**
 * Hashes a canonical request that signs only the `host` header —
 * the shape used for presigned URLs, where the payload stays unsigned.
 */
function createCanonicalDataDigestHostOnly(method, path, query, host) {
  const canonicalRequest = [
    method,
    path,
    query,
    `host:${host}`,
    "",
    "host",
    "UNSIGNED-PAYLOAD"
  ].join("\n");
  return createHash("sha256").update(canonicalRequest).digest("hex");
}
|
|
131
|
+
/**
 * Hashes the full SigV4 canonical request: method, path, query,
 * canonical header lines, the signed-header list and the payload hash.
 * `sortedHeaders` must already be filtered and in sorted key order.
 */
function createCanonicalDataDigest(method, path, query, sortedHeaders, contentHashStr) {
  const names = Object.keys(sortedHeaders);
  const headerLines = names.map((name) => `${name}:${sortedHeaders[name]}\n`).join("");
  const signedHeaderList = names.join(";");
  const canonicalRequest = `${method}\n${path}\n${query}\n${headerLines}\n${signedHeaderList}\n${contentHashStr}`;
  return createHash("sha256").update(canonicalRequest).digest("hex");
}
|
|
150
|
+
/** Returns the SHA-256 digest of `data` as a Buffer. */
function sha256(data) {
  const hasher = createHash("sha256");
  hasher.update(data);
  return hasher.digest();
}
|
|
153
|
+
/** Returns the MD5 digest of `data`, base64-encoded (for `content-md5` headers). */
function md5Base64(data) {
  const hasher = createHash("md5");
  hasher.update(data);
  return hasher.digest("base64");
}
|
|
156
|
+
|
|
157
|
+
// src/KeyCache.ts
|
|
158
|
+
/**
 * Per-day cache of derived SigV4 signing keys.
 * A signing key is only valid for the date it was derived for, so the
 * whole cache is flushed whenever the UTC day index changes.
 */
var KeyCache = class {
  #lastNumericDay = -1;
  #keys = /* @__PURE__ */ new Map();
  /**
   * Returns the cached signing key for (date, region, accessKeyId),
   * deriving and storing it on a cache miss.
   */
  computeIfAbsent(date, region, accessKeyId, secretAccessKey) {
    if (this.#lastNumericDay !== date.numericDayStart) {
      // Day rolled over: every cached key is now stale.
      this.#keys.clear();
      this.#lastNumericDay = date.numericDayStart;
    }
    const cacheKey = `${date.date}:${region}:${accessKeyId}`;
    const cached = this.#keys.get(cacheKey);
    if (cached) {
      return cached;
    }
    const derived = deriveSigningKey(date.date, region, secretAccessKey);
    this.#keys.set(cacheKey, derived);
    return derived;
  }
};
|
|
176
|
+
|
|
177
|
+
// src/AmzDate.ts
|
|
178
|
+
// Milliseconds per day, used to bucket timestamps into UTC day indices.
var ONE_DAY = 1e3 * 60 * 60 * 24;
/**
 * Formats a Date into the AMZ forms used by SigV4:
 * `YYYYMMDD` (credential scope) and `YYYYMMDD'T'HHMMSS'Z'` (x-amz-date),
 * plus a numeric UTC day index used for signing-key cache invalidation.
 */
function getAmzDate(dateTime) {
  const year = pad4(dateTime.getUTCFullYear());
  const month = pad2(dateTime.getUTCMonth() + 1);
  const day = pad2(dateTime.getUTCDate());
  const date = `${year}${month}${day}`;
  const hours = pad2(dateTime.getUTCHours());
  const minutes = pad2(dateTime.getUTCMinutes());
  const seconds = pad2(dateTime.getUTCSeconds());
  return {
    // Truncation toward zero; equivalent to the common `| 0` idiom for
    // the post-1970 timestamps handled here.
    numericDayStart: Math.trunc(dateTime.getTime() / ONE_DAY),
    date,
    dateTime: `${date}T${hours}${minutes}${seconds}Z`
  };
}
|
|
188
|
+
/** The current moment, in AMZ date form. */
function now() {
  const current = /* @__PURE__ */ new Date();
  return getAmzDate(current);
}
|
|
191
|
+
/**
 * Left-pads a non-negative integer with zeros to at least 4 digits.
 * Replaces a hand-rolled threshold chain with the standard
 * `String.prototype.padStart`.
 * @param {number} v
 * @returns {string}
 */
function pad4(v) {
  return v.toString().padStart(4, "0");
}
|
|
194
|
+
/**
 * Left-pads a non-negative integer with zeros to at least 2 digits.
 * Replaces the manual ternary with `String.prototype.padStart`.
 * @param {number} v
 * @returns {string}
 */
function pad2(v) {
  return v.toString().padStart(2, "0");
}
|
|
197
|
+
|
|
198
|
+
// src/url.ts
|
|
199
|
+
/**
 * Builds the request URL from the endpoint template, bucket, region and
 * object path. `{bucket}`/`{region}` placeholders in the endpoint are
 * expanded; when the endpoint had no `{bucket}` placeholder, the bucket
 * is prepended to the URL path instead (path-style addressing).
 */
function buildRequestUrl(endpoint, bucket, region, path) {
  const cleanBucket = normalizePath(bucket);
  const [resolvedEndpoint, bucketInDomain] = replaceDomainPlaceholders(endpoint, cleanBucket, region);
  const url = new URL(resolvedEndpoint);
  let prefix = url.pathname;
  if (!prefix.endsWith("/")) {
    prefix += "/";
  }
  const suffix = bucketInDomain
    ? normalizePath(path)
    : `${cleanBucket}/${normalizePath(path)}`;
  url.pathname = prefix + suffix;
  return url;
}
|
|
208
|
+
/**
 * Expands `{bucket}` and `{region}` placeholders in an endpoint template.
 * @returns A pair of the expanded endpoint and whether a `{bucket}`
 * placeholder was present (and therefore consumed).
 */
function replaceDomainPlaceholders(endpoint, bucket, region) {
  const hadBucketPlaceholder = endpoint.includes("{bucket}");
  const expanded = endpoint
    .replaceAll("{bucket}", bucket)
    .replaceAll("{region}", region);
  return [expanded, hadBucketPlaceholder];
}
|
|
215
|
+
/**
 * Strips a single leading and a single trailing slash from `path`.
 *
 * Uses `slice` rather than `substring`: for the input "/" the computed
 * bounds are start=1, end=0, and `substring` silently swaps its
 * arguments, so the old code returned "/" instead of the intended "".
 * `slice` returns "" when end < start, which is the correct result.
 * @param {string} path
 * @returns {string}
 */
function normalizePath(path) {
  const start = path[0] === "/" ? 1 : 0;
  const end = path[path.length - 1] === "/" ? path.length - 1 : path.length;
  return path.slice(start, end);
}
|
|
220
|
+
/**
 * Drops null/undefined header values and returns the remaining headers
 * as an object whose keys were inserted in sorted order (SigV4 requires
 * canonical headers to be sorted).
 */
function prepareHeadersForSigning(unfilteredHeadersUnsorted) {
  const sorted = {};
  const names = Object.keys(unfilteredHeadersUnsorted).sort();
  for (const name of names) {
    const value = unfilteredHeadersUnsorted[name];
    if (value === void 0 || value === null) {
      continue;
    }
    sorted[name] = value;
  }
  return sorted;
}
|
|
230
|
+
/**
 * Builds an HTTP `Range` header value for a [start, endExclusive) slice,
 * or `undefined` when neither bound is given.
 * HTTP ranges are end-INCLUSIVE while the slice is end-exclusive,
 * hence the `- 1` on the upper bound.
 */
function getRangeHeader(start, endExclusive) {
  if (typeof start !== "number" && typeof endExclusive !== "number") {
    return void 0;
  }
  const from = start ?? 0;
  const to = typeof endExclusive === "number" ? endExclusive - 1 : "";
  return `bytes=${from}-${to}`;
}
|
|
236
|
+
|
|
237
|
+
// src/error.ts
|
|
238
|
+
import { XMLParser } from "fast-xml-parser";
|
|
239
|
+
var xmlParser = new XMLParser();
|
|
240
|
+
/**
 * Turns a failed response into an S3Error.
 * Reads the response body; when the service replied with XML, delegates
 * to `parseAndGetXmlError`, otherwise returns a generic error.
 */
async function getResponseError(response, path) {
  let body = void 0;
  try {
    body = await response.body.text();
  } catch (cause) {
    return new S3Error("Unknown", path, {
      message: "Could not read response body.",
      cause
    });
  }
  // Fix: also match content types carrying parameters, e.g.
  // "application/xml; charset=utf-8" — strict equality used to miss
  // those and degrade real S3 errors to "Unknown".
  const contentType = response.headers["content-type"];
  if (typeof contentType === "string" && contentType.startsWith("application/xml")) {
    return parseAndGetXmlError(body, path);
  }
  return new S3Error("Unknown", path, {
    message: "Unknown error during S3 request."
  });
}
|
|
257
|
+
/**
 * Maps well-known HTTP status codes to S3Errors.
 * Returns `undefined` for codes without a dedicated mapping.
 */
function fromStatusCode(code, path) {
  if (code === 404) {
    return new S3Error("NoSuchKey", path, {
      message: "The specified key does not exist."
    });
  }
  if (code === 403) {
    return new S3Error("AccessDenied", path, {
      message: "Access denied to the key."
    });
  }
  // TODO: Add more status codes as needed
  return void 0;
}
|
|
272
|
+
/**
 * Parses an XML error body and extracts Code/Message into an S3Error.
 * Some services nest the fields under an `<Error>` root while others
 * return them at the top level; both shapes now share one code path
 * instead of two byte-identical branches.
 */
function parseAndGetXmlError(body, path) {
  let parsed = void 0;
  try {
    parsed = xmlParser.parse(body);
  } catch (cause) {
    return new S3Error("Unknown", path, {
      message: "Could not parse XML error response.",
      cause
    });
  }
  // Prefer the nested <Error> element when present (same truthiness
  // check as before), falling back to top-level fields.
  const e = parsed.Error ? parsed.Error : parsed;
  return new S3Error(e.Code || "Unknown", path, {
    message: e.Message || void 0
    // Message might be "",
  });
}
|
|
294
|
+
|
|
295
|
+
// src/S3Client.ts
|
|
296
|
+
var write = Symbol("write");
|
|
297
|
+
var stream = Symbol("stream");
|
|
298
|
+
var xmlParser2 = new XMLParser2();
|
|
299
|
+
var xmlBuilder = new XMLBuilder({
|
|
300
|
+
attributeNamePrefix: "$",
|
|
301
|
+
ignoreAttributes: false
|
|
302
|
+
});
|
|
303
|
+
var S3Client = class {
|
|
304
|
+
#options;
|
|
305
|
+
#keyCache = new KeyCache();
|
|
306
|
+
// TODO: pass options to this in client? Do we want to expose tjhe internal use of undici?
|
|
307
|
+
#dispatcher = new Agent();
|
|
308
|
+
/**
|
|
309
|
+
* Create a new instance of an S3 bucket so that credentials can be managed from a single instance instead of being passed to every method.
|
|
310
|
+
*
|
|
311
|
+
* @param options The default options to use for the S3 client.
|
|
312
|
+
*/
|
|
313
|
+
constructor(options) {
|
|
314
|
+
if (!options) {
|
|
315
|
+
throw new Error("`options` is required.");
|
|
316
|
+
}
|
|
317
|
+
const {
|
|
318
|
+
accessKeyId,
|
|
319
|
+
secretAccessKey,
|
|
320
|
+
endpoint,
|
|
321
|
+
region,
|
|
322
|
+
bucket,
|
|
323
|
+
sessionToken
|
|
324
|
+
} = options;
|
|
325
|
+
if (!accessKeyId || typeof accessKeyId !== "string") {
|
|
326
|
+
throw new Error("`accessKeyId` is required.");
|
|
327
|
+
}
|
|
328
|
+
if (!secretAccessKey || typeof secretAccessKey !== "string") {
|
|
329
|
+
throw new Error("`secretAccessKey` is required.");
|
|
330
|
+
}
|
|
331
|
+
if (!endpoint || typeof endpoint !== "string") {
|
|
332
|
+
throw new Error("`endpoint` is required.");
|
|
333
|
+
}
|
|
334
|
+
if (!region || typeof region !== "string") {
|
|
335
|
+
throw new Error("`region` is required.");
|
|
336
|
+
}
|
|
337
|
+
if (!bucket || typeof bucket !== "string") {
|
|
338
|
+
throw new Error("`bucket` is required.");
|
|
339
|
+
}
|
|
340
|
+
this.#options = {
|
|
341
|
+
accessKeyId,
|
|
342
|
+
secretAccessKey,
|
|
343
|
+
endpoint,
|
|
344
|
+
region,
|
|
345
|
+
bucket,
|
|
346
|
+
sessionToken
|
|
347
|
+
};
|
|
348
|
+
}
|
|
349
|
+
/**
|
|
350
|
+
* Creates an S3File instance for the given path.
|
|
351
|
+
*
|
|
352
|
+
* @param {string} path The path to the object in the bucket. ALso known as [object key](https://docs.aws.amazon.com/AmazonS3/latest/userguide/object-keys.html).
|
|
353
|
+
* We recommend not using the following characters in a key name because of significant special character handling, which isn't consistent across all applications (see [AWS docs](https://docs.aws.amazon.com/AmazonS3/latest/userguide/object-keys.html)):
|
|
354
|
+
* - Backslash (`\\`)
|
|
355
|
+
* - Left brace (`{`)
|
|
356
|
+
* - Non-printable ASCII characters (128–255 decimal characters)
|
|
357
|
+
* - Caret or circumflex (`^`)
|
|
358
|
+
* - Right brace (`}`)
|
|
359
|
+
* - Percent character (`%`)
|
|
360
|
+
* - Grave accent or backtick (`\``)
|
|
361
|
+
* - Right bracket (`]`)
|
|
362
|
+
* - Quotation mark (`"`)
|
|
363
|
+
* - Greater than sign (`>`)
|
|
364
|
+
* - Left bracket (`[`)
|
|
365
|
+
* - Tilde (`~`)
|
|
366
|
+
* - Less than sign (`<`)
|
|
367
|
+
* - Pound sign (`#`)
|
|
368
|
+
* - Vertical bar or pipe (`|`)
|
|
369
|
+
*
|
|
370
|
+
* lean-s3 does not enforce these restrictions.
|
|
371
|
+
*
|
|
372
|
+
* @param {Partial<CreateFileInstanceOptions>} [options] TODO
|
|
373
|
+
* @example
|
|
374
|
+
* ```js
|
|
375
|
+
* const file = client.file("image.jpg");
|
|
376
|
+
* await file.write(imageData);
|
|
377
|
+
*
|
|
378
|
+
* const configFile = client.file("config.json", {
|
|
379
|
+
* type: "application/json",
|
|
380
|
+
* acl: "private"
|
|
381
|
+
* });
|
|
382
|
+
* ```
|
|
383
|
+
*/
|
|
384
|
+
file(path, options) {
|
|
385
|
+
return new S3File(this, path, void 0, void 0, void 0);
|
|
386
|
+
}
|
|
387
|
+
/**
|
|
388
|
+
* Generate a presigned URL for temporary access to a file.
|
|
389
|
+
* Useful for generating upload/download URLs without exposing credentials.
|
|
390
|
+
* @returns The operation on {@link S3Client#presign.path} as a pre-signed URL.
|
|
391
|
+
*
|
|
392
|
+
* @example
|
|
393
|
+
* ```js
|
|
394
|
+
* const downloadUrl = client.presign("file.pdf", {
|
|
395
|
+
* expiresIn: 3600 // 1 hour
|
|
396
|
+
* });
|
|
397
|
+
* ```
|
|
398
|
+
*/
|
|
399
|
+
presign(path, {
|
|
400
|
+
method = "GET",
|
|
401
|
+
expiresIn = 3600,
|
|
402
|
+
// TODO: Maybe rename this to expiresInSeconds
|
|
403
|
+
storageClass,
|
|
404
|
+
acl,
|
|
405
|
+
region: regionOverride,
|
|
406
|
+
bucket: bucketOverride,
|
|
407
|
+
endpoint: endpointOverride
|
|
408
|
+
} = {}) {
|
|
409
|
+
const now2 = /* @__PURE__ */ new Date();
|
|
410
|
+
const date = getAmzDate(now2);
|
|
411
|
+
const options = this.#options;
|
|
412
|
+
const region = regionOverride ?? options.region;
|
|
413
|
+
const bucket = bucketOverride ?? options.bucket;
|
|
414
|
+
const endpoint = endpointOverride ?? options.endpoint;
|
|
415
|
+
const res = buildRequestUrl(endpoint, bucket, region, path);
|
|
416
|
+
const query = buildSearchParams(
|
|
417
|
+
`${options.accessKeyId}/${date.date}/${region}/s3/aws4_request`,
|
|
418
|
+
date,
|
|
419
|
+
expiresIn,
|
|
420
|
+
"host",
|
|
421
|
+
void 0,
|
|
422
|
+
storageClass,
|
|
423
|
+
options.sessionToken,
|
|
424
|
+
acl
|
|
425
|
+
);
|
|
426
|
+
const dataDigest = createCanonicalDataDigestHostOnly(
|
|
427
|
+
method,
|
|
428
|
+
res.pathname,
|
|
429
|
+
query,
|
|
430
|
+
res.host
|
|
431
|
+
);
|
|
432
|
+
const signingKey = this.#keyCache.computeIfAbsent(
|
|
433
|
+
date,
|
|
434
|
+
region,
|
|
435
|
+
options.accessKeyId,
|
|
436
|
+
options.secretAccessKey
|
|
437
|
+
);
|
|
438
|
+
const signature = signCanonicalDataHash(
|
|
439
|
+
signingKey,
|
|
440
|
+
dataDigest,
|
|
441
|
+
date,
|
|
442
|
+
region
|
|
443
|
+
);
|
|
444
|
+
res.search = `${query}&X-Amz-Signature=${signature}`;
|
|
445
|
+
return res.toString();
|
|
446
|
+
}
|
|
447
|
+
/**
|
|
448
|
+
* Uses [`DeleteObjects`](https://docs.aws.amazon.com/AmazonS3/latest/API/API_DeleteObjects.html) to delete multiple objects in a single request.
|
|
449
|
+
*/
|
|
450
|
+
async deleteObjects(objects, options = {}) {
|
|
451
|
+
const body = xmlBuilder.build({
|
|
452
|
+
Delete: {
|
|
453
|
+
Quiet: true,
|
|
454
|
+
Object: objects.map((o) => ({
|
|
455
|
+
Key: typeof o === "string" ? o : o.key
|
|
456
|
+
}))
|
|
457
|
+
}
|
|
458
|
+
});
|
|
459
|
+
const response = await this._signedRequest(
|
|
460
|
+
"POST",
|
|
461
|
+
"",
|
|
462
|
+
"delete=",
|
|
463
|
+
// "=" is needed by minio for some reason
|
|
464
|
+
body,
|
|
465
|
+
{
|
|
466
|
+
"content-md5": md5Base64(body)
|
|
467
|
+
},
|
|
468
|
+
void 0,
|
|
469
|
+
void 0,
|
|
470
|
+
this.#options.bucket,
|
|
471
|
+
options.signal
|
|
472
|
+
);
|
|
473
|
+
if (response.statusCode === 200) {
|
|
474
|
+
const text = await response.body.text();
|
|
475
|
+
let res = void 0;
|
|
476
|
+
try {
|
|
477
|
+
res = (xmlParser2.parse(text)?.DeleteResult || void 0)?.Error ?? [];
|
|
478
|
+
} catch (cause) {
|
|
479
|
+
throw new S3Error("Unknown", "", {
|
|
480
|
+
message: "S3 service responded with invalid XML.",
|
|
481
|
+
cause
|
|
482
|
+
});
|
|
483
|
+
}
|
|
484
|
+
if (!res || !Array.isArray(res)) {
|
|
485
|
+
throw new S3Error("Unknown", "", {
|
|
486
|
+
message: "Could not process response."
|
|
487
|
+
});
|
|
488
|
+
}
|
|
489
|
+
const errors = res.map((e) => ({
|
|
490
|
+
code: e.Code,
|
|
491
|
+
key: e.Key,
|
|
492
|
+
message: e.Message,
|
|
493
|
+
versionId: e.VersionId
|
|
494
|
+
}));
|
|
495
|
+
return errors.length > 0 ? { errors } : null;
|
|
496
|
+
}
|
|
497
|
+
if (400 <= response.statusCode && response.statusCode < 500) {
|
|
498
|
+
throw await getResponseError(response, "");
|
|
499
|
+
}
|
|
500
|
+
response.body.dump();
|
|
501
|
+
throw new Error(
|
|
502
|
+
`Response code not implemented yet: ${response.statusCode}`
|
|
503
|
+
);
|
|
504
|
+
}
|
|
505
|
+
/**
|
|
506
|
+
* Creates a new bucket on the S3 server.
|
|
507
|
+
*
|
|
508
|
+
* @param name The name of the bucket to create. AWS the name according to [some rules](https://docs.aws.amazon.com/AmazonS3/latest/userguide/bucketnamingrules.html). The most important ones are:
|
|
509
|
+
* - Bucket names must be between `3` (min) and `63` (max) characters long.
|
|
510
|
+
* - Bucket names can consist only of lowercase letters, numbers, periods (`.`), and hyphens (`-`).
|
|
511
|
+
* - Bucket names must begin and end with a letter or number.
|
|
512
|
+
* - Bucket names must not contain two adjacent periods.
|
|
513
|
+
* - Bucket names must not be formatted as an IP address (for example, `192.168.5.4`).
|
|
514
|
+
*
|
|
515
|
+
* @throws {Error} If the bucket name is invalid.
|
|
516
|
+
* @throws {S3Error} If the bucket could not be created, e.g. if it already exists.
|
|
517
|
+
* @remarks Uses [`CreateBucket`](https://docs.aws.amazon.com/AmazonS3/latest/API/API_CreateBucket.html)
|
|
518
|
+
*/
|
|
519
|
+
async createBucket(name, options) {
|
|
520
|
+
ensureValidBucketName(name);
|
|
521
|
+
let body = void 0;
|
|
522
|
+
if (options) {
|
|
523
|
+
const location = options.location && (options.location.name || options.location.type) ? {
|
|
524
|
+
Name: options.location.name ?? void 0,
|
|
525
|
+
Type: options.location.type ?? void 0
|
|
526
|
+
} : void 0;
|
|
527
|
+
const bucket = options.info && (options.info.dataRedundancy || options.info.type) ? {
|
|
528
|
+
DataRedundancy: options.info.dataRedundancy ?? void 0,
|
|
529
|
+
Type: options.info.type ?? void 0
|
|
530
|
+
} : void 0;
|
|
531
|
+
body = location || bucket || options.locationConstraint ? xmlBuilder.build({
|
|
532
|
+
CreateBucketConfiguration: {
|
|
533
|
+
$xmlns: "http://s3.amazonaws.com/doc/2006-03-01/",
|
|
534
|
+
LocationConstraint: options.locationConstraint ?? void 0,
|
|
535
|
+
Location: location,
|
|
536
|
+
Bucket: bucket
|
|
537
|
+
}
|
|
538
|
+
}) : void 0;
|
|
539
|
+
}
|
|
540
|
+
const additionalSignedHeaders = body ? { "content-md5": md5Base64(body) } : void 0;
|
|
541
|
+
const response = await this._signedRequest(
|
|
542
|
+
"PUT",
|
|
543
|
+
"",
|
|
544
|
+
void 0,
|
|
545
|
+
body,
|
|
546
|
+
additionalSignedHeaders,
|
|
547
|
+
void 0,
|
|
548
|
+
void 0,
|
|
549
|
+
name,
|
|
550
|
+
options?.signal
|
|
551
|
+
);
|
|
552
|
+
if (400 <= response.statusCode && response.statusCode < 500) {
|
|
553
|
+
throw await getResponseError(response, "");
|
|
554
|
+
}
|
|
555
|
+
await response.body.dump();
|
|
556
|
+
if (response.statusCode === 200) {
|
|
557
|
+
return;
|
|
558
|
+
}
|
|
559
|
+
throw new Error(`Response code not supported: ${response.statusCode}`);
|
|
560
|
+
}
|
|
561
|
+
/**
|
|
562
|
+
* Deletes a bucket from the S3 server.
|
|
563
|
+
* @param name The name of the bucket to delete. Same restrictions as in {@link S3Client#createBucket}.
|
|
564
|
+
* @throws {Error} If the bucket name is invalid.
|
|
565
|
+
* @throws {S3Error} If the bucket could not be deleted, e.g. if it is not empty.
|
|
566
|
+
* @remarks Uses [`DeleteBucket`](https://docs.aws.amazon.com/AmazonS3/latest/API/API_DeleteBucket.html).
|
|
567
|
+
*/
|
|
568
|
+
async deleteBucket(name, options) {
|
|
569
|
+
ensureValidBucketName(name);
|
|
570
|
+
const response = await this._signedRequest(
|
|
571
|
+
"DELETE",
|
|
572
|
+
"",
|
|
573
|
+
void 0,
|
|
574
|
+
void 0,
|
|
575
|
+
void 0,
|
|
576
|
+
void 0,
|
|
577
|
+
void 0,
|
|
578
|
+
name,
|
|
579
|
+
options?.signal
|
|
580
|
+
);
|
|
581
|
+
if (400 <= response.statusCode && response.statusCode < 500) {
|
|
582
|
+
throw await getResponseError(response, "");
|
|
583
|
+
}
|
|
584
|
+
await response.body.dump();
|
|
585
|
+
if (response.statusCode === 204) {
|
|
586
|
+
return;
|
|
587
|
+
}
|
|
588
|
+
throw new Error(`Response code not supported: ${response.statusCode}`);
|
|
589
|
+
}
|
|
590
|
+
/**
|
|
591
|
+
* Checks if a bucket exists.
|
|
592
|
+
* @param name The name of the bucket to delete. Same restrictions as in {@link S3Client#createBucket}.
|
|
593
|
+
* @throws {Error} If the bucket name is invalid.
|
|
594
|
+
* @remarks Uses [`HeadBucket`](https://docs.aws.amazon.com/AmazonS3/latest/API/API_HeadBucket.html).
|
|
595
|
+
*/
|
|
596
|
+
async bucketExists(name, options) {
|
|
597
|
+
ensureValidBucketName(name);
|
|
598
|
+
const response = await this._signedRequest(
|
|
599
|
+
"HEAD",
|
|
600
|
+
"",
|
|
601
|
+
void 0,
|
|
602
|
+
void 0,
|
|
603
|
+
void 0,
|
|
604
|
+
void 0,
|
|
605
|
+
void 0,
|
|
606
|
+
name,
|
|
607
|
+
options?.signal
|
|
608
|
+
);
|
|
609
|
+
if (response.statusCode !== 404 && 400 <= response.statusCode && response.statusCode < 500) {
|
|
610
|
+
throw await getResponseError(response, "");
|
|
611
|
+
}
|
|
612
|
+
await response.body.dump();
|
|
613
|
+
if (response.statusCode === 200) {
|
|
614
|
+
return true;
|
|
615
|
+
}
|
|
616
|
+
if (response.statusCode === 404) {
|
|
617
|
+
return false;
|
|
618
|
+
}
|
|
619
|
+
throw new Error(`Response code not supported: ${response.statusCode}`);
|
|
620
|
+
}
|
|
621
|
+
//#region list
|
|
622
|
+
/**
|
|
623
|
+
* Uses [`ListObjectsV2`](https://docs.aws.amazon.com/AmazonS3/latest/API/API_ListObjectsV2.html) to iterate over all keys. Pagination and continuation is handled internally.
|
|
624
|
+
*/
|
|
625
|
+
async *listIterating(options) {
|
|
626
|
+
const maxKeys = options?.internalPageSize ?? void 0;
|
|
627
|
+
let res = void 0;
|
|
628
|
+
let continuationToken = void 0;
|
|
629
|
+
do {
|
|
630
|
+
res = await this.list({
|
|
631
|
+
...options,
|
|
632
|
+
maxKeys,
|
|
633
|
+
continuationToken
|
|
634
|
+
});
|
|
635
|
+
if (!res || res.contents.length === 0) {
|
|
636
|
+
break;
|
|
637
|
+
}
|
|
638
|
+
yield* res.contents;
|
|
639
|
+
continuationToken = res.nextContinuationToken;
|
|
640
|
+
} while (continuationToken);
|
|
641
|
+
}
|
|
642
|
+
/**
|
|
643
|
+
* Implements [`ListObjectsV2`](https://docs.aws.amazon.com/AmazonS3/latest/API/API_ListObjectsV2.html) to iterate over all keys.
|
|
644
|
+
*/
|
|
645
|
+
async list(options = {}) {
|
|
646
|
+
let query = "";
|
|
647
|
+
if (typeof options.continuationToken !== "undefined") {
|
|
648
|
+
if (typeof options.continuationToken !== "string") {
|
|
649
|
+
throw new TypeError("`continuationToken` should be a `string`.");
|
|
650
|
+
}
|
|
651
|
+
query += `continuation-token=${encodeURIComponent(options.continuationToken)}&`;
|
|
652
|
+
}
|
|
653
|
+
query += "list-type=2";
|
|
654
|
+
if (typeof options.maxKeys !== "undefined") {
|
|
655
|
+
if (typeof options.maxKeys !== "number") {
|
|
656
|
+
throw new TypeError("`maxKeys` should be a `number`.");
|
|
657
|
+
}
|
|
658
|
+
query += `&max-keys=${options.maxKeys}`;
|
|
659
|
+
}
|
|
660
|
+
if (options.prefix) {
|
|
661
|
+
if (typeof options.prefix !== "string") {
|
|
662
|
+
throw new TypeError("`prefix` should be a `string`.");
|
|
663
|
+
}
|
|
664
|
+
query += `&prefix=${encodeURIComponent(options.prefix)}`;
|
|
665
|
+
}
|
|
666
|
+
if (typeof options.startAfter !== "undefined") {
|
|
667
|
+
if (typeof options.startAfter !== "string") {
|
|
668
|
+
throw new TypeError("`startAfter` should be a `string`.");
|
|
669
|
+
}
|
|
670
|
+
query += `&start-after=${encodeURIComponent(options.startAfter)}`;
|
|
671
|
+
}
|
|
672
|
+
const response = await this._signedRequest(
|
|
673
|
+
"GET",
|
|
674
|
+
"",
|
|
675
|
+
query,
|
|
676
|
+
void 0,
|
|
677
|
+
void 0,
|
|
678
|
+
void 0,
|
|
679
|
+
void 0,
|
|
680
|
+
options.bucket ?? this.#options.bucket,
|
|
681
|
+
options.signal
|
|
682
|
+
);
|
|
683
|
+
if (response.statusCode === 200) {
|
|
684
|
+
const text = await response.body.text();
|
|
685
|
+
let res = void 0;
|
|
686
|
+
try {
|
|
687
|
+
res = xmlParser2.parse(text)?.ListBucketResult;
|
|
688
|
+
} catch (cause) {
|
|
689
|
+
throw new S3Error("Unknown", "", {
|
|
690
|
+
message: "S3 service responded with invalid XML.",
|
|
691
|
+
cause
|
|
692
|
+
});
|
|
693
|
+
}
|
|
694
|
+
if (!res) {
|
|
695
|
+
throw new S3Error("Unknown", "", {
|
|
696
|
+
message: "Could not read bucket contents."
|
|
697
|
+
});
|
|
698
|
+
}
|
|
699
|
+
const contents = Array.isArray(res.Contents) ? res.Contents?.map(S3BucketEntry.parse) ?? [] : res.Contents ? [res.Contents] : [];
|
|
700
|
+
return {
|
|
701
|
+
name: res.Name,
|
|
702
|
+
prefix: res.Prefix,
|
|
703
|
+
startAfter: res.StartAfter,
|
|
704
|
+
isTruncated: res.IsTruncated,
|
|
705
|
+
continuationToken: res.ContinuationToken,
|
|
706
|
+
maxKeys: res.MaxKeys,
|
|
707
|
+
keyCount: res.KeyCount,
|
|
708
|
+
nextContinuationToken: res.NextContinuationToken,
|
|
709
|
+
contents
|
|
710
|
+
};
|
|
711
|
+
}
|
|
712
|
+
response.body.dump();
|
|
713
|
+
throw new Error(
|
|
714
|
+
`Response code not implemented yet: ${response.statusCode}`
|
|
715
|
+
);
|
|
716
|
+
}
|
|
717
|
+
//#endregion
|
|
718
|
+
/**
|
|
719
|
+
* Do not use this. This is an internal method.
|
|
720
|
+
* TODO: Maybe move this into a separate free function?
|
|
721
|
+
* @internal
|
|
722
|
+
*/
|
|
723
|
+
async _signedRequest(method, pathWithoutBucket, query, body, additionalSignedHeaders, additionalUnsignedHeaders, contentHash, bucket, signal = void 0) {
|
|
724
|
+
const endpoint = this.#options.endpoint;
|
|
725
|
+
const region = this.#options.region;
|
|
726
|
+
const effectiveBucket = bucket ?? this.#options.bucket;
|
|
727
|
+
const url = buildRequestUrl(
|
|
728
|
+
endpoint,
|
|
729
|
+
effectiveBucket,
|
|
730
|
+
region,
|
|
731
|
+
pathWithoutBucket
|
|
732
|
+
);
|
|
733
|
+
if (query) {
|
|
734
|
+
url.search = query;
|
|
735
|
+
}
|
|
736
|
+
const now2 = now();
|
|
737
|
+
const contentHashStr = contentHash?.toString("hex") ?? unsignedPayload;
|
|
738
|
+
const headersToBeSigned = prepareHeadersForSigning({
|
|
739
|
+
host: url.host,
|
|
740
|
+
"x-amz-date": now2.dateTime,
|
|
741
|
+
"x-amz-content-sha256": contentHashStr,
|
|
742
|
+
...additionalSignedHeaders
|
|
743
|
+
});
|
|
744
|
+
try {
|
|
745
|
+
return await request(url, {
|
|
746
|
+
method,
|
|
747
|
+
signal,
|
|
748
|
+
dispatcher: this.#dispatcher,
|
|
749
|
+
headers: {
|
|
750
|
+
...headersToBeSigned,
|
|
751
|
+
authorization: this.#getAuthorizationHeader(
|
|
752
|
+
method,
|
|
753
|
+
url.pathname,
|
|
754
|
+
query ?? "",
|
|
755
|
+
now2,
|
|
756
|
+
headersToBeSigned,
|
|
757
|
+
region,
|
|
758
|
+
contentHashStr,
|
|
759
|
+
this.#options.accessKeyId,
|
|
760
|
+
this.#options.secretAccessKey
|
|
761
|
+
),
|
|
762
|
+
...additionalUnsignedHeaders,
|
|
763
|
+
"user-agent": "lean-s3"
|
|
764
|
+
},
|
|
765
|
+
body
|
|
766
|
+
});
|
|
767
|
+
} catch (cause) {
|
|
768
|
+
signal?.throwIfAborted();
|
|
769
|
+
throw new S3Error("Unknown", pathWithoutBucket, {
|
|
770
|
+
message: "Unknown error during S3 request.",
|
|
771
|
+
cause
|
|
772
|
+
});
|
|
773
|
+
}
|
|
774
|
+
}
|
|
775
|
+
/**
|
|
776
|
+
* @internal
|
|
777
|
+
* @param {import("./index.d.ts").UndiciBodyInit} data TODO
|
|
778
|
+
*/
|
|
779
|
+
async [write](path, data, contentType, contentLength, contentHash, rageStart, rangeEndExclusive, signal = void 0) {
|
|
780
|
+
const bucket = this.#options.bucket;
|
|
781
|
+
const endpoint = this.#options.endpoint;
|
|
782
|
+
const region = this.#options.region;
|
|
783
|
+
const url = buildRequestUrl(endpoint, bucket, region, path);
|
|
784
|
+
const now2 = now();
|
|
785
|
+
const contentHashStr = contentHash?.toString("hex") ?? unsignedPayload;
|
|
786
|
+
const headersToBeSigned = prepareHeadersForSigning({
|
|
787
|
+
"content-length": contentLength?.toString() ?? void 0,
|
|
788
|
+
"content-type": contentType,
|
|
789
|
+
host: url.host,
|
|
790
|
+
range: getRangeHeader(rageStart, rangeEndExclusive),
|
|
791
|
+
"x-amz-content-sha256": contentHashStr,
|
|
792
|
+
"x-amz-date": now2.dateTime
|
|
793
|
+
});
|
|
794
|
+
let response = void 0;
|
|
795
|
+
try {
|
|
796
|
+
response = await request(url, {
|
|
797
|
+
method: "PUT",
|
|
798
|
+
signal,
|
|
799
|
+
dispatcher: this.#dispatcher,
|
|
800
|
+
headers: {
|
|
801
|
+
...headersToBeSigned,
|
|
802
|
+
authorization: this.#getAuthorizationHeader(
|
|
803
|
+
"PUT",
|
|
804
|
+
url.pathname,
|
|
805
|
+
url.search,
|
|
806
|
+
now2,
|
|
807
|
+
headersToBeSigned,
|
|
808
|
+
region,
|
|
809
|
+
contentHashStr,
|
|
810
|
+
this.#options.accessKeyId,
|
|
811
|
+
this.#options.secretAccessKey
|
|
812
|
+
),
|
|
813
|
+
"user-agent": "lean-s3"
|
|
814
|
+
},
|
|
815
|
+
body: data
|
|
816
|
+
});
|
|
817
|
+
} catch (cause) {
|
|
818
|
+
signal?.throwIfAborted();
|
|
819
|
+
throw new S3Error("Unknown", path, {
|
|
820
|
+
message: "Unknown error during S3 request.",
|
|
821
|
+
cause
|
|
822
|
+
});
|
|
823
|
+
}
|
|
824
|
+
const status = response.statusCode;
|
|
825
|
+
if (200 <= status && status < 300) {
|
|
826
|
+
return;
|
|
827
|
+
}
|
|
828
|
+
throw await getResponseError(response, path);
|
|
829
|
+
}
|
|
830
|
+
  // TODO: Support abortSignal
  /**
   * Opens a streaming signed `GET` for `path` and exposes the response body
   * as a byte-typed `ReadableStream`. Errors (network, HTTP, parse) are
   * delivered through the stream controller; cancelling the stream aborts
   * the underlying request via an internal `AbortController`.
   *
   * @internal
   */
  [stream](path, contentHash, rageStart, rangeEndExclusive) {
    const bucket = this.#options.bucket;
    const endpoint = this.#options.endpoint;
    const region = this.#options.region;
    const now2 = now();
    const url = buildRequestUrl(endpoint, bucket, region, path);
    // `range` is undefined for a whole-object read; also used below to decide
    // whether a 200 vs. 206 status is acceptable.
    const range = getRangeHeader(rageStart, rangeEndExclusive);
    // Fall back to the unsigned-payload sentinel when no hash was precomputed.
    const contentHashStr = contentHash?.toString("hex") ?? unsignedPayload;
    const headersToBeSigned = prepareHeadersForSigning({
      "amz-sdk-invocation-id": crypto.randomUUID(),
      // TODO: Maybe support retries and do "amz-sdk-request": attempt=1; max=3
      host: url.host,
      range,
      // Hetzner doesnt care if the x-amz-content-sha256 header is missing, R2 requires it to be present
      "x-amz-content-sha256": contentHashStr,
      "x-amz-date": now2.dateTime
    });
    // Used only by cancel() to tear down the in-flight request.
    const ac = new AbortController();
    return new ReadableStream({
      type: "bytes",
      start: (controller) => {
        // Wraps any transport-level failure into an S3Error and fails the stream.
        const onNetworkError = (cause) => {
          controller.error(
            new S3Error("Unknown", path, {
              message: void 0,
              cause
            })
          );
        };
        request(url, {
          method: "GET",
          signal: ac.signal,
          dispatcher: this.#dispatcher,
          headers: {
            ...headersToBeSigned,
            authorization: this.#getAuthorizationHeader(
              "GET",
              url.pathname,
              url.search,
              now2,
              headersToBeSigned,
              region,
              contentHashStr,
              this.#options.accessKeyId,
              this.#options.secretAccessKey
            ),
            "user-agent": "lean-s3"
          }
        }).then((response) => {
          const onData = controller.enqueue.bind(controller);
          const onClose = controller.close.bind(controller);
          // A range request must be answered with 206; a full read with 200.
          const expectPartialResponse = range !== void 0;
          const status = response.statusCode;
          if (status === 200) {
            if (expectPartialResponse) {
              return controller.error(
                new S3Error("Unknown", path, {
                  message: "Expected partial response to range request."
                })
              );
            }
            // Pipe the undici body events straight into the stream controller.
            response.body.on("data", onData);
            response.body.once("error", onNetworkError);
            response.body.once("end", onClose);
            return;
          }
          if (status === 206) {
            if (!expectPartialResponse) {
              return controller.error(
                new S3Error("Unknown", path, {
                  message: "Received partial response but expected a full response."
                })
              );
            }
            response.body.on("data", onData);
            response.body.once("error", onNetworkError);
            response.body.once("end", onClose);
            return;
          }
          if (400 <= status && status < 500) {
            // NOTE(review): `responseText` is always undefined here, so the
            // non-XML fallthrough below errors with `cause: undefined`.
            const responseText = void 0;
            // NOTE(review): `ct` is unused; the condition re-reads the header.
            const ct = response.headers["content-type"];
            if (response.headers["content-type"] === "application/xml") {
              // Parse the XML error document to recover the S3 error code/message.
              return response.body.text().then((body) => {
                let error = void 0;
                try {
                  error = xmlParser2.parse(body);
                } catch (cause) {
                  return controller.error(
                    new S3Error("Unknown", path, {
                      message: "Could not parse XML error response.",
                      cause
                    })
                  );
                }
                return controller.error(
                  new S3Error(error.Code || "Unknown", path, {
                    message: error.Message || void 0
                    // Message might be "",
                  })
                );
              }, onNetworkError);
            }
            return controller.error(
              new S3Error("Unknown", path, {
                message: void 0,
                cause: responseText
              })
            );
          }
          // 1xx/3xx/5xx and anything else are not handled yet.
          return controller.error(
            new Error(
              `Handling for status code ${status} not implemented yet. You might want to open an issue and describe your situation.`
            )
          );
        }, onNetworkError);
      },
      cancel(reason) {
        ac.abort(reason);
      }
    });
  }
|
|
956
|
+
#getAuthorizationHeader(method, path, query, date, sortedSignedHeaders, region, contentHashStr, accessKeyId, secretAccessKey) {
|
|
957
|
+
const dataDigest = createCanonicalDataDigest(
|
|
958
|
+
method,
|
|
959
|
+
path,
|
|
960
|
+
query,
|
|
961
|
+
sortedSignedHeaders,
|
|
962
|
+
contentHashStr
|
|
963
|
+
);
|
|
964
|
+
const signingKey = this.#keyCache.computeIfAbsent(
|
|
965
|
+
date,
|
|
966
|
+
region,
|
|
967
|
+
accessKeyId,
|
|
968
|
+
secretAccessKey
|
|
969
|
+
);
|
|
970
|
+
const signature = signCanonicalDataHash(
|
|
971
|
+
signingKey,
|
|
972
|
+
dataDigest,
|
|
973
|
+
date,
|
|
974
|
+
region
|
|
975
|
+
);
|
|
976
|
+
const signedHeadersSpec = Object.keys(sortedSignedHeaders).join(";");
|
|
977
|
+
const credentialSpec = `${accessKeyId}/${date.date}/${region}/s3/aws4_request`;
|
|
978
|
+
return `AWS4-HMAC-SHA256 Credential=${credentialSpec}, SignedHeaders=${signedHeadersSpec}, Signature=${signature}`;
|
|
979
|
+
}
|
|
980
|
+
};
|
|
981
|
+
/**
 * Builds the query string for a presigned URL.
 *
 * Parameters are emitted in alphabetical order (a SigV4 requirement for the
 * canonical request); optional parts are skipped entirely when not provided.
 *
 * @param {string} amzCredential Credential scope string (URI-encoded here).
 * @param {*} date AmzDate-style object providing `dateTime`.
 * @param {number} expiresIn Validity window in seconds.
 * @param {string} headerList Semicolon-joined signed header names.
 * @param {string | undefined} contentHashStr Optional payload hash.
 * @param {string | undefined} storageClass Optional storage class.
 * @param {string | undefined} sessionToken Optional STS session token.
 * @param {string | undefined} acl Optional canned ACL.
 * @returns {string} The assembled query string (without a leading `?`).
 */
function buildSearchParams(amzCredential, date, expiresIn, headerList, contentHashStr, storageClass, sessionToken, acl) {
  const parts = [];
  if (acl) {
    parts.push(`X-Amz-Acl=${encodeURIComponent(acl)}`);
  }
  parts.push("X-Amz-Algorithm=AWS4-HMAC-SHA256");
  if (contentHashStr) {
    parts.push(`X-Amz-Content-Sha256=${contentHashStr}`);
  }
  parts.push(`X-Amz-Credential=${encodeURIComponent(amzCredential)}`);
  parts.push(`X-Amz-Date=${date.dateTime}`);
  parts.push(`X-Amz-Expires=${expiresIn}`);
  if (sessionToken) {
    parts.push(`X-Amz-Security-Token=${encodeURIComponent(sessionToken)}`);
  }
  parts.push(`X-Amz-SignedHeaders=${encodeURIComponent(headerList)}`);
  if (storageClass) {
    parts.push(`X-Amz-Storage-Class=${storageClass}`);
  }
  return parts.join("&");
}
|
|
1002
|
+
/**
 * Validates a bucket name against the subset of S3 naming rules this library
 * enforces: length 3–63, lowercase alphanumerics plus `.` and `-`, no leading
 * or trailing period, and no `..` sequence.
 *
 * @param {string} name Candidate bucket name.
 * @throws {Error} When any rule is violated. Returns nothing on success.
 */
function ensureValidBucketName(name) {
  const { length } = name;
  if (length < 3 || length > 63) {
    throw new Error("`name` must be between 3 and 63 characters long.");
  }
  const boundedByPeriod = name[0] === "." || name[length - 1] === ".";
  if (boundedByPeriod) {
    throw new Error("`name` must not start or end with a period (.)");
  }
  const allowedChars = /^[a-z0-9.-]+$/;
  if (!allowedChars.test(name)) {
    throw new Error(
      "`name` can only contain lowercase letters, numbers, periods (.), and hyphens (-)."
    );
  }
  if (name.indexOf("..") !== -1) {
    throw new Error("`name` must not contain two adjacent periods (..)");
  }
}
|
|
1018
|
+
|
|
1019
|
+
// src/S3File.ts
|
|
1020
|
+
var S3File = class _S3File {
  // Owning client; all network operations are delegated to it.
  #client;
  // Object key within the client's bucket.
  #path;
  // Optional slice bounds (byte range) applied to reads and writes.
  #start;
  #end;
  // MIME type sent on writes; defaults to "application/octet-stream".
  #contentType;
  /**
   * @internal
   * @param {*} client Owning S3 client.
   * @param {string} path Object key.
   * @param {number | undefined} start Inclusive slice start (>= 0 when given).
   * @param {number | undefined} end Exclusive slice end (>= start when both given).
   * @param {string | undefined} contentType Content type for writes.
   * @throws {Error} If `start` or `end` form an invalid slice.
   */
  constructor(client, path, start, end, contentType) {
    if (typeof start === "number" && start < 0) {
      throw new Error("Invalid slice `start`.");
    }
    if (typeof end === "number" && (end < 0 || typeof start === "number" && end < start)) {
      throw new Error("Invalid slice `end`.");
    }
    this.#client = client;
    this.#path = path;
    this.#start = start;
    this.#end = end;
    this.#contentType = contentType ?? "application/octet-stream";
  }
  // TODO: slice overloads
  /**
   * Returns a new `S3File` view over the same object with different slice
   * bounds and/or content type. The receiver is not modified.
   */
  slice(start, end, contentType) {
    return new _S3File(
      this.#client,
      this.#path,
      start ?? void 0,
      end ?? void 0,
      contentType ?? this.#contentType
    );
  }
  /**
   * Get the stat of a file in the bucket. Uses `HEAD` request to check existence.
   *
   * @remarks Uses [`HeadObject`](https://docs.aws.amazon.com/AmazonS3/latest/API/API_HeadObject.html).
   * @throws {S3Error} If the file does not exist or the server has some other issues.
   * @throws {Error} If the server returns an invalid response.
   */
  async stat({ signal } = {}) {
    const response = await this.#client._signedRequest(
      "HEAD",
      this.#path,
      void 0,
      void 0,
      void 0,
      void 0,
      void 0,
      void 0,
      signal
    );
    // HEAD has no meaningful body; drain it so the connection can be reused.
    await response.body.dump();
    if (200 <= response.statusCode && response.statusCode < 300) {
      const result = S3Stat.tryParseFromHeaders(response.headers);
      if (!result) {
        throw new Error(
          "S3 server returned an invalid response for `HeadObject`"
        );
      }
      return result;
    }
    throw fromStatusCode(response.statusCode, this.#path) ?? new Error(
      `S3 server returned an unsupported status code for \`HeadObject\`: ${response.statusCode}`
    );
  }
  /**
   * Check if a file exists in the bucket. Uses `HEAD` request to check existence.
   *
   * @remarks Uses [`HeadObject`](https://docs.aws.amazon.com/AmazonS3/latest/API/API_HeadObject.html).
   */
  async exists({
    signal
  } = {}) {
    const response = await this.#client._signedRequest(
      "HEAD",
      this.#path,
      void 0,
      void 0,
      void 0,
      void 0,
      void 0,
      void 0,
      signal
    );
    await response.body.dump();
    if (200 <= response.statusCode && response.statusCode < 300) {
      return true;
    }
    if (response.statusCode === 404) {
      return false;
    }
    // Any other status (e.g. 403) is surfaced instead of being treated as "missing".
    throw fromStatusCode(response.statusCode, this.#path) ?? new Error(
      `S3 server returned an unsupported status code for \`HeadObject\`: ${response.statusCode}`
    );
  }
  /**
   * Delete a file from the bucket.
   *
   * @remarks - Uses [`DeleteObject`](https://docs.aws.amazon.com/AmazonS3/latest/API/API_DeleteObject.html).
   * - `versionId` not supported.
   *
   * @param {Partial<S3FileDeleteOptions>} [options]
   *
   * @example
   * ```js
   * // Simple delete
   * await client.unlink("old-file.txt");
   *
   * // With error handling
   * try {
   *   await client.unlink("file.dat");
   *   console.log("File deleted");
   * } catch (err) {
   *   console.error("Delete failed:", err);
   * }
   * ```
   */
  async delete({ signal } = {}) {
    const response = await this.#client._signedRequest(
      "DELETE",
      this.#path,
      void 0,
      void 0,
      void 0,
      void 0,
      void 0,
      void 0,
      signal
    );
    if (response.statusCode === 204) {
      await response.body.dump();
      return;
    }
    throw await getResponseError(response, this.#path);
  }
  toString() {
    return `S3File { path: "${this.#path}" }`;
  }
  /** @returns {Promise<unknown>} */
  json() {
    return new Response(this.stream()).json();
  }
  // TODO
  // /** @returns {Promise<Uint8Array>} */
  // bytes() {
  //   return new Response(this.stream()).bytes(); // TODO: Does this exist?
  // }
  /** @returns {Promise<ArrayBuffer>} */
  arrayBuffer() {
    return new Response(this.stream()).arrayBuffer();
  }
  /** @returns {Promise<string>} */
  text() {
    return new Response(this.stream()).text();
  }
  /** @returns {Promise<Blob>} */
  blob() {
    return new Response(this.stream()).blob();
  }
  /** @returns {ReadableStream<Uint8Array>} */
  stream() {
    return this.#client[stream](this.#path, void 0, this.#start, this.#end);
  }
  /**
   * Normalizes the supported input types into `[body, contentLength, sha256]`,
   * where length/hash are `undefined` when they cannot be determined cheaply
   * (e.g. for a Node `Readable`).
   */
  async #transformData(data) {
    if (typeof data === "string") {
      const binary = new TextEncoder();
      const bytes = binary.encode(data);
      return [
        bytes,
        bytes.byteLength,
        sha256(bytes)
        // TODO: Maybe use some streaming to compute hash while encoding?
      ];
    }
    if (data instanceof Blob) {
      const bytes = await data.bytes();
      return [
        bytes,
        bytes.byteLength,
        sha256(bytes)
        // TODO: Maybe use some streaming to compute hash while encoding?
      ];
    }
    if (data instanceof Readable) {
      // Stream bodies are passed through; length/hash unknown up front.
      return [data, void 0, void 0];
    }
    if (data instanceof ArrayBuffer || data instanceof SharedArrayBuffer || ArrayBuffer.isView(data)) {
      return [
        data,
        data.byteLength,
        void 0
        // TODO: Compute hash some day
      ];
    }
    assertNever(data);
  }
  /**
   * Write `data` as the contents of this file (honoring slice bounds set via
   * `slice()`).
   *
   * @param {ByteSource} data
   * @param {{ signal?: AbortSignal }} [options] Optional abort signal.
   *   Added backward-compatibly for consistency with `stat`/`exists`/`delete`;
   *   previously the signal was hard-coded to `undefined`.
   * @returns {Promise<void>}
   */
  async write(data, { signal } = {}) {
    const [bytes, length, hash] = await this.#transformData(data);
    return await this.#client[write](
      this.#path,
      bytes,
      this.#contentType,
      length,
      hash,
      this.#start,
      this.#end,
      signal
    );
  }
  /*
  // Future API?
  writer(): WritableStream<ArrayBufferLike | ArrayBufferView> {
    throw new Error("Not implemented");
  }
  // Future API?
  setTags(): Promise<void> {
    throw new Error("Not implemented");
  }
  getTags(): Promise<unknown> {
    throw new Error("Not implemented");
  }
  */
};
|
|
1248
|
+
/**
 * Exhaustiveness guard: always throws, reporting the runtime type of the
 * unexpected value.
 *
 * @param {never} v Value that should have been handled by an earlier branch.
 * @throws {TypeError} Always.
 */
function assertNever(v) {
  const kind = typeof v;
  throw new TypeError(`Expected value not to have type ${kind}`);
}
|
|
1251
|
+
export {
|
|
1252
|
+
S3BucketEntry,
|
|
1253
|
+
S3Client,
|
|
1254
|
+
S3Error,
|
|
1255
|
+
S3File,
|
|
1256
|
+
S3Stat
|
|
1257
|
+
};
|