@aws-sdk/lib-storage 3.903.0 → 3.906.0

This diff shows the contents of publicly released versions of this package, as published to the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
package/dist-cjs/index.js CHANGED
@@ -1,572 +1,512 @@
- "use strict";
- var __defProp = Object.defineProperty;
- var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
- var __getOwnPropNames = Object.getOwnPropertyNames;
- var __hasOwnProp = Object.prototype.hasOwnProperty;
- var __name = (target, value) => __defProp(target, "name", { value, configurable: true });
- var __export = (target, all) => {
- for (var name in all)
- __defProp(target, name, { get: all[name], enumerable: true });
- };
- var __copyProps = (to, from, except, desc) => {
- if (from && typeof from === "object" || typeof from === "function") {
- for (let key of __getOwnPropNames(from))
- if (!__hasOwnProp.call(to, key) && key !== except)
- __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
- }
- return to;
- };
- var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
-
- // src/index.ts
- var index_exports = {};
- __export(index_exports, {
- Upload: () => Upload
- });
- module.exports = __toCommonJS(index_exports);
+ 'use strict';
 
- // src/Upload.ts
- var import_client_s3 = require("@aws-sdk/client-s3");
- var import_abort_controller = require("@smithy/abort-controller");
- var import_middleware_endpoint = require("@smithy/middleware-endpoint");
- var import_smithy_client = require("@smithy/smithy-client");
- var import_events = require("events");
+ var clientS3 = require('@aws-sdk/client-s3');
+ var abortController = require('@smithy/abort-controller');
+ var middlewareEndpoint = require('@smithy/middleware-endpoint');
+ var smithyClient = require('@smithy/smithy-client');
+ var events = require('events');
+ var buffer = require('buffer');
+ var runtimeConfig = require('./runtimeConfig');
+ var stream = require('stream');
 
- // src/byteLength.ts
- var import_buffer = require("buffer");
- var import_runtimeConfig = require("././runtimeConfig");
- var byteLength = /* @__PURE__ */ __name((input) => {
- if (input == null) {
- return 0;
- }
- if (typeof input === "string") {
- return import_buffer.Buffer.byteLength(input);
- }
- if (typeof input.byteLength === "number") {
- return input.byteLength;
- } else if (typeof input.length === "number") {
- return input.length;
- } else if (typeof input.size === "number") {
- return input.size;
- } else if (typeof input.start === "number" && typeof input.end === "number") {
- return input.end + 1 - input.start;
- } else if (typeof input.path === "string") {
- try {
- return import_runtimeConfig.runtimeConfig.lstatSync(input.path).size;
- } catch (error) {
- return void 0;
+ const byteLength = (input) => {
+ if (input == null) {
+ return 0;
 }
- }
- return void 0;
- }, "byteLength");
-
- // src/byteLengthSource.ts
- var import_runtimeConfig2 = require("././runtimeConfig");
- var byteLengthSource = /* @__PURE__ */ __name((input, override) => {
- if (override != null) {
- return "the ContentLength property of the params set by the caller" /* CONTENT_LENGTH */;
- }
- if (input == null) {
- return "a null or undefined Body" /* EMPTY_INPUT */;
- }
- if (typeof input === "string") {
- return "the encoded byte length of the Body string" /* STRING_LENGTH */;
- }
- if (typeof input.byteLength === "number") {
- return "the byteLength of a typed byte array such as Uint8Array" /* TYPED_ARRAY */;
- } else if (typeof input.length === "number") {
- return "the value of Body.length" /* LENGTH */;
- } else if (typeof input.size === "number") {
- return "the value of Body.size" /* SIZE */;
- } else if (typeof input.start === "number" && typeof input.end === "number") {
- return "the numeric difference between Body.start and Body.end" /* START_END_DIFF */;
- } else if (typeof input.path === "string") {
- try {
- import_runtimeConfig2.runtimeConfig.lstatSync(input.path).size;
- return "the size of the file given by Body.path on disk as reported by lstatSync" /* LSTAT */;
- } catch (error) {
- return void 0;
- }
- }
- return void 0;
- }, "byteLengthSource");
-
- // src/chunker.ts
-
- var import_stream = require("stream");
+ if (typeof input === "string") {
+ return buffer.Buffer.byteLength(input);
+ }
+ if (typeof input.byteLength === "number") {
+ return input.byteLength;
+ }
+ else if (typeof input.length === "number") {
+ return input.length;
+ }
+ else if (typeof input.size === "number") {
+ return input.size;
+ }
+ else if (typeof input.start === "number" && typeof input.end === "number") {
+ return input.end + 1 - input.start;
+ }
+ else if (runtimeConfig.runtimeConfig.isFileReadStream(input)) {
+ try {
+ return runtimeConfig.runtimeConfig.lstatSync(input.path).size;
+ }
+ catch (error) {
+ return undefined;
+ }
+ }
+ return undefined;
+ };
 
- // src/chunks/getChunkStream.ts
+ var BYTE_LENGTH_SOURCE;
+ (function (BYTE_LENGTH_SOURCE) {
+ BYTE_LENGTH_SOURCE["EMPTY_INPUT"] = "a null or undefined Body";
+ BYTE_LENGTH_SOURCE["CONTENT_LENGTH"] = "the ContentLength property of the params set by the caller";
+ BYTE_LENGTH_SOURCE["STRING_LENGTH"] = "the encoded byte length of the Body string";
+ BYTE_LENGTH_SOURCE["TYPED_ARRAY"] = "the byteLength of a typed byte array such as Uint8Array";
+ BYTE_LENGTH_SOURCE["LENGTH"] = "the value of Body.length";
+ BYTE_LENGTH_SOURCE["SIZE"] = "the value of Body.size";
+ BYTE_LENGTH_SOURCE["START_END_DIFF"] = "the numeric difference between Body.start and Body.end";
+ BYTE_LENGTH_SOURCE["LSTAT"] = "the size of the file given by Body.path on disk as reported by lstatSync";
+ })(BYTE_LENGTH_SOURCE || (BYTE_LENGTH_SOURCE = {}));
+ const byteLengthSource = (input, override) => {
+ if (override != null) {
+ return BYTE_LENGTH_SOURCE.CONTENT_LENGTH;
+ }
+ if (input == null) {
+ return BYTE_LENGTH_SOURCE.EMPTY_INPUT;
+ }
+ if (typeof input === "string") {
+ return BYTE_LENGTH_SOURCE.STRING_LENGTH;
+ }
+ if (typeof input.byteLength === "number") {
+ return BYTE_LENGTH_SOURCE.TYPED_ARRAY;
+ }
+ else if (typeof input.length === "number") {
+ return BYTE_LENGTH_SOURCE.LENGTH;
+ }
+ else if (typeof input.size === "number") {
+ return BYTE_LENGTH_SOURCE.SIZE;
+ }
+ else if (typeof input.start === "number" && typeof input.end === "number") {
+ return BYTE_LENGTH_SOURCE.START_END_DIFF;
+ }
+ else if (runtimeConfig.runtimeConfig.isFileReadStream(input)) {
+ try {
+ runtimeConfig.runtimeConfig.lstatSync(input.path).size;
+ return BYTE_LENGTH_SOURCE.LSTAT;
+ }
+ catch (error) {
+ return undefined;
+ }
+ }
+ return undefined;
+ };
 
 async function* getChunkStream(data, partSize, getNextData) {
- let partNumber = 1;
- const currentBuffer = { chunks: [], length: 0 };
- for await (const datum of getNextData(data)) {
- currentBuffer.chunks.push(datum);
- currentBuffer.length += datum.byteLength;
- while (currentBuffer.length > partSize) {
- const dataChunk = currentBuffer.chunks.length > 1 ? import_buffer.Buffer.concat(currentBuffer.chunks) : currentBuffer.chunks[0];
- yield {
+ let partNumber = 1;
+ const currentBuffer = { chunks: [], length: 0 };
+ for await (const datum of getNextData(data)) {
+ currentBuffer.chunks.push(datum);
+ currentBuffer.length += datum.byteLength;
+ while (currentBuffer.length > partSize) {
+ const dataChunk = currentBuffer.chunks.length > 1 ? buffer.Buffer.concat(currentBuffer.chunks) : currentBuffer.chunks[0];
+ yield {
+ partNumber,
+ data: dataChunk.subarray(0, partSize),
+ };
+ currentBuffer.chunks = [dataChunk.subarray(partSize)];
+ currentBuffer.length = currentBuffer.chunks[0].byteLength;
+ partNumber += 1;
+ }
+ }
+ yield {
 partNumber,
- data: dataChunk.subarray(0, partSize)
- };
- currentBuffer.chunks = [dataChunk.subarray(partSize)];
- currentBuffer.length = currentBuffer.chunks[0].byteLength;
- partNumber += 1;
- }
- }
- yield {
- partNumber,
- data: currentBuffer.chunks.length !== 1 ? import_buffer.Buffer.concat(currentBuffer.chunks) : currentBuffer.chunks[0],
- lastPart: true
- };
+ data: currentBuffer.chunks.length !== 1 ? buffer.Buffer.concat(currentBuffer.chunks) : currentBuffer.chunks[0],
+ lastPart: true,
+ };
 }
- __name(getChunkStream, "getChunkStream");
 
- // src/chunks/getChunkUint8Array.ts
 async function* getChunkUint8Array(data, partSize) {
- let partNumber = 1;
- let startByte = 0;
- let endByte = partSize;
- while (endByte < data.byteLength) {
+ let partNumber = 1;
+ let startByte = 0;
+ let endByte = partSize;
+ while (endByte < data.byteLength) {
+ yield {
+ partNumber,
+ data: data.subarray(startByte, endByte),
+ };
+ partNumber += 1;
+ startByte = endByte;
+ endByte = startByte + partSize;
+ }
 yield {
- partNumber,
- data: data.subarray(startByte, endByte)
+ partNumber,
+ data: data.subarray(startByte),
+ lastPart: true,
 };
- partNumber += 1;
- startByte = endByte;
- endByte = startByte + partSize;
- }
- yield {
- partNumber,
- data: data.subarray(startByte),
- lastPart: true
- };
 }
- __name(getChunkUint8Array, "getChunkUint8Array");
-
- // src/chunks/getDataReadable.ts
 
 async function* getDataReadable(data) {
- for await (const chunk of data) {
- if (import_buffer.Buffer.isBuffer(chunk) || chunk instanceof Uint8Array) {
- yield chunk;
- } else {
- yield import_buffer.Buffer.from(chunk);
+ for await (const chunk of data) {
+ if (buffer.Buffer.isBuffer(chunk) || chunk instanceof Uint8Array) {
+ yield chunk;
+ }
+ else {
+ yield buffer.Buffer.from(chunk);
+ }
 }
- }
 }
- __name(getDataReadable, "getDataReadable");
-
- // src/chunks/getDataReadableStream.ts
 
 async function* getDataReadableStream(data) {
- const reader = data.getReader();
- try {
- while (true) {
- const { done, value } = await reader.read();
- if (done) {
- return;
- }
- if (import_buffer.Buffer.isBuffer(value) || value instanceof Uint8Array) {
- yield value;
- } else {
- yield import_buffer.Buffer.from(value);
- }
- }
- } catch (e) {
- throw e;
- } finally {
- reader.releaseLock();
- }
+ const reader = data.getReader();
+ try {
+ while (true) {
+ const { done, value } = await reader.read();
+ if (done) {
+ return;
+ }
+ if (buffer.Buffer.isBuffer(value) || value instanceof Uint8Array) {
+ yield value;
+ }
+ else {
+ yield buffer.Buffer.from(value);
+ }
+ }
+ }
+ catch (e) {
+ throw e;
+ }
+ finally {
+ reader.releaseLock();
+ }
 }
- __name(getDataReadableStream, "getDataReadableStream");
 
- // src/chunker.ts
- var getChunk = /* @__PURE__ */ __name((data, partSize) => {
- if (data instanceof Uint8Array) {
- return getChunkUint8Array(data, partSize);
- }
- if (data instanceof import_stream.Readable) {
- return getChunkStream(data, partSize, getDataReadable);
- }
- if (data instanceof String || typeof data === "string") {
- return getChunkUint8Array(import_buffer.Buffer.from(data), partSize);
- }
- if (typeof data.stream === "function") {
- return getChunkStream(data.stream(), partSize, getDataReadableStream);
- }
- if (data instanceof ReadableStream) {
- return getChunkStream(data, partSize, getDataReadableStream);
- }
- throw new Error(
- "Body Data is unsupported format, expected data to be one of: string | Uint8Array | Buffer | Readable | ReadableStream | Blob;."
- );
- }, "getChunk");
+ const getChunk = (data, partSize) => {
+ if (data instanceof Uint8Array) {
+ return getChunkUint8Array(data, partSize);
+ }
+ if (data instanceof stream.Readable) {
+ return getChunkStream(data, partSize, getDataReadable);
+ }
+ if (data instanceof String || typeof data === "string") {
+ return getChunkUint8Array(buffer.Buffer.from(data), partSize);
+ }
+ if (typeof data.stream === "function") {
+ return getChunkStream(data.stream(), partSize, getDataReadableStream);
+ }
+ if (data instanceof ReadableStream) {
+ return getChunkStream(data, partSize, getDataReadableStream);
+ }
+ throw new Error("Body Data is unsupported format, expected data to be one of: string | Uint8Array | Buffer | Readable | ReadableStream | Blob;.");
+ };
 
- // src/Upload.ts
- var Upload = class _Upload extends import_events.EventEmitter {
- static {
- __name(this, "Upload");
- }
- /**
- * @internal
- * modified in testing only.
- */
- static MIN_PART_SIZE = 1024 * 1024 * 5;
- /**
- * S3 multipart upload does not allow more than 10,000 parts.
- */
- MAX_PARTS = 1e4;
- // Defaults.
- queueSize = 4;
- partSize;
- leavePartsOnError = false;
- tags = [];
- client;
- params;
- // used for reporting progress.
- totalBytes;
- totalBytesSource;
- bytesUploadedSoFar;
- // used in the upload.
- abortController;
- concurrentUploaders = [];
- createMultiPartPromise;
- abortMultipartUploadCommand = null;
- uploadedParts = [];
- uploadEnqueuedPartsCount = 0;
- expectedPartsCount;
- /**
- * Last UploadId if the upload was done with MultipartUpload and not PutObject.
- */
- uploadId;
- uploadEvent;
- isMultiPart = true;
- singleUploadResult;
- sent = false;
- constructor(options) {
- super();
- this.queueSize = options.queueSize || this.queueSize;
- this.leavePartsOnError = options.leavePartsOnError || this.leavePartsOnError;
- this.tags = options.tags || this.tags;
- this.client = options.client;
- this.params = options.params;
- if (!this.params) {
- throw new Error(`InputError: Upload requires params to be passed to upload.`);
- }
- this.totalBytes = this.params.ContentLength ?? byteLength(this.params.Body);
- this.totalBytesSource = byteLengthSource(this.params.Body, this.params.ContentLength);
- this.bytesUploadedSoFar = 0;
- this.abortController = options.abortController ?? new import_abort_controller.AbortController();
- this.partSize = options.partSize || Math.max(_Upload.MIN_PART_SIZE, Math.floor((this.totalBytes || 0) / this.MAX_PARTS));
- if (this.totalBytes !== void 0) {
- this.expectedPartsCount = Math.ceil(this.totalBytes / this.partSize);
- }
- this.__validateInput();
- }
- async abort() {
- this.abortController.abort();
- }
- async done() {
- if (this.sent) {
- throw new Error(
- "@aws-sdk/lib-storage: this instance of Upload has already executed .done(). Create a new instance."
- );
- }
- this.sent = true;
- return await Promise.race([this.__doMultipartUpload(), this.__abortTimeout(this.abortController.signal)]);
- }
- on(event, listener) {
- this.uploadEvent = event;
- return super.on(event, listener);
- }
- async __uploadUsingPut(dataPart) {
- this.isMultiPart = false;
- const params = { ...this.params, Body: dataPart.data };
- const clientConfig = this.client.config;
- const requestHandler = clientConfig.requestHandler;
- const eventEmitter = requestHandler instanceof import_events.EventEmitter ? requestHandler : null;
- const uploadEventListener = /* @__PURE__ */ __name((event) => {
- this.bytesUploadedSoFar = event.loaded;
- this.totalBytes = event.total;
- this.__notifyProgress({
- loaded: this.bytesUploadedSoFar,
- total: this.totalBytes,
- part: dataPart.partNumber,
- Key: this.params.Key,
- Bucket: this.params.Bucket
- });
- }, "uploadEventListener");
- if (eventEmitter !== null) {
- eventEmitter.on("xhr.upload.progress", uploadEventListener);
- }
- const resolved = await Promise.all([this.client.send(new import_client_s3.PutObjectCommand(params)), clientConfig?.endpoint?.()]);
- const putResult = resolved[0];
- let endpoint = resolved[1];
- if (!endpoint) {
- endpoint = (0, import_middleware_endpoint.toEndpointV1)(
- await (0, import_middleware_endpoint.getEndpointFromInstructions)(params, import_client_s3.PutObjectCommand, {
- ...clientConfig
- })
- );
- }
- if (!endpoint) {
- throw new Error('Could not resolve endpoint from S3 "client.config.endpoint()" nor EndpointsV2.');
- }
- if (eventEmitter !== null) {
- eventEmitter.off("xhr.upload.progress", uploadEventListener);
- }
- const locationKey = this.params.Key.split("/").map((segment) => (0, import_smithy_client.extendedEncodeURIComponent)(segment)).join("/");
- const locationBucket = (0, import_smithy_client.extendedEncodeURIComponent)(this.params.Bucket);
- const Location = (() => {
- const endpointHostnameIncludesBucket = endpoint.hostname.startsWith(`${locationBucket}.`);
- const forcePathStyle = this.client.config.forcePathStyle;
- const optionalPort = endpoint.port ? `:${endpoint.port}` : ``;
- if (forcePathStyle) {
- return `${endpoint.protocol}//${endpoint.hostname}${optionalPort}/${locationBucket}/${locationKey}`;
- }
- if (endpointHostnameIncludesBucket) {
- return `${endpoint.protocol}//${endpoint.hostname}${optionalPort}/${locationKey}`;
- }
- return `${endpoint.protocol}//${locationBucket}.${endpoint.hostname}${optionalPort}/${locationKey}`;
- })();
- this.singleUploadResult = {
- ...putResult,
- Bucket: this.params.Bucket,
- Key: this.params.Key,
- Location
- };
- const totalSize = byteLength(dataPart.data);
- this.__notifyProgress({
- loaded: totalSize,
- total: totalSize,
- part: 1,
- Key: this.params.Key,
- Bucket: this.params.Bucket
- });
- }
- async __createMultipartUpload() {
- const requestChecksumCalculation = await this.client.config.requestChecksumCalculation();
- if (!this.createMultiPartPromise) {
- const createCommandParams = { ...this.params, Body: void 0 };
- if (requestChecksumCalculation === "WHEN_SUPPORTED") {
- createCommandParams.ChecksumAlgorithm = this.params.ChecksumAlgorithm || import_client_s3.ChecksumAlgorithm.CRC32;
- }
- this.createMultiPartPromise = this.client.send(new import_client_s3.CreateMultipartUploadCommand(createCommandParams)).then((createMpuResponse) => {
- this.abortMultipartUploadCommand = new import_client_s3.AbortMultipartUploadCommand({
- Bucket: this.params.Bucket,
- Key: this.params.Key,
- UploadId: createMpuResponse.UploadId
- });
- return createMpuResponse;
- });
- }
- return this.createMultiPartPromise;
- }
- async __doConcurrentUpload(dataFeeder) {
- for await (const dataPart of dataFeeder) {
- if (this.uploadEnqueuedPartsCount > this.MAX_PARTS) {
- throw new Error(
- `Exceeded ${this.MAX_PARTS} parts in multipart upload to Bucket: ${this.params.Bucket} Key: ${this.params.Key}.`
- );
- }
- if (this.abortController.signal.aborted) {
- return;
- }
- if (dataPart.partNumber === 1 && dataPart.lastPart) {
- return await this.__uploadUsingPut(dataPart);
- }
- if (!this.uploadId) {
- const { UploadId } = await this.__createMultipartUpload();
- this.uploadId = UploadId;
- if (this.abortController.signal.aborted) {
- return;
+ class Upload extends events.EventEmitter {
+ static MIN_PART_SIZE = 1024 * 1024 * 5;
+ MAX_PARTS = 10_000;
+ queueSize = 4;
+ partSize;
+ leavePartsOnError = false;
+ tags = [];
+ client;
+ params;
+ totalBytes;
+ totalBytesSource;
+ bytesUploadedSoFar;
+ abortController;
+ concurrentUploaders = [];
+ createMultiPartPromise;
+ abortMultipartUploadCommand = null;
+ uploadedParts = [];
+ uploadEnqueuedPartsCount = 0;
+ expectedPartsCount;
+ uploadId;
+ uploadEvent;
+ isMultiPart = true;
+ singleUploadResult;
+ sent = false;
+ constructor(options) {
+ super();
+ this.queueSize = options.queueSize || this.queueSize;
+ this.leavePartsOnError = options.leavePartsOnError || this.leavePartsOnError;
+ this.tags = options.tags || this.tags;
+ this.client = options.client;
+ this.params = options.params;
+ if (!this.params) {
+ throw new Error(`InputError: Upload requires params to be passed to upload.`);
+ }
+ this.totalBytes = this.params.ContentLength ?? byteLength(this.params.Body);
+ this.totalBytesSource = byteLengthSource(this.params.Body, this.params.ContentLength);
+ this.bytesUploadedSoFar = 0;
+ this.abortController = options.abortController ?? new abortController.AbortController();
+ this.partSize =
+ options.partSize || Math.max(Upload.MIN_PART_SIZE, Math.floor((this.totalBytes || 0) / this.MAX_PARTS));
+ if (this.totalBytes !== undefined) {
+ this.expectedPartsCount = Math.ceil(this.totalBytes / this.partSize);
+ }
+ this.__validateInput();
+ }
+ async abort() {
+ this.abortController.abort();
+ }
+ async done() {
+ if (this.sent) {
+ throw new Error("@aws-sdk/lib-storage: this instance of Upload has already executed .done(). Create a new instance.");
 }
- }
- const partSize = byteLength(dataPart.data) || 0;
- const requestHandler = this.client.config.requestHandler;
- const eventEmitter = requestHandler instanceof import_events.EventEmitter ? requestHandler : null;
- let lastSeenBytes = 0;
- const uploadEventListener = /* @__PURE__ */ __name((event, request) => {
- const requestPartSize = Number(request.query["partNumber"]) || -1;
- if (requestPartSize !== dataPart.partNumber) {
- return;
+ this.sent = true;
+ return await Promise.race([this.__doMultipartUpload(), this.__abortTimeout(this.abortController.signal)]);
+ }
+ on(event, listener) {
+ this.uploadEvent = event;
+ return super.on(event, listener);
+ }
+ async __uploadUsingPut(dataPart) {
+ this.isMultiPart = false;
+ const params = { ...this.params, Body: dataPart.data };
+ const clientConfig = this.client.config;
+ const requestHandler = clientConfig.requestHandler;
+ const eventEmitter = requestHandler instanceof events.EventEmitter ? requestHandler : null;
+ const uploadEventListener = (event) => {
+ this.bytesUploadedSoFar = event.loaded;
+ this.totalBytes = event.total;
+ this.__notifyProgress({
+ loaded: this.bytesUploadedSoFar,
+ total: this.totalBytes,
+ part: dataPart.partNumber,
+ Key: this.params.Key,
+ Bucket: this.params.Bucket,
+ });
+ };
+ if (eventEmitter !== null) {
+ eventEmitter.on("xhr.upload.progress", uploadEventListener);
+ }
+ const resolved = await Promise.all([this.client.send(new clientS3.PutObjectCommand(params)), clientConfig?.endpoint?.()]);
+ const putResult = resolved[0];
+ let endpoint = resolved[1];
+ if (!endpoint) {
+ endpoint = middlewareEndpoint.toEndpointV1(await middlewareEndpoint.getEndpointFromInstructions(params, clientS3.PutObjectCommand, {
+ ...clientConfig,
+ }));
 }
- if (event.total && partSize) {
- this.bytesUploadedSoFar += event.loaded - lastSeenBytes;
- lastSeenBytes = event.loaded;
+ if (!endpoint) {
+ throw new Error('Could not resolve endpoint from S3 "client.config.endpoint()" nor EndpointsV2.');
 }
+ if (eventEmitter !== null) {
+ eventEmitter.off("xhr.upload.progress", uploadEventListener);
+ }
+ const locationKey = this.params
+ .Key.split("/")
+ .map((segment) => smithyClient.extendedEncodeURIComponent(segment))
+ .join("/");
+ const locationBucket = smithyClient.extendedEncodeURIComponent(this.params.Bucket);
+ const Location = (() => {
+ const endpointHostnameIncludesBucket = endpoint.hostname.startsWith(`${locationBucket}.`);
+ const forcePathStyle = this.client.config.forcePathStyle;
+ const optionalPort = endpoint.port ? `:${endpoint.port}` : ``;
+ if (forcePathStyle) {
+ return `${endpoint.protocol}//${endpoint.hostname}${optionalPort}/${locationBucket}/${locationKey}`;
+ }
+ if (endpointHostnameIncludesBucket) {
+ return `${endpoint.protocol}//${endpoint.hostname}${optionalPort}/${locationKey}`;
+ }
+ return `${endpoint.protocol}//${locationBucket}.${endpoint.hostname}${optionalPort}/${locationKey}`;
+ })();
+ this.singleUploadResult = {
+ ...putResult,
+ Bucket: this.params.Bucket,
+ Key: this.params.Key,
+ Location,
+ };
+ const totalSize = byteLength(dataPart.data);
 this.__notifyProgress({
- loaded: this.bytesUploadedSoFar,
- total: this.totalBytes,
- part: dataPart.partNumber,
- Key: this.params.Key,
- Bucket: this.params.Bucket
+ loaded: totalSize,
+ total: totalSize,
+ part: 1,
+ Key: this.params.Key,
+ Bucket: this.params.Bucket,
 });
- }, "uploadEventListener");
- if (eventEmitter !== null) {
- eventEmitter.on("xhr.upload.progress", uploadEventListener);
- }
- this.uploadEnqueuedPartsCount += 1;
- this.__validateUploadPart(dataPart);
- const partResult = await this.client.send(
- new import_client_s3.UploadPartCommand({
- ...this.params,
- // dataPart.data is chunked into a non-streaming buffer
- // so the ContentLength from the input should not be used for MPU.
- ContentLength: void 0,
- UploadId: this.uploadId,
- Body: dataPart.data,
- PartNumber: dataPart.partNumber
- })
- );
- if (eventEmitter !== null) {
- eventEmitter.off("xhr.upload.progress", uploadEventListener);
- }
- if (this.abortController.signal.aborted) {
- return;
- }
- if (!partResult.ETag) {
- throw new Error(
- `Part ${dataPart.partNumber} is missing ETag in UploadPart response. Missing Bucket CORS configuration for ETag header?`
- );
- }
- this.uploadedParts.push({
- PartNumber: dataPart.partNumber,
- ETag: partResult.ETag,
- ...partResult.ChecksumCRC32 && { ChecksumCRC32: partResult.ChecksumCRC32 },
- ...partResult.ChecksumCRC32C && { ChecksumCRC32C: partResult.ChecksumCRC32C },
- ...partResult.ChecksumSHA1 && { ChecksumSHA1: partResult.ChecksumSHA1 },
- ...partResult.ChecksumSHA256 && { ChecksumSHA256: partResult.ChecksumSHA256 }
- });
- if (eventEmitter === null) {
- this.bytesUploadedSoFar += partSize;
- }
- this.__notifyProgress({
- loaded: this.bytesUploadedSoFar,
- total: this.totalBytes,
- part: dataPart.partNumber,
- Key: this.params.Key,
- Bucket: this.params.Bucket
- });
- }
- }
- async __doMultipartUpload() {
- const dataFeeder = getChunk(this.params.Body, this.partSize);
- const concurrentUploaderFailures = [];
- for (let index = 0; index < this.queueSize; index++) {
- const currentUpload = this.__doConcurrentUpload(dataFeeder).catch((err) => {
- concurrentUploaderFailures.push(err);
- });
- this.concurrentUploaders.push(currentUpload);
- }
- await Promise.all(this.concurrentUploaders);
- if (concurrentUploaderFailures.length >= 1) {
- await this.markUploadAsAborted();
- throw concurrentUploaderFailures[0];
- }
- if (this.abortController.signal.aborted) {
- await this.markUploadAsAborted();
- throw Object.assign(new Error("Upload aborted."), { name: "AbortError" });
- }
- let result;
- if (this.isMultiPart) {
- const { expectedPartsCount, uploadedParts, totalBytes, totalBytesSource } = this;
- if (totalBytes !== void 0 && expectedPartsCount !== void 0 && uploadedParts.length !== expectedPartsCount) {
- throw new Error(`Expected ${expectedPartsCount} part(s) but uploaded ${uploadedParts.length} part(s).
+ }
+ async __createMultipartUpload() {
+ const requestChecksumCalculation = await this.client.config.requestChecksumCalculation();
+ if (!this.createMultiPartPromise) {
+ const createCommandParams = { ...this.params, Body: undefined };
+ if (requestChecksumCalculation === "WHEN_SUPPORTED") {
+ createCommandParams.ChecksumAlgorithm = this.params.ChecksumAlgorithm || clientS3.ChecksumAlgorithm.CRC32;
+ }
+ this.createMultiPartPromise = this.client
+ .send(new clientS3.CreateMultipartUploadCommand(createCommandParams))
+ .then((createMpuResponse) => {
+ this.abortMultipartUploadCommand = new clientS3.AbortMultipartUploadCommand({
+ Bucket: this.params.Bucket,
+ Key: this.params.Key,
+ UploadId: createMpuResponse.UploadId,
+ });
+ return createMpuResponse;
+ });
+ }
+ return this.createMultiPartPromise;
+ }
+ async __doConcurrentUpload(dataFeeder) {
+ for await (const dataPart of dataFeeder) {
+ if (this.uploadEnqueuedPartsCount > this.MAX_PARTS) {
+ throw new Error(`Exceeded ${this.MAX_PARTS} parts in multipart upload to Bucket: ${this.params.Bucket} Key: ${this.params.Key}.`);
+ }
+ if (this.abortController.signal.aborted) {
+ return;
+ }
+ if (dataPart.partNumber === 1 && dataPart.lastPart) {
+ return await this.__uploadUsingPut(dataPart);
+ }
+ if (!this.uploadId) {
+ const { UploadId } = await this.__createMultipartUpload();
+ this.uploadId = UploadId;
+ if (this.abortController.signal.aborted) {
+ return;
+ }
+ }
+ const partSize = byteLength(dataPart.data) || 0;
+ const requestHandler = this.client.config.requestHandler;
+ const eventEmitter = requestHandler instanceof events.EventEmitter ? requestHandler : null;
+ let lastSeenBytes = 0;
+ const uploadEventListener = (event, request) => {
+ const requestPartSize = Number(request.query["partNumber"]) || -1;
+ if (requestPartSize !== dataPart.partNumber) {
+ return;
+ }
+ if (event.total && partSize) {
+ this.bytesUploadedSoFar += event.loaded - lastSeenBytes;
+ lastSeenBytes = event.loaded;
+ }
+ this.__notifyProgress({
+ loaded: this.bytesUploadedSoFar,
+ total: this.totalBytes,
+ part: dataPart.partNumber,
+ Key: this.params.Key,
+ Bucket: this.params.Bucket,
+ });
+ };
+ if (eventEmitter !== null) {
+ eventEmitter.on("xhr.upload.progress", uploadEventListener);
+ }
+ this.uploadEnqueuedPartsCount += 1;
+ this.__validateUploadPart(dataPart);
+ const partResult = await this.client.send(new clientS3.UploadPartCommand({
+ ...this.params,
+ ContentLength: undefined,
+ UploadId: this.uploadId,
+ Body: dataPart.data,
+ PartNumber: dataPart.partNumber,
+ }));
+ if (eventEmitter !== null) {
+ eventEmitter.off("xhr.upload.progress", uploadEventListener);
+ }
+ if (this.abortController.signal.aborted) {
+ return;
+ }
+ if (!partResult.ETag) {
+ throw new Error(`Part ${dataPart.partNumber} is missing ETag in UploadPart response. Missing Bucket CORS configuration for ETag header?`);
+ }
+ this.uploadedParts.push({
+ PartNumber: dataPart.partNumber,
+ ETag: partResult.ETag,
+ ...(partResult.ChecksumCRC32 && { ChecksumCRC32: partResult.ChecksumCRC32 }),
+ ...(partResult.ChecksumCRC32C && { ChecksumCRC32C: partResult.ChecksumCRC32C }),
+ ...(partResult.ChecksumSHA1 && { ChecksumSHA1: partResult.ChecksumSHA1 }),
+ ...(partResult.ChecksumSHA256 && { ChecksumSHA256: partResult.ChecksumSHA256 }),
+ });
+ if (eventEmitter === null) {
+ this.bytesUploadedSoFar += partSize;
+ }
+ this.__notifyProgress({
+ loaded: this.bytesUploadedSoFar,
+ total: this.totalBytes,
+ part: dataPart.partNumber,
+ Key: this.params.Key,
+ Bucket: this.params.Bucket,
+ });
+ }
+ }
+ async __doMultipartUpload() {
+ const dataFeeder = getChunk(this.params.Body, this.partSize);
+ const concurrentUploaderFailures = [];
+ for (let index = 0; index < this.queueSize; index++) {
+ const currentUpload = this.__doConcurrentUpload(dataFeeder).catch((err) => {
+ concurrentUploaderFailures.push(err);
+ });
+ this.concurrentUploaders.push(currentUpload);
+ }
+ await Promise.all(this.concurrentUploaders);
+ if (concurrentUploaderFailures.length >= 1) {
+ await this.markUploadAsAborted();
+ throw concurrentUploaderFailures[0];
+ }
+ if (this.abortController.signal.aborted) {
+ await this.markUploadAsAborted();
+ throw Object.assign(new Error("Upload aborted."), { name: "AbortError" });
+ }
+ let result;
+ if (this.isMultiPart) {
+ const { expectedPartsCount, uploadedParts, totalBytes, totalBytesSource } = this;
+ if (totalBytes !== undefined && expectedPartsCount !== undefined && uploadedParts.length !== expectedPartsCount) {
+ throw new Error(`Expected ${expectedPartsCount} part(s) but uploaded ${uploadedParts.length} part(s).
 The expected part count is based on the byte-count of the input.params.Body,
 which was read from ${totalBytesSource} and is ${totalBytes}.
 If this is not correct, provide an override value by setting a number
 to input.params.ContentLength in bytes.
 `);
- }
- this.uploadedParts.sort((a, b) => a.PartNumber - b.PartNumber);
- const uploadCompleteParams = {
- ...this.params,
- Body: void 0,
- UploadId: this.uploadId,
- MultipartUpload: {
- Parts: this.uploadedParts
+ }
+ this.uploadedParts.sort((a, b) => a.PartNumber - b.PartNumber);
+ const uploadCompleteParams = {
+ ...this.params,
+ Body: undefined,
+ UploadId: this.uploadId,
+ MultipartUpload: {
+ Parts: this.uploadedParts,
+ },
+ };
+ result = await this.client.send(new clientS3.CompleteMultipartUploadCommand(uploadCompleteParams));
+ if (typeof result?.Location === "string" && result.Location.includes("%2F")) {
+ result.Location = result.Location.replace(/%2F/g, "/");
+ }
 }
- };
- result = await this.client.send(new import_client_s3.CompleteMultipartUploadCommand(uploadCompleteParams));
- if (typeof result?.Location === "string" && result.Location.includes("%2F")) {
- result.Location = result.Location.replace(/%2F/g, "/");
- }
- } else {
- result = this.singleUploadResult;
- }
- this.abortMultipartUploadCommand = null;
- if (this.tags.length) {
- await this.client.send(
- new import_client_s3.PutObjectTaggingCommand({
- ...this.params,
- Tagging: {
- TagSet: this.tags
- }
- })
- );
- }
- return result;
- }
- /**
- * Abort the last multipart upload in progress
- * if we know the upload id, the user did not specify to leave the parts, and
- * we have a prepared AbortMultipartUpload command.
- */
- async markUploadAsAborted() {
- if (this.uploadId && !this.leavePartsOnError && null !== this.abortMultipartUploadCommand) {
- await this.client.send(this.abortMultipartUploadCommand);
- this.abortMultipartUploadCommand = null;
- }
- }
- __notifyProgress(progress) {
- if (this.uploadEvent) {
- this.emit(this.uploadEvent, progress);
- }
- }
- async __abortTimeout(abortSignal) {
- return new Promise((resolve, reject) => {
- abortSignal.onabort = () => {
- const abortError = new Error("Upload aborted.");
- abortError.name = "AbortError";
- reject(abortError);
- };
- });
- }
- __validateUploadPart(dataPart) {
- const actualPartSize = byteLength(dataPart.data);
- if (actualPartSize === void 0) {
- throw new Error(
- `A dataPart was generated without a measurable data chunk size for part number ${dataPart.partNumber}`
- );
- }
- if (dataPart.partNumber === 1 && dataPart.lastPart) {
- return;
- }
- if (!dataPart.lastPart && actualPartSize !== this.partSize) {
- throw new Error(
- `The byte size for part number ${dataPart.partNumber}, size ${actualPartSize} does not match expected size ${this.partSize}`
- );
- }
- }
- __validateInput() {
- if (!this.client) {
- throw new Error(`InputError: Upload requires a AWS client to do uploads with.`);
- }
- if (this.partSize < _Upload.MIN_PART_SIZE) {
- throw new Error(
- `EntityTooSmall: Your proposed upload part size [${this.partSize}] is smaller than the minimum allowed size [${_Upload.MIN_PART_SIZE}] (5MB)`
- );
- }
- if (this.queueSize < 1) {
- throw new Error(`Queue size: Must have at least one uploading queue.`);
- }
- }
- };
- // Annotate the CommonJS export names for ESM import in node:
-
- 0 && (module.exports = {
- Upload
- });
+ else {
+ result = this.singleUploadResult;
+ }
+ this.abortMultipartUploadCommand = null;
+ if (this.tags.length) {
+ await this.client.send(new clientS3.PutObjectTaggingCommand({
+ ...this.params,
+ Tagging: {
+ TagSet: this.tags,
+ },
+ }));
+ }
+ return result;
+ }
+ async markUploadAsAborted() {
+ if (this.uploadId && !this.leavePartsOnError && null !== this.abortMultipartUploadCommand) {
+ await this.client.send(this.abortMultipartUploadCommand);
+ this.abortMultipartUploadCommand = null;
+ }
+ }
+ __notifyProgress(progress) {
+ if (this.uploadEvent) {
+ this.emit(this.uploadEvent, progress);
+ }
+ }
+ async __abortTimeout(abortSignal) {
+ return new Promise((resolve, reject) => {
+ abortSignal.onabort = () => {
+ const abortError = new Error("Upload aborted.");
+ abortError.name = "AbortError";
+ reject(abortError);
+ };
+ });
+ }
+ __validateUploadPart(dataPart) {
+ const actualPartSize = byteLength(dataPart.data);
+ if (actualPartSize === undefined) {
+ throw new Error(`A dataPart was generated without a measurable data chunk size for part number ${dataPart.partNumber}`);
+ }
+ if (dataPart.partNumber === 1 && dataPart.lastPart) {
+ return;
+ }
+ if (!dataPart.lastPart && actualPartSize !== this.partSize) {
+ throw new Error(`The byte size for part number ${dataPart.partNumber}, size ${actualPartSize} does not match expected size ${this.partSize}`);
+ }
+ }
+ __validateInput() {
+ if (!this.client) {
+ throw new Error(`InputError: Upload requires a AWS client to do uploads with.`);
+ }
+ if (this.partSize < Upload.MIN_PART_SIZE) {
+ throw new Error(`EntityTooSmall: Your proposed upload part size [${this.partSize}] is smaller than the minimum allowed size [${Upload.MIN_PART_SIZE}] (5MB)`);
+ }
+ if (this.queueSize < 1) {
+ throw new Error(`Queue size: Must have at least one uploading queue.`);
+ }
+ }
+ }
 
+ exports.Upload = Upload;
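
For orientation, here is a minimal usage sketch of the exported Upload class whose implementation changed above. Bucket, key, and file names are placeholders; the options shown correspond to the class fields visible in the diff, and the progress event name follows the package's documented usage.

// Hedged sketch: driving the Upload class from the diff above.
const { S3Client } = require("@aws-sdk/client-s3");
const { Upload } = require("@aws-sdk/lib-storage");
const { createReadStream } = require("fs");

const upload = new Upload({
  client: new S3Client({}),
  params: {
    Bucket: "example-bucket",                 // placeholder
    Key: "example-key",                       // placeholder
    Body: createReadStream("./big-file.bin"), // placeholder file
  },
  queueSize: 4,              // concurrent part uploads (default 4, per the class fields)
  partSize: 1024 * 1024 * 5, // must be >= Upload.MIN_PART_SIZE (5 MB), per __validateInput
  leavePartsOnError: false,  // false => AbortMultipartUploadCommand is sent on failure
});

upload.on("httpUploadProgress", (progress) => {
  console.log(progress.part, progress.loaded, progress.total);
});

upload.done().then((result) => console.log(result.Location)).catch(console.error);

Note that a Body small enough to fit in a single part (partNumber 1 with lastPart set) is sent via PutObjectCommand instead of a multipart upload, as __doConcurrentUpload shows.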
@@ -7,4 +7,7 @@ exports.runtimeConfig = {
 ...runtimeConfig_shared_1.runtimeConfigShared,
 runtime: "node",
 lstatSync: fs_1.lstatSync,
+ isFileReadStream(f) {
+ return f instanceof fs_1.ReadStream;
+ },
 };
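
The practical effect of the new isFileReadStream guard, sketched below: byteLength previously treated any Body carrying a string path as a file on disk and sized it with lstatSync, whereas now only genuine fs.ReadStream instances take that branch.

// Hedged sketch of the guard's behavior (the file path is a placeholder).
const { createReadStream, ReadStream } = require("fs");

const fromFile = createReadStream("./data.bin"); // a real fs.ReadStream
const pathShaped = { path: "./data.bin" };       // merely has a `path` property

console.log(fromFile instanceof ReadStream);   // true  -> sized via lstatSync(fromFile.path)
console.log(pathShaped instanceof ReadStream); // false -> no lstat call; its size stays
                                               // undefined unless another byteLength branch
                                               // (byteLength, length, size, start/end) applies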
@@ -3,4 +3,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
 exports.runtimeConfigShared = void 0;
 exports.runtimeConfigShared = {
 lstatSync: () => { },
+ isFileReadStream(f) {
+ return false;
+ },
 };
@@ -19,7 +19,7 @@ export const byteLength = (input) => {
 else if (typeof input.start === "number" && typeof input.end === "number") {
 return input.end + 1 - input.start;
 }
- else if (typeof input.path === "string") {
+ else if (runtimeConfig.isFileReadStream(input)) {
 try {
 return runtimeConfig.lstatSync(input.path).size;
 }
@@ -32,7 +32,7 @@ export const byteLengthSource = (input, override) => {
 else if (typeof input.start === "number" && typeof input.end === "number") {
 return BYTE_LENGTH_SOURCE.START_END_DIFF;
 }
- else if (typeof input.path === "string") {
+ else if (runtimeConfig.isFileReadStream(input)) {
 try {
 runtimeConfig.lstatSync(input.path).size;
 return BYTE_LENGTH_SOURCE.LSTAT;
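
When the guard leaves the Body size unknown (for example, a path-shaped object that is not a real ReadStream), the error text in __doMultipartUpload points at the escape hatch: set params.ContentLength. A hedged sketch with placeholder names:

// Pins the byte count explicitly; byteLengthSource then reports CONTENT_LENGTH
// and expectedPartsCount is derived from this number rather than from lstat.
const { statSync, createReadStream } = require("fs");
const { S3Client } = require("@aws-sdk/client-s3");
const { Upload } = require("@aws-sdk/lib-storage");

const filePath = "./data.bin"; // placeholder
const upload = new Upload({
  client: new S3Client({}),
  params: {
    Bucket: "example-bucket", // placeholder
    Key: "example-key",       // placeholder
    Body: createReadStream(filePath),
    ContentLength: statSync(filePath).size, // explicit override in bytes
  },
});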
@@ -1,7 +1,10 @@
- import { lstatSync } from "fs";
+ import { lstatSync, ReadStream } from "fs";
 import { runtimeConfigShared as shared } from "./runtimeConfig.shared";
 export const runtimeConfig = {
 ...shared,
 runtime: "node",
 lstatSync,
+ isFileReadStream(f) {
+ return f instanceof ReadStream;
+ },
 };
@@ -1,3 +1,6 @@
 export const runtimeConfigShared = {
 lstatSync: () => { },
+ isFileReadStream(f) {
+ return false;
+ },
 };
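
Taken together, the runtime-config hunks follow the package's usual pattern: the shared config ships an inert default and each runtime overrides what it can actually support. Condensed below as a sketch, not the literal module layout:

// Shared default: never a file stream (browser/react-native keep this).
const runtimeConfigShared = {
  lstatSync: () => { },
  isFileReadStream(f) { return false; },
};

// Node runtime: real fs-backed implementations override the defaults.
const { lstatSync, ReadStream } = require("fs");
const runtimeConfig = {
  ...runtimeConfigShared,
  runtime: "node",
  lstatSync,
  isFileReadStream(f) { return f instanceof ReadStream; },
};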
@@ -4,4 +4,5 @@
 export declare const runtimeConfig: {
 runtime: string;
 lstatSync: () => void;
+ isFileReadStream(f: unknown): boolean;
 };
@@ -1,7 +1,9 @@
+ import { ReadStream } from "fs";
 /**
 * @internal
 */
 export declare const runtimeConfig: {
 runtime: string;
 lstatSync: import("fs").StatSyncFn;
+ isFileReadStream(f: unknown): f is ReadStream;
 };
@@ -4,4 +4,5 @@
 export declare const runtimeConfig: {
 runtime: string;
 lstatSync: () => void;
+ isFileReadStream(f: unknown): boolean;
 };
@@ -3,4 +3,5 @@
 */
 export declare const runtimeConfigShared: {
 lstatSync: () => void;
+ isFileReadStream(f: unknown): boolean;
 };
@@ -1,4 +1,5 @@
 export declare const runtimeConfig: {
 runtime: string;
 lstatSync: () => void;
+ isFileReadStream(f: unknown): boolean;
 };
@@ -1,4 +1,6 @@
+ import { ReadStream } from "fs";
 export declare const runtimeConfig: {
 runtime: string;
 lstatSync: import("fs").StatSyncFn;
+ isFileReadStream(f: unknown): f is ReadStream;
 };
@@ -1,4 +1,5 @@
 export declare const runtimeConfig: {
 runtime: string;
 lstatSync: () => void;
+ isFileReadStream(f: unknown): boolean;
 };
@@ -1,3 +1,4 @@
 export declare const runtimeConfigShared: {
 lstatSync: () => void;
+ isFileReadStream(f: unknown): boolean;
 };
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
 "name": "@aws-sdk/lib-storage",
- "version": "3.903.0",
+ "version": "3.906.0",
 "description": "Storage higher order operation",
 "main": "./dist-cjs/index.js",
 "module": "./dist-es/index.js",
@@ -39,10 +39,10 @@
 "tslib": "^2.6.2"
 },
 "peerDependencies": {
- "@aws-sdk/client-s3": "^3.901.0"
+ "@aws-sdk/client-s3": "^3.906.0"
 },
 "devDependencies": {
- "@aws-sdk/client-s3": "3.901.0",
+ "@aws-sdk/client-s3": "3.906.0",
 "@smithy/types": "^4.6.0",
 "@tsconfig/recommended": "1.0.1",
 "@types/node": "^18.19.69",