@aws-sdk/lib-storage 3.903.0 → 3.906.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist-cjs/index.js +478 -538
- package/dist-cjs/runtimeConfig.js +3 -0
- package/dist-cjs/runtimeConfig.shared.js +3 -0
- package/dist-es/byteLength.js +1 -1
- package/dist-es/byteLengthSource.js +1 -1
- package/dist-es/runtimeConfig.js +4 -1
- package/dist-es/runtimeConfig.shared.js +3 -0
- package/dist-types/runtimeConfig.browser.d.ts +1 -0
- package/dist-types/runtimeConfig.d.ts +2 -0
- package/dist-types/runtimeConfig.native.d.ts +1 -0
- package/dist-types/runtimeConfig.shared.d.ts +1 -0
- package/dist-types/ts3.4/runtimeConfig.browser.d.ts +1 -0
- package/dist-types/ts3.4/runtimeConfig.d.ts +2 -0
- package/dist-types/ts3.4/runtimeConfig.native.d.ts +1 -0
- package/dist-types/ts3.4/runtimeConfig.shared.d.ts +1 -0
- package/package.json +3 -3
package/dist-cjs/index.js
CHANGED

@@ -1,572 +1,512 @@
-var __defProp = Object.defineProperty;
-var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
-var __getOwnPropNames = Object.getOwnPropertyNames;
-var __hasOwnProp = Object.prototype.hasOwnProperty;
-var __name = (target, value) => __defProp(target, "name", { value, configurable: true });
-var __export = (target, all) => {
-  for (var name in all)
-    __defProp(target, name, { get: all[name], enumerable: true });
-};
-var __copyProps = (to, from, except, desc) => {
-  if (from && typeof from === "object" || typeof from === "function") {
-    for (let key of __getOwnPropNames(from))
-      if (!__hasOwnProp.call(to, key) && key !== except)
-        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
-  }
-  return to;
-};
-var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
-
-// src/index.ts
-var index_exports = {};
-__export(index_exports, {
-  Upload: () => Upload
-});
-module.exports = __toCommonJS(index_exports);
+'use strict';

-var
-var
-var
-var
-var
+var clientS3 = require('@aws-sdk/client-s3');
+var abortController = require('@smithy/abort-controller');
+var middlewareEndpoint = require('@smithy/middleware-endpoint');
+var smithyClient = require('@smithy/smithy-client');
+var events = require('events');
+var buffer = require('buffer');
+var runtimeConfig = require('./runtimeConfig');
+var stream = require('stream');

-var byteLength = /* @__PURE__ */ __name((input) => {
-  if (input == null) {
-    return 0;
-  }
-  if (typeof input === "string") {
-    return import_buffer.Buffer.byteLength(input);
-  }
-  if (typeof input.byteLength === "number") {
-    return input.byteLength;
-  } else if (typeof input.length === "number") {
-    return input.length;
-  } else if (typeof input.size === "number") {
-    return input.size;
-  } else if (typeof input.start === "number" && typeof input.end === "number") {
-    return input.end + 1 - input.start;
-  } else if (typeof input.path === "string") {
-    try {
-      return import_runtimeConfig.runtimeConfig.lstatSync(input.path).size;
-    } catch (error) {
-      return void 0;
+const byteLength = (input) => {
+    if (input == null) {
+        return 0;
     }
-  }
-  return
-  try {
-    import_runtimeConfig2.runtimeConfig.lstatSync(input.path).size;
-    return "the size of the file given by Body.path on disk as reported by lstatSync" /* LSTAT */;
-  } catch (error) {
-    return void 0;
-  }
-  }
-  return void 0;
-}, "byteLengthSource");
-
-// src/chunker.ts
-var import_stream = require("stream");
+    if (typeof input === "string") {
+        return buffer.Buffer.byteLength(input);
+    }
+    if (typeof input.byteLength === "number") {
+        return input.byteLength;
+    }
+    else if (typeof input.length === "number") {
+        return input.length;
+    }
+    else if (typeof input.size === "number") {
+        return input.size;
+    }
+    else if (typeof input.start === "number" && typeof input.end === "number") {
+        return input.end + 1 - input.start;
+    }
+    else if (runtimeConfig.runtimeConfig.isFileReadStream(input)) {
+        try {
+            return runtimeConfig.runtimeConfig.lstatSync(input.path).size;
+        }
+        catch (error) {
+            return undefined;
+        }
+    }
+    return undefined;
+};

+var BYTE_LENGTH_SOURCE;
+(function (BYTE_LENGTH_SOURCE) {
+    BYTE_LENGTH_SOURCE["EMPTY_INPUT"] = "a null or undefined Body";
+    BYTE_LENGTH_SOURCE["CONTENT_LENGTH"] = "the ContentLength property of the params set by the caller";
+    BYTE_LENGTH_SOURCE["STRING_LENGTH"] = "the encoded byte length of the Body string";
+    BYTE_LENGTH_SOURCE["TYPED_ARRAY"] = "the byteLength of a typed byte array such as Uint8Array";
+    BYTE_LENGTH_SOURCE["LENGTH"] = "the value of Body.length";
+    BYTE_LENGTH_SOURCE["SIZE"] = "the value of Body.size";
+    BYTE_LENGTH_SOURCE["START_END_DIFF"] = "the numeric difference between Body.start and Body.end";
+    BYTE_LENGTH_SOURCE["LSTAT"] = "the size of the file given by Body.path on disk as reported by lstatSync";
+})(BYTE_LENGTH_SOURCE || (BYTE_LENGTH_SOURCE = {}));
+const byteLengthSource = (input, override) => {
+    if (override != null) {
+        return BYTE_LENGTH_SOURCE.CONTENT_LENGTH;
+    }
+    if (input == null) {
+        return BYTE_LENGTH_SOURCE.EMPTY_INPUT;
+    }
+    if (typeof input === "string") {
+        return BYTE_LENGTH_SOURCE.STRING_LENGTH;
+    }
+    if (typeof input.byteLength === "number") {
+        return BYTE_LENGTH_SOURCE.TYPED_ARRAY;
+    }
+    else if (typeof input.length === "number") {
+        return BYTE_LENGTH_SOURCE.LENGTH;
+    }
+    else if (typeof input.size === "number") {
+        return BYTE_LENGTH_SOURCE.SIZE;
+    }
+    else if (typeof input.start === "number" && typeof input.end === "number") {
+        return BYTE_LENGTH_SOURCE.START_END_DIFF;
+    }
+    else if (runtimeConfig.runtimeConfig.isFileReadStream(input)) {
+        try {
+            runtimeConfig.runtimeConfig.lstatSync(input.path).size;
+            return BYTE_LENGTH_SOURCE.LSTAT;
+        }
+        catch (error) {
+            return undefined;
+        }
+    }
+    return undefined;
+};

 async function* getChunkStream(data, partSize, getNextData) {
+    let partNumber = 1;
+    const currentBuffer = { chunks: [], length: 0 };
+    for await (const datum of getNextData(data)) {
+        currentBuffer.chunks.push(datum);
+        currentBuffer.length += datum.byteLength;
+        while (currentBuffer.length > partSize) {
+            const dataChunk = currentBuffer.chunks.length > 1 ? buffer.Buffer.concat(currentBuffer.chunks) : currentBuffer.chunks[0];
+            yield {
+                partNumber,
+                data: dataChunk.subarray(0, partSize),
+            };
+            currentBuffer.chunks = [dataChunk.subarray(partSize)];
+            currentBuffer.length = currentBuffer.chunks[0].byteLength;
+            partNumber += 1;
+        }
+    }
+    yield {
         partNumber,
-      data:
-      currentBuffer.length = currentBuffer.chunks[0].byteLength;
-      partNumber += 1;
-    }
-  }
-  yield {
-    partNumber,
-    data: currentBuffer.chunks.length !== 1 ? import_buffer.Buffer.concat(currentBuffer.chunks) : currentBuffer.chunks[0],
-    lastPart: true
-  };
+        data: currentBuffer.chunks.length !== 1 ? buffer.Buffer.concat(currentBuffer.chunks) : currentBuffer.chunks[0],
+        lastPart: true,
+    };
 }
-__name(getChunkStream, "getChunkStream");

-// src/chunks/getChunkUint8Array.ts
 async function* getChunkUint8Array(data, partSize) {
+    let partNumber = 1;
+    let startByte = 0;
+    let endByte = partSize;
+    while (endByte < data.byteLength) {
+        yield {
+            partNumber,
+            data: data.subarray(startByte, endByte),
+        };
+        partNumber += 1;
+        startByte = endByte;
+        endByte = startByte + partSize;
+    }
     yield {
+        partNumber,
+        data: data.subarray(startByte),
+        lastPart: true,
     };
-    partNumber += 1;
-    startByte = endByte;
-    endByte = startByte + partSize;
-  }
-  yield {
-    partNumber,
-    data: data.subarray(startByte),
-    lastPart: true
-  };
 }
-__name(getChunkUint8Array, "getChunkUint8Array");

-// src/chunks/getDataReadable.ts
 async function* getDataReadable(data) {
+    for await (const chunk of data) {
+        if (buffer.Buffer.isBuffer(chunk) || chunk instanceof Uint8Array) {
+            yield chunk;
+        }
+        else {
+            yield buffer.Buffer.from(chunk);
+        }
     }
-  }
 }
-__name(getDataReadable, "getDataReadable");

-// src/chunks/getDataReadableStream.ts
 async function* getDataReadableStream(data) {
+    const reader = data.getReader();
+    try {
+        while (true) {
+            const { done, value } = await reader.read();
+            if (done) {
+                return;
+            }
+            if (buffer.Buffer.isBuffer(value) || value instanceof Uint8Array) {
+                yield value;
+            }
+            else {
+                yield buffer.Buffer.from(value);
+            }
+        }
+    }
+    catch (e) {
+        throw e;
+    }
+    finally {
+        reader.releaseLock();
+    }
 }
-__name(getDataReadableStream, "getDataReadableStream");

-    "Body Data is unsupported format, expected data to be one of: string | Uint8Array | Buffer | Readable | ReadableStream | Blob;."
-  );
-}, "getChunk");
+const getChunk = (data, partSize) => {
+    if (data instanceof Uint8Array) {
+        return getChunkUint8Array(data, partSize);
+    }
+    if (data instanceof stream.Readable) {
+        return getChunkStream(data, partSize, getDataReadable);
+    }
+    if (data instanceof String || typeof data === "string") {
+        return getChunkUint8Array(buffer.Buffer.from(data), partSize);
+    }
+    if (typeof data.stream === "function") {
+        return getChunkStream(data.stream(), partSize, getDataReadableStream);
+    }
+    if (data instanceof ReadableStream) {
+        return getChunkStream(data, partSize, getDataReadableStream);
+    }
+    throw new Error("Body Data is unsupported format, expected data to be one of: string | Uint8Array | Buffer | Readable | ReadableStream | Blob;.");
+};

-    this.totalBytes = this.params.ContentLength ?? byteLength(this.params.Body);
-    this.totalBytesSource = byteLengthSource(this.params.Body, this.params.ContentLength);
-    this.bytesUploadedSoFar = 0;
-    this.abortController = options.abortController ?? new import_abort_controller.AbortController();
-    this.partSize = options.partSize || Math.max(_Upload.MIN_PART_SIZE, Math.floor((this.totalBytes || 0) / this.MAX_PARTS));
-    if (this.totalBytes !== void 0) {
-      this.expectedPartsCount = Math.ceil(this.totalBytes / this.partSize);
-    }
-    this.__validateInput();
-  }
-  async abort() {
-    this.abortController.abort();
-  }
-  async done() {
-    if (this.sent) {
-      throw new Error(
-        "@aws-sdk/lib-storage: this instance of Upload has already executed .done(). Create a new instance."
-      );
-    }
-    this.sent = true;
-    return await Promise.race([this.__doMultipartUpload(), this.__abortTimeout(this.abortController.signal)]);
-  }
-  on(event, listener) {
-    this.uploadEvent = event;
-    return super.on(event, listener);
-  }
-  async __uploadUsingPut(dataPart) {
-    this.isMultiPart = false;
-    const params = { ...this.params, Body: dataPart.data };
-    const clientConfig = this.client.config;
-    const requestHandler = clientConfig.requestHandler;
-    const eventEmitter = requestHandler instanceof import_events.EventEmitter ? requestHandler : null;
-    const uploadEventListener = /* @__PURE__ */ __name((event) => {
-      this.bytesUploadedSoFar = event.loaded;
-      this.totalBytes = event.total;
-      this.__notifyProgress({
-        loaded: this.bytesUploadedSoFar,
-        total: this.totalBytes,
-        part: dataPart.partNumber,
-        Key: this.params.Key,
-        Bucket: this.params.Bucket
-      });
-    }, "uploadEventListener");
-    if (eventEmitter !== null) {
-      eventEmitter.on("xhr.upload.progress", uploadEventListener);
-    }
-    const resolved = await Promise.all([this.client.send(new import_client_s3.PutObjectCommand(params)), clientConfig?.endpoint?.()]);
-    const putResult = resolved[0];
-    let endpoint = resolved[1];
-    if (!endpoint) {
-      endpoint = (0, import_middleware_endpoint.toEndpointV1)(
-        await (0, import_middleware_endpoint.getEndpointFromInstructions)(params, import_client_s3.PutObjectCommand, {
-          ...clientConfig
-        })
-      );
-    }
-    if (!endpoint) {
-      throw new Error('Could not resolve endpoint from S3 "client.config.endpoint()" nor EndpointsV2.');
-    }
-    if (eventEmitter !== null) {
-      eventEmitter.off("xhr.upload.progress", uploadEventListener);
-    }
-    const locationKey = this.params.Key.split("/").map((segment) => (0, import_smithy_client.extendedEncodeURIComponent)(segment)).join("/");
-    const locationBucket = (0, import_smithy_client.extendedEncodeURIComponent)(this.params.Bucket);
-    const Location = (() => {
-      const endpointHostnameIncludesBucket = endpoint.hostname.startsWith(`${locationBucket}.`);
-      const forcePathStyle = this.client.config.forcePathStyle;
-      const optionalPort = endpoint.port ? `:${endpoint.port}` : ``;
-      if (forcePathStyle) {
-        return `${endpoint.protocol}//${endpoint.hostname}${optionalPort}/${locationBucket}/${locationKey}`;
-      }
-      if (endpointHostnameIncludesBucket) {
-        return `${endpoint.protocol}//${endpoint.hostname}${optionalPort}/${locationKey}`;
-      }
-      return `${endpoint.protocol}//${locationBucket}.${endpoint.hostname}${optionalPort}/${locationKey}`;
-    })();
-    this.singleUploadResult = {
-      ...putResult,
-      Bucket: this.params.Bucket,
-      Key: this.params.Key,
-      Location
-    };
-    const totalSize = byteLength(dataPart.data);
-    this.__notifyProgress({
-      loaded: totalSize,
-      total: totalSize,
-      part: 1,
-      Key: this.params.Key,
-      Bucket: this.params.Bucket
-    });
-  }
-  async __createMultipartUpload() {
-    const requestChecksumCalculation = await this.client.config.requestChecksumCalculation();
-    if (!this.createMultiPartPromise) {
-      const createCommandParams = { ...this.params, Body: void 0 };
-      if (requestChecksumCalculation === "WHEN_SUPPORTED") {
-        createCommandParams.ChecksumAlgorithm = this.params.ChecksumAlgorithm || import_client_s3.ChecksumAlgorithm.CRC32;
-      }
-      this.createMultiPartPromise = this.client.send(new import_client_s3.CreateMultipartUploadCommand(createCommandParams)).then((createMpuResponse) => {
-        this.abortMultipartUploadCommand = new import_client_s3.AbortMultipartUploadCommand({
-          Bucket: this.params.Bucket,
-          Key: this.params.Key,
-          UploadId: createMpuResponse.UploadId
-        });
-        return createMpuResponse;
-      });
-    }
-    return this.createMultiPartPromise;
-  }
-  async __doConcurrentUpload(dataFeeder) {
-    for await (const dataPart of dataFeeder) {
-      if (this.uploadEnqueuedPartsCount > this.MAX_PARTS) {
-        throw new Error(
-          `Exceeded ${this.MAX_PARTS} parts in multipart upload to Bucket: ${this.params.Bucket} Key: ${this.params.Key}.`
-        );
-      }
-      if (this.abortController.signal.aborted) {
-        return;
-      }
-      if (dataPart.partNumber === 1 && dataPart.lastPart) {
-        return await this.__uploadUsingPut(dataPart);
-      }
-      if (!this.uploadId) {
-        const { UploadId } = await this.__createMultipartUpload();
-        this.uploadId = UploadId;
-        if (this.abortController.signal.aborted) {
-          return;
+class Upload extends events.EventEmitter {
+    static MIN_PART_SIZE = 1024 * 1024 * 5;
+    MAX_PARTS = 10_000;
+    queueSize = 4;
+    partSize;
+    leavePartsOnError = false;
+    tags = [];
+    client;
+    params;
+    totalBytes;
+    totalBytesSource;
+    bytesUploadedSoFar;
+    abortController;
+    concurrentUploaders = [];
+    createMultiPartPromise;
+    abortMultipartUploadCommand = null;
+    uploadedParts = [];
+    uploadEnqueuedPartsCount = 0;
+    expectedPartsCount;
+    uploadId;
+    uploadEvent;
+    isMultiPart = true;
+    singleUploadResult;
+    sent = false;
+    constructor(options) {
+        super();
+        this.queueSize = options.queueSize || this.queueSize;
+        this.leavePartsOnError = options.leavePartsOnError || this.leavePartsOnError;
+        this.tags = options.tags || this.tags;
+        this.client = options.client;
+        this.params = options.params;
+        if (!this.params) {
+            throw new Error(`InputError: Upload requires params to be passed to upload.`);
+        }
+        this.totalBytes = this.params.ContentLength ?? byteLength(this.params.Body);
+        this.totalBytesSource = byteLengthSource(this.params.Body, this.params.ContentLength);
+        this.bytesUploadedSoFar = 0;
+        this.abortController = options.abortController ?? new abortController.AbortController();
+        this.partSize =
+            options.partSize || Math.max(Upload.MIN_PART_SIZE, Math.floor((this.totalBytes || 0) / this.MAX_PARTS));
+        if (this.totalBytes !== undefined) {
+            this.expectedPartsCount = Math.ceil(this.totalBytes / this.partSize);
+        }
+        this.__validateInput();
+    }
+    async abort() {
+        this.abortController.abort();
+    }
+    async done() {
+        if (this.sent) {
+            throw new Error("@aws-sdk/lib-storage: this instance of Upload has already executed .done(). Create a new instance.");
         }
+        this.sent = true;
+        return await Promise.race([this.__doMultipartUpload(), this.__abortTimeout(this.abortController.signal)]);
+    }
+    on(event, listener) {
+        this.uploadEvent = event;
+        return super.on(event, listener);
+    }
+    async __uploadUsingPut(dataPart) {
+        this.isMultiPart = false;
+        const params = { ...this.params, Body: dataPart.data };
+        const clientConfig = this.client.config;
+        const requestHandler = clientConfig.requestHandler;
+        const eventEmitter = requestHandler instanceof events.EventEmitter ? requestHandler : null;
+        const uploadEventListener = (event) => {
+            this.bytesUploadedSoFar = event.loaded;
+            this.totalBytes = event.total;
+            this.__notifyProgress({
+                loaded: this.bytesUploadedSoFar,
+                total: this.totalBytes,
+                part: dataPart.partNumber,
+                Key: this.params.Key,
+                Bucket: this.params.Bucket,
+            });
+        };
+        if (eventEmitter !== null) {
+            eventEmitter.on("xhr.upload.progress", uploadEventListener);
+        }
+        const resolved = await Promise.all([this.client.send(new clientS3.PutObjectCommand(params)), clientConfig?.endpoint?.()]);
+        const putResult = resolved[0];
+        let endpoint = resolved[1];
+        if (!endpoint) {
+            endpoint = middlewareEndpoint.toEndpointV1(await middlewareEndpoint.getEndpointFromInstructions(params, clientS3.PutObjectCommand, {
+                ...clientConfig,
+            }));
         }
-      if (
-        lastSeenBytes = event.loaded;
+        if (!endpoint) {
+            throw new Error('Could not resolve endpoint from S3 "client.config.endpoint()" nor EndpointsV2.');
         }
+        if (eventEmitter !== null) {
+            eventEmitter.off("xhr.upload.progress", uploadEventListener);
+        }
+        const locationKey = this.params
+            .Key.split("/")
+            .map((segment) => smithyClient.extendedEncodeURIComponent(segment))
+            .join("/");
+        const locationBucket = smithyClient.extendedEncodeURIComponent(this.params.Bucket);
+        const Location = (() => {
+            const endpointHostnameIncludesBucket = endpoint.hostname.startsWith(`${locationBucket}.`);
+            const forcePathStyle = this.client.config.forcePathStyle;
+            const optionalPort = endpoint.port ? `:${endpoint.port}` : ``;
+            if (forcePathStyle) {
+                return `${endpoint.protocol}//${endpoint.hostname}${optionalPort}/${locationBucket}/${locationKey}`;
+            }
+            if (endpointHostnameIncludesBucket) {
+                return `${endpoint.protocol}//${endpoint.hostname}${optionalPort}/${locationKey}`;
+            }
+            return `${endpoint.protocol}//${locationBucket}.${endpoint.hostname}${optionalPort}/${locationKey}`;
+        })();
+        this.singleUploadResult = {
+            ...putResult,
+            Bucket: this.params.Bucket,
+            Key: this.params.Key,
+            Location,
+        };
+        const totalSize = byteLength(dataPart.data);
         this.__notifyProgress({
+            loaded: totalSize,
+            total: totalSize,
+            part: 1,
+            Key: this.params.Key,
+            Bucket: this.params.Bucket,
         });
+    }
+    async __createMultipartUpload() {
+        const requestChecksumCalculation = await this.client.config.requestChecksumCalculation();
+        if (!this.createMultiPartPromise) {
+            const createCommandParams = { ...this.params, Body: undefined };
+            if (requestChecksumCalculation === "WHEN_SUPPORTED") {
+                createCommandParams.ChecksumAlgorithm = this.params.ChecksumAlgorithm || clientS3.ChecksumAlgorithm.CRC32;
+            }
+            this.createMultiPartPromise = this.client
+                .send(new clientS3.CreateMultipartUploadCommand(createCommandParams))
+                .then((createMpuResponse) => {
+                this.abortMultipartUploadCommand = new clientS3.AbortMultipartUploadCommand({
+                    Bucket: this.params.Bucket,
+                    Key: this.params.Key,
+                    UploadId: createMpuResponse.UploadId,
+                });
+                return createMpuResponse;
+            });
+        }
+        return this.createMultiPartPromise;
+    }
+    async __doConcurrentUpload(dataFeeder) {
+        for await (const dataPart of dataFeeder) {
+            if (this.uploadEnqueuedPartsCount > this.MAX_PARTS) {
+                throw new Error(`Exceeded ${this.MAX_PARTS} parts in multipart upload to Bucket: ${this.params.Bucket} Key: ${this.params.Key}.`);
+            }
+            if (this.abortController.signal.aborted) {
+                return;
+            }
+            if (dataPart.partNumber === 1 && dataPart.lastPart) {
+                return await this.__uploadUsingPut(dataPart);
+            }
+            if (!this.uploadId) {
+                const { UploadId } = await this.__createMultipartUpload();
+                this.uploadId = UploadId;
+                if (this.abortController.signal.aborted) {
+                    return;
+                }
+            }
+            const partSize = byteLength(dataPart.data) || 0;
+            const requestHandler = this.client.config.requestHandler;
+            const eventEmitter = requestHandler instanceof events.EventEmitter ? requestHandler : null;
+            let lastSeenBytes = 0;
+            const uploadEventListener = (event, request) => {
+                const requestPartSize = Number(request.query["partNumber"]) || -1;
+                if (requestPartSize !== dataPart.partNumber) {
+                    return;
+                }
+                if (event.total && partSize) {
+                    this.bytesUploadedSoFar += event.loaded - lastSeenBytes;
+                    lastSeenBytes = event.loaded;
+                }
+                this.__notifyProgress({
+                    loaded: this.bytesUploadedSoFar,
+                    total: this.totalBytes,
+                    part: dataPart.partNumber,
+                    Key: this.params.Key,
+                    Bucket: this.params.Bucket,
+                });
+            };
+            if (eventEmitter !== null) {
+                eventEmitter.on("xhr.upload.progress", uploadEventListener);
+            }
+            this.uploadEnqueuedPartsCount += 1;
+            this.__validateUploadPart(dataPart);
+            const partResult = await this.client.send(new clientS3.UploadPartCommand({
+                ...this.params,
+                ContentLength: undefined,
+                UploadId: this.uploadId,
+                Body: dataPart.data,
+                PartNumber: dataPart.partNumber,
+            }));
+            if (eventEmitter !== null) {
+                eventEmitter.off("xhr.upload.progress", uploadEventListener);
+            }
+            if (this.abortController.signal.aborted) {
+                return;
+            }
+            if (!partResult.ETag) {
+                throw new Error(`Part ${dataPart.partNumber} is missing ETag in UploadPart response. Missing Bucket CORS configuration for ETag header?`);
+            }
+            this.uploadedParts.push({
+                PartNumber: dataPart.partNumber,
+                ETag: partResult.ETag,
+                ...(partResult.ChecksumCRC32 && { ChecksumCRC32: partResult.ChecksumCRC32 }),
+                ...(partResult.ChecksumCRC32C && { ChecksumCRC32C: partResult.ChecksumCRC32C }),
+                ...(partResult.ChecksumSHA1 && { ChecksumSHA1: partResult.ChecksumSHA1 }),
+                ...(partResult.ChecksumSHA256 && { ChecksumSHA256: partResult.ChecksumSHA256 }),
+            });
+            if (eventEmitter === null) {
+                this.bytesUploadedSoFar += partSize;
+            }
+            this.__notifyProgress({
+                loaded: this.bytesUploadedSoFar,
+                total: this.totalBytes,
+                part: dataPart.partNumber,
+                Key: this.params.Key,
+                Bucket: this.params.Bucket,
+            });
+        }
+    }
+    async __doMultipartUpload() {
+        const dataFeeder = getChunk(this.params.Body, this.partSize);
+        const concurrentUploaderFailures = [];
+        for (let index = 0; index < this.queueSize; index++) {
+            const currentUpload = this.__doConcurrentUpload(dataFeeder).catch((err) => {
+                concurrentUploaderFailures.push(err);
+            });
+            this.concurrentUploaders.push(currentUpload);
+        }
+        await Promise.all(this.concurrentUploaders);
+        if (concurrentUploaderFailures.length >= 1) {
+            await this.markUploadAsAborted();
+            throw concurrentUploaderFailures[0];
+        }
+        if (this.abortController.signal.aborted) {
+            await this.markUploadAsAborted();
+            throw Object.assign(new Error("Upload aborted."), { name: "AbortError" });
+        }
+        let result;
+        if (this.isMultiPart) {
+            const { expectedPartsCount, uploadedParts, totalBytes, totalBytesSource } = this;
+            if (totalBytes !== undefined && expectedPartsCount !== undefined && uploadedParts.length !== expectedPartsCount) {
+                throw new Error(`Expected ${expectedPartsCount} part(s) but uploaded ${uploadedParts.length} part(s).
 The expected part count is based on the byte-count of the input.params.Body,
 which was read from ${totalBytesSource} and is ${totalBytes}.
 If this is not correct, provide an override value by setting a number
 to input.params.ContentLength in bytes.
 `);
+            }
+            this.uploadedParts.sort((a, b) => a.PartNumber - b.PartNumber);
+            const uploadCompleteParams = {
+                ...this.params,
+                Body: undefined,
+                UploadId: this.uploadId,
+                MultipartUpload: {
+                    Parts: this.uploadedParts,
+                },
+            };
+            result = await this.client.send(new clientS3.CompleteMultipartUploadCommand(uploadCompleteParams));
+            if (typeof result?.Location === "string" && result.Location.includes("%2F")) {
+                result.Location = result.Location.replace(/%2F/g, "/");
+            }
         }
-          `The byte size for part number ${dataPart.partNumber}, size ${actualPartSize} does not match expected size ${this.partSize}`
-        );
-      }
-  }
-  __validateInput() {
-    if (!this.client) {
-      throw new Error(`InputError: Upload requires a AWS client to do uploads with.`);
-    }
-    if (this.partSize < _Upload.MIN_PART_SIZE) {
-      throw new Error(
-        `EntityTooSmall: Your proposed upload part size [${this.partSize}] is smaller than the minimum allowed size [${_Upload.MIN_PART_SIZE}] (5MB)`
-      );
-    }
-    if (this.queueSize < 1) {
-      throw new Error(`Queue size: Must have at least one uploading queue.`);
-    }
-  }
-};
-// Annotate the CommonJS export names for ESM import in node:
-0 && (module.exports = {
-  Upload
-});
+        else {
+            result = this.singleUploadResult;
+        }
+        this.abortMultipartUploadCommand = null;
+        if (this.tags.length) {
+            await this.client.send(new clientS3.PutObjectTaggingCommand({
+                ...this.params,
+                Tagging: {
+                    TagSet: this.tags,
+                },
+            }));
+        }
+        return result;
+    }
+    async markUploadAsAborted() {
+        if (this.uploadId && !this.leavePartsOnError && null !== this.abortMultipartUploadCommand) {
+            await this.client.send(this.abortMultipartUploadCommand);
+            this.abortMultipartUploadCommand = null;
+        }
+    }
+    __notifyProgress(progress) {
+        if (this.uploadEvent) {
+            this.emit(this.uploadEvent, progress);
+        }
+    }
+    async __abortTimeout(abortSignal) {
+        return new Promise((resolve, reject) => {
+            abortSignal.onabort = () => {
+                const abortError = new Error("Upload aborted.");
+                abortError.name = "AbortError";
+                reject(abortError);
+            };
+        });
+    }
+    __validateUploadPart(dataPart) {
+        const actualPartSize = byteLength(dataPart.data);
+        if (actualPartSize === undefined) {
+            throw new Error(`A dataPart was generated without a measurable data chunk size for part number ${dataPart.partNumber}`);
+        }
+        if (dataPart.partNumber === 1 && dataPart.lastPart) {
+            return;
+        }
+        if (!dataPart.lastPart && actualPartSize !== this.partSize) {
+            throw new Error(`The byte size for part number ${dataPart.partNumber}, size ${actualPartSize} does not match expected size ${this.partSize}`);
+        }
+    }
+    __validateInput() {
+        if (!this.client) {
+            throw new Error(`InputError: Upload requires a AWS client to do uploads with.`);
+        }
+        if (this.partSize < Upload.MIN_PART_SIZE) {
+            throw new Error(`EntityTooSmall: Your proposed upload part size [${this.partSize}] is smaller than the minimum allowed size [${Upload.MIN_PART_SIZE}] (5MB)`);
+        }
+        if (this.queueSize < 1) {
+            throw new Error(`Queue size: Must have at least one uploading queue.`);
+        }
+    }
+}

+exports.Upload = Upload;
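The recompile does not change the public surface: the module still exports the single Upload class, now via a plain `exports.Upload = Upload` assignment instead of the esbuild `__export`/`__toCommonJS` helpers. A minimal usage sketch against this API (the bucket, key, and file path are illustrative placeholders, and "httpUploadProgress" is the event name documented for lib-storage rather than something shown in this diff):

const { S3Client } = require("@aws-sdk/client-s3");
const { Upload } = require("@aws-sdk/lib-storage");
const { createReadStream } = require("fs");

async function main() {
  const upload = new Upload({
    client: new S3Client({}),
    params: {
      Bucket: "example-bucket",                  // placeholder
      Key: "example-key",                        // placeholder
      Body: createReadStream("./big-file.bin"),  // placeholder path
    },
    queueSize: 4,              // default concurrency shown in the diff
    partSize: 5 * 1024 * 1024, // Upload.MIN_PART_SIZE
    leavePartsOnError: false,  // abort the MPU on failure, per markUploadAsAborted()
  });
  // Progress events flow through __notifyProgress via the registered event name.
  upload.on("httpUploadProgress", (progress) => {
    console.log(progress.part, progress.loaded, "/", progress.total);
  });
  await upload.done(); // single PutObject for one-part bodies, multipart otherwise
}

main();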
package/dist-es/byteLength.js
CHANGED

@@ -19,7 +19,7 @@ export const byteLength = (input) => {
     else if (typeof input.start === "number" && typeof input.end === "number") {
         return input.end + 1 - input.start;
     }
-    else if (typeof input.path === "string") {
+    else if (runtimeConfig.isFileReadStream(input)) {
         try {
             return runtimeConfig.lstatSync(input.path).size;
         }
@@ -32,7 +32,7 @@ export const byteLengthSource = (input, override) => {
     else if (typeof input.start === "number" && typeof input.end === "number") {
         return BYTE_LENGTH_SOURCE.START_END_DIFF;
     }
-    else if (typeof input.path === "string") {
+    else if (runtimeConfig.isFileReadStream(input)) {
         try {
             runtimeConfig.lstatSync(input.path).size;
             return BYTE_LENGTH_SOURCE.LSTAT;
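The behavioral change in both lookups is the guard: previously any Body carrying a string `path` property was probed with lstatSync, whereas now only a genuine fs.ReadStream qualifies for the on-disk fallback. A small sketch of the distinction (the file name and the `fakeBody` object are hypothetical, invented here for contrast):

import { createReadStream } from "fs";

// A real file stream: byteLength can fall back to lstatSync(input.path).size.
const fileBody = createReadStream("./data.bin"); // placeholder path

// An arbitrary object that merely has a `path` string. Under the old check this
// would also have been lstat'ed; under the new check byteLength returns undefined,
// so the caller must supply params.ContentLength explicitly instead.
const fakeBody = { path: "./data.bin" };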
package/dist-es/runtimeConfig.js
CHANGED

@@ -1,7 +1,10 @@
-import { lstatSync } from "fs";
+import { lstatSync, ReadStream } from "fs";
 import { runtimeConfigShared as shared } from "./runtimeConfig.shared";
 export const runtimeConfig = {
     ...shared,
     runtime: "node",
     lstatSync,
+    isFileReadStream(f) {
+        return f instanceof ReadStream;
+    },
 };
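The Node runtime config is where `isFileReadStream` gets its real implementation; the file list above shows that the browser, native, and shared configs gained the same field in their type declarations. A plausible shared counterpart, assumed here for illustration (its body is not shown in this diff), would simply decline, letting byteLength fall through to its other measurements:

// runtimeConfig.shared.js (sketch, assumed): non-Node environments have no
// fs.ReadStream, so the predicate conservatively reports false.
export const runtimeConfigShared = {
    isFileReadStream(f) {
        return false;
    },
};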
package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
   "name": "@aws-sdk/lib-storage",
-  "version": "3.903.0",
+  "version": "3.906.0",
   "description": "Storage higher order operation",
   "main": "./dist-cjs/index.js",
   "module": "./dist-es/index.js",
@@ -39,10 +39,10 @@
     "tslib": "^2.6.2"
   },
   "peerDependencies": {
-    "@aws-sdk/client-s3": "^3.903.0"
+    "@aws-sdk/client-s3": "^3.906.0"
   },
   "devDependencies": {
-    "@aws-sdk/client-s3": "3.903.0",
+    "@aws-sdk/client-s3": "3.906.0",
     "@smithy/types": "^4.6.0",
     "@tsconfig/recommended": "1.0.1",
     "@types/node": "^18.19.69",