@aws-sdk/lib-storage 3.489.0 → 3.495.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist-cjs/Upload.js +1 -270
- package/dist-cjs/bytelength.js +1 -29
- package/dist-cjs/chunker.js +1 -30
- package/dist-cjs/chunks/getChunkBuffer.js +1 -23
- package/dist-cjs/chunks/getChunkStream.js +1 -28
- package/dist-cjs/chunks/getDataReadable.js +1 -10
- package/dist-cjs/chunks/getDataReadableStream.js +1 -22
- package/dist-cjs/index.js +436 -5
- package/dist-cjs/types.js +1 -2
- package/dist-es/Upload.js +3 -0
- package/package.json +7 -7
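
The dist-cjs entries below reflect a build change rather than a code change: each previously compiled CommonJS file becomes a one-line stub, and the whole CommonJS build is served from a single bundled index.js (the `+module.exports = require("./index.js")` lines). A minimal sketch of what that means for consumers, assuming deep requires into dist-cjs still resolve (they are not part of the public API):

```js
// Both requires resolve to the same bundled module in 3.495.0:
// dist-cjs/Upload.js is now a stub that re-exports dist-cjs/index.js.
const { Upload } = require("@aws-sdk/lib-storage");
const { Upload: UploadViaStub } = require("@aws-sdk/lib-storage/dist-cjs/Upload");
console.log(Upload === UploadViaStub); // expected: true
```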
package/dist-cjs/Upload.js
CHANGED
@@ -1,270 +1 @@
-
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Upload = void 0;
-const client_s3_1 = require("@aws-sdk/client-s3");
-const abort_controller_1 = require("@smithy/abort-controller");
-const middleware_endpoint_1 = require("@smithy/middleware-endpoint");
-const smithy_client_1 = require("@smithy/smithy-client");
-const events_1 = require("events");
-const bytelength_1 = require("./bytelength");
-const chunker_1 = require("./chunker");
-const MIN_PART_SIZE = 1024 * 1024 * 5;
-class Upload extends events_1.EventEmitter {
-  constructor(options) {
-    var _a;
-    super();
-    this.MAX_PARTS = 10000;
-    this.queueSize = 4;
-    this.partSize = MIN_PART_SIZE;
-    this.leavePartsOnError = false;
-    this.tags = [];
-    this.concurrentUploaders = [];
-    this.uploadedParts = [];
-    this.isMultiPart = true;
-    this.queueSize = options.queueSize || this.queueSize;
-    this.partSize = options.partSize || this.partSize;
-    this.leavePartsOnError = options.leavePartsOnError || this.leavePartsOnError;
-    this.tags = options.tags || this.tags;
-    this.client = options.client;
-    this.params = options.params;
-    this.__validateInput();
-    this.totalBytes = (0, bytelength_1.byteLength)(this.params.Body);
-    this.bytesUploadedSoFar = 0;
-    this.abortController = (_a = options.abortController) !== null && _a !== void 0 ? _a : new abort_controller_1.AbortController();
-  }
-  async abort() {
-    this.abortController.abort();
-  }
-  async done() {
-    return await Promise.race([this.__doMultipartUpload(), this.__abortTimeout(this.abortController.signal)]);
-  }
-  on(event, listener) {
-    this.uploadEvent = event;
-    return super.on(event, listener);
-  }
-  async __uploadUsingPut(dataPart) {
-    var _a;
-    this.isMultiPart = false;
-    const params = { ...this.params, Body: dataPart.data };
-    const clientConfig = this.client.config;
-    const requestHandler = clientConfig.requestHandler;
-    const eventEmitter = requestHandler instanceof events_1.EventEmitter ? requestHandler : null;
-    const uploadEventListener = (event) => {
-      this.bytesUploadedSoFar = event.loaded;
-      this.totalBytes = event.total;
-      this.__notifyProgress({
-        loaded: this.bytesUploadedSoFar,
-        total: this.totalBytes,
-        part: dataPart.partNumber,
-        Key: this.params.Key,
-        Bucket: this.params.Bucket,
-      });
-    };
-    if (eventEmitter !== null) {
-      eventEmitter.on("xhr.upload.progress", uploadEventListener);
-    }
-    const resolved = await Promise.all([this.client.send(new client_s3_1.PutObjectCommand(params)), (_a = clientConfig === null || clientConfig === void 0 ? void 0 : clientConfig.endpoint) === null || _a === void 0 ? void 0 : _a.call(clientConfig)]);
-    const putResult = resolved[0];
-    let endpoint = resolved[1];
-    if (!endpoint) {
-      endpoint = (0, middleware_endpoint_1.toEndpointV1)(await (0, middleware_endpoint_1.getEndpointFromInstructions)(params, client_s3_1.PutObjectCommand, {
-        ...clientConfig,
-      }));
-    }
-    if (!endpoint) {
-      throw new Error('Could not resolve endpoint from S3 "client.config.endpoint()" nor EndpointsV2.');
-    }
-    if (eventEmitter !== null) {
-      eventEmitter.off("xhr.upload.progress", uploadEventListener);
-    }
-    const locationKey = this.params
-      .Key.split("/")
-      .map((segment) => (0, smithy_client_1.extendedEncodeURIComponent)(segment))
-      .join("/");
-    const locationBucket = (0, smithy_client_1.extendedEncodeURIComponent)(this.params.Bucket);
-    const Location = (() => {
-      const endpointHostnameIncludesBucket = endpoint.hostname.startsWith(`${locationBucket}.`);
-      const forcePathStyle = this.client.config.forcePathStyle;
-      if (forcePathStyle) {
-        return `${endpoint.protocol}//${endpoint.hostname}/${locationBucket}/${locationKey}`;
-      }
-      if (endpointHostnameIncludesBucket) {
-        return `${endpoint.protocol}//${endpoint.hostname}/${locationKey}`;
-      }
-      return `${endpoint.protocol}//${locationBucket}.${endpoint.hostname}/${locationKey}`;
-    })();
-    this.singleUploadResult = {
-      ...putResult,
-      Bucket: this.params.Bucket,
-      Key: this.params.Key,
-      Location,
-    };
-    const totalSize = (0, bytelength_1.byteLength)(dataPart.data);
-    this.__notifyProgress({
-      loaded: totalSize,
-      total: totalSize,
-      part: 1,
-      Key: this.params.Key,
-      Bucket: this.params.Bucket,
-    });
-  }
-  async __createMultipartUpload() {
-    if (!this.createMultiPartPromise) {
-      const createCommandParams = { ...this.params, Body: undefined };
-      this.createMultiPartPromise = this.client.send(new client_s3_1.CreateMultipartUploadCommand(createCommandParams));
-    }
-    return this.createMultiPartPromise;
-  }
-  async __doConcurrentUpload(dataFeeder) {
-    for await (const dataPart of dataFeeder) {
-      if (this.uploadedParts.length > this.MAX_PARTS) {
-        throw new Error(`Exceeded ${this.MAX_PARTS} as part of the upload to ${this.params.Key} and ${this.params.Bucket}.`);
-      }
-      try {
-        if (this.abortController.signal.aborted) {
-          return;
-        }
-        if (dataPart.partNumber === 1 && dataPart.lastPart) {
-          return await this.__uploadUsingPut(dataPart);
-        }
-        if (!this.uploadId) {
-          const { UploadId } = await this.__createMultipartUpload();
-          this.uploadId = UploadId;
-          if (this.abortController.signal.aborted) {
-            return;
-          }
-        }
-        const partSize = (0, bytelength_1.byteLength)(dataPart.data) || 0;
-        const requestHandler = this.client.config.requestHandler;
-        const eventEmitter = requestHandler instanceof events_1.EventEmitter ? requestHandler : null;
-        let lastSeenBytes = 0;
-        const uploadEventListener = (event, request) => {
-          const requestPartSize = Number(request.query["partNumber"]) || -1;
-          if (requestPartSize !== dataPart.partNumber) {
-            return;
-          }
-          if (event.total && partSize) {
-            this.bytesUploadedSoFar += event.loaded - lastSeenBytes;
-            lastSeenBytes = event.loaded;
-          }
-          this.__notifyProgress({
-            loaded: this.bytesUploadedSoFar,
-            total: this.totalBytes,
-            part: dataPart.partNumber,
-            Key: this.params.Key,
-            Bucket: this.params.Bucket,
-          });
-        };
-        if (eventEmitter !== null) {
-          eventEmitter.on("xhr.upload.progress", uploadEventListener);
-        }
-        const partResult = await this.client.send(new client_s3_1.UploadPartCommand({
-          ...this.params,
-          UploadId: this.uploadId,
-          Body: dataPart.data,
-          PartNumber: dataPart.partNumber,
-        }));
-        if (eventEmitter !== null) {
-          eventEmitter.off("xhr.upload.progress", uploadEventListener);
-        }
-        if (this.abortController.signal.aborted) {
-          return;
-        }
-        if (!partResult.ETag) {
-          throw new Error(`Part ${dataPart.partNumber} is missing ETag in UploadPart response. Missing Bucket CORS configuration for ETag header?`);
-        }
-        this.uploadedParts.push({
-          PartNumber: dataPart.partNumber,
-          ETag: partResult.ETag,
-          ...(partResult.ChecksumCRC32 && { ChecksumCRC32: partResult.ChecksumCRC32 }),
-          ...(partResult.ChecksumCRC32C && { ChecksumCRC32C: partResult.ChecksumCRC32C }),
-          ...(partResult.ChecksumSHA1 && { ChecksumSHA1: partResult.ChecksumSHA1 }),
-          ...(partResult.ChecksumSHA256 && { ChecksumSHA256: partResult.ChecksumSHA256 }),
-        });
-        if (eventEmitter === null) {
-          this.bytesUploadedSoFar += partSize;
-        }
-        this.__notifyProgress({
-          loaded: this.bytesUploadedSoFar,
-          total: this.totalBytes,
-          part: dataPart.partNumber,
-          Key: this.params.Key,
-          Bucket: this.params.Bucket,
-        });
-      }
-      catch (e) {
-        if (!this.uploadId) {
-          throw e;
-        }
-        if (this.leavePartsOnError) {
-          throw e;
-        }
-      }
-    }
-  }
-  async __doMultipartUpload() {
-    const dataFeeder = (0, chunker_1.getChunk)(this.params.Body, this.partSize);
-    for (let index = 0; index < this.queueSize; index++) {
-      const currentUpload = this.__doConcurrentUpload(dataFeeder);
-      this.concurrentUploaders.push(currentUpload);
-    }
-    await Promise.all(this.concurrentUploaders);
-    if (this.abortController.signal.aborted) {
-      throw Object.assign(new Error("Upload aborted."), { name: "AbortError" });
-    }
-    let result;
-    if (this.isMultiPart) {
-      this.uploadedParts.sort((a, b) => a.PartNumber - b.PartNumber);
-      const uploadCompleteParams = {
-        ...this.params,
-        Body: undefined,
-        UploadId: this.uploadId,
-        MultipartUpload: {
-          Parts: this.uploadedParts,
-        },
-      };
-      result = await this.client.send(new client_s3_1.CompleteMultipartUploadCommand(uploadCompleteParams));
-    }
-    else {
-      result = this.singleUploadResult;
-    }
-    if (this.tags.length) {
-      await this.client.send(new client_s3_1.PutObjectTaggingCommand({
-        ...this.params,
-        Tagging: {
-          TagSet: this.tags,
-        },
-      }));
-    }
-    return result;
-  }
-  __notifyProgress(progress) {
-    if (this.uploadEvent) {
-      this.emit(this.uploadEvent, progress);
-    }
-  }
-  async __abortTimeout(abortSignal) {
-    return new Promise((resolve, reject) => {
-      abortSignal.onabort = () => {
-        const abortError = new Error("Upload aborted.");
-        abortError.name = "AbortError";
-        reject(abortError);
-      };
-    });
-  }
-  __validateInput() {
-    if (!this.params) {
-      throw new Error(`InputError: Upload requires params to be passed to upload.`);
-    }
-    if (!this.client) {
-      throw new Error(`InputError: Upload requires a AWS client to do uploads with.`);
-    }
-    if (this.partSize < MIN_PART_SIZE) {
-      throw new Error(`EntityTooSmall: Your proposed upload partsize [${this.partSize}] is smaller than the minimum allowed size [${MIN_PART_SIZE}] (5MB)`);
-    }
-    if (this.queueSize < 1) {
-      throw new Error(`Queue size: Must have at least one uploading queue.`);
-    }
-  }
-}
-exports.Upload = Upload;
+module.exports = require("./index.js");
package/dist-cjs/bytelength.js
CHANGED
@@ -1,29 +1 @@
-
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.byteLength = void 0;
-const runtimeConfig_1 = require("./runtimeConfig");
-const byteLength = (input) => {
-  if (input === null || input === undefined)
-    return 0;
-  if (typeof input === "string")
-    input = Buffer.from(input);
-  if (typeof input.byteLength === "number") {
-    return input.byteLength;
-  }
-  else if (typeof input.length === "number") {
-    return input.length;
-  }
-  else if (typeof input.size === "number") {
-    return input.size;
-  }
-  else if (typeof input.path === "string") {
-    try {
-      return runtimeConfig_1.ClientDefaultValues.lstatSync(input.path).size;
-    }
-    catch (error) {
-      return undefined;
-    }
-  }
-  return undefined;
-};
-exports.byteLength = byteLength;
+module.exports = require("./index.js");
package/dist-cjs/chunker.js
CHANGED
@@ -1,30 +1 @@
-
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.getChunk = void 0;
-const buffer_1 = require("buffer");
-const stream_1 = require("stream");
-const getChunkBuffer_1 = require("./chunks/getChunkBuffer");
-const getChunkStream_1 = require("./chunks/getChunkStream");
-const getDataReadable_1 = require("./chunks/getDataReadable");
-const getDataReadableStream_1 = require("./chunks/getDataReadableStream");
-const getChunk = (data, partSize) => {
-  if (data instanceof buffer_1.Buffer) {
-    return (0, getChunkBuffer_1.getChunkBuffer)(data, partSize);
-  }
-  else if (data instanceof stream_1.Readable) {
-    return (0, getChunkStream_1.getChunkStream)(data, partSize, getDataReadable_1.getDataReadable);
-  }
-  else if (data instanceof String || typeof data === "string" || data instanceof Uint8Array) {
-    return (0, getChunkBuffer_1.getChunkBuffer)(buffer_1.Buffer.from(data), partSize);
-  }
-  if (typeof data.stream === "function") {
-    return (0, getChunkStream_1.getChunkStream)(data.stream(), partSize, getDataReadableStream_1.getDataReadableStream);
-  }
-  else if (data instanceof ReadableStream) {
-    return (0, getChunkStream_1.getChunkStream)(data, partSize, getDataReadableStream_1.getDataReadableStream);
-  }
-  else {
-    throw new Error("Body Data is unsupported format, expected data to be one of: string | Uint8Array | Buffer | Readable | ReadableStream | Blob;.");
-  }
-};
-exports.getChunk = getChunk;
+module.exports = require("./index.js");
package/dist-cjs/chunks/getChunkBuffer.js
CHANGED
@@ -1,23 +1 @@
-
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.getChunkBuffer = void 0;
-async function* getChunkBuffer(data, partSize) {
-  let partNumber = 1;
-  let startByte = 0;
-  let endByte = partSize;
-  while (endByte < data.byteLength) {
-    yield {
-      partNumber,
-      data: data.slice(startByte, endByte),
-    };
-    partNumber += 1;
-    startByte = endByte;
-    endByte = startByte + partSize;
-  }
-  yield {
-    partNumber,
-    data: data.slice(startByte),
-    lastPart: true,
-  };
-}
-exports.getChunkBuffer = getChunkBuffer;
+module.exports = require("../index.js");
package/dist-cjs/chunks/getChunkStream.js
CHANGED
@@ -1,28 +1 @@
-
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.getChunkStream = void 0;
-const buffer_1 = require("buffer");
-async function* getChunkStream(data, partSize, getNextData) {
-  let partNumber = 1;
-  const currentBuffer = { chunks: [], length: 0 };
-  for await (const datum of getNextData(data)) {
-    currentBuffer.chunks.push(datum);
-    currentBuffer.length += datum.length;
-    while (currentBuffer.length >= partSize) {
-      const dataChunk = currentBuffer.chunks.length > 1 ? buffer_1.Buffer.concat(currentBuffer.chunks) : currentBuffer.chunks[0];
-      yield {
-        partNumber,
-        data: dataChunk.slice(0, partSize),
-      };
-      currentBuffer.chunks = [dataChunk.slice(partSize)];
-      currentBuffer.length = currentBuffer.chunks[0].length;
-      partNumber += 1;
-    }
-  }
-  yield {
-    partNumber,
-    data: buffer_1.Buffer.concat(currentBuffer.chunks),
-    lastPart: true,
-  };
-}
-exports.getChunkStream = getChunkStream;
+module.exports = require("../index.js");
package/dist-cjs/chunks/getDataReadable.js
CHANGED
@@ -1,10 +1 @@
-
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.getDataReadable = void 0;
-const buffer_1 = require("buffer");
-async function* getDataReadable(data) {
-  for await (const chunk of data) {
-    yield buffer_1.Buffer.from(chunk);
-  }
-}
-exports.getDataReadable = getDataReadable;
+module.exports = require("../index.js");
package/dist-cjs/chunks/getDataReadableStream.js
CHANGED
@@ -1,22 +1 @@
-
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.getDataReadableStream = void 0;
-const buffer_1 = require("buffer");
-async function* getDataReadableStream(data) {
-  const reader = data.getReader();
-  try {
-    while (true) {
-      const { done, value } = await reader.read();
-      if (done)
-        return;
-      yield buffer_1.Buffer.from(value);
-    }
-  }
-  catch (e) {
-    throw e;
-  }
-  finally {
-    reader.releaseLock();
-  }
-}
-exports.getDataReadableStream = getDataReadableStream;
+module.exports = require("../index.js");
package/dist-cjs/index.js
CHANGED
@@ -1,5 +1,436 @@
-
-
-
-
-
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __name = (target, value) => __defProp(target, "name", { value, configurable: true });
+var __export = (target, all) => {
+  for (var name in all)
+    __defProp(target, name, { get: all[name], enumerable: true });
+};
+var __copyProps = (to, from, except, desc) => {
+  if (from && typeof from === "object" || typeof from === "function") {
+    for (let key of __getOwnPropNames(from))
+      if (!__hasOwnProp.call(to, key) && key !== except)
+        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+  }
+  return to;
+};
+var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+
+// src/index.ts
+var src_exports = {};
+__export(src_exports, {
+  Upload: () => Upload
+});
+module.exports = __toCommonJS(src_exports);
+
+// src/Upload.ts
+var import_client_s3 = require("@aws-sdk/client-s3");
+var import_abort_controller = require("@smithy/abort-controller");
+var import_middleware_endpoint = require("@smithy/middleware-endpoint");
+var import_smithy_client = require("@smithy/smithy-client");
+var import_events = require("events");
+
+// src/bytelength.ts
+var import_runtimeConfig = require("././runtimeConfig");
+var byteLength = /* @__PURE__ */ __name((input) => {
+  if (input === null || input === void 0)
+    return 0;
+  if (typeof input === "string")
+    input = Buffer.from(input);
+  if (typeof input.byteLength === "number") {
+    return input.byteLength;
+  } else if (typeof input.length === "number") {
+    return input.length;
+  } else if (typeof input.size === "number") {
+    return input.size;
+  } else if (typeof input.path === "string") {
+    try {
+      return import_runtimeConfig.ClientDefaultValues.lstatSync(input.path).size;
+    } catch (error) {
+      return void 0;
+    }
+  }
+  return void 0;
+}, "byteLength");
+
+// src/chunker.ts
+
+var import_stream = require("stream");
+
+// src/chunks/getChunkBuffer.ts
+async function* getChunkBuffer(data, partSize) {
+  let partNumber = 1;
+  let startByte = 0;
+  let endByte = partSize;
+  while (endByte < data.byteLength) {
+    yield {
+      partNumber,
+      data: data.slice(startByte, endByte)
+    };
+    partNumber += 1;
+    startByte = endByte;
+    endByte = startByte + partSize;
+  }
+  yield {
+    partNumber,
+    data: data.slice(startByte),
+    lastPart: true
+  };
+}
+__name(getChunkBuffer, "getChunkBuffer");
+
+// src/chunks/getChunkStream.ts
+var import_buffer = require("buffer");
+async function* getChunkStream(data, partSize, getNextData) {
+  let partNumber = 1;
+  const currentBuffer = { chunks: [], length: 0 };
+  for await (const datum of getNextData(data)) {
+    currentBuffer.chunks.push(datum);
+    currentBuffer.length += datum.length;
+    while (currentBuffer.length >= partSize) {
+      const dataChunk = currentBuffer.chunks.length > 1 ? import_buffer.Buffer.concat(currentBuffer.chunks) : currentBuffer.chunks[0];
+      yield {
+        partNumber,
+        data: dataChunk.slice(0, partSize)
+      };
+      currentBuffer.chunks = [dataChunk.slice(partSize)];
+      currentBuffer.length = currentBuffer.chunks[0].length;
+      partNumber += 1;
+    }
+  }
+  yield {
+    partNumber,
+    data: import_buffer.Buffer.concat(currentBuffer.chunks),
+    lastPart: true
+  };
+}
+__name(getChunkStream, "getChunkStream");
+
+// src/chunks/getDataReadable.ts
+
+async function* getDataReadable(data) {
+  for await (const chunk of data) {
+    yield import_buffer.Buffer.from(chunk);
+  }
+}
+__name(getDataReadable, "getDataReadable");
+
+// src/chunks/getDataReadableStream.ts
+
+async function* getDataReadableStream(data) {
+  const reader = data.getReader();
+  try {
+    while (true) {
+      const { done, value } = await reader.read();
+      if (done)
+        return;
+      yield import_buffer.Buffer.from(value);
+    }
+  } catch (e) {
+    throw e;
+  } finally {
+    reader.releaseLock();
+  }
+}
+__name(getDataReadableStream, "getDataReadableStream");
+
+// src/chunker.ts
+var getChunk = /* @__PURE__ */ __name((data, partSize) => {
+  if (data instanceof import_buffer.Buffer) {
+    return getChunkBuffer(data, partSize);
+  } else if (data instanceof import_stream.Readable) {
+    return getChunkStream(data, partSize, getDataReadable);
+  } else if (data instanceof String || typeof data === "string" || data instanceof Uint8Array) {
+    return getChunkBuffer(import_buffer.Buffer.from(data), partSize);
+  }
+  if (typeof data.stream === "function") {
+    return getChunkStream(data.stream(), partSize, getDataReadableStream);
+  } else if (data instanceof ReadableStream) {
+    return getChunkStream(data, partSize, getDataReadableStream);
+  } else {
+    throw new Error(
+      "Body Data is unsupported format, expected data to be one of: string | Uint8Array | Buffer | Readable | ReadableStream | Blob;."
+    );
+  }
+}, "getChunk");
+
+// src/Upload.ts
+var MIN_PART_SIZE = 1024 * 1024 * 5;
+var _Upload = class _Upload extends import_events.EventEmitter {
+  constructor(options) {
+    super();
+    /**
+     * S3 multipart upload does not allow more than 10000 parts.
+     */
+    this.MAX_PARTS = 1e4;
+    // Defaults.
+    this.queueSize = 4;
+    this.partSize = MIN_PART_SIZE;
+    this.leavePartsOnError = false;
+    this.tags = [];
+    this.concurrentUploaders = [];
+    this.uploadedParts = [];
+    this.isMultiPart = true;
+    this.queueSize = options.queueSize || this.queueSize;
+    this.partSize = options.partSize || this.partSize;
+    this.leavePartsOnError = options.leavePartsOnError || this.leavePartsOnError;
+    this.tags = options.tags || this.tags;
+    this.client = options.client;
+    this.params = options.params;
+    this.__validateInput();
+    this.totalBytes = byteLength(this.params.Body);
+    this.bytesUploadedSoFar = 0;
+    this.abortController = options.abortController ?? new import_abort_controller.AbortController();
+  }
+  async abort() {
+    this.abortController.abort();
+  }
+  async done() {
+    return await Promise.race([this.__doMultipartUpload(), this.__abortTimeout(this.abortController.signal)]);
+  }
+  on(event, listener) {
+    this.uploadEvent = event;
+    return super.on(event, listener);
+  }
+  async __uploadUsingPut(dataPart) {
+    var _a;
+    this.isMultiPart = false;
+    const params = { ...this.params, Body: dataPart.data };
+    const clientConfig = this.client.config;
+    const requestHandler = clientConfig.requestHandler;
+    const eventEmitter = requestHandler instanceof import_events.EventEmitter ? requestHandler : null;
+    const uploadEventListener = /* @__PURE__ */ __name((event) => {
+      this.bytesUploadedSoFar = event.loaded;
+      this.totalBytes = event.total;
+      this.__notifyProgress({
+        loaded: this.bytesUploadedSoFar,
+        total: this.totalBytes,
+        part: dataPart.partNumber,
+        Key: this.params.Key,
+        Bucket: this.params.Bucket
+      });
+    }, "uploadEventListener");
+    if (eventEmitter !== null) {
+      eventEmitter.on("xhr.upload.progress", uploadEventListener);
+    }
+    const resolved = await Promise.all([this.client.send(new import_client_s3.PutObjectCommand(params)), (_a = clientConfig == null ? void 0 : clientConfig.endpoint) == null ? void 0 : _a.call(clientConfig)]);
+    const putResult = resolved[0];
+    let endpoint = resolved[1];
+    if (!endpoint) {
+      endpoint = (0, import_middleware_endpoint.toEndpointV1)(
+        await (0, import_middleware_endpoint.getEndpointFromInstructions)(params, import_client_s3.PutObjectCommand, {
+          ...clientConfig
+        })
+      );
+    }
+    if (!endpoint) {
+      throw new Error('Could not resolve endpoint from S3 "client.config.endpoint()" nor EndpointsV2.');
+    }
+    if (eventEmitter !== null) {
+      eventEmitter.off("xhr.upload.progress", uploadEventListener);
+    }
+    const locationKey = this.params.Key.split("/").map((segment) => (0, import_smithy_client.extendedEncodeURIComponent)(segment)).join("/");
+    const locationBucket = (0, import_smithy_client.extendedEncodeURIComponent)(this.params.Bucket);
+    const Location = (() => {
+      const endpointHostnameIncludesBucket = endpoint.hostname.startsWith(`${locationBucket}.`);
+      const forcePathStyle = this.client.config.forcePathStyle;
+      if (forcePathStyle) {
+        return `${endpoint.protocol}//${endpoint.hostname}/${locationBucket}/${locationKey}`;
+      }
+      if (endpointHostnameIncludesBucket) {
+        return `${endpoint.protocol}//${endpoint.hostname}/${locationKey}`;
+      }
+      return `${endpoint.protocol}//${locationBucket}.${endpoint.hostname}/${locationKey}`;
+    })();
+    this.singleUploadResult = {
+      ...putResult,
+      Bucket: this.params.Bucket,
+      Key: this.params.Key,
+      Location
+    };
+    const totalSize = byteLength(dataPart.data);
+    this.__notifyProgress({
+      loaded: totalSize,
+      total: totalSize,
+      part: 1,
+      Key: this.params.Key,
+      Bucket: this.params.Bucket
+    });
+  }
+  async __createMultipartUpload() {
+    if (!this.createMultiPartPromise) {
+      const createCommandParams = { ...this.params, Body: void 0 };
+      this.createMultiPartPromise = this.client.send(new import_client_s3.CreateMultipartUploadCommand(createCommandParams));
+    }
+    return this.createMultiPartPromise;
+  }
+  async __doConcurrentUpload(dataFeeder) {
+    for await (const dataPart of dataFeeder) {
+      if (this.uploadedParts.length > this.MAX_PARTS) {
+        throw new Error(
+          `Exceeded ${this.MAX_PARTS} as part of the upload to ${this.params.Key} and ${this.params.Bucket}.`
+        );
+      }
+      try {
+        if (this.abortController.signal.aborted) {
+          return;
+        }
+        if (dataPart.partNumber === 1 && dataPart.lastPart) {
+          return await this.__uploadUsingPut(dataPart);
+        }
+        if (!this.uploadId) {
+          const { UploadId } = await this.__createMultipartUpload();
+          this.uploadId = UploadId;
+          if (this.abortController.signal.aborted) {
+            return;
+          }
+        }
+        const partSize = byteLength(dataPart.data) || 0;
+        const requestHandler = this.client.config.requestHandler;
+        const eventEmitter = requestHandler instanceof import_events.EventEmitter ? requestHandler : null;
+        let lastSeenBytes = 0;
+        const uploadEventListener = /* @__PURE__ */ __name((event, request) => {
+          const requestPartSize = Number(request.query["partNumber"]) || -1;
+          if (requestPartSize !== dataPart.partNumber) {
+            return;
+          }
+          if (event.total && partSize) {
+            this.bytesUploadedSoFar += event.loaded - lastSeenBytes;
+            lastSeenBytes = event.loaded;
+          }
+          this.__notifyProgress({
+            loaded: this.bytesUploadedSoFar,
+            total: this.totalBytes,
+            part: dataPart.partNumber,
+            Key: this.params.Key,
+            Bucket: this.params.Bucket
+          });
+        }, "uploadEventListener");
+        if (eventEmitter !== null) {
+          eventEmitter.on("xhr.upload.progress", uploadEventListener);
+        }
+        const partResult = await this.client.send(
+          new import_client_s3.UploadPartCommand({
+            ...this.params,
+            UploadId: this.uploadId,
+            Body: dataPart.data,
+            PartNumber: dataPart.partNumber
+          })
+        );
+        if (eventEmitter !== null) {
+          eventEmitter.off("xhr.upload.progress", uploadEventListener);
+        }
+        if (this.abortController.signal.aborted) {
+          return;
+        }
+        if (!partResult.ETag) {
+          throw new Error(
+            `Part ${dataPart.partNumber} is missing ETag in UploadPart response. Missing Bucket CORS configuration for ETag header?`
+          );
+        }
+        this.uploadedParts.push({
+          PartNumber: dataPart.partNumber,
+          ETag: partResult.ETag,
+          ...partResult.ChecksumCRC32 && { ChecksumCRC32: partResult.ChecksumCRC32 },
+          ...partResult.ChecksumCRC32C && { ChecksumCRC32C: partResult.ChecksumCRC32C },
+          ...partResult.ChecksumSHA1 && { ChecksumSHA1: partResult.ChecksumSHA1 },
+          ...partResult.ChecksumSHA256 && { ChecksumSHA256: partResult.ChecksumSHA256 }
+        });
+        if (eventEmitter === null) {
+          this.bytesUploadedSoFar += partSize;
+        }
+        this.__notifyProgress({
+          loaded: this.bytesUploadedSoFar,
+          total: this.totalBytes,
+          part: dataPart.partNumber,
+          Key: this.params.Key,
+          Bucket: this.params.Bucket
+        });
+      } catch (e) {
+        if (!this.uploadId) {
+          throw e;
+        }
+        if (this.leavePartsOnError) {
+          throw e;
+        }
+      }
+    }
+  }
+  async __doMultipartUpload() {
+    const dataFeeder = getChunk(this.params.Body, this.partSize);
+    for (let index = 0; index < this.queueSize; index++) {
+      const currentUpload = this.__doConcurrentUpload(dataFeeder);
+      this.concurrentUploaders.push(currentUpload);
+    }
+    await Promise.all(this.concurrentUploaders);
+    if (this.abortController.signal.aborted) {
+      throw Object.assign(new Error("Upload aborted."), { name: "AbortError" });
+    }
+    let result;
+    if (this.isMultiPart) {
+      this.uploadedParts.sort((a, b) => a.PartNumber - b.PartNumber);
+      const uploadCompleteParams = {
+        ...this.params,
+        Body: void 0,
+        UploadId: this.uploadId,
+        MultipartUpload: {
+          Parts: this.uploadedParts
+        }
+      };
+      result = await this.client.send(new import_client_s3.CompleteMultipartUploadCommand(uploadCompleteParams));
+      if (typeof (result == null ? void 0 : result.Location) === "string" && result.Location.includes("%2F")) {
+        result.Location = result.Location.replace(/%2F/g, "/");
+      }
+    } else {
+      result = this.singleUploadResult;
+    }
+    if (this.tags.length) {
+      await this.client.send(
+        new import_client_s3.PutObjectTaggingCommand({
+          ...this.params,
+          Tagging: {
+            TagSet: this.tags
+          }
+        })
+      );
+    }
+    return result;
+  }
+  __notifyProgress(progress) {
+    if (this.uploadEvent) {
+      this.emit(this.uploadEvent, progress);
+    }
+  }
+  async __abortTimeout(abortSignal) {
+    return new Promise((resolve, reject) => {
+      abortSignal.onabort = () => {
+        const abortError = new Error("Upload aborted.");
+        abortError.name = "AbortError";
+        reject(abortError);
+      };
+    });
+  }
+  __validateInput() {
+    if (!this.params) {
+      throw new Error(`InputError: Upload requires params to be passed to upload.`);
+    }
+    if (!this.client) {
+      throw new Error(`InputError: Upload requires a AWS client to do uploads with.`);
+    }
+    if (this.partSize < MIN_PART_SIZE) {
+      throw new Error(
+        `EntityTooSmall: Your proposed upload partsize [${this.partSize}] is smaller than the minimum allowed size [${MIN_PART_SIZE}] (5MB)`
+      );
+    }
+    if (this.queueSize < 1) {
+      throw new Error(`Queue size: Must have at least one uploading queue.`);
+    }
+  }
+};
+__name(_Upload, "Upload");
+var Upload = _Upload;
+// Annotate the CommonJS export names for ESM import in node:
+0 && (module.exports = {
+  Upload
+});
package/dist-cjs/types.js
CHANGED
@@ -1,2 +1 @@
-
-Object.defineProperty(exports, "__esModule", { value: true });
+module.exports = require("./index.js");
package/dist-es/Upload.js
CHANGED
@@ -219,6 +219,9 @@ export class Upload extends EventEmitter {
         },
       };
       result = await this.client.send(new CompleteMultipartUploadCommand(uploadCompleteParams));
+      if (typeof result?.Location === "string" && result.Location.includes("%2F")) {
+        result.Location = result.Location.replace(/%2F/g, "/");
+      }
     }
     else {
       result = this.singleUploadResult;
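
The one behavioral change in this release shows up both in the bundled dist-cjs/index.js above and in the dist-es hunk here: when a multipart upload completes, any `%2F` left in the `Location` field of the CompleteMultipartUpload result is decoded back to `/`. A minimal usage sketch (bucket, key, and body are hypothetical; it needs valid credentials and an existing bucket):

```js
const { S3Client } = require("@aws-sdk/client-s3");
const { Upload } = require("@aws-sdk/lib-storage");

async function main() {
  const upload = new Upload({
    client: new S3Client({}),
    // A 6 MiB body exceeds the 5 MiB minimum part size, so this goes through
    // the multipart path and ends with CompleteMultipartUpload.
    params: { Bucket: "example-bucket", Key: "nested/path/object.bin", Body: Buffer.alloc(6 * 1024 * 1024) },
  });
  upload.on("httpUploadProgress", (progress) => console.log(progress));
  const { Location } = await upload.done();
  // With 3.495.0 this logs ".../nested/path/object.bin";
  // with 3.489.0 it could log ".../nested%2Fpath%2Fobject.bin".
  console.log(Location);
}

main().catch(console.error);
```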
package/package.json
CHANGED
@@ -1,13 +1,13 @@
 {
   "name": "@aws-sdk/lib-storage",
-  "version": "3.489.0",
+  "version": "3.495.0",
   "description": "Storage higher order operation",
   "main": "./dist-cjs/index.js",
   "module": "./dist-es/index.js",
   "types": "./dist-types/index.d.ts",
   "scripts": {
     "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'",
-    "build:cjs": "
+    "build:cjs": "node ../../scripts/compilation/inline lib-storage",
     "build:es": "tsc -p tsconfig.es.json",
     "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build",
     "build:types": "tsc -p tsconfig.types.json",
@@ -25,9 +25,9 @@
   },
   "license": "Apache-2.0",
   "dependencies": {
-    "@smithy/abort-controller": "^2.0
-    "@smithy/middleware-endpoint": "^2.
-    "@smithy/smithy-client": "^2.
+    "@smithy/abort-controller": "^2.1.0",
+    "@smithy/middleware-endpoint": "^2.4.0",
+    "@smithy/smithy-client": "^2.3.0",
     "buffer": "5.6.0",
     "events": "3.3.0",
     "stream-browserify": "3.0.0",
@@ -37,8 +37,8 @@
     "@aws-sdk/client-s3": "^3.0.0"
   },
   "devDependencies": {
-    "@aws-sdk/client-s3": "3.
-    "@smithy/types": "^2.
+    "@aws-sdk/client-s3": "3.495.0",
+    "@smithy/types": "^2.9.0",
     "@tsconfig/recommended": "1.0.1",
     "@types/node": "^14.14.31",
     "concurrently": "7.0.0",