@aws-amplify/storage 4.4.3 → 4.4.4-in-app-messaging.35
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +28 -352
- package/dist/aws-amplify-storage.js +6523 -5062
- package/dist/aws-amplify-storage.js.map +1 -1
- package/dist/aws-amplify-storage.min.js +5 -5
- package/dist/aws-amplify-storage.min.js.map +1 -1
- package/lib/Storage.d.ts +3 -1
- package/lib/Storage.js +25 -21
- package/lib/Storage.js.map +1 -1
- package/lib/common/S3ClientUtils.d.ts +33 -0
- package/lib/common/S3ClientUtils.js +179 -0
- package/lib/common/S3ClientUtils.js.map +1 -0
- package/lib/common/StorageConstants.d.ts +4 -0
- package/lib/common/StorageConstants.js +10 -0
- package/lib/common/StorageConstants.js.map +1 -0
- package/lib/common/StorageErrorStrings.d.ts +5 -1
- package/lib/common/StorageErrorStrings.js +5 -0
- package/lib/common/StorageErrorStrings.js.map +1 -1
- package/lib/common/StorageUtils.d.ts +4 -0
- package/lib/common/StorageUtils.js +41 -0
- package/lib/common/StorageUtils.js.map +1 -0
- package/lib/providers/AWSS3Provider.d.ts +9 -6
- package/lib/providers/AWSS3Provider.js +197 -149
- package/lib/providers/AWSS3Provider.js.map +1 -1
- package/lib/providers/AWSS3ProviderManagedUpload.d.ts +3 -10
- package/lib/providers/AWSS3ProviderManagedUpload.js +41 -109
- package/lib/providers/AWSS3ProviderManagedUpload.js.map +1 -1
- package/lib/providers/AWSS3UploadTask.d.ts +107 -0
- package/lib/providers/AWSS3UploadTask.js +610 -0
- package/lib/providers/AWSS3UploadTask.js.map +1 -0
- package/lib/providers/axios-http-handler.d.ts +5 -1
- package/lib/providers/axios-http-handler.js +28 -5
- package/lib/providers/axios-http-handler.js.map +1 -1
- package/lib/types/AWSS3Provider.d.ts +24 -4
- package/lib/types/Provider.d.ts +7 -1
- package/lib/types/Storage.d.ts +8 -8
- package/lib-esm/Storage.d.ts +3 -1
- package/lib-esm/Storage.js +25 -21
- package/lib-esm/Storage.js.map +1 -1
- package/lib-esm/common/S3ClientUtils.d.ts +33 -0
- package/lib-esm/common/S3ClientUtils.js +177 -0
- package/lib-esm/common/S3ClientUtils.js.map +1 -0
- package/lib-esm/common/StorageConstants.d.ts +4 -0
- package/lib-esm/common/StorageConstants.js +8 -0
- package/lib-esm/common/StorageConstants.js.map +1 -0
- package/lib-esm/common/StorageErrorStrings.d.ts +5 -1
- package/lib-esm/common/StorageErrorStrings.js +5 -0
- package/lib-esm/common/StorageErrorStrings.js.map +1 -1
- package/lib-esm/common/StorageUtils.d.ts +4 -0
- package/lib-esm/common/StorageUtils.js +39 -0
- package/lib-esm/common/StorageUtils.js.map +1 -0
- package/lib-esm/providers/AWSS3Provider.d.ts +9 -6
- package/lib-esm/providers/AWSS3Provider.js +192 -144
- package/lib-esm/providers/AWSS3Provider.js.map +1 -1
- package/lib-esm/providers/AWSS3ProviderManagedUpload.d.ts +3 -10
- package/lib-esm/providers/AWSS3ProviderManagedUpload.js +44 -112
- package/lib-esm/providers/AWSS3ProviderManagedUpload.js.map +1 -1
- package/lib-esm/providers/AWSS3UploadTask.d.ts +107 -0
- package/lib-esm/providers/AWSS3UploadTask.js +605 -0
- package/lib-esm/providers/AWSS3UploadTask.js.map +1 -0
- package/lib-esm/providers/axios-http-handler.d.ts +5 -1
- package/lib-esm/providers/axios-http-handler.js +28 -5
- package/lib-esm/providers/axios-http-handler.js.map +1 -1
- package/lib-esm/types/AWSS3Provider.d.ts +24 -4
- package/lib-esm/types/Provider.d.ts +7 -1
- package/lib-esm/types/Storage.d.ts +8 -8
- package/package.json +3 -3
- package/src/Storage.ts +85 -27
- package/src/common/S3ClientUtils.ts +168 -0
- package/src/common/StorageConstants.ts +10 -0
- package/src/common/StorageErrorStrings.ts +5 -0
- package/src/common/StorageUtils.ts +51 -0
- package/src/providers/AWSS3Provider.ts +251 -88
- package/src/providers/AWSS3ProviderManagedUpload.ts +346 -397
- package/src/providers/AWSS3UploadTask.ts +543 -0
- package/src/providers/axios-http-handler.ts +221 -186
- package/src/types/AWSS3Provider.ts +48 -7
- package/src/types/Provider.ts +18 -3
- package/src/types/Storage.ts +9 -9
|
@@ -0,0 +1,543 @@
|
|
|
1
|
+
import {
|
|
2
|
+
UploadPartCommandInput,
|
|
3
|
+
CompletedPart,
|
|
4
|
+
S3Client,
|
|
5
|
+
UploadPartCommand,
|
|
6
|
+
CompleteMultipartUploadCommand,
|
|
7
|
+
Part,
|
|
8
|
+
AbortMultipartUploadCommand,
|
|
9
|
+
ListPartsCommand,
|
|
10
|
+
CreateMultipartUploadCommand,
|
|
11
|
+
PutObjectCommandInput,
|
|
12
|
+
ListObjectsV2Command,
|
|
13
|
+
} from '@aws-sdk/client-s3';
|
|
14
|
+
import * as events from 'events';
|
|
15
|
+
import axios, { Canceler, CancelTokenSource } from 'axios';
|
|
16
|
+
import { HttpHandlerOptions } from '@aws-sdk/types';
|
|
17
|
+
import { Logger } from '@aws-amplify/core';
|
|
18
|
+
import { UploadTask } from '../types/Provider';
|
|
19
|
+
import { byteLength, isFile } from '../common/StorageUtils';
|
|
20
|
+
import { AWSS3ProviderUploadErrorStrings } from '../common/StorageErrorStrings';
|
|
21
|
+
import {
|
|
22
|
+
SET_CONTENT_LENGTH_HEADER,
|
|
23
|
+
UPLOADS_STORAGE_KEY,
|
|
24
|
+
} from '../common/StorageConstants';
|
|
25
|
+
import { StorageAccessLevel } from '..';
|
|
26
|
+
|
|
27
|
+
// Module-scoped logger; tag matches the file/class name for traceability.
const logger = new Logger('AWSS3UploadTask');
|
|
28
|
+
// Lifecycle states of an upload task. Numeric enum with implicit values
// (INIT = 0, ...); do not reorder members, persisted comparisons rely on them.
export enum AWSS3UploadTaskState {
	INIT,
	IN_PROGRESS,
	PAUSED,
	CANCELLED,
	COMPLETED,
}
|
|
35
|
+
|
|
36
|
+
// Names of the events emitted on the task's EventEmitter. The string values
// are part of the public contract — listeners subscribe by these names.
export enum TaskEvents {
	CANCEL = 'cancel',
	UPLOAD_COMPLETE = 'uploadComplete',
	UPLOAD_PROGRESS = 'uploadPartProgress',
	ERROR = 'error',
}
|
|
42
|
+
|
|
43
|
+
// Constructor inputs for AWSS3UploadTask.
export interface AWSS3UploadTaskParams {
	// pre-configured client used for all multipart S3 commands
	s3Client: S3Client;
	file: Blob;
	// Web-Storage-like store that persists resumable-upload metadata
	storage: Storage;
	level: StorageAccessLevel;
	// PutObject-style input; Bucket/Key are reused for every part request
	params: PutObjectCommandInput;
	// resolves to the access-level prefix prepended to the object key on S3
	prefixPromise: Promise<string>;
	// emitter on which TaskEvents are published
	emitter?: events.EventEmitter;
}
|
|
52
|
+
|
|
53
|
+
// Bookkeeping record for a single part request currently in flight.
export interface InProgressRequest {
	uploadPartInput: UploadPartCommandInput;
	// pending send() promise for this part
	s3Request: Promise<any>;
	// axios canceler that aborts the underlying HTTP request (used by pause)
	cancel: Canceler;
}
|
|
58
|
+
|
|
59
|
+
// Payload of the TaskEvents.UPLOAD_COMPLETE event.
export interface UploadTaskCompleteEvent {
	// formatted as `${Bucket}/${Key}` of the completed object
	key: string;
}
|
|
62
|
+
|
|
63
|
+
// Payload of the TaskEvents.UPLOAD_PROGRESS event.
export interface UploadTaskProgressEvent {
	/**
	 * bytes that has been sent to S3 so far
	 */
	loaded: number;
	/**
	 * total bytes that needs to be sent to S3
	 */
	total: number;
}
|
|
73
|
+
|
|
74
|
+
// Per-file record persisted (JSON-serialized) in storage under
// UPLOADS_STORAGE_KEY so an interrupted multipart upload can be resumed.
export interface FileMetadata {
	bucket: string;
	fileName: string;
	key: string;
	// Unix timestamp in ms
	lastTouched: number;
	// S3 multipart UploadId to resume against
	uploadId: string;
}
|
|
82
|
+
|
|
83
|
+
// maximum number of parts per upload request according the S3 spec,
// see: https://docs.aws.amazon.com/AmazonS3/latest/userguide/qfacts.html
const MAX_PARTS = 10000;
// 5MB in bytes — size of each uploaded chunk
const PART_SIZE = 5 * 1024 * 1024;
// number of part requests kept in flight concurrently
const DEFAULT_QUEUE_SIZE = 4;
|
|
89
|
+
|
|
90
|
+
function comparePartNumber(a: CompletedPart, b: CompletedPart) {
|
|
91
|
+
return a.PartNumber - b.PartNumber;
|
|
92
|
+
}
|
|
93
|
+
|
|
94
|
+
export class AWSS3UploadTask implements UploadTask {
|
|
95
|
+
private readonly emitter: events.EventEmitter;
|
|
96
|
+
private readonly file: Blob;
|
|
97
|
+
private readonly partSize: number = PART_SIZE;
|
|
98
|
+
private readonly queueSize = DEFAULT_QUEUE_SIZE;
|
|
99
|
+
private readonly s3client: S3Client;
|
|
100
|
+
private readonly storage: Storage;
|
|
101
|
+
private readonly storageSync: Promise<any>;
|
|
102
|
+
private readonly fileId: string;
|
|
103
|
+
private readonly params: PutObjectCommandInput;
|
|
104
|
+
private readonly prefixPromise: Promise<string>;
|
|
105
|
+
private inProgress: InProgressRequest[] = [];
|
|
106
|
+
private completedParts: CompletedPart[] = [];
|
|
107
|
+
private queued: UploadPartCommandInput[] = [];
|
|
108
|
+
private bytesUploaded: number = 0;
|
|
109
|
+
private totalBytes: number = 0;
|
|
110
|
+
private uploadId: string;
|
|
111
|
+
|
|
112
|
+
public state: AWSS3UploadTaskState = AWSS3UploadTaskState.INIT;
|
|
113
|
+
|
|
114
|
+
constructor({
|
|
115
|
+
s3Client,
|
|
116
|
+
file,
|
|
117
|
+
emitter,
|
|
118
|
+
storage,
|
|
119
|
+
params,
|
|
120
|
+
level,
|
|
121
|
+
prefixPromise,
|
|
122
|
+
}: AWSS3UploadTaskParams) {
|
|
123
|
+
this.prefixPromise = prefixPromise;
|
|
124
|
+
this.s3client = s3Client;
|
|
125
|
+
this.s3client.middlewareStack.remove(SET_CONTENT_LENGTH_HEADER);
|
|
126
|
+
this.storage = storage;
|
|
127
|
+
this.storageSync = Promise.resolve();
|
|
128
|
+
if (typeof this.storage['sync'] === 'function') {
|
|
129
|
+
this.storageSync = this.storage['sync']();
|
|
130
|
+
}
|
|
131
|
+
this.params = params;
|
|
132
|
+
this.file = file;
|
|
133
|
+
this.totalBytes = this.file.size;
|
|
134
|
+
this.bytesUploaded = 0;
|
|
135
|
+
this.emitter = emitter;
|
|
136
|
+
this.queued = [];
|
|
137
|
+
this.fileId = this._getFileId(level);
|
|
138
|
+
this._validateParams();
|
|
139
|
+
// event emitter will re-throw an error if an event emits an error unless there's a listener, attaching a no-op
|
|
140
|
+
// function to it unless user adds their own onError callback
|
|
141
|
+
this.emitter.on(TaskEvents.ERROR, () => {});
|
|
142
|
+
}
|
|
143
|
+
|
|
144
|
+
get percent() {
|
|
145
|
+
return (this.bytesUploaded / this.totalBytes) * 100;
|
|
146
|
+
}
|
|
147
|
+
|
|
148
|
+
get isInProgress() {
|
|
149
|
+
return this.state === AWSS3UploadTaskState.IN_PROGRESS;
|
|
150
|
+
}
|
|
151
|
+
|
|
152
|
+
private async _listSingleFile({
|
|
153
|
+
key,
|
|
154
|
+
bucket,
|
|
155
|
+
}: {
|
|
156
|
+
key: string;
|
|
157
|
+
bucket: string;
|
|
158
|
+
}) {
|
|
159
|
+
const listObjectRes = await this.s3client.send(
|
|
160
|
+
new ListObjectsV2Command({
|
|
161
|
+
Bucket: bucket,
|
|
162
|
+
Prefix: key,
|
|
163
|
+
})
|
|
164
|
+
);
|
|
165
|
+
const { Contents = [] } = listObjectRes;
|
|
166
|
+
const prefix = await this.prefixPromise;
|
|
167
|
+
const obj = Contents.find(o => o.Key === `${prefix}${key}`);
|
|
168
|
+
return obj;
|
|
169
|
+
}
|
|
170
|
+
|
|
171
|
+
private _getFileId(level: StorageAccessLevel): string {
|
|
172
|
+
// We should check if it's a File first because File is also instance of a Blob
|
|
173
|
+
if (isFile(this.file)) {
|
|
174
|
+
return [
|
|
175
|
+
this.file.name,
|
|
176
|
+
this.file.lastModified,
|
|
177
|
+
this.file.size,
|
|
178
|
+
this.file.type,
|
|
179
|
+
this.params.Bucket,
|
|
180
|
+
level,
|
|
181
|
+
this.params.Key,
|
|
182
|
+
].join('-');
|
|
183
|
+
} else {
|
|
184
|
+
return [
|
|
185
|
+
this.file.size,
|
|
186
|
+
this.file.type,
|
|
187
|
+
this.params.Bucket,
|
|
188
|
+
level,
|
|
189
|
+
this.params.Key,
|
|
190
|
+
].join('-');
|
|
191
|
+
}
|
|
192
|
+
}
|
|
193
|
+
|
|
194
|
+
private async _findCachedUploadParts(): Promise<{
|
|
195
|
+
parts: Part[];
|
|
196
|
+
uploadId: string;
|
|
197
|
+
}> {
|
|
198
|
+
const uploadRequests = await this._listCachedUploadTasks();
|
|
199
|
+
|
|
200
|
+
if (
|
|
201
|
+
Object.keys(uploadRequests).length === 0 ||
|
|
202
|
+
!Object.prototype.hasOwnProperty.call(uploadRequests, this.fileId)
|
|
203
|
+
) {
|
|
204
|
+
return { parts: [], uploadId: null };
|
|
205
|
+
}
|
|
206
|
+
|
|
207
|
+
const cachedUploadFileData = uploadRequests[this.fileId];
|
|
208
|
+
cachedUploadFileData.lastTouched = Date.now();
|
|
209
|
+
this.storage.setItem(UPLOADS_STORAGE_KEY, JSON.stringify(uploadRequests));
|
|
210
|
+
|
|
211
|
+
const listPartsOutput = await this.s3client.send(
|
|
212
|
+
new ListPartsCommand({
|
|
213
|
+
Bucket: this.params.Bucket,
|
|
214
|
+
Key: this.params.Key,
|
|
215
|
+
UploadId: cachedUploadFileData.uploadId,
|
|
216
|
+
})
|
|
217
|
+
);
|
|
218
|
+
|
|
219
|
+
return {
|
|
220
|
+
parts: listPartsOutput.Parts || [],
|
|
221
|
+
uploadId: cachedUploadFileData.uploadId,
|
|
222
|
+
};
|
|
223
|
+
}
|
|
224
|
+
|
|
225
|
+
private _emitEvent<T = any>(event: string, payload: T) {
|
|
226
|
+
this.emitter.emit(event, payload);
|
|
227
|
+
}
|
|
228
|
+
|
|
229
|
+
private _validateParams() {
|
|
230
|
+
if (this.file.size / this.partSize > MAX_PARTS) {
|
|
231
|
+
throw new Error(
|
|
232
|
+
`Too many parts. Number of parts is ${this.file.size /
|
|
233
|
+
this.partSize}, maximum is ${MAX_PARTS}.`
|
|
234
|
+
);
|
|
235
|
+
}
|
|
236
|
+
}
|
|
237
|
+
|
|
238
|
+
private async _listCachedUploadTasks(): Promise<
|
|
239
|
+
Record<string, FileMetadata>
|
|
240
|
+
> {
|
|
241
|
+
await this.storageSync;
|
|
242
|
+
const tasks = this.storage.getItem(UPLOADS_STORAGE_KEY) || '{}';
|
|
243
|
+
return JSON.parse(tasks);
|
|
244
|
+
}
|
|
245
|
+
|
|
246
|
+
private async _cache(fileMetadata: FileMetadata): Promise<void> {
|
|
247
|
+
const uploadRequests = await this._listCachedUploadTasks();
|
|
248
|
+
uploadRequests[this.fileId] = fileMetadata;
|
|
249
|
+
this.storage.setItem(UPLOADS_STORAGE_KEY, JSON.stringify(uploadRequests));
|
|
250
|
+
}
|
|
251
|
+
|
|
252
|
+
private async _isCached(): Promise<boolean> {
|
|
253
|
+
return Object.prototype.hasOwnProperty.call(
|
|
254
|
+
await this._listCachedUploadTasks(),
|
|
255
|
+
this.fileId
|
|
256
|
+
);
|
|
257
|
+
}
|
|
258
|
+
|
|
259
|
+
private async _removeFromCache(): Promise<void> {
|
|
260
|
+
const uploadRequests = await this._listCachedUploadTasks();
|
|
261
|
+
delete uploadRequests[this.fileId];
|
|
262
|
+
this.storage.setItem(UPLOADS_STORAGE_KEY, JSON.stringify(uploadRequests));
|
|
263
|
+
}
|
|
264
|
+
|
|
265
|
+
private async _onPartUploadCompletion({
|
|
266
|
+
eTag,
|
|
267
|
+
partNumber,
|
|
268
|
+
chunk,
|
|
269
|
+
}: {
|
|
270
|
+
eTag: string;
|
|
271
|
+
partNumber: number;
|
|
272
|
+
chunk: UploadPartCommandInput['Body'];
|
|
273
|
+
}) {
|
|
274
|
+
this.completedParts.push({
|
|
275
|
+
ETag: eTag,
|
|
276
|
+
PartNumber: partNumber,
|
|
277
|
+
});
|
|
278
|
+
this.bytesUploaded += byteLength(chunk);
|
|
279
|
+
this._emitEvent<UploadTaskProgressEvent>(TaskEvents.UPLOAD_PROGRESS, {
|
|
280
|
+
loaded: this.bytesUploaded,
|
|
281
|
+
total: this.totalBytes,
|
|
282
|
+
});
|
|
283
|
+
// Remove the completed item from the inProgress array
|
|
284
|
+
this.inProgress = this.inProgress.filter(
|
|
285
|
+
job => job.uploadPartInput.PartNumber !== partNumber
|
|
286
|
+
);
|
|
287
|
+
if (this.queued.length && this.state !== AWSS3UploadTaskState.PAUSED)
|
|
288
|
+
this._startNextPart();
|
|
289
|
+
if (this._isDone()) this._completeUpload();
|
|
290
|
+
}
|
|
291
|
+
|
|
292
|
+
private async _completeUpload() {
|
|
293
|
+
try {
|
|
294
|
+
await this.s3client.send(
|
|
295
|
+
new CompleteMultipartUploadCommand({
|
|
296
|
+
Bucket: this.params.Bucket,
|
|
297
|
+
Key: this.params.Key,
|
|
298
|
+
UploadId: this.uploadId,
|
|
299
|
+
MultipartUpload: {
|
|
300
|
+
// Parts are not always completed in order, we need to manually sort them
|
|
301
|
+
Parts: this.completedParts.sort(comparePartNumber),
|
|
302
|
+
},
|
|
303
|
+
})
|
|
304
|
+
);
|
|
305
|
+
this._verifyFileSize();
|
|
306
|
+
this._emitEvent<UploadTaskCompleteEvent>(TaskEvents.UPLOAD_COMPLETE, {
|
|
307
|
+
key: `${this.params.Bucket}/${this.params.Key}`,
|
|
308
|
+
});
|
|
309
|
+
this._removeFromCache();
|
|
310
|
+
this.state = AWSS3UploadTaskState.COMPLETED;
|
|
311
|
+
} catch (err) {
|
|
312
|
+
logger.error('error completing upload', err);
|
|
313
|
+
this._emitEvent(TaskEvents.ERROR, err);
|
|
314
|
+
}
|
|
315
|
+
}
|
|
316
|
+
|
|
317
|
+
private async _makeUploadPartRequest(
|
|
318
|
+
input: UploadPartCommandInput,
|
|
319
|
+
cancelTokenSource: CancelTokenSource
|
|
320
|
+
) {
|
|
321
|
+
try {
|
|
322
|
+
const res = await this.s3client.send(new UploadPartCommand(input), {
|
|
323
|
+
cancelTokenSource,
|
|
324
|
+
} as HttpHandlerOptions);
|
|
325
|
+
await this._onPartUploadCompletion({
|
|
326
|
+
eTag: res.ETag,
|
|
327
|
+
partNumber: input.PartNumber,
|
|
328
|
+
chunk: input.Body,
|
|
329
|
+
});
|
|
330
|
+
} catch (err) {
|
|
331
|
+
if (this.state === AWSS3UploadTaskState.PAUSED) {
|
|
332
|
+
logger.log('upload paused');
|
|
333
|
+
} else if (this.state === AWSS3UploadTaskState.CANCELLED) {
|
|
334
|
+
logger.log('upload aborted');
|
|
335
|
+
} else {
|
|
336
|
+
logger.error('error starting next part of upload: ', err);
|
|
337
|
+
}
|
|
338
|
+
// axios' cancel will also throw an error, however we don't need to emit an event in that case as it's an
|
|
339
|
+
// expected behavior
|
|
340
|
+
if (
|
|
341
|
+
!axios.isCancel(err) &&
|
|
342
|
+
err.message !== AWSS3ProviderUploadErrorStrings.UPLOAD_PAUSED_MESSAGE
|
|
343
|
+
) {
|
|
344
|
+
this._emitEvent(TaskEvents.ERROR, err);
|
|
345
|
+
this.pause();
|
|
346
|
+
}
|
|
347
|
+
}
|
|
348
|
+
}
|
|
349
|
+
|
|
350
|
+
private _startNextPart() {
|
|
351
|
+
if (this.queued.length > 0 && this.state !== AWSS3UploadTaskState.PAUSED) {
|
|
352
|
+
const cancelTokenSource = axios.CancelToken.source();
|
|
353
|
+
const nextPart = this.queued.shift();
|
|
354
|
+
this.inProgress.push({
|
|
355
|
+
uploadPartInput: nextPart,
|
|
356
|
+
s3Request: this._makeUploadPartRequest(nextPart, cancelTokenSource),
|
|
357
|
+
cancel: cancelTokenSource.cancel,
|
|
358
|
+
});
|
|
359
|
+
}
|
|
360
|
+
}
|
|
361
|
+
|
|
362
|
+
/**
|
|
363
|
+
* Verify on S3 side that the file size matches the one on the client side.
|
|
364
|
+
*
|
|
365
|
+
* @async
|
|
366
|
+
* @throws throws an error if the file size does not match between local copy of the file and the file on s3.
|
|
367
|
+
*/
|
|
368
|
+
private async _verifyFileSize() {
|
|
369
|
+
const obj = await this._listSingleFile({
|
|
370
|
+
key: this.params.Key,
|
|
371
|
+
bucket: this.params.Bucket,
|
|
372
|
+
});
|
|
373
|
+
const valid = Boolean(obj && obj.Size === this.file.size);
|
|
374
|
+
if (!valid) {
|
|
375
|
+
throw new Error(
|
|
376
|
+
'File size does not match between local file and file on s3'
|
|
377
|
+
);
|
|
378
|
+
}
|
|
379
|
+
return valid;
|
|
380
|
+
}
|
|
381
|
+
|
|
382
|
+
private _isDone() {
|
|
383
|
+
return (
|
|
384
|
+
!this.queued.length &&
|
|
385
|
+
!this.inProgress.length &&
|
|
386
|
+
this.bytesUploaded === this.totalBytes
|
|
387
|
+
);
|
|
388
|
+
}
|
|
389
|
+
|
|
390
|
+
private _createParts() {
|
|
391
|
+
const size = this.file.size;
|
|
392
|
+
const parts: UploadPartCommandInput[] = [];
|
|
393
|
+
for (let bodyStart = 0; bodyStart < size; ) {
|
|
394
|
+
const bodyEnd = Math.min(bodyStart + this.partSize, size);
|
|
395
|
+
parts.push({
|
|
396
|
+
Body: this.file.slice(bodyStart, bodyEnd),
|
|
397
|
+
Key: this.params.Key,
|
|
398
|
+
Bucket: this.params.Bucket,
|
|
399
|
+
PartNumber: parts.length + 1,
|
|
400
|
+
UploadId: this.uploadId,
|
|
401
|
+
});
|
|
402
|
+
bodyStart += this.partSize;
|
|
403
|
+
}
|
|
404
|
+
return parts;
|
|
405
|
+
}
|
|
406
|
+
|
|
407
|
+
private _initCachedUploadParts(cachedParts: Part[]) {
|
|
408
|
+
this.bytesUploaded += cachedParts.reduce((acc, part) => acc + part.Size, 0);
|
|
409
|
+
// Find the set of part numbers that have already been uploaded
|
|
410
|
+
const uploadedPartNumSet = new Set(
|
|
411
|
+
cachedParts.map(part => part.PartNumber)
|
|
412
|
+
);
|
|
413
|
+
this.queued = this.queued.filter(
|
|
414
|
+
part => !uploadedPartNumSet.has(part.PartNumber)
|
|
415
|
+
);
|
|
416
|
+
this.completedParts = cachedParts.map(part => ({
|
|
417
|
+
PartNumber: part.PartNumber,
|
|
418
|
+
ETag: part.ETag,
|
|
419
|
+
}));
|
|
420
|
+
this._emitEvent<UploadTaskProgressEvent>(TaskEvents.UPLOAD_PROGRESS, {
|
|
421
|
+
loaded: this.bytesUploaded,
|
|
422
|
+
total: this.totalBytes,
|
|
423
|
+
});
|
|
424
|
+
}
|
|
425
|
+
|
|
426
|
+
private async _initMultipartUpload() {
|
|
427
|
+
const res = await this.s3client.send(
|
|
428
|
+
new CreateMultipartUploadCommand(this.params)
|
|
429
|
+
);
|
|
430
|
+
this._cache({
|
|
431
|
+
uploadId: res.UploadId,
|
|
432
|
+
lastTouched: Date.now(),
|
|
433
|
+
bucket: this.params.Bucket,
|
|
434
|
+
key: this.params.Key,
|
|
435
|
+
fileName: this.file instanceof File ? this.file.name : '',
|
|
436
|
+
});
|
|
437
|
+
return res.UploadId;
|
|
438
|
+
}
|
|
439
|
+
|
|
440
|
+
private async _initializeUploadTask() {
|
|
441
|
+
this.state = AWSS3UploadTaskState.IN_PROGRESS;
|
|
442
|
+
try {
|
|
443
|
+
if (await this._isCached()) {
|
|
444
|
+
const { parts, uploadId } = await this._findCachedUploadParts();
|
|
445
|
+
this.uploadId = uploadId;
|
|
446
|
+
this.queued = this._createParts();
|
|
447
|
+
this._initCachedUploadParts(parts);
|
|
448
|
+
this._startUpload();
|
|
449
|
+
} else {
|
|
450
|
+
if (!this.uploadId) {
|
|
451
|
+
const uploadId = await this._initMultipartUpload();
|
|
452
|
+
this.uploadId = uploadId;
|
|
453
|
+
this.queued = this._createParts();
|
|
454
|
+
this._startUpload();
|
|
455
|
+
}
|
|
456
|
+
}
|
|
457
|
+
} catch (err) {
|
|
458
|
+
if (!axios.isCancel(err)) {
|
|
459
|
+
logger.error('Error initializing the upload task', err);
|
|
460
|
+
}
|
|
461
|
+
}
|
|
462
|
+
}
|
|
463
|
+
|
|
464
|
+
public resume(): void {
|
|
465
|
+
if (this.state === AWSS3UploadTaskState.CANCELLED) {
|
|
466
|
+
logger.warn('This task has already been cancelled');
|
|
467
|
+
} else if (this.state === AWSS3UploadTaskState.COMPLETED) {
|
|
468
|
+
logger.warn('This task has already been completed');
|
|
469
|
+
} else if (this.state === AWSS3UploadTaskState.IN_PROGRESS) {
|
|
470
|
+
logger.warn('Upload task already in progress');
|
|
471
|
+
// first time running resume, find any cached parts on s3 or start a new multipart upload request before
|
|
472
|
+
// starting the upload
|
|
473
|
+
} else if (!this.uploadId) {
|
|
474
|
+
this._initializeUploadTask();
|
|
475
|
+
} else {
|
|
476
|
+
this._startUpload();
|
|
477
|
+
}
|
|
478
|
+
}
|
|
479
|
+
|
|
480
|
+
private _startUpload() {
|
|
481
|
+
this.state = AWSS3UploadTaskState.IN_PROGRESS;
|
|
482
|
+
for (let i = 0; i < this.queueSize; i++) {
|
|
483
|
+
this._startNextPart();
|
|
484
|
+
}
|
|
485
|
+
}
|
|
486
|
+
|
|
487
|
+
async _cancel(): Promise<boolean> {
|
|
488
|
+
if (this.state === AWSS3UploadTaskState.CANCELLED) {
|
|
489
|
+
logger.warn('This task has already been cancelled');
|
|
490
|
+
return false;
|
|
491
|
+
} else if (this.state === AWSS3UploadTaskState.COMPLETED) {
|
|
492
|
+
logger.warn('This task has already been completed');
|
|
493
|
+
return false;
|
|
494
|
+
} else {
|
|
495
|
+
this.pause();
|
|
496
|
+
this.queued = [];
|
|
497
|
+
this.completedParts = [];
|
|
498
|
+
this.bytesUploaded = 0;
|
|
499
|
+
this.state = AWSS3UploadTaskState.CANCELLED;
|
|
500
|
+
try {
|
|
501
|
+
await this.s3client.send(
|
|
502
|
+
new AbortMultipartUploadCommand({
|
|
503
|
+
Bucket: this.params.Bucket,
|
|
504
|
+
Key: this.params.Key,
|
|
505
|
+
UploadId: this.uploadId,
|
|
506
|
+
})
|
|
507
|
+
);
|
|
508
|
+
await this._removeFromCache();
|
|
509
|
+
return true;
|
|
510
|
+
} catch (err) {
|
|
511
|
+
logger.error('Error cancelling upload task', err);
|
|
512
|
+
return false;
|
|
513
|
+
}
|
|
514
|
+
}
|
|
515
|
+
}
|
|
516
|
+
|
|
517
|
+
/**
|
|
518
|
+
* pause this particular upload task
|
|
519
|
+
**/
|
|
520
|
+
public pause(): void {
|
|
521
|
+
if (this.state === AWSS3UploadTaskState.CANCELLED) {
|
|
522
|
+
logger.warn('This task has already been cancelled');
|
|
523
|
+
} else if (this.state === AWSS3UploadTaskState.COMPLETED) {
|
|
524
|
+
logger.warn('This task has already been completed');
|
|
525
|
+
} else if (this.state === AWSS3UploadTaskState.PAUSED) {
|
|
526
|
+
logger.warn('This task is already paused');
|
|
527
|
+
}
|
|
528
|
+
this.state = AWSS3UploadTaskState.PAUSED;
|
|
529
|
+
// Use axios cancel token to abort the part request immediately
|
|
530
|
+
// Add the inProgress parts back to pending
|
|
531
|
+
const removedInProgressReq = this.inProgress.splice(
|
|
532
|
+
0,
|
|
533
|
+
this.inProgress.length
|
|
534
|
+
);
|
|
535
|
+
removedInProgressReq.forEach(req => {
|
|
536
|
+
req.cancel(AWSS3ProviderUploadErrorStrings.UPLOAD_PAUSED_MESSAGE);
|
|
537
|
+
});
|
|
538
|
+
// Put all removed in progress parts back into the queue
|
|
539
|
+
this.queued.unshift(
|
|
540
|
+
...removedInProgressReq.map(req => req.uploadPartInput)
|
|
541
|
+
);
|
|
542
|
+
}
|
|
543
|
+
}
|