@chunkflowjs/core 0.0.1-alpha.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +181 -0
- package/dist/chunk-size-adjuster-interface.d.ts +12 -0
- package/dist/chunk-size-adjuster-interface.d.ts.map +1 -0
- package/dist/chunk-size-adjuster-tcp.d.ts +34 -0
- package/dist/chunk-size-adjuster-tcp.d.ts.map +1 -0
- package/dist/chunk-size-adjuster.d.ts +12 -0
- package/dist/chunk-size-adjuster.d.ts.map +1 -0
- package/dist/index.d.mts +246 -0
- package/dist/index.d.mts.map +1 -0
- package/dist/index.d.ts +7 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.mjs +1988 -0
- package/dist/index.mjs.map +1 -0
- package/dist/plugins.d.ts +51 -0
- package/dist/plugins.d.ts.map +1 -0
- package/dist/upload-manager.d.ts +71 -0
- package/dist/upload-manager.d.ts.map +1 -0
- package/dist/upload-task.d.ts +66 -0
- package/dist/upload-task.d.ts.map +1 -0
- package/package.json +44 -0
package/dist/index.mjs
ADDED
@@ -0,0 +1,1988 @@
import { ConcurrencyController, UploadStorage, calculateChunkHash, calculateFileHash, calculateSpeed, createEventBus, estimateRemainingTime, sliceFile } from "@chunkflowjs/shared";

//#region src/chunk-size-adjuster.ts
/**
* ChunkSizeAdjuster dynamically adjusts chunk sizes based on upload performance.
* Uses a simple binary strategy: doubles size when fast, halves when slow.
*
* @example
* ```typescript
* const adjuster = new ChunkSizeAdjuster({
* initialSize: 1024 * 1024, // 1MB
* minSize: 256 * 1024, // 256KB
* maxSize: 10 * 1024 * 1024, // 10MB
* targetTime: 3000 // 3 seconds
* });
*
* // After uploading a chunk
* const uploadTimeMs = 1500;
* const newSize = adjuster.adjust(uploadTimeMs);
* ```
*/
var ChunkSizeAdjuster = class {
currentSize;
options;
constructor(options) {
this.currentSize = options.initialSize;
this.options = {
targetTime: 3e3,
...options
};
if (this.options.minSize > this.options.maxSize) throw new Error("minSize cannot be greater than maxSize");
if (this.options.initialSize < this.options.minSize || this.options.initialSize > this.options.maxSize) throw new Error("initialSize must be between minSize and maxSize");
if (this.options.targetTime <= 0) throw new Error("targetTime must be positive");
}
/**
* Adjusts the chunk size based on the upload time of the previous chunk.
* Uses a simple binary strategy:
* - If upload is fast (< 50% of target time): double the chunk size
* - If upload is slow (> 150% of target time): halve the chunk size
* - Otherwise: keep the current size
*
* @param uploadTimeMs - The time taken to upload the previous chunk in milliseconds
* @returns The new chunk size in bytes
*/
adjust(uploadTimeMs) {
if (uploadTimeMs < 0) throw new Error("uploadTimeMs cannot be negative");
const { targetTime, minSize, maxSize } = this.options;
if (uploadTimeMs < targetTime * .5) this.currentSize = Math.min(this.currentSize * 2, maxSize);
else if (uploadTimeMs > targetTime * 1.5) this.currentSize = Math.max(this.currentSize / 2, minSize);
return this.currentSize;
}
/**
* Gets the current chunk size without adjusting it.
*
* @returns The current chunk size in bytes
*/
getCurrentSize() {
return this.currentSize;
}
/**
* Resets the chunk size to the initial size.
* Useful when starting a new upload or after an error.
*/
reset() {
this.currentSize = this.options.initialSize;
}
};

//#endregion
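// --- Illustrative sketch (not part of the published dist file) -----------
// A minimal usage trace of the binary strategy above, assuming the caller
// measures each chunk's upload time. With targetTime = 3000ms, a 1000ms
// upload (< 50% of target) doubles the size and a 5000ms upload (> 150% of
// target) halves it; anything in between leaves it unchanged.
const exampleAdjuster = new ChunkSizeAdjuster({
  initialSize: 1024 * 1024,
  minSize: 256 * 1024,
  maxSize: 10 * 1024 * 1024,
  targetTime: 3e3
});
console.log(exampleAdjuster.adjust(1e3));
// -> 2097152 (doubled to 2MB)
console.log(exampleAdjuster.adjust(5e3));
// -> 1048576 (halved back to 1MB)
console.log(exampleAdjuster.adjust(3e3));
// -> 1048576 (within the 50%-150% band, unchanged)
exampleAdjuster.reset();
// --------------------------------------------------------------------------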
//#region src/chunk-size-adjuster-tcp.ts
let CongestionState = /* @__PURE__ */ function(CongestionState) {
CongestionState["SLOW_START"] = "slow_start";
CongestionState["CONGESTION_AVOIDANCE"] = "congestion_avoidance";
CongestionState["FAST_RECOVERY"] = "fast_recovery";
return CongestionState;
}({});
/**
* TCP-inspired chunk size adjuster with proper slow start and congestion avoidance
*
* Algorithm:
* 1. Slow Start: Exponential growth (double size) until ssthresh
* 2. Congestion Avoidance: Linear growth (add increment) after ssthresh
* 3. Fast Recovery: On congestion, set ssthresh = currentSize / 2, reduce size
*
* @example
* ```typescript
* const adjuster = new TCPChunkSizeAdjuster({
* initialSize: 256 * 1024, // 256KB (like TCP initial cwnd)
* minSize: 256 * 1024, // 256KB
* maxSize: 10 * 1024 * 1024, // 10MB
* targetTime: 3000, // 3 seconds
* initialSsthresh: 5 * 1024 * 1024 // 5MB
* });
*
* // After each chunk upload
* const newSize = adjuster.adjust(uploadTimeMs);
* ```
*/
var TCPChunkSizeAdjuster = class {
currentSize;
ssthresh;
state;
options;
consecutiveFastUploads = 0;
consecutiveSlowUploads = 0;
constructor(options) {
this.currentSize = options.initialSize;
this.options = {
targetTime: 3e3,
initialSsthresh: options.initialSsthresh ?? options.maxSize / 2,
...options
};
this.ssthresh = this.options.initialSsthresh;
this.state = CongestionState.SLOW_START;
this.validate();
}
validate() {
const { minSize, maxSize, initialSize, targetTime, initialSsthresh } = this.options;
if (minSize > maxSize) throw new Error("minSize cannot be greater than maxSize");
if (initialSize < minSize || initialSize > maxSize) throw new Error("initialSize must be between minSize and maxSize");
if (targetTime <= 0) throw new Error("targetTime must be positive");
if (initialSsthresh < minSize || initialSsthresh > maxSize) throw new Error("initialSsthresh must be between minSize and maxSize");
}
/**
* Adjusts chunk size based on upload performance using TCP-like algorithm
*
* @param uploadTimeMs - Time taken to upload the previous chunk
* @returns New chunk size in bytes
*/
adjust(uploadTimeMs) {
if (uploadTimeMs < 0) throw new Error("uploadTimeMs cannot be negative");
const { targetTime, minSize, maxSize } = this.options;
const ratio = uploadTimeMs / targetTime;
if (ratio < .5) {
this.consecutiveFastUploads++;
this.consecutiveSlowUploads = 0;
this.handleFastUpload();
} else if (ratio > 1.5) {
this.consecutiveSlowUploads++;
this.consecutiveFastUploads = 0;
this.handleSlowUpload();
} else {
this.consecutiveFastUploads = 0;
this.consecutiveSlowUploads = 0;
}
this.currentSize = Math.max(minSize, Math.min(this.currentSize, maxSize));
return this.currentSize;
}
handleFastUpload() {
const { maxSize } = this.options;
switch (this.state) {
case CongestionState.SLOW_START:
const newSize = this.currentSize * 2;
if (newSize >= this.ssthresh) {
this.currentSize = this.ssthresh;
this.state = CongestionState.CONGESTION_AVOIDANCE;
} else this.currentSize = Math.min(newSize, maxSize);
break;
case CongestionState.CONGESTION_AVOIDANCE:
const increment = Math.max(this.options.minSize, Math.floor(this.currentSize * .1));
this.currentSize = Math.min(this.currentSize + increment, maxSize);
break;
case CongestionState.FAST_RECOVERY:
this.state = CongestionState.CONGESTION_AVOIDANCE;
break;
}
}
handleSlowUpload() {
const { minSize } = this.options;
this.ssthresh = Math.max(minSize, Math.floor(this.currentSize / 2));
this.currentSize = this.ssthresh;
this.state = CongestionState.FAST_RECOVERY;
}
/**
* Gets the current chunk size
*/
getCurrentSize() {
return this.currentSize;
}
/**
* Gets the current slow start threshold
*/
getSsthresh() {
return this.ssthresh;
}
/**
* Gets the current congestion state
*/
getState() {
return this.state;
}
/**
* Resets to initial state
*/
reset() {
this.currentSize = this.options.initialSize;
this.ssthresh = this.options.initialSsthresh;
this.state = CongestionState.SLOW_START;
this.consecutiveFastUploads = 0;
this.consecutiveSlowUploads = 0;
}
/**
* Gets statistics about the adjuster's behavior
*/
getStats() {
return {
currentSize: this.currentSize,
ssthresh: this.ssthresh,
state: this.state,
consecutiveFastUploads: this.consecutiveFastUploads,
consecutiveSlowUploads: this.consecutiveSlowUploads
};
}
};

//#endregion
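// --- Illustrative sketch (not part of the published dist file) -----------
// Walks the TCP-like adjuster above through its states, assuming each call
// reports the upload time shown. Starting at 256KB with ssthresh = 1MB, fast
// uploads double the size (slow start) until ssthresh is reached, then grow
// it by max(minSize, 10% of current) per call (congestion avoidance); a slow
// upload halves ssthresh and drops into fast recovery.
const tcpExample = new TCPChunkSizeAdjuster({
  initialSize: 256 * 1024,
  minSize: 256 * 1024,
  maxSize: 10 * 1024 * 1024,
  targetTime: 3e3,
  initialSsthresh: 1024 * 1024
});
tcpExample.adjust(500);
// -> 524288 (512KB), still slow_start
tcpExample.adjust(500);
// -> 1048576 (1MB) = ssthresh, state becomes congestion_avoidance
tcpExample.adjust(500);
// -> 1310720 (1.25MB): current + max(minSize, 10% of current)
tcpExample.adjust(6e3);
// -> 655360 (640KB): ssthresh halved, state becomes fast_recovery
console.log(tcpExample.getStats());
// --------------------------------------------------------------------------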
//#region src/upload-task.ts
/**
* UploadTask class
*
* Manages a single file upload with support for:
* - Chunked upload with dynamic chunk size adjustment
* - Hash calculation and verification (instant upload/resume)
* - Concurrent chunk uploads with retry
* - Progress tracking and persistence
* - Pause/resume/cancel operations
* - Event-driven lifecycle
*
* @example
* ```typescript
* const task = new UploadTask({
* file: myFile,
* requestAdapter: myAdapter,
* chunkSize: 1024 * 1024, // 1MB
* concurrency: 3,
* });
*
* // Listen to events
* task.on('progress', ({ progress, speed }) => {
* console.log(`Progress: ${progress}%, Speed: ${speed} bytes/s`);
* });
*
* task.on('success', ({ fileUrl }) => {
* console.log(`Upload complete: ${fileUrl}`);
* });
*
* // Start upload
* await task.start();
* ```
*/
var UploadTask = class {
/** Unique task identifier */
id;
/** File being uploaded */
file;
/** Current upload status */
status;
/** Current upload progress */
progress;
/** Array of chunk information */
chunks;
/** Upload token from server */
uploadToken;
/** Calculated file hash */
fileHash;
/** Event bus for lifecycle events */
eventBus;
/** Concurrency controller for chunk uploads */
concurrencyController;
/** Storage for persisting upload progress */
storage;
/** Request adapter for API calls */
requestAdapter;
/** Upload task options with defaults */
options;
/** Upload start timestamp */
startTime;
/** Upload end timestamp */
endTime;
/** Chunk size adjuster for dynamic sizing */
chunkSizeAdjuster;
/** Flag to indicate if upload should be cancelled (e.g., instant upload) */
shouldCancelUpload;
/**
* Creates a new UploadTask
*
* @param options - Upload task configuration options
*/
constructor(options) {
this.id = options.resumeTaskId ?? this.generateTaskId();
this.file = options.file;
this.status = "idle";
this.progress = {
uploadedBytes: 0,
totalBytes: options.file.size,
percentage: 0,
speed: 0,
remainingTime: 0,
uploadedChunks: 0,
totalChunks: 0
};
this.chunks = [];
if (options.resumeUploadToken) this.uploadToken = {
token: options.resumeUploadToken,
fileId: "",
chunkSize: options.chunkSize ?? 1024 * 1024,
expiresAt: Date.now() + 1440 * 60 * 1e3
};
else this.uploadToken = null;
this.fileHash = null;
this.eventBus = createEventBus();
this.requestAdapter = options.requestAdapter;
this.options = {
file: options.file,
requestAdapter: options.requestAdapter,
chunkSize: options.chunkSize ?? 1024 * 1024,
concurrency: options.concurrency ?? 3,
retryCount: options.retryCount ?? 3,
retryDelay: options.retryDelay ?? 1e3,
autoStart: options.autoStart ?? false,
resumeTaskId: options.resumeTaskId ?? "",
resumeUploadToken: options.resumeUploadToken ?? "",
resumeUploadedChunks: options.resumeUploadedChunks ?? [],
chunkSizeStrategy: options.chunkSizeStrategy ?? "tcp-like",
initialSsthresh: options.initialSsthresh ?? 5 * 1024 * 1024
};
this.concurrencyController = new ConcurrencyController({ limit: this.options.concurrency });
this.storage = new UploadStorage();
this.startTime = 0;
this.endTime = null;
this.chunkSizeAdjuster = null;
this.shouldCancelUpload = false;
}
/**
* Generates a unique task ID
* Uses timestamp and random string for uniqueness
*
* @returns Unique task identifier
*/
generateTaskId() {
return `task_${Date.now().toString(36)}_${Math.random().toString(36).substring(2, 9)}`;
}
/**
* Gets the current upload status
*
* @returns Current upload status
*/
getStatus() {
return this.status;
}
/**
* Gets the current upload progress
* Returns a copy to prevent external modification
*
* @returns Current upload progress
*/
getProgress() {
return { ...this.progress };
}
/**
* Gets the upload duration in milliseconds
* Returns null if upload hasn't completed
*
* @returns Upload duration or null
*/
getDuration() {
if (this.endTime === null) return null;
return this.endTime - this.startTime;
}
/**
* Subscribes to upload events
*
* @param event - Event name to listen to
* @param handler - Event handler function
*
* @example
* ```typescript
* task.on('progress', ({ progress, speed }) => {
* console.log(`${progress}% at ${speed} bytes/s`);
* });
* ```
*/
on(event, handler) {
this.eventBus.on(event, handler);
}
/**
* Unsubscribes from upload events
*
* @param event - Event name to stop listening to
* @param handler - Event handler function to remove
*/
off(event, handler) {
this.eventBus.off(event, handler);
}
/**
* Creates chunk information array based on negotiated chunk size
* Divides the file into chunks and creates ChunkInfo objects for each
*
* @param chunkSize - Size of each chunk in bytes (negotiated with server)
* @returns Array of ChunkInfo objects
*
* @remarks
* - Chunks are created sequentially from start to end of file
* - Last chunk may be smaller than chunkSize
* - Hash field is initially empty and will be calculated during upload
* - Validates: Requirement 2.1 (chunk size based splitting)
*
* @internal This method will be used in task 5.3
*/
createChunks(chunkSize) {
const chunks = [];
const totalChunks = Math.ceil(this.file.size / chunkSize);
for (let i = 0; i < totalChunks; i++) {
const start = i * chunkSize;
const end = Math.min(start + chunkSize, this.file.size);
chunks.push({
index: i,
hash: "",
size: end - start,
start,
end
});
}
return chunks;
}
/**
* Starts the upload process
*
* Workflow:
* 1. Create file on server and get upload token
* 2. Split file into chunks based on negotiated chunk size
* 3. Start concurrent chunk upload
*
* @throws Error if upload is already in progress or completed
*
* @remarks
* - Validates: Requirements 1.1, 1.2, 2.2, 5.1, 5.2, 5.3, 20.1, 20.2, 20.3, 20.5
* - Sets status to 'uploading'
* - Emits 'start' event
* - Creates chunks based on negotiated size
* - Initiates concurrent upload with retry
*/
async start() {
if (this.status !== "idle") throw new Error(`Cannot start upload: current status is ${this.status}`);
try {
await this.initializeStorage();
this.status = "uploading";
this.startTime = Date.now();
this.eventBus.emit("start", {
taskId: this.id,
file: this.file
});
const isResume = this.options.resumeUploadToken && this.options.resumeUploadedChunks;
let negotiatedChunkSize;
if (isResume) {
negotiatedChunkSize = this.uploadToken.chunkSize;
console.info(`Resuming upload for task ${this.id}: ${this.options.resumeUploadedChunks.length} chunks already uploaded`);
} else {
const createResponse = await this.requestAdapter.createFile({
fileName: this.file.name,
fileSize: this.file.size,
fileType: this.file.type,
preferredChunkSize: this.options.chunkSize
});
this.uploadToken = createResponse.uploadToken;
negotiatedChunkSize = createResponse.negotiatedChunkSize;
}
this.chunks = this.createChunks(negotiatedChunkSize);
this.progress.totalChunks = this.chunks.length;
if (isResume && this.options.resumeUploadedChunks) {
const uploadedChunks = this.options.resumeUploadedChunks;
let uploadedBytes = 0;
for (const chunkIndex of uploadedChunks) if (chunkIndex < this.chunks.length) {
const chunk = this.chunks[chunkIndex];
chunk.uploaded = true;
uploadedBytes += chunk.size;
}
this.progress.uploadedChunks = uploadedChunks.length;
this.progress.uploadedBytes = uploadedBytes;
this.progress.percentage = uploadedBytes / this.file.size * 100;
console.info(`Resume progress: ${this.progress.percentage.toFixed(1)}% (${uploadedChunks.length}/${this.chunks.length} chunks)`);
}
const strategy = this.options.chunkSizeStrategy;
const minSize = 256 * 1024;
const maxSize = 10 * 1024 * 1024;
const targetTime = 3e3;
if (typeof strategy === "object") this.chunkSizeAdjuster = strategy;
else if (strategy === "tcp-like") this.chunkSizeAdjuster = new TCPChunkSizeAdjuster({
initialSize: negotiatedChunkSize,
minSize,
maxSize,
targetTime,
initialSsthresh: this.options.initialSsthresh
});
else this.chunkSizeAdjuster = new ChunkSizeAdjuster({
initialSize: negotiatedChunkSize,
minSize,
maxSize,
targetTime
});
await Promise.all([this.startUpload(), this.calculateAndVerifyHash()]);
if (this.status === "paused") {
console.info(`Upload paused at ${this.progress.percentage.toFixed(1)}%`);
return;
}
if (this.status === "cancelled" || this.shouldCancelUpload) return;
if (this.status === "success") {
console.info("Instant upload already completed");
return;
}
if (this.status === "uploading" && this.fileHash) try {
const chunkHashes = this.chunks.map((chunk) => chunk.hash);
const verifyResponse = await this.requestAdapter.verifyHash({
fileHash: this.fileHash,
chunkHashes,
uploadToken: this.uploadToken.token
});
if (verifyResponse.fileExists && verifyResponse.fileUrl) {
this.status = "success";
this.endTime = Date.now();
this.progress.uploadedBytes = this.file.size;
this.progress.uploadedChunks = this.chunks.length;
this.progress.percentage = 100;
this.eventBus.emit("success", {
taskId: this.id,
fileUrl: verifyResponse.fileUrl
});
return;
}
} catch (error) {
console.warn("Hash verification failed:", error);
}
else if (this.status === "uploading" && !this.fileHash) console.warn("No fileHash available, skipping hash verification");
if (this.status === "uploading" && !this.shouldCancelUpload) {
const mergeResponse = await this.requestAdapter.mergeFile({
uploadToken: this.uploadToken.token,
fileHash: this.fileHash || "",
chunkHashes: this.chunks.map((chunk) => chunk.hash)
});
if (mergeResponse.success) {
this.status = "success";
this.endTime = Date.now();
this.eventBus.emit("success", {
taskId: this.id,
fileUrl: mergeResponse.fileUrl
});
} else throw new Error("Merge failed: response.success is false");
}
} catch (error) {
if (this.status !== "paused" && this.status !== "cancelled") {
this.status = "error";
this.endTime = Date.now();
this.eventBus.emit("error", {
taskId: this.id,
error
});
}
throw error;
}
}
/**
* Starts concurrent chunk upload with priority for first chunks
*
* Uploads all chunks with concurrency control and dynamic chunk size adjustment.
* Uses the concurrency controller to limit parallel uploads.
* Implements priority upload for the first few chunks to get quick feedback.
*
* @remarks
* - Validates: Requirements 5.1, 5.2, 5.3, 17.5, 20.1, 20.2, 20.3
* - Respects concurrency limits
* - Tracks upload time for dynamic chunk size adjustment
* - Stops if status changes (pause/cancel) or shouldCancelUpload is set
* - Prioritizes first 3 chunks for quick server feedback
*
* @internal
*/
async startUpload() {
const chunksToUpload = this.chunks.filter((chunk) => !chunk.uploaded);
if (chunksToUpload.length === 0) {
console.info("All chunks already uploaded, skipping upload phase");
return;
}
const priorityChunkCount = Math.min(3, chunksToUpload.length);
const priorityChunks = chunksToUpload.slice(0, priorityChunkCount);
const remainingChunks = chunksToUpload.slice(priorityChunkCount);
const priorityPromises = priorityChunks.map((chunk) => {
return this.concurrencyController.run(async () => {
if (this.status !== "uploading" || this.shouldCancelUpload) return;
const chunkStartTime = Date.now();
await this.uploadChunkWithRetry(chunk);
const chunkUploadTime = Date.now() - chunkStartTime;
if (this.chunkSizeAdjuster) this.chunkSizeAdjuster.adjust(chunkUploadTime);
});
});
const remainingPromises = remainingChunks.map((chunk) => {
return this.concurrencyController.run(async () => {
if (this.status !== "uploading" || this.shouldCancelUpload) return;
const chunkStartTime = Date.now();
await this.uploadChunkWithRetry(chunk);
const chunkUploadTime = Date.now() - chunkStartTime;
if (this.chunkSizeAdjuster) this.chunkSizeAdjuster.adjust(chunkUploadTime);
});
});
await Promise.all([...priorityPromises, ...remainingPromises]);
}
/**
* Uploads a single chunk with retry logic
*
* Implements exponential backoff retry strategy for failed uploads.
* Calculates chunk hash before upload for verification.
* Updates progress after successful upload.
*
* @param chunk - Chunk information to upload
* @throws Error if all retries are exhausted
*
* @remarks
* - Validates: Requirements 20.1, 20.2, 20.3, 20.5
* - Retries up to configured retry count
* - Uses exponential backoff delay
* - Emits chunkSuccess or chunkError events
* - Updates progress and persists to storage
* - Skips upload if shouldCancelUpload is set (instant upload)
*
* @internal
*/
async uploadChunkWithRetry(chunk) {
let retries = 0;
let lastError = null;
while (retries <= this.options.retryCount) try {
if (this.status !== "uploading" || this.shouldCancelUpload) return;
const blob = sliceFile(this.file, chunk.start, chunk.end);
const chunkHash = await calculateChunkHash(blob);
chunk.hash = chunkHash;
await this.requestAdapter.uploadChunk({
uploadToken: this.uploadToken.token,
chunkIndex: chunk.index,
chunkHash,
chunk: blob
});
chunk.uploaded = true;
await this.updateProgress(chunk);
this.eventBus.emit("chunkSuccess", {
taskId: this.id,
chunkIndex: chunk.index
});
return;
} catch (error) {
lastError = error;
retries++;
this.eventBus.emit("chunkError", {
taskId: this.id,
chunkIndex: chunk.index,
error: lastError
});
if (retries > this.options.retryCount) throw new Error(`Failed to upload chunk ${chunk.index} after ${this.options.retryCount} retries: ${lastError.message}`);
const delay = this.options.retryDelay * Math.pow(2, retries - 1);
await this.delay(delay);
}
if (lastError) throw lastError;
}
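// --- Illustrative note (not part of the published dist file) -------------
// Worked example of the backoff used above: with the default retryDelay of
// 1000ms and retryCount of 3, failed attempts wait retryDelay * 2^(retries-1),
// i.e. 1000ms, 2000ms and 4000ms, before the chunk error is finally rethrown.
// --------------------------------------------------------------------------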
/**
* Updates upload progress after a chunk is successfully uploaded
*
* Calculates:
* - Uploaded bytes and percentage
* - Upload speed
* - Estimated remaining time
*
* Emits progress event with updated information.
* Persists progress to IndexedDB for resume functionality.
*
* @param chunk - The chunk that was just uploaded
*
* @remarks
* - Validates: Requirements 4.1, 6.3, 6.4 (progress tracking and persistence)
* - Updates all progress metrics
* - Emits progress event
* - Persists to IndexedDB for resume capability
*
* @internal
*/
async updateProgress(chunk) {
if (chunk.progressCounted) {
console.warn(`Chunk ${chunk.index} progress already counted, skipping update`);
return;
}
chunk.progressCounted = true;
this.progress.uploadedBytes += chunk.size;
this.progress.uploadedChunks++;
if (this.progress.uploadedBytes > this.file.size) {
console.warn(`Progress overflow detected: ${this.progress.uploadedBytes} > ${this.file.size}, capping to file size`);
this.progress.uploadedBytes = this.file.size;
}
if (this.progress.uploadedChunks > this.progress.totalChunks) {
console.warn(`Chunk count overflow detected: ${this.progress.uploadedChunks} > ${this.progress.totalChunks}, capping to total chunks`);
this.progress.uploadedChunks = this.progress.totalChunks;
}
this.progress.percentage = Math.min(100, this.progress.uploadedBytes / this.file.size * 100);
const elapsedTime = Date.now() - this.startTime;
this.progress.speed = calculateSpeed(this.progress.uploadedBytes, elapsedTime);
const remainingBytes = this.file.size - this.progress.uploadedBytes;
this.progress.remainingTime = estimateRemainingTime(remainingBytes, this.progress.speed);
this.eventBus.emit("progress", {
taskId: this.id,
progress: this.progress.percentage,
speed: this.progress.speed
});
await this.persistProgress();
}
/**
* Delays execution for a specified time
* Used for retry backoff
*
* @param ms - Milliseconds to delay
* @returns Promise that resolves after the delay
*
* @internal
*/
delay(ms) {
return new Promise((resolve) => setTimeout(resolve, ms));
}
/**
* Calculates file hash and verifies with server for instant upload
*
* This method runs in parallel with chunk upload to optimize performance.
* It implements the following features:
* - Calculate file hash using Web Worker or requestIdleCallback (non-blocking)
* - Send hash verification request to server
* - Handle instant upload when the file already exists
* - Handle partial instant upload (skip existing chunks)
*
* @remarks
* - Validates: Requirements 3.1, 3.2, 3.3, 3.4, 3.5
* - Emits hashProgress events during calculation
* - Emits hashComplete event when calculation finishes
* - If file exists on server, cancels ongoing upload and emits success
* - If some chunks exist, marks them as uploaded to skip them
*
* @internal
*/
async calculateAndVerifyHash() {
try {
this.fileHash = await calculateFileHash(this.file, (progress) => {
this.eventBus.emit("hashProgress", {
taskId: this.id,
progress
});
});
this.eventBus.emit("hashComplete", {
taskId: this.id,
hash: this.fileHash
});
if (!this.uploadToken) return;
const verifyResponse = await this.requestAdapter.verifyHash({
fileHash: this.fileHash,
uploadToken: this.uploadToken.token
});
if (verifyResponse.fileExists && verifyResponse.fileUrl) {
if (this.status === "paused" || this.status === "cancelled") return;
this.shouldCancelUpload = true;
this.status = "success";
this.endTime = Date.now();
this.progress.uploadedBytes = this.file.size;
this.progress.uploadedChunks = this.chunks.length;
this.progress.percentage = 100;
this.eventBus.emit("success", {
taskId: this.id,
fileUrl: verifyResponse.fileUrl
});
return;
}
if (verifyResponse.existingChunks && verifyResponse.existingChunks.length > 0) this.skipExistingChunks(verifyResponse.existingChunks);
} catch (error) {
console.warn("Hash calculation/verification failed:", error);
}
}
/**
* Skips existing chunks by marking them as uploaded
*
* This is used for partial instant upload when some chunks already exist on server.
* Updates progress to reflect the skipped chunks.
*
* @param existingChunkIndices - Array of chunk indices that already exist on server
*
* @remarks
* - Validates: Requirement 3.5, 17.4 (partial instant upload)
* - Updates progress for skipped chunks
* - Emits chunkSuccess events for skipped chunks
*
* @internal
*/
skipExistingChunks(existingChunkIndices) {
for (const chunkIndex of existingChunkIndices) {
const chunk = this.chunks[chunkIndex];
if (!chunk) continue;
chunk.uploaded = true;
chunk.progressCounted = true;
this.progress.uploadedBytes += chunk.size;
this.progress.uploadedChunks++;
this.progress.percentage = this.progress.uploadedBytes / this.file.size * 100;
this.eventBus.emit("chunkSuccess", {
taskId: this.id,
chunkIndex: chunk.index
});
}
this.eventBus.emit("progress", {
taskId: this.id,
progress: this.progress.percentage,
speed: this.progress.speed
});
}
/**
* Initializes the IndexedDB storage for progress persistence
*
* Creates the initial upload record in IndexedDB.
* If storage is not available, logs a warning but continues upload.
*
* @remarks
* - Validates: Requirement 4.1 (persist progress to IndexedDB)
* - Gracefully handles storage unavailability
* - Creates initial record with empty uploaded chunks
*
* @internal
*/
async initializeStorage() {
try {
await this.storage.init();
const record = {
taskId: this.id,
fileInfo: {
name: this.file.name,
size: this.file.size,
type: this.file.type,
lastModified: this.file.lastModified
},
uploadedChunks: [],
uploadToken: this.uploadToken?.token || "",
createdAt: Date.now(),
updatedAt: Date.now()
};
await this.storage.saveRecord(record);
} catch (error) {}
}
/**
* Persists current upload progress to IndexedDB
*
* Updates the upload record with the list of successfully uploaded chunks.
* This enables resume functionality if the upload is interrupted.
*
* @remarks
* - Validates: Requirement 4.1 (write progress to IndexedDB)
* - Gracefully handles storage errors
* - Updates uploadedChunks array and timestamp
*
* @internal
*/
async persistProgress() {
try {
if (!this.storage.isAvailable()) return;
const uploadedChunkIndices = this.chunks.filter((_, index) => index < this.progress.uploadedChunks).map((chunk) => chunk.index);
try {
await this.storage.updateRecord(this.id, {
uploadedChunks: uploadedChunkIndices,
uploadToken: this.uploadToken?.token || "",
updatedAt: Date.now()
});
} catch (error) {
if (error.code === "OPERATION_FAILED") {
const record = {
taskId: this.id,
fileInfo: {
name: this.file.name,
size: this.file.size,
type: this.file.type,
lastModified: this.file.lastModified
},
uploadedChunks: uploadedChunkIndices,
uploadToken: this.uploadToken?.token || "",
createdAt: Date.now(),
updatedAt: Date.now()
};
await this.storage.saveRecord(record);
} else throw error;
}
} catch (error) {}
}
/**
* Pauses the upload
*
* Pauses an ongoing upload by changing the status to 'paused'.
* The upload can be resumed later from where it left off.
*
* @remarks
* - Validates: Requirements 4.3, 6.3 (pause functionality and lifecycle events)
* - Only works when status is 'uploading'
* - Emits 'pause' event
* - Progress is persisted to IndexedDB for resume
* - Ongoing chunk uploads will complete, but no new chunks will start
*
* @example
* ```typescript
* task.on('pause', () => {
* console.log('Upload paused');
* });
*
* task.pause();
* ```
*/
pause() {
if (this.status !== "uploading") {
console.warn(`Cannot pause upload: current status is ${this.status}`);
return;
}
this.status = "paused";
this.eventBus.emit("pause", { taskId: this.id });
}
/**
* Resumes a paused upload (resume from breakpoint)
*
* Resumes an upload that was previously paused.
* Continues uploading from where it left off, skipping already uploaded chunks.
*
* @throws Error if upload is not in paused state
*
* @remarks
* - Validates: Requirements 4.3, 4.4, 6.3 (resume functionality and lifecycle events)
* - Only works when status is 'paused'
* - Emits 'resume' event
* - Continues from last uploaded chunk
* - Uses persisted progress from IndexedDB
*
* @example
* ```typescript
* task.on('resume', () => {
* console.log('Upload resumed');
* });
*
* await task.resume();
* ```
*/
async resume() {
if (this.status !== "paused") throw new Error(`Cannot resume upload: current status is ${this.status}`);
try {
if (!this.uploadToken) throw new Error("Cannot resume: upload token not available");
this.status = "uploading";
this.eventBus.emit("resume", { taskId: this.id });
await this.startUpload();
if (this.status === "cancelled" || this.shouldCancelUpload) return;
if (this.status === "uploading") {
if (this.fileHash) try {
const chunkHashes = this.chunks.map((chunk) => chunk.hash);
const verifyResponse = await this.requestAdapter.verifyHash({
fileHash: this.fileHash,
chunkHashes,
uploadToken: this.uploadToken.token
});
if (verifyResponse.fileExists && verifyResponse.fileUrl) {
this.status = "success";
this.endTime = Date.now();
this.progress.uploadedBytes = this.file.size;
this.progress.uploadedChunks = this.chunks.length;
this.progress.percentage = 100;
this.eventBus.emit("success", {
taskId: this.id,
fileUrl: verifyResponse.fileUrl
});
return;
}
} catch (error) {
console.warn("Hash verification failed:", error);
}
const mergeResponse = await this.requestAdapter.mergeFile({
uploadToken: this.uploadToken.token,
fileHash: this.fileHash || "",
chunkHashes: this.chunks.map((chunk) => chunk.hash)
});
if (mergeResponse.success) {
this.status = "success";
this.endTime = Date.now();
this.eventBus.emit("success", {
taskId: this.id,
fileUrl: mergeResponse.fileUrl
});
} else throw new Error("Merge failed: response.success is false");
}
} catch (error) {
this.status = "error";
this.endTime = Date.now();
this.eventBus.emit("error", {
taskId: this.id,
error
});
throw error;
}
}
/**
* Cancels the upload
*
* Cancels an ongoing or paused upload.
* Once cancelled, the upload cannot be resumed.
*
* @remarks
* - Validates: Requirements 6.3 (cancel functionality and lifecycle events)
* - Works when status is 'uploading' or 'paused'
* - Emits 'cancel' event
* - Sets shouldCancelUpload flag to stop ongoing chunk uploads
* - Cleans up upload record from IndexedDB
* - Status becomes 'cancelled' (terminal state)
*
* @example
* ```typescript
* task.on('cancel', () => {
* console.log('Upload cancelled');
* });
*
* task.cancel();
* ```
*/
cancel() {
if (this.status !== "uploading" && this.status !== "paused") {
console.warn(`Cannot cancel upload: current status is ${this.status}`);
return;
}
this.shouldCancelUpload = true;
this.status = "cancelled";
this.endTime = Date.now();
this.eventBus.emit("cancel", { taskId: this.id });
this.cleanupStorage().catch((error) => {
console.warn("Failed to cleanup upload storage:", error);
});
}
/**
* Cleans up the upload record from IndexedDB
*
* Removes the upload record to free up storage space.
* Called when upload is cancelled or completed.
*
* @remarks
* - Gracefully handles storage errors
* - Does not throw errors
*
* @internal
*/
async cleanupStorage() {
try {
if (this.storage.isAvailable()) await this.storage.deleteRecord(this.id);
} catch (error) {
console.warn("Failed to cleanup storage:", error);
}
}
};

//#endregion
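// --- Illustrative sketch (not part of the published dist file) -----------
// A minimal fetch-based requestAdapter matching the calls UploadTask makes
// above (createFile, uploadChunk, verifyHash, mergeFile). The endpoint paths
// and server response shapes are assumptions for illustration; only the
// request and response fields actually used by UploadTask are shown.
const exampleAdapter = {
  async createFile({ fileName, fileSize, fileType, preferredChunkSize }) {
    const res = await fetch("/api/upload/create", {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify({ fileName, fileSize, fileType, preferredChunkSize })
    });
    // Expected: { uploadToken: { token, fileId, chunkSize, expiresAt }, negotiatedChunkSize }
    return res.json();
  },
  async uploadChunk({ uploadToken, chunkIndex, chunkHash, chunk }) {
    const form = new FormData();
    form.append("uploadToken", uploadToken);
    form.append("chunkIndex", String(chunkIndex));
    form.append("chunkHash", chunkHash);
    form.append("chunk", chunk);
    return (await fetch("/api/upload/chunk", { method: "POST", body: form })).json();
  },
  async verifyHash({ fileHash, chunkHashes, uploadToken }) {
    const res = await fetch("/api/upload/verify", {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify({ fileHash, chunkHashes, uploadToken })
    });
    // Expected: { fileExists, fileUrl?, existingChunks? }
    return res.json();
  },
  async mergeFile({ uploadToken, fileHash, chunkHashes }) {
    const res = await fetch("/api/upload/merge", {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify({ uploadToken, fileHash, chunkHashes })
    });
    // Expected: { success, fileUrl }
    return res.json();
  }
};
// Usage: new UploadTask({ file, requestAdapter: exampleAdapter }).start();
// --------------------------------------------------------------------------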
//#region src/upload-manager.ts
/**
* UploadManager class
*
* Central manager for handling multiple file upload tasks.
* Provides task lifecycle management, plugin system, and persistent storage.
*
* @example
* ```typescript
* const manager = new UploadManager({
* requestAdapter: myAdapter,
* maxConcurrentTasks: 3,
* defaultChunkSize: 1024 * 1024, // 1MB
* });
*
* // Initialize (loads unfinished tasks if enabled)
* await manager.init();
*
* // Create and start a task
* const task = manager.createTask(file);
* await task.start();
*
* // Get all tasks
* const allTasks = manager.getAllTasks();
*
* // Delete a task
* await manager.deleteTask(task.id);
* ```
*/
var UploadManager = class {
/** Map of task ID to UploadTask instances */
tasks;
/** Manager options with defaults applied */
options;
/** Storage instance for persistent task data */
storage;
/** Flag indicating if manager has been initialized */
initialized;
/** Registered plugins */
plugins;
/**
* Creates a new UploadManager instance
*
* @param options - Configuration options for the manager
*
* @remarks
* - Validates: Requirement 8.6 (UploadManager manages multiple tasks)
* - Applies default values for optional parameters
* - Creates storage instance for persistence
* - Does not automatically initialize - call init() explicitly
*/
constructor(options) {
this.tasks = /* @__PURE__ */ new Map();
this.options = {
requestAdapter: options.requestAdapter,
maxConcurrentTasks: options.maxConcurrentTasks ?? 3,
defaultChunkSize: options.defaultChunkSize ?? 1024 * 1024,
defaultConcurrency: options.defaultConcurrency ?? 3,
autoResumeUnfinished: options.autoResumeUnfinished ?? true
};
this.storage = new UploadStorage();
this.plugins = [];
this.initialized = false;
}
/**
* Registers a plugin with the manager
*
* Plugins can hook into task lifecycle events to add custom behavior.
* Plugins are called in the order they were registered.
*
* @param plugin - Plugin instance to register
*
* @remarks
* - Validates: Requirement 6.5 (Plugin mechanism)
* - Validates: Requirement 8.5 (Plugin system)
* - Plugin's install() method is called immediately if provided
* - Plugin errors are caught and logged but don't stop execution
* - Duplicate plugin names are allowed (no uniqueness check)
*
* @example
* ```typescript
* const logger = new LoggerPlugin();
* manager.use(logger);
*
* const stats = new StatisticsPlugin();
* manager.use(stats);
* ```
*/
use(plugin) {
this.plugins.push(plugin);
if (plugin.install) try {
plugin.install(this);
} catch (error) {
console.error(`Plugin "${plugin.name}" install failed:`, error);
}
}
/**
* Calls a plugin hook for all registered plugins
*
* @param hookName - Name of the hook to call
* @param args - Arguments to pass to the hook
*
* @internal
*/
callPluginHook(hookName, ...args) {
for (const plugin of this.plugins) {
const hook = plugin[hookName];
if (hook && typeof hook === "function") try {
hook.apply(plugin, args);
} catch (error) {
console.error(`Plugin "${plugin.name}" hook "${String(hookName)}" failed:`, error);
}
}
}
/**
* Initializes the UploadManager
*
* Performs initialization tasks including:
* - Initializing IndexedDB storage
* - Loading unfinished tasks if autoResumeUnfinished is enabled
*
* @remarks
* - Validates: Requirement 8.6 (initialization and task management)
* - Should be called once before using the manager
* - Safe to call multiple times (idempotent)
* - Gracefully handles storage initialization failures
*
* @example
* ```typescript
* const manager = new UploadManager({ requestAdapter });
* await manager.init();
* ```
*/
async init() {
if (this.initialized) return;
try {
await this.storage.init();
if (this.options.autoResumeUnfinished) await this.loadUnfinishedTasks();
this.initialized = true;
} catch (error) {
this.initialized = true;
}
}
/**
* Creates a new upload task
*
* Creates an UploadTask instance for the given file and adds it to the manager.
* The task is not automatically started - call task.start() to begin upload.
*
* @param file - File to upload
* @param options - Optional task-specific configuration (overrides defaults)
* @returns Created UploadTask instance
*
* @remarks
* - Validates: Requirement 8.6 (task creation and management)
* - Task is added to the manager's task map
* - Uses manager's default options unless overridden
* - Task is not started automatically
*
* @example
* ```typescript
* const task = manager.createTask(file, {
* chunkSize: 2 * 1024 * 1024, // 2MB
* concurrency: 5,
* });
*
* task.on('progress', ({ progress }) => {
* console.log(`Progress: ${progress}%`);
* });
*
* await task.start();
* ```
*/
createTask(file, options) {
const task = new UploadTask({
file,
requestAdapter: this.options.requestAdapter,
chunkSize: options?.chunkSize ?? this.options.defaultChunkSize,
concurrency: options?.concurrency ?? this.options.defaultConcurrency,
retryCount: options?.retryCount ?? 3,
retryDelay: options?.retryDelay ?? 1e3,
autoStart: options?.autoStart ?? false
});
this.tasks.set(task.id, task);
this.callPluginHook("onTaskCreated", task);
this.setupTaskPluginHooks(task);
return task;
}
/**
* Sets up event listeners on a task to call plugin hooks
*
* @param task - Task to set up listeners for
*
* @internal
*/
setupTaskPluginHooks(task) {
task.on("start", () => {
this.callPluginHook("onTaskStart", task);
});
task.on("progress", () => {
const progressData = task.getProgress();
this.callPluginHook("onTaskProgress", task, progressData);
});
task.on("success", ({ fileUrl }) => {
this.callPluginHook("onTaskSuccess", task, fileUrl);
});
task.on("error", ({ error }) => {
this.callPluginHook("onTaskError", task, error);
});
task.on("pause", () => {
this.callPluginHook("onTaskPause", task);
});
task.on("resume", () => {
this.callPluginHook("onTaskResume", task);
});
task.on("cancel", () => {
this.callPluginHook("onTaskCancel", task);
});
}
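// --- Illustrative sketch (not part of the published dist file) -----------
// A minimal plugin shape matching the hooks invoked via callPluginHook above.
// Every hook is optional; "logger" is a hypothetical example name.
const loggerPlugin = {
  name: "logger",
  install(manager) {
    console.info("logger plugin installed, tasks so far:", manager.getAllTasks().length);
  },
  onTaskCreated(task) {
    console.info(`[${task.id}] created: ${task.file.name}`);
  },
  onTaskProgress(task, progress) {
    console.info(`[${task.id}] ${progress.percentage.toFixed(1)}% at ${progress.speed} B/s`);
  },
  onTaskSuccess(task, fileUrl) {
    console.info(`[${task.id}] done: ${fileUrl}`);
  },
  onTaskError(task, error) {
    console.error(`[${task.id}] failed:`, error);
  }
};
// Usage: manager.use(loggerPlugin);
// --------------------------------------------------------------------------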
/**
* Gets a task by its ID
*
* @param taskId - Unique task identifier
* @returns UploadTask instance or undefined if not found
*
* @remarks
* - Validates: Requirement 8.6 (task retrieval)
*
* @example
* ```typescript
* const task = manager.getTask('task_abc123');
* if (task) {
* console.log(`Status: ${task.getStatus()}`);
* }
* ```
*/
getTask(taskId) {
return this.tasks.get(taskId);
}
/**
* Gets all tasks managed by this manager
*
* @returns Array of all UploadTask instances
*
* @remarks
* - Validates: Requirement 8.6 (task retrieval)
* - Returns a new array (safe to modify)
* - Tasks are in insertion order
*
* @example
* ```typescript
* const allTasks = manager.getAllTasks();
* console.log(`Total tasks: ${allTasks.length}`);
*
* // Filter by status
* const uploadingTasks = allTasks.filter(
* task => task.getStatus() === 'uploading'
* );
* ```
*/
getAllTasks() {
return Array.from(this.tasks.values());
}
/**
* Deletes a task from the manager
*
* Cancels the task if it's still running and removes it from the manager.
* Also cleans up the task's storage record.
*
* @param taskId - Unique task identifier
*
* @remarks
* - Validates: Requirement 8.6 (task deletion)
* - Cancels the task if it's still running
* - Removes task from manager's task map
* - Cleans up storage record
* - Safe to call even if task doesn't exist
*
* @example
* ```typescript
* // Delete a specific task
* await manager.deleteTask('task_abc123');
*
* // Delete all completed tasks
* const tasks = manager.getAllTasks();
* for (const task of tasks) {
* if (task.getStatus() === 'success') {
* await manager.deleteTask(task.id);
* }
* }
* ```
*/
async deleteTask(taskId) {
const task = this.tasks.get(taskId);
if (task) {
const status = task.getStatus();
if (status === "uploading" || status === "paused") task.cancel();
this.tasks.delete(taskId);
try {
if (this.storage.isAvailable()) await this.storage.deleteRecord(taskId);
} catch (error) {
console.warn(`Failed to delete storage record for task ${taskId}:`, error);
}
}
}
/**
* Loads unfinished tasks from storage
*
* Retrieves upload records from IndexedDB and creates task placeholders.
* Note: Tasks cannot be automatically resumed because File objects cannot
* be persisted. Users must re-select files to resume uploads.
*
* @remarks
* - Validates: Requirement 4.2 (read unfinished tasks from IndexedDB)
* - Creates task entries in the manager
* - Tasks are in 'paused' state and require file re-selection to resume
* - Gracefully handles storage errors
*
* @internal
*/
async loadUnfinishedTasks() {
try {
if (!this.storage.isAvailable()) return;
await this.storage.getAllRecords();
} catch (error) {}
}
/**
* Gets information about unfinished tasks from storage
*
* Returns metadata about uploads that were not completed in previous sessions.
* This allows UI layers to prompt users to resume uploads by re-selecting files.
*
* @returns Array of unfinished upload records with file metadata
*
* @remarks
* - Validates: Requirement 4.2 (read unfinished tasks from IndexedDB)
* - Validates: Requirement 4.3 (provide interface for resuming tasks)
* - Returns empty array if storage is unavailable or on error
* - File objects cannot be restored - users must re-select files
*
* @example
* ```typescript
* const unfinished = await manager.getUnfinishedTasksInfo();
* if (unfinished.length > 0) {
* console.log('Found unfinished uploads:');
* unfinished.forEach(record => {
* console.log(`- ${record.fileInfo.name} (${record.uploadedChunks.length} chunks uploaded)`);
* });
* }
* ```
*/
async getUnfinishedTasksInfo() {
try {
if (!this.storage.isAvailable()) return [];
return (await this.storage.getAllRecords()).map((record) => ({
taskId: record.taskId,
fileInfo: {
name: record.fileInfo.name,
size: record.fileInfo.size,
type: record.fileInfo.type,
lastModified: record.fileInfo.lastModified
},
uploadedChunks: record.uploadedChunks,
uploadToken: record.uploadToken,
createdAt: record.createdAt,
updatedAt: record.updatedAt
}));
} catch (error) {
console.warn("Failed to get unfinished tasks info:", error);
return [];
}
}
/**
* Resumes an unfinished upload task with a re-selected file
*
* Allows users to resume a previously interrupted upload by providing the
* original task ID and re-selecting the file. The method validates that the
* file matches the stored metadata and creates a new task that continues
* from the last uploaded chunk.
*
* @param taskId - ID of the unfinished task to resume
* @param file - Re-selected file (must match original file metadata)
* @param options - Optional task configuration overrides
* @returns Created UploadTask instance ready to resume
* @throws Error if task record not found or file doesn't match
*
* @remarks
* - Validates: Requirement 4.3 (resume unfinished tasks)
* - Validates: Requirement 4.4 (continue from last uploaded chunk)
|
|
1443
|
+
* - Verifies file matches stored metadata (name, size, type, lastModified)
|
|
1444
|
+
* - Creates new task with stored progress
|
|
1445
|
+
* - Removes old storage record and creates new one with same ID
|
|
1446
|
+
*
|
|
1447
|
+
* @example
|
|
1448
|
+
* ```typescript
|
|
1449
|
+
* // Get unfinished tasks
|
|
1450
|
+
* const unfinished = await manager.getUnfinishedTasksInfo();
|
|
1451
|
+
*
|
|
1452
|
+
* // User re-selects file
|
|
1453
|
+
* const file = await selectFile();
|
|
1454
|
+
*
|
|
1455
|
+
* // Resume upload
|
|
1456
|
+
* try {
|
|
1457
|
+
* const task = await manager.resumeTask(unfinished[0].taskId, file);
|
|
1458
|
+
* await task.start();
|
|
1459
|
+
* } catch (error) {
|
|
1460
|
+
* console.error('Failed to resume:', error);
|
|
1461
|
+
* }
|
|
1462
|
+
* ```
|
|
1463
|
+
*/
|
|
1464
|
+
async resumeTask(taskId, file, options) {
|
|
1465
|
+
if (!this.storage.isAvailable()) throw new Error("Storage is not available - cannot resume task");
|
|
1466
|
+
const record = await this.storage.getRecord(taskId);
|
|
1467
|
+
if (!record) throw new Error(`No unfinished task found with ID: ${taskId}`);
|
|
1468
|
+
if (file.name !== record.fileInfo.name) throw new Error(`File name mismatch: expected "${record.fileInfo.name}", got "${file.name}"`);
|
|
1469
|
+
if (file.size !== record.fileInfo.size) throw new Error(`File size mismatch: expected ${record.fileInfo.size}, got ${file.size}`);
|
|
1470
|
+
if (file.type !== record.fileInfo.type) throw new Error(`File type mismatch: expected "${record.fileInfo.type}", got "${file.type}"`);
|
|
1471
|
+
const task = new UploadTask({
|
|
1472
|
+
file,
|
|
1473
|
+
requestAdapter: this.options.requestAdapter,
|
|
1474
|
+
chunkSize: options?.chunkSize ?? this.options.defaultChunkSize,
|
|
1475
|
+
concurrency: options?.concurrency ?? this.options.defaultConcurrency,
|
|
1476
|
+
retryCount: options?.retryCount ?? 3,
|
|
1477
|
+
retryDelay: options?.retryDelay ?? 1e3,
|
|
1478
|
+
autoStart: options?.autoStart ?? false,
|
|
1479
|
+
resumeTaskId: taskId,
|
|
1480
|
+
resumeUploadToken: record.uploadToken,
|
|
1481
|
+
resumeUploadedChunks: record.uploadedChunks
|
|
1482
|
+
});
|
|
1483
|
+
this.tasks.set(task.id, task);
|
|
1484
|
+
this.callPluginHook("onTaskCreated", task);
|
|
1485
|
+
this.setupTaskPluginHooks(task);
|
|
1486
|
+
try {
|
|
1487
|
+
await this.storage.deleteRecord(taskId);
|
|
1488
|
+
} catch (error) {
|
|
1489
|
+
console.warn(`Failed to delete old storage record for task ${taskId}:`, error);
|
|
1490
|
+
}
|
|
1491
|
+
return task;
|
|
1492
|
+
}
|
|
1493
|
+
/**
|
|
1494
|
+
* Clears a specific unfinished task record from storage
|
|
1495
|
+
*
|
|
1496
|
+
* Removes the storage record for an unfinished task without resuming it.
|
|
1497
|
+
* Useful for cleaning up tasks that the user no longer wants to resume.
|
|
1498
|
+
*
|
|
1499
|
+
* @param taskId - ID of the unfinished task to clear
|
|
1500
|
+
*
|
|
1501
|
+
* @remarks
|
|
1502
|
+
* - Validates: Requirement 4.5 (clear saved upload records)
|
|
1503
|
+
* - Safe to call even if record doesn't exist
|
|
1504
|
+
* - Does not affect active tasks in the manager
|
|
1505
|
+
*
|
|
1506
|
+
* @example
|
|
1507
|
+
* ```typescript
|
|
1508
|
+
* // Clear a specific unfinished task
|
|
1509
|
+
* await manager.clearUnfinishedTask('task_abc123');
|
|
1510
|
+
*
|
|
1511
|
+
* // Clear all unfinished tasks
|
|
1512
|
+
* const unfinished = await manager.getUnfinishedTasksInfo();
|
|
1513
|
+
* for (const record of unfinished) {
|
|
1514
|
+
* await manager.clearUnfinishedTask(record.taskId);
|
|
1515
|
+
* }
|
|
1516
|
+
* ```
|
|
1517
|
+
*/
|
|
1518
|
+
async clearUnfinishedTask(taskId) {
|
|
1519
|
+
try {
|
|
1520
|
+
if (this.storage.isAvailable()) await this.storage.deleteRecord(taskId);
|
|
1521
|
+
} catch (error) {
|
|
1522
|
+
console.warn(`Failed to clear unfinished task ${taskId}:`, error);
|
|
1523
|
+
}
|
|
1524
|
+
}
|
|
1525
|
+
/**
|
|
1526
|
+
* Clears all unfinished task records from storage
|
|
1527
|
+
*
|
|
1528
|
+
* Removes all storage records for unfinished tasks.
|
|
1529
|
+
* Useful for cleaning up when users don't want to resume any uploads.
|
|
1530
|
+
*
|
|
1531
|
+
* @returns Number of records cleared
|
|
1532
|
+
*
|
|
1533
|
+
* @remarks
|
|
1534
|
+
* - Validates: Requirement 4.5 (clear saved upload records)
|
|
1535
|
+
* - Does not affect active tasks in the manager
|
|
1536
|
+
* - Returns 0 if storage is unavailable or on error
|
|
1537
|
+
*
|
|
1538
|
+
* @example
|
|
1539
|
+
* ```typescript
|
|
1540
|
+
* const cleared = await manager.clearAllUnfinishedTasks();
|
|
1541
|
+
* console.log(`Cleared ${cleared} unfinished task(s)`);
|
|
1542
|
+
* ```
|
|
1543
|
+
*/
|
|
1544
|
+
async clearAllUnfinishedTasks() {
|
|
1545
|
+
try {
|
|
1546
|
+
if (!this.storage.isAvailable()) return 0;
|
|
1547
|
+
const count = (await this.storage.getAllRecords()).length;
|
|
1548
|
+
await this.storage.clearAll();
|
|
1549
|
+
return count;
|
|
1550
|
+
} catch (error) {
|
|
1551
|
+
console.warn("Failed to clear all unfinished tasks:", error);
|
|
1552
|
+
return 0;
|
|
1553
|
+
}
|
|
1554
|
+
}
|
|
1555
|
+
/**
|
|
1556
|
+
* Gets the number of tasks in the manager
|
|
1557
|
+
*
|
|
1558
|
+
* @returns Total number of tasks
|
|
1559
|
+
*
|
|
1560
|
+
* @example
|
|
1561
|
+
* ```typescript
|
|
1562
|
+
* console.log(`Total tasks: ${manager.getTaskCount()}`);
|
|
1563
|
+
* ```
|
|
1564
|
+
*/
|
|
1565
|
+
getTaskCount() {
|
|
1566
|
+
return this.tasks.size;
|
|
1567
|
+
}
|
|
1568
|
+
/**
|
|
1569
|
+
* Checks if the manager has been initialized
|
|
1570
|
+
*
|
|
1571
|
+
* @returns True if initialized, false otherwise
|
|
1572
|
+
*
|
|
1573
|
+
* @example
|
|
1574
|
+
* ```typescript
|
|
1575
|
+
* if (!manager.isInitialized()) {
|
|
1576
|
+
* await manager.init();
|
|
1577
|
+
* }
|
|
1578
|
+
* ```
|
|
1579
|
+
*/
|
|
1580
|
+
isInitialized() {
|
|
1581
|
+
return this.initialized;
|
|
1582
|
+
}
|
|
1583
|
+
/**
|
|
1584
|
+
* Clears all completed tasks from the manager
|
|
1585
|
+
*
|
|
1586
|
+
* Removes tasks with 'success', 'error', or 'cancelled' status.
|
|
1587
|
+
* Does not affect running or paused tasks.
|
|
1588
|
+
*
|
|
1589
|
+
* @returns Number of tasks cleared
|
|
1590
|
+
*
|
|
1591
|
+
* @example
|
|
1592
|
+
* ```typescript
|
|
1593
|
+
* const cleared = await manager.clearCompletedTasks();
|
|
1594
|
+
* console.log(`Cleared ${cleared} completed task(s)`);
|
|
1595
|
+
* ```
|
|
1596
|
+
*/
|
|
1597
|
+
async clearCompletedTasks() {
|
|
1598
|
+
const tasks = this.getAllTasks();
|
|
1599
|
+
let clearedCount = 0;
|
|
1600
|
+
for (const task of tasks) {
|
|
1601
|
+
const status = task.getStatus();
|
|
1602
|
+
if (status === "success" || status === "error" || status === "cancelled") {
|
|
1603
|
+
await this.deleteTask(task.id);
|
|
1604
|
+
clearedCount++;
|
|
1605
|
+
}
|
|
1606
|
+
}
|
|
1607
|
+
return clearedCount;
|
|
1608
|
+
}
|
|
1609
|
+
/**
|
|
1610
|
+
* Pauses all running tasks
|
|
1611
|
+
*
|
|
1612
|
+
* Calls pause() on all tasks with 'uploading' status.
|
|
1613
|
+
*
|
|
1614
|
+
* @returns Number of tasks paused
|
|
1615
|
+
*
|
|
1616
|
+
* @example
|
|
1617
|
+
* ```typescript
|
|
1618
|
+
* const paused = manager.pauseAll();
|
|
1619
|
+
* console.log(`Paused ${paused} task(s)`);
|
|
1620
|
+
* ```
|
|
1621
|
+
*/
|
|
1622
|
+
pauseAll() {
|
|
1623
|
+
const tasks = this.getAllTasks();
|
|
1624
|
+
let pausedCount = 0;
|
|
1625
|
+
for (const task of tasks) if (task.getStatus() === "uploading") {
|
|
1626
|
+
task.pause();
|
|
1627
|
+
pausedCount++;
|
|
1628
|
+
}
|
|
1629
|
+
return pausedCount;
|
|
1630
|
+
}
|
|
1631
|
+
/**
|
|
1632
|
+
* Resumes all paused tasks
|
|
1633
|
+
*
|
|
1634
|
+
* Calls resume() on all tasks with 'paused' status.
|
|
1635
|
+
*
|
|
1636
|
+
* @returns Number of tasks resumed
|
|
1637
|
+
*
|
|
1638
|
+
* @example
|
|
1639
|
+
* ```typescript
|
|
1640
|
+
* const resumed = await manager.resumeAll();
|
|
1641
|
+
* console.log(`Resumed ${resumed} task(s)`);
|
|
1642
|
+
* ```
|
|
1643
|
+
*/
|
|
1644
|
+
async resumeAll() {
|
|
1645
|
+
const tasks = this.getAllTasks();
|
|
1646
|
+
let resumedCount = 0;
|
|
1647
|
+
for (const task of tasks) if (task.getStatus() === "paused") try {
|
|
1648
|
+
await task.resume();
|
|
1649
|
+
resumedCount++;
|
|
1650
|
+
} catch (error) {
|
|
1651
|
+
console.warn(`Failed to resume task ${task.id}:`, error);
|
|
1652
|
+
}
|
|
1653
|
+
return resumedCount;
|
|
1654
|
+
}
|
|
1655
|
+
/**
|
|
1656
|
+
* Cancels all running and paused tasks
|
|
1657
|
+
*
|
|
1658
|
+
* Calls cancel() on all tasks that are not in a terminal state.
|
|
1659
|
+
*
|
|
1660
|
+
* @returns Number of tasks cancelled
|
|
1661
|
+
*
|
|
1662
|
+
* @example
|
|
1663
|
+
* ```typescript
|
|
1664
|
+
* const cancelled = manager.cancelAll();
|
|
1665
|
+
* console.log(`Cancelled ${cancelled} task(s)`);
|
|
1666
|
+
* ```
|
|
1667
|
+
*/
|
|
1668
|
+
cancelAll() {
|
|
1669
|
+
const tasks = this.getAllTasks();
|
|
1670
|
+
let cancelledCount = 0;
|
|
1671
|
+
for (const task of tasks) {
|
|
1672
|
+
const status = task.getStatus();
|
|
1673
|
+
if (status === "uploading" || status === "paused") {
|
|
1674
|
+
task.cancel();
|
|
1675
|
+
cancelledCount++;
|
|
1676
|
+
}
|
|
1677
|
+
}
|
|
1678
|
+
return cancelledCount;
|
|
1679
|
+
}
|
|
1680
|
+
/**
|
|
1681
|
+
* Gets statistics about all tasks
|
|
1682
|
+
*
|
|
1683
|
+
* @returns Object containing task statistics
|
|
1684
|
+
*
|
|
1685
|
+
* @example
|
|
1686
|
+
* ```typescript
|
|
1687
|
+
* const stats = manager.getStatistics();
|
|
1688
|
+
* console.log(`Total: ${stats.total}`);
|
|
1689
|
+
* console.log(`Uploading: ${stats.uploading}`);
|
|
1690
|
+
* console.log(`Success: ${stats.success}`);
|
|
1691
|
+
* ```
|
|
1692
|
+
*/
|
|
1693
|
+
getStatistics() {
|
|
1694
|
+
const tasks = this.getAllTasks();
|
|
1695
|
+
const stats = {
|
|
1696
|
+
total: tasks.length,
|
|
1697
|
+
idle: 0,
|
|
1698
|
+
uploading: 0,
|
|
1699
|
+
paused: 0,
|
|
1700
|
+
success: 0,
|
|
1701
|
+
error: 0,
|
|
1702
|
+
cancelled: 0
|
|
1703
|
+
};
|
|
1704
|
+
for (const task of tasks) switch (task.getStatus()) {
|
|
1705
|
+
case "idle":
|
|
1706
|
+
stats.idle++;
|
|
1707
|
+
break;
|
|
1708
|
+
case "uploading":
|
|
1709
|
+
stats.uploading++;
|
|
1710
|
+
break;
|
|
1711
|
+
case "paused":
|
|
1712
|
+
stats.paused++;
|
|
1713
|
+
break;
|
|
1714
|
+
case "success":
|
|
1715
|
+
stats.success++;
|
|
1716
|
+
break;
|
|
1717
|
+
case "error":
|
|
1718
|
+
stats.error++;
|
|
1719
|
+
break;
|
|
1720
|
+
case "cancelled":
|
|
1721
|
+
stats.cancelled++;
|
|
1722
|
+
break;
|
|
1723
|
+
}
|
|
1724
|
+
return stats;
|
|
1725
|
+
}
|
|
1726
|
+
/**
|
|
1727
|
+
* Closes the manager and cleans up resources
|
|
1728
|
+
*
|
|
1729
|
+
* Cancels all running tasks and closes the storage connection.
|
|
1730
|
+
* The manager should not be used after calling this method.
|
|
1731
|
+
*
|
|
1732
|
+
* @example
|
|
1733
|
+
* ```typescript
|
|
1734
|
+
* // Clean up when done
|
|
1735
|
+
* manager.close();
|
|
1736
|
+
* ```
|
|
1737
|
+
*/
|
|
1738
|
+
close() {
|
|
1739
|
+
this.cancelAll();
|
|
1740
|
+
this.storage.close();
|
|
1741
|
+
this.tasks.clear();
|
|
1742
|
+
this.initialized = false;
|
|
1743
|
+
}
|
|
1744
|
+
};
|
|
1745
|
+
|
|
1746
|
+
//#endregion
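/*
 * The session-restore methods above (getUnfinishedTasksInfo, resumeTask, clearUnfinishedTask)
 * are documented individually; the sketch below simply strings them together. It is a minimal
 * example, assuming `manager` is an already-initialized UploadManager and `pickFile()` is an
 * application-provided file picker (not part of this package).
 *
 * ```typescript
 * const unfinished = await manager.getUnfinishedTasksInfo();
 * for (const record of unfinished) {
 *   // File objects cannot be persisted, so the user must re-select the original file.
 *   const file = await pickFile(record.fileInfo.name);
 *   if (!file) {
 *     // User declined: drop the stale record instead of resuming it.
 *     await manager.clearUnfinishedTask(record.taskId);
 *     continue;
 *   }
 *   try {
 *     // resumeTask re-validates name/size/type and continues from the stored chunks.
 *     const task = await manager.resumeTask(record.taskId, file);
 *     await task.start();
 *   } catch (error) {
 *     console.error(`Could not resume ${record.fileInfo.name}:`, error);
 *   }
 * }
 * ```
 */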
//#region src/plugins.ts
/**
 * Logger plugin for debugging and monitoring uploads
 *
 * Logs all task lifecycle events to the console with timestamps.
 * Useful for development and debugging.
 *
 * @remarks
 * - Validates: Requirement 6.5 (Plugin mechanism example)
 * - Validates: Requirement 8.5 (Plugin system example)
 * - Logs to console with timestamps
 * - Can be configured to log only specific events
 *
 * @example
 * ```typescript
 * const manager = new UploadManager({ requestAdapter });
 * manager.use(new LoggerPlugin());
 *
 * // With custom options
 * manager.use(new LoggerPlugin({
 *   logProgress: false, // Don't log progress updates
 *   prefix: '[Upload]' // Custom log prefix
 * }));
 * ```
 */
var LoggerPlugin = class {
	name = "logger";
	options;
	constructor(options) {
		this.options = {
			logProgress: options?.logProgress ?? true,
			logStart: options?.logStart ?? true,
			logSuccess: options?.logSuccess ?? true,
			logError: options?.logError ?? true,
			logPause: options?.logPause ?? true,
			logResume: options?.logResume ?? true,
			logCancel: options?.logCancel ?? true,
			prefix: options?.prefix ?? "[LoggerPlugin]"
		};
	}
	log(message, ...args) {
		const timestamp = (/* @__PURE__ */ new Date()).toISOString();
		console.log(`${this.options.prefix} [${timestamp}]`, message, ...args);
	}
	install() {
		this.log("Plugin installed");
	}
	onTaskCreated(task) {
		this.log(`Task created: ${task.id}`, {
			fileName: task.file.name,
			fileSize: task.file.size,
			fileType: task.file.type
		});
	}
	onTaskStart(task) {
		if (this.options.logStart) this.log(`Task started: ${task.id}`, { fileName: task.file.name });
	}
	onTaskProgress(task, progress) {
		if (this.options.logProgress) this.log(`Task progress: ${task.id}`, {
			percentage: `${progress.percentage.toFixed(2)}%`,
			uploadedBytes: progress.uploadedBytes,
			totalBytes: progress.totalBytes,
			speed: `${(progress.speed / 1024 / 1024).toFixed(2)} MB/s`,
			remainingTime: `${progress.remainingTime.toFixed(0)}s`
		});
	}
	onTaskSuccess(task, fileUrl) {
		if (this.options.logSuccess) this.log(`Task completed: ${task.id}`, {
			fileName: task.file.name,
			fileUrl
		});
	}
	onTaskError(task, error) {
		if (this.options.logError) this.log(`Task error: ${task.id}`, {
			fileName: task.file.name,
			error: error.message,
			stack: error.stack
		});
	}
	onTaskPause(task) {
		if (this.options.logPause) this.log(`Task paused: ${task.id}`, { fileName: task.file.name });
	}
	onTaskResume(task) {
		if (this.options.logResume) this.log(`Task resumed: ${task.id}`, { fileName: task.file.name });
	}
	onTaskCancel(task) {
		if (this.options.logCancel) this.log(`Task cancelled: ${task.id}`, { fileName: task.file.name });
	}
};
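/*
 * LoggerPlugin above implements the full hook surface the manager drives through callPluginHook.
 * A custom plugin only needs a `name` plus the hooks it cares about (StatisticsPlugin below, for
 * example, omits the progress/pause/resume hooks). The sketch below is an illustrative plugin,
 * not part of this package: it assumes `manager` is an existing UploadManager instance, a browser
 * environment that provides `navigator.sendBeacon`, and a hypothetical '/api/upload-report'
 * endpoint.
 *
 * ```typescript
 * const beaconReporter = {
 *   name: 'beacon-reporter',
 *   install() {},
 *   onTaskSuccess(task, fileUrl) {
 *     // Fire-and-forget report of a finished upload.
 *     navigator.sendBeacon('/api/upload-report', JSON.stringify({
 *       taskId: task.id,
 *       fileName: task.file.name,
 *       fileUrl
 *     }));
 *   },
 *   onTaskError(task, error) {
 *     navigator.sendBeacon('/api/upload-report', JSON.stringify({
 *       taskId: task.id,
 *       error: error.message
 *     }));
 *   }
 * };
 * manager.use(beaconReporter);
 * ```
 */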
/**
 * Statistics plugin for tracking upload metrics
 *
 * Collects statistics about uploads including success/error counts,
 * total bytes uploaded, average speed, etc.
 *
 * @remarks
 * - Validates: Requirement 6.5 (Plugin mechanism example)
 * - Validates: Requirement 8.5 (Plugin system example)
 * - Tracks upload statistics in memory
 * - Provides methods to retrieve and reset statistics
 * - Thread-safe for concurrent uploads
 *
 * @example
 * ```typescript
 * const stats = new StatisticsPlugin();
 * manager.use(stats);
 *
 * // Later, get statistics
 * const metrics = stats.getStats();
 * console.log(`Success rate: ${metrics.successRate}%`);
 * console.log(`Total uploaded: ${metrics.totalBytesUploaded} bytes`);
 * console.log(`Average speed: ${metrics.averageSpeed} bytes/s`);
 * ```
 */
var StatisticsPlugin = class {
	name = "statistics";
	stats = {
		totalFiles: 0,
		successCount: 0,
		errorCount: 0,
		cancelledCount: 0,
		totalBytesUploaded: 0,
		totalUploadTime: 0,
		startTimes: /* @__PURE__ */ new Map()
	};
	install() {}
	onTaskCreated(_task) {
		this.stats.totalFiles++;
	}
	onTaskStart(task) {
		this.stats.startTimes.set(task.id, Date.now());
	}
	onTaskSuccess(task, _fileUrl) {
		this.stats.successCount++;
		this.stats.totalBytesUploaded += task.file.size;
		const startTime = this.stats.startTimes.get(task.id);
		if (startTime) {
			const uploadTime = Date.now() - startTime;
			this.stats.totalUploadTime += uploadTime;
			this.stats.startTimes.delete(task.id);
		}
	}
	onTaskError(task, _error) {
		this.stats.errorCount++;
		this.stats.startTimes.delete(task.id);
	}
	onTaskCancel(task) {
		this.stats.cancelledCount++;
		this.stats.startTimes.delete(task.id);
	}
	/**
	 * Gets current statistics
	 *
	 * @returns Object containing upload statistics
	 *
	 * @example
	 * ```typescript
	 * const stats = plugin.getStats();
	 * console.log(`Success rate: ${stats.successRate}%`);
	 * ```
	 */
	getStats() {
		const completedCount = this.stats.successCount + this.stats.errorCount + this.stats.cancelledCount;
		const averageSpeed = this.stats.totalUploadTime > 0 ? this.stats.totalBytesUploaded / this.stats.totalUploadTime * 1e3 : 0;
		const averageUploadTime = this.stats.successCount > 0 ? this.stats.totalUploadTime / this.stats.successCount : 0;
		const successRate = completedCount > 0 ? this.stats.successCount / completedCount * 100 : 0;
		const errorRate = completedCount > 0 ? this.stats.errorCount / completedCount * 100 : 0;
		return {
			totalFiles: this.stats.totalFiles,
			successCount: this.stats.successCount,
			errorCount: this.stats.errorCount,
			cancelledCount: this.stats.cancelledCount,
			totalBytesUploaded: this.stats.totalBytesUploaded,
			averageSpeed,
			averageUploadTime,
			successRate,
			errorRate
		};
	}
	/**
	 * Resets all statistics to zero
	 *
	 * @example
	 * ```typescript
	 * plugin.reset();
	 * ```
	 */
	reset() {
		this.stats = {
			totalFiles: 0,
			successCount: 0,
			errorCount: 0,
			cancelledCount: 0,
			totalBytesUploaded: 0,
			totalUploadTime: 0,
			startTimes: /* @__PURE__ */ new Map()
		};
	}
	/**
	 * Gets a formatted summary of statistics
	 *
	 * @returns Human-readable statistics summary
	 *
	 * @example
	 * ```typescript
	 * console.log(plugin.getSummary());
	 * // Output:
	 * // Upload Statistics:
	 * // Total Files: 10
	 * // Success: 8 (80.00%)
	 * // Errors: 1 (10.00%)
	 * // Cancelled: 1 (10.00%)
	 * // Total Uploaded: 52.43 MB
	 * // Average Speed: 2.15 MB/s
	 * // Average Time: 24.5s
	 * ```
	 */
	getSummary() {
		const stats = this.getStats();
		const formatBytes = (bytes) => {
			return `${(bytes / 1024 / 1024).toFixed(2)} MB`;
		};
		const formatSpeed = (bytesPerSecond) => {
			return `${(bytesPerSecond / 1024 / 1024).toFixed(2)} MB/s`;
		};
		const formatTime = (ms) => {
			return `${(ms / 1e3).toFixed(1)}s`;
		};
		return `Upload Statistics:
Total Files: ${stats.totalFiles}
Success: ${stats.successCount} (${stats.successRate.toFixed(2)}%)
Errors: ${stats.errorCount} (${stats.errorRate.toFixed(2)}%)
Cancelled: ${stats.cancelledCount}
Total Uploaded: ${formatBytes(stats.totalBytesUploaded)}
Average Speed: ${formatSpeed(stats.averageSpeed)}
Average Time: ${formatTime(stats.averageUploadTime)}`;
	}
};
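/*
 * The two built-in plugins can be combined: StatisticsPlugin keeps cumulative upload metrics,
 * while UploadManager.getStatistics() reports per-status task counts. A minimal wiring sketch,
 * assuming `manager` is an already-initialized UploadManager and that uploads are created and
 * awaited elsewhere in the application:
 *
 * ```typescript
 * const statistics = new StatisticsPlugin();
 * manager.use(new LoggerPlugin({ logProgress: false }));
 * manager.use(statistics);
 *
 * // ...after the application's uploads have settled...
 * console.log(statistics.getSummary());           // human-readable totals
 * console.log(statistics.getStats().successRate); // raw numbers
 * console.log(manager.getStatistics());           // { total, idle, uploading, ... }
 * ```
 */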

//#endregion
export { ChunkSizeAdjuster, CongestionState, LoggerPlugin, StatisticsPlugin, TCPChunkSizeAdjuster, UploadManager, UploadTask };
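/*
 * A small sketch of the bulk controls exported above, assuming `manager` is an initialized
 * UploadManager running in a browser tab: pause every active upload when the tab is hidden
 * and resume the paused ones once it becomes visible again.
 *
 * ```typescript
 * document.addEventListener('visibilitychange', () => {
 *   if (document.visibilityState === 'hidden') {
 *     const paused = manager.pauseAll();
 *     console.log(`Paused ${paused} task(s)`);
 *   } else {
 *     manager.resumeAll().then((resumed) => console.log(`Resumed ${resumed} task(s)`));
 *   }
 * });
 * ```
 */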
//# sourceMappingURL=index.mjs.map