@karpeleslab/klbfw 0.2.24 → 0.2.26

package/index.d.ts CHANGED
@@ -237,6 +237,8 @@ interface UploadFileOptions {
   onProgress?: (progress: number) => void;
   /** Error callback - resolve to retry, reject to fail */
   onError?: (error: Error, context: { phase: string; blockNum?: number; attempt: number }) => Promise<void>;
+  /** AbortSignal for cancellation - use AbortController.signal */
+  signal?: AbortSignal;
 }
 
 /** Options for uploadManyFiles */
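The `signal` option is the only change to the public typings in this release. A minimal usage sketch, assuming `uploadFile` is exported from the package entry point as the typings suggest (the API route, buffer, and filename are illustrative):

    const { uploadFile } = require('@karpeleslab/klbfw');

    const buffer = Buffer.from('hello');
    const controller = new AbortController();
    const upload = uploadFile('Misc/Debug:testUpload', buffer, 'POST',
      { filename: 'file.bin' }, null, { signal: controller.signal });

    // Cancel from anywhere that holds the controller
    controller.abort();

    upload.catch(err => {
      if (err.name === 'AbortError') console.log('upload cancelled');
    });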
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@karpeleslab/klbfw",
-  "version": "0.2.24",
+  "version": "0.2.26",
   "description": "Frontend Framework",
   "main": "index.js",
   "types": "index.d.ts",
package/upload-internal.js CHANGED
@@ -222,9 +222,10 @@ const utils = {
  * @param {*} body - Request body
  * @param {Object} headers - Request headers
  * @param {Object} context - Request context
+ * @param {AbortSignal} [signal] - Optional AbortSignal for cancellation
  * @returns {Promise} - Request promise
  */
-function awsReq(upInfo, method, query, body, headers, context) {
+function awsReq(upInfo, method, query, body, headers, context, signal) {
   headers = headers || {};
   context = context || {};
 
@@ -305,11 +306,15 @@ function awsReq(upInfo, method, query, body, headers, context) {
       headers["Authorization"] = response.data.authorization;
 
       // Make the actual request to S3
-      return utils.fetch(url, {
+      const fetchOptions = {
         method,
         body,
         headers
-      });
+      };
+      if (signal) {
+        fetchOptions.signal = signal;
+      }
+      return utils.fetch(url, fetchOptions);
     })
     .then(resolve)
     .catch(reject);
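Threading the signal through to fetch is what makes cancellation propagate: when the controller fires, fetch rejects with an error whose `name` is `'AbortError'`, which the retry helpers in upload.js below treat as non-retryable. A standalone sketch of this standard fetch behaviour (the URL is illustrative):

    const controller = new AbortController();

    fetch('https://example.com/part', { signal: controller.signal })
      .catch(err => {
        // A DOMException named 'AbortError', distinct from a network error
        if (err.name === 'AbortError') console.log('request aborted');
      });

    controller.abort();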
package/upload-many.js CHANGED
@@ -40,7 +40,9 @@ const { uploadFile } = require('./upload');
  * Context includes { fileIndex, phase, attempt } where phase is 'file' for file-level errors,
  * or 'upload'/'init'/'complete' for block-level errors (also includes blockNum for 'upload').
  * @param {number} [options.concurrency=3] - Maximum concurrent uploads (1-10)
- * @returns {Promise<Array>} - Resolves with array of upload results in same order as input files
+ * @param {AbortSignal} [options.signal] - AbortSignal for cancellation. Use AbortController to cancel.
+ * @returns {Promise<Array>} - Resolves with array of upload results in same order as input files.
+ *   Rejects with AbortError if cancelled.
  *
  * @example
  * // Upload multiple files from a file input
@@ -76,7 +78,14 @@ async function uploadManyFiles(api, files, method, params, context, options) {
   }
 
   const concurrency = Math.min(Math.max(options.concurrency || 3, 1), 10);
-  const { onProgress, onFileComplete, onError } = options;
+  const { onProgress, onFileComplete, onError, signal } = options;
+
+  // Check if already aborted
+  if (signal && signal.aborted) {
+    const error = new Error('Upload aborted');
+    error.name = 'AbortError';
+    throw error;
+  }
 
   // Results array in same order as input
   const results = new Array(fileCount);
@@ -122,6 +131,11 @@ async function uploadManyFiles(api, files, method, params, context, options) {
       }
     };
 
+    // Pass signal to each file upload
+    if (signal) {
+      fileOptions.signal = signal;
+    }
+
     // Wrap onError to include fileIndex for block-level errors
     if (onError) {
       fileOptions.onError = (error, ctx) => {
@@ -142,6 +156,11 @@ async function uploadManyFiles(api, files, method, params, context, options) {
 
       return result;
     } catch (error) {
+      // Re-throw abort errors immediately without retry
+      if (error.name === 'AbortError') {
+        throw error;
+      }
+
       // Give onError a chance to retry the whole file
       if (onError) {
         try {
@@ -161,19 +180,37 @@ async function uploadManyFiles(api, files, method, params, context, options) {
     }
   };
 
+  // Track if aborted
+  let aborted = false;
+  let abortError = null;
+
   // Process files with concurrency limit
   const processQueue = async () => {
     const workers = [];
 
     for (let i = 0; i < concurrency; i++) {
       workers.push((async () => {
-        while (nextIndex < fileCount) {
+        while (nextIndex < fileCount && !aborted) {
+          // Check for abort before starting next file
+          if (signal && signal.aborted) {
+            aborted = true;
+            abortError = new Error('Upload aborted');
+            abortError.name = 'AbortError';
+            return;
+          }
+
           const fileIndex = nextIndex++;
           running.add(fileIndex);
 
           try {
             await uploadOne(fileIndex);
           } catch (error) {
+            // If aborted, stop processing and propagate
+            if (error.name === 'AbortError') {
+              aborted = true;
+              abortError = error;
+              return;
+            }
             // Continue with next file even if one fails
             // Error is already stored in results
           } finally {
@@ -188,6 +225,11 @@ async function uploadManyFiles(api, files, method, params, context, options) {
 
   await processQueue();
 
+  // If aborted, throw the abort error
+  if (aborted && abortError) {
+    throw abortError;
+  }
+
   // Check if any uploads failed
   const errors = results.filter(r => r && r.error).map(r => r.error);
   if (errors.length > 0) {
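Taken together, these checks give uploadManyFiles cooperative cancellation: workers stop picking up new files, the shared signal aborts in-flight block requests, and the whole call rejects with the AbortError. A usage sketch (the API route and DOM elements are illustrative):

    const controller = new AbortController();

    const batch = uploadManyFiles('Misc/Debug:testUpload', fileInput.files, 'POST', {}, null, {
      concurrency: 3,
      signal: controller.signal,
      onProgress: (p) => console.log(`${Math.round(p * 100)}%`)
    });

    cancelButton.addEventListener('click', () => controller.abort());

    batch.catch(err => {
      if (err.name === 'AbortError') console.log('batch cancelled');
    });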
package/upload.js CHANGED
@@ -13,6 +13,17 @@ const rest = require('./rest');
 const fwWrapper = require('./fw-wrapper');
 const { env, utils, awsReq, readChunkFromStream, readFileSlice } = require('./upload-internal');
 
+/**
+ * Sleep for a specified duration with exponential backoff and jitter
+ * @private
+ */
+function retryDelay(attempt, maxRetries) {
+  // Exponential backoff: 1s, 2s, 4s (capped at 4s) plus random jitter (0-500ms)
+  const baseDelay = Math.min(1000 * Math.pow(2, attempt - 1), 4000);
+  const jitter = Math.random() * 500;
+  return new Promise(resolve => setTimeout(resolve, baseDelay + jitter));
+}
+
 /**
  * Simple file upload function
  *
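For reference, the delay schedule this helper produces (note that the maxRetries parameter is accepted but never used; every call site in this diff passes only attempt):

    for (const attempt of [1, 2, 3, 4]) {
      const base = Math.min(1000 * Math.pow(2, attempt - 1), 4000);
      console.log(`attempt ${attempt}: ${base}-${base + 500}ms`);
    }
    // attempt 1: 1000-1500ms
    // attempt 2: 2000-2500ms
    // attempt 3: 4000-4500ms
    // attempt 4: 4000-4500ms (capped)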
@@ -34,10 +45,12 @@ const { env, utils, awsReq, readChunkFromStream, readFileSlice } = require('./up
  * @param {Object} [context=null] - Request context (uses default context if not provided)
  * @param {Object} [options={}] - Upload options
  * @param {Function} [options.onProgress] - Progress callback(progress) where progress is 0-1
- * @param {Function} [options.onError] - Error callback(error, context). Can return a Promise
- *   that, if resolved, will cause the failed operation to be retried. Context contains
- *   { phase, blockNum, attempt } for block uploads or { phase, attempt } for other operations.
- * @returns {Promise<Object>} - Resolves with the full REST response
+ * @param {Function} [options.onError] - Error callback(error, context). Called only after 3
+ *   automatic retries have failed. Can return a Promise that, if resolved, will reset the
+ *   retry counter and attempt 3 more automatic retries. Context contains { phase, blockNum,
+ *   attempt } for block uploads or { phase, attempt } for other operations.
+ * @param {AbortSignal} [options.signal] - AbortSignal for cancellation. Use AbortController to cancel.
+ * @returns {Promise<Object>} - Resolves with the full REST response. Rejects with AbortError if cancelled.
  *
  * @example
  * // Upload a buffer with filename
@@ -48,18 +61,16 @@ const { env, utils, awsReq, readChunkFromStream, readFileSlice } = require('./up
  * });
  *
  * @example
- * // Upload with progress and error handling
+ * // Upload with progress - transient failures are automatically retried up to 3 times
  * const result = await uploadFile('Misc/Debug:testUpload', buffer, 'POST', {
  *   filename: 'large-file.bin'
  * }, null, {
  *   onProgress: (progress) => console.log(`${Math.round(progress * 100)}%`),
  *   onError: async (error, ctx) => {
- *     console.log(`Error in ${ctx.phase}, attempt ${ctx.attempt}:`, error.message);
- *     if (ctx.attempt < 3) {
- *       await new Promise(r => setTimeout(r, 1000)); // Wait 1s before retry
- *       return; // Resolve to trigger retry
- *     }
- *     throw error; // Give up after 3 attempts
+ *     // Called only after 3 automatic retries have failed
+ *     console.log(`Error in ${ctx.phase} after ${ctx.attempt} attempts:`, error.message);
+ *     // Resolve to reset counter and try 3 more times, or throw to give up
+ *     throw error;
  *   }
  * });
  *
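Under the new semantics, each onError resolution buys another round of 3 automatic retries, so a handler can cap total work by counting rounds rather than attempts. A hypothetical handler allowing two rounds before giving up (the rounds bookkeeping is illustrative, not part of the library):

    let rounds = 0;
    const onError = async (error, ctx) => {
      // Invoked once per exhausted round of 3 automatic retries
      if (++rounds < 2) return; // resolve: counter resets, 3 more attempts follow
      throw error;              // second round failed too, give up
    };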
@@ -76,6 +87,25 @@ const { env, utils, awsReq, readChunkFromStream, readFileSlice } = require('./up
  *   type: 'application/octet-stream',
  *   size: 2199023255552 // optional: if known, enables optimal block sizing
  * });
+ *
+ * @example
+ * // Upload with cancellation support
+ * const controller = new AbortController();
+ * const uploadPromise = uploadFile('Misc/Debug:testUpload', buffer, 'POST', {
+ *   filename: 'large-file.bin'
+ * }, null, {
+ *   signal: controller.signal,
+ *   onProgress: (progress) => console.log(`${Math.round(progress * 100)}%`)
+ * });
+ * // Cancel after 5 seconds
+ * setTimeout(() => controller.abort(), 5000);
+ * try {
+ *   const result = await uploadPromise;
+ * } catch (err) {
+ *   if (err.name === 'AbortError') {
+ *     console.log('Upload was cancelled');
+ *   }
+ * }
  */
 async function uploadFile(api, buffer, method, params, context, options) {
   // Handle default values
@@ -83,6 +113,13 @@ async function uploadFile(api, buffer, method, params, context, options) {
   params = params || {};
   options = options || {};
 
+  // Check if already aborted
+  if (options.signal && options.signal.aborted) {
+    const error = new Error('Upload aborted');
+    error.name = 'AbortError';
+    throw error;
+  }
+
   // Get context from framework if not provided, and add available values
   if (!context) {
     context = fwWrapper.getContext();
@@ -202,7 +239,16 @@ async function uploadFile(api, buffer, method, params, context, options) {
  * @private
  */
 async function doPutUpload(file, uploadInfo, context, options) {
-  const { onProgress, onError } = options;
+  const { onProgress, onError, signal } = options;
+
+  // Helper to check abort status
+  const checkAbort = () => {
+    if (signal && signal.aborted) {
+      const error = new Error('Upload aborted');
+      error.name = 'AbortError';
+      throw error;
+    }
+  };
 
   // Calculate block size
   // - If size known: use server's Blocksize or file size
@@ -228,6 +274,9 @@ async function doPutUpload(file, uploadInfo, context, options) {
     const pendingUploads = [];
 
     while (!streamEnded || pendingUploads.length > 0) {
+      // Check for abort before reading more data
+      checkAbort();
+
       // Read and start uploads up to maxConcurrent
       while (!streamEnded && pendingUploads.length < maxConcurrent) {
         const chunkData = await readChunkFromStream(file.stream, blockSize);
@@ -243,7 +292,7 @@ async function doPutUpload(file, uploadInfo, context, options) {
         // Only add Content-Range for multi-block uploads
         const useContentRange = blocks === null || blocks > 1;
         const uploadPromise = uploadPutBlockWithDataAndRetry(
-          uploadInfo, currentBlock, startByte, chunkData, file.type, onError, useContentRange
+          uploadInfo, currentBlock, startByte, chunkData, file.type, onError, useContentRange, signal
         ).then(() => {
           completedBlocks++;
           if (onProgress && blocks) {
@@ -267,10 +316,13 @@ async function doPutUpload(file, uploadInfo, context, options) {
   } else {
     // Buffer-based upload: original logic
     for (let i = 0; i < blocks; i += maxConcurrent) {
+      // Check for abort before starting next batch
+      checkAbort();
+
       const batch = [];
       for (let j = i; j < Math.min(i + maxConcurrent, blocks); j++) {
         batch.push(
-          uploadPutBlockWithRetry(file, uploadInfo, j, blockSize, onError)
+          uploadPutBlockWithRetry(file, uploadInfo, j, blockSize, onError, signal)
             .then(() => {
               completedBlocks++;
               if (onProgress) {
@@ -285,6 +337,8 @@ async function doPutUpload(file, uploadInfo, context, options) {
   }
 
   // All blocks done, call completion with retry support
+  checkAbort();
+
   let attempt = 0;
   while (true) {
     attempt++;
@@ -292,9 +346,17 @@ async function doPutUpload(file, uploadInfo, context, options) {
       const completeResponse = await rest.rest(uploadInfo.Complete, 'POST', {}, context);
       return completeResponse;
     } catch (error) {
+      // Check if aborted during completion
+      checkAbort();
+      // Auto-retry up to 3 times before triggering onError
+      if (attempt < 3) {
+        await retryDelay(attempt);
+        continue;
+      }
       if (onError) {
         await onError(error, { phase: 'complete', attempt });
-        // If onError resolves, retry
+        // If onError resolves, reset attempt counter and retry
+        attempt = 0;
         continue;
       }
       throw error;
@@ -306,7 +368,7 @@ async function doPutUpload(file, uploadInfo, context, options) {
  * Upload a single block via PUT with pre-read data and retry support
  * @private
  */
-async function uploadPutBlockWithDataAndRetry(uploadInfo, blockNum, startByte, data, contentType, onError, useContentRange) {
+async function uploadPutBlockWithDataAndRetry(uploadInfo, blockNum, startByte, data, contentType, onError, useContentRange, signal) {
   let attempt = 0;
   while (true) {
     attempt++;
@@ -320,11 +382,16 @@ async function uploadPutBlockWithDataAndRetry(uploadInfo, blockNum, startByte, d
         headers['Content-Range'] = `bytes ${startByte}-${startByte + data.byteLength - 1}/*`;
       }
 
-      const response = await utils.fetch(uploadInfo.PUT, {
+      const fetchOptions = {
         method: 'PUT',
         body: data,
         headers: headers
-      });
+      };
+      if (signal) {
+        fetchOptions.signal = signal;
+      }
+
+      const response = await utils.fetch(uploadInfo.PUT, fetchOptions);
 
       if (!response.ok) {
         throw new Error(`HTTP ${response.status}: ${response.statusText}`);
@@ -333,8 +400,19 @@ async function uploadPutBlockWithDataAndRetry(uploadInfo, blockNum, startByte, d
       await response.text();
       return;
     } catch (error) {
+      // Re-throw abort errors immediately
+      if (error.name === 'AbortError') {
+        throw error;
+      }
+      // Auto-retry up to 3 times before triggering onError
+      if (attempt < 3) {
+        await retryDelay(attempt);
+        continue;
+      }
       if (onError) {
         await onError(error, { phase: 'upload', blockNum, attempt });
+        // If onError resolves, reset attempt counter and retry
+        attempt = 0;
         continue;
       }
       throw error;
@@ -346,16 +424,26 @@ async function uploadPutBlockWithDataAndRetry(uploadInfo, blockNum, startByte, d
  * Upload a single block via PUT with retry support
  * @private
  */
-async function uploadPutBlockWithRetry(file, uploadInfo, blockNum, blockSize, onError) {
+async function uploadPutBlockWithRetry(file, uploadInfo, blockNum, blockSize, onError, signal) {
   let attempt = 0;
   while (true) {
     attempt++;
     try {
-      return await uploadPutBlock(file, uploadInfo, blockNum, blockSize);
+      return await uploadPutBlock(file, uploadInfo, blockNum, blockSize, signal);
     } catch (error) {
+      // Re-throw abort errors immediately
+      if (error.name === 'AbortError') {
+        throw error;
+      }
+      // Auto-retry up to 3 times before triggering onError
+      if (attempt < 3) {
+        await retryDelay(attempt);
+        continue;
+      }
       if (onError) {
         await onError(error, { phase: 'upload', blockNum, attempt });
-        // If onError resolves, retry
+        // If onError resolves, reset attempt counter and retry
+        attempt = 0;
         continue;
       }
       throw error;
@@ -367,7 +455,7 @@ async function uploadPutBlockWithRetry(file, uploadInfo, blockNum, blockSize, on
  * Upload a single block via PUT
  * @private
  */
-async function uploadPutBlock(file, uploadInfo, blockNum, blockSize) {
+async function uploadPutBlock(file, uploadInfo, blockNum, blockSize, signal) {
   const startByte = blockNum * blockSize;
   const endByte = Math.min(startByte + blockSize, file.size);
 
@@ -383,11 +471,16 @@ async function uploadPutBlock(file, uploadInfo, blockNum, blockSize) {
     headers['Content-Range'] = `bytes ${startByte}-${endByte - 1}/*`;
   }
 
-  const response = await utils.fetch(uploadInfo.PUT, {
+  const fetchOptions = {
     method: 'PUT',
     body: arrayBuffer,
     headers: headers
-  });
+  };
+  if (signal) {
+    fetchOptions.signal = signal;
+  }
+
+  const response = await utils.fetch(uploadInfo.PUT, fetchOptions);
 
   if (!response.ok) {
     throw new Error(`HTTP ${response.status}: ${response.statusText}`);
@@ -401,7 +494,25 @@ async function uploadPutBlock(file, uploadInfo, blockNum, blockSize) {
  * @private
  */
 async function doAwsUpload(file, uploadInfo, context, options) {
-  const { onProgress, onError } = options;
+  const { onProgress, onError, signal } = options;
+
+  // Helper to check abort status
+  const checkAbort = () => {
+    if (signal && signal.aborted) {
+      const error = new Error('Upload aborted');
+      error.name = 'AbortError';
+      throw error;
+    }
+  };
+
+  // Helper to abort AWS multipart upload (best effort, don't throw on failure)
+  const abortMultipartUpload = async (uploadId) => {
+    try {
+      await awsReq(uploadInfo, 'DELETE', `uploadId=${uploadId}`, '', null, context);
+    } catch (e) {
+      // Ignore errors during abort - this is cleanup
+    }
+  };
 
   // Calculate block size
   // - If size known: target ~10k parts, min 5MB
@@ -417,6 +528,9 @@ async function doAwsUpload(file, uploadInfo, context, options) {
     blockSize = 551550976; // 526MB
   }
 
+  // Check for abort before starting
+  checkAbort();
+
   // Initialize multipart upload with retry support
   let uploadId;
   let initAttempt = 0;
@@ -429,15 +543,27 @@ async function doAwsUpload(file, uploadInfo, context, options) {
         'uploads=',
         '',
         { 'Content-Type': file.type || 'application/octet-stream', 'X-Amz-Acl': 'private' },
-        context
+        context,
+        signal
       );
       const initXml = await initResponse.text();
       const dom = utils.parseXML(initXml);
       uploadId = dom.querySelector('UploadId').innerHTML;
       break;
     } catch (error) {
+      // Re-throw abort errors immediately
+      if (error.name === 'AbortError') {
+        throw error;
+      }
+      // Auto-retry up to 3 times before triggering onError
+      if (initAttempt < 3) {
+        await retryDelay(initAttempt);
+        continue;
+      }
       if (onError) {
         await onError(error, { phase: 'init', attempt: initAttempt });
+        // If onError resolves, reset attempt counter and retry
+        initAttempt = 0;
         continue;
       }
       throw error;
@@ -448,66 +574,84 @@ async function doAwsUpload(file, uploadInfo, context, options) {
   const maxConcurrent = 3;
   let completedBlocks = 0;
 
-  // Stream-based upload: read sequentially, upload in parallel
-  if (file.stream) {
-    let blockNum = 0;
-    let streamEnded = false;
-    const pendingUploads = [];
+  // Wrap upload in try/catch to abort multipart upload on cancel
+  try {
+    // Stream-based upload: read sequentially, upload in parallel
+    if (file.stream) {
+      let blockNum = 0;
+      let streamEnded = false;
+      const pendingUploads = [];
+
+      while (!streamEnded || pendingUploads.length > 0) {
+        // Check for abort before reading more data
+        checkAbort();
+
+        // Read and start uploads up to maxConcurrent
+        while (!streamEnded && pendingUploads.length < maxConcurrent) {
+          const chunkData = await readChunkFromStream(file.stream, blockSize);
+          if (chunkData === null) {
+            streamEnded = true;
+            break;
+          }
 
-    while (!streamEnded || pendingUploads.length > 0) {
-      // Read and start uploads up to maxConcurrent
-      while (!streamEnded && pendingUploads.length < maxConcurrent) {
-        const chunkData = await readChunkFromStream(file.stream, blockSize);
-        if (chunkData === null) {
-          streamEnded = true;
-          break;
+          const currentBlock = blockNum++;
+          const uploadPromise = uploadAwsBlockWithDataAndRetry(
+            uploadInfo, uploadId, currentBlock, chunkData, context, onError, signal
+          ).then(etag => {
+            etags[currentBlock] = etag;
+            completedBlocks++;
+            if (onProgress && blocks) {
+              onProgress(completedBlocks / blocks);
+            }
+          });
+
+          pendingUploads.push(uploadPromise);
         }
 
-        const currentBlock = blockNum++;
-        const uploadPromise = uploadAwsBlockWithDataAndRetry(
-          uploadInfo, uploadId, currentBlock, chunkData, context, onError
-        ).then(etag => {
-          etags[currentBlock] = etag;
-          completedBlocks++;
-          if (onProgress && blocks) {
-            onProgress(completedBlocks / blocks);
-          }
-        });
-
-        pendingUploads.push(uploadPromise);
+        // Wait for at least one upload to complete before reading more
+        if (pendingUploads.length > 0) {
+          // Create indexed promises that return their index when done
+          const indexedPromises = pendingUploads.map((p, idx) => p.then(() => idx));
+          const completedIdx = await Promise.race(indexedPromises);
+          pendingUploads.splice(completedIdx, 1);
+        }
       }
 
-      // Wait for at least one upload to complete before reading more
-      if (pendingUploads.length > 0) {
-        // Create indexed promises that return their index when done
-        const indexedPromises = pendingUploads.map((p, idx) => p.then(() => idx));
-        const completedIdx = await Promise.race(indexedPromises);
-        pendingUploads.splice(completedIdx, 1);
-      }
-    }
+      blocks = blockNum; // Now we know the total
+    } else {
+      // Buffer-based upload: original logic
+      for (let i = 0; i < blocks; i += maxConcurrent) {
+        // Check for abort before starting next batch
+        checkAbort();
+
+        const batch = [];
+        for (let j = i; j < Math.min(i + maxConcurrent, blocks); j++) {
+          batch.push(
+            uploadAwsBlockWithRetry(file, uploadInfo, uploadId, j, blockSize, context, onError, signal)
+              .then(etag => {
+                etags[j] = etag;
+                completedBlocks++;
+                if (onProgress) {
+                  onProgress(completedBlocks / blocks);
+                }
+              })
+          );
+        }
 
-    blocks = blockNum; // Now we know the total
-  } else {
-    // Buffer-based upload: original logic
-    for (let i = 0; i < blocks; i += maxConcurrent) {
-      const batch = [];
-      for (let j = i; j < Math.min(i + maxConcurrent, blocks); j++) {
-        batch.push(
-          uploadAwsBlockWithRetry(file, uploadInfo, uploadId, j, blockSize, context, onError)
-            .then(etag => {
-              etags[j] = etag;
-              completedBlocks++;
-              if (onProgress) {
-                onProgress(completedBlocks / blocks);
-              }
-            })
-        );
+        await Promise.all(batch);
       }
-
-      await Promise.all(batch);
     }
+  } catch (error) {
+    // On abort, try to clean up the AWS multipart upload
+    if (error.name === 'AbortError') {
+      await abortMultipartUpload(uploadId);
+    }
+    throw error;
   }
 
+  // Check for abort before completing
+  checkAbort();
+
   // Complete multipart upload with retry support
   let xml = '<CompleteMultipartUpload>';
   for (let i = 0; i < blocks; i++) {
@@ -519,18 +663,33 @@ async function doAwsUpload(file, uploadInfo, context, options) {
   while (true) {
     completeAttempt++;
     try {
-      const completeResponse = await awsReq(uploadInfo, 'POST', `uploadId=${uploadId}`, xml, null, context);
+      const completeResponse = await awsReq(uploadInfo, 'POST', `uploadId=${uploadId}`, xml, null, context, signal);
       await completeResponse.text();
       break;
     } catch (error) {
+      // On abort, try to clean up the AWS multipart upload
+      if (error.name === 'AbortError') {
+        await abortMultipartUpload(uploadId);
+        throw error;
+      }
+      // Auto-retry up to 3 times before triggering onError
+      if (completeAttempt < 3) {
+        await retryDelay(completeAttempt);
+        continue;
+      }
       if (onError) {
         await onError(error, { phase: 'complete', attempt: completeAttempt });
+        // If onError resolves, reset attempt counter and retry
+        completeAttempt = 0;
         continue;
       }
       throw error;
     }
   }
 
+  // Check for abort before server-side completion
+  checkAbort();
+
   // Call server-side completion handler with retry support
   let handleAttempt = 0;
   while (true) {
@@ -544,8 +703,17 @@ async function doAwsUpload(file, uploadInfo, context, options) {
       );
       return finalResponse;
     } catch (error) {
+      // Check if aborted during completion
+      checkAbort();
+      // Auto-retry up to 3 times before triggering onError
+      if (handleAttempt < 3) {
+        await retryDelay(handleAttempt);
+        continue;
+      }
       if (onError) {
         await onError(error, { phase: 'handleComplete', attempt: handleAttempt });
+        // If onError resolves, reset attempt counter and retry
+        handleAttempt = 0;
         continue;
       }
       throw error;
@@ -557,7 +725,7 @@ async function doAwsUpload(file, uploadInfo, context, options) {
  * Upload a block to AWS S3 with pre-read data and retry support
  * @private
  */
-async function uploadAwsBlockWithDataAndRetry(uploadInfo, uploadId, blockNum, data, context, onError) {
+async function uploadAwsBlockWithDataAndRetry(uploadInfo, uploadId, blockNum, data, context, onError, signal) {
   let attempt = 0;
   while (true) {
     attempt++;
@@ -569,7 +737,8 @@ async function uploadAwsBlockWithDataAndRetry(uploadInfo, uploadId, blockNum, da
         `partNumber=${awsPartNumber}&uploadId=${uploadId}`,
         data,
         null,
-        context
+        context,
+        signal
       );
 
       if (!response.ok) {
@@ -580,8 +749,19 @@ async function uploadAwsBlockWithDataAndRetry(uploadInfo, uploadId, blockNum, da
       await response.text();
       return etag;
     } catch (error) {
+      // Re-throw abort errors immediately
+      if (error.name === 'AbortError') {
+        throw error;
+      }
+      // Auto-retry up to 3 times before triggering onError
+      if (attempt < 3) {
+        await retryDelay(attempt);
+        continue;
+      }
       if (onError) {
         await onError(error, { phase: 'upload', blockNum, attempt });
+        // If onError resolves, reset attempt counter and retry
+        attempt = 0;
         continue;
       }
       throw error;
@@ -593,15 +773,26 @@ async function uploadAwsBlockWithDataAndRetry(uploadInfo, uploadId, blockNum, da
  * Upload a single block to AWS S3 with retry support
  * @private
  */
-async function uploadAwsBlockWithRetry(file, uploadInfo, uploadId, blockNum, blockSize, context, onError) {
+async function uploadAwsBlockWithRetry(file, uploadInfo, uploadId, blockNum, blockSize, context, onError, signal) {
   let attempt = 0;
   while (true) {
     attempt++;
     try {
-      return await uploadAwsBlock(file, uploadInfo, uploadId, blockNum, blockSize, context);
+      return await uploadAwsBlock(file, uploadInfo, uploadId, blockNum, blockSize, context, signal);
     } catch (error) {
+      // Re-throw abort errors immediately
+      if (error.name === 'AbortError') {
+        throw error;
+      }
+      // Auto-retry up to 3 times before triggering onError
+      if (attempt < 3) {
+        await retryDelay(attempt);
+        continue;
+      }
       if (onError) {
         await onError(error, { phase: 'upload', blockNum, attempt });
+        // If onError resolves, reset attempt counter and retry
+        attempt = 0;
         continue;
       }
       throw error;
@@ -613,7 +804,7 @@ async function uploadAwsBlockWithRetry(file, uploadInfo, uploadId, blockNum, blo
  * Upload a single block to AWS S3
  * @private
  */
-async function uploadAwsBlock(file, uploadInfo, uploadId, blockNum, blockSize, context) {
+async function uploadAwsBlock(file, uploadInfo, uploadId, blockNum, blockSize, context, signal) {
   const startByte = blockNum * blockSize;
   const endByte = Math.min(startByte + blockSize, file.size);
   const awsPartNumber = blockNum + 1; // AWS uses 1-based part numbers
@@ -626,7 +817,8 @@ async function uploadAwsBlock(file, uploadInfo, uploadId, blockNum, blockSize, c
     `partNumber=${awsPartNumber}&uploadId=${uploadId}`,
     arrayBuffer,
     null,
-    context
+    context,
+    signal
   );
 
   if (!response.ok) {