@karpeleslab/klbfw 0.2.17 → 0.2.18

Files changed (2)
  1. package/package.json +1 -1
  2. package/upload.js +500 -79
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@karpeleslab/klbfw",
-  "version": "0.2.17",
+  "version": "0.2.18",
   "description": "Frontend Framework",
   "main": "index.js",
   "types": "index.d.ts",
package/upload.js CHANGED
@@ -39,23 +39,22 @@
  * ```js
  * // For Node.js environments, first install dependencies:
  * // npm install node-fetch @xmldom/xmldom
- *
- * // Create a buffer-based file object for upload
- * const file = {
- *   name: 'test.txt',
- *   size: buffer.length,
- *   type: 'text/plain',
- *   content: buffer, // Buffer or ArrayBuffer with file content
- *   lastModified: Date.now(),
- *   slice: function(start, end) {
- *     return {
- *       content: this.content.slice(start, end)
- *     };
- *   }
- * };
- *
- * upload.append('Misc/Debug:testUpload', file)
- *   .then(result => console.log('Upload complete', result));
+ *
+ * // Simple upload with a buffer
+ * const { uploadFile } = require('./upload');
+ * const buffer = Buffer.from('Hello, World!');
+ * const result = await uploadFile('Misc/Debug:testUpload', buffer, 'POST', {
+ *   filename: 'hello.txt',
+ *   type: 'text/plain'
+ * });
+ *
+ * // Upload large files using a stream (doesn't load entire file into memory)
+ * const fs = require('fs');
+ * const stream = fs.createReadStream('/path/to/2tb-file.bin');
+ * const result = await uploadFile('Misc/Debug:testUpload', stream, 'POST', {
+ *   filename: 'large-file.bin',
+ *   type: 'application/octet-stream'
+ * });
  * ```
  *
  * @module upload
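
Note: the module examples above use top-level `await`; in a plain CommonJS script the same calls need an async wrapper. A minimal sketch of the documented buffer upload, assuming the module is loaded with the same relative `require` used in the comment:

```js
// Same buffer upload as in the JSDoc above, wrapped so it runs in a plain
// CommonJS script where top-level await is unavailable.
const { uploadFile } = require('./upload');

async function main() {
  const buffer = Buffer.from('Hello, World!');
  const result = await uploadFile('Misc/Debug:testUpload', buffer, 'POST', {
    filename: 'hello.txt',
    type: 'text/plain'
  });
  console.log('Upload complete', result);
}

main().catch((err) => {
  console.error('Upload failed', err);
  process.exitCode = 1;
});
```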
@@ -280,11 +279,17 @@ const utils = {
  * - A Uint8Array or other TypedArray
  * - A browser File object
  * - A file-like object with { name, size, type, content, lastModified }
+ * - A file-like object with { name, size, type, stream } for streaming large files
  * - A string (will be converted to UTF-8 bytes)
  * @param {string} [method='POST'] - HTTP method for the initial API call
  * @param {Object} [params={}] - Additional parameters to send with the upload.
  *   Can include `filename` and `type` to override defaults.
  * @param {Object} [context=null] - Request context (uses default context if not provided)
+ * @param {Object} [options={}] - Upload options
+ * @param {Function} [options.onProgress] - Progress callback(progress) where progress is 0-1
+ * @param {Function} [options.onError] - Error callback(error, context). Can return a Promise
+ *   that, if resolved, will cause the failed operation to be retried. Context contains
+ *   { phase, blockNum, attempt } for block uploads or { phase, attempt } for other operations.
  * @returns {Promise<Object>} - Resolves with the upload result data
  *
  * @example
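
The `options.onError` contract documented here (resolve to retry, throw to abort) lends itself to a small reusable handler. A sketch under those documented semantics only; the helper name and backoff values are illustrative, not part of the package:

```js
// Hypothetical helper: retry each failed phase up to maxAttempts with
// exponential backoff, then rethrow so uploadFile rejects.
function makeRetryHandler(maxAttempts, baseDelayMs) {
  maxAttempts = maxAttempts || 5;
  baseDelayMs = baseDelayMs || 500;
  return async (error, ctx) => {
    if (ctx.attempt >= maxAttempts) {
      throw error; // give up: the upload fails with this error
    }
    const delay = baseDelayMs * Math.pow(2, ctx.attempt - 1);
    const block = ctx.blockNum !== undefined ? ` block ${ctx.blockNum}` : '';
    console.warn(`retrying ${ctx.phase}${block} in ${delay}ms:`, error.message);
    await new Promise((resolve) => setTimeout(resolve, delay));
    // returning (resolving) tells uploadFile to retry the failed operation
  };
}

// usage: uploadFile(api, buffer, 'POST', params, null, { onError: makeRetryHandler() });
```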
@@ -296,17 +301,40 @@ const utils = {
  * });
  *
  * @example
- * // Upload with defaults
- * const result = await uploadFile('Misc/Debug:testUpload', buffer);
+ * // Upload with progress and error handling
+ * const result = await uploadFile('Misc/Debug:testUpload', buffer, 'POST', {
+ *   filename: 'large-file.bin'
+ * }, null, {
+ *   onProgress: (progress) => console.log(`${Math.round(progress * 100)}%`),
+ *   onError: async (error, ctx) => {
+ *     console.log(`Error in ${ctx.phase}, attempt ${ctx.attempt}:`, error.message);
+ *     if (ctx.attempt < 3) {
+ *       await new Promise(r => setTimeout(r, 1000)); // Wait 1s before retry
+ *       return; // Resolve to trigger retry
+ *     }
+ *     throw error; // Give up after 3 attempts
+ *   }
+ * });
  *
  * @example
  * // Upload a File object (browser)
  * const result = await uploadFile('Misc/Debug:testUpload', fileInput.files[0]);
+ *
+ * @example
+ * // Upload a large file using a stream (Node.js) - doesn't load entire file into memory
+ * const fs = require('fs');
+ * const stream = fs.createReadStream('/path/to/large-file.bin');
+ * const result = await uploadFile('Misc/Debug:testUpload', stream, 'POST', {
+ *   filename: 'large-file.bin',
+ *   type: 'application/octet-stream',
+ *   size: 2199023255552 // optional: if known, enables optimal block sizing
+ * });
  */
-async function uploadFile(api, buffer, method, params, context) {
+async function uploadFile(api, buffer, method, params, context, options) {
   // Handle default values
   method = method || 'POST';
   params = params || {};
+  options = options || {};
 
   // Get context from framework if not provided, and add available values
   if (!context) {
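
Per the streaming branches added further down, the block count is only known when a size is supplied, and progress callbacks for streamed uploads fire only when the block count is known. A hedged sketch of combining the two (paths and helper name are illustrative):

```js
// Illustrative only: stat the file first so the block count is known and
// onProgress can report a meaningful 0-1 ratio for a streamed upload.
const fs = require('fs');
const path = require('path');
const { uploadFile } = require('./upload');

async function uploadWithProgress(api, filePath) {
  const { size } = fs.statSync(filePath);
  const stream = fs.createReadStream(filePath);
  return uploadFile(api, stream, 'POST', {
    filename: path.basename(filePath),
    type: 'application/octet-stream',
    size // without this the total is unknown and stream uploads report no progress
  }, null, {
    onProgress: (p) => process.stdout.write(`\r${(p * 100).toFixed(1)}%`)
  });
}
```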
@@ -384,8 +412,18 @@ async function uploadFile(api, buffer, method, params, context) {
       content: buffer.content
     };
   }
+  // Handle Node.js readable stream
+  else if (buffer && typeof buffer.read === 'function' && typeof buffer.on === 'function') {
+    fileObj = {
+      name: params.filename || 'file.bin',
+      size: params.size || null, // null means unknown size
+      type: params.type || 'application/octet-stream',
+      lastModified: Date.now(),
+      stream: buffer
+    };
+  }
   else {
-    throw new Error('Invalid file: must be a Buffer, ArrayBuffer, Uint8Array, File, string, or file-like object with content');
+    throw new Error('Invalid file: must be a Buffer, ArrayBuffer, Uint8Array, File, readable stream, or file-like object with content');
   }
 
   // Merge params with file metadata (file metadata takes precedence for these fields)
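
The duck-type check above matches Node's `stream.Readable` interface (`read()` plus `on()`), which is what `fs.createReadStream` returns. A WHATWG `ReadableStream` (browser `fetch()` bodies, or Node's built-in `fetch` on 18+) exposes neither method, so it would need converting first. A sketch assuming Node 17+ for `Readable.fromWeb`, with `uploadFile` loaded as in the examples above:

```js
// Sketch: adapt a web ReadableStream to the Node stream shape the check expects.
const { Readable } = require('stream');
const { uploadFile } = require('./upload');

async function uploadFromWebStream(api, webStream, filename) {
  const nodeStream = Readable.fromWeb(webStream); // assumption: running on Node >= 17
  return uploadFile(api, nodeStream, 'POST', { filename });
}
```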
@@ -401,12 +439,12 @@ async function uploadFile(api, buffer, method, params, context) {
 
   // Method 1: AWS signed multipart upload
   if (data.Cloud_Aws_Bucket_Upload__) {
-    return doAwsUpload(fileObj, data, context);
+    return doAwsUpload(fileObj, data, context, options);
   }
 
   // Method 2: Direct PUT upload
   if (data.PUT) {
-    return doPutUpload(fileObj, data, context);
+    return doPutUpload(fileObj, data, context, options);
   }
 
   throw new Error('Invalid upload response format: no upload method available');
@@ -416,26 +454,169 @@ async function uploadFile(api, buffer, method, params, context) {
  * Perform a direct PUT upload (simple upload method)
  * @private
  */
-async function doPutUpload(file, uploadInfo, context) {
-  const blockSize = uploadInfo.Blocksize || file.size;
-  const blocks = Math.ceil(file.size / blockSize);
+async function doPutUpload(file, uploadInfo, context, options) {
+  const { onProgress, onError } = options;
+
+  // Calculate block size
+  // - If size known: use server's Blocksize or file size
+  // - If size unknown (streaming): use 526MB default
+  let blockSize;
+  let blocks = null;
+
+  if (file.size) {
+    blockSize = uploadInfo.Blocksize || file.size;
+    blocks = Math.ceil(file.size / blockSize);
+  } else {
+    blockSize = 551550976; // 526MB
+  }
 
-  // Upload blocks with concurrency limit
   const maxConcurrent = 3;
+  let completedBlocks = 0;
+
+  // Stream-based upload: read sequentially, upload in parallel
+  if (file.stream) {
+    let blockNum = 0;
+    let streamEnded = false;
+    let byteOffset = 0;
+    const pendingUploads = [];
+
+    while (!streamEnded || pendingUploads.length > 0) {
+      // Read and start uploads up to maxConcurrent
+      while (!streamEnded && pendingUploads.length < maxConcurrent) {
+        const chunkData = await readChunkFromStream(file.stream, blockSize);
+        if (chunkData === null) {
+          streamEnded = true;
+          break;
+        }
+
+        const currentBlock = blockNum++;
+        const startByte = byteOffset;
+        byteOffset += chunkData.byteLength;
+
+        const uploadPromise = uploadPutBlockWithDataAndRetry(
+          uploadInfo, currentBlock, startByte, chunkData, file.type, onError
+        ).then(() => {
+          completedBlocks++;
+          if (onProgress && blocks) {
+            onProgress(completedBlocks / blocks);
+          }
+        });
+
+        pendingUploads.push(uploadPromise);
+      }
+
+      // Wait for at least one upload to complete before reading more
+      if (pendingUploads.length > 0) {
+        await Promise.race(pendingUploads);
+        // Remove completed promises
+        for (let i = pendingUploads.length - 1; i >= 0; i--) {
+          const status = await Promise.race([
+            pendingUploads[i].then(() => 'done'),
+            Promise.resolve('pending')
+          ]);
+          if (status === 'done') {
+            pendingUploads.splice(i, 1);
+          }
+        }
+      }
+    }
+
+    blocks = blockNum;
+  } else {
+    // Buffer-based upload: original logic
+    for (let i = 0; i < blocks; i += maxConcurrent) {
+      const batch = [];
+      for (let j = i; j < Math.min(i + maxConcurrent, blocks); j++) {
+        batch.push(
+          uploadPutBlockWithRetry(file, uploadInfo, j, blockSize, onError)
+            .then(() => {
+              completedBlocks++;
+              if (onProgress) {
+                onProgress(completedBlocks / blocks);
+              }
+            })
+        );
+      }
+
+      await Promise.all(batch);
+    }
+  }
 
-  // Process blocks in batches
-  for (let i = 0; i < blocks; i += maxConcurrent) {
-    const batch = [];
-    for (let j = i; j < Math.min(i + maxConcurrent, blocks); j++) {
-      batch.push(uploadPutBlock(file, uploadInfo, j, blockSize));
+  // All blocks done, call completion with retry support
+  let attempt = 0;
+  while (true) {
+    attempt++;
+    try {
+      const completeResponse = await rest.rest(uploadInfo.Complete, 'POST', {}, context);
+      return completeResponse.data;
+    } catch (error) {
+      if (onError) {
+        await onError(error, { phase: 'complete', attempt });
+        // If onError resolves, retry
+        continue;
+      }
+      throw error;
    }
+  }
+}
+
+/**
+ * Upload a single block via PUT with pre-read data and retry support
+ * @private
+ */
+async function uploadPutBlockWithDataAndRetry(uploadInfo, blockNum, startByte, data, contentType, onError) {
+  let attempt = 0;
+  while (true) {
+    attempt++;
+    try {
+      const headers = {
+        'Content-Type': contentType || 'application/octet-stream'
+      };
+
+      // Add Content-Range for multipart PUT
+      headers['Content-Range'] = `bytes ${startByte}-${startByte + data.byteLength - 1}/*`;
+
+      const response = await utils.fetch(uploadInfo.PUT, {
+        method: 'PUT',
+        body: data,
+        headers: headers
+      });
+
+      if (!response.ok) {
+        throw new Error(`HTTP ${response.status}: ${response.statusText}`);
+      }
 
-    await Promise.all(batch);
+      await response.text();
+      return;
+    } catch (error) {
+      if (onError) {
+        await onError(error, { phase: 'upload', blockNum, attempt });
+        continue;
+      }
+      throw error;
+    }
   }
+}
 
-  // All blocks done, call completion
-  const completeResponse = await rest.rest(uploadInfo.Complete, 'POST', {}, context);
-  return completeResponse.data;
+/**
+ * Upload a single block via PUT with retry support
+ * @private
+ */
+async function uploadPutBlockWithRetry(file, uploadInfo, blockNum, blockSize, onError) {
+  let attempt = 0;
+  while (true) {
+    attempt++;
+    try {
+      return await uploadPutBlock(file, uploadInfo, blockNum, blockSize);
+    } catch (error) {
+      if (onError) {
+        await onError(error, { phase: 'upload', blockNum, attempt });
+        // If onError resolves, retry
+        continue;
+      }
+      throw error;
+    }
+  }
 }
 
 /**
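
One detail reviewers may want to double-check in the stream branch above: the completed-promise probe races a freshly created `.then(() => 'done')` wrapper against an already-resolved `Promise.resolve('pending')`, and under standard microtask ordering the resolved `'pending'` promise settles the race first even when the upload has finished, so entries may never be spliced out of `pendingUploads`. A minimal alternative sketch (not package code) that tracks settlement with an explicit flag:

```js
// Illustrative alternative: tag each in-flight upload with a settled flag so
// finished entries can be filtered out without racing promises against each other.
function trackSettled(promise) {
  const entry = { settled: false, promise: null };
  entry.promise = promise.finally(() => { entry.settled = true; });
  return entry;
}

// inside the read loop, instead of the status === 'done' probe
// (with pendingUploads declared using let so it can be reassigned):
//   pendingUploads.push(trackSettled(uploadPromise));
//   await Promise.race(pendingUploads.map((e) => e.promise));
//   pendingUploads = pendingUploads.filter((e) => !e.settled);
```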
@@ -475,61 +656,220 @@ async function uploadPutBlock(file, uploadInfo, blockNum, blockSize) {
  * Perform an AWS multipart upload
  * @private
  */
-async function doAwsUpload(file, uploadInfo, context) {
-  // Calculate optimal block size (min 5MB for AWS, target ~10k parts)
-  let blockSize = Math.ceil(file.size / 10000);
-  if (blockSize < 5242880) blockSize = 5242880;
-
-  const blocks = Math.ceil(file.size / blockSize);
+async function doAwsUpload(file, uploadInfo, context, options) {
+  const { onProgress, onError } = options;
+
+  // Calculate block size
+  // - If size known: target ~10k parts, min 5MB
+  // - If size unknown: use 526MB (allows up to ~5TB with 10k parts)
+  let blockSize;
+  let blocks = null; // null means unknown (streaming)
+
+  if (file.size) {
+    blockSize = Math.ceil(file.size / 10000);
+    if (blockSize < 5242880) blockSize = 5242880;
+    blocks = Math.ceil(file.size / blockSize);
+  } else {
+    blockSize = 551550976; // 526MB
+  }
 
-  // Initialize multipart upload
-  const initResponse = await awsReq(
-    uploadInfo,
-    'POST',
-    'uploads=',
-    '',
-    { 'Content-Type': file.type || 'application/octet-stream', 'X-Amz-Acl': 'private' },
-    context
-  );
-  const initXml = await initResponse.text();
-  const dom = utils.parseXML(initXml);
-  const uploadId = dom.querySelector('UploadId').innerHTML;
+  // Initialize multipart upload with retry support
+  let uploadId;
+  let initAttempt = 0;
+  while (true) {
+    initAttempt++;
+    try {
+      const initResponse = await awsReq(
+        uploadInfo,
+        'POST',
+        'uploads=',
+        '',
+        { 'Content-Type': file.type || 'application/octet-stream', 'X-Amz-Acl': 'private' },
+        context
+      );
+      const initXml = await initResponse.text();
+      const dom = utils.parseXML(initXml);
+      uploadId = dom.querySelector('UploadId').innerHTML;
+      break;
+    } catch (error) {
+      if (onError) {
+        await onError(error, { phase: 'init', attempt: initAttempt });
+        continue;
+      }
+      throw error;
+    }
+  }
 
-  // Upload all parts with concurrency limit
   const etags = {};
   const maxConcurrent = 3;
+  let completedBlocks = 0;
+
+  // Stream-based upload: read sequentially, upload in parallel
+  if (file.stream) {
+    let blockNum = 0;
+    let streamEnded = false;
+    const pendingUploads = [];
+
+    while (!streamEnded || pendingUploads.length > 0) {
+      // Read and start uploads up to maxConcurrent
+      while (!streamEnded && pendingUploads.length < maxConcurrent) {
+        const chunkData = await readChunkFromStream(file.stream, blockSize);
+        if (chunkData === null) {
+          streamEnded = true;
+          break;
+        }
 
-  for (let i = 0; i < blocks; i += maxConcurrent) {
-    const batch = [];
-    for (let j = i; j < Math.min(i + maxConcurrent, blocks); j++) {
-      batch.push(
-        uploadAwsBlock(file, uploadInfo, uploadId, j, blockSize, context)
-          .then(etag => { etags[j] = etag; })
-      );
+        const currentBlock = blockNum++;
+        const uploadPromise = uploadAwsBlockWithDataAndRetry(
+          uploadInfo, uploadId, currentBlock, chunkData, context, onError
+        ).then(etag => {
+          etags[currentBlock] = etag;
+          completedBlocks++;
+          if (onProgress && blocks) {
+            onProgress(completedBlocks / blocks);
+          }
+        });
+
+        pendingUploads.push(uploadPromise);
+      }
+
+      // Wait for at least one upload to complete before reading more
+      if (pendingUploads.length > 0) {
+        await Promise.race(pendingUploads);
+        // Remove completed promises
+        for (let i = pendingUploads.length - 1; i >= 0; i--) {
+          const status = await Promise.race([
+            pendingUploads[i].then(() => 'done'),
+            Promise.resolve('pending')
+          ]);
+          if (status === 'done') {
+            pendingUploads.splice(i, 1);
+          }
+        }
+      }
     }
 
-    await Promise.all(batch);
+    blocks = blockNum; // Now we know the total
+  } else {
+    // Buffer-based upload: original logic
+    for (let i = 0; i < blocks; i += maxConcurrent) {
+      const batch = [];
+      for (let j = i; j < Math.min(i + maxConcurrent, blocks); j++) {
+        batch.push(
+          uploadAwsBlockWithRetry(file, uploadInfo, uploadId, j, blockSize, context, onError)
+            .then(etag => {
+              etags[j] = etag;
+              completedBlocks++;
+              if (onProgress) {
+                onProgress(completedBlocks / blocks);
+              }
+            })
+        );
+      }
+
+      await Promise.all(batch);
+    }
   }
 
-  // Complete multipart upload
+  // Complete multipart upload with retry support
   let xml = '<CompleteMultipartUpload>';
   for (let i = 0; i < blocks; i++) {
     xml += `<Part><PartNumber>${i + 1}</PartNumber><ETag>${etags[i]}</ETag></Part>`;
   }
   xml += '</CompleteMultipartUpload>';
 
-  const completeResponse = await awsReq(uploadInfo, 'POST', `uploadId=${uploadId}`, xml, null, context);
-  await completeResponse.text();
+  let completeAttempt = 0;
+  while (true) {
+    completeAttempt++;
+    try {
+      const completeResponse = await awsReq(uploadInfo, 'POST', `uploadId=${uploadId}`, xml, null, context);
+      await completeResponse.text();
+      break;
+    } catch (error) {
+      if (onError) {
+        await onError(error, { phase: 'complete', attempt: completeAttempt });
+        continue;
+      }
+      throw error;
+    }
+  }
 
-  // Call server-side completion handler
-  const finalResponse = await rest.rest(
-    `Cloud/Aws/Bucket/Upload/${uploadInfo.Cloud_Aws_Bucket_Upload__}:handleComplete`,
-    'POST',
-    {},
-    context
-  );
+  // Call server-side completion handler with retry support
+  let handleAttempt = 0;
+  while (true) {
+    handleAttempt++;
+    try {
+      const finalResponse = await rest.rest(
+        `Cloud/Aws/Bucket/Upload/${uploadInfo.Cloud_Aws_Bucket_Upload__}:handleComplete`,
+        'POST',
+        {},
+        context
+      );
+      return finalResponse.data;
+    } catch (error) {
+      if (onError) {
+        await onError(error, { phase: 'handleComplete', attempt: handleAttempt });
+        continue;
+      }
+      throw error;
+    }
+  }
+}
+
+/**
+ * Upload a block to AWS S3 with pre-read data and retry support
+ * @private
+ */
+async function uploadAwsBlockWithDataAndRetry(uploadInfo, uploadId, blockNum, data, context, onError) {
+  let attempt = 0;
+  while (true) {
+    attempt++;
+    try {
+      const awsPartNumber = blockNum + 1;
+      const response = await awsReq(
+        uploadInfo,
+        'PUT',
+        `partNumber=${awsPartNumber}&uploadId=${uploadId}`,
+        data,
+        null,
+        context
+      );
 
-  return finalResponse.data;
+      if (!response.ok) {
+        throw new Error(`HTTP ${response.status}: ${response.statusText}`);
+      }
+
+      const etag = response.headers.get('ETag');
+      await response.text();
+      return etag;
+    } catch (error) {
+      if (onError) {
+        await onError(error, { phase: 'upload', blockNum, attempt });
+        continue;
+      }
+      throw error;
+    }
+  }
+}
+
+/**
+ * Upload a single block to AWS S3 with retry support
+ * @private
+ */
+async function uploadAwsBlockWithRetry(file, uploadInfo, uploadId, blockNum, blockSize, context, onError) {
+  let attempt = 0;
+  while (true) {
+    attempt++;
+    try {
+      return await uploadAwsBlock(file, uploadInfo, uploadId, blockNum, blockSize, context);
+    } catch (error) {
+      if (onError) {
+        await onError(error, { phase: 'upload', blockNum, attempt });
+        continue;
+      }
+      throw error;
+    }
+  }
 }
 
 /**
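
For a known size, the sizing rule above targets S3's 10,000-part ceiling with the 5 MB minimum part size. A quick worked example of those numbers (arithmetic only, not package code), using the 2 TiB figure from the JSDoc example:

```js
// Worked example of the block-size rule for a known 2 TiB file.
const fileSize = 2199023255552;                  // 2 TiB, as in the JSDoc example
let blockSize = Math.ceil(fileSize / 10000);     // 219902326 bytes (~210 MiB)
if (blockSize < 5242880) blockSize = 5242880;    // already above the 5 MiB AWS minimum
const blocks = Math.ceil(fileSize / blockSize);  // 10000 parts

// For unknown (streamed) sizes the fixed 551550976-byte (526 MiB) block keeps an
// upload under 10,000 parts up to roughly 5.5 TB.
```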
@@ -561,6 +901,82 @@ async function uploadAwsBlock(file, uploadInfo, uploadId, blockNum, blockSize, c
   return etag;
 }
 
+/**
+ * Read a chunk of specified size from a stream
+ * @private
+ * @param {ReadableStream} stream - Node.js readable stream
+ * @param {number} size - Number of bytes to read
+ * @returns {Promise<ArrayBuffer|null>} - ArrayBuffer with data, or null if stream ended
+ */
+function readChunkFromStream(stream, size) {
+  return new Promise((resolve, reject) => {
+    const chunks = [];
+    let bytesRead = 0;
+
+    const onReadable = () => {
+      let chunk;
+      while (bytesRead < size && (chunk = stream.read(Math.min(size - bytesRead, 65536))) !== null) {
+        chunks.push(chunk);
+        bytesRead += chunk.length;
+      }
+
+      if (bytesRead >= size) {
+        cleanup();
+        resolve(combineChunks(chunks));
+      }
+    };
+
+    const onEnd = () => {
+      cleanup();
+      if (bytesRead === 0) {
+        resolve(null); // Stream ended, no more data
+      } else {
+        resolve(combineChunks(chunks));
+      }
+    };
+
+    const onError = (err) => {
+      cleanup();
+      reject(err);
+    };
+
+    const cleanup = () => {
+      stream.removeListener('readable', onReadable);
+      stream.removeListener('end', onEnd);
+      stream.removeListener('error', onError);
+    };
+
+    stream.on('readable', onReadable);
+    stream.on('end', onEnd);
+    stream.on('error', onError);
+
+    // Try reading immediately in case data is already buffered
+    onReadable();
+  });
+}
+
+/**
+ * Combine chunks into a single ArrayBuffer
+ * @private
+ */
+function combineChunks(chunks) {
+  if (chunks.length === 0) {
+    return new ArrayBuffer(0);
+  }
+  if (chunks.length === 1) {
+    const chunk = chunks[0];
+    return chunk.buffer.slice(chunk.byteOffset, chunk.byteOffset + chunk.length);
+  }
+  const totalLength = chunks.reduce((sum, chunk) => sum + chunk.length, 0);
+  const result = new Uint8Array(totalLength);
+  let offset = 0;
+  for (const chunk of chunks) {
+    result.set(new Uint8Array(chunk.buffer, chunk.byteOffset, chunk.length), offset);
+    offset += chunk.length;
+  }
+  return result.buffer;
+}
+
 /**
  * Read a slice of a file as ArrayBuffer
  * @private
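
The `byteOffset` handling in `combineChunks` above exists because Buffers handed out by `read()` are frequently views into a larger shared `ArrayBuffer` (Node pools small allocations and slices bigger ones), so `chunk.buffer` by itself is not the chunk's bytes. A small self-contained illustration of that point (the specific sizes are typical, not guaranteed):

```js
// Why slicing with byteOffset matters: a small Buffer is usually a view into
// Node's shared allocation pool, so its backing ArrayBuffer is larger than it.
const chunk = Buffer.from('hello');
console.log(chunk.length);              // 5
console.log(chunk.buffer.byteLength);   // commonly 8192 (the pool), not 5
console.log(chunk.byteOffset);          // often non-zero

const bytes = chunk.buffer.slice(chunk.byteOffset, chunk.byteOffset + chunk.length);
console.log(Buffer.from(bytes).toString()); // 'hello'
```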
@@ -762,25 +1178,30 @@ module.exports.upload = (function () {
   function handleFailure(up, error) {
     // Skip if upload is no longer running
     if (!(up.up_id in state.running)) return;
-
+
     // Check if already in failed list
     for (const failedItem of state.failed) {
       if (failedItem.up_id === up.up_id) {
         return; // Already recorded as failed
       }
     }
-
+
     // Record failure
     up.failure = error;
     state.failed.push(up);
     delete state.running[up.up_id];
-
+
+    // Reject the promise so callers know the upload failed
+    if (up.reject) {
+      up.reject(error);
+    }
+
     // Continue processing queue
     upload.run();
-
+
     // Notify progress
     sendProgress();
-
+
     // Dispatch failure event
     utils.dispatchEvent("upload:failed", {
       item: up,