@karpeleslab/klbfw 0.2.16 → 0.2.18

Files changed (2)
  1. package/package.json +1 -1
  2. package/upload.js +602 -126
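
The headline change in upload.js is the public `uploadFile` signature, which moves from `(path, file, options)` to `(api, buffer, method, params, context, options)`: file metadata and extra request parameters now ride in the positional `params` object, while callbacks stay in a trailing `options` object. A rough before/after sketch, inferred from the two signatures in the hunks below:

```js
// 0.2.16: everything rode in a single options bag
const result16 = await uploadFile('Misc/Debug:testUpload', buffer, {
  filename: 'hello.txt',
  type: 'text/plain',
  onProgress: (p) => console.log(p)
});

// 0.2.18: method, params, and context are positional; callbacks live in options
const result18 = await uploadFile('Misc/Debug:testUpload', buffer, 'POST', {
  filename: 'hello.txt',
  type: 'text/plain'
}, null, {
  onProgress: (p) => console.log(p)
});
```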
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@karpeleslab/klbfw",
-  "version": "0.2.16",
+  "version": "0.2.18",
   "description": "Frontend Framework",
   "main": "index.js",
   "types": "index.d.ts",
package/upload.js CHANGED
@@ -39,23 +39,22 @@
  * ```js
  * // For Node.js environments, first install dependencies:
  * // npm install node-fetch @xmldom/xmldom
- *
- * // Create a buffer-based file object for upload
- * const file = {
- *   name: 'test.txt',
- *   size: buffer.length,
- *   type: 'text/plain',
- *   content: buffer, // Buffer or ArrayBuffer with file content
- *   lastModified: Date.now(),
- *   slice: function(start, end) {
- *     return {
- *       content: this.content.slice(start, end)
- *     };
- *   }
- * };
- *
- * upload.append('Misc/Debug:testUpload', file)
- *   .then(result => console.log('Upload complete', result));
+ *
+ * // Simple upload with a buffer
+ * const { uploadFile } = require('./upload');
+ * const buffer = Buffer.from('Hello, World!');
+ * const result = await uploadFile('Misc/Debug:testUpload', buffer, 'POST', {
+ *   filename: 'hello.txt',
+ *   type: 'text/plain'
+ * });
+ *
+ * // Upload large files using a stream (doesn't load the entire file into memory)
+ * const fs = require('fs');
+ * const stream = fs.createReadStream('/path/to/2tb-file.bin');
+ * const result = await uploadFile('Misc/Debug:testUpload', stream, 'POST', {
+ *   filename: 'large-file.bin',
+ *   type: 'application/octet-stream'
+ * });
  * ```
  *
  * @module upload
@@ -273,83 +272,179 @@ const utils = {
  * that resolves when the upload is complete. It doesn't use global state or the
  * upload.run() process.
  *
- * @param {string} path - API endpoint path (e.g., 'Misc/Debug:testUpload')
- * @param {Buffer|ArrayBuffer|Object} file - File to upload. Can be:
- *   - A Buffer or ArrayBuffer with file content
+ * @param {string} api - API endpoint path (e.g., 'Misc/Debug:testUpload')
+ * @param {Buffer|ArrayBuffer|Uint8Array|File|Object} buffer - File to upload. Can be:
+ *   - A Node.js Buffer
+ *   - An ArrayBuffer
+ *   - A Uint8Array or other TypedArray
+ *   - A browser File object
  *   - A file-like object with { name, size, type, content, lastModified }
- * @param {Object} [options] - Upload options
- * @param {string} [options.filename] - Filename (defaults to 'file.bin')
- * @param {string} [options.type] - MIME type (defaults to 'application/octet-stream')
- * @param {Object} [options.params] - Additional parameters to send with the upload
- * @param {Object} [options.context] - Request context
+ *   - A file-like object with { name, size, type, stream } for streaming large files
+ *   - A string (will be converted to UTF-8 bytes)
+ * @param {string} [method='POST'] - HTTP method for the initial API call
+ * @param {Object} [params={}] - Additional parameters to send with the upload.
+ *   Can include `filename` and `type` to override defaults.
+ * @param {Object} [context=null] - Request context (uses default context if not provided)
+ * @param {Object} [options={}] - Upload options
  * @param {Function} [options.onProgress] - Progress callback(progress) where progress is 0-1
+ * @param {Function} [options.onError] - Error callback(error, context). Can return a Promise
+ *   that, if resolved, will cause the failed operation to be retried. Context contains
+ *   { phase, blockNum, attempt } for block uploads or { phase, attempt } for other operations.
 * @returns {Promise<Object>} - Resolves with the upload result data
  *
  * @example
- * // Upload a buffer
+ * // Upload a buffer with filename
  * const buffer = Buffer.from('Hello, World!');
- * const result = await uploadFile('Misc/Debug:testUpload', buffer, {
+ * const result = await uploadFile('Misc/Debug:testUpload', buffer, 'POST', {
  *   filename: 'hello.txt',
  *   type: 'text/plain'
  * });
- * console.log(result); // { Blob__: '...', SHA256: '...', ... }
  *
  * @example
- * // Upload with progress tracking
- * const result = await uploadFile('Misc/Debug:testUpload', largeBuffer, {
+ * // Upload with progress and error handling
+ * const result = await uploadFile('Misc/Debug:testUpload', buffer, 'POST', {
+ *   filename: 'large-file.bin'
+ * }, null, {
+ *   onProgress: (progress) => console.log(`${Math.round(progress * 100)}%`),
+ *   onError: async (error, ctx) => {
+ *     console.log(`Error in ${ctx.phase}, attempt ${ctx.attempt}:`, error.message);
+ *     if (ctx.attempt < 3) {
+ *       await new Promise(r => setTimeout(r, 1000)); // Wait 1s before retry
+ *       return; // Resolve to trigger retry
+ *     }
+ *     throw error; // Give up after 3 attempts
+ *   }
+ * });
+ *
+ * @example
+ * // Upload a File object (browser)
+ * const result = await uploadFile('Misc/Debug:testUpload', fileInput.files[0]);
+ *
+ * @example
+ * // Upload a large file using a stream (Node.js) - doesn't load the entire file into memory
+ * const fs = require('fs');
+ * const stream = fs.createReadStream('/path/to/large-file.bin');
+ * const result = await uploadFile('Misc/Debug:testUpload', stream, 'POST', {
  *   filename: 'large-file.bin',
- *   onProgress: (progress) => console.log(`${Math.round(progress * 100)}%`)
+ *   type: 'application/octet-stream',
+ *   size: 2199023255552 // optional: if known, enables optimal block sizing
  * });
  */
-async function uploadFile(path, file, options = {}) {
-  // Normalize file to a file-like object
+async function uploadFile(api, buffer, method, params, context, options) {
+  // Handle default values
+  method = method || 'POST';
+  params = params || {};
+  options = options || {};
+
+  // Get context from framework if not provided, and merge in available values
+  if (!context) {
+    context = fwWrapper.getContext();
+  } else {
+    // Merge with default context values if available
+    const defaultContext = fwWrapper.getContext();
+    if (defaultContext) {
+      context = { ...defaultContext, ...context };
+    }
+  }
+
+  // Normalize buffer to a file-like object
   let fileObj;
-  if (file instanceof ArrayBuffer ||
-      (file.buffer instanceof ArrayBuffer) ||
-      (typeof Buffer !== 'undefined' && file instanceof Buffer)) {
-    // Raw buffer - wrap in file-like object
-    const size = file.byteLength || file.length;
+
+  // Handle string input
+  if (typeof buffer === 'string') {
+    const encoder = new TextEncoder();
+    const uint8Array = encoder.encode(buffer);
     fileObj = {
-      name: options.filename || 'file.bin',
-      size: size,
-      type: options.type || 'application/octet-stream',
+      name: params.filename || 'file.txt',
+      size: uint8Array.length,
+      type: params.type || 'text/plain',
       lastModified: Date.now(),
-      content: file
+      content: uint8Array.buffer
     };
-  } else if (file.content !== undefined) {
-    // Already a file-like object
+  }
+  // Handle ArrayBuffer
+  else if (buffer instanceof ArrayBuffer) {
     fileObj = {
-      name: file.name || options.filename || 'file.bin',
-      size: file.size || file.content.byteLength || file.content.length,
-      type: file.type || options.type || 'application/octet-stream',
-      lastModified: file.lastModified || Date.now(),
-      content: file.content
+      name: params.filename || 'file.bin',
+      size: buffer.byteLength,
+      type: params.type || 'application/octet-stream',
+      lastModified: Date.now(),
+      content: buffer
     };
-  } else {
-    throw new Error('Invalid file: must be a Buffer, ArrayBuffer, or file-like object with content');
+  }
+  // Handle TypedArray (Uint8Array, etc.) - note a Node.js Buffer is also a
+  // Uint8Array, so Buffers take this branch; byteLength works for both
+  else if (buffer && buffer.buffer instanceof ArrayBuffer) {
+    fileObj = {
+      name: params.filename || 'file.bin',
+      size: buffer.byteLength,
+      type: params.type || 'application/octet-stream',
+      lastModified: Date.now(),
+      content: buffer
+    };
+  }
+  // Handle Node.js Buffer
+  else if (typeof Buffer !== 'undefined' && buffer instanceof Buffer) {
+    fileObj = {
+      name: params.filename || 'file.bin',
+      size: buffer.length,
+      type: params.type || 'application/octet-stream',
+      lastModified: Date.now(),
+      content: buffer
+    };
+  }
+  // Handle browser File object
+  else if (env.isBrowser && typeof File !== 'undefined' && buffer instanceof File) {
+    fileObj = {
+      name: buffer.name || params.filename || 'file.bin',
+      size: buffer.size,
+      type: buffer.type || params.type || 'application/octet-stream',
+      lastModified: buffer.lastModified || Date.now(),
+      browserFile: buffer // Keep reference to original File for reading
+    };
+  }
+  // Handle file-like object with content property
+  else if (buffer && buffer.content !== undefined) {
+    fileObj = {
+      name: buffer.name || params.filename || 'file.bin',
+      size: buffer.size || buffer.content.byteLength || buffer.content.length,
+      type: buffer.type || params.type || 'application/octet-stream',
+      lastModified: buffer.lastModified || Date.now(),
+      content: buffer.content
+    };
+  }
+  // Handle Node.js readable stream
+  else if (buffer && typeof buffer.read === 'function' && typeof buffer.on === 'function') {
+    fileObj = {
+      name: params.filename || 'file.bin',
+      size: params.size || null, // null means unknown size
+      type: params.type || 'application/octet-stream',
+      lastModified: Date.now(),
+      stream: buffer
+    };
+  }
+  else {
+    throw new Error('Invalid file: must be a Buffer, ArrayBuffer, Uint8Array, File, readable stream, or file-like object with content');
   }
 
-  const context = options.context || fwWrapper.getContext();
-  const params = { ...(options.params || {}) };
-
-  // Set file metadata
-  params.filename = fileObj.name;
-  params.size = fileObj.size;
-  params.lastModified = fileObj.lastModified / 1000;
-  params.type = fileObj.type;
+  // Merge params with file metadata (file metadata takes precedence for these fields)
+  const uploadParams = { ...params };
+  uploadParams.filename = fileObj.name;
+  uploadParams.size = fileObj.size;
+  uploadParams.lastModified = fileObj.lastModified / 1000;
+  uploadParams.type = fileObj.type;
 
   // Initialize upload with the server
-  const response = await rest.rest(path, 'POST', params, context);
+  const response = await rest.rest(api, method, uploadParams, context);
   const data = response.data;
 
   // Method 1: AWS signed multipart upload
   if (data.Cloud_Aws_Bucket_Upload__) {
-    return doAwsUpload(fileObj, data, context, options.onProgress);
+    return doAwsUpload(fileObj, data, context, options);
   }
 
   // Method 2: Direct PUT upload
   if (data.PUT) {
-    return doPutUpload(fileObj, data, context, options.onProgress);
+    return doPutUpload(fileObj, data, context, options);
   }
 
   throw new Error('Invalid upload response format: no upload method available');
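
Every network step in the new code (multipart init, block PUT, AWS part upload, complete, handleComplete) wraps its call in the same retry loop: increment `attempt`, run, and on failure hand the error to `options.onError`; if that callback resolves, the step is retried, and if it throws (or is absent) the error propagates. A hypothetical helper, not part of klbfw, that captures the contract the hunks below repeat inline:

```js
// Hypothetical helper (not in the package) expressing the retry contract:
// onError resolving means "retry the operation", onError throwing (or being
// undefined) means "abort with this error".
async function withRetry(fn, onError, ctx) {
  let attempt = 0;
  while (true) {
    attempt++;
    try {
      return await fn();
    } catch (error) {
      if (!onError) throw error;
      await onError(error, { ...ctx, attempt }); // a rejection here aborts
    }
  }
}

// e.g. the completion call could be written as:
// const res = await withRetry(
//   () => rest.rest(uploadInfo.Complete, 'POST', {}, context),
//   onError, { phase: 'complete' });
```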
@@ -359,32 +454,169 @@ async function uploadFile(path, file, options = {}) {
  * Perform a direct PUT upload (simple upload method)
  * @private
  */
-async function doPutUpload(file, uploadInfo, context, onProgress) {
-  const blockSize = uploadInfo.Blocksize || file.size;
-  const blocks = Math.ceil(file.size / blockSize);
+async function doPutUpload(file, uploadInfo, context, options) {
+  const { onProgress, onError } = options;
+
+  // Calculate block size
+  // - If size known: use server's Blocksize or file size
+  // - If size unknown (streaming): use 526MB default
+  let blockSize;
+  let blocks = null;
+
+  if (file.size) {
+    blockSize = uploadInfo.Blocksize || file.size;
+    blocks = Math.ceil(file.size / blockSize);
+  } else {
+    blockSize = 551550976; // 526MB
+  }
 
-  // Upload blocks with concurrency limit
-  let completedBlocks = 0;
   const maxConcurrent = 3;
+  let completedBlocks = 0;
 
-  // Process blocks in batches
-  for (let i = 0; i < blocks; i += maxConcurrent) {
-    const batch = [];
-    for (let j = i; j < Math.min(i + maxConcurrent, blocks); j++) {
-      batch.push(uploadPutBlock(file, uploadInfo, j, blockSize));
-    }
+  // Stream-based upload: read sequentially, upload in parallel
+  if (file.stream) {
+    let blockNum = 0;
+    let streamEnded = false;
+    let byteOffset = 0;
+    const pendingUploads = [];
+
+    while (!streamEnded || pendingUploads.length > 0) {
+      // Read and start uploads up to maxConcurrent
+      while (!streamEnded && pendingUploads.length < maxConcurrent) {
+        const chunkData = await readChunkFromStream(file.stream, blockSize);
+        if (chunkData === null) {
+          streamEnded = true;
+          break;
+        }
+
+        const currentBlock = blockNum++;
+        const startByte = byteOffset;
+        byteOffset += chunkData.byteLength;
+
+        const uploadPromise = uploadPutBlockWithDataAndRetry(
+          uploadInfo, currentBlock, startByte, chunkData, file.type, onError
+        ).then(() => {
+          completedBlocks++;
+          if (onProgress && blocks) {
+            onProgress(completedBlocks / blocks);
+          }
+          // Flag the promise so the pruning pass below can drop it; racing a
+          // derived promise against an already-resolved sentinel always loses
+          // by one microtask and would never detect completion.
+          uploadPromise.settled = true;
+        });
+
+        pendingUploads.push(uploadPromise);
+      }
+
+      // Wait for at least one upload to complete before reading more
+      if (pendingUploads.length > 0) {
+        await Promise.race(pendingUploads);
+        // Remove promises that flagged themselves as settled
+        for (let i = pendingUploads.length - 1; i >= 0; i--) {
+          if (pendingUploads[i].settled) {
+            pendingUploads.splice(i, 1);
+          }
+        }
+      }
+    }
 
-    await Promise.all(batch);
-    completedBlocks += batch.length;
+    blocks = blockNum;
+  } else {
+    // Buffer-based upload: original logic
+    for (let i = 0; i < blocks; i += maxConcurrent) {
+      const batch = [];
+      for (let j = i; j < Math.min(i + maxConcurrent, blocks); j++) {
+        batch.push(
+          uploadPutBlockWithRetry(file, uploadInfo, j, blockSize, onError)
+            .then(() => {
+              completedBlocks++;
+              if (onProgress) {
+                onProgress(completedBlocks / blocks);
+              }
+            })
+        );
+      }
 
-    if (onProgress) {
-      onProgress(completedBlocks / blocks);
+      await Promise.all(batch);
     }
   }
 
-  // All blocks done, call completion
-  const completeResponse = await rest.rest(uploadInfo.Complete, 'POST', {}, context);
-  return completeResponse.data;
+  // All blocks done, call completion with retry support
+  let attempt = 0;
+  while (true) {
+    attempt++;
+    try {
+      const completeResponse = await rest.rest(uploadInfo.Complete, 'POST', {}, context);
+      return completeResponse.data;
+    } catch (error) {
+      if (onError) {
+        await onError(error, { phase: 'complete', attempt });
+        // If onError resolves, retry
+        continue;
+      }
+      throw error;
+    }
+  }
+}
+
+/**
+ * Upload a single block via PUT with pre-read data and retry support
+ * @private
+ */
+async function uploadPutBlockWithDataAndRetry(uploadInfo, blockNum, startByte, data, contentType, onError) {
+  let attempt = 0;
+  while (true) {
+    attempt++;
+    try {
+      const headers = {
+        'Content-Type': contentType || 'application/octet-stream'
+      };
+
+      // Add Content-Range for multipart PUT
+      headers['Content-Range'] = `bytes ${startByte}-${startByte + data.byteLength - 1}/*`;
+
+      const response = await utils.fetch(uploadInfo.PUT, {
+        method: 'PUT',
+        body: data,
+        headers: headers
+      });
+
+      if (!response.ok) {
+        throw new Error(`HTTP ${response.status}: ${response.statusText}`);
+      }
+
+      await response.text();
+      return;
+    } catch (error) {
+      if (onError) {
+        await onError(error, { phase: 'upload', blockNum, attempt });
+        continue;
+      }
+      throw error;
+    }
+  }
+}
+
+/**
+ * Upload a single block via PUT with retry support
+ * @private
+ */
+async function uploadPutBlockWithRetry(file, uploadInfo, blockNum, blockSize, onError) {
+  let attempt = 0;
+  while (true) {
+    attempt++;
+    try {
+      return await uploadPutBlock(file, uploadInfo, blockNum, blockSize);
+    } catch (error) {
+      if (onError) {
+        await onError(error, { phase: 'upload', blockNum, attempt });
+        // If onError resolves, retry
+        continue;
+      }
+      throw error;
+    }
+  }
+}
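
Both stream branches (the PUT path above and the AWS path below) share one pipeline shape: chunks are read from the stream strictly in order, while up to `maxConcurrent` uploads are in flight; `Promise.race` blocks further reads until a slot frees, and settled promises are pruned via a flag each upload sets in its own `.then`. A standalone sketch of the pattern, with illustrative names that are not part of the package:

```js
// Bounded read-ahead pipeline: sequential reads, parallel uploads (sketch).
async function pumpStream(readChunk, uploadChunk, maxConcurrent = 3) {
  const pending = [];
  let ended = false;
  let blockNum = 0;
  while (!ended || pending.length > 0) {
    // Fill free slots; reads stay sequential so block ordering is preserved
    while (!ended && pending.length < maxConcurrent) {
      const chunk = await readChunk();
      if (chunk === null) { ended = true; break; }
      const n = blockNum++;
      const p = uploadChunk(n, chunk).then(() => { p.settled = true; });
      pending.push(p);
    }
    if (pending.length > 0) {
      await Promise.race(pending); // wait until at least one upload finishes
      for (let i = pending.length - 1; i >= 0; i--) {
        if (pending[i].settled) pending.splice(i, 1);
      }
    }
  }
  return blockNum; // total number of blocks uploaded
}
```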
@@ -424,67 +656,220 @@ async function uploadPutBlock(file, uploadInfo, blockNum, blockSize) {
  * Perform an AWS multipart upload
  * @private
  */
-async function doAwsUpload(file, uploadInfo, context, onProgress) {
-  // Calculate optimal block size (min 5MB for AWS, target ~10k parts)
-  let blockSize = Math.ceil(file.size / 10000);
-  if (blockSize < 5242880) blockSize = 5242880;
-
-  const blocks = Math.ceil(file.size / blockSize);
+async function doAwsUpload(file, uploadInfo, context, options) {
+  const { onProgress, onError } = options;
+
+  // Calculate block size
+  // - If size known: target ~10k parts, min 5MB
+  // - If size unknown: use 526MB (allows up to ~5TB with 10k parts)
+  let blockSize;
+  let blocks = null; // null means unknown (streaming)
+
+  if (file.size) {
+    blockSize = Math.ceil(file.size / 10000);
+    if (blockSize < 5242880) blockSize = 5242880;
+    blocks = Math.ceil(file.size / blockSize);
+  } else {
+    blockSize = 551550976; // 526MB
+  }
 
-  // Initialize multipart upload
-  const initResponse = await awsReq(
-    uploadInfo,
-    'POST',
-    'uploads=',
-    '',
-    { 'Content-Type': file.type || 'application/octet-stream', 'X-Amz-Acl': 'private' },
-    context
-  );
-  const initXml = await initResponse.text();
-  const dom = utils.parseXML(initXml);
-  const uploadId = dom.querySelector('UploadId').innerHTML;
+  // Initialize multipart upload with retry support
+  let uploadId;
+  let initAttempt = 0;
+  while (true) {
+    initAttempt++;
+    try {
+      const initResponse = await awsReq(
+        uploadInfo,
+        'POST',
+        'uploads=',
+        '',
+        { 'Content-Type': file.type || 'application/octet-stream', 'X-Amz-Acl': 'private' },
+        context
+      );
+      const initXml = await initResponse.text();
+      const dom = utils.parseXML(initXml);
+      uploadId = dom.querySelector('UploadId').innerHTML;
+      break;
+    } catch (error) {
+      if (onError) {
+        await onError(error, { phase: 'init', attempt: initAttempt });
+        continue;
+      }
+      throw error;
+    }
+  }
 
-  // Upload all parts with concurrency limit
   const etags = {};
-  let completedBlocks = 0;
   const maxConcurrent = 3;
+  let completedBlocks = 0;
 
-  for (let i = 0; i < blocks; i += maxConcurrent) {
-    const batch = [];
-    for (let j = i; j < Math.min(i + maxConcurrent, blocks); j++) {
-      batch.push(
-        uploadAwsBlock(file, uploadInfo, uploadId, j, blockSize, context)
-          .then(etag => { etags[j] = etag; })
-      );
+  // Stream-based upload: read sequentially, upload in parallel
+  if (file.stream) {
+    let blockNum = 0;
+    let streamEnded = false;
+    const pendingUploads = [];
+
+    while (!streamEnded || pendingUploads.length > 0) {
+      // Read and start uploads up to maxConcurrent
+      while (!streamEnded && pendingUploads.length < maxConcurrent) {
+        const chunkData = await readChunkFromStream(file.stream, blockSize);
+        if (chunkData === null) {
+          streamEnded = true;
+          break;
+        }
+
+        const currentBlock = blockNum++;
+        const uploadPromise = uploadAwsBlockWithDataAndRetry(
+          uploadInfo, uploadId, currentBlock, chunkData, context, onError
+        ).then(etag => {
+          etags[currentBlock] = etag;
+          completedBlocks++;
+          if (onProgress && blocks) {
+            onProgress(completedBlocks / blocks);
+          }
+          // Flag the promise so the pruning pass below can drop it (see doPutUpload)
+          uploadPromise.settled = true;
+        });
+
+        pendingUploads.push(uploadPromise);
+      }
+
+      // Wait for at least one upload to complete before reading more
+      if (pendingUploads.length > 0) {
+        await Promise.race(pendingUploads);
+        // Remove promises that flagged themselves as settled
+        for (let i = pendingUploads.length - 1; i >= 0; i--) {
+          if (pendingUploads[i].settled) {
+            pendingUploads.splice(i, 1);
+          }
+        }
+      }
+    }
 
-    await Promise.all(batch);
-    completedBlocks += batch.length;
+    blocks = blockNum; // Now we know the total
+  } else {
+    // Buffer-based upload: original logic
+    for (let i = 0; i < blocks; i += maxConcurrent) {
+      const batch = [];
+      for (let j = i; j < Math.min(i + maxConcurrent, blocks); j++) {
+        batch.push(
+          uploadAwsBlockWithRetry(file, uploadInfo, uploadId, j, blockSize, context, onError)
+            .then(etag => {
+              etags[j] = etag;
+              completedBlocks++;
+              if (onProgress) {
+                onProgress(completedBlocks / blocks);
+              }
+            })
+        );
+      }
 
-    if (onProgress) {
-      onProgress(completedBlocks / blocks);
+      await Promise.all(batch);
     }
   }
 
-  // Complete multipart upload
+  // Complete multipart upload with retry support
   let xml = '<CompleteMultipartUpload>';
   for (let i = 0; i < blocks; i++) {
     xml += `<Part><PartNumber>${i + 1}</PartNumber><ETag>${etags[i]}</ETag></Part>`;
   }
   xml += '</CompleteMultipartUpload>';
 
-  const completeResponse = await awsReq(uploadInfo, 'POST', `uploadId=${uploadId}`, xml, null, context);
-  await completeResponse.text();
+  let completeAttempt = 0;
+  while (true) {
+    completeAttempt++;
+    try {
+      const completeResponse = await awsReq(uploadInfo, 'POST', `uploadId=${uploadId}`, xml, null, context);
+      await completeResponse.text();
+      break;
+    } catch (error) {
+      if (onError) {
+        await onError(error, { phase: 'complete', attempt: completeAttempt });
+        continue;
+      }
+      throw error;
+    }
+  }
 
-  // Call server-side completion handler
-  const finalResponse = await rest.rest(
-    `Cloud/Aws/Bucket/Upload/${uploadInfo.Cloud_Aws_Bucket_Upload__}:handleComplete`,
-    'POST',
-    {},
-    context
-  );
-
-  return finalResponse.data;
+  // Call server-side completion handler with retry support
+  let handleAttempt = 0;
+  while (true) {
+    handleAttempt++;
+    try {
+      const finalResponse = await rest.rest(
+        `Cloud/Aws/Bucket/Upload/${uploadInfo.Cloud_Aws_Bucket_Upload__}:handleComplete`,
+        'POST',
+        {},
+        context
+      );
+      return finalResponse.data;
+    } catch (error) {
+      if (onError) {
+        await onError(error, { phase: 'handleComplete', attempt: handleAttempt });
+        continue;
+      }
+      throw error;
+    }
+  }
+}
+
+/**
+ * Upload a block to AWS S3 with pre-read data and retry support
+ * @private
+ */
+async function uploadAwsBlockWithDataAndRetry(uploadInfo, uploadId, blockNum, data, context, onError) {
+  let attempt = 0;
+  while (true) {
+    attempt++;
+    try {
+      const awsPartNumber = blockNum + 1;
+      const response = await awsReq(
+        uploadInfo,
+        'PUT',
+        `partNumber=${awsPartNumber}&uploadId=${uploadId}`,
+        data,
+        null,
+        context
+      );
+
+      if (!response.ok) {
+        throw new Error(`HTTP ${response.status}: ${response.statusText}`);
+      }
+
+      const etag = response.headers.get('ETag');
+      await response.text();
+      return etag;
+    } catch (error) {
+      if (onError) {
+        await onError(error, { phase: 'upload', blockNum, attempt });
+        continue;
+      }
+      throw error;
+    }
+  }
+}
+
+/**
+ * Upload a single block to AWS S3 with retry support
+ * @private
+ */
+async function uploadAwsBlockWithRetry(file, uploadInfo, uploadId, blockNum, blockSize, context, onError) {
+  let attempt = 0;
+  while (true) {
+    attempt++;
+    try {
+      return await uploadAwsBlock(file, uploadInfo, uploadId, blockNum, blockSize, context);
+    } catch (error) {
+      if (onError) {
+        await onError(error, { phase: 'upload', blockNum, attempt });
+        continue;
+      }
+      throw error;
+    }
+  }
+}
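
The sizing constants line up with S3's multipart limits (at most 10,000 parts, 5 MiB minimum part size, 5 TiB maximum object size): known sizes target roughly 10,000 parts clamped to the 5 MiB floor, and the 526 MiB fallback leaves about 5 TiB of headroom when the total is unknown. A quick check of the arithmetic:

```js
// Sanity-check the block-size constants against S3 multipart limits.
const MIN_PART = 5242880;       // 5 MiB floor used above
const STREAM_PART = 551550976;  // 526 MiB fallback for unknown sizes
const MAX_PARTS = 10000;

const partSizeFor = (size) => Math.max(Math.ceil(size / MAX_PARTS), MIN_PART);

console.log(partSizeFor(100 * 1024 * 1024)); // 5242880 - small files hit the floor
console.log(partSizeFor(2 ** 40));           // 109951163 (~105 MiB) for a 1 TiB file
console.log((STREAM_PART * MAX_PARTS / 2 ** 40).toFixed(2)); // "5.02" TiB of headroom
```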
@@ -516,12 +901,98 @@ async function uploadAwsBlock(file, uploadInfo, uploadId, blockNum, blockSize, c
   return etag;
 }
 
+/**
+ * Read a chunk of specified size from a stream
+ * @private
+ * @param {stream.Readable} stream - Node.js readable stream
+ * @param {number} size - Number of bytes to read
+ * @returns {Promise<ArrayBuffer|null>} - ArrayBuffer with data, or null if stream ended
+ */
+function readChunkFromStream(stream, size) {
+  return new Promise((resolve, reject) => {
+    // 'end' only fires once; if it already fired during a previous call,
+    // report end-of-stream instead of waiting for an event that never comes
+    if (stream.readableEnded) {
+      resolve(null);
+      return;
+    }
+
+    const chunks = [];
+    let bytesRead = 0;
+
+    const onReadable = () => {
+      let chunk;
+      while (bytesRead < size && (chunk = stream.read(Math.min(size - bytesRead, 65536))) !== null) {
+        chunks.push(chunk);
+        bytesRead += chunk.length;
+      }
+
+      if (bytesRead >= size) {
+        cleanup();
+        resolve(combineChunks(chunks));
+      }
+    };
+
+    const onEnd = () => {
+      cleanup();
+      if (bytesRead === 0) {
+        resolve(null); // Stream ended, no more data
+      } else {
+        resolve(combineChunks(chunks));
+      }
+    };
+
+    const onError = (err) => {
+      cleanup();
+      reject(err);
+    };
+
+    const cleanup = () => {
+      stream.removeListener('readable', onReadable);
+      stream.removeListener('end', onEnd);
+      stream.removeListener('error', onError);
+    };
+
+    stream.on('readable', onReadable);
+    stream.on('end', onEnd);
+    stream.on('error', onError);
+
+    // Try reading immediately in case data is already buffered
+    onReadable();
+  });
+}
+
+/**
+ * Combine chunks into a single ArrayBuffer
+ * @private
+ */
+function combineChunks(chunks) {
+  if (chunks.length === 0) {
+    return new ArrayBuffer(0);
+  }
+  if (chunks.length === 1) {
+    const chunk = chunks[0];
+    return chunk.buffer.slice(chunk.byteOffset, chunk.byteOffset + chunk.length);
+  }
+  const totalLength = chunks.reduce((sum, chunk) => sum + chunk.length, 0);
+  const result = new Uint8Array(totalLength);
+  let offset = 0;
+  for (const chunk of chunks) {
+    result.set(new Uint8Array(chunk.buffer, chunk.byteOffset, chunk.length), offset);
+    offset += chunk.length;
+  }
+  return result.buffer;
+}
+
 /**
  * Read a slice of a file as ArrayBuffer
  * @private
  */
 function readFileSlice(file, start, end) {
   return new Promise((resolve, reject) => {
+    // Handle browser File objects
+    if (file.browserFile) {
+      const slice = file.browserFile.slice(start, end);
+      const reader = new FileReader();
+      reader.addEventListener('loadend', () => resolve(reader.result));
+      reader.addEventListener('error', (e) => reject(e));
+      reader.readAsArrayBuffer(slice);
+      return;
+    }
+
     if (!file.content) {
       reject(new Error('Cannot read file content - no content property'));
       return;
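
`readChunkFromStream` resolves fixed-size ArrayBuffers until the source is drained, then a short final chunk, then `null` (the `readableEnded` guard above covers the case where `'end'` already fired during an earlier call). A small illustrative exercise of that contract, not shipped with the package:

```js
// Illustrative check (not shipped code): 100,000 bytes read in 65,536-byte
// chunks should come back as [65536, 34464] and then null.
const { Readable } = require('stream');

async function demo() {
  const src = Readable.from(
    [Buffer.alloc(70000, 1), Buffer.alloc(30000, 2)],
    { objectMode: false } // byte stream, so stream.read(n) honors byte counts
  );
  const sizes = [];
  let chunk;
  while ((chunk = await readChunkFromStream(src, 65536)) !== null) {
    sizes.push(chunk.byteLength);
  }
  console.log(sizes); // [ 65536, 34464 ]
}

demo();
```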
@@ -707,25 +1178,30 @@ module.exports.upload = (function () {
   function handleFailure(up, error) {
     // Skip if upload is no longer running
     if (!(up.up_id in state.running)) return;
-    
+
     // Check if already in failed list
     for (const failedItem of state.failed) {
      if (failedItem.up_id === up.up_id) {
         return; // Already recorded as failed
       }
     }
-    
+
     // Record failure
     up.failure = error;
     state.failed.push(up);
     delete state.running[up.up_id];
-    
+
+    // Reject the promise so callers know the upload failed
+    if (up.reject) {
+      up.reject(error);
+    }
+
     // Continue processing queue
     upload.run();
-    
+
     // Notify progress
     sendProgress();
-    
+
     // Dispatch failure event
     utils.dispatchEvent("upload:failed", {
       item: up,
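
The `up.reject(error)` call is the behavioral change in this last hunk: queued uploads have always dispatched the `upload:failed` event, but the promise returned by `upload.append()` apparently stayed pending on failure. With 0.2.18 both channels fire, so (assuming `append` keeps returning a promise as the module docs show) a failure can be caught directly:

```js
// Failure now surfaces on the promise as well as the "upload:failed" event.
try {
  const result = await upload.append('Misc/Debug:testUpload', file);
  console.log('Upload complete', result);
} catch (err) {
  console.error('Upload failed', err); // reached via up.reject(error)
}
```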