@karpeleslab/klbfw 0.2.14 → 0.2.16
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +1 -1
- package/upload.js +295 -2
package/package.json
CHANGED
package/upload.js
CHANGED
|
@@ -266,10 +266,300 @@ const utils = {
|
|
|
266
266
|
}
|
|
267
267
|
};
|
|
268
268
|
|
|
269
|
+
/**
 * Simple file upload for Node.js consumers.
 *
 * Provides a straightforward way to upload a file and get a Promise that
 * resolves when the upload is complete. It does not use global state or the
 * upload.run() process.
 *
 * @param {string} path - API endpoint path (e.g., 'Misc/Debug:testUpload')
 * @param {Buffer|ArrayBuffer|Object} file - File to upload. Can be:
 *   - A Buffer, ArrayBuffer, or TypedArray with file content
 *   - A file-like object with { name, size, type, content, lastModified }
 * @param {Object} [options] - Upload options
 * @param {string} [options.filename] - Filename (defaults to 'file.bin')
 * @param {string} [options.type] - MIME type (defaults to 'application/octet-stream')
 * @param {Object} [options.params] - Additional parameters to send with the upload
 * @param {Object} [options.context] - Request context (defaults to fwWrapper.getContext())
 * @param {Function} [options.onProgress] - Progress callback(progress), progress in [0, 1]
 * @returns {Promise<Object>} - Resolves with the upload result data
 * @throws {Error} If `file` is null/undefined or has no usable content, or if
 *   the server response offers no supported upload method.
 *
 * @example
 * const result = await uploadFile('Misc/Debug:testUpload', Buffer.from('Hello'), {
 *   filename: 'hello.txt',
 *   type: 'text/plain',
 *   onProgress: (p) => console.log(`${Math.round(p * 100)}%`)
 * });
 * console.log(result); // { Blob__: '...', SHA256: '...', ... }
 */
async function uploadFile(path, file, options = {}) {
  // Guard first: reading `.buffer`/`.content` off null/undefined below would
  // throw an unhelpful TypeError instead of the intended validation error.
  if (file === null || file === undefined) {
    throw new Error('Invalid file: must be a Buffer, ArrayBuffer, or file-like object with content');
  }

  // Normalize file to a file-like object
  let fileObj;
  if (file instanceof ArrayBuffer ||
      (file.buffer instanceof ArrayBuffer) ||
      (typeof Buffer !== 'undefined' && file instanceof Buffer)) {
    // Raw buffer (ArrayBuffer, TypedArray, or Buffer) - wrap in file-like object.
    // byteLength covers ArrayBuffer/TypedArray; length covers Buffer.
    const size = file.byteLength || file.length;
    fileObj = {
      name: options.filename || 'file.bin',
      size: size,
      type: options.type || 'application/octet-stream',
      lastModified: Date.now(),
      content: file
    };
  } else if (file.content !== undefined) {
    // Already a file-like object; fill in any missing metadata.
    fileObj = {
      name: file.name || options.filename || 'file.bin',
      size: file.size || file.content.byteLength || file.content.length,
      type: file.type || options.type || 'application/octet-stream',
      lastModified: file.lastModified || Date.now(),
      content: file.content
    };
  } else {
    throw new Error('Invalid file: must be a Buffer, ArrayBuffer, or file-like object with content');
  }

  const context = options.context || fwWrapper.getContext();
  const params = { ...(options.params || {}) };

  // Set file metadata (server expects lastModified in seconds, not ms)
  params.filename = fileObj.name;
  params.size = fileObj.size;
  params.lastModified = fileObj.lastModified / 1000;
  params.type = fileObj.type;

  // Initialize upload with the server; the response tells us which
  // transport to use.
  const response = await rest.rest(path, 'POST', params, context);
  const data = response.data;

  // Method 1: AWS signed multipart upload
  if (data.Cloud_Aws_Bucket_Upload__) {
    return doAwsUpload(fileObj, data, context, options.onProgress);
  }

  // Method 2: Direct PUT upload
  if (data.PUT) {
    return doPutUpload(fileObj, data, context, options.onProgress);
  }

  throw new Error('Invalid upload response format: no upload method available');
}
|
|
357
|
+
|
|
358
|
+
/**
 * Direct PUT upload: split the file into server-specified blocks, push them
 * with bounded concurrency, then call the server's completion endpoint.
 * @private
 * @param {Object} file - Normalized file-like object ({ size, type, content, ... })
 * @param {Object} uploadInfo - Server response with PUT URL, Blocksize and Complete path
 * @param {Object} context - Request context forwarded to rest calls
 * @param {Function} [onProgress] - Called with completed fraction in (0, 1]
 * @returns {Promise<Object>} - The completion endpoint's result data
 */
async function doPutUpload(file, uploadInfo, context, onProgress) {
  const CONCURRENCY = 3;
  // Fall back to a single block covering the whole file when the server
  // did not specify a block size.
  const chunkSize = uploadInfo.Blocksize || file.size;
  const totalChunks = Math.ceil(file.size / chunkSize);

  let done = 0;
  let next = 0;
  while (next < totalChunks) {
    const stop = Math.min(next + CONCURRENCY, totalChunks);
    const inFlight = [];
    while (next < stop) {
      inFlight.push(uploadPutBlock(file, uploadInfo, next, chunkSize));
      next += 1;
    }

    await Promise.all(inFlight);
    done += inFlight.length;

    if (onProgress) {
      onProgress(done / totalChunks);
    }
  }

  // Every chunk is on the server — finalize and return the server's result.
  const finished = await rest.rest(uploadInfo.Complete, 'POST', {}, context);
  return finished.data;
}
|
|
389
|
+
|
|
390
|
+
/**
 * PUT a single block of the file to the upload URL.
 * Adds a Content-Range header only when the upload spans multiple blocks.
 * @private
 * @param {Object} file - Normalized file-like object
 * @param {Object} uploadInfo - Server response containing the PUT URL
 * @param {number} blockNum - Zero-based block index
 * @param {number} blockSize - Block size in bytes
 * @returns {Promise<void>} - Resolves once the block is accepted
 * @throws {Error} On a non-2xx HTTP response
 */
async function uploadPutBlock(file, uploadInfo, blockNum, blockSize) {
  const from = blockNum * blockSize;
  const to = Math.min(from + blockSize, file.size);

  const payload = await readFileSlice(file, from, to);

  const headers = {
    'Content-Type': file.type || 'application/octet-stream'
  };
  // Multi-block uploads identify each block's byte span via Content-Range.
  if (Math.ceil(file.size / blockSize) > 1) {
    headers['Content-Range'] = `bytes ${from}-${to - 1}/*`;
  }

  const res = await utils.fetch(uploadInfo.PUT, {
    method: 'PUT',
    body: payload,
    headers
  });

  if (!res.ok) {
    throw new Error(`HTTP ${res.status}: ${res.statusText}`);
  }

  // Drain the response body so the connection can be reused.
  await res.text();
}
|
|
422
|
+
|
|
423
|
+
/**
 * AWS S3 multipart upload: initiate the multipart transfer, push parts with
 * bounded concurrency, complete it on S3, then let the application server
 * finalize the blob.
 * @private
 * @param {Object} file - Normalized file-like object
 * @param {Object} uploadInfo - Server response (bucket endpoint, key, upload id)
 * @param {Object} context - Request context forwarded to rest calls
 * @param {Function} [onProgress] - Called with completed fraction in (0, 1]
 * @returns {Promise<Object>} - The server-side completion handler's result data
 */
async function doAwsUpload(file, uploadInfo, context, onProgress) {
  // AWS requires parts of at least 5 MiB; size parts to stay near ~10k total.
  const MIN_PART = 5242880;
  const partSize = Math.max(MIN_PART, Math.ceil(file.size / 10000));
  const partCount = Math.ceil(file.size / partSize);

  // Initiate the multipart upload and extract its UploadId from the XML reply.
  const initRes = await awsReq(
    uploadInfo,
    'POST',
    'uploads=',
    '',
    { 'Content-Type': file.type || 'application/octet-stream', 'X-Amz-Acl': 'private' },
    context
  );
  const initDoc = utils.parseXML(await initRes.text());
  const uploadId = initDoc.querySelector('UploadId').innerHTML;

  // Push parts in waves of bounded concurrency, recording each ETag by index.
  const CONCURRENCY = 3;
  const etags = {};
  let finished = 0;

  for (let base = 0; base < partCount; base += CONCURRENCY) {
    const wave = [];
    for (let part = base; part < Math.min(base + CONCURRENCY, partCount); part++) {
      wave.push(
        uploadAwsBlock(file, uploadInfo, uploadId, part, partSize, context)
          .then((etag) => { etags[part] = etag; })
      );
    }

    await Promise.all(wave);
    finished += wave.length;

    if (onProgress) {
      onProgress(finished / partCount);
    }
  }

  // Tell S3 the multipart upload is complete, listing every part's ETag.
  const parts = [];
  for (let p = 0; p < partCount; p++) {
    parts.push(`<Part><PartNumber>${p + 1}</PartNumber><ETag>${etags[p]}</ETag></Part>`);
  }
  const completeXml = `<CompleteMultipartUpload>${parts.join('')}</CompleteMultipartUpload>`;

  const awsDone = await awsReq(uploadInfo, 'POST', `uploadId=${uploadId}`, completeXml, null, context);
  await awsDone.text();

  // Call the application server's completion handler for the finished blob.
  const final = await rest.rest(
    `Cloud/Aws/Bucket/Upload/${uploadInfo.Cloud_Aws_Bucket_Upload__}:handleComplete`,
    'POST',
    {},
    context
  );

  return final.data;
}
|
|
489
|
+
|
|
490
|
+
/**
 * Upload one part of an AWS multipart upload and return its ETag.
 * AWS part numbers are 1-based while our block indexes are 0-based.
 * @private
 * @param {Object} file - Normalized file-like object
 * @param {Object} uploadInfo - Server response with bucket endpoint and key
 * @param {string} uploadId - S3 multipart UploadId
 * @param {number} blockNum - Zero-based block index
 * @param {number} blockSize - Block size in bytes
 * @param {Object} context - Request context forwarded to awsReq
 * @returns {Promise<string>} - The part's ETag header value
 * @throws {Error} On a non-2xx HTTP response
 */
async function uploadAwsBlock(file, uploadInfo, uploadId, blockNum, blockSize, context) {
  const offset = blockNum * blockSize;
  const limit = Math.min(offset + blockSize, file.size);

  const body = await readFileSlice(file, offset, limit);

  const res = await awsReq(
    uploadInfo,
    'PUT',
    `partNumber=${blockNum + 1}&uploadId=${uploadId}`,
    body,
    null,
    context
  );

  if (!res.ok) {
    throw new Error(`HTTP ${res.status}: ${res.statusText}`);
  }

  const etag = res.headers.get('ETag');
  // Drain the body so the connection can be reused.
  await res.text();
  return etag;
}
|
|
518
|
+
|
|
519
|
+
/**
 * Read a slice [start, end) of a file's content as an ArrayBuffer.
 *
 * Supports ArrayBuffer, TypedArray/Buffer views, and string content.
 * @private
 * @param {Object} file - File-like object whose `content` holds the data
 * @param {number} start - Inclusive start offset
 * @param {number} end - Exclusive end offset
 * @returns {Promise<ArrayBuffer>} - The requested slice
 * @throws {Error} If `content` is missing or of an unsupported type
 */
async function readFileSlice(file, start, end) {
  if (!file.content) {
    throw new Error('Cannot read file content - no content property');
  }

  const content = file.content;

  if (content instanceof ArrayBuffer) {
    // Whole-buffer reads return the buffer itself; partial reads copy.
    return (start === 0 && end === content.byteLength)
      ? content
      : content.slice(start, end);
  }

  if (content.buffer instanceof ArrayBuffer) {
    // TypedArray view — Node.js Buffers are Uint8Arrays, so they land here
    // too (the previous dedicated `instanceof Buffer` branch was unreachable
    // dead code). Clamp `end` to the view's length so a view into a larger
    // pooled ArrayBuffer is never over-read, and offset into the backing
    // buffer via byteOffset.
    const stop = content.byteOffset + Math.min(end, content.byteLength);
    return content.buffer.slice(content.byteOffset + start, stop);
  }

  if (typeof content === 'string') {
    // NOTE(review): start/end index characters here, not bytes — for
    // non-ASCII strings the encoded byte count differs from the character
    // count, so block boundaries and the reported size would disagree.
    // Assumes callers only pass ASCII string content — TODO confirm.
    const encoder = new TextEncoder();
    return encoder.encode(content.slice(start, end)).buffer;
  }

  throw new Error('Unsupported content type');
}
|
|
558
|
+
|
|
269
559
|
/**
|
|
270
560
|
* AWS S3 request handler
|
|
271
561
|
* Performs a signed request to AWS S3 using a signature obtained from the server
|
|
272
|
-
*
|
|
562
|
+
*
|
|
273
563
|
* @param {Object} upInfo - Upload info including bucket endpoint and key
|
|
274
564
|
* @param {string} method - HTTP method (GET, POST, PUT)
|
|
275
565
|
* @param {string} query - Query parameters
|
|
@@ -1168,4 +1458,7 @@ module.exports.upload = (function () {
|
|
|
1168
1458
|
};
|
|
1169
1459
|
|
|
1170
1460
|
return upload;
|
|
1171
|
-
}());
|
|
1461
|
+
}());
|
|
1462
|
+
|
|
1463
|
+
// Export simple upload function for Node.js consumers
|
|
1464
|
+
module.exports.uploadFile = uploadFile;
|