@karpeleslab/klbfw 0.2.15 → 0.2.17
This diff shows the content of publicly released package versions as published to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
- package/package.json +1 -1
- package/upload.js +350 -2
package/package.json  CHANGED
package/upload.js  CHANGED
@@ -266,10 +266,355 @@ const utils = {
   }
 };
 
+/**
+ * Simple file upload for Node.js consumers
+ *
+ * This function provides a straightforward way to upload a file and get a Promise
+ * that resolves when the upload is complete. It doesn't use global state or the
+ * upload.run() process.
+ *
+ * @param {string} api - API endpoint path (e.g., 'Misc/Debug:testUpload')
+ * @param {Buffer|ArrayBuffer|Uint8Array|File|Object} buffer - File to upload. Can be:
+ *   - A Node.js Buffer
+ *   - An ArrayBuffer
+ *   - A Uint8Array or other TypedArray
+ *   - A browser File object
+ *   - A file-like object with { name, size, type, content, lastModified }
+ *   - A string (will be converted to UTF-8 bytes)
+ * @param {string} [method='POST'] - HTTP method for the initial API call
+ * @param {Object} [params={}] - Additional parameters to send with the upload.
+ *   Can include `filename` and `type` to override defaults.
+ * @param {Object} [context=null] - Request context (uses default context if not provided)
+ * @returns {Promise<Object>} - Resolves with the upload result data
+ *
+ * @example
+ * // Upload a buffer with filename
+ * const buffer = Buffer.from('Hello, World!');
+ * const result = await uploadFile('Misc/Debug:testUpload', buffer, 'POST', {
+ *   filename: 'hello.txt',
+ *   type: 'text/plain'
+ * });
+ *
+ * @example
+ * // Upload with defaults
+ * const result = await uploadFile('Misc/Debug:testUpload', buffer);
+ *
+ * @example
+ * // Upload a File object (browser)
+ * const result = await uploadFile('Misc/Debug:testUpload', fileInput.files[0]);
+ */
+async function uploadFile(api, buffer, method, params, context) {
+  // Handle default values
+  method = method || 'POST';
+  params = params || {};
+
+  // Get context from framework if not provided, and add available values
+  if (!context) {
+    context = fwWrapper.getContext();
+  } else {
+    // Merge with default context values if available
+    const defaultContext = fwWrapper.getContext();
+    if (defaultContext) {
+      context = { ...defaultContext, ...context };
+    }
+  }
+
+  // Normalize buffer to a file-like object
+  let fileObj;
+
+  // Handle string input
+  if (typeof buffer === 'string') {
+    const encoder = new TextEncoder();
+    const uint8Array = encoder.encode(buffer);
+    fileObj = {
+      name: params.filename || 'file.txt',
+      size: uint8Array.length,
+      type: params.type || 'text/plain',
+      lastModified: Date.now(),
+      content: uint8Array.buffer
+    };
+  }
+  // Handle ArrayBuffer
+  else if (buffer instanceof ArrayBuffer) {
+    fileObj = {
+      name: params.filename || 'file.bin',
+      size: buffer.byteLength,
+      type: params.type || 'application/octet-stream',
+      lastModified: Date.now(),
+      content: buffer
+    };
+  }
+  // Handle TypedArray (Uint8Array, etc.)
+  else if (buffer && buffer.buffer instanceof ArrayBuffer) {
+    fileObj = {
+      name: params.filename || 'file.bin',
+      size: buffer.byteLength,
+      type: params.type || 'application/octet-stream',
+      lastModified: Date.now(),
+      content: buffer
+    };
+  }
+  // Handle Node.js Buffer
+  else if (typeof Buffer !== 'undefined' && buffer instanceof Buffer) {
+    fileObj = {
+      name: params.filename || 'file.bin',
+      size: buffer.length,
+      type: params.type || 'application/octet-stream',
+      lastModified: Date.now(),
+      content: buffer
+    };
+  }
+  // Handle browser File object
+  else if (env.isBrowser && typeof File !== 'undefined' && buffer instanceof File) {
+    fileObj = {
+      name: buffer.name || params.filename || 'file.bin',
+      size: buffer.size,
+      type: buffer.type || params.type || 'application/octet-stream',
+      lastModified: buffer.lastModified || Date.now(),
+      browserFile: buffer // Keep reference to original File for reading
+    };
+  }
+  // Handle file-like object with content property
+  else if (buffer && buffer.content !== undefined) {
+    fileObj = {
+      name: buffer.name || params.filename || 'file.bin',
+      size: buffer.size || buffer.content.byteLength || buffer.content.length,
+      type: buffer.type || params.type || 'application/octet-stream',
+      lastModified: buffer.lastModified || Date.now(),
+      content: buffer.content
+    };
+  }
+  else {
+    throw new Error('Invalid file: must be a Buffer, ArrayBuffer, Uint8Array, File, string, or file-like object with content');
+  }
+
+  // Merge params with file metadata (file metadata takes precedence for these fields)
+  const uploadParams = { ...params };
+  uploadParams.filename = fileObj.name;
+  uploadParams.size = fileObj.size;
+  uploadParams.lastModified = fileObj.lastModified / 1000;
+  uploadParams.type = fileObj.type;
+
+  // Initialize upload with the server
+  const response = await rest.rest(api, method, uploadParams, context);
+  const data = response.data;
+
+  // Method 1: AWS signed multipart upload
+  if (data.Cloud_Aws_Bucket_Upload__) {
+    return doAwsUpload(fileObj, data, context);
+  }
+
+  // Method 2: Direct PUT upload
+  if (data.PUT) {
+    return doPutUpload(fileObj, data, context);
+  }
+
+  throw new Error('Invalid upload response format: no upload method available');
+}
+
+/**
+ * Perform a direct PUT upload (simple upload method)
+ * @private
+ */
+async function doPutUpload(file, uploadInfo, context) {
+  const blockSize = uploadInfo.Blocksize || file.size;
+  const blocks = Math.ceil(file.size / blockSize);
+
+  // Upload blocks with concurrency limit
+  const maxConcurrent = 3;
+
+  // Process blocks in batches
+  for (let i = 0; i < blocks; i += maxConcurrent) {
+    const batch = [];
+    for (let j = i; j < Math.min(i + maxConcurrent, blocks); j++) {
+      batch.push(uploadPutBlock(file, uploadInfo, j, blockSize));
+    }
+
+    await Promise.all(batch);
+  }
+
+  // All blocks done, call completion
+  const completeResponse = await rest.rest(uploadInfo.Complete, 'POST', {}, context);
+  return completeResponse.data;
+}
+
+/**
+ * Upload a single block via PUT
+ * @private
+ */
+async function uploadPutBlock(file, uploadInfo, blockNum, blockSize) {
+  const startByte = blockNum * blockSize;
+  const endByte = Math.min(startByte + blockSize, file.size);
+
+  const arrayBuffer = await readFileSlice(file, startByte, endByte);
+
+  const headers = {
+    'Content-Type': file.type || 'application/octet-stream'
+  };
+
+  // Add Content-Range for multipart PUT
+  const totalBlocks = Math.ceil(file.size / blockSize);
+  if (totalBlocks > 1) {
+    headers['Content-Range'] = `bytes ${startByte}-${endByte - 1}/*`;
+  }
+
+  const response = await utils.fetch(uploadInfo.PUT, {
+    method: 'PUT',
+    body: arrayBuffer,
+    headers: headers
+  });
+
+  if (!response.ok) {
+    throw new Error(`HTTP ${response.status}: ${response.statusText}`);
+  }
+
+  await response.text();
+}
+
+/**
+ * Perform an AWS multipart upload
+ * @private
+ */
+async function doAwsUpload(file, uploadInfo, context) {
+  // Calculate optimal block size (min 5MB for AWS, target ~10k parts)
+  let blockSize = Math.ceil(file.size / 10000);
+  if (blockSize < 5242880) blockSize = 5242880;
+
+  const blocks = Math.ceil(file.size / blockSize);
+
+  // Initialize multipart upload
+  const initResponse = await awsReq(
+    uploadInfo,
+    'POST',
+    'uploads=',
+    '',
+    { 'Content-Type': file.type || 'application/octet-stream', 'X-Amz-Acl': 'private' },
+    context
+  );
+  const initXml = await initResponse.text();
+  const dom = utils.parseXML(initXml);
+  const uploadId = dom.querySelector('UploadId').innerHTML;
+
+  // Upload all parts with concurrency limit
+  const etags = {};
+  const maxConcurrent = 3;
+
+  for (let i = 0; i < blocks; i += maxConcurrent) {
+    const batch = [];
+    for (let j = i; j < Math.min(i + maxConcurrent, blocks); j++) {
+      batch.push(
+        uploadAwsBlock(file, uploadInfo, uploadId, j, blockSize, context)
+          .then(etag => { etags[j] = etag; })
+      );
+    }
+
+    await Promise.all(batch);
+  }
+
+  // Complete multipart upload
+  let xml = '<CompleteMultipartUpload>';
+  for (let i = 0; i < blocks; i++) {
+    xml += `<Part><PartNumber>${i + 1}</PartNumber><ETag>${etags[i]}</ETag></Part>`;
+  }
+  xml += '</CompleteMultipartUpload>';
+
+  const completeResponse = await awsReq(uploadInfo, 'POST', `uploadId=${uploadId}`, xml, null, context);
+  await completeResponse.text();
+
+  // Call server-side completion handler
+  const finalResponse = await rest.rest(
+    `Cloud/Aws/Bucket/Upload/${uploadInfo.Cloud_Aws_Bucket_Upload__}:handleComplete`,
+    'POST',
+    {},
+    context
+  );
+
+  return finalResponse.data;
+}
+
+/**
+ * Upload a single block to AWS S3
+ * @private
+ */
+async function uploadAwsBlock(file, uploadInfo, uploadId, blockNum, blockSize, context) {
+  const startByte = blockNum * blockSize;
+  const endByte = Math.min(startByte + blockSize, file.size);
+  const awsPartNumber = blockNum + 1; // AWS uses 1-based part numbers
+
+  const arrayBuffer = await readFileSlice(file, startByte, endByte);
+
+  const response = await awsReq(
+    uploadInfo,
+    'PUT',
+    `partNumber=${awsPartNumber}&uploadId=${uploadId}`,
+    arrayBuffer,
+    null,
+    context
+  );
+
+  if (!response.ok) {
+    throw new Error(`HTTP ${response.status}: ${response.statusText}`);
+  }
+
+  const etag = response.headers.get('ETag');
+  await response.text();
+  return etag;
+}
+
+/**
+ * Read a slice of a file as ArrayBuffer
+ * @private
+ */
+function readFileSlice(file, start, end) {
+  return new Promise((resolve, reject) => {
+    // Handle browser File objects
+    if (file.browserFile) {
+      const slice = file.browserFile.slice(start, end);
+      const reader = new FileReader();
+      reader.addEventListener('loadend', () => resolve(reader.result));
+      reader.addEventListener('error', (e) => reject(e));
+      reader.readAsArrayBuffer(slice);
+      return;
+    }
+
+    if (!file.content) {
+      reject(new Error('Cannot read file content - no content property'));
+      return;
+    }
+
+    const content = file.content;
+
+    if (content instanceof ArrayBuffer) {
+      if (start === 0 && end === content.byteLength) {
+        resolve(content);
+      } else {
+        resolve(content.slice(start, end));
+      }
+    } else if (content.buffer instanceof ArrayBuffer) {
+      // TypedArray (Uint8Array, etc.)
+      resolve(content.buffer.slice(content.byteOffset + start, content.byteOffset + end));
+    } else if (typeof Buffer !== 'undefined' && content instanceof Buffer) {
+      // Node.js Buffer
+      const arrayBuffer = content.buffer.slice(
+        content.byteOffset + start,
+        content.byteOffset + Math.min(end, content.byteLength)
+      );
+      resolve(arrayBuffer);
+    } else if (typeof content === 'string') {
+      // String content
+      const encoder = new TextEncoder();
+      const uint8Array = encoder.encode(content.slice(start, end));
+      resolve(uint8Array.buffer);
+    } else {
+      reject(new Error('Unsupported content type'));
+    }
+  });
+}
+
 /**
  * AWS S3 request handler
  * Performs a signed request to AWS S3 using a signature obtained from the server
- *
+ *
  * @param {Object} upInfo - Upload info including bucket endpoint and key
  * @param {string} method - HTTP method (GET, POST, PUT)
  * @param {string} query - Query parameters
@@ -1168,4 +1513,7 @@ module.exports.upload = (function () {
   };
 
   return upload;
-}());
+}());
+
+// Export simple upload function for Node.js consumers
+module.exports.uploadFile = uploadFile;