@karpeleslab/klbfw 0.2.19 → 0.2.21

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/index.d.ts CHANGED
@@ -35,7 +35,38 @@ declare function restGet(name: string, params?: Record<string, any>): Promise<an
  declare function restSSE(name: string, method: 'GET', params?: Record<string, any>, context?: Record<string, any>): EventSource;

  // Upload module types
- interface UploadOptions {
+
+ /** File input types supported by uploadFile */
+ type UploadFileInput =
+ | ArrayBuffer
+ | Uint8Array
+ | File
+ | string
+ | { name?: string; size?: number; type?: string; content: ArrayBuffer | Uint8Array | string; lastModified?: number }
+ | NodeJS.ReadableStream;
+
+ /** Options for uploadFile */
+ interface UploadFileOptions {
+ /** Progress callback (0-1) */
+ onProgress?: (progress: number) => void;
+ /** Error callback - resolve to retry, reject to fail */
+ onError?: (error: Error, context: { phase: string; blockNum?: number; attempt: number }) => Promise<void>;
+ }
+
+ /** Options for uploadManyFiles */
+ interface UploadManyFilesOptions extends UploadFileOptions {
+ /** Progress callback with file-level details */
+ onProgress?: (progress: { fileIndex: number; fileCount: number; fileProgress: number; totalProgress: number }) => void;
+ /** Called when each file completes */
+ onFileComplete?: (info: { fileIndex: number; fileCount: number; result: any }) => void;
+ /** Error callback - context includes fileIndex */
+ onError?: (error: Error, context: { fileIndex: number; phase: string; blockNum?: number; attempt: number }) => Promise<void>;
+ /** Maximum concurrent uploads (1-10, default 3) */
+ concurrency?: number;
+ }
+
+ /** @deprecated Use uploadFile() instead */
+ interface UploadLegacyOptions {
  progress?: (progress: number) => void;
  endpoint?: string;
  headers?: Record<string, string>;
@@ -44,7 +75,40 @@ interface UploadOptions {
  params?: Record<string, any>;
  }

- declare function upload(file: File, options?: UploadOptions): Promise<any>;
+ /** @deprecated Use uploadFile() instead */
+ declare const upload: {
+ init(path: string, params?: Record<string, any>, notify?: (status: any) => void): Promise<any> | ((files: any) => Promise<any>);
+ append(path: string, file: File | object, params?: Record<string, any>, context?: Record<string, any>): Promise<any>;
+ run(): void;
+ getStatus(): { queue: any[]; running: any[]; failed: any[] };
+ resume(): void;
+ cancelItem(uploadId: number): void;
+ deleteItem(uploadId: number): void;
+ pauseItem(uploadId: number): void;
+ resumeItem(uploadId: number): void;
+ retryItem(uploadId: number): void;
+ onprogress?: (status: { queue: any[]; running: any[]; failed: any[] }) => void;
+ };
+
+ /** Upload a single file */
+ declare function uploadFile(
+ api: string,
+ buffer: UploadFileInput,
+ method?: string,
+ params?: Record<string, any>,
+ context?: Record<string, any>,
+ options?: UploadFileOptions
+ ): Promise<any>;
+
+ /** Upload multiple files with concurrency control */
+ declare function uploadManyFiles(
+ api: string,
+ files: UploadFileInput[],
+ method?: string,
+ params?: Record<string, any>,
+ context?: Record<string, any>,
+ options?: UploadManyFilesOptions
+ ): Promise<any[]>;

  // Utility types
  declare function getI18N(key: string, args?: Record<string, any>): string;
@@ -78,6 +142,11 @@ export {
  restGet,
  restSSE,
  upload,
+ uploadFile,
+ uploadManyFiles,
  getI18N,
- trimPrefix
+ trimPrefix,
+ UploadFileInput,
+ UploadFileOptions,
+ UploadManyFilesOptions
  };
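
A minimal usage sketch for the new declarations above (not part of the diff): the 'Some/Api:upload' path, params and file values are placeholders, only the call shapes come from the typings.

const { uploadFile, uploadManyFiles } = require('@karpeleslab/klbfw');

// Single file: progress is reported as a 0-1 fraction; onError may resolve to retry or reject to fail.
const file = { name: 'notes.txt', type: 'text/plain', content: 'hello world' };
uploadFile('Some/Api:upload', file, 'POST', {}, {}, {
  onProgress: (p) => console.log('upload ' + Math.round(p * 100) + '%'),
  onError: (err, ctx) => (ctx.attempt < 3 ? Promise.resolve() : Promise.reject(err))
}).then((result) => console.log('done', result));

// Multiple files: per-file and total progress, bounded concurrency (1-10, default 3).
uploadManyFiles('Some/Api:upload', [file, { name: 'b.bin', content: new Uint8Array(16) }], 'POST', {}, {}, {
  concurrency: 2,
  onProgress: ({ fileIndex, totalProgress }) => console.log(fileIndex, totalProgress),
  onFileComplete: ({ fileIndex, result }) => console.log('file', fileIndex, 'done', result)
}).then((results) => console.log(results.length, 'files uploaded'));
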
package/index.js CHANGED
@@ -9,6 +9,8 @@
  const internalFW = require('./fw-wrapper');
  const rest = require('./rest');
  const upload = require('./upload');
+ const uploadMany = require('./upload-many');
+ const uploadLegacy = require('./upload-legacy');
  const util = require('./util');
  const cookies = require('./cookies');

@@ -45,7 +47,10 @@ module.exports.restGet = rest.restGet; // New camelCase name
  module.exports.restSSE = rest.restSSE;

  // Upload module exports
- module.exports.upload = upload.upload;
+ /** @deprecated Use uploadFile() instead */
+ module.exports.upload = uploadLegacy.upload;
+ module.exports.uploadFile = upload.uploadFile;
+ module.exports.uploadManyFiles = uploadMany.uploadManyFiles;

  // Utility exports
  module.exports.getI18N = util.getI18N;
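
The deprecated queue-style `upload` export is kept (now wired to upload-legacy.js) alongside the new promise-based functions. A short sketch of the difference, using the shapes from index.d.ts; the endpoint path is again a placeholder:

const klbfw = require('@karpeleslab/klbfw');
const someFile = new File(['hello'], 'hello.txt', { type: 'text/plain' });

// Legacy queue API: append a file, run the queue, observe status via onprogress.
klbfw.upload.append('Some/Api:upload', someFile).then((res) => console.log(res));
klbfw.upload.onprogress = (status) => console.log(status.running.length, 'running,', status.queue.length, 'queued');
klbfw.upload.run();

// New API: one promise per file.
klbfw.uploadFile('Some/Api:upload', someFile).then((res) => console.log(res));
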
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@karpeleslab/klbfw",
- "version": "0.2.19",
+ "version": "0.2.21",
  "description": "Frontend Framework",
  "main": "index.js",
  "types": "index.d.ts",
package/upload-internal.js ADDED
@@ -0,0 +1,474 @@
+ /**
+ * KLB Upload Internal Module
+ *
+ * Shared utilities for upload modules.
+ * This module is not meant to be used directly.
+ *
+ * @module upload-internal
+ * @private
+ */
+
+ 'use strict';
+
+ const rest = require('./rest');
+ const sha256 = require('js-sha256').sha256;
+
+ /**
+ * Environment detection and cross-platform utilities
+ */
+ const env = {
+ /**
+ * Detect if running in a browser environment
+ */
+ isBrowser: typeof window !== 'undefined' && typeof document !== 'undefined',
+
+ /**
+ * Detect if running in a Node.js environment
+ */
+ isNode: typeof process !== 'undefined' && process.versions && process.versions.node,
+
+ /**
+ * Node.js specific modules (lazy-loaded)
+ */
+ node: {
+ fetch: null,
+ xmlParser: null,
+ EventEmitter: null,
+ eventEmitter: null
+ }
+ };
+
+ /**
+ * Initialize Node.js dependencies when in Node environment
+ */
+ if (env.isNode && !env.isBrowser) {
+ try {
+ env.node.fetch = require('node-fetch');
+ env.node.xmlParser = require('@xmldom/xmldom');
+ env.node.EventEmitter = require('events');
+ env.node.eventEmitter = new (env.node.EventEmitter)();
+ } catch (e) {
+ console.warn('Node.js dependencies not available. Some functionality may be limited:', e.message);
+ console.warn('To use in Node.js, install: npm install node-fetch @xmldom/xmldom');
+ }
+ }
+
+ /**
+ * Cross-platform utilities
+ */
+ const utils = {
+ /**
+ * Environment-agnostic fetch implementation
+ * @param {string} url - The URL to fetch
+ * @param {Object} options - Fetch options
+ * @returns {Promise} - Fetch promise
+ */
+ fetch(url, options) {
+ if (env.isBrowser && typeof window.fetch === 'function') {
+ return window.fetch(url, options);
+ } else if (env.isNode && env.node.fetch) {
+ return env.node.fetch(url, options);
+ } else if (typeof fetch === 'function') {
+ // For environments where fetch is globally available
+ return fetch(url, options);
+ }
+ return Promise.reject(new Error('fetch not available in this environment'));
+ },
+
+ /**
+ * Environment-agnostic XML parser
+ * @param {string} xmlString - XML string to parse
+ * @returns {Document} - DOM-like document
+ */
+ parseXML(xmlString) {
+ if (env.isBrowser) {
+ return new DOMParser().parseFromString(xmlString, 'text/xml');
+ } else if (env.isNode && env.node.xmlParser) {
+ const DOMParserNode = env.node.xmlParser.DOMParser;
+ const dom = new DOMParserNode().parseFromString(xmlString, 'text/xml');
+
+ // Add querySelector interface for compatibility
+ dom.querySelector = function(selector) {
+ if (selector === 'UploadId') {
+ const elements = this.getElementsByTagName('UploadId');
+ return elements.length > 0 ? { innerHTML: elements[0].textContent } : null;
+ }
+ return null;
+ };
+
+ return dom;
+ }
+ throw new Error('XML parsing not available in this environment');
+ },
+
+ /**
+ * Read file content as ArrayBuffer
+ * Compatible with browser File objects and custom objects with content/slice
+ *
+ * @param {File|Object} file - File object or file-like object
+ * @param {Object} options - Options for reading (start, end)
+ * @param {Function} callback - Callback function(buffer, error)
+ */
+ readAsArrayBuffer(file, options, callback) {
+ // Handle case where options is the callback
+ if (typeof options === 'function') {
+ callback = options;
+ options = {};
+ }
+ options = options || {};
+
+ if (env.isBrowser && file instanceof File) {
+ // Browser: use native File API
+ const start = options.start || 0;
+ const end = options.end || file.size;
+ const slice = file.slice(start, end);
+
+ const reader = new FileReader();
+ reader.addEventListener('loadend', () => callback(reader.result));
+ reader.addEventListener('error', (e) => callback(null, e));
+ reader.readAsArrayBuffer(slice);
+ } else if (file.content) {
+ // Memory buffer-based file
+ const start = options.start || 0;
+ const end = options.end || file.content.length || file.content.byteLength;
+ let content = file.content;
+
+ // Handle various content types
+ if (content instanceof ArrayBuffer) {
+ // Already an ArrayBuffer
+ if (start === 0 && end === content.byteLength) {
+ callback(content);
+ } else {
+ callback(content.slice(start, end));
+ }
+ } else if (content.buffer instanceof ArrayBuffer) {
+ // TypedArray (Uint8Array, etc.)
+ callback(content.buffer.slice(start, end));
+ } else if (typeof Buffer !== 'undefined' && content instanceof Buffer) {
+ // Node.js Buffer
+ const arrayBuffer = content.buffer.slice(
+ content.byteOffset + start,
+ content.byteOffset + Math.min(end, content.byteLength)
+ );
+ callback(arrayBuffer);
+ } else if (typeof content === 'string') {
+ // String content - convert to ArrayBuffer
+ const encoder = new TextEncoder();
+ const uint8Array = encoder.encode(content.slice(start, end));
+ callback(uint8Array.buffer);
+ } else {
+ callback(null, new Error('Unsupported content type'));
+ }
+ } else if (file.slice) {
+ // Object with slice method (custom implementation)
+ const start = options.start || 0;
+ const end = options.end;
+ const slice = file.slice(start, end);
+
+ // Recursively handle the slice
+ utils.readAsArrayBuffer(slice, callback);
+ } else {
+ callback(null, new Error('Cannot read file content - no supported method available'));
+ }
+ },
+
+ /**
+ * Dispatch a custom event in any environment
+ * @param {string} eventName - Event name
+ * @param {Object} detail - Event details
+ */
+ dispatchEvent(eventName, detail) {
+ if (env.isBrowser) {
+ const evt = new CustomEvent(eventName, { detail });
+ document.dispatchEvent(evt);
+ } else if (env.isNode && env.node.eventEmitter) {
+ env.node.eventEmitter.emit(eventName, detail);
+ }
+ // In other environments, events are silently ignored
+ },
+
+ /**
+ * Format a date for AWS (YYYYMMDDTHHMMSSZ)
+ * @returns {string} Formatted date
+ */
+ getAmzTime() {
+ const t = new Date();
+ return t.getUTCFullYear() +
+ this.pad(t.getUTCMonth() + 1) +
+ this.pad(t.getUTCDate()) +
+ 'T' + this.pad(t.getUTCHours()) +
+ this.pad(t.getUTCMinutes()) +
+ this.pad(t.getUTCSeconds()) +
+ 'Z';
+ },
+
+ /**
+ * Pad a number with leading zero if needed
+ * @param {number} number - Number to pad
+ * @returns {string} Padded number
+ */
+ pad(number) {
+ return number < 10 ? '0' + number : String(number);
+ }
+ };
+
+ /**
+ * AWS S3 request handler
+ * Performs a signed request to AWS S3 using a signature obtained from the server
+ *
+ * @param {Object} upInfo - Upload info including bucket endpoint and key
+ * @param {string} method - HTTP method (GET, POST, PUT)
+ * @param {string} query - Query parameters
+ * @param {*} body - Request body
+ * @param {Object} headers - Request headers
+ * @param {Object} context - Request context
+ * @returns {Promise} - Request promise
+ */
+ function awsReq(upInfo, method, query, body, headers, context) {
+ headers = headers || {};
+ context = context || {};
+
+ // Calculate body hash for AWS signature
+ let bodyHash;
+
+ if (!body || body === "") {
+ // Empty body hash
+ bodyHash = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855";
+ } else {
+ try {
+ // Handle different body types
+ let bodyForHash = body;
+
+ if (body instanceof ArrayBuffer || (body.constructor && body.constructor.name === 'ArrayBuffer')) {
+ bodyForHash = new Uint8Array(body);
+ } else if (body.constructor && body.constructor.name === 'Buffer') {
+ bodyForHash = Buffer.from(body).toString();
+ }
+
+ bodyHash = sha256(bodyForHash);
+ } catch (e) {
+ console.error("Error calculating hash:", e.message);
+ bodyHash = "UNSIGNED-PAYLOAD";
+ }
+ }
+
+ // Create AWS timestamp
+ const timestamp = utils.getAmzTime();
+ const datestamp = timestamp.substring(0, 8);
+
+ // Set AWS headers
+ headers["X-Amz-Content-Sha256"] = bodyHash;
+ headers["X-Amz-Date"] = timestamp;
+
+ // Prepare the string to sign
+ const authStringParts = [
+ "AWS4-HMAC-SHA256",
+ timestamp,
+ `${datestamp}/${upInfo.Bucket_Endpoint.Region}/s3/aws4_request`,
+ method,
+ `/${upInfo.Bucket_Endpoint.Name}/${upInfo.Key}`,
+ query,
+ `host:${upInfo.Bucket_Endpoint.Host}`
+ ];
+
+ // Add x-* headers to sign
+ const headersToSign = ['host'];
+ const sortedHeaderKeys = Object.keys(headers).sort();
+
+ for (const key of sortedHeaderKeys) {
+ const lowerKey = key.toLowerCase();
+ if (lowerKey.startsWith('x-')) {
+ headersToSign.push(lowerKey);
+ authStringParts.push(`${lowerKey}:${headers[key]}`);
+ }
+ }
+
+ // Complete the string to sign
+ authStringParts.push('');
+ authStringParts.push(headersToSign.join(';'));
+ authStringParts.push(bodyHash);
+
+ return new Promise((resolve, reject) => {
+ // Get signature from server
+ rest.rest(
+ `Cloud/Aws/Bucket/Upload/${upInfo.Cloud_Aws_Bucket_Upload__}:signV4`,
+ "POST",
+ { headers: authStringParts.join("\n") },
+ context
+ )
+ .then(response => {
+ // Construct the S3 URL
+ let url = `https://${upInfo.Bucket_Endpoint.Host}/${upInfo.Bucket_Endpoint.Name}/${upInfo.Key}`;
+ if (query) url += `?${query}`;
+
+ // Add the authorization header
+ headers["Authorization"] = response.data.authorization;
+
+ // Make the actual request to S3
+ return utils.fetch(url, {
+ method,
+ body,
+ headers
+ });
+ })
+ .then(resolve)
+ .catch(reject);
+ });
+ }
+
+ /**
+ * Read a chunk of specified size from a stream
+ * @param {ReadableStream} stream - Node.js readable stream
+ * @param {number} size - Number of bytes to read
+ * @returns {Promise<ArrayBuffer|null>} - ArrayBuffer with data, or null if stream ended
+ */
+ function readChunkFromStream(stream, size) {
+ return new Promise((resolve, reject) => {
+ // Check if stream already ended before we start
+ if (stream.readableEnded) {
+ resolve(null);
+ return;
+ }
+
+ const chunks = [];
+ let bytesRead = 0;
+ let resolved = false;
+
+ const doResolve = (value) => {
+ if (resolved) return;
+ resolved = true;
+ cleanup();
+ resolve(value);
+ };
+
+ const onReadable = () => {
+ if (resolved) return;
+
+ let chunk;
+ while (bytesRead < size && (chunk = stream.read(Math.min(size - bytesRead, 65536))) !== null) {
+ chunks.push(chunk);
+ bytesRead += chunk.length;
+ }
+
+ if (bytesRead >= size) {
+ doResolve(combineChunks(chunks));
+ } else if (stream.readableEnded) {
+ // Stream already ended, resolve with what we have
+ if (bytesRead === 0) {
+ doResolve(null);
+ } else {
+ doResolve(combineChunks(chunks));
+ }
+ }
+ };
+
+ const onEnd = () => {
+ if (resolved) return;
+ if (bytesRead === 0) {
+ doResolve(null); // Stream ended, no more data
+ } else {
+ doResolve(combineChunks(chunks));
+ }
+ };
+
+ const onError = (err) => {
+ if (resolved) return;
+ resolved = true;
+ cleanup();
+ reject(err);
+ };
+
+ const cleanup = () => {
+ stream.removeListener('readable', onReadable);
+ stream.removeListener('end', onEnd);
+ stream.removeListener('error', onError);
+ };
+
+ stream.on('readable', onReadable);
+ stream.on('end', onEnd);
+ stream.on('error', onError);
+
+ // Try reading immediately in case data is already buffered
+ onReadable();
+ });
+ }
+
+ /**
+ * Combine chunks into a single ArrayBuffer
+ * @private
+ */
+ function combineChunks(chunks) {
+ if (chunks.length === 0) {
+ return new ArrayBuffer(0);
+ }
+ if (chunks.length === 1) {
+ const chunk = chunks[0];
+ return chunk.buffer.slice(chunk.byteOffset, chunk.byteOffset + chunk.length);
+ }
+ const totalLength = chunks.reduce((sum, chunk) => sum + chunk.length, 0);
+ const result = new Uint8Array(totalLength);
+ let offset = 0;
+ for (const chunk of chunks) {
+ result.set(new Uint8Array(chunk.buffer, chunk.byteOffset, chunk.length), offset);
+ offset += chunk.length;
+ }
+ return result.buffer;
+ }
+
+ /**
+ * Read a slice of a file as ArrayBuffer
+ * @private
+ */
+ function readFileSlice(file, start, end) {
+ return new Promise((resolve, reject) => {
+ // Handle browser File objects
+ if (file.browserFile) {
+ const slice = file.browserFile.slice(start, end);
+ const reader = new FileReader();
+ reader.addEventListener('loadend', () => resolve(reader.result));
+ reader.addEventListener('error', (e) => reject(e));
+ reader.readAsArrayBuffer(slice);
+ return;
+ }
+
+ if (!file.content) {
+ reject(new Error('Cannot read file content - no content property'));
+ return;
+ }
+
+ const content = file.content;
+
+ if (content instanceof ArrayBuffer) {
+ if (start === 0 && end === content.byteLength) {
+ resolve(content);
+ } else {
+ resolve(content.slice(start, end));
+ }
+ } else if (content.buffer instanceof ArrayBuffer) {
+ // TypedArray (Uint8Array, etc.)
+ resolve(content.buffer.slice(content.byteOffset + start, content.byteOffset + end));
+ } else if (typeof Buffer !== 'undefined' && content instanceof Buffer) {
+ // Node.js Buffer
+ const arrayBuffer = content.buffer.slice(
+ content.byteOffset + start,
+ content.byteOffset + Math.min(end, content.byteLength)
+ );
+ resolve(arrayBuffer);
+ } else if (typeof content === 'string') {
+ // String content
+ const encoder = new TextEncoder();
+ const uint8Array = encoder.encode(content.slice(start, end));
+ resolve(uint8Array.buffer);
+ } else {
+ reject(new Error('Unsupported content type'));
+ }
+ });
+ }
+
+ // Exports
+ module.exports.env = env;
+ module.exports.utils = utils;
+ module.exports.awsReq = awsReq;
+ module.exports.readChunkFromStream = readChunkFromStream;
+ module.exports.combineChunks = combineChunks;
+ module.exports.readFileSlice = readFileSlice;
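
The stream helpers in this new module can be exercised on their own. A small Node sketch (the module is marked @private, so this is illustrative only; the require path assumes the file ships as upload-internal.js):

const { Readable } = require('stream');
const { readChunkFromStream } = require('./upload-internal');

async function demo() {
  // A plain binary readable with 100 bytes buffered, then end-of-stream.
  const stream = new Readable({ read() {} });
  stream.push(Buffer.alloc(60, 1));
  stream.push(Buffer.alloc(40, 2));
  stream.push(null);

  // Reads up to 32 bytes per call; the final chunk may be shorter, and null signals the end.
  let chunk;
  while ((chunk = await readChunkFromStream(stream, 32)) !== null) {
    console.log('read', chunk.byteLength, 'bytes');
  }
}
demo();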