@karpeleslab/klbfw 0.2.19 → 0.2.21

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/upload.js CHANGED
@@ -1,62 +1,9 @@
1
1
  /**
2
2
  * KLB Upload Module
3
- *
4
- * This module handles file uploads to KLB API endpoints.
5
- * It supports both browser and Node.js environments with a unified API.
6
- *
7
- * The module handles:
8
- * - File upload to KLB API endpoints
9
- * - Multiple upload protocols (PUT and AWS multipart)
10
- * - Progress tracking
11
- * - Pause, resume, retry, and cancel operations
12
- * - Browser and Node.js compatibility
13
- *
14
- * Browser usage:
15
- * ```js
16
- * // Open file picker and upload selected files
17
- * upload.init('Misc/Debug:testUpload')()
18
- * .then(result => console.log('Upload complete', result));
19
- *
20
- * // Open file picker with custom parameters and notification callback
21
- * upload.init('Support/Ticket:upload', {image_variation: 'alias=mini&strip&scale_crop=300x200'}, (result) => {
22
- * if (result.status == 'complete') console.log(result.final);
23
- * });
24
- *
25
- * // Upload a specific File object
26
- * upload.append('Misc/Debug:testUpload', fileObject)
27
- * .then(result => console.log('Upload complete', result));
28
- *
29
- * // Track progress
30
- * upload.onprogress = (status) => {
31
- * console.log('Progress:', status.running.map(i => i.status));
32
- * };
33
- *
34
- * // Cancel an upload
35
- * upload.cancelItem(uploadId);
36
- * ```
37
- *
38
- * Node.js usage:
39
- * ```js
40
- * // For Node.js environments, first install dependencies:
41
- * // npm install node-fetch @xmldom/xmldom
42
3
  *
43
- * // Simple upload with a buffer
44
- * const { uploadFile } = require('./upload');
45
- * const buffer = Buffer.from('Hello, World!');
46
- * const result = await uploadFile('Misc/Debug:testUpload', buffer, 'POST', {
47
- * filename: 'hello.txt',
48
- * type: 'text/plain'
49
- * });
4
+ * This module provides the uploadFile function for uploading files to KLB API endpoints.
5
+ * It supports both browser and Node.js environments with a unified API.
50
6
  *
51
- * // Upload large files using a stream (doesn't load entire file into memory)
52
- * const fs = require('fs');
53
- * const stream = fs.createReadStream('/path/to/2tb-file.bin');
54
- * const result = await uploadFile('Misc/Debug:testUpload', stream, 'POST', {
55
- * filename: 'large-file.bin',
56
- * type: 'application/octet-stream'
57
- * });
58
- * ```
59
- *
60
7
  * @module upload
61
8
  */
62
9
 
@@ -64,213 +11,13 @@
64
11
 
65
12
  const rest = require('./rest');
66
13
  const fwWrapper = require('./fw-wrapper');
67
- const sha256 = require('js-sha256').sha256;
68
-
69
- /**
70
- * Environment detection and cross-platform utilities
71
- */
72
- const env = {
73
- /**
74
- * Detect if running in a browser environment
75
- */
76
- isBrowser: typeof window !== 'undefined' && typeof document !== 'undefined',
77
-
78
- /**
79
- * Detect if running in a Node.js environment
80
- */
81
- isNode: typeof process !== 'undefined' && process.versions && process.versions.node,
82
-
83
- /**
84
- * Node.js specific modules (lazy-loaded)
85
- */
86
- node: {
87
- fetch: null,
88
- xmlParser: null,
89
- EventEmitter: null,
90
- eventEmitter: null
91
- }
92
- };
93
-
94
- /**
95
- * Initialize Node.js dependencies when in Node environment
96
- */
97
- if (env.isNode && !env.isBrowser) {
98
- try {
99
- env.node.fetch = require('node-fetch');
100
- env.node.xmlParser = require('@xmldom/xmldom');
101
- env.node.EventEmitter = require('events');
102
- env.node.eventEmitter = new (env.node.EventEmitter)();
103
- } catch (e) {
104
- console.warn('Node.js dependencies not available. Some functionality may be limited:', e.message);
105
- console.warn('To use in Node.js, install: npm install node-fetch @xmldom/xmldom');
106
- }
107
- }
108
-
109
- /**
110
- * Cross-platform utilities
111
- */
112
- const utils = {
113
- /**
114
- * Environment-agnostic fetch implementation
115
- * @param {string} url - The URL to fetch
116
- * @param {Object} options - Fetch options
117
- * @returns {Promise} - Fetch promise
118
- */
119
- fetch(url, options) {
120
- if (env.isBrowser && typeof window.fetch === 'function') {
121
- return window.fetch(url, options);
122
- } else if (env.isNode && env.node.fetch) {
123
- return env.node.fetch(url, options);
124
- } else if (typeof fetch === 'function') {
125
- // For environments where fetch is globally available
126
- return fetch(url, options);
127
- }
128
- return Promise.reject(new Error('fetch not available in this environment'));
129
- },
130
-
131
- /**
132
- * Environment-agnostic XML parser
133
- * @param {string} xmlString - XML string to parse
134
- * @returns {Document} - DOM-like document
135
- */
136
- parseXML(xmlString) {
137
- if (env.isBrowser) {
138
- return new DOMParser().parseFromString(xmlString, 'text/xml');
139
- } else if (env.isNode && env.node.xmlParser) {
140
- const DOMParserNode = env.node.xmlParser.DOMParser;
141
- const dom = new DOMParserNode().parseFromString(xmlString, 'text/xml');
142
-
143
- // Add querySelector interface for compatibility
144
- dom.querySelector = function(selector) {
145
- if (selector === 'UploadId') {
146
- const elements = this.getElementsByTagName('UploadId');
147
- return elements.length > 0 ? { innerHTML: elements[0].textContent } : null;
148
- }
149
- return null;
150
- };
151
-
152
- return dom;
153
- }
154
- throw new Error('XML parsing not available in this environment');
155
- },
156
-
157
- /**
158
- * Read file content as ArrayBuffer
159
- * Compatible with browser File objects and custom objects with content/slice
160
- *
161
- * @param {File|Object} file - File object or file-like object
162
- * @param {Object} options - Options for reading (start, end)
163
- * @param {Function} callback - Callback function(buffer, error)
164
- */
165
- readAsArrayBuffer(file, options, callback) {
166
- // Handle case where options is the callback
167
- if (typeof options === 'function') {
168
- callback = options;
169
- options = {};
170
- }
171
- options = options || {};
172
-
173
- if (env.isBrowser && file instanceof File) {
174
- // Browser: use native File API
175
- const start = options.start || 0;
176
- const end = options.end || file.size;
177
- const slice = file.slice(start, end);
178
-
179
- const reader = new FileReader();
180
- reader.addEventListener('loadend', () => callback(reader.result));
181
- reader.addEventListener('error', (e) => callback(null, e));
182
- reader.readAsArrayBuffer(slice);
183
- } else if (file.content) {
184
- // Memory buffer-based file
185
- const start = options.start || 0;
186
- const end = options.end || file.content.length || file.content.byteLength;
187
- let content = file.content;
188
-
189
- // Handle various content types
190
- if (content instanceof ArrayBuffer) {
191
- // Already an ArrayBuffer
192
- if (start === 0 && end === content.byteLength) {
193
- callback(content);
194
- } else {
195
- callback(content.slice(start, end));
196
- }
197
- } else if (content.buffer instanceof ArrayBuffer) {
198
- // TypedArray (Uint8Array, etc.)
199
- callback(content.buffer.slice(start, end));
200
- } else if (typeof Buffer !== 'undefined' && content instanceof Buffer) {
201
- // Node.js Buffer
202
- const arrayBuffer = content.buffer.slice(
203
- content.byteOffset + start,
204
- content.byteOffset + Math.min(end, content.byteLength)
205
- );
206
- callback(arrayBuffer);
207
- } else if (typeof content === 'string') {
208
- // String content - convert to ArrayBuffer
209
- const encoder = new TextEncoder();
210
- const uint8Array = encoder.encode(content.slice(start, end));
211
- callback(uint8Array.buffer);
212
- } else {
213
- callback(null, new Error('Unsupported content type'));
214
- }
215
- } else if (file.slice) {
216
- // Object with slice method (custom implementation)
217
- const start = options.start || 0;
218
- const end = options.end;
219
- const slice = file.slice(start, end);
220
-
221
- // Recursively handle the slice
222
- utils.readAsArrayBuffer(slice, callback);
223
- } else {
224
- callback(null, new Error('Cannot read file content - no supported method available'));
225
- }
226
- },
227
-
228
- /**
229
- * Dispatch a custom event in any environment
230
- * @param {string} eventName - Event name
231
- * @param {Object} detail - Event details
232
- */
233
- dispatchEvent(eventName, detail) {
234
- if (env.isBrowser) {
235
- const evt = new CustomEvent(eventName, { detail });
236
- document.dispatchEvent(evt);
237
- } else if (env.isNode && env.node.eventEmitter) {
238
- env.node.eventEmitter.emit(eventName, detail);
239
- }
240
- // In other environments, events are silently ignored
241
- },
242
-
243
- /**
244
- * Format a date for AWS (YYYYMMDDTHHMMSSZ)
245
- * @returns {string} Formatted date
246
- */
247
- getAmzTime() {
248
- const t = new Date();
249
- return t.getUTCFullYear() +
250
- this.pad(t.getUTCMonth() + 1) +
251
- this.pad(t.getUTCDate()) +
252
- 'T' + this.pad(t.getUTCHours()) +
253
- this.pad(t.getUTCMinutes()) +
254
- this.pad(t.getUTCSeconds()) +
255
- 'Z';
256
- },
257
-
258
- /**
259
- * Pad a number with leading zero if needed
260
- * @param {number} number - Number to pad
261
- * @returns {string} Padded number
262
- */
263
- pad(number) {
264
- return number < 10 ? '0' + number : String(number);
265
- }
266
- };
14
+ const { env, utils, awsReq, readChunkFromStream, readFileSlice } = require('./upload-internal');
267
15
 
268
16
  /**
269
- * Simple file upload for Node.js consumers
17
+ * Simple file upload function
270
18
  *
271
19
  * This function provides a straightforward way to upload a file and get a Promise
272
- * that resolves when the upload is complete. It doesn't use global state or the
273
- * upload.run() process.
20
+ * that resolves when the upload is complete.
274
21
  *
275
22
  * @param {string} api - API endpoint path (e.g., 'Misc/Debug:testUpload')
276
23
  * @param {Buffer|ArrayBuffer|Uint8Array|File|Object} buffer - File to upload. Can be:
@@ -358,7 +105,7 @@ async function uploadFile(api, buffer, method, params, context, options) {
358
105
  name: params.filename || 'file.txt',
359
106
  size: uint8Array.length,
360
107
  type: params.type || 'text/plain',
361
- lastModified: Date.now(),
108
+ lastModified: Date.now() / 1000,
362
109
  content: uint8Array.buffer
363
110
  };
364
111
  }
@@ -368,7 +115,7 @@ async function uploadFile(api, buffer, method, params, context, options) {
368
115
  name: params.filename || 'file.bin',
369
116
  size: buffer.byteLength,
370
117
  type: params.type || 'application/octet-stream',
371
- lastModified: Date.now(),
118
+ lastModified: Date.now() / 1000,
372
119
  content: buffer
373
120
  };
374
121
  }
@@ -378,7 +125,7 @@ async function uploadFile(api, buffer, method, params, context, options) {
378
125
  name: params.filename || 'file.bin',
379
126
  size: buffer.byteLength,
380
127
  type: params.type || 'application/octet-stream',
381
- lastModified: Date.now(),
128
+ lastModified: Date.now() / 1000,
382
129
  content: buffer
383
130
  };
384
131
  }
@@ -388,27 +135,27 @@ async function uploadFile(api, buffer, method, params, context, options) {
388
135
  name: params.filename || 'file.bin',
389
136
  size: buffer.length,
390
137
  type: params.type || 'application/octet-stream',
391
- lastModified: Date.now(),
138
+ lastModified: Date.now() / 1000,
392
139
  content: buffer
393
140
  };
394
141
  }
395
142
  // Handle browser File object
396
143
  else if (env.isBrowser && typeof File !== 'undefined' && buffer instanceof File) {
397
144
  fileObj = {
398
- name: buffer.name || params.filename || 'file.bin',
145
+ name: params.filename || buffer.name || 'file.bin',
399
146
  size: buffer.size,
400
- type: buffer.type || params.type || 'application/octet-stream',
401
- lastModified: buffer.lastModified || Date.now(),
147
+ type: params.type || buffer.type || 'application/octet-stream',
148
+ lastModified: (buffer.lastModified || Date.now()) / 1000,
402
149
  browserFile: buffer // Keep reference to original File for reading
403
150
  };
404
151
  }
405
152
  // Handle file-like object with content property
406
153
  else if (buffer && buffer.content !== undefined) {
407
154
  fileObj = {
408
- name: buffer.name || params.filename || 'file.bin',
155
+ name: params.filename || buffer.name || 'file.bin',
409
156
  size: buffer.size || buffer.content.byteLength || buffer.content.length,
410
- type: buffer.type || params.type || 'application/octet-stream',
411
- lastModified: buffer.lastModified || Date.now(),
157
+ type: params.type || buffer.type || 'application/octet-stream',
158
+ lastModified: (buffer.lastModified || Date.now()) / 1000,
412
159
  content: buffer.content
413
160
  };
414
161
  }
@@ -418,7 +165,7 @@ async function uploadFile(api, buffer, method, params, context, options) {
418
165
  name: params.filename || 'file.bin',
419
166
  size: params.size || null, // null means unknown size
420
167
  type: params.type || 'application/octet-stream',
421
- lastModified: Date.now(),
168
+ lastModified: Date.now() / 1000,
422
169
  stream: buffer
423
170
  };
424
171
  }
@@ -430,7 +177,7 @@ async function uploadFile(api, buffer, method, params, context, options) {
430
177
  const uploadParams = { ...params };
431
178
  uploadParams.filename = fileObj.name;
432
179
  uploadParams.size = fileObj.size;
433
- uploadParams.lastModified = fileObj.lastModified / 1000;
180
+ uploadParams.lastModified = fileObj.lastModified;
434
181
  uploadParams.type = fileObj.type;
435
182
 
436
183
  // Initialize upload with the server
@@ -891,1064 +638,5 @@ async function uploadAwsBlock(file, uploadInfo, uploadId, blockNum, blockSize, c
891
638
  return etag;
892
639
  }
893
640
 
894
- /**
895
- * Read a chunk of specified size from a stream
896
- * @private
897
- * @param {ReadableStream} stream - Node.js readable stream
898
- * @param {number} size - Number of bytes to read
899
- * @returns {Promise<ArrayBuffer|null>} - ArrayBuffer with data, or null if stream ended
900
- */
901
- function readChunkFromStream(stream, size) {
902
- return new Promise((resolve, reject) => {
903
- // Check if stream already ended before we start
904
- if (stream.readableEnded) {
905
- resolve(null);
906
- return;
907
- }
908
-
909
- const chunks = [];
910
- let bytesRead = 0;
911
- let resolved = false;
912
-
913
- const doResolve = (value) => {
914
- if (resolved) return;
915
- resolved = true;
916
- cleanup();
917
- resolve(value);
918
- };
919
-
920
- const onReadable = () => {
921
- if (resolved) return;
922
-
923
- let chunk;
924
- while (bytesRead < size && (chunk = stream.read(Math.min(size - bytesRead, 65536))) !== null) {
925
- chunks.push(chunk);
926
- bytesRead += chunk.length;
927
- }
928
-
929
- if (bytesRead >= size) {
930
- doResolve(combineChunks(chunks));
931
- } else if (stream.readableEnded) {
932
- // Stream already ended, resolve with what we have
933
- if (bytesRead === 0) {
934
- doResolve(null);
935
- } else {
936
- doResolve(combineChunks(chunks));
937
- }
938
- }
939
- };
940
-
941
- const onEnd = () => {
942
- if (resolved) return;
943
- if (bytesRead === 0) {
944
- doResolve(null); // Stream ended, no more data
945
- } else {
946
- doResolve(combineChunks(chunks));
947
- }
948
- };
949
-
950
- const onError = (err) => {
951
- if (resolved) return;
952
- resolved = true;
953
- cleanup();
954
- reject(err);
955
- };
956
-
957
- const cleanup = () => {
958
- stream.removeListener('readable', onReadable);
959
- stream.removeListener('end', onEnd);
960
- stream.removeListener('error', onError);
961
- };
962
-
963
- stream.on('readable', onReadable);
964
- stream.on('end', onEnd);
965
- stream.on('error', onError);
966
-
967
- // Try reading immediately in case data is already buffered
968
- onReadable();
969
- });
970
- }
971
-
972
- /**
973
- * Combine chunks into a single ArrayBuffer
974
- * @private
975
- */
976
- function combineChunks(chunks) {
977
- if (chunks.length === 0) {
978
- return new ArrayBuffer(0);
979
- }
980
- if (chunks.length === 1) {
981
- const chunk = chunks[0];
982
- return chunk.buffer.slice(chunk.byteOffset, chunk.byteOffset + chunk.length);
983
- }
984
- const totalLength = chunks.reduce((sum, chunk) => sum + chunk.length, 0);
985
- const result = new Uint8Array(totalLength);
986
- let offset = 0;
987
- for (const chunk of chunks) {
988
- result.set(new Uint8Array(chunk.buffer, chunk.byteOffset, chunk.length), offset);
989
- offset += chunk.length;
990
- }
991
- return result.buffer;
992
- }
993
-
994
- /**
995
- * Read a slice of a file as ArrayBuffer
996
- * @private
997
- */
998
- function readFileSlice(file, start, end) {
999
- return new Promise((resolve, reject) => {
1000
- // Handle browser File objects
1001
- if (file.browserFile) {
1002
- const slice = file.browserFile.slice(start, end);
1003
- const reader = new FileReader();
1004
- reader.addEventListener('loadend', () => resolve(reader.result));
1005
- reader.addEventListener('error', (e) => reject(e));
1006
- reader.readAsArrayBuffer(slice);
1007
- return;
1008
- }
1009
-
1010
- if (!file.content) {
1011
- reject(new Error('Cannot read file content - no content property'));
1012
- return;
1013
- }
1014
-
1015
- const content = file.content;
1016
-
1017
- if (content instanceof ArrayBuffer) {
1018
- if (start === 0 && end === content.byteLength) {
1019
- resolve(content);
1020
- } else {
1021
- resolve(content.slice(start, end));
1022
- }
1023
- } else if (content.buffer instanceof ArrayBuffer) {
1024
- // TypedArray (Uint8Array, etc.)
1025
- resolve(content.buffer.slice(content.byteOffset + start, content.byteOffset + end));
1026
- } else if (typeof Buffer !== 'undefined' && content instanceof Buffer) {
1027
- // Node.js Buffer
1028
- const arrayBuffer = content.buffer.slice(
1029
- content.byteOffset + start,
1030
- content.byteOffset + Math.min(end, content.byteLength)
1031
- );
1032
- resolve(arrayBuffer);
1033
- } else if (typeof content === 'string') {
1034
- // String content
1035
- const encoder = new TextEncoder();
1036
- const uint8Array = encoder.encode(content.slice(start, end));
1037
- resolve(uint8Array.buffer);
1038
- } else {
1039
- reject(new Error('Unsupported content type'));
1040
- }
1041
- });
1042
- }
1043
-
1044
- /**
1045
- * AWS S3 request handler
1046
- * Performs a signed request to AWS S3 using a signature obtained from the server
1047
- *
1048
- * @param {Object} upInfo - Upload info including bucket endpoint and key
1049
- * @param {string} method - HTTP method (GET, POST, PUT)
1050
- * @param {string} query - Query parameters
1051
- * @param {*} body - Request body
1052
- * @param {Object} headers - Request headers
1053
- * @param {Object} context - Request context
1054
- * @returns {Promise} - Request promise
1055
- */
1056
- function awsReq(upInfo, method, query, body, headers, context) {
1057
- headers = headers || {};
1058
- context = context || {};
1059
-
1060
- // Calculate body hash for AWS signature
1061
- let bodyHash;
1062
-
1063
- if (!body || body === "") {
1064
- // Empty body hash
1065
- bodyHash = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855";
1066
- } else {
1067
- try {
1068
- // Handle different body types
1069
- let bodyForHash = body;
1070
-
1071
- if (body instanceof ArrayBuffer || (body.constructor && body.constructor.name === 'ArrayBuffer')) {
1072
- bodyForHash = new Uint8Array(body);
1073
- } else if (body.constructor && body.constructor.name === 'Buffer') {
1074
- bodyForHash = Buffer.from(body).toString();
1075
- }
1076
-
1077
- bodyHash = sha256(bodyForHash);
1078
- } catch (e) {
1079
- console.error("Error calculating hash:", e.message);
1080
- bodyHash = "UNSIGNED-PAYLOAD";
1081
- }
1082
- }
1083
-
1084
- // Create AWS timestamp
1085
- const timestamp = utils.getAmzTime();
1086
- const datestamp = timestamp.substring(0, 8);
1087
-
1088
- // Set AWS headers
1089
- headers["X-Amz-Content-Sha256"] = bodyHash;
1090
- headers["X-Amz-Date"] = timestamp;
1091
-
1092
- // Prepare the string to sign
1093
- const authStringParts = [
1094
- "AWS4-HMAC-SHA256",
1095
- timestamp,
1096
- `${datestamp}/${upInfo.Bucket_Endpoint.Region}/s3/aws4_request`,
1097
- method,
1098
- `/${upInfo.Bucket_Endpoint.Name}/${upInfo.Key}`,
1099
- query,
1100
- `host:${upInfo.Bucket_Endpoint.Host}`
1101
- ];
1102
-
1103
- // Add x-* headers to sign
1104
- const headersToSign = ['host'];
1105
- const sortedHeaderKeys = Object.keys(headers).sort();
1106
-
1107
- for (const key of sortedHeaderKeys) {
1108
- const lowerKey = key.toLowerCase();
1109
- if (lowerKey.startsWith('x-')) {
1110
- headersToSign.push(lowerKey);
1111
- authStringParts.push(`${lowerKey}:${headers[key]}`);
1112
- }
1113
- }
1114
-
1115
- // Complete the string to sign
1116
- authStringParts.push('');
1117
- authStringParts.push(headersToSign.join(';'));
1118
- authStringParts.push(bodyHash);
1119
-
1120
- return new Promise((resolve, reject) => {
1121
- // Get signature from server
1122
- rest.rest(
1123
- `Cloud/Aws/Bucket/Upload/${upInfo.Cloud_Aws_Bucket_Upload__}:signV4`,
1124
- "POST",
1125
- { headers: authStringParts.join("\n") },
1126
- context
1127
- )
1128
- .then(response => {
1129
- // Construct the S3 URL
1130
- let url = `https://${upInfo.Bucket_Endpoint.Host}/${upInfo.Bucket_Endpoint.Name}/${upInfo.Key}`;
1131
- if (query) url += `?${query}`;
1132
-
1133
- // Add the authorization header
1134
- headers["Authorization"] = response.data.authorization;
1135
-
1136
- // Make the actual request to S3
1137
- return utils.fetch(url, {
1138
- method,
1139
- body,
1140
- headers
1141
- });
1142
- })
1143
- .then(resolve)
1144
- .catch(reject);
1145
- });
1146
- }
1147
-
1148
- /**
1149
- * Upload module (IIFE pattern)
1150
- * @returns {Object} Upload interface
1151
- */
1152
- module.exports.upload = (function () {
1153
- /**
1154
- * Upload state
1155
- */
1156
- const state = {
1157
- queue: [], // Queued uploads
1158
- failed: [], // Failed uploads
1159
- running: {}, // Currently processing uploads
1160
- nextId: 0, // Next upload ID
1161
- lastInput: null // Last created file input element (browser only)
1162
- };
1163
-
1164
- // Public API object
1165
- const upload = {};
1166
-
1167
- /**
1168
- * Helper Functions
1169
- */
1170
-
1171
- /**
1172
- * Notify progress to listeners
1173
- * Calls onprogress callback and dispatches events
1174
- */
1175
- function sendProgress() {
1176
- const status = upload.getStatus();
1177
-
1178
- // Call the onprogress callback if defined
1179
- if (typeof upload.onprogress === "function") {
1180
- upload.onprogress(status);
1181
- }
1182
-
1183
- // Dispatch event for listeners
1184
- utils.dispatchEvent("upload:progress", status);
1185
- }
1186
-
1187
- /**
1188
- * Handle upload failure
1189
- * @param {Object} up - Upload object
1190
- * @param {*} error - Error data
1191
- */
1192
- function handleFailure(up, error) {
1193
- // Skip if upload is no longer running
1194
- if (!(up.up_id in state.running)) return;
1195
-
1196
- // Check if already in failed list
1197
- for (const failedItem of state.failed) {
1198
- if (failedItem.up_id === up.up_id) {
1199
- return; // Already recorded as failed
1200
- }
1201
- }
1202
-
1203
- // Record failure
1204
- up.failure = error;
1205
- state.failed.push(up);
1206
- delete state.running[up.up_id];
1207
-
1208
- // Reject the promise so callers know the upload failed
1209
- if (up.reject) {
1210
- up.reject(error);
1211
- }
1212
-
1213
- // Continue processing queue
1214
- upload.run();
1215
-
1216
- // Notify progress
1217
- sendProgress();
1218
-
1219
- // Dispatch failure event
1220
- utils.dispatchEvent("upload:failed", {
1221
- item: up,
1222
- res: error
1223
- });
1224
- }
1225
-
1226
- /**
1227
- * Process a pending upload
1228
- * Initiates the upload process with the server
1229
- * @param {Object} up - Upload object
1230
- */
1231
- function processUpload(up) {
1232
- // Mark as processing
1233
- up.status = "pending-wip";
1234
-
1235
- // Prepare parameters
1236
- const params = up.params || {};
1237
-
1238
- // Set file metadata
1239
- params.filename = up.file.name;
1240
- params.size = up.file.size;
1241
- params.lastModified = up.file.lastModified / 1000;
1242
- params.type = up.file.type || "application/octet-stream";
1243
-
1244
- // Initialize upload with the server
1245
- rest.rest(up.path, "POST", params, up.context)
1246
- .then(function(response) {
1247
- // Method 1: AWS signed multipart upload
1248
- if (response.data.Cloud_Aws_Bucket_Upload__) {
1249
- return handleAwsMultipartUpload(up, response.data);
1250
- }
1251
-
1252
- // Method 2: Direct PUT upload
1253
- if (response.data.PUT) {
1254
- return handlePutUpload(up, response.data);
1255
- }
1256
-
1257
- // Invalid response format
1258
- delete state.running[up.up_id];
1259
- state.failed.push(up);
1260
- up.reject(new Error('Invalid upload response format'));
1261
- })
1262
- .catch(error => handleFailure(up, error));
1263
- }
1264
-
1265
- /**
1266
- * Set up AWS multipart upload
1267
- * @param {Object} up - Upload object
1268
- * @param {Object} data - Server response data
1269
- */
1270
- function handleAwsMultipartUpload(up, data) {
1271
- // Store upload info
1272
- up.info = data;
1273
-
1274
- // Initialize multipart upload
1275
- return awsReq(
1276
- up.info,
1277
- "POST",
1278
- "uploads=",
1279
- "",
1280
- {"Content-Type": up.file.type || "application/octet-stream", "X-Amz-Acl": "private"},
1281
- up.context
1282
- )
1283
- .then(response => response.text())
1284
- .then(str => utils.parseXML(str))
1285
- .then(dom => dom.querySelector('UploadId').innerHTML)
1286
- .then(uploadId => {
1287
- up.uploadId = uploadId;
1288
-
1289
- // Calculate optimal block size
1290
- const fileSize = up.file.size;
1291
-
1292
- // Target ~10k parts, but minimum 5MB per AWS requirements
1293
- let blockSize = Math.ceil(fileSize / 10000);
1294
- if (blockSize < 5242880) blockSize = 5242880;
1295
-
1296
- // Set up upload parameters
1297
- up.method = 'aws';
1298
- up.bsize = blockSize;
1299
- up.blocks = Math.ceil(fileSize / blockSize);
1300
- up.b = {};
1301
- up.status = 'uploading';
1302
-
1303
- // Continue upload process
1304
- upload.run();
1305
- })
1306
- .catch(error => handleFailure(up, error));
1307
- }
1308
-
1309
- /**
1310
- * Set up direct PUT upload
1311
- * @param {Object} up - Upload object
1312
- * @param {Object} data - Server response data
1313
- */
1314
- function handlePutUpload(up, data) {
1315
- // Store upload info
1316
- up.info = data;
1317
-
1318
- // Calculate block size (if multipart PUT is supported)
1319
- const fileSize = up.file.size;
1320
- let blockSize = fileSize; // Default: single block
1321
-
1322
- if (data.Blocksize) {
1323
- // Server supports multipart upload
1324
- blockSize = data.Blocksize;
1325
- }
1326
-
1327
- // Set up upload parameters
1328
- up.method = 'put';
1329
- up.bsize = blockSize;
1330
- up.blocks = Math.ceil(fileSize / blockSize);
1331
- up.b = {};
1332
- up.status = 'uploading';
1333
-
1334
- // Continue upload process
1335
- upload.run();
1336
- }
1337
-
1338
- /**
1339
- * Upload a single part of a file
1340
- * Handles both AWS multipart and direct PUT methods
1341
- * @param {Object} up - Upload object
1342
- * @param {number} partNumber - Part number (0-based)
1343
- */
1344
- function uploadPart(up, partNumber) {
1345
- // Mark part as pending
1346
- up.b[partNumber] = "pending";
1347
-
1348
- // Calculate byte range for this part
1349
- const startByte = partNumber * up.bsize;
1350
- const endByte = Math.min(startByte + up.bsize, up.file.size);
1351
-
1352
- // Read file slice as ArrayBuffer
1353
- utils.readAsArrayBuffer(up.file, {
1354
- start: startByte,
1355
- end: endByte
1356
- }, (arrayBuffer, error) => {
1357
- if (error) {
1358
- handleFailure(up, error);
1359
- return;
1360
- }
1361
-
1362
- // Choose upload method based on protocol
1363
- if (up.method === 'aws') {
1364
- uploadAwsPart(up, partNumber, arrayBuffer);
1365
- } else if (up.method === 'put') {
1366
- uploadPutPart(up, partNumber, startByte, arrayBuffer);
1367
- } else {
1368
- handleFailure(up, new Error(`Unknown upload method: ${up.method}`));
1369
- }
1370
- });
1371
- }
1372
-
1373
- /**
1374
- * Upload a part using AWS multipart upload
1375
- * @param {Object} up - Upload object
1376
- * @param {number} partNumber - Part number (0-based)
1377
- * @param {ArrayBuffer} data - Part data
1378
- */
1379
- function uploadAwsPart(up, partNumber, data) {
1380
- // AWS part numbers are 1-based
1381
- const awsPartNumber = partNumber + 1;
1382
-
1383
- awsReq(
1384
- up.info,
1385
- "PUT",
1386
- `partNumber=${awsPartNumber}&uploadId=${up.uploadId}`,
1387
- data,
1388
- null,
1389
- up.context
1390
- )
1391
- .then(response => {
1392
- // Verify the response is successful
1393
- if (!response.ok) {
1394
- throw new Error(`HTTP ${response.status}: ${response.statusText}`);
1395
- }
1396
- // Store ETag for this part (needed for completion)
1397
- const etag = response.headers.get("ETag");
1398
- // Read response body to ensure request completed
1399
- return response.text().then(() => etag);
1400
- })
1401
- .then(etag => {
1402
- up.b[partNumber] = etag;
1403
-
1404
- // Update progress and continue processing
1405
- sendProgress();
1406
- upload.run();
1407
- })
1408
- .catch(error => handleFailure(up, error));
1409
- }
1410
-
1411
- /**
1412
- * Upload a part using direct PUT
1413
- * @param {Object} up - Upload object
1414
- * @param {number} partNumber - Part number (0-based)
1415
- * @param {number} startByte - Starting byte position
1416
- * @param {ArrayBuffer} data - Part data
1417
- */
1418
- function uploadPutPart(up, partNumber, startByte, data) {
1419
- // Set up headers
1420
- const headers = {
1421
- "Content-Type": up.file.type || "application/octet-stream"
1422
- };
1423
-
1424
- // Add Content-Range header for multipart PUT
1425
- if (up.blocks > 1) {
1426
- const endByte = startByte + data.byteLength - 1; // inclusive
1427
- headers["Content-Range"] = `bytes ${startByte}-${endByte}/*`;
1428
- }
1429
-
1430
- // Perform the PUT request
1431
- utils.fetch(up.info.PUT, {
1432
- method: "PUT",
1433
- body: data,
1434
- headers: headers,
1435
- })
1436
- .then(response => {
1437
- // Verify the response is successful
1438
- if (!response.ok) {
1439
- throw new Error(`HTTP ${response.status}: ${response.statusText}`);
1440
- }
1441
- // Read response body to ensure request completed
1442
- return response.text();
1443
- })
1444
- .then(() => {
1445
- // Mark part as done
1446
- up.b[partNumber] = "done";
1447
-
1448
- // Update progress and continue processing
1449
- sendProgress();
1450
- upload.run();
1451
- })
1452
- .catch(error => handleFailure(up, error));
1453
- }
1454
-
1455
/**
 * Drive an upload that is already in the "uploading" phase.
 *
 * Scans the part table (up.b), launching not-yet-started parts up to a
 * concurrency cap of 3 in-flight parts, counting completed parts, and
 * finalizing the upload (AWS multipart or direct PUT) once every part
 * has finished.
 *
 * @param {Object} up - Upload object
 */
function processActiveUpload(up) {
  // Nothing to do while the upload is halted
  if (up.paused || up.canceled) return;

  let inFlight = 0;  // parts currently uploading (or just launched)
  let finished = 0;  // parts confirmed complete

  for (let part = 0; part < up.blocks; part++) {
    const partState = up.b[part];

    if (partState !== undefined && partState !== "pending") {
      // Completed part — does not count toward the concurrency cap
      finished++;
      continue;
    }

    if (partState === undefined) {
      // Not started yet; don't launch new parts while pausing
      if (up.paused) break;
      uploadPart(up, part);
    }
    // Either freshly launched or still "pending": counts as in flight
    inFlight++;

    // Cap concurrent part uploads at 3
    if (inFlight >= 3) break;
  }

  // Expose progress to listeners
  up.done = finished;

  // Finalize once every part has completed
  if (inFlight === 0 && finished === up.blocks) {
    up.status = "validating";

    if (up.method === 'aws') {
      completeAwsUpload(up);
    } else if (up.method === 'put') {
      completePutUpload(up);
    }
  }
}
1505
-
1506
/**
 * Finalize an AWS S3 multipart upload.
 *
 * Posts the CompleteMultipartUpload XML (built from the collected part
 * ETags in up.b), then calls the server-side :handleComplete endpoint,
 * marks the upload complete, resolves its promise and re-runs the queue.
 *
 * @param {Object} up - Upload object
 */
function completeAwsUpload(up) {
  // Build the completion document; AWS part numbers are 1-based.
  // See: https://docs.aws.amazon.com/AmazonS3/latest/API/mpUploadComplete.html
  const partEntries = [];
  for (let i = 0; i < up.blocks; i++) {
    partEntries.push(`<Part><PartNumber>${i + 1}</PartNumber><ETag>${up.b[i]}</ETag></Part>`);
  }
  const xml = "<CompleteMultipartUpload>" + partEntries.join("") + "</CompleteMultipartUpload>";

  // Tell S3 the multipart upload is done, then notify the KLB backend
  awsReq(up.info, "POST", `uploadId=${up.uploadId}`, xml, null, up.context)
    .then(response => response.text())
    .then(() =>
      rest.rest(
        `Cloud/Aws/Bucket/Upload/${up.info.Cloud_Aws_Bucket_Upload__}:handleComplete`,
        "POST",
        {},
        up.context
      )
    )
    .then(response => {
      // Record final state and hand the result to the caller
      up.status = "complete";
      up.final = response.data;

      sendProgress();
      delete state.running[up.up_id];
      up.resolve(up);

      // Keep the queue moving
      upload.run();
    })
    .catch(error => handleFailure(up, error));
}
1553
-
1554
/**
 * Finalize a direct PUT upload.
 *
 * Calls the completion endpoint supplied by the server (up.info.Complete),
 * marks the upload complete, resolves its promise and re-runs the queue.
 *
 * @param {Object} up - Upload object
 */
function completePutUpload(up) {
  rest.rest(up.info.Complete, "POST", {}, up.context)
    .then(response => {
      // Record final state and hand the result to the caller
      up.status = "complete";
      up.final = response.data;

      sendProgress();
      delete state.running[up.up_id];
      up.resolve(up);

      // Keep the queue moving
      upload.run();
    })
    .catch(error => handleFailure(up, error));
}
1580
-
1581
/**
 * Promote queued uploads into the running set.
 *
 * At most 3 uploads run concurrently; remaining items stay in
 * state.queue until a slot frees up. Progress listeners are notified
 * after the transfer (the early return preserves the original
 * behavior of not notifying when the running set is already full).
 *
 * Fix: the local variable was named `upload`, shadowing the
 * module-level `upload` object inside its own closure — renamed to
 * `item` to remove the shadowing hazard.
 */
function fillUploadQueue() {
  const MAX_CONCURRENT = 3;

  // Already saturated: nothing to promote, no notification (as before)
  if (Object.keys(state.running).length >= MAX_CONCURRENT) return;

  // Move items from the queue into the running set while slots remain
  while (Object.keys(state.running).length < MAX_CONCURRENT && state.queue.length > 0) {
    const item = state.queue.shift();
    state.running[item.up_id] = item;
  }

  // Notify progress
  sendProgress();
}
1601
-
1602
/**
 * Get current upload status.
 *
 * Fix (idiom): `Object.keys(state.running).map(id => state.running[id])`
 * replaced with the equivalent `Object.values(state.running)`.
 *
 * @returns {Object} Status object with queued, running and failed uploads
 */
upload.getStatus = function() {
  return {
    queue: state.queue,
    running: Object.values(state.running),
    failed: state.failed
  };
};
1613
-
1614
/**
 * Resume all failed uploads.
 *
 * Empties state.failed back into state.queue (preserving order) and
 * restarts the upload scheduler.
 */
upload.resume = function() {
  // Drain the failed list into the queue in one move
  if (state.failed.length > 0) {
    state.queue.push(...state.failed.splice(0, state.failed.length));
  }

  // Restart upload process
  upload.run();
};
1627
-
1628
/**
 * Initialize uploads in different environments.
 *
 * Browser: opens a hidden file picker; returns a promise that resolves
 * once every selected file has been processed.
 * Non-browser: returns a function taking files (array, single object,
 * Buffer or ArrayBuffer) that performs the uploads.
 *
 * Fixes:
 * - browser branch now has a `.catch` mirroring the Node branch, so a
 *   failed append can no longer stall the returned promise forever
 *   (count would never reach 0);
 * - `input.multiple` is assigned the boolean `true` instead of the
 *   string "multiple" (it is a boolean DOM property).
 *
 * @param {string} path - API path to upload to
 * @param {Object} params - Upload parameters
 * @param {Function} notify - Notification callback
 * @returns {Promise|Function} - Promise (browser) or upload starter function
 */
upload.init = function(path, params, notify) {
  params = params || {};

  if (env.isBrowser) {
    // Browser implementation: replace any previously created input
    if (state.lastInput !== null) {
      state.lastInput.parentNode.removeChild(state.lastInput);
      state.lastInput = null;
    }

    const input = document.createElement("input");
    input.type = "file";
    input.style.display = "none";
    if (!params.single) {
      input.multiple = true; // boolean DOM property
    }

    document.getElementsByTagName('body')[0].appendChild(input);
    state.lastInput = input;

    const promise = new Promise(function(resolve, reject) {
      input.onchange = function() {
        if (this.files.length === 0) {
          return resolve();
        }

        let count = this.files.length;
        if (notify) notify({status: 'init', count: count});

        for (let i = 0; i < this.files.length; i++) {
          upload.append(path, this.files[i], params, fwWrapper.getContext())
            .then(function(obj) {
              count -= 1;
              if (notify) notify(obj);
              if (count === 0) resolve();
            })
            .catch(function(err) {
              // A failed file must not stall the overall promise
              count -= 1;
              console.error('Error uploading file:', err);
              if (count === 0) resolve();
            });
        }
        upload.run();
      };
    });

    input.click();
    return promise;
  } else {
    // Non-browser environment
    return function(files) {
      // Allow array, single file object, or file content buffer
      if (!Array.isArray(files)) {
        if (files instanceof ArrayBuffer ||
            (files.buffer instanceof ArrayBuffer) ||
            (typeof Buffer !== 'undefined' && files instanceof Buffer)) {
          // If it's a buffer/ArrayBuffer, create a file-like object
          files = [{
            name: params.filename || 'file.bin',
            size: files.byteLength || files.length,
            type: params.type || 'application/octet-stream',
            lastModified: Date.now(),
            content: files
          }];
        } else {
          // Single file object
          files = [files];
        }
      }

      return new Promise(function(resolve, reject) {
        const count = files.length;
        if (count === 0) {
          return resolve();
        }

        if (notify) notify({status: 'init', count: count});

        let remainingCount = count;

        files.forEach(file => {
          try {
            // Ensure file has required properties
            if (!file.name) file.name = 'file.bin';
            if (!file.type) file.type = 'application/octet-stream';
            if (!file.lastModified) file.lastModified = Date.now();

            // Add slice method if not present
            if (!file.slice && file.content) {
              file.slice = function(start, end) {
                return {
                  content: this.content.slice(start, end || this.size)
                };
              };
            }

            upload.append(path, file, params, fwWrapper.getContext())
              .then(function(obj) {
                remainingCount -= 1;
                if (notify) notify(obj);
                if (remainingCount === 0) resolve();
              })
              .catch(function(err) {
                remainingCount -= 1;
                console.error('Error uploading file:', err);
                if (remainingCount === 0) resolve();
              });
          } catch (err) {
            remainingCount -= 1;
            console.error('Error processing file:', err);
            if (remainingCount === 0) resolve();
          }
        });

        upload.run();
      });
    };
  }
};
1750
-
1751
/**
 * Add a file to the upload queue.
 *
 * The returned promise settles only once the queued item is processed
 * (callers such as upload.init invoke upload.run() afterwards to start
 * the scheduler).
 *
 * @param {string} path - API path to upload to
 * @param {File|Object} file - File to upload
 * @param {Object} params - Upload parameters
 * @param {Object} context - Request context
 * @returns {Promise} - Upload promise
 */
upload.append = function(path, file, params, context) {
  return new Promise((resolve, reject) => {
    const entry = {
      path,
      file,
      resolve,
      reject,
      status: "pending",
      paused: false,
      up_id: state.nextId++,
      params: params || {},
      // Copy the context so later mutations don't leak back to the caller
      context: { ...(context || fwWrapper.getContext()) }
    };

    state.queue.push(entry);
  });
};
1782
-
1783
/**
 * Cancel an upload in progress or in queue.
 *
 * Sets the canceled flag on the matching running or queued item; the
 * scheduler and deleteItem() act on that flag.
 *
 * @param {number} uploadId - Upload ID to cancel
 */
upload.cancelItem = function(uploadId) {
  const active = state.running[uploadId];

  if (active) {
    // Running upload: flag it so the scheduler stops it
    active.canceled = true;
  } else {
    // Otherwise look for the first matching queued item
    const queued = state.queue.find(item => item.up_id === uploadId);
    if (queued) {
      queued.canceled = true;
    }
  }

  // Update progress
  sendProgress();
};
1805
-
1806
/**
 * Delete an upload from the queue or failed list.
 *
 * A running upload is only removed if it was canceled first; otherwise
 * both the queue and the failed list are checked for a match.
 *
 * @param {number} uploadId - Upload ID to delete
 */
upload.deleteItem = function(uploadId) {
  const active = state.running[uploadId];

  if (active) {
    // Running uploads may only be removed once canceled
    if (active.canceled) {
      delete state.running[uploadId];
    }
  } else {
    // Remove the first match from the queue, if any
    const queueIdx = state.queue.findIndex(item => item.up_id === uploadId);
    if (queueIdx !== -1) {
      state.queue.splice(queueIdx, 1);
    }

    // And likewise from the failed list
    const failedIdx = state.failed.findIndex(item => item.up_id === uploadId);
    if (failedIdx !== -1) {
      state.failed.splice(failedIdx, 1);
    }
  }

  // Update progress
  sendProgress();
};
1839
-
1840
/**
 * Pause an active upload.
 *
 * Only uploads currently in the "uploading" state can be paused.
 * (Local renamed from `upload` to avoid shadowing the module object.)
 *
 * @param {number} uploadId - Upload ID to pause
 */
upload.pauseItem = function(uploadId) {
  const item = state.running[uploadId];

  if (item && item.status === "uploading") {
    item.paused = true;
  }

  // Update progress
  sendProgress();
};
1856
-
1857
/**
 * Resume a paused upload.
 *
 * Clears the paused flag and immediately re-enters the part scheduler
 * for that upload.
 * (Local renamed from `upload` to avoid shadowing the module object.)
 *
 * @param {number} uploadId - Upload ID to resume
 */
upload.resumeItem = function(uploadId) {
  const item = state.running[uploadId];

  if (item && item.paused) {
    item.paused = false;
    processActiveUpload(item);
  }

  // Update progress
  sendProgress();
};
1874
-
1875
/**
 * Retry a failed upload.
 *
 * Moves the matching item from the failed list back into the queue,
 * clearing its failure record and resetting any parts stuck in the
 * "pending" state, then restarts the scheduler and dispatches an
 * "upload:retry" event.
 *
 * @param {number} uploadId - Upload ID to retry
 */
upload.retryItem = function(uploadId) {
  // Locate the upload in the failed list
  const failedIndex = state.failed.findIndex(item => item.up_id === uploadId);
  if (failedIndex === -1) return;

  const failedUpload = state.failed[failedIndex];

  // Skip if it has somehow already been re-queued
  if (state.queue.some(item => item.up_id === uploadId)) return;

  // Clear the failure record
  failedUpload.failure = {};

  // Parts that were mid-flight go back to "not started"
  for (let i = 0; i < failedUpload.blocks; i++) {
    if (failedUpload.b[i] === "pending") {
      failedUpload.b[i] = undefined;
    }
  }

  // Move from failed back into the queue
  state.failed.splice(failedIndex, 1);
  state.queue.push(failedUpload);

  // Restart the scheduler
  upload.run();

  // Dispatch retry event
  utils.dispatchEvent("upload:retry", { item: failedUpload });

  // Update progress
  sendProgress();
};
1925
-
1926
/**
 * Start or continue the upload process.
 *
 * Promotes queued uploads into the running set, then steps every
 * running upload according to its status: "pending" uploads are
 * initialized, "uploading" ones have their parts scheduled.
 * (Loop local renamed from `upload` to avoid shadowing the module object.)
 */
upload.run = function() {
  // Fill available slots from the queue
  fillUploadQueue();

  // Step each running upload
  for (const uploadId of Object.keys(state.running)) {
    const item = state.running[uploadId];

    if (item.status === "pending") {
      processUpload(item);
    } else if (item.status === "uploading") {
      processActiveUpload(item);
    }
  }
};
1949
-
1950
- return upload;
1951
- }());
1952
-
1953
- // Export simple upload function for Node.js consumers
1954
- module.exports.uploadFile = uploadFile;
641
+ // Export
642
+ module.exports.uploadFile = uploadFile;