dpu-cloud-sdk 1.0.0

This diff shows the content of a publicly available package version as released to one of the supported registries; it is provided for informational purposes only and reflects the package exactly as it appears in that registry.
Files changed (51)
  1. package/.env.development +1 -0
  2. package/.env.production +1 -0
  3. package/dist/DPUClient.d.ts +83 -0
  4. package/dist/DPUClient.js +1043 -0
  5. package/dist/ServiceIntegration.d.ts +20 -0
  6. package/dist/ServiceIntegration.js +506 -0
  7. package/dist/api/auth.d.ts +3 -0
  8. package/dist/api/auth.js +10 -0
  9. package/dist/api/compress.d.ts +4 -0
  10. package/dist/api/compress.js +16 -0
  11. package/dist/api/translate.d.ts +8 -0
  12. package/dist/api/translate.js +38 -0
  13. package/dist/index.d.ts +4 -0
  14. package/dist/index.js +4 -0
  15. package/dist/models/RequestModel.d.ts +33 -0
  16. package/dist/models/RequestModel.js +2 -0
  17. package/dist/models/ResponseModel.d.ts +99 -0
  18. package/dist/models/ResponseModel.js +1 -0
  19. package/dist/utils/Config.d.ts +32 -0
  20. package/dist/utils/Config.js +44 -0
  21. package/dist/utils/Constants.d.ts +48 -0
  22. package/dist/utils/Constants.js +55 -0
  23. package/dist/utils/Enum.d.ts +27 -0
  24. package/dist/utils/Enum.js +30 -0
  25. package/dist/utils/Helper.d.ts +4 -0
  26. package/dist/utils/Helper.js +47 -0
  27. package/dist/workerDownloadSingleFile.d.ts +1 -0
  28. package/dist/workerDownloadSingleFile.js +35 -0
  29. package/dist/workerUploadChildFile.d.ts +1 -0
  30. package/dist/workerUploadChildFile.js +82 -0
  31. package/dist/workerUploadSingleFile.d.ts +1 -0
  32. package/dist/workerUploadSingleFile.js +93 -0
  33. package/dpubim-service-1.1.28.tgz +0 -0
  34. package/package.json +33 -0
  35. package/src/DPUClient.ts +1505 -0
  36. package/src/ServiceIntegration.ts +710 -0
  37. package/src/api/auth.ts +18 -0
  38. package/src/api/compress.ts +36 -0
  39. package/src/api/translate.ts +94 -0
  40. package/src/index.ts +4 -0
  41. package/src/models/RequestModel.ts +44 -0
  42. package/src/models/ResponseModel.ts +110 -0
  43. package/src/utils/Config.ts +59 -0
  44. package/src/utils/Constants.ts +61 -0
  45. package/src/utils/Enum.ts +29 -0
  46. package/src/utils/Helper.ts +57 -0
  47. package/src/workerDownloadSingleFile.ts +34 -0
  48. package/src/workerUploadChildFile.ts +85 -0
  49. package/src/workerUploadSingleFile.ts +123 -0
  50. package/tsconfig.json +108 -0
  51. package/webpack.config.js +43 -0
package/dist/DPUClient.js
@@ -0,0 +1,1043 @@
+ import pLimit from "p-limit";
+ import { CancelDownload, CompressFolder, GetStatusCompress, } from "./api/compress";
+ import { ServiceIntegration } from "./ServiceIntegration";
+ import { ApiStatus, ConfigFileRules } from "./utils/Constants";
+ import { dpuConfig } from "./utils/Config";
+ import { readFileDataAsBase64 } from "./utils/Helper";
+ import { StatusWorker } from "./utils/Enum";
+ // import streamSaver from "streamsaver";
+ import { GetEPSGRegionCode, GetFileTileSet, GetStatusTranslate, GetStatusTranslateFile, TranslateFile, } from "./api/translate";
+ import { GetToken } from "./api/auth";
+ export class DPUClient {
+     accessToken;
+     reFreshToken;
+     maxRetry;
+     workersStatus = [];
+     constructor(accessToken, reFreshToken, maxRetry, baseURL) {
+         this.accessToken = accessToken;
+         this.reFreshToken = reFreshToken;
+         this.maxRetry = maxRetry || ConfigFileRules.MaxRetry;
+         // Set the base URL if provided
+         if (baseURL) {
+             dpuConfig.setBaseURL(baseURL);
+         }
+     }
+     async baseAction(action) {
+         return new Promise(async (resolve, reject) => {
+             try {
+                 if (this.accessToken) {
+                     let currentRetry = 0;
+                     let response;
+                     do {
+                         currentRetry++;
+                         response = await action(this.accessToken);
+                         if (response.statusCode === ApiStatus.Success)
+                             resolve(response.data);
+                         else if (response.statusCode === ApiStatus.Unauthorized &&
+                             this.reFreshToken) {
+                             this.accessToken = await this.reFreshToken();
+                         }
+                         else
+                             reject(response.message);
+                     } while (this.maxRetry > currentRetry &&
+                         response.statusCode === ApiStatus.Unauthorized);
+                 }
+                 else
+                     reject("Access token is required");
+             }
+             catch (error) {
+                 reject(error);
+             }
+         });
+     }
+     async baseActionAnonymous(action) {
+         return new Promise(async (resolve, reject) => {
+             try {
+                 let response;
+                 response = await action();
+                 if (response.statusCode === ApiStatus.Success)
+                     resolve(response.data);
+                 else
+                     reject(response.message);
+             }
+             catch (error) {
+                 reject(error);
+             }
+         });
+     }
+     //#region Compress and Download Folder
+     async compressFolder(bucketName, rootFolder) {
+         return await this.baseAction((accessToken) => {
+             return CompressFolder(accessToken, bucketName, rootFolder);
+         });
+     }
+     async getStatusCompress(requestIds) {
+         return await this.baseAction((accessToken) => {
+             return GetStatusCompress(requestIds, accessToken);
+         });
+     }
+     async cancelDownload(requestId) {
+         return await this.baseAction((accessToken) => {
+             return CancelDownload(requestId, accessToken);
+         });
+     }
+     //#endregion
+     //#region Upload File
+     async uploadFileAferInit(path, file, initUpload, cancellationToken, onProgress, maxWebWorker, worker, dataUploadId) {
+         const bucketName = path.split("/").at(0);
+         if (!bucketName)
+             return new Promise((resolve, reject) => {
+                 reject("Path is not valid");
+             });
+         const customFileName = path.split("/").at(-1);
+         if (!customFileName)
+             return new Promise((resolve, reject) => {
+                 reject("Path is not valid");
+             });
+         const prefix = path
+             .replace(bucketName + "/", "")
+             .replace(customFileName, "");
+         if (cancellationToken?.signal.aborted) {
+             return new Promise((resolve, reject) => {
+                 reject("Upload cancelled");
+             });
+         }
+         const abortHandler = () => {
+             // Cleanup workers
+             this.workersStatus.forEach((workerStatus) => {
+                 workerStatus.worker.terminate();
+             });
+             this.workersStatus = [];
+         };
+         cancellationToken?.signal.addEventListener("abort", abortHandler);
+         try {
+             const result = await this.uploadFileRetry(bucketName, file, cancellationToken || new AbortController(), onProgress, maxWebWorker, prefix, initUpload, customFileName, worker, dataUploadId);
+             // Cleanup
+             cancellationToken?.signal.removeEventListener("abort", abortHandler);
+             return result;
+         }
+         catch (error) {
+             // Cleanup
+             cancellationToken?.signal.removeEventListener("abort", abortHandler);
+             if (error instanceof Error && error.name === "AbortError") {
+                 return new Promise((resolve, reject) => {
+                     reject({
+                         errorCode: "ClientAbort",
+                         message: "Upload cancelled",
+                     });
+                 });
+             }
+             throw error;
+         }
+     }
+     async uploadFileRetry(bucketName, file, cancellationToken, onProgress, maxWebWorker, prefix, initUpload, customFileName, worker, dataUploadId) {
+         if (!this.accessToken)
+             return new Promise((resolve, reject) => {
+                 reject("Access token is required");
+             });
+         // Check cancellation before starting
+         if (cancellationToken.signal.aborted) {
+             return new Promise((resolve, reject) => {
+                 reject("Upload cancelled");
+             });
+         }
+         const fileSize = file.size;
+         const fileName = prefix && prefix.length > 0
+             ? `${prefix.at(prefix.length - 1) === "/" ? prefix : prefix + "/"}${customFileName ?? file.name}`
+             : customFileName ?? file.name;
+         maxWebWorker = maxWebWorker ? maxWebWorker : ConfigFileRules.MaxWebWorker;
+         var service = new ServiceIntegration();
+         await service.validateFileSize(fileName, fileSize);
+         // Handle small file upload
+         if (fileSize <= ConfigFileRules.ChunkSize) {
+             const buffer = await readFileDataAsBase64(file);
+             const resUploadSmallFile = await service.uploadSmallFile(bucketName, fileName, buffer, this.accessToken, cancellationToken, onProgress, undefined, true);
+             onProgress?.(buffer.length);
+             return resUploadSmallFile;
+         }
+         // Init multi upload
+         if (!initUpload) {
+             initUpload = await service.initMultiPartUpload(bucketName, fileName, this.accessToken, undefined, cancellationToken);
+             if (!initUpload) {
+                 return new Promise((resolve, reject) => {
+                     reject(`Fail to initiate multipart upload for file ${fileName}`);
+                 });
+             }
+         }
+         var numberOfChunks = service.calculateNumberOfChunks(fileSize);
+         const urls = await service.generatePresignedUrls(bucketName, fileName, initUpload, numberOfChunks, this.accessToken, cancellationToken);
+         if (!urls) {
+             return new Promise((resolve, reject) => {
+                 reject(`Fail to generate presigned urls for file ${fileName}`);
+             });
+         }
+         const uploadChunks = async (chunkIndexs) => {
+             let chunkIndexErrors = [];
+             let responseUploadChilds = [];
+             const maxLimit = chunkIndexs.length < ConfigFileRules.LimitFile
+                 ? chunkIndexs.length
+                 : ConfigFileRules.LimitFile;
+             const limit = pLimit(maxLimit);
+             // init worker
+             const workerPorts = [];
+             const maxWorker = chunkIndexs.length < maxWebWorker ? chunkIndexs.length : maxWebWorker;
+             // Create workers
+             for (let index = 0; index < maxWorker; index++) {
+                 const channel = new MessageChannel();
+                 const worker = new Worker(new URL("workerUploadChildFile", import.meta.url));
+                 worker.postMessage({ port: channel.port2 }, [channel.port2]);
+                 workerPorts.push(channel.port1);
+                 this.workersStatus.push({
+                     worker: worker,
+                     taskNumber: 0,
+                     id: index,
+                     taskHandle: 0,
+                 });
+             }
+             if (worker) {
+                 worker.postMessage({ workerPorts }, workerPorts);
+             }
+             try {
+                 // const tasks = Array.from({ length: numberOfChunks }, (_, index) => {
+                 const tasks = chunkIndexs.map((index) => {
+                     return limit(async () => {
+                         // Check cancellation before each chunk
+                         if (cancellationToken.signal.aborted) {
+                             throw new Error("Upload cancelled");
+                         }
+                         try {
+                             const chunkStart = index * ConfigFileRules.ChunkSize;
+                             const chunkEnd = Math.min(chunkStart + ConfigFileRules.ChunkSize, fileSize);
+                             const chunk = file.slice(chunkStart, chunkEnd);
+                             const arrayBuffer = await chunk.arrayBuffer();
+                             var url = urls[index];
+                             return this.runOnWorker({
+                                 currentPresignUrl: { url: url, headers: initUpload.headers },
+                                 arrayBuffer: arrayBuffer,
+                                 partNumber: index + 1,
+                                 onProgress: onProgress,
+                                 requestId: `${index + 1}-${Date.now()}`,
+                                 cancellationToken: cancellationToken,
+                                 dataUploadId: dataUploadId,
+                                 fileName: file.name,
+                             });
+                         }
+                         catch (error) {
+                             chunkIndexErrors.push(index);
+                         }
+                     });
+                 });
+                 const results = await Promise.allSettled(tasks);
+                 for (let i = 0; i < results.length; i++) {
+                     if (results[i].status === "fulfilled") {
+                         const responseUploadChild = results[i].value.responseUpload;
+                         if (responseUploadChild) {
+                             responseUploadChilds.push(responseUploadChild);
+                         }
+                     }
+                 }
+                 return { chunkIndexErrors, responseUploadChilds };
+             }
+             catch (error) {
+                 // Cleanup on error or cancellation
+                 throw error;
+             }
+             finally {
+                 // Always cleanup workers
+                 this.workersStatus.forEach((workerStatus) => {
+                     workerStatus.worker.terminate();
+                 });
+                 this.workersStatus = [];
+             }
+         };
+         let currentRetry = 0;
+         const eTags = {};
+         let chunkIndexErrors = Array.from({ length: numberOfChunks }, (_, index) => index);
+         let responseUploadChilds = [];
+         do {
+             if (cancellationToken.signal.aborted) {
+                 throw new Error("Upload cancelled");
+             }
+             const responseUploadChunks = await uploadChunks(chunkIndexErrors);
+             chunkIndexErrors = responseUploadChunks.chunkIndexErrors;
+             responseUploadChilds = responseUploadChilds.concat(responseUploadChunks.responseUploadChilds);
+             currentRetry++;
+         } while (currentRetry < ConfigFileRules.MaxRetry &&
+             chunkIndexErrors.length > 0);
+         if (chunkIndexErrors.length > 0) {
+             return new Promise((resolve, reject) => {
+                 reject(`Fail to upload chunks ${chunkIndexErrors} for file ${fileName}`);
+             });
+         }
+         for (let i = 0; i < responseUploadChilds.length; i++) {
+             const responseUploadChild = responseUploadChilds[i];
+             Object.defineProperty(eTags, responseUploadChild.partNumber, {
+                 value: responseUploadChild.eTag,
+                 enumerable: true,
+             });
+         }
+         var completeResponse = await service.completeMultipartUpload(bucketName, fileName, initUpload.uploadId, eTags, this.accessToken, cancellationToken);
+         onProgress?.(fileSize);
+         if (!completeResponse) {
+             throw new Error(`Fail to complete multipart upload for file ${fileName}`);
+         }
+         const objectDetail = await service.getObjectDetail(bucketName, fileName, this.accessToken);
+         if (!objectDetail) {
+             throw new Error(`Fail to get object detail for file ${fileName}`);
+         }
+         return {
+             bucketName: bucketName,
+             fileName: fileName,
+             contentLength: fileSize,
+             versionFile: objectDetail.versionId,
+             dateVersionFile: objectDetail.lastModified,
+         };
+     }
+     async uploadFile(bucketName, file, cancellationToken, onProgress, maxWebWorker, prefix, initUpload, customFileName, worker, dataUploadId) {
+         if (!this.accessToken)
+             return new Promise((resolve, reject) => {
+                 reject("Access token is required");
+             });
+         // Check cancellation before starting
+         if (cancellationToken.signal.aborted) {
+             return new Promise((resolve, reject) => {
+                 reject("Upload cancelled");
+             });
+         }
+         const fileSize = file.size;
+         const fileName = prefix && prefix.length > 0
+             ? `${prefix.at(prefix.length - 1) === "/" ? prefix : prefix + "/"}${customFileName ?? file.name}`
+             : customFileName ?? file.name;
+         maxWebWorker = maxWebWorker ? maxWebWorker : ConfigFileRules.MaxWebWorker;
+         var service = new ServiceIntegration();
+         await service.validateFileSize(fileName, fileSize);
+         // Handle small file upload
+         if (fileSize <= ConfigFileRules.ChunkSize) {
+             const buffer = await readFileDataAsBase64(file);
+             const resUploadSmallFile = await service.uploadSmallFile(bucketName, fileName, buffer, this.accessToken, cancellationToken, onProgress, undefined, true);
+             onProgress?.(buffer.length);
+             return resUploadSmallFile;
+         }
+         // Init multi upload
+         if (!initUpload) {
+             initUpload = await service.initMultiPartUpload(bucketName, fileName, this.accessToken, undefined, cancellationToken);
+             if (!initUpload) {
+                 return new Promise((resolve, reject) => {
+                     reject(`Fail to initiate multipart upload for file ${fileName}`);
+                 });
+             }
+         }
+         var numberOfChunks = service.calculateNumberOfChunks(fileSize);
+         const urls = await service.generatePresignedUrls(bucketName, fileName, initUpload, numberOfChunks, this.accessToken, cancellationToken);
+         if (!urls) {
+             return new Promise((resolve, reject) => {
+                 reject(`Fail to generate presigned urls for file ${fileName}`);
+             });
+         }
+         const eTags = {};
+         const maxLimit = numberOfChunks < ConfigFileRules.LimitFile
+             ? numberOfChunks
+             : ConfigFileRules.LimitFile;
+         const limit = pLimit(maxLimit);
+         // init worker
+         const workerPorts = [];
+         const maxWorker = numberOfChunks < maxWebWorker ? numberOfChunks : maxWebWorker;
+         // Create workers
+         for (let index = 0; index < maxWorker; index++) {
+             const channel = new MessageChannel();
+             const worker = new Worker(new URL("workerUploadChildFile", import.meta.url));
+             worker.postMessage({ port: channel.port2 }, [channel.port2]);
+             workerPorts.push(channel.port1);
+             this.workersStatus.push({
+                 worker: worker,
+                 taskNumber: 0,
+                 id: index,
+                 taskHandle: 0,
+             });
+         }
+         if (worker) {
+             worker.postMessage({ workerPorts }, workerPorts);
+         }
+         try {
+             const tasks = Array.from({ length: numberOfChunks }, (_, index) => {
+                 return limit(async () => {
+                     // Check cancellation before each chunk
+                     if (cancellationToken.signal.aborted) {
+                         throw new Error("Upload cancelled");
+                     }
+                     const chunkStart = index * ConfigFileRules.ChunkSize;
+                     const chunkEnd = Math.min(chunkStart + ConfigFileRules.ChunkSize, fileSize);
+                     const chunk = file.slice(chunkStart, chunkEnd);
+                     const arrayBuffer = await chunk.arrayBuffer();
+                     var url = urls[index];
+                     return this.runOnWorker({
+                         currentPresignUrl: { url: url, headers: initUpload.headers },
+                         arrayBuffer: arrayBuffer,
+                         partNumber: index + 1,
+                         onProgress: onProgress,
+                         requestId: `${index + 1}-${Date.now()}`,
+                         cancellationToken: cancellationToken,
+                         dataUploadId: dataUploadId,
+                         fileName: file.name,
+                     });
+                 });
+             });
+             const results = await Promise.all(tasks);
+             for (let i = 0; i < results.length; i++) {
+                 const responseUploadChild = results[i].responseUpload;
+                 if (!responseUploadChild) {
+                     throw new Error(`Fail to upload chunk ${i} of file ${fileName}`);
+                 }
+                 Object.defineProperty(eTags, responseUploadChild.partNumber, {
+                     value: responseUploadChild.eTag,
+                     enumerable: true,
+                 });
+             }
+             var completeResponse = await service.completeMultipartUpload(bucketName, fileName, initUpload.uploadId, eTags, this.accessToken, cancellationToken);
+             onProgress?.(fileSize);
+             if (!completeResponse) {
+                 throw new Error(`Fail to complete multipart upload for file ${fileName}`);
+             }
+             const objectDetail = await service.getObjectDetail(bucketName, fileName, this.accessToken);
+             if (!objectDetail) {
+                 throw new Error(`Fail to get object detail for file ${fileName}`);
+             }
+             return {
+                 bucketName: bucketName,
+                 fileName: fileName,
+                 contentLength: fileSize,
+                 versionFile: objectDetail.versionId,
+                 dateVersionFile: objectDetail.lastModified,
+             };
+         }
+         catch (error) {
+             // Cleanup on error or cancellation
+             throw error;
+         }
+         finally {
+             // Always cleanup workers
+             this.workersStatus.forEach((workerStatus) => {
+                 workerStatus.worker.terminate();
+             });
+             this.workersStatus = [];
+         }
+     }
+     async runOnWorker(data) {
+         const workerStatus = this.workersStatus.find((x) => x.taskNumber < ConfigFileRules.TaskCurrently);
+         if (!workerStatus)
+             return Promise.reject("No worker available");
+         // increase task number
+         workerStatus.taskNumber++;
+         const worker = workerStatus.worker;
+         return new Promise((resolve, reject) => {
+             if (typeof Worker !== "undefined") {
+                 worker.addEventListener("message", (message) => {
+                     if (message.data.requestId !== data.requestId)
+                         return;
+                     workerStatus.taskNumber--;
+                     if (message.data && message.data.status === StatusWorker.Success) {
+                         data.onProgress && data.onProgress(message.data.contentLength);
+                         resolve(message.data);
+                     }
+                     else {
+                         reject(message.data);
+                     }
+                 });
+                 worker.addEventListener("error", (error) => {
+                     workerStatus.taskNumber--;
+                     console.error(error);
+                     reject(error);
+                 });
+                 // Add abort handler to worker
+                 const abortHandler = () => {
+                     worker.postMessage({
+                         type: "abort",
+                         requestId: data.requestId,
+                     });
+                 };
+                 data.cancellationToken?.signal.addEventListener("abort", abortHandler);
+                 const newData = {
+                     currentPresignUrl: data.currentPresignUrl,
+                     arrayBuffer: data.arrayBuffer,
+                     partNumber: data.partNumber,
+                     requestId: data.requestId,
+                     dataUploadId: data.dataUploadId,
+                     fileName: data.fileName,
+                 };
+                 worker.postMessage(newData, [data.arrayBuffer]);
+                 // Cleanup abort handler when promise resolves or rejects
+                 return () => {
+                     data.cancellationToken?.signal.removeEventListener("abort", abortHandler);
+                 };
+             }
+             else {
+                 console.error("Web Workers are not supported in this environment.");
+                 reject("Web Workers are not supported in this environment.");
+             }
+         });
+     }
+     //#endregion
+     //#region Upload folder
+     async uploadFolderAferInit(filesInitiated, cancellationToken, worker, dataUploadId, onProgress, maxWebWorker) {
+         const bucketName = filesInitiated[0].path.split("/").at(0);
+         if (!bucketName)
+             return new Promise((resolve, reject) => {
+                 reject("Path is not valid");
+             });
+         if (cancellationToken?.signal.aborted) {
+             return new Promise((resolve, reject) => {
+                 reject("Upload cancelled");
+             });
+         }
+         const abortHandler = () => {
+             // Cleanup workers
+             this.workersStatus.forEach((workerStatus) => {
+                 workerStatus.worker.terminate();
+             });
+             this.workersStatus = [];
+         };
+         cancellationToken?.signal.addEventListener("abort", abortHandler);
+         try {
+             const result = await this.uploadFolder(bucketName, filesInitiated.map((x) => ({
+                 file: x.file,
+                 fileKey: x.fileKey,
+                 customFileName: x.path.split("/").slice(1).join("/"),
+                 initUpload: x.initUpload,
+                 isGetInfo: x.isGetInfo,
+             })), cancellationToken || new AbortController(), worker, dataUploadId, onProgress, maxWebWorker);
+             // Cleanup
+             cancellationToken?.signal.removeEventListener("abort", abortHandler);
+             return result;
+         }
+         catch (error) {
+             // Cleanup
+             cancellationToken?.signal.removeEventListener("abort", abortHandler);
+             if (error instanceof Error && error.name === "AbortError") {
+                 return new Promise((resolve, reject) => {
+                     reject({
+                         errorCode: "ClientAbort",
+                         message: "Upload cancelled",
+                     });
+                 });
+             }
+             throw error;
+         }
+     }
+     async uploadFolder(bucketName, files, cancellationToken, worker, dataUploadId, onProgress, maxWebWorker, prefix) {
+         try {
+             if (!this.accessToken)
+                 return new Promise((resolve, reject) => {
+                     reject("Access token is required");
+                 });
+             // Check cancellation before starting
+             if (cancellationToken.signal.aborted) {
+                 return new Promise((resolve, reject) => {
+                     reject({
+                         errorCode: "ClientAbort",
+                         message: "Upload cancelled",
+                     });
+                 });
+             }
+             prefix =
+                 prefix && prefix.length > 0
+                     ? `${prefix.at(prefix.length - 1) === "/" ? prefix : prefix + "/"}`
+                     : "";
+             maxWebWorker = maxWebWorker ? maxWebWorker : ConfigFileRules.MaxWebWorker;
+             let percentOneBuffer = 100 / files.length;
+             let currentPercent = 0;
+             let currentRetry = 0;
+             const percentOfChild = ({ fileKey, versionFile, }) => {
+                 currentPercent += percentOneBuffer;
+                 onProgress?.({
+                     percentCompleted: Math.floor(currentPercent),
+                     fileKey: fileKey,
+                     versionFile: versionFile,
+                 });
+             };
+             let filesUploadFail = files;
+             let filesUploadSuccess = [];
+             do {
+                 // Check cancellation before each retry
+                 if (cancellationToken.signal.aborted) {
+                     throw new Error("Upload cancelled");
+                 }
+                 const resUpload = await this.uploadMultiFileRetry(bucketName, filesUploadFail, this.accessToken, cancellationToken, maxWebWorker, this.reFreshToken, percentOfChild, prefix, worker, dataUploadId);
+                 currentRetry++;
+                 if (resUpload) {
+                     filesUploadFail = resUpload.filesUploadFail ?? [];
+                     filesUploadSuccess = filesUploadSuccess.concat(resUpload.filesUploadSuccess ?? []);
+                 }
+                 else {
+                     return new Promise((resolve, reject) => {
+                         reject("Fail to upload files");
+                     });
+                 }
+             } while (currentRetry < ConfigFileRules.MaxRetry &&
+                 filesUploadFail &&
+                 filesUploadFail.length > 0);
+             // get list file upload fail
+             if (!filesUploadFail)
+                 return new Promise((resolve, reject) => {
+                     reject("Fail to upload files");
+                 });
+             const filesNameUploadFail = filesUploadFail.map((file) => {
+                 return file.customFileName ?? file.file.webkitRelativePath;
+             });
+             // if(worker) {
+             //     worker.postMessage({
+             //         type: "progressUpload",
+             //         progress: {
+             //             dataUploadId: dataUploadId,
+             //             percentCompleted: 100,
+             //             fileKey: "",
+             //             versionFile: "",
+             //         },
+             //     });
+             // }
+             return new Promise((resolve, reject) => {
+                 if (filesNameUploadFail.length === files.length)
+                     reject("Fail to upload all files");
+                 else
+                     resolve({
+                         uploadFail: {
+                             totalFile: filesNameUploadFail.length,
+                             filesName: filesNameUploadFail,
+                         },
+                         uploadSuccess: {
+                             totalFile: files.length - filesNameUploadFail.length,
+                             files: filesUploadSuccess.map((file) => ({
+                                 fileName: file.customFileName ?? file.file.webkitRelativePath,
+                                 versionFile: file.versionFile,
+                             })),
+                         },
+                     });
+             });
+         }
+         catch (error) {
+             console.error("uploadMultiFile", error);
+             if (error instanceof Error && error.message === "Upload cancelled") {
+                 return new Promise((resolve, reject) => {
+                     reject({
+                         errorCode: "ClientAbort",
+                         message: "Upload cancelled",
+                     });
+                 });
+             }
+             return new Promise((resolve, reject) => {
+                 reject(error);
+             });
+         }
+     }
+     async uploadMultiFileRetry(bucketName, files, accessToken, cancellationToken, maxWebWorker, reFreshToken, percentOfChild, prefix, worker, dataUploadId) {
+         try {
+             if (cancellationToken.signal.aborted) {
+                 throw new Error("Upload cancelled");
+             }
+             const service = new ServiceIntegration();
+             const batchSize = 2000;
+             const triggerNextBatchAt = 1000;
+             let currentBatchIndex = 0;
+             let filesUploadFail = [];
+             let filesUploadSuccess = [];
+             // Atomic counter for uploaded files
+             let isFetchingNextBatch = false;
+             // Function to fetch presignUrls for a batch
+             const fetchPresignUrlsForBatch = async (batchFiles) => {
+                 const multiPresignUrl = await service.generateMultiPresignedUrl(bucketName, batchFiles
+                     .filter((x) => x.file.size <= ConfigFileRules.ChunkSize)
+                     .map((file) => {
+                     const customFileName = file.customFileName ?? file.file.webkitRelativePath;
+                     return `${prefix ?? ""}${customFileName.startsWith("/")
+                         ? customFileName.slice(1)
+                         : customFileName}`;
+                 }), accessToken, cancellationToken);
+                 if (multiPresignUrl) {
+                     batchFiles.forEach((file) => {
+                         const customFileName = file.customFileName ?? file.file.webkitRelativePath;
+                         const fileName = `${prefix ?? ""}${customFileName.startsWith("/")
+                             ? customFileName.slice(1)
+                             : customFileName}`;
+                         const preSignUrl = multiPresignUrl.urls[fileName];
+                         if (preSignUrl) {
+                             file.preSignUrl = {
+                                 url: preSignUrl,
+                                 headers: multiPresignUrl.headers,
+                             };
+                         }
+                     });
+                 }
+                 return multiPresignUrl;
+             };
+             // Function to prepare next batch
+             const prepareNextBatch = async () => {
+                 if (isFetchingNextBatch ||
+                     !((currentBatchIndex + 1) * batchSize < files.length))
+                     return;
+                 isFetchingNextBatch = true;
+                 try {
+                     const nextBatchStart = (currentBatchIndex + 1) * batchSize;
+                     const nextBatchEnd = Math.min((currentBatchIndex + 2) * batchSize, files.length);
+                     const nextBatchFiles = files.slice(nextBatchStart, nextBatchEnd);
+                     await fetchPresignUrlsForBatch(nextBatchFiles);
+                 }
+                 finally {
+                     isFetchingNextBatch = false;
+                 }
+             };
+             // init worker
+             const workerPorts = [];
+             const maxWorker = files.length < maxWebWorker ? files.length : maxWebWorker;
+             for (let index = 0; index < maxWorker; index++) {
+                 const channel = new MessageChannel();
+                 const worker = new Worker(new URL("workerUploadSingleFile", import.meta.url));
+                 worker.postMessage({ port: channel.port2 }, [channel.port2]);
+                 workerPorts.push(channel.port1);
+                 this.workersStatus.push({
+                     worker: worker,
+                     taskNumber: 0,
+                     id: index,
+                     taskHandle: 0,
+                 });
+             }
+             if (worker) {
+                 worker.postMessage({ workerPorts }, workerPorts);
+             }
+             while (currentBatchIndex * batchSize < files.length) {
+                 const batchStart = currentBatchIndex * batchSize;
+                 const batchEnd = Math.min((currentBatchIndex + 1) * batchSize, files.length);
+                 const batchFiles = files.slice(batchStart, batchEnd);
+                 // Get presignUrls for current batch
+                 await fetchPresignUrlsForBatch(batchFiles);
+                 const maxLimit = batchFiles.length < ConfigFileRules.LimitFile
+                     ? batchFiles.length
+                     : ConfigFileRules.LimitFile;
+                 const limit = pLimit(maxLimit);
+                 const tasks = batchFiles.map((file, idx) => limit(async () => {
+                     if (cancellationToken.signal.aborted)
+                         throw new Error("Upload cancelled");
+                     try {
+                         let arrayBuffer = await file.file.arrayBuffer();
+                         const res = await this.uploadSingleFile({
+                             ...file,
+                             bucketName,
+                             arrayBuffer: arrayBuffer,
+                             index: idx,
+                             requestId: `${idx}-${Date.now()}`,
+                             fullName: `${prefix ?? ""}${(file.customFileName ?? file.file.webkitRelativePath).startsWith("/")
+                                 ? (file.customFileName ?? file.file.webkitRelativePath).slice(1)
+                                 : file.customFileName ?? file.file.webkitRelativePath}`,
+                             accessToken,
+                             percentOfChild,
+                             cancellationToken,
+                             dataUploadId,
+                         });
+                         arrayBuffer = null;
+                         filesUploadSuccess.push({
+                             ...file,
+                             versionFile: res.responseUpload?.versionFile,
+                         });
+                         if (idx === triggerNextBatchAt) {
+                             prepareNextBatch();
+                         }
+                     }
+                     catch (err) {
+                         filesUploadFail.push(file);
+                     }
+                 }));
+                 await Promise.allSettled(tasks);
+                 currentBatchIndex++;
+             }
+             return {
+                 filesUploadFail,
+                 filesUploadSuccess,
+             };
+         }
+         catch (error) {
+             console.error("uploadMultiFileRetry", error);
+             // Cleanup workers on error
+             this.workersStatus.forEach((workerStatus) => {
+                 workerStatus.worker.terminate();
+             });
+             this.workersStatus = [];
+             if (error instanceof Error && error.message === "Upload cancelled") {
+                 throw error; // Re-throw cancel error to be handled by caller
+             }
+             return undefined;
+         }
+     }
+     uploadSingleFile = async (request) => {
+         const workerStatus = this.workersStatus.find((x) => x.taskNumber < ConfigFileRules.TaskCurrently);
+         if (!workerStatus)
+             return Promise.reject("No worker available");
+         // Check cancellation before starting
+         if (request.cancellationToken?.signal.aborted) {
+             return Promise.reject(new Error("Upload cancelled"));
+         }
+         // increase task number
+         workerStatus.taskNumber++;
+         const worker = workerStatus.worker;
+         return new Promise((resolve, reject) => {
+             if (typeof Worker !== "undefined") {
+                 worker.addEventListener("message", (message) => {
+                     if (message.data.requestId !== request.requestId)
+                         return;
+                     workerStatus.taskNumber--;
+                     if (message.data && message.data.status === StatusWorker.Success) {
+                         request.percentOfChild &&
+                             request.percentOfChild({
+                                 fileKey: message.data.fileKey,
+                                 versionFile: message.data.responseUpload.versionFile,
+                             });
+                         resolve(message.data);
+                     }
+                     else {
+                         reject(message.data);
+                     }
+                 });
+                 worker.addEventListener("error", (error) => {
+                     workerStatus.taskNumber--;
+                     console.error(error);
+                     reject(error);
+                 });
+                 // Add abort handler to worker
+                 const abortHandler = () => {
+                     worker.postMessage({
+                         type: "abort",
+                         requestId: request.requestId,
+                     });
+                     worker.terminate();
+                     reject(new Error("Upload cancelled"));
+                 };
+                 request.cancellationToken?.signal.addEventListener("abort", abortHandler);
+                 const newData = {
+                     bucketName: request.bucketName,
+                     fullName: request.fullName,
+                     arrayBuffer: request.arrayBuffer,
+                     requestId: request.requestId,
+                     workerId: workerStatus.id,
+                     accessToken: request.accessToken,
+                     fileKey: request.fileKey,
+                     initUpload: request.initUpload,
+                     isGetInfo: request.isGetInfo,
+                     preSignUrl: request.preSignUrl,
+                     dataUploadId: request.dataUploadId,
+                 };
+                 worker.postMessage(newData, [request.arrayBuffer]);
+                 // Cleanup abort handler when promise resolves or rejects
+                 return () => {
+                     request.cancellationToken?.signal.removeEventListener("abort", abortHandler);
+                 };
+             }
+             else {
+                 console.error("Web Workers are not supported in this environment.");
+                 reject("Web Workers are not supported in this environment.");
+             }
+         });
+     };
+     //#endregion
+     //#region Download file
+     // public async downloadFile(
+     //     bucketName: string,
+     //     fileName: string,
+     //     cancellationToken: AbortController,
+     //     onProgress?: (percentCompleted: number) => void,
+     //     maxWebWorker?: number
+     // ) {
+     //     try {
+     //         maxWebWorker = maxWebWorker ? maxWebWorker : ConfigFileRules.MaxWebWorker;
+     //         var serviceIntegrate = new ServiceIntegration();
+     //         const file = await serviceIntegrate.getObjectDetail(
+     //             bucketName,
+     //             fileName,
+     //             this.accessToken
+     //         );
+     //         if (!file || !file.contentLength || file.contentLength === 0) {
+     //             return new Promise((resolve, reject) => {
+     //                 reject("File not found");
+     //             });
+     //         }
+     //         const url = await serviceIntegrate.getUrlToDownload(
+     //             bucketName,
+     //             fileName,
+     //             this.accessToken
+     //         );
+     //         if (!url)
+     //             return new Promise((resolve, reject) => {
+     //                 reject("Fail to get url download");
+     //             });
+     //         const numberOfChunks = Math.ceil(
+     //             file.contentLength / ConfigFileRules.ChunkSize
+     //         );
+     //         const maxWorker = Math.min(numberOfChunks, maxWebWorker);
+     //         const maxLimit = Math.min(numberOfChunks, ConfigFileRules.LimitFile);
+     //         const limit = pLimit(maxLimit);
+     //         let start = 0;
+     //         const totalSize = file.contentLength;
+     //         for (let index = 0; index < maxWorker; index++) {
+     //             this.workersStatus.push({
+     //                 worker: new Worker(
+     //                     new URL("workerDownloadSingleFile", import.meta.url)
+     //                 ),
+     //                 taskNumber: 0,
+     //                 id: index,
+     //                 taskHandle: 0,
+     //             });
+     //         }
+     //         const newFileName = fileName.split("/").at(-1);
+     //         const fileWriter = streamSaver.createWriteStream(`${newFileName}`);
+     //         const writer = fileWriter.getWriter();
+     //         const chunksQueue: (Uint8Array | undefined)[] = [];
+     //         let nextChunkIndex = 0;
+     //         const handleChunk = async (chunk: Uint8Array, index: number) => {
+     //             chunksQueue[index] = chunk;
+     //             while (chunksQueue[nextChunkIndex]) {
+     //                 nextChunkIndex++; // hack duplicate chunk :D
+     //                 await writer.write(chunksQueue[nextChunkIndex - 1]);
+     //                 chunksQueue[nextChunkIndex - 1] = undefined; // Clear memory
+     //             }
+     //         };
+     //         const chunksData: any[] = [];
+     //         Array.from({ length: numberOfChunks }, (_, index) => {
+     //             const end = Math.min(
+     //                 start + ConfigFileRules.ChunkSize - 1,
+     //                 totalSize - 1
+     //             );
+     //             chunksData.push({
+     //                 url: url,
+     //                 start: start,
+     //                 end: end,
+     //                 index: index,
+     //                 requestId: `${index + 1}-${Date.now()}`,
+     //             });
+     //             start += ConfigFileRules.ChunkSize;
+     //         });
+     //         // hack to client chose location to save file
+     //         const firstChunk = chunksData.shift();
+     //         const firstData = await this.downloadSingleLargeFile(
+     //             firstChunk.index,
+     //             firstChunk.url,
+     //             firstChunk.start,
+     //             firstChunk.end,
+     //             firstChunk.requestId
+     //         );
+     //         await handleChunk(new Uint8Array(firstData.chunk), firstData.index);
+     //         const tasks = chunksData.map((data) => {
+     //             return limit(async () => {
+     //                 let task = await this.downloadSingleLargeFile(
+     //                     data.index,
+     //                     data.url,
+     //                     data.start,
+     //                     data.end,
+     //                     data.requestId
+     //                 );
+     //                 await handleChunk(new Uint8Array(task.chunk), task.index);
+     //                 return task;
+     //             });
+     //         });
+     //         await Promise.all(tasks);
+     //         await writer.close();
+     //         this.workersStatus.forEach((workerStatus) => {
+     //             workerStatus.worker.terminate();
+     //         });
+     //         this.workersStatus = [];
+     //         return new Promise((resolve, reject) => {
+     //             resolve({
+     //                 downloadSuccess: {
+     //                     totalFile: numberOfChunks,
+     //                     blobUrl: fileWriter,
+     //                     fileName: `${newFileName}`,
+     //                 },
+     //             });
+     //         });
+     //     } catch (error) {
+     //         console.error("downloadLargeFile", error);
+     //         return new Promise((resolve, reject) => {
+     //             reject(error);
+     //         });
+     //     }
+     // }
+     async downloadSingleLargeFile(index, url, start, end, requestId) {
+         try {
+             const workerStatus = this.workersStatus.find((x) => x.taskNumber < ConfigFileRules.TaskCurrently);
+             if (!workerStatus)
+                 return Promise.reject("No worker available");
+             // increase task number
+             workerStatus.taskNumber++;
+             const worker = workerStatus.worker;
+             return new Promise((resolve, reject) => {
+                 if (typeof Worker !== "undefined") {
+                     worker.addEventListener("message", async (message) => {
+                         if (message.data.requestId !== requestId)
+                             return;
+                         workerStatus.taskNumber--;
+                         if (message.data && message.data.status === StatusWorker.Success) {
+                             resolve(message.data);
+                         }
+                         else {
+                             reject(message.data);
+                         }
+                     });
+                     worker.addEventListener("error", (error) => {
+                         workerStatus.taskNumber--;
+                         console.error(error);
+                         reject(error);
+                     });
+                     const newData = {
+                         index: index,
+                         url: url,
+                         start: start,
+                         end: end,
+                         requestId: requestId,
+                     };
+                     worker.postMessage(newData);
+                 }
+                 else {
+                     console.error("Web Workers are not supported in this environment.");
+                     reject("Web Workers are not supported in this environment.");
+                 }
+             });
+         }
+         catch (error) {
+             console.error(`Error when download file: ${error}`);
+             throw error;
+         }
+     }
+     //#endregion
+     //#region Translate
+     async translateFile(request) {
+         return await this.baseAction((accessToken) => {
+             return TranslateFile(accessToken, request);
+         });
+     }
+     async getStatusTranslate(request) {
+         return await this.baseAction((accessToken) => {
+             return GetStatusTranslate(accessToken, request);
+         });
+     }
+     async getStatusTranslateFile(request) {
+         return await this.baseAction((accessToken) => {
+             return GetStatusTranslateFile(accessToken, request);
+         });
+     }
+     async getEPSGRegionCode() {
+         return await this.baseAction((accessToken) => {
+             return GetEPSGRegionCode(accessToken);
+         });
+     }
+     async getFileTileSet(translateId) {
+         return await this.baseAction((accessToken) => {
+             return GetFileTileSet(accessToken, translateId);
+         });
+     }
+     //#endregion
+     //#region Authentication
+     async getToken(clientId, clientSecret, scopes) {
+         return await this.baseActionAnonymous(() => {
+             return GetToken(clientId, clientSecret, scopes);
+         });
+     }
+     //#endregion
+     async delay(ms) {
+         return new Promise((resolve) => setTimeout(resolve, ms));
+     }
+ }
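
For orientation, a minimal usage sketch against this build (a sketch, not taken from the package: it assumes dist/index re-exports DPUClient, and the token strings, base URL, bucket name, and File object below are placeholders):

import { DPUClient } from "dpu-cloud-sdk";

// Placeholder credentials; a real token can be fetched with
// client.getToken(clientId, clientSecret, scopes), which wraps GetToken.
const client = new DPUClient(
    "<access-token>",
    async () => "<refreshed-access-token>", // reFreshToken, awaited on ApiStatus.Unauthorized
    3,                                      // maxRetry (falls back to ConfigFileRules.MaxRetry)
    "https://dpu.example.com"               // optional; forwarded to dpuConfig.setBaseURL
);

const controller = new AbortController();
const result = await client.uploadFile(
    "my-bucket",                  // bucketName
    someFile,                     // a browser File; sizes <= ConfigFileRules.ChunkSize go
                                  // through uploadSmallFile, larger ones through workers
    controller,                   // cancellationToken (AbortController)
    (bytes) => console.log(bytes) // onProgress
);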
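
Because baseAction retries on ApiStatus.Unauthorized by awaiting this.reFreshToken(), a client-credentials refresh can be wired through getToken. A sketch, assuming the GetToken response exposes an accessToken field (the actual shape is declared in dist/models/ResponseModel.d.ts):

const anonymous = new DPUClient(undefined, undefined, 1);
const refresh = async () => {
    // getToken runs through baseActionAnonymous, so no access token is needed here.
    const token = await anonymous.getToken("<client-id>", "<client-secret>", ["<scope>"]);
    return token.accessToken; // assumed field name
};
const client = new DPUClient(await refresh(), refresh, 3);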
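
The compress-and-download region follows a request/poll/cancel pattern. A hypothetical polling loop (requestId and the status shape are assumptions; the real payloads are declared in dist/models/ResponseModel.d.ts):

const res = await client.compressFolder("my-bucket", "projects/site-a/");
const requestId = res.requestId; // assumed field name
let done = false;
while (!done) {
    await client.delay(2000); // the SDK's own helper
    const statuses = await client.getStatusCompress([requestId]);
    done = statuses.every((s) => s.isCompleted); // assumed shape
}
// A pending request can be abandoned with:
// await client.cancelDownload(requestId);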