dpu-cloud-sdk 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (51)
  1. package/.env.development +1 -0
  2. package/.env.production +1 -0
  3. package/dist/DPUClient.d.ts +83 -0
  4. package/dist/DPUClient.js +1043 -0
  5. package/dist/ServiceIntegration.d.ts +20 -0
  6. package/dist/ServiceIntegration.js +506 -0
  7. package/dist/api/auth.d.ts +3 -0
  8. package/dist/api/auth.js +10 -0
  9. package/dist/api/compress.d.ts +4 -0
  10. package/dist/api/compress.js +16 -0
  11. package/dist/api/translate.d.ts +8 -0
  12. package/dist/api/translate.js +38 -0
  13. package/dist/index.d.ts +4 -0
  14. package/dist/index.js +4 -0
  15. package/dist/models/RequestModel.d.ts +33 -0
  16. package/dist/models/RequestModel.js +2 -0
  17. package/dist/models/ResponseModel.d.ts +99 -0
  18. package/dist/models/ResponseModel.js +1 -0
  19. package/dist/utils/Config.d.ts +32 -0
  20. package/dist/utils/Config.js +44 -0
  21. package/dist/utils/Constants.d.ts +48 -0
  22. package/dist/utils/Constants.js +55 -0
  23. package/dist/utils/Enum.d.ts +27 -0
  24. package/dist/utils/Enum.js +30 -0
  25. package/dist/utils/Helper.d.ts +4 -0
  26. package/dist/utils/Helper.js +47 -0
  27. package/dist/workerDownloadSingleFile.d.ts +1 -0
  28. package/dist/workerDownloadSingleFile.js +35 -0
  29. package/dist/workerUploadChildFile.d.ts +1 -0
  30. package/dist/workerUploadChildFile.js +82 -0
  31. package/dist/workerUploadSingleFile.d.ts +1 -0
  32. package/dist/workerUploadSingleFile.js +93 -0
  33. package/dpubim-service-1.1.28.tgz +0 -0
  34. package/package.json +33 -0
  35. package/src/DPUClient.ts +1505 -0
  36. package/src/ServiceIntegration.ts +710 -0
  37. package/src/api/auth.ts +18 -0
  38. package/src/api/compress.ts +36 -0
  39. package/src/api/translate.ts +94 -0
  40. package/src/index.ts +4 -0
  41. package/src/models/RequestModel.ts +44 -0
  42. package/src/models/ResponseModel.ts +110 -0
  43. package/src/utils/Config.ts +59 -0
  44. package/src/utils/Constants.ts +61 -0
  45. package/src/utils/Enum.ts +29 -0
  46. package/src/utils/Helper.ts +57 -0
  47. package/src/workerDownloadSingleFile.ts +34 -0
  48. package/src/workerUploadChildFile.ts +85 -0
  49. package/src/workerUploadSingleFile.ts +123 -0
  50. package/tsconfig.json +108 -0
  51. package/webpack.config.js +43 -0
@@ -0,0 +1,1505 @@
1
+ import pLimit from "p-limit";
2
+ import {
3
+ CancelDownload,
4
+ CompressFolder,
5
+ GetStatusCompress,
6
+ } from "./api/compress";
7
+ import {
8
+ BaseReponseModel,
9
+ CompressionResponse,
10
+ CompressStatus,
11
+ InitUploadResponse,
12
+ PresignURLResponse,
13
+ Token,
14
+ TranslateInfo,
15
+ TranslateStatusInfo,
16
+ } from "./models/ResponseModel";
17
+ import { ServiceIntegration } from "./ServiceIntegration";
18
+ import { ApiStatus, ConfigFileRules } from "./utils/Constants";
19
+ import { dpuConfig } from "./utils/Config";
20
+ import { chunkArray, readFileDataAsBase64 } from "./utils/Helper";
21
+ import { Scope, StatusWorker } from "./utils/Enum";
22
+ // import streamSaver from "streamsaver";
23
+ import {
24
+ GetEPSGRegionCode,
25
+ GetFileTileSet,
26
+ GetStatusTranslate,
27
+ GetStatusTranslateFile,
28
+ TranslateFile,
29
+ } from "./api/translate";
30
+ import {
31
+ GetStatusTranslateRequest,
32
+ TranslateRequest,
33
+ } from "./models/RequestModel";
34
+ import { GetToken } from "./api/auth";
35
+
36
+ interface ISingleFileUpload {
37
+ currentPresignUrl: PresignURLResponse;
38
+ arrayBuffer: ArrayBuffer;
39
+ partNumber: number;
40
+ onProgress?: (chunkUploaded: number) => void;
41
+ requestId: string;
42
+ cancellationToken?: AbortController;
43
+ dataUploadId?: string;
44
+ fileName?: string;
45
+ }
46
+
47
+ interface IMultiFileUpload {
48
+ bucketName: string;
49
+ arrayBuffer: ArrayBuffer;
50
+ index: number;
51
+ percentOfChild?: ({
52
+ fileKey,
53
+ versionFile,
54
+ }: {
55
+ fileKey: string;
56
+ versionFile?: string;
57
+ }) => void;
58
+ requestId: string;
59
+ fullName: string;
60
+ accessToken: string;
61
+ fileKey: string;
62
+ reGetAccessToken?: () => string;
63
+ initUpload?: InitUploadResponse;
64
+ isGetInfo?: boolean;
65
+ cancellationToken?: AbortController;
66
+ preSignUrl?: PresignURLResponse;
67
+ dataUploadId?: string;
68
+ }
69
+
70
+ interface WorkerStatus {
71
+ worker: Worker;
72
+ taskNumber: number;
73
+ id: number;
74
+ taskHandle: number;
75
+ }
76
+
77
+ type FileUploadType = {
78
+ file: File;
79
+ fileKey: string;
80
+ customFileName?: string;
81
+ initUpload?: InitUploadResponse;
82
+ isGetInfo?: boolean;
83
+ preSignUrl?: PresignURLResponse;
84
+ };
85
+
86
+ export class DPUClient {
87
+ private accessToken?: string;
88
+ private reFreshToken?: () => Promise<string>;
89
+ private maxRetry: number;
90
+ private workersStatus: WorkerStatus[] = [];
91
+
92
+ constructor(
93
+ accessToken?: string,
94
+ reFreshToken?: () => Promise<string>,
95
+ maxRetry?: number | undefined,
96
+ baseURL?: string
97
+ ) {
98
+ this.accessToken = accessToken;
99
+ this.reFreshToken = reFreshToken;
100
+ this.maxRetry = maxRetry || ConfigFileRules.MaxRetry;
101
+
102
+ // Set the base URL if provided
103
+ if (baseURL) {
104
+ dpuConfig.setBaseURL(baseURL);
105
+ }
106
+ }
107
+ private async baseAction<T>(
108
+ action: (accessToken: string) => Promise<BaseReponseModel<T>>
109
+ ): Promise<T> {
110
+ return new Promise(async (resolve, reject) => {
111
+ try {
112
+ if (this.accessToken) {
113
+ let currentRetry = 0;
114
+ let response: BaseReponseModel<T>;
115
+ do {
116
+ currentRetry++;
117
+ response = await action(this.accessToken);
118
+ if (response.statusCode === ApiStatus.Success)
119
+ resolve(response.data);
120
+ else if (
121
+ response.statusCode === ApiStatus.Unauthorized &&
122
+ this.reFreshToken
123
+ ) {
124
+ this.accessToken = await this.reFreshToken();
125
+ } else reject(response.message);
126
+ } while (
127
+ this.maxRetry > currentRetry &&
128
+ response.statusCode === ApiStatus.Unauthorized
129
+ );
130
+ } else reject("Access token is required");
131
+ } catch (error) {
132
+ reject(error);
133
+ }
134
+ });
135
+ }
136
+
137
+ private async baseActionAnonymous<T>(
138
+ action: () => Promise<BaseReponseModel<T>>
139
+ ): Promise<T> {
140
+ return new Promise(async (resolve, reject) => {
141
+ try {
142
+ let response: BaseReponseModel<T>;
143
+ response = await action();
144
+ if (response.statusCode === ApiStatus.Success) resolve(response.data);
145
+ else reject(response.message);
146
+ } catch (error) {
147
+ reject(error);
148
+ }
149
+ });
150
+ }
151
+
152
+ //#region Compress and Download Folder
153
+ public async compressFolder(
154
+ bucketName: string,
155
+ rootFolder?: string
156
+ ): Promise<CompressionResponse> {
157
+ return await this.baseAction<CompressionResponse>((accessToken: string) => {
158
+ return CompressFolder(accessToken, bucketName, rootFolder);
159
+ });
160
+ }
161
+
162
+ public async getStatusCompress(
163
+ requestIds: string[]
164
+ ): Promise<CompressStatus> {
165
+ return await this.baseAction<CompressStatus>((accessToken: string) => {
166
+ return GetStatusCompress(requestIds, accessToken);
167
+ });
168
+ }
169
+
170
+ public async cancelDownload(requestId: string): Promise<boolean> {
171
+ return await this.baseAction<boolean>((accessToken: string) => {
172
+ return CancelDownload(requestId, accessToken);
173
+ });
174
+ }
175
+ //#endregion
176
+ //#region Upload File
177
+ public async uploadFileAferInit(
178
+ path: string,
179
+ file: File,
180
+ initUpload?: InitUploadResponse,
181
+ cancellationToken?: AbortController,
182
+ onProgress?: (chunkUploaded: number) => void,
183
+ maxWebWorker?: number,
184
+ worker?: Worker,
185
+ dataUploadId?: string
186
+ ) {
187
+ const bucketName = path.split("/").at(0);
188
+ if (!bucketName)
189
+ return new Promise((resolve, reject) => {
190
+ reject("Path is not valid");
191
+ });
192
+ const customFileName = path.split("/").at(-1);
193
+ if (!customFileName)
194
+ return new Promise((resolve, reject) => {
195
+ reject("Path is not valid");
196
+ });
197
+ const prefix = path
198
+ .replace(bucketName + "/", "")
199
+ .replace(customFileName, "");
200
+ if (cancellationToken?.signal.aborted) {
201
+ return new Promise((resolve, reject) => {
202
+ reject("Upload cancelled");
203
+ });
204
+ }
205
+ const abortHandler = () => {
206
+ // Cleanup workers
207
+ this.workersStatus.forEach((workerStatus) => {
208
+ workerStatus.worker.terminate();
209
+ });
210
+ this.workersStatus = [];
211
+ };
212
+ cancellationToken?.signal.addEventListener("abort", abortHandler);
213
+ try {
214
+ const result = await this.uploadFileRetry(
215
+ bucketName,
216
+ file,
217
+ cancellationToken || new AbortController(),
218
+ onProgress,
219
+ maxWebWorker,
220
+ prefix,
221
+ initUpload,
222
+ customFileName,
223
+ worker,
224
+ dataUploadId
225
+ );
226
+ // Cleanup
227
+ cancellationToken?.signal.removeEventListener("abort", abortHandler);
228
+ return result;
229
+ } catch (error) {
230
+ // Cleanup
231
+ cancellationToken?.signal.removeEventListener("abort", abortHandler);
232
+ if (error instanceof Error && error.name === "AbortError") {
233
+ return new Promise((resolve, reject) => {
234
+ reject({
235
+ errorCode: "ClientAbort",
236
+ message: "Upload cancelled",
237
+ });
238
+ });
239
+ }
240
+
241
+ throw error;
242
+ }
243
+ }
244
+
245
+ public async uploadFileRetry(
246
+ bucketName: string,
247
+ file: File,
248
+ cancellationToken: AbortController,
249
+ onProgress?: (chunkUploaded: number) => void,
250
+ maxWebWorker?: number,
251
+ prefix?: string,
252
+ initUpload?: InitUploadResponse,
253
+ customFileName?: string,
254
+ worker?: Worker,
255
+ dataUploadId?: string
256
+ ) {
257
+ if (!this.accessToken)
258
+ return new Promise((resolve, reject) => {
259
+ reject("Access token is required");
260
+ });
261
+
262
+ // Check cancellation before starting
263
+ if (cancellationToken.signal.aborted) {
264
+ return new Promise((resolve, reject) => {
265
+ reject("Upload cancelled");
266
+ });
267
+ }
268
+
269
+ const fileSize = file.size;
270
+ const fileName =
271
+ prefix && prefix.length > 0
272
+ ? `${prefix.at(prefix.length - 1) === "/" ? prefix : prefix + "/"}${
273
+ customFileName ?? file.name
274
+ }`
275
+ : customFileName ?? file.name;
276
+ maxWebWorker = maxWebWorker ? maxWebWorker : ConfigFileRules.MaxWebWorker;
277
+ var service = new ServiceIntegration();
278
+ await service.validateFileSize(fileName, fileSize);
279
+
280
+ // Handle small file upload
281
+ if (fileSize <= ConfigFileRules.ChunkSize) {
282
+ const buffer = await readFileDataAsBase64(file);
283
+ const resUploadSmallFile = await service.uploadSmallFile(
284
+ bucketName,
285
+ fileName,
286
+ buffer,
287
+ this.accessToken,
288
+ cancellationToken,
289
+ onProgress,
290
+ undefined,
291
+ true
292
+ );
293
+ onProgress?.(buffer.length);
294
+ return resUploadSmallFile;
295
+ }
296
+
297
+ // Init multi upload
298
+ if (!initUpload) {
299
+ initUpload = await service.initMultiPartUpload(
300
+ bucketName,
301
+ fileName,
302
+ this.accessToken,
303
+ undefined,
304
+ cancellationToken
305
+ );
306
+ if (!initUpload) {
307
+ return new Promise((resolve, reject) => {
308
+ reject(`Fail to initiate multipart upload for file ${fileName}`);
309
+ });
310
+ }
311
+ }
312
+
313
+ var numberOfChunks: number = service.calculateNumberOfChunks(fileSize);
314
+ const urls = await service.generatePresignedUrls(
315
+ bucketName,
316
+ fileName,
317
+ initUpload,
318
+ numberOfChunks,
319
+ this.accessToken,
320
+ cancellationToken
321
+ );
322
+ if (!urls) {
323
+ return new Promise((resolve, reject) => {
324
+ reject(`Fail to generate presigned urls for file ${fileName}`);
325
+ });
326
+ }
327
+
328
+ const uploadChunks = async (chunkIndexs: number[]) => {
329
+ let chunkIndexErrors: number[] = [];
330
+ let responseUploadChilds: any[] = [];
331
+ const maxLimit =
332
+ chunkIndexs.length < ConfigFileRules.LimitFile
333
+ ? chunkIndexs.length
334
+ : ConfigFileRules.LimitFile;
335
+ const limit = pLimit(maxLimit);
336
+ // init worker
337
+ const workerPorts: MessagePort[] = [];
338
+ const maxWorker =
339
+ chunkIndexs.length < maxWebWorker ? chunkIndexs.length : maxWebWorker;
340
+ // Create workers
341
+ for (let index = 0; index < maxWorker; index++) {
342
+ const channel = new MessageChannel();
343
+ const worker = new Worker(
344
+ new URL("workerUploadChildFile", import.meta.url)
345
+ );
346
+ worker.postMessage({ port: channel.port2 }, [channel.port2]);
347
+ workerPorts.push(channel.port1);
348
+ this.workersStatus.push({
349
+ worker: worker,
350
+ taskNumber: 0,
351
+ id: index,
352
+ taskHandle: 0,
353
+ });
354
+ }
355
+ if (worker) {
356
+ worker.postMessage({ workerPorts }, workerPorts);
357
+ }
358
+
359
+ try {
360
+ // const tasks = Array.from({ length: numberOfChunks }, (_, index) => {
361
+ const tasks = chunkIndexs.map((index) => {
362
+ return limit(async () => {
363
+ // Check cancellation before each chunk
364
+ if (cancellationToken.signal.aborted) {
365
+ throw new Error("Upload cancelled");
366
+ }
367
+ try {
368
+ const chunkStart = index * ConfigFileRules.ChunkSize;
369
+ const chunkEnd = Math.min(
370
+ chunkStart + ConfigFileRules.ChunkSize,
371
+ fileSize
372
+ );
373
+ const chunk = file.slice(chunkStart, chunkEnd);
374
+ const arrayBuffer = await chunk.arrayBuffer();
375
+ var url: string = urls[index];
376
+
377
+ return this.runOnWorker({
378
+ currentPresignUrl: { url: url, headers: initUpload.headers },
379
+ arrayBuffer: arrayBuffer,
380
+ partNumber: index + 1,
381
+ onProgress: onProgress,
382
+ requestId: `${index + 1}-${Date.now()}`,
383
+ cancellationToken: cancellationToken,
384
+ dataUploadId: dataUploadId,
385
+ fileName: file.name,
386
+ });
387
+ } catch (error) {
388
+ chunkIndexErrors.push(index);
389
+ }
390
+ });
391
+ });
392
+
393
+ const results = await Promise.allSettled(tasks);
394
+
395
+ for (let i = 0; i < results.length; i++) {
396
+ if (results[i].status === "fulfilled") {
397
+ const responseUploadChild = (
398
+ results[i] as PromiseFulfilledResult<any>
399
+ ).value.responseUpload;
400
+ if (responseUploadChild) {
401
+ responseUploadChilds.push(responseUploadChild);
402
+ }
403
+ }
404
+ }
405
+
406
+ return { chunkIndexErrors, responseUploadChilds };
407
+ } catch (error) {
408
+ // Cleanup on error or cancellation
409
+ throw error;
410
+ } finally {
411
+ // Always cleanup workers
412
+ this.workersStatus.forEach((workerStatus) => {
413
+ workerStatus.worker.terminate();
414
+ });
415
+ this.workersStatus = [];
416
+ }
417
+ };
418
+
419
+ let currentRetry = 0;
420
+ const eTags: any = {};
421
+ let chunkIndexErrors: number[] = Array.from(
422
+ { length: numberOfChunks },
423
+ (_, index) => index
424
+ );
425
+ let responseUploadChilds: any[] = [];
426
+ do {
427
+ if (cancellationToken.signal.aborted) {
428
+ throw new Error("Upload cancelled");
429
+ }
430
+ const responseUploadChunks = await uploadChunks(chunkIndexErrors);
431
+ chunkIndexErrors = responseUploadChunks.chunkIndexErrors;
432
+ responseUploadChilds = responseUploadChilds.concat(
433
+ responseUploadChunks.responseUploadChilds
434
+ );
435
+
436
+ currentRetry++;
437
+ } while (
438
+ currentRetry < ConfigFileRules.MaxRetry &&
439
+ chunkIndexErrors.length > 0
440
+ );
441
+
442
+ if (chunkIndexErrors.length > 0) {
443
+ return new Promise((resolve, reject) => {
444
+ reject(
445
+ `Fail to upload chunks ${chunkIndexErrors} for file ${fileName}`
446
+ );
447
+ });
448
+ }
449
+ for (let i = 0; i < responseUploadChilds.length; i++) {
450
+ const responseUploadChild = responseUploadChilds[i];
451
+ Object.defineProperty(eTags, responseUploadChild.partNumber, {
452
+ value: responseUploadChild.eTag,
453
+ enumerable: true,
454
+ });
455
+ }
456
+
457
+ var completeResponse = await service.completeMultipartUpload(
458
+ bucketName,
459
+ fileName,
460
+ initUpload.uploadId,
461
+ eTags,
462
+ this.accessToken,
463
+ cancellationToken
464
+ );
465
+ onProgress?.(fileSize);
466
+
467
+ if (!completeResponse) {
468
+ throw new Error(`Fail to complete multipart upload for file ${fileName}`);
469
+ }
470
+
471
+ const objectDetail: any = await service.getObjectDetail(
472
+ bucketName,
473
+ fileName,
474
+ this.accessToken
475
+ );
476
+ if (!objectDetail) {
477
+ throw new Error(`Fail to get object detail for file ${fileName}`);
478
+ }
479
+
480
+ return {
481
+ bucketName: bucketName,
482
+ fileName: fileName,
483
+ contentLength: fileSize,
484
+ versionFile: objectDetail.versionId,
485
+ dateVersionFile: objectDetail.lastModified,
486
+ };
487
+ }
488
+
489
+ public async uploadFile(
490
+ bucketName: string,
491
+ file: File,
492
+ cancellationToken: AbortController,
493
+ onProgress?: (chunkUploaded: number) => void,
494
+ maxWebWorker?: number,
495
+ prefix?: string,
496
+ initUpload?: InitUploadResponse,
497
+ customFileName?: string,
498
+ worker?: Worker,
499
+ dataUploadId?: string
500
+ ) {
501
+ if (!this.accessToken)
502
+ return new Promise((resolve, reject) => {
503
+ reject("Access token is required");
504
+ });
505
+
506
+ // Check cancellation before starting
507
+ if (cancellationToken.signal.aborted) {
508
+ return new Promise((resolve, reject) => {
509
+ reject("Upload cancelled");
510
+ });
511
+ }
512
+
513
+ const fileSize = file.size;
514
+ const fileName =
515
+ prefix && prefix.length > 0
516
+ ? `${prefix.at(prefix.length - 1) === "/" ? prefix : prefix + "/"}${
517
+ customFileName ?? file.name
518
+ }`
519
+ : customFileName ?? file.name;
520
+ maxWebWorker = maxWebWorker ? maxWebWorker : ConfigFileRules.MaxWebWorker;
521
+ var service = new ServiceIntegration();
522
+ await service.validateFileSize(fileName, fileSize);
523
+
524
+ // Handle small file upload
525
+ if (fileSize <= ConfigFileRules.ChunkSize) {
526
+ const buffer = await readFileDataAsBase64(file);
527
+ const resUploadSmallFile = await service.uploadSmallFile(
528
+ bucketName,
529
+ fileName,
530
+ buffer,
531
+ this.accessToken,
532
+ cancellationToken,
533
+ onProgress,
534
+ undefined,
535
+ true
536
+ );
537
+ onProgress?.(buffer.length);
538
+ return resUploadSmallFile;
539
+ }
540
+
541
+ // Init multi upload
542
+ if (!initUpload) {
543
+ initUpload = await service.initMultiPartUpload(
544
+ bucketName,
545
+ fileName,
546
+ this.accessToken,
547
+ undefined,
548
+ cancellationToken
549
+ );
550
+ if (!initUpload) {
551
+ return new Promise((resolve, reject) => {
552
+ reject(`Fail to initiate multipart upload for file ${fileName}`);
553
+ });
554
+ }
555
+ }
556
+
557
+ var numberOfChunks: number = service.calculateNumberOfChunks(fileSize);
558
+ const urls = await service.generatePresignedUrls(
559
+ bucketName,
560
+ fileName,
561
+ initUpload,
562
+ numberOfChunks,
563
+ this.accessToken,
564
+ cancellationToken
565
+ );
566
+ if (!urls) {
567
+ return new Promise((resolve, reject) => {
568
+ reject(`Fail to generate presigned urls for file ${fileName}`);
569
+ });
570
+ }
571
+
572
+ const eTags: any = {};
573
+ const maxLimit =
574
+ numberOfChunks < ConfigFileRules.LimitFile
575
+ ? numberOfChunks
576
+ : ConfigFileRules.LimitFile;
577
+ const limit = pLimit(maxLimit);
578
+ // init worker
579
+ const workerPorts: MessagePort[] = [];
580
+ const maxWorker =
581
+ numberOfChunks < maxWebWorker ? numberOfChunks : maxWebWorker;
582
+ // Create workers
583
+ for (let index = 0; index < maxWorker; index++) {
584
+ const channel = new MessageChannel();
585
+ const worker = new Worker(
586
+ new URL("workerUploadChildFile", import.meta.url)
587
+ );
588
+ worker.postMessage({ port: channel.port2 }, [channel.port2]);
589
+ workerPorts.push(channel.port1);
590
+ this.workersStatus.push({
591
+ worker: worker,
592
+ taskNumber: 0,
593
+ id: index,
594
+ taskHandle: 0,
595
+ });
596
+ }
597
+ if (worker) {
598
+ worker.postMessage({ workerPorts }, workerPorts);
599
+ }
600
+
601
+ try {
602
+ const tasks = Array.from({ length: numberOfChunks }, (_, index) => {
603
+ return limit(async () => {
604
+ // Check cancellation before each chunk
605
+ if (cancellationToken.signal.aborted) {
606
+ throw new Error("Upload cancelled");
607
+ }
608
+
609
+ const chunkStart = index * ConfigFileRules.ChunkSize;
610
+ const chunkEnd = Math.min(
611
+ chunkStart + ConfigFileRules.ChunkSize,
612
+ fileSize
613
+ );
614
+ const chunk = file.slice(chunkStart, chunkEnd);
615
+ const arrayBuffer = await chunk.arrayBuffer();
616
+ var url: string = urls[index];
617
+
618
+ return this.runOnWorker({
619
+ currentPresignUrl: { url: url, headers: initUpload.headers },
620
+ arrayBuffer: arrayBuffer,
621
+ partNumber: index + 1,
622
+ onProgress: onProgress,
623
+ requestId: `${index + 1}-${Date.now()}`,
624
+ cancellationToken: cancellationToken,
625
+ dataUploadId: dataUploadId,
626
+ fileName: file.name,
627
+ });
628
+ });
629
+ });
630
+
631
+ const results = await Promise.all(tasks);
632
+
633
+ for (let i = 0; i < results.length; i++) {
634
+ const responseUploadChild = results[i].responseUpload;
635
+ if (!responseUploadChild) {
636
+ throw new Error(`Fail to upload chunk ${i} of file ${fileName}`);
637
+ }
638
+ Object.defineProperty(eTags, responseUploadChild.partNumber, {
639
+ value: responseUploadChild.eTag,
640
+ enumerable: true,
641
+ });
642
+ }
643
+
644
+ var completeResponse = await service.completeMultipartUpload(
645
+ bucketName,
646
+ fileName,
647
+ initUpload.uploadId,
648
+ eTags,
649
+ this.accessToken,
650
+ cancellationToken
651
+ );
652
+ onProgress?.(fileSize);
653
+
654
+ if (!completeResponse) {
655
+ throw new Error(
656
+ `Fail to complete multipart upload for file ${fileName}`
657
+ );
658
+ }
659
+
660
+ const objectDetail: any = await service.getObjectDetail(
661
+ bucketName,
662
+ fileName,
663
+ this.accessToken
664
+ );
665
+ if (!objectDetail) {
666
+ throw new Error(`Fail to get object detail for file ${fileName}`);
667
+ }
668
+
669
+ return {
670
+ bucketName: bucketName,
671
+ fileName: fileName,
672
+ contentLength: fileSize,
673
+ versionFile: objectDetail.versionId,
674
+ dateVersionFile: objectDetail.lastModified,
675
+ };
676
+ } catch (error) {
677
+ // Cleanup on error or cancellation
678
+ throw error;
679
+ } finally {
680
+ // Always cleanup workers
681
+ this.workersStatus.forEach((workerStatus) => {
682
+ workerStatus.worker.terminate();
683
+ });
684
+ this.workersStatus = [];
685
+ }
686
+ }
687
+
688
+ private async runOnWorker(data: ISingleFileUpload): Promise<any> {
689
+ const workerStatus = this.workersStatus.find(
690
+ (x) => x.taskNumber < ConfigFileRules.TaskCurrently
691
+ );
692
+ if (!workerStatus) return Promise.reject("No worker available");
693
+ // increase task number
694
+ workerStatus.taskNumber++;
695
+ const worker = workerStatus.worker;
696
+ return new Promise((resolve, reject) => {
697
+ if (typeof Worker !== "undefined") {
698
+ worker.addEventListener("message", (message: MessageEvent) => {
699
+ if (message.data.requestId !== data.requestId) return;
700
+
701
+ workerStatus.taskNumber--;
702
+ if (message.data && message.data.status === StatusWorker.Success) {
703
+ data.onProgress && data.onProgress(message.data.contentLength);
704
+ resolve(message.data);
705
+ } else {
706
+ reject(message.data);
707
+ }
708
+ });
709
+ worker.addEventListener("error", (error) => {
710
+ workerStatus.taskNumber--;
711
+ console.error(error);
712
+ reject(error);
713
+ });
714
+
715
+ // Add abort handler to worker
716
+ const abortHandler = () => {
717
+ worker.postMessage({
718
+ type: "abort",
719
+ requestId: data.requestId,
720
+ });
721
+ };
722
+ data.cancellationToken?.signal.addEventListener("abort", abortHandler);
723
+
724
+ const newData = {
725
+ currentPresignUrl: data.currentPresignUrl,
726
+ arrayBuffer: data.arrayBuffer,
727
+ partNumber: data.partNumber,
728
+ requestId: data.requestId,
729
+ dataUploadId: data.dataUploadId,
730
+ fileName: data.fileName,
731
+ };
732
+ worker.postMessage(newData, [data.arrayBuffer]);
733
+
734
+ // Cleanup abort handler when promise resolves or rejects
735
+ return () => {
736
+ data.cancellationToken?.signal.removeEventListener(
737
+ "abort",
738
+ abortHandler
739
+ );
740
+ };
741
+ } else {
742
+ console.error("Web Workers are not supported in this environment.");
743
+ reject("Web Workers are not supported in this environment.");
744
+ }
745
+ });
746
+ }
747
+ //#endregion
748
+ //#region Upload folder
749
+ public async uploadFolderAferInit(
750
+ filesInitiated: {
751
+ file: File;
752
+ fileKey: string;
753
+ path: string;
754
+ initUpload?: InitUploadResponse;
755
+ isGetInfo?: boolean;
756
+ }[],
757
+ cancellationToken?: AbortController,
758
+ worker?: Worker,
759
+ dataUploadId?: string,
760
+ onProgress?: ({
761
+ percentCompleted,
762
+ fileKey,
763
+ versionFile,
764
+ }: {
765
+ percentCompleted: number;
766
+ fileKey: string;
767
+ versionFile?: string;
768
+ }) => void,
769
+ maxWebWorker?: number
770
+ ) {
771
+ const bucketName = filesInitiated[0].path.split("/").at(0);
772
+ if (!bucketName)
773
+ return new Promise((resolve, reject) => {
774
+ reject("Path is not valid");
775
+ });
776
+
777
+ if (cancellationToken?.signal.aborted) {
778
+ return new Promise((resolve, reject) => {
779
+ reject("Upload cancelled");
780
+ });
781
+ }
782
+
783
+ const abortHandler = () => {
784
+ // Cleanup workers
785
+ this.workersStatus.forEach((workerStatus) => {
786
+ workerStatus.worker.terminate();
787
+ });
788
+ this.workersStatus = [];
789
+ };
790
+ cancellationToken?.signal.addEventListener("abort", abortHandler);
791
+
792
+ try {
793
+ const result = await this.uploadFolder(
794
+ bucketName,
795
+ filesInitiated.map((x) => ({
796
+ file: x.file,
797
+ fileKey: x.fileKey,
798
+ customFileName: x.path.split("/").slice(1).join("/"),
799
+ initUpload: x.initUpload,
800
+ isGetInfo: x.isGetInfo,
801
+ })),
802
+ cancellationToken || new AbortController(),
803
+ worker,
804
+ dataUploadId,
805
+ onProgress,
806
+ maxWebWorker
807
+ );
808
+ // Cleanup
809
+ cancellationToken?.signal.removeEventListener("abort", abortHandler);
810
+ return result;
811
+ } catch (error) {
812
+ // Cleanup
813
+ cancellationToken?.signal.removeEventListener("abort", abortHandler);
814
+ if (error instanceof Error && error.name === "AbortError") {
815
+ return new Promise((resolve, reject) => {
816
+ reject({
817
+ errorCode: "ClientAbort",
818
+ message: "Upload cancelled",
819
+ });
820
+ });
821
+ }
822
+ throw error;
823
+ }
824
+ }
825
+
826
+ public async uploadFolder(
827
+ bucketName: string,
828
+ files: FileUploadType[],
829
+ cancellationToken: AbortController,
830
+ worker?: Worker,
831
+ dataUploadId?: string,
832
+ onProgress?: ({
833
+ percentCompleted,
834
+ fileKey,
835
+ versionFile,
836
+ }: {
837
+ percentCompleted: number;
838
+ fileKey: string;
839
+ versionFile?: string;
840
+ }) => void,
841
+ maxWebWorker?: number,
842
+ prefix?: string
843
+ ): Promise<any> {
844
+ try {
845
+ if (!this.accessToken)
846
+ return new Promise((resolve, reject) => {
847
+ reject("Access token is required");
848
+ });
849
+
850
+ // Check cancellation before starting
851
+ if (cancellationToken.signal.aborted) {
852
+ return new Promise((resolve, reject) => {
853
+ reject({
854
+ errorCode: "ClientAbort",
855
+ message: "Upload cancelled",
856
+ });
857
+ });
858
+ }
859
+
860
+ prefix =
861
+ prefix && prefix.length > 0
862
+ ? `${prefix.at(prefix.length - 1) === "/" ? prefix : prefix + "/"}`
863
+ : "";
864
+ maxWebWorker = maxWebWorker ? maxWebWorker : ConfigFileRules.MaxWebWorker;
865
+ let percentOneBuffer = 100 / files.length;
866
+ let currentPercent = 0;
867
+ let currentRetry = 0;
868
+ const percentOfChild = ({
869
+ fileKey,
870
+ versionFile,
871
+ }: {
872
+ fileKey: string;
873
+ versionFile?: string;
874
+ }) => {
875
+ currentPercent += percentOneBuffer;
876
+ onProgress?.({
877
+ percentCompleted: Math.floor(currentPercent),
878
+ fileKey: fileKey,
879
+ versionFile: versionFile,
880
+ });
881
+ };
882
+
883
+ let filesUploadFail: FileUploadType[] | undefined = files;
884
+ let filesUploadSuccess:
885
+ | {
886
+ file: File;
887
+ customFileName?: string;
888
+ versionFile?: string;
889
+ }[]
890
+ | undefined = [];
891
+ do {
892
+ // Check cancellation before each retry
893
+ if (cancellationToken.signal.aborted) {
894
+ throw new Error("Upload cancelled");
895
+ }
896
+ const resUpload = await this.uploadMultiFileRetry(
897
+ bucketName,
898
+ filesUploadFail,
899
+ this.accessToken,
900
+ cancellationToken,
901
+ maxWebWorker,
902
+ this.reFreshToken,
903
+ percentOfChild,
904
+ prefix,
905
+ worker,
906
+ dataUploadId
907
+ );
908
+ currentRetry++;
909
+ if (resUpload) {
910
+ filesUploadFail = resUpload.filesUploadFail ?? [];
911
+ filesUploadSuccess = filesUploadSuccess.concat(
912
+ resUpload.filesUploadSuccess ?? []
913
+ );
914
+ } else {
915
+ return new Promise((resolve, reject) => {
916
+ reject("Fail to upload files");
917
+ });
918
+ }
919
+ } while (
920
+ currentRetry < ConfigFileRules.MaxRetry &&
921
+ filesUploadFail &&
922
+ filesUploadFail.length > 0
923
+ );
924
+ // get list file upload fail
925
+ if (!filesUploadFail)
926
+ return new Promise((resolve, reject) => {
927
+ reject("Fail to upload files");
928
+ });
929
+ const filesNameUploadFail: string[] = filesUploadFail.map((file) => {
930
+ return file.customFileName ?? file.file.webkitRelativePath;
931
+ });
932
+
933
+ // if(worker) {
934
+ // worker.postMessage({
935
+ // type: "progressUpload",
936
+ // progress: {
937
+ // dataUploadId: dataUploadId,
938
+ // percentCompleted: 100,
939
+ // fileKey: "",
940
+ // versionFile: "",
941
+ // },
942
+ // });
943
+ // }
944
+
945
+ return new Promise((resolve, reject) => {
946
+ if (filesNameUploadFail.length === files.length)
947
+ reject("Fail to upload all files");
948
+ else
949
+ resolve({
950
+ uploadFail: {
951
+ totalFile: filesNameUploadFail.length,
952
+ filesName: filesNameUploadFail,
953
+ },
954
+ uploadSuccess: {
955
+ totalFile: files.length - filesNameUploadFail.length,
956
+ files: filesUploadSuccess.map((file) => ({
957
+ fileName: file.customFileName ?? file.file.webkitRelativePath,
958
+ versionFile: file.versionFile,
959
+ })),
960
+ },
961
+ });
962
+ });
963
+ } catch (error) {
964
+ console.error("uploadMultiFile", error);
965
+ if (error instanceof Error && error.message === "Upload cancelled") {
966
+ return new Promise((resolve, reject) => {
967
+ reject({
968
+ errorCode: "ClientAbort",
969
+ message: "Upload cancelled",
970
+ });
971
+ });
972
+ }
973
+ return new Promise((resolve, reject) => {
974
+ reject(error);
975
+ });
976
+ }
977
+ }
978
+
979
  /**
   * Uploads `files` to `bucketName` in batches of 2000, pre-fetching presigned
   * URLs one batch ahead, and returns the files that failed alongside those
   * that succeeded (with their uploaded version, when the worker reports one).
   *
   * Work is fanned out to up to `maxWebWorker` dedicated upload workers via
   * `this.uploadSingleFile`; per-batch concurrency is capped by
   * `ConfigFileRules.LimitFile` through `pLimit`.
   *
   * @param bucketName        Target bucket.
   * @param files             Files to upload; mutated in place: each entry may
   *                          receive a `preSignUrl` once its URL is fetched.
   * @param accessToken       Token forwarded to the presign and upload calls.
   * @param cancellationToken Aborting this rejects pending uploads with
   *                          "Upload cancelled", which is re-thrown to the caller.
   * @param maxWebWorker      Upper bound on the number of upload workers spawned.
   * @param reFreshToken      NOTE(review): accepted but never used in this
   *                          method body — confirm whether refresh-on-expiry
   *                          was meant to be wired in here.
   * @param percentOfChild    Optional per-file progress callback (forwarded to
   *                          uploadSingleFile).
   * @param prefix            Optional key prefix prepended to each file name.
   * @param worker            Optional coordinator worker; if given, it receives
   *                          one MessagePort per spawned upload worker.
   * @param dataUploadId      Opaque id forwarded with each upload request.
   * @returns `{ filesUploadFail, filesUploadSuccess }`, or `undefined` on any
   *          non-cancellation error (workers are terminated first).
   * @throws Error("Upload cancelled") when `cancellationToken` is aborted.
   */
  public async uploadMultiFileRetry(
    bucketName: string,
    files: FileUploadType[],
    accessToken: string,
    cancellationToken: AbortController,
    maxWebWorker: number,
    reFreshToken?: () => Promise<string>,
    percentOfChild?: ({
      fileKey,
      versionFile,
    }: {
      fileKey: string;
      versionFile?: string;
    }) => void,
    prefix?: string,
    worker?: Worker,
    dataUploadId?: string
  ): Promise<
    | {
        filesUploadFail: FileUploadType[] | undefined;
        filesUploadSuccess:
          | {
              file: File;
              customFileName?: string;
              versionFile?: string;
            }[]
          | undefined;
      }
    | undefined
  > {
    try {
      // Fail fast if the caller already cancelled before we started.
      if (cancellationToken.signal.aborted) {
        throw new Error("Upload cancelled");
      }

      const service = new ServiceIntegration();
      const batchSize = 2000;
      // When the file at this index inside a batch finishes, we kick off the
      // presign fetch for the NEXT batch so it is ready before this one drains.
      const triggerNextBatchAt = 1000;
      let currentBatchIndex = 0;
      let filesUploadFail: FileUploadType[] = [];
      let filesUploadSuccess: {
        file: File;
        customFileName?: string;
        versionFile?: string;
      }[] = [];

      // Re-entrancy guard so only one look-ahead presign fetch runs at a time.
      let isFetchingNextBatch = false;

      // Fetches presigned URLs for a batch and attaches them to the matching
      // FileUploadType entries (mutates `file.preSignUrl` in place).
      // Only files small enough for a single-chunk upload
      // (size <= ConfigFileRules.ChunkSize) are included in the presign request.
      const fetchPresignUrlsForBatch = async (batchFiles: FileUploadType[]) => {
        const multiPresignUrl = await service.generateMultiPresignedUrl(
          bucketName,
          batchFiles
            .filter((x) => x.file.size <= ConfigFileRules.ChunkSize)
            .map((file) => {
              const customFileName =
                file.customFileName ?? file.file.webkitRelativePath;
              // Build the object key: optional prefix + name without a
              // leading slash (keys must not start with "/").
              return `${prefix ?? ""}${
                customFileName.startsWith("/")
                  ? customFileName.slice(1)
                  : customFileName
              }`;
            }),
          accessToken,
          cancellationToken
        );

        if (multiPresignUrl) {
          batchFiles.forEach((file) => {
            const customFileName =
              file.customFileName ?? file.file.webkitRelativePath;
            // Must reproduce the exact key used in the request above so the
            // lookup in `multiPresignUrl.urls` matches.
            const fileName = `${prefix ?? ""}${
              customFileName.startsWith("/")
                ? customFileName.slice(1)
                : customFileName
            }`;
            const preSignUrl = multiPresignUrl.urls[fileName];
            if (preSignUrl) {
              file.preSignUrl = {
                url: preSignUrl,
                headers: multiPresignUrl.headers,
              };
            }
          });
        }
        return multiPresignUrl;
      };

      // Pre-fetches presigned URLs for the batch after the current one.
      // NOTE(review): invoked fire-and-forget below; a rejection here would be
      // an unhandled promise rejection — confirm whether that is acceptable.
      const prepareNextBatch = async () => {
        if (
          isFetchingNextBatch ||
          !((currentBatchIndex + 1) * batchSize < files.length)
        )
          return;

        isFetchingNextBatch = true;
        try {
          const nextBatchStart = (currentBatchIndex + 1) * batchSize;
          const nextBatchEnd = Math.min(
            (currentBatchIndex + 2) * batchSize,
            files.length
          );
          const nextBatchFiles = files.slice(nextBatchStart, nextBatchEnd);
          await fetchPresignUrlsForBatch(nextBatchFiles);
        } finally {
          isFetchingNextBatch = false;
        }
      };

      // Spawn upload workers (never more workers than files). Each worker gets
      // port2 of a MessageChannel; port1 is collected for the optional
      // coordinator `worker` passed by the caller.
      const workerPorts: MessagePort[] = [];
      const maxWorker =
        files.length < maxWebWorker ? files.length : maxWebWorker;
      for (let index = 0; index < maxWorker; index++) {
        const channel = new MessageChannel();
        const worker = new Worker(
          new URL("workerUploadSingleFile", import.meta.url)
        );
        // port2 is transferred (second arg) — ownership moves to the worker.
        worker.postMessage({ port: channel.port2 }, [channel.port2]);
        workerPorts.push(channel.port1);
        this.workersStatus.push({
          worker: worker,
          taskNumber: 0,
          id: index,
          taskHandle: 0,
        });
      }
      if (worker) {
        // Hand all upload-worker ports (transferred) to the coordinator worker.
        worker.postMessage({ workerPorts }, workerPorts);
      }

      // Process files batch by batch; within a batch, uploads run with bounded
      // concurrency and failures are collected rather than aborting the batch.
      while (currentBatchIndex * batchSize < files.length) {
        const batchStart = currentBatchIndex * batchSize;
        const batchEnd = Math.min(
          (currentBatchIndex + 1) * batchSize,
          files.length
        );
        const batchFiles = files.slice(batchStart, batchEnd);

        // Get presignUrls for current batch (no-op if already pre-fetched —
        // NOTE(review): this re-fetches even for pre-fetched batches; confirm
        // whether generateMultiPresignedUrl is idempotent/cheap here).
        await fetchPresignUrlsForBatch(batchFiles);

        const maxLimit =
          batchFiles.length < ConfigFileRules.LimitFile
            ? batchFiles.length
            : ConfigFileRules.LimitFile;

        const limit = pLimit(maxLimit);

        const tasks = batchFiles.map((file, idx) =>
          limit(async () => {
            if (cancellationToken.signal.aborted)
              throw new Error("Upload cancelled");
            try {
              // Read the file into memory only when its turn comes up, and
              // release the reference as soon as the upload finishes.
              let arrayBuffer: ArrayBuffer | null =
                await file.file.arrayBuffer();
              const res = await this.uploadSingleFile({
                ...file,
                bucketName,
                arrayBuffer: arrayBuffer,
                index: idx,
                requestId: `${idx}-${Date.now()}`,
                // Same key-building rule as in fetchPresignUrlsForBatch:
                // prefix + name stripped of a leading slash.
                fullName: `${prefix ?? ""}${
                  (
                    file.customFileName ?? file.file.webkitRelativePath
                  ).startsWith("/")
                    ? (
                        file.customFileName ?? file.file.webkitRelativePath
                      ).slice(1)
                    : file.customFileName ?? file.file.webkitRelativePath
                }`,
                accessToken,
                percentOfChild,
                cancellationToken,
                dataUploadId,
              });
              arrayBuffer = null;
              filesUploadSuccess.push({
                ...file,
                versionFile: res.responseUpload?.versionFile,
              });
              // Kick off the look-ahead presign fetch part-way through the
              // batch (fire-and-forget; guarded by isFetchingNextBatch).
              if (idx === triggerNextBatchAt) {
                prepareNextBatch();
              }
            } catch (err) {
              // Record the failure; the caller decides whether to retry.
              filesUploadFail.push(file);
            }
          })
        );

        // allSettled: a failed upload must not short-circuit the batch.
        await Promise.allSettled(tasks);
        currentBatchIndex++;
      }

      return {
        filesUploadFail,
        filesUploadSuccess,
      };
    } catch (error) {
      console.error("uploadMultiFileRetry", error);
      // Cleanup workers on error
      this.workersStatus.forEach((workerStatus) => {
        workerStatus.worker.terminate();
      });
      this.workersStatus = [];

      if (error instanceof Error && error.message === "Upload cancelled") {
        throw error; // Re-throw cancel error to be handled by caller
      }
      // Any other error is swallowed and signalled to the caller as undefined.
      return undefined;
    }
  }
1193
+
1194
+ uploadSingleFile = async (request: IMultiFileUpload): Promise<any> => {
1195
+ const workerStatus = this.workersStatus.find(
1196
+ (x) => x.taskNumber < ConfigFileRules.TaskCurrently
1197
+ );
1198
+ if (!workerStatus) return Promise.reject("No worker available");
1199
+
1200
+ // Check cancellation before starting
1201
+ if (request.cancellationToken?.signal.aborted) {
1202
+ return Promise.reject(new Error("Upload cancelled"));
1203
+ }
1204
+
1205
+ // increase task number
1206
+ workerStatus.taskNumber++;
1207
+ const worker = workerStatus.worker;
1208
+ return new Promise((resolve, reject) => {
1209
+ if (typeof Worker !== "undefined") {
1210
+ worker.addEventListener("message", (message: MessageEvent) => {
1211
+ if (message.data.requestId !== request.requestId) return;
1212
+
1213
+ workerStatus.taskNumber--;
1214
+ if (message.data && message.data.status === StatusWorker.Success) {
1215
+ request.percentOfChild &&
1216
+ request.percentOfChild({
1217
+ fileKey: message.data.fileKey,
1218
+ versionFile: message.data.responseUpload.versionFile,
1219
+ });
1220
+ resolve(message.data);
1221
+ } else {
1222
+ reject(message.data);
1223
+ }
1224
+ });
1225
+ worker.addEventListener("error", (error) => {
1226
+ workerStatus.taskNumber--;
1227
+ console.error(error);
1228
+ reject(error);
1229
+ });
1230
+
1231
+ // Add abort handler to worker
1232
+ const abortHandler = () => {
1233
+ worker.postMessage({
1234
+ type: "abort",
1235
+ requestId: request.requestId,
1236
+ });
1237
+ worker.terminate();
1238
+ reject(new Error("Upload cancelled"));
1239
+ };
1240
+ request.cancellationToken?.signal.addEventListener(
1241
+ "abort",
1242
+ abortHandler
1243
+ );
1244
+
1245
+ const newData = {
1246
+ bucketName: request.bucketName,
1247
+ fullName: request.fullName,
1248
+ arrayBuffer: request.arrayBuffer,
1249
+ requestId: request.requestId,
1250
+ workerId: workerStatus.id,
1251
+ accessToken: request.accessToken,
1252
+ fileKey: request.fileKey,
1253
+ initUpload: request.initUpload,
1254
+ isGetInfo: request.isGetInfo,
1255
+ preSignUrl: request.preSignUrl,
1256
+ dataUploadId: request.dataUploadId,
1257
+ };
1258
+ worker.postMessage(newData, [request.arrayBuffer]);
1259
+
1260
+ // Cleanup abort handler when promise resolves or rejects
1261
+ return () => {
1262
+ request.cancellationToken?.signal.removeEventListener(
1263
+ "abort",
1264
+ abortHandler
1265
+ );
1266
+ };
1267
+ } else {
1268
+ console.error("Web Workers are not supported in this environment.");
1269
+ reject("Web Workers are not supported in this environment.");
1270
+ }
1271
+ });
1272
+ };
1273
+
1274
+ //#endregion
1275
+ //#region Download file
1276
+ // public async downloadFile(
1277
+ // bucketName: string,
1278
+ // fileName: string,
1279
+ // cancellationToken: AbortController,
1280
+ // onProgress?: (percentCompleted: number) => void,
1281
+ // maxWebWorker?: number
1282
+ // ) {
1283
+ // try {
1284
+ // maxWebWorker = maxWebWorker ? maxWebWorker : ConfigFileRules.MaxWebWorker;
1285
+ // var serviceIntegrate = new ServiceIntegration();
1286
+ // const file = await serviceIntegrate.getObjectDetail(
1287
+ // bucketName,
1288
+ // fileName,
1289
+ // this.accessToken
1290
+ // );
1291
+ // if (!file || !file.contentLength || file.contentLength === 0) {
1292
+ // return new Promise((resolve, reject) => {
1293
+ // reject("File not found");
1294
+ // });
1295
+ // }
1296
+ // const url = await serviceIntegrate.getUrlToDownload(
1297
+ // bucketName,
1298
+ // fileName,
1299
+ // this.accessToken
1300
+ // );
1301
+ // if (!url)
1302
+ // return new Promise((resolve, reject) => {
1303
+ // reject("Fail to get url download");
1304
+ // });
1305
+ // const numberOfChunks = Math.ceil(
1306
+ // file.contentLength / ConfigFileRules.ChunkSize
1307
+ // );
1308
+ // const maxWorker = Math.min(numberOfChunks, maxWebWorker);
1309
+ // const maxLimit = Math.min(numberOfChunks, ConfigFileRules.LimitFile);
1310
+ // const limit = pLimit(maxLimit);
1311
+ // let start = 0;
1312
+ // const totalSize = file.contentLength;
1313
+ // for (let index = 0; index < maxWorker; index++) {
1314
+ // this.workersStatus.push({
1315
+ // worker: new Worker(
1316
+ // new URL("workerDownloadSingleFile", import.meta.url)
1317
+ // ),
1318
+ // taskNumber: 0,
1319
+ // id: index,
1320
+ // taskHandle: 0,
1321
+ // });
1322
+ // }
1323
+ // const newFileName = fileName.split("/").at(-1);
1324
+ // const fileWriter = streamSaver.createWriteStream(`${newFileName}`);
1325
+ // const writer = fileWriter.getWriter();
1326
+ // const chunksQueue: (Uint8Array | undefined)[] = [];
1327
+ // let nextChunkIndex = 0;
1328
+ // const handleChunk = async (chunk: Uint8Array, index: number) => {
1329
+ // chunksQueue[index] = chunk;
1330
+ // while (chunksQueue[nextChunkIndex]) {
1331
+ // nextChunkIndex++; // hack duplicate chunk :D
1332
+ // await writer.write(chunksQueue[nextChunkIndex - 1]);
1333
+ // chunksQueue[nextChunkIndex - 1] = undefined; // Clear memory
1334
+ // }
1335
+ // };
1336
+ // const chunksData: any[] = [];
1337
+ // Array.from({ length: numberOfChunks }, (_, index) => {
1338
+ // const end = Math.min(
1339
+ // start + ConfigFileRules.ChunkSize - 1,
1340
+ // totalSize - 1
1341
+ // );
1342
+ // chunksData.push({
1343
+ // url: url,
1344
+ // start: start,
1345
+ // end: end,
1346
+ // index: index,
1347
+ // requestId: `${index + 1}-${Date.now()}`,
1348
+ // });
1349
+ // start += ConfigFileRules.ChunkSize;
1350
+ // });
1351
+ // // hack to client chose location to save file
1352
+ // const firstChunk = chunksData.shift();
1353
+ // const firstData = await this.downloadSingleLargeFile(
1354
+ // firstChunk.index,
1355
+ // firstChunk.url,
1356
+ // firstChunk.start,
1357
+ // firstChunk.end,
1358
+ // firstChunk.requestId
1359
+ // );
1360
+ // await handleChunk(new Uint8Array(firstData.chunk), firstData.index);
1361
+ // const tasks = chunksData.map((data) => {
1362
+ // return limit(async () => {
1363
+ // let task = await this.downloadSingleLargeFile(
1364
+ // data.index,
1365
+ // data.url,
1366
+ // data.start,
1367
+ // data.end,
1368
+ // data.requestId
1369
+ // );
1370
+ // await handleChunk(new Uint8Array(task.chunk), task.index);
1371
+ // return task;
1372
+ // });
1373
+ // });
1374
+
1375
+ // await Promise.all(tasks);
1376
+ // await writer.close();
1377
+ // this.workersStatus.forEach((workerStatus) => {
1378
+ // workerStatus.worker.terminate();
1379
+ // });
1380
+ // this.workersStatus = [];
1381
+
1382
+ // return new Promise((resolve, reject) => {
1383
+ // resolve({
1384
+ // downloadSuccess: {
1385
+ // totalFile: numberOfChunks,
1386
+ // blobUrl: fileWriter,
1387
+ // fileName: `${newFileName}`,
1388
+ // },
1389
+ // });
1390
+ // });
1391
+ // } catch (error) {
1392
+ // console.error("downloadLargeFile", error);
1393
+ // return new Promise((resolve, reject) => {
1394
+ // reject(error);
1395
+ // });
1396
+ // }
1397
+ // }
1398
+
1399
+ private async downloadSingleLargeFile(
1400
+ index: number,
1401
+ url: string,
1402
+ start: number,
1403
+ end: number,
1404
+ requestId: string
1405
+ ): Promise<any> {
1406
+ try {
1407
+ const workerStatus = this.workersStatus.find(
1408
+ (x) => x.taskNumber < ConfigFileRules.TaskCurrently
1409
+ );
1410
+ if (!workerStatus) return Promise.reject("No worker available");
1411
+ // increase task number
1412
+ workerStatus.taskNumber++;
1413
+ const worker = workerStatus.worker;
1414
+ return new Promise((resolve, reject) => {
1415
+ if (typeof Worker !== "undefined") {
1416
+ worker.addEventListener("message", async (message: MessageEvent) => {
1417
+ if (message.data.requestId !== requestId) return;
1418
+
1419
+ workerStatus.taskNumber--;
1420
+ if (message.data && message.data.status === StatusWorker.Success) {
1421
+ resolve(message.data);
1422
+ } else {
1423
+ reject(message.data);
1424
+ }
1425
+ });
1426
+ worker.addEventListener("error", (error) => {
1427
+ workerStatus.taskNumber--;
1428
+ console.error(error);
1429
+ reject(error);
1430
+ });
1431
+
1432
+ const newData = {
1433
+ index: index,
1434
+ url: url,
1435
+ start: start,
1436
+ end: end,
1437
+ requestId: requestId,
1438
+ };
1439
+ worker.postMessage(newData);
1440
+ } else {
1441
+ console.error("Web Workers are not supported in this environment.");
1442
+ reject("Web Workers are not supported in this environment.");
1443
+ }
1444
+ });
1445
+ } catch (error) {
1446
+ console.error(`Error when download file: ${error}`);
1447
+ throw error;
1448
+ }
1449
+ }
1450
+ //#endregion
1451
+ //#region Translate
1452
+ public async translateFile(
1453
+ request: TranslateRequest
1454
+ ): Promise<TranslateInfo> {
1455
+ return await this.baseAction<TranslateInfo>((accessToken: string) => {
1456
+ return TranslateFile(accessToken, request);
1457
+ });
1458
+ }
1459
+
1460
+ public async getStatusTranslate(request: string[]): Promise<any> {
1461
+ return await this.baseAction<TranslateStatusInfo[]>(
1462
+ (accessToken: string) => {
1463
+ return GetStatusTranslate(accessToken, request);
1464
+ }
1465
+ );
1466
+ }
1467
+
1468
+ public async getStatusTranslateFile(
1469
+ request: GetStatusTranslateRequest[]
1470
+ ): Promise<any> {
1471
+ return await this.baseAction<TranslateStatusInfo[]>(
1472
+ (accessToken: string) => {
1473
+ return GetStatusTranslateFile(accessToken, request);
1474
+ }
1475
+ );
1476
+ }
1477
+
1478
+ public async getEPSGRegionCode(): Promise<string[]> {
1479
+ return await this.baseAction<string[]>((accessToken: string) => {
1480
+ return GetEPSGRegionCode(accessToken);
1481
+ });
1482
+ }
1483
+
1484
+ public async getFileTileSet(translateId: string): Promise<string> {
1485
+ return await this.baseAction<string>((accessToken: string) => {
1486
+ return GetFileTileSet(accessToken, translateId);
1487
+ });
1488
+ }
1489
+ //#endregion
1490
+ //#region Authentication
1491
+ public async getToken(
1492
+ clientId: string,
1493
+ clientSecret: string,
1494
+ scopes: Scope[]
1495
+ ): Promise<Token> {
1496
+ return await this.baseActionAnonymous<Token>(() => {
1497
+ return GetToken(clientId, clientSecret, scopes);
1498
+ });
1499
+ }
1500
+ //#endregion
1501
+
1502
+ public async delay(ms: number) {
1503
+ return new Promise((resolve) => setTimeout(resolve, ms));
1504
+ }
1505
+ }