dpu-cloud-sdk 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (51)
  1. package/.env.development +1 -0
  2. package/.env.production +1 -0
  3. package/dist/DPUClient.d.ts +83 -0
  4. package/dist/DPUClient.js +1043 -0
  5. package/dist/ServiceIntegration.d.ts +20 -0
  6. package/dist/ServiceIntegration.js +506 -0
  7. package/dist/api/auth.d.ts +3 -0
  8. package/dist/api/auth.js +10 -0
  9. package/dist/api/compress.d.ts +4 -0
  10. package/dist/api/compress.js +16 -0
  11. package/dist/api/translate.d.ts +8 -0
  12. package/dist/api/translate.js +38 -0
  13. package/dist/index.d.ts +4 -0
  14. package/dist/index.js +4 -0
  15. package/dist/models/RequestModel.d.ts +33 -0
  16. package/dist/models/RequestModel.js +2 -0
  17. package/dist/models/ResponseModel.d.ts +99 -0
  18. package/dist/models/ResponseModel.js +1 -0
  19. package/dist/utils/Config.d.ts +32 -0
  20. package/dist/utils/Config.js +44 -0
  21. package/dist/utils/Constants.d.ts +48 -0
  22. package/dist/utils/Constants.js +55 -0
  23. package/dist/utils/Enum.d.ts +27 -0
  24. package/dist/utils/Enum.js +30 -0
  25. package/dist/utils/Helper.d.ts +4 -0
  26. package/dist/utils/Helper.js +47 -0
  27. package/dist/workerDownloadSingleFile.d.ts +1 -0
  28. package/dist/workerDownloadSingleFile.js +35 -0
  29. package/dist/workerUploadChildFile.d.ts +1 -0
  30. package/dist/workerUploadChildFile.js +82 -0
  31. package/dist/workerUploadSingleFile.d.ts +1 -0
  32. package/dist/workerUploadSingleFile.js +93 -0
  33. package/dpubim-service-1.1.28.tgz +0 -0
  34. package/package.json +33 -0
  35. package/src/DPUClient.ts +1505 -0
  36. package/src/ServiceIntegration.ts +710 -0
  37. package/src/api/auth.ts +18 -0
  38. package/src/api/compress.ts +36 -0
  39. package/src/api/translate.ts +94 -0
  40. package/src/index.ts +4 -0
  41. package/src/models/RequestModel.ts +44 -0
  42. package/src/models/ResponseModel.ts +110 -0
  43. package/src/utils/Config.ts +59 -0
  44. package/src/utils/Constants.ts +61 -0
  45. package/src/utils/Enum.ts +29 -0
  46. package/src/utils/Helper.ts +57 -0
  47. package/src/workerDownloadSingleFile.ts +34 -0
  48. package/src/workerUploadChildFile.ts +85 -0
  49. package/src/workerUploadSingleFile.ts +123 -0
  50. package/tsconfig.json +108 -0
  51. package/webpack.config.js +43 -0
package/dist/ServiceIntegration.d.ts ADDED
@@ -0,0 +1,20 @@
+ import { InitUploadResponse, PresignURLResponse, ObjectDetail, CompleteMultipartUploadResponse, MultiPresignURLResponse } from "./models/ResponseModel";
+ export declare class ServiceIntegration {
+ constructor();
+ getUrlToDownload(bucketName: string, fileName: string, accessToken?: string): Promise<string | null | undefined>;
+ fetchChunkFile(url: string, start: number, end: number): Promise<ArrayBuffer>;
+ uploadSmallFile(bucketName: string, fileName: string, buffer: Buffer, accessToken: string, cancellationToken: AbortController, onProgress?: (percentCompleted: number) => void, reGetAccessToken?: () => string, isGetInfo?: boolean, preSignUrl?: PresignURLResponse): Promise<unknown>;
+ private generatePresignedUrl;
+ private uploadChildFile;
+ uploadLargeFile(bucketName: string, fileName: string, buffer: Buffer, accessToken: string, cancellationToken: AbortController, onProgress?: (percentCompleted: number) => void, reGetAccessToken?: () => string, initUpload?: InitUploadResponse, isGetInfo?: boolean): Promise<unknown>;
+ validateFileSize(fileName: string, fileSize: number): Promise<void>;
+ private isFileSizeAllowed;
+ calculateNumberOfChunks(fileSize: number): number;
+ getObjectDetail(bucketName: string, fileName: string, accessToken?: string): Promise<ObjectDetail | null>;
+ initMultiPartUpload(bucketName: string, fileName: string, accessToken?: string, autoCreateBucket?: boolean | false, cancellationToken?: AbortController): Promise<InitUploadResponse | undefined>;
+ generatePresignedUrls(bucketName: string, fileName: string, initUpload: InitUploadResponse, totalPart: number, accessToken: string, cancellationToken?: AbortController): Promise<string[] | null | undefined>;
+ throwIfCancellationRequested(cancellationToken: AbortController, fileName: string): void;
+ readFileBytes(file: File, start: number, end: number): Promise<ArrayBuffer>;
+ completeMultipartUpload(bucketName: string, fileName: string, uploadId: string, eTags: object, accessToken?: string, cancellationToken?: AbortController): Promise<false | CompleteMultipartUploadResponse | undefined>;
+ generateMultiPresignedUrl(bucketName: string, filesName: string[], accessToken?: string, cancellationToken?: AbortController): Promise<MultiPresignURLResponse | null | undefined>;
+ }
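For orientation, a minimal usage sketch of the class declared above (not part of the package itself): the bucket, object name and token are placeholders, and the deep-import path is an assumption, since the package index only re-exports DPUClient, the models and dpuConfig.

```ts
// Assumed deep-import path; ServiceIntegration is not re-exported from the package index.
import { ServiceIntegration } from "dpu-cloud-sdk/dist/ServiceIntegration";

async function uploadAndShare(buffer: Buffer, accessToken: string) {
  const svc = new ServiceIntegration();
  const controller = new AbortController();

  // Small upload: the SDK requests a presigned URL, PUTs the buffer, and with
  // isGetInfo = true also fetches versionId/lastModified for the stored object.
  const info = await svc.uploadSmallFile(
    "my-bucket",       // placeholder bucket
    "models/site.ifc", // placeholder object name
    buffer,
    accessToken,
    controller,
    (pct) => console.log(`upload ${pct.toFixed(0)}%`),
    undefined,         // reGetAccessToken not used in this sketch
    true               // isGetInfo
  );

  // Then ask the service for a download URL for the same object.
  const url = await svc.getUrlToDownload("my-bucket", "models/site.ifc", accessToken);
  return { info, url };
}
```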
package/dist/ServiceIntegration.js ADDED
@@ -0,0 +1,506 @@
+ import { ApiStatus, ConfigFileRules, Path } from "./utils/Constants";
+ import { validString } from "./utils/Helper";
+ export class ServiceIntegration {
+ constructor() { }
+ async getUrlToDownload(bucketName, fileName, accessToken) {
+ try {
+ const url = `${Path.BaseURL}${Path.GetUrlDownload}`;
+ const headers = {
+ Authorization: `Bearer ${accessToken}`,
+ "Content-Type": "application/json",
+ "ngrok-skip-browser-warning": true,
+ };
+ let queryParams;
+ if (validString(bucketName)) {
+ queryParams = { ...queryParams, bucketName: bucketName };
+ }
+ if (validString(fileName)) {
+ queryParams = { ...queryParams, fileName: fileName };
+ }
+ const response = await fetch(`${url}?${new URLSearchParams(queryParams)}`, {
+ method: "GET",
+ headers: headers,
+ });
+ if (response.ok) {
+ const json = (await response.json());
+ if (json.statusCode === ApiStatus.Success) {
+ return json.data;
+ }
+ else {
+ console.error(`Fail to generate url download with message error: ${json.message}`);
+ }
+ }
+ else {
+ console.error(`Fail to generate url download with status ${response.statusText} and error: ${response.statusText}`);
+ }
+ return null;
+ }
+ catch (error) {
+ console.error(`Error when generate url download: ${error}`);
+ }
+ }
+ async fetchChunkFile(url, start, end) {
+ try {
+ const response = await fetch(url, {
+ method: "GET",
+ headers: {
+ Range: `bytes=${start}-${end}`,
+ },
+ });
+ if (!response.ok) {
+ const error = `Fail to fetch chunk file with status ${response.statusText} and error: ${await response.text()}`;
+ throw new Error(error);
+ }
+ return await response.arrayBuffer();
+ }
+ catch (error) {
+ console.error(`Error when download file: ${error}`);
+ throw error;
+ }
+ }
+ //#region Upload small file
+ async uploadSmallFile(bucketName, fileName, buffer,
+ // file: File,
+ accessToken, cancellationToken, onProgress, reGetAccessToken, isGetInfo, preSignUrl) {
+ if (!preSignUrl) {
+ const presignUrlResponse = await this.generatePresignedUrl(bucketName, fileName, accessToken, cancellationToken);
+ if (!presignUrlResponse) {
+ return new Promise((resolve, reject) => {
+ reject(`Fail to generate presigned url for file ${fileName}`);
+ });
+ }
+ preSignUrl = presignUrlResponse;
+ }
+ var responseUploadChild = await this.uploadChildFile(preSignUrl, buffer, cancellationToken);
+ if (!responseUploadChild) {
+ return new Promise((resolve, reject) => {
+ reject(`Fail to upload file ${fileName}`);
+ });
+ }
+ if (isGetInfo === true) {
+ const objectDetail = await this.getObjectDetail(bucketName, fileName, accessToken);
+ if (!objectDetail) {
+ return new Promise((resolve, reject) => {
+ reject(`Fail to get object detail for file ${fileName}`);
+ });
+ }
+ const fileModel = {
+ bucketName: bucketName,
+ fileName: fileName,
+ contentLength: buffer.length,
+ versionFile: objectDetail.versionId,
+ dateVersionFile: objectDetail.lastModified,
+ };
+ return new Promise((resolve, reject) => {
+ resolve(fileModel);
+ });
+ }
+ return new Promise((resolve, reject) => {
+ resolve({
+ bucketName: bucketName,
+ fileName: fileName,
+ contentLength: buffer.length,
+ versionFile: null,
+ dateVersionFile: null,
+ });
+ });
+ }
+ async generatePresignedUrl(bucketName, fileName, accessToken, cancellationToken) {
+ try {
+ const url = `${Path.BaseURL}${Path.GeneratePresignedUrl}`;
+ const headers = {
+ Authorization: `Bearer ${accessToken}`,
+ "Content-Type": "application/json",
+ "ngrok-skip-browser-warning": true,
+ };
+ let queryParams = {
+ setPermission: false,
+ };
+ if (validString(bucketName)) {
+ queryParams = { ...queryParams, bucketName: bucketName };
+ }
+ if (validString(fileName)) {
+ queryParams = { ...queryParams, objectName: fileName };
+ }
+ const response = await fetch(`${url}?${new URLSearchParams(queryParams)}`, {
+ method: "GET",
+ headers: headers,
+ signal: cancellationToken?.signal
+ });
+ if (response.ok) {
+ const json = (await response.json());
+ if (json.statusCode === ApiStatus.Success) {
+ return json.data;
+ }
+ else {
+ console.error(`Fail to generate presigned urls with message error: ${json.message}`);
+ }
+ }
+ else {
+ console.error(`Fail to generate presigned urls with status ${response.statusText} and error: ${response.statusText}`);
+ }
+ return null;
+ }
+ catch (error) {
+ console.error(`Error when generate presigned urls: ${error}`);
+ }
+ }
+ async uploadChildFile(presignUrlResponse, buffer, cancellationToken) {
+ try {
+ const response = await fetch(presignUrlResponse.url, {
+ method: "PUT",
+ headers: presignUrlResponse.headers,
+ body: buffer,
+ signal: cancellationToken?.signal
+ });
+ if (response.status === ApiStatus.Success) {
+ var eTag = response.headers.get("ETag");
+ if (eTag)
+ return eTag.replace(/^"|"$/g, "");
+ }
+ else {
+ console.error(`Fail to upload child file with status ${response.statusText} and error: ${response.statusText}`);
+ }
+ return null;
+ }
+ catch (error) {
+ console.error(`Error when upload child file: ${error}`);
+ }
+ }
+ //#endregion
+ //#region Upload large file
+ async uploadLargeFile(bucketName, fileName, buffer, accessToken, cancellationToken, onProgress, reGetAccessToken, initUpload, isGetInfo) {
+ await this.validateFileSize(fileName, buffer.length);
+ // init multi upload
+ if (!initUpload) {
+ initUpload = await this.initMultiPartUpload(bucketName, fileName, accessToken, false, cancellationToken);
+ }
+ if (!initUpload) {
+ return new Promise((resolve, reject) => {
+ reject(`Fail to initiate multipart upload for file ${fileName}`);
+ });
+ }
+ var numberOfChunks = this.calculateNumberOfChunks(buffer.length);
+ const urls = await this.generatePresignedUrls(bucketName, fileName, initUpload, numberOfChunks, accessToken, cancellationToken);
+ if (!urls) {
+ return new Promise((resolve, reject) => {
+ reject(`Fail to generate presigned urls for file ${fileName}`);
+ });
+ }
+ var chunksUploaded = 0;
+ var start = 0;
+ const eTags = {};
+ while (chunksUploaded < numberOfChunks) {
+ this.throwIfCancellationRequested(cancellationToken, fileName);
+ var end = Math.min(start + ConfigFileRules.ChunkSize, buffer.length);
+ // var fileChunk: ArrayBuffer = await this.readFileBytes(file, start, end);
+ const sliced = buffer.subarray(start, end); // take the sub-slice of the buffer
+ const fileChunk = sliced.buffer.slice(sliced.byteOffset, sliced.byteOffset + sliced.byteLength);
+ this.throwIfCancellationRequested(cancellationToken, fileName);
+ var url = urls[chunksUploaded];
+ var responseUploadChild = await this.uploadChildFile({ url: url, headers: initUpload.headers }, fileChunk);
+ if (!responseUploadChild) {
+ return new Promise((resolve, reject) => {
+ reject(`Fail to upload chunk ${chunksUploaded} of file ${fileName}`);
+ });
+ }
+ Object.defineProperty(eTags, chunksUploaded + 1, {
+ value: responseUploadChild,
+ enumerable: true,
+ });
+ chunksUploaded++;
+ start = end;
+ var percentCompleted = (chunksUploaded / numberOfChunks) * 100;
+ onProgress?.(percentCompleted);
+ // console.log(`${requestId} Number of chunks uploaded : ${chunksUploaded}`);
+ }
+ var completeResponse = await this.completeMultipartUpload(bucketName, fileName, initUpload.uploadId, eTags, accessToken, cancellationToken);
+ onProgress?.(100);
+ if (!completeResponse) {
+ return new Promise((resolve, reject) => {
+ reject(`Fail to complete multipart upload for file ${fileName}`);
+ });
+ }
+ if (isGetInfo === false) {
+ return new Promise((resolve, reject) => {
+ resolve(fileName);
+ });
+ }
+ else {
+ const objectDetail = await this.getObjectDetail(bucketName, fileName, accessToken);
+ if (!objectDetail) {
+ return new Promise((resolve, reject) => {
+ reject(`Fail to get object detail for file ${fileName}`);
+ });
+ }
+ const fileModel = {
+ bucketName: bucketName,
+ fileName: fileName,
+ contentLength: buffer.length,
+ versionFile: objectDetail.versionId,
+ dateVersionFile: objectDetail.lastModified,
+ };
+ return new Promise((resolve, reject) => {
+ resolve(fileModel);
+ });
+ }
+ }
+ async validateFileSize(fileName, fileSize) {
+ var sizeAllowed = await this.isFileSizeAllowed(fileSize);
+ if (!sizeAllowed) {
+ console.error(`File size of ${fileName} too big to upload. Currently max file size allowed is ${Number(ConfigFileRules.MaxChunkCountAllowed) *
+ Number(ConfigFileRules.ChunkSize)} bytes`);
+ // throw new OssApiError(`${requestId} File size too big to upload. Currently max file size allowed is ${Number(ConfigFileRules.MaxChunkCountAllowed) * Number(Constants.ChunkSize)} bytes`);
+ }
+ }
+ async isFileSizeAllowed(fileSize) {
+ const numberOfChunks = this.calculateNumberOfChunks(fileSize);
+ if (numberOfChunks > ConfigFileRules.MaxChunkCountAllowed) {
+ return false;
+ }
+ return true;
+ }
+ calculateNumberOfChunks(fileSize) {
+ if (fileSize == 0) {
+ return 1;
+ }
+ var numberOfChunks = Math.trunc(fileSize / ConfigFileRules.ChunkSize);
+ if (fileSize % ConfigFileRules.ChunkSize != 0) {
+ numberOfChunks++;
+ }
+ return numberOfChunks;
+ }
+ async getObjectDetail(bucketName, fileName, accessToken) {
+ try {
+ const url = `${Path.BaseURL}${Path.GetObjectDetail}`;
+ const headers = {
+ Authorization: `Bearer ${accessToken}`,
+ "Content-Type": "application/json",
+ "ngrok-skip-browser-warning": true,
+ };
+ let queryParams;
+ if (validString(bucketName)) {
+ queryParams = { ...queryParams, bucketName: bucketName };
+ }
+ if (validString(fileName)) {
+ queryParams = { ...queryParams, objectName: fileName };
+ }
+ const response = await fetch(`${url}?${new URLSearchParams(queryParams)}`, {
+ method: "GET",
+ headers: headers,
+ });
+ if (response.ok) {
+ const json = (await response.json());
+ if (json.statusCode === ApiStatus.Success) {
+ return json.data;
+ }
+ else {
+ console.error(`Fail to generate presigned urls with message error: ${json.message}`);
+ }
+ }
+ else {
+ console.error(`Fail to generate presigned urls with status ${response.statusText} and error: ${response.statusText}`);
+ }
+ }
+ catch (error) {
+ console.error(`Error when get file info: ${error}`);
+ }
+ return null;
+ }
+ async initMultiPartUpload(bucketName, fileName, accessToken, autoCreateBucket, cancellationToken) {
+ try {
+ const url = `${Path.BaseURL}${Path.InitiateMultipartUpload}`;
+ const headers = {
+ Authorization: `Bearer ${accessToken}`,
+ "Content-Type": "application/json",
+ "ngrok-skip-browser-warning": true,
+ };
+ let queryParams;
+ if (validString(bucketName)) {
+ queryParams = { ...queryParams, bucketName: bucketName };
+ }
+ if (validString(fileName)) {
+ queryParams = { ...queryParams, objectName: fileName };
+ }
+ if (autoCreateBucket === true) {
+ queryParams = { ...queryParams, autoCreateBucket: autoCreateBucket };
+ }
+ queryParams = { ...queryParams, setPermission: false };
+ const response = await fetch(`${url}?${new URLSearchParams(queryParams)}`, {
+ method: "GET",
+ headers: headers,
+ });
+ if (response.ok) {
+ const json = (await response.json());
+ if (json.statusCode === ApiStatus.Success) {
+ return json.data;
+ }
+ else {
+ console.error(`Fail to initiate multipart upload with message error: ${json.message}`);
+ }
+ }
+ else {
+ console.error(`Fail to initiate multipart upload with status ${response.statusText} and error: ${response.statusText}`);
+ }
+ }
+ catch (error) {
+ console.error(`Error when initiate multipart upload: ${error}`);
+ }
+ return undefined;
+ }
+ async generatePresignedUrls(bucketName, fileName, initUpload, totalPart, accessToken, cancellationToken) {
+ try {
+ const url = `${Path.BaseURL}${Path.GeneratePresignedUrls}`;
+ const headers = initUpload.headers;
+ let queryParams;
+ if (validString(bucketName)) {
+ queryParams = { ...queryParams, bucketName: bucketName };
+ }
+ if (validString(fileName)) {
+ queryParams = { ...queryParams, objectName: fileName };
+ }
+ if (validString(initUpload.uploadId)) {
+ queryParams = { ...queryParams, uploadId: initUpload.uploadId };
+ }
+ if (totalPart > 0) {
+ queryParams = { ...queryParams, totalPart: totalPart };
+ }
+ const newHeaders = {
+ Authorization: `Bearer ${accessToken}`,
+ "ngrok-skip-browser-warning": true,
+ };
+ const response = await fetch(`${url}?${new URLSearchParams(queryParams)}`, {
+ method: "GET",
+ headers: { ...headers, ...newHeaders },
+ signal: cancellationToken?.signal
+ });
+ if (response.ok) {
+ const json = (await response.json());
+ if (json.statusCode === ApiStatus.Success) {
+ return json.data;
+ }
+ else {
+ console.error(`Fail to generate presigned urls with message error: ${json.message}`);
+ }
+ }
+ else {
+ console.error(`Fail to generate presigned urls with status ${response.statusText} and error: ${response.statusText}`);
+ }
+ return null;
+ }
+ catch (error) {
+ console.error(`Error when generate presigned urls: ${error}`);
+ }
+ }
+ throwIfCancellationRequested(cancellationToken, fileName) {
+ if (cancellationToken.signal.aborted) {
+ console.error(`${fileName} Cancellation requested.`);
+ cancellationToken.signal.throwIfAborted();
+ }
+ }
+ // public readFileBytes(file: Buffer, start: number, end: number): Buffer {
+ // const fileReader = file.subarray(start, end);
+ // return fileReader;
+ // }
+ readFileBytes(file, start, end) {
+ return new Promise((resolve, reject) => {
+ const blob = file.slice(start, end); // create a blob from the portion of the file to read
+ const reader = new FileReader();
+ reader.onload = () => {
+ if (reader.result instanceof ArrayBuffer) {
+ resolve(reader.result);
+ }
+ else {
+ reject(new Error("Failed to read as ArrayBuffer"));
+ }
+ };
+ reader.onerror = () => reject(reader.error);
+ reader.readAsArrayBuffer(blob); // read the blob as an ArrayBuffer
+ });
+ }
+ async completeMultipartUpload(bucketName, fileName, uploadId, eTags, accessToken, cancellationToken) {
+ try {
+ const url = `${Path.BaseURL}${Path.CompleteMultipartUpload}`;
+ const headers = {
+ Authorization: `Bearer ${accessToken}`,
+ "Content-Type": "application/json",
+ "ngrok-skip-browser-warning": true,
+ };
+ let body;
+ if (validString(bucketName)) {
+ body = { ...body, bucketName: bucketName };
+ }
+ if (validString(fileName)) {
+ body = { ...body, objectName: fileName };
+ }
+ if (validString(uploadId)) {
+ body = { ...body, uploadId: uploadId };
+ }
+ if (eTags) {
+ body = { ...body, eTags: eTags };
+ }
+ const bodyString = JSON.stringify(body);
+ const response = await fetch(url, {
+ method: "POST",
+ headers: headers,
+ body: bodyString,
+ signal: cancellationToken?.signal
+ });
+ if (response.ok) {
+ const json = (await response.json());
+ if (json.statusCode === ApiStatus.Success) {
+ return json.data;
+ }
+ else {
+ console.error(`Fail to complete multiple part upload with message error: ${json.message}`);
+ }
+ }
+ else {
+ console.error(`Fail to complete multiple part upload with status ${response.statusText} and error: ${response.statusText}`);
+ }
+ return false;
+ }
+ catch (error) {
+ console.error(`Error when complete multiple part upload: ${error}`);
+ }
+ }
+ async generateMultiPresignedUrl(bucketName, filesName, accessToken, cancellationToken) {
+ try {
+ const url = `${Path.BaseURL}${Path.GenerateMultiPresignedURL}`;
+ // const url = `https://localhost:7193/api/Object/GenerateMultiPresignedURL`;
+ const headers = {
+ Authorization: `Bearer ${accessToken}`,
+ "Content-Type": "application/json",
+ "ngrok-skip-browser-warning": true,
+ };
+ const body = {
+ bucketName: bucketName,
+ objectNames: filesName,
+ setPermission: false
+ };
+ const bodyString = JSON.stringify(body);
+ const response = await fetch(url, {
+ method: "POST",
+ headers: headers,
+ body: bodyString,
+ signal: cancellationToken?.signal
+ });
+ if (response.ok) {
+ const json = (await response.json());
+ if (json.statusCode === ApiStatus.Success) {
+ return json.data;
+ }
+ else {
+ console.error(`Fail to generate multiple presigned url with message error: ${json.message}`);
+ }
+ }
+ else {
+ console.error(`Fail to generate multiple presigned url with status ${response.statusText} and error: ${response.statusText}`);
+ }
+ return null;
+ }
+ catch (error) {
+ console.error(`Error when generate multiple presigned url: ${error}`);
+ }
+ }
+ }
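The implementation above drives the multipart path by slicing the buffer into ConfigFileRules.ChunkSize parts, PUTting each part to its presigned URL, and finishing with completeMultipartUpload. A hedged sketch of calling it, with the same placeholder names as before; cancellation goes through the AbortController that is passed in.

```ts
const svc = new ServiceIntegration();
const controller = new AbortController();

async function uploadLarge(buffer: Buffer, accessToken: string) {
  // calculateNumberOfChunks: an empty buffer still counts as one chunk;
  // otherwise floor(size / ChunkSize) plus one extra chunk for any remainder.
  const parts = svc.calculateNumberOfChunks(buffer.length);
  console.log(`uploading in ${parts} part(s)`);

  return svc.uploadLargeFile(
    "my-bucket",              // placeholder bucket
    "models/large-model.rvt", // placeholder object name
    buffer,
    accessToken,
    controller,
    (pct) => console.log(`progress ${pct.toFixed(1)}%`),
    undefined, // reGetAccessToken
    undefined, // initUpload: let the SDK call initMultiPartUpload itself
    true       // isGetInfo: return versionId/lastModified once complete
  );
}

// controller.abort() cancels in-flight requests and makes
// throwIfCancellationRequested throw before the next chunk is sent.
```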
package/dist/api/auth.d.ts ADDED
@@ -0,0 +1,3 @@
+ import { BaseReponseModel, Token } from "../models/ResponseModel";
+ import { Scope } from "../utils/Enum";
+ export declare const GetToken: (clientId: string, clientSecret: string, scopes: Scope[]) => Promise<BaseReponseModel<Token>>;
package/dist/api/auth.js ADDED
@@ -0,0 +1,10 @@
+ import { Path, PathAuth } from "../utils/Constants";
+ import { baseFetch } from "./translate";
+ export const GetToken = async (clientId, clientSecret, scopes) => {
+ const request = {
+ clientId,
+ clientSecret,
+ scopes: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]
+ };
+ return baseFetch(`${Path.BaseURL}${PathAuth.GetToken}`, "POST", "", request);
+ };
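A hedged sketch of obtaining a token with the helper above; clientId and clientSecret are placeholders, and note that this build substitutes the fixed scope list 0-12 regardless of the scopes argument.

```ts
// Paths are relative to the package source; adjust for your own project.
import { GetToken } from "./api/auth";
import { ApiStatus } from "./utils/Constants";

async function authenticate() {
  // Placeholder credentials -- use the ones issued for your client.
  const res = await GetToken("my-client-id", "my-client-secret", []);
  if (res.statusCode !== ApiStatus.Success) {
    throw new Error(`Token request failed: ${res.message}`);
  }
  // res.data is the Token payload declared in ResponseModel (fields not shown in this diff).
  return res.data;
}
```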
package/dist/api/compress.d.ts ADDED
@@ -0,0 +1,4 @@
+ import { BaseReponseModel, CompressionResponse, CompressStatus } from "../models/ResponseModel";
+ export declare const CompressFolder: (accessToken: string, bucketName: string, rootFolder?: string) => Promise<BaseReponseModel<CompressionResponse>>;
+ export declare const GetStatusCompress: (requestIds: string[], accessToken: string) => Promise<BaseReponseModel<CompressStatus>>;
+ export declare const CancelDownload: (requestId: string, accessToken: string) => Promise<BaseReponseModel<boolean>>;
package/dist/api/compress.js ADDED
@@ -0,0 +1,16 @@
+ import { Path, PathManagement } from "../utils/Constants";
+ import { baseFetch } from "./translate";
+ export const CompressFolder = async (accessToken, bucketName, rootFolder) => {
+ return baseFetch(`${Path.BaseURL}${PathManagement.CompressFolder}`, "POST", accessToken, {
+ bucketName,
+ rootFolder
+ });
+ };
+ export const GetStatusCompress = async (requestIds, accessToken) => {
+ return baseFetch(`${Path.BaseURL}${PathManagement.GetStatusCompress}`, "POST", accessToken, {
+ requestIds
+ });
+ };
+ export const CancelDownload = async (requestId, accessToken) => {
+ return baseFetch(`${Path.BaseURL}${PathManagement.CancelDownload}`, "DELETE", accessToken, requestId);
+ };
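A hedged sketch of starting a folder compression and checking its status; the bucket and folder are placeholders, and the requestId field on CompressionResponse is an assumption (the real shape is defined in ResponseModel).

```ts
import { CompressFolder, GetStatusCompress, CancelDownload } from "./api/compress";
import { ApiStatus } from "./utils/Constants";

async function compressExports(accessToken: string) {
  const started = await CompressFolder(accessToken, "my-bucket", "exports/2024");
  if (started.statusCode !== ApiStatus.Success) {
    throw new Error(started.message);
  }

  // Assumption: CompressionResponse exposes the id of the compression job.
  const requestId = (started.data as { requestId: string }).requestId;

  // GetStatusCompress takes a list of request ids, so several jobs can be polled at once.
  const status = await GetStatusCompress([requestId], accessToken);
  console.log(status.statusCode, status.data);

  // A job that is no longer needed can be abandoned:
  // await CancelDownload(requestId, accessToken);
}
```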
package/dist/api/translate.d.ts ADDED
@@ -0,0 +1,8 @@
+ import { GetStatusTranslateRequest, TranslateRequest } from "../models/RequestModel";
+ import { BaseReponseModel, TranslateInfo, TranslateStatusInfo } from "../models/ResponseModel";
+ export declare const baseFetch: <T, T1>(url: string, method: string, accessToken: string, request?: T1) => Promise<BaseReponseModel<T>>;
+ export declare const TranslateFile: (accessToken: string, request: TranslateRequest) => Promise<BaseReponseModel<TranslateInfo>>;
+ export declare const GetStatusTranslate: (accessToken: string, request: string[]) => Promise<BaseReponseModel<TranslateStatusInfo[]>>;
+ export declare const GetStatusTranslateFile: (accessToken: string, request: GetStatusTranslateRequest[]) => Promise<BaseReponseModel<TranslateStatusInfo[]>>;
+ export declare const GetEPSGRegionCode: (accessToken: string) => Promise<BaseReponseModel<string[]>>;
+ export declare const GetFileTileSet: (accessToken: string, translateId: string) => Promise<BaseReponseModel<string>>;
package/dist/api/translate.js ADDED
@@ -0,0 +1,38 @@
+ import { ApiStatus, Path, PathManagement } from "../utils/Constants";
+ export const baseFetch = async (url, method, accessToken, request) => {
+ const response = await fetch(url, {
+ method: method,
+ headers: {
+ "Content-Type": "application/json",
+ Authorization: `Bearer ${accessToken}`,
+ 'ngrok-skip-browser-warning': true
+ },
+ body: JSON.stringify(request),
+ });
+ if (response.ok)
+ return response.json();
+ if (response.status === 401)
+ return {
+ statusCode: ApiStatus.Unauthorized,
+ message: "Unauthorized",
+ data: null,
+ };
+ return new Promise((resolve, reject) => {
+ reject(response.text);
+ });
+ };
+ export const TranslateFile = async (accessToken, request) => {
+ return baseFetch(`${Path.BaseURL}${PathManagement.TranslateFile}`, "POST", accessToken, request);
+ };
+ export const GetStatusTranslate = async (accessToken, request) => {
+ return baseFetch(`${Path.BaseURL}${PathManagement.GetStatusTranslate}`, "POST", accessToken, request);
+ };
+ export const GetStatusTranslateFile = async (accessToken, request) => {
+ return baseFetch(`${Path.BaseURL}${PathManagement.GetStatusTranslateFile}`, "POST", accessToken, request);
+ };
+ export const GetEPSGRegionCode = async (accessToken) => {
+ return baseFetch(`${Path.BaseURL}${PathManagement.GetEPSGRegionCode}`, "GET", accessToken);
+ };
+ export const GetFileTileSet = async (accessToken, translateId) => {
+ return baseFetch(`${Path.BaseURL}${PathManagement.GetFileTileSet}?translateId=${translateId}`, "GET", accessToken);
+ };
package/dist/index.d.ts ADDED
@@ -0,0 +1,4 @@
+ export * from "./DPUClient";
+ export * from "./models/RequestModel";
+ export * from "./models/ResponseModel";
+ export { dpuConfig } from "./utils/Config";
package/dist/index.js ADDED
@@ -0,0 +1,4 @@
+ export * from "./DPUClient";
+ export * from "./models/RequestModel";
+ export * from "./models/ResponseModel";
+ export { dpuConfig } from "./utils/Config";
package/dist/models/RequestModel.d.ts ADDED
@@ -0,0 +1,33 @@
+ export type FileDownloadInfo = {
+ fullName: string;
+ contentLength?: number;
+ };
+ export type CompressFolderRequest = {
+ bucketName: string;
+ rootFolder?: string;
+ };
+ export type GetStatusCompressionRequest = {
+ requestIds: string[];
+ };
+ export interface TranslateRequest {
+ modelType: string;
+ epsgCode: string;
+ translatePos?: number[];
+ }
+ export interface FileTranslateRequest extends TranslateRequest {
+ bucketName?: string;
+ fileName?: string;
+ versionId?: string;
+ url?: string;
+ }
+ export interface URNTranslateRequest extends TranslateRequest {
+ urn?: string;
+ accessToken: string | null;
+ clientId: string | null;
+ clientSecret: string | null;
+ }
+ export interface GetStatusTranslateRequest {
+ bucketName: string;
+ fileName: string;
+ versionId: string;
+ }
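A hedged sketch that ties the request shapes above to the translate endpoints earlier in the diff; every field value is a placeholder.

```ts
// Paths are relative to the package source; adjust for your own project.
import { TranslateFile, GetStatusTranslateFile } from "./api/translate";
import { FileTranslateRequest, GetStatusTranslateRequest } from "./models/RequestModel";
import { ApiStatus } from "./utils/Constants";

async function translateModel(accessToken: string) {
  const request: FileTranslateRequest = {
    bucketName: "my-bucket",     // placeholder
    fileName: "models/site.ifc", // placeholder
    modelType: "ifc",            // assumed value
    epsgCode: "4326",            // assumed value
  };

  const started = await TranslateFile(accessToken, request);
  if (started.statusCode !== ApiStatus.Success) {
    throw new Error(started.message);
  }

  // Translation status can also be queried per file using the shape declared above.
  const statusRequest: GetStatusTranslateRequest[] = [
    { bucketName: "my-bucket", fileName: "models/site.ifc", versionId: "latest" }, // versionId is a placeholder
  ];
  const status = await GetStatusTranslateFile(accessToken, statusRequest);
  console.log(status.data);
}
```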
package/dist/models/RequestModel.js ADDED
@@ -0,0 +1,2 @@
+ export {};
+ //#endregion