@commercetools-frontend-extensions/operations 3.1.0 → 3.1.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +14 -0
- package/README.md +15 -1
- package/dist/commercetools-frontend-extensions-operations.cjs.dev.js +96 -4
- package/dist/commercetools-frontend-extensions-operations.cjs.prod.js +96 -4
- package/dist/commercetools-frontend-extensions-operations.esm.js +95 -5
- package/dist/declarations/src/@api/file-import-jobs.d.ts +1 -1
- package/dist/declarations/src/@constants/import-limits.d.ts +1 -1
- package/dist/declarations/src/@hooks/use-file-import-job-upload.d.ts +1 -0
- package/dist/declarations/src/@hooks/use-file-upload.d.ts +2 -0
- package/dist/declarations/src/@types/file-import-job.d.ts +1 -0
- package/dist/declarations/src/@utils/file-import-job-helpers.d.ts +1 -0
- package/dist/declarations/src/@utils/index.d.ts +1 -0
- package/dist/declarations/src/@utils/poll-job-until-processing.d.ts +10 -0
- package/package.json +2 -2
- package/src/@api/file-import-jobs.ts +2 -0
- package/src/@constants/import-limits.ts +1 -1
- package/src/@hooks/use-file-import-job-upload.ts +2 -0
- package/src/@hooks/use-file-upload.ts +47 -3
- package/src/@types/file-import-job.ts +1 -0
- package/src/@utils/file-import-job-helpers.ts +4 -0
- package/src/@utils/index.ts +1 -0
- package/src/@utils/poll-job-until-processing.ts +72 -0
- package/src/@utils/poll-job-until-validated.ts +1 -1
package/CHANGELOG.md CHANGED

@@ -1,5 +1,19 @@
 # @commercetools-frontend-extensions/operations
 
+## 3.1.2
+
+### Patch Changes
+
+- [#1678](https://github.com/commercetools/merchant-center-operations/pull/1678) [`482dfc3`](https://github.com/commercetools/merchant-center-operations/commit/482dfc367ab236be9767896da0404f34afb33dd2) Thanks [@yassinejebli](https://github.com/yassinejebli)! - feat: add `skipValidationPolling` to skip import validation for the new flow
+
+## 3.1.1
+
+### Patch Changes
+
+- [#1673](https://github.com/commercetools/merchant-center-operations/pull/1673) [`fda98c9`](https://github.com/commercetools/merchant-center-operations/commit/fda98c935fc45adfffe1247e30bea8c13df2abc8) Thanks [@yassinejebli](https://github.com/yassinejebli)! - feat: add `autoProcess` option for file import job flow
+
+- [#1673](https://github.com/commercetools/merchant-center-operations/pull/1673) [`fda98c9`](https://github.com/commercetools/merchant-center-operations/commit/fda98c935fc45adfffe1247e30bea8c13df2abc8) Thanks [@yassinejebli](https://github.com/yassinejebli)! - feat: adjust the max file size for the new Import flow
+
 ## 3.1.0
 
 ### Minor Changes
package/README.md CHANGED

@@ -27,7 +27,7 @@ const { upload, isUploading, progress, validationProgress } = useFileUpload({
 - `projectKey` (required): The commercetools project key
 - `useJobBasedFlow` (optional): Whether to use the job-based flow. Default: `false`
 - `pollingInterval` (optional): Polling interval in ms for job-based flow. Default: `5000`
-- `maxPollingAttempts` (optional): Maximum polling attempts. Default: `
+- `maxPollingAttempts` (optional): Maximum polling attempts. Default: `200`
 
 **Returns:**
 - `upload` - Function to start the upload

@@ -38,6 +38,18 @@ const { upload, isUploading, progress, validationProgress } = useFileUpload({
 - `total`: Total number of unique resources in the file (counted by unique keys in the CSV)
 - `isValidating`: Whether validation is in progress
 
+**Upload config options:**
+- `file` (required): The file to upload
+- `resourceType` (required): The resource type
+- `settings` (optional): Import settings (format, decimal separator...)
+- `autoProcess` (optional): When `true`, the backend automatically starts processing after validation completes (job-based flow only). Default: `false`
+- `skipValidationPolling` (optional): When `true`, skips full validation polling and returns once the job reaches `processing` state. Useful for fire-and-forget uploads with `autoProcess: true` (job-based flow only). Default: `false`
+- `abortSignal` (optional): AbortSignal for cancellation
+- `onSuccess` (required): Callback when upload completes
+- `onError` (optional): Callback for errors
+- `onProgress` (optional): Callback for upload progress (0-100)
+- `onValidationProgress` (optional): Callback for validation progress (job-based flow only)
+
 **Usage:**
 ```typescript
 import { useFileUpload } from '@commercetools-frontend-extensions/operations'

@@ -61,6 +73,8 @@ await upload({
       unpublishAllChanges?: boolean
     }
   },
+  autoProcess?: boolean, // job-based flow only, default: false
+  skipValidationPolling?: boolean, // job-based flow only, default: false
   abortSignal: abortController.signal,
   onSuccess: (result) => {
     // result.containerKey - Import container key
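To make the new options concrete, here is a minimal sketch of the fire-and-forget combination described above: `autoProcess` plus `skipValidationPolling` in the job-based flow. The project key, resource type, and file contents below are hypothetical placeholders, not values from this diff.

```typescript
import { useFileUpload } from '@commercetools-frontend-extensions/operations'

// Inside a React component; all concrete values here are hypothetical.
const { upload, isUploading, progress } = useFileUpload({
  projectKey: 'my-project', // hypothetical
  useJobBasedFlow: true, // both new options only apply to the job-based flow
})

const csvFile = new File(['key,name\nsku-1,Shirt'], 'products.csv') // hypothetical

await upload({
  file: csvFile,
  resourceType: 'product', // hypothetical
  autoProcess: true, // backend starts processing once validation completes
  skipValidationPolling: true, // resolve as soon as the job reaches `processing`
  onSuccess: (result) => {
    // With skipValidationPolling the job is handed off to the backend;
    // result.jobId and result.containerKey identify it for later inspection.
    console.log('import handed off:', result.jobId, result.containerKey)
  },
  onError: (error) => console.error('upload failed', error),
})
```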
package/dist/commercetools-frontend-extensions-operations.cjs.dev.js CHANGED

@@ -482,6 +482,9 @@ function isImportJobRejected(job) {
 function isImportJobTerminal(job) {
   return isImportJobValidated(job) || isImportJobRejected(job);
 }
+function hasImportJobStartedProcessing(job) {
+  return !isImportJobQueued(job);
+}
 function shouldContinuePollingForImportValidation(job) {
   if (!job) return true;
   return isImportJobQueued(job) || isImportJobProcessing(job);

@@ -735,6 +738,52 @@ const decodeFileNameFromImportContainerKey = importContainerKey => {
   }
 };
 
+const pollJobUntilProcessing = async _ref => {
+  let projectKey = _ref.projectKey,
+    jobId = _ref.jobId,
+    importContainerKey = _ref.importContainerKey,
+    _ref$pollingInterval = _ref.pollingInterval,
+    pollingInterval = _ref$pollingInterval === void 0 ? 1000 : _ref$pollingInterval,
+    _ref$maxAttempts = _ref.maxAttempts,
+    maxAttempts = _ref$maxAttempts === void 0 ? 60 : _ref$maxAttempts,
+    abortSignal = _ref.abortSignal;
+  let attempts = 0;
+  while (attempts < maxAttempts) {
+    if (abortSignal?.aborted) {
+      throw new PollingAbortedError();
+    }
+    const job = await getFileImportJob({
+      projectKey,
+      importContainerKey,
+      jobId
+    });
+    if (abortSignal?.aborted) {
+      throw new PollingAbortedError();
+    }
+    if (hasImportJobStartedProcessing(job)) {
+      return job;
+    }
+    await new _Promise__default["default"]((resolve, reject) => {
+      let timeoutId;
+      const onAbort = () => {
+        clearTimeout(timeoutId);
+        reject(new PollingAbortedError());
+      };
+      if (abortSignal?.aborted) {
+        reject(new PollingAbortedError());
+        return;
+      }
+      timeoutId = _setTimeout__default["default"](() => {
+        abortSignal?.removeEventListener('abort', onAbort);
+        resolve();
+      }, pollingInterval);
+      abortSignal?.addEventListener('abort', onAbort);
+    });
+    attempts++;
+  }
+  throw new Error(`Job did not start processing after ${maxAttempts} attempts (${maxAttempts * pollingInterval / 1000}s)`);
+};
+
 const pollJobUntilValidated = async _ref => {
   let projectKey = _ref.projectKey,
     jobId = _ref.jobId,

@@ -742,7 +791,7 @@ const pollJobUntilValidated = async _ref => {
     _ref$pollingInterval = _ref.pollingInterval,
     pollingInterval = _ref$pollingInterval === void 0 ? 5000 : _ref$pollingInterval,
     _ref$maxAttempts = _ref.maxAttempts,
-    maxAttempts = _ref$maxAttempts === void 0 ?
+    maxAttempts = _ref$maxAttempts === void 0 ? 200 : _ref$maxAttempts,
     onJobUpdate = _ref.onJobUpdate,
     abortSignal = _ref.abortSignal;
   let attempts = 0;

@@ -941,6 +990,8 @@ function createFileImportJob(_ref) {
     resourceType = _ref.resourceType,
     importContainerKey = _ref.importContainerKey,
     payload = _ref.payload,
+    _ref$autoProcess = _ref.autoProcess,
+    autoProcess = _ref$autoProcess === void 0 ? false : _ref$autoProcess,
     onProgress = _ref.onProgress,
     abortSignal = _ref.abortSignal;
   const url = getFileImportJobsURL({

@@ -954,6 +1005,7 @@ function createFileImportJob(_ref) {
   formData.append('fileType', payload.fileType);
   formData.append('fileName', payload.fileName);
   formData.append('file', payload.file, payload.fileName);
+  formData.append('autoProcess', autoProcess ? 'true' : 'false');
   fetchUsingXhr({
     url,
     payload: formData,

@@ -1371,7 +1423,7 @@ const COLUMN_DELIMITERS = [DELIMITERS.COMMA, DELIMITERS.SEMICOLON, DELIMITERS.PI
 
 const FILE_IMPORT_JOB_POLLING_INTERVAL = 2000;
 
-const IMPORT_MAX_FILE_SIZE_MB =
+const IMPORT_MAX_FILE_SIZE_MB = 100;
 const IMPORT_MAX_ITEM_COUNT = 500_000;
 
 // =============================================================================

@@ -2819,6 +2871,7 @@ const useFileImportJobUpload = _ref => {
       fileName: config.file.name,
       file: config.file
     },
+    autoProcess: config.autoProcess,
     onProgress: uploadProgress => {
       setProgress(uploadProgress);
       config.onProgress?.(uploadProgress);

@@ -2968,7 +3021,7 @@ const useFileUpload = _ref2 => {
     _ref2$pollingInterval = _ref2.pollingInterval,
     pollingInterval = _ref2$pollingInterval === void 0 ? 5000 : _ref2$pollingInterval,
     _ref2$maxPollingAttem = _ref2.maxPollingAttempts,
-    maxPollingAttempts = _ref2$maxPollingAttem === void 0 ?
+    maxPollingAttempts = _ref2$maxPollingAttem === void 0 ? 200 : _ref2$maxPollingAttem;
   const _React$useState = React__default["default"].useState(false),
     _React$useState2 = _slicedToArray(_React$useState, 2),
     isUploading = _React$useState2[0],

@@ -3005,13 +3058,50 @@ const useFileUpload = _ref2 => {
     setProgress(0);
     try {
       if (useJobBasedFlow) {
-        const totalResources = await countUniqueResourcesInCsv(config.file);
+        const totalResources = config.skipValidationPolling ? 0 : await countUniqueResourcesInCsv(config.file);
         await jobUpload.upload({
           file: config.file,
           resourceType: config.resourceType,
           settings: config.settings,
+          autoProcess: config.autoProcess,
           abortSignal: config.abortSignal,
           onSuccess: async (jobId, containerKey) => {
+            if (config.skipValidationPolling) {
+              try {
+                const processingJob = await pollJobUntilProcessing({
+                  projectKey,
+                  jobId,
+                  importContainerKey: containerKey,
+                  abortSignal: config.abortSignal
+                });
+                const result = {
+                  containerKey,
+                  summary: {
+                    total: processingJob.summary?.total ?? 0,
+                    valid: processingJob.summary?.valid ?? 0,
+                    invalid: processingJob.summary?.invalid ?? 0,
+                    fieldsCount: processingJob.summary?.fieldsCount ?? 0,
+                    fields: processingJob.summary?.fields ?? [],
+                    ignoredFields: processingJob.summary?.ignoredFields ?? [],
+                    results: []
+                  },
+                  jobId,
+                  job: processingJob
+                };
+                setIsUploading(false);
+                config.onSuccess(result);
+              } catch (error) {
+                await safeDeleteContainer({
+                  projectKey,
+                  containerKey
+                });
+                resetState();
+                if (!(error instanceof PollingAbortedError)) {
+                  config.onError?.(error);
+                }
+              }
+              return;
+            }
             try {
               setValidationProgress({
                 processed: 0,

@@ -3251,6 +3341,7 @@ exports.getMissingRequiredFields = getMissingRequiredFields;
 exports.getProccessFileURL = getProccessFileURL;
 exports.getRowCount = getRowCount;
 exports.getValidatedColumns = getValidatedColumns;
+exports.hasImportJobStartedProcessing = hasImportJobStartedProcessing;
 exports.hasOwnProperty = hasOwnProperty;
 exports.hasRequiredFields = hasRequiredFields;
 exports.hasSingleKeyColumn = hasSingleKeyColumn;

@@ -3275,6 +3366,7 @@ exports.manualImports = manualImports;
 exports.mapFileUploadErrorsToUploadFileErrorRows = mapFileUploadErrorsToUploadFileErrorRows;
 exports.mapFormikErrors = mapFormikErrors;
 exports.mapUploadFileErrorsResponseToUploadFileErrorRows = mapUploadFileErrorsResponseToUploadFileErrorRows;
+exports.pollJobUntilProcessing = pollJobUntilProcessing;
 exports.pollJobUntilValidated = pollJobUntilValidated;
 exports.processFileImportJob = processFileImportJob;
 exports.processFileImportJobResponse = processFileImportJobResponse;
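One detail worth calling out in the compiled `pollJobUntilProcessing` above: the delay between attempts is an abortable sleep, so an abort interrupts the wait itself instead of only being checked between requests. The same pattern in isolation (a sketch; the package rejects with its own `PollingAbortedError` where this uses a plain `Error`):

```typescript
// Abortable sleep: resolves after `ms`, rejects as soon as `signal` aborts.
function abortableSleep(ms: number, signal?: AbortSignal): Promise<void> {
  return new Promise((resolve, reject) => {
    let timeoutId: ReturnType<typeof setTimeout>
    const onAbort = () => {
      clearTimeout(timeoutId) // cancel the pending wake-up
      reject(new Error('aborted'))
    }
    if (signal?.aborted) {
      reject(new Error('aborted')) // already aborted: fail without scheduling
      return
    }
    timeoutId = setTimeout(() => {
      signal?.removeEventListener('abort', onAbort) // avoid leaking the listener
      resolve()
    }, ms)
    signal?.addEventListener('abort', onAbort)
  })
}
```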
package/dist/commercetools-frontend-extensions-operations.cjs.prod.js CHANGED

The prod bundle receives the same changes as commercetools-frontend-extensions-operations.cjs.dev.js above; the hunks repeat the dev bundle verbatim and only their offsets differ (for example `@@ -2811,6 +2863,7 @@` instead of `@@ -2819,6 +2871,7 @@`).
package/dist/commercetools-frontend-extensions-operations.esm.js CHANGED

@@ -442,6 +442,9 @@ function isImportJobRejected(job) {
 function isImportJobTerminal(job) {
   return isImportJobValidated(job) || isImportJobRejected(job);
 }
+function hasImportJobStartedProcessing(job) {
+  return !isImportJobQueued(job);
+}
 function shouldContinuePollingForImportValidation(job) {
   if (!job) return true;
   return isImportJobQueued(job) || isImportJobProcessing(job);

@@ -695,6 +698,52 @@ const decodeFileNameFromImportContainerKey = importContainerKey => {
   }
 };
 
+const pollJobUntilProcessing = async _ref => {
+  let projectKey = _ref.projectKey,
+    jobId = _ref.jobId,
+    importContainerKey = _ref.importContainerKey,
+    _ref$pollingInterval = _ref.pollingInterval,
+    pollingInterval = _ref$pollingInterval === void 0 ? 1000 : _ref$pollingInterval,
+    _ref$maxAttempts = _ref.maxAttempts,
+    maxAttempts = _ref$maxAttempts === void 0 ? 60 : _ref$maxAttempts,
+    abortSignal = _ref.abortSignal;
+  let attempts = 0;
+  while (attempts < maxAttempts) {
+    if (abortSignal?.aborted) {
+      throw new PollingAbortedError();
+    }
+    const job = await getFileImportJob({
+      projectKey,
+      importContainerKey,
+      jobId
+    });
+    if (abortSignal?.aborted) {
+      throw new PollingAbortedError();
+    }
+    if (hasImportJobStartedProcessing(job)) {
+      return job;
+    }
+    await new _Promise((resolve, reject) => {
+      let timeoutId;
+      const onAbort = () => {
+        clearTimeout(timeoutId);
+        reject(new PollingAbortedError());
+      };
+      if (abortSignal?.aborted) {
+        reject(new PollingAbortedError());
+        return;
+      }
+      timeoutId = _setTimeout(() => {
+        abortSignal?.removeEventListener('abort', onAbort);
+        resolve();
+      }, pollingInterval);
+      abortSignal?.addEventListener('abort', onAbort);
+    });
+    attempts++;
+  }
+  throw new Error(`Job did not start processing after ${maxAttempts} attempts (${maxAttempts * pollingInterval / 1000}s)`);
+};
+
 const pollJobUntilValidated = async _ref => {
   let projectKey = _ref.projectKey,
     jobId = _ref.jobId,

@@ -702,7 +751,7 @@ const pollJobUntilValidated = async _ref => {
     _ref$pollingInterval = _ref.pollingInterval,
     pollingInterval = _ref$pollingInterval === void 0 ? 5000 : _ref$pollingInterval,
     _ref$maxAttempts = _ref.maxAttempts,
-    maxAttempts = _ref$maxAttempts === void 0 ?
+    maxAttempts = _ref$maxAttempts === void 0 ? 200 : _ref$maxAttempts,
     onJobUpdate = _ref.onJobUpdate,
     abortSignal = _ref.abortSignal;
   let attempts = 0;

@@ -901,6 +950,8 @@ function createFileImportJob(_ref) {
     resourceType = _ref.resourceType,
     importContainerKey = _ref.importContainerKey,
     payload = _ref.payload,
+    _ref$autoProcess = _ref.autoProcess,
+    autoProcess = _ref$autoProcess === void 0 ? false : _ref$autoProcess,
     onProgress = _ref.onProgress,
     abortSignal = _ref.abortSignal;
   const url = getFileImportJobsURL({

@@ -914,6 +965,7 @@ function createFileImportJob(_ref) {
   formData.append('fileType', payload.fileType);
   formData.append('fileName', payload.fileName);
   formData.append('file', payload.file, payload.fileName);
+  formData.append('autoProcess', autoProcess ? 'true' : 'false');
   fetchUsingXhr({
     url,
     payload: formData,

@@ -1331,7 +1383,7 @@ const COLUMN_DELIMITERS = [DELIMITERS.COMMA, DELIMITERS.SEMICOLON, DELIMITERS.PI
 
 const FILE_IMPORT_JOB_POLLING_INTERVAL = 2000;
 
-const IMPORT_MAX_FILE_SIZE_MB =
+const IMPORT_MAX_FILE_SIZE_MB = 100;
 const IMPORT_MAX_ITEM_COUNT = 500_000;
 
 // =============================================================================

@@ -2779,6 +2831,7 @@ const useFileImportJobUpload = _ref => {
       fileName: config.file.name,
       file: config.file
     },
+    autoProcess: config.autoProcess,
     onProgress: uploadProgress => {
       setProgress(uploadProgress);
       config.onProgress?.(uploadProgress);

@@ -2928,7 +2981,7 @@ const useFileUpload = _ref2 => {
     _ref2$pollingInterval = _ref2.pollingInterval,
     pollingInterval = _ref2$pollingInterval === void 0 ? 5000 : _ref2$pollingInterval,
     _ref2$maxPollingAttem = _ref2.maxPollingAttempts,
-    maxPollingAttempts = _ref2$maxPollingAttem === void 0 ?
+    maxPollingAttempts = _ref2$maxPollingAttem === void 0 ? 200 : _ref2$maxPollingAttem;
   const _React$useState = React.useState(false),
     _React$useState2 = _slicedToArray(_React$useState, 2),
     isUploading = _React$useState2[0],

@@ -2965,13 +3018,50 @@ const useFileUpload = _ref2 => {
     setProgress(0);
     try {
       if (useJobBasedFlow) {
-        const totalResources = await countUniqueResourcesInCsv(config.file);
+        const totalResources = config.skipValidationPolling ? 0 : await countUniqueResourcesInCsv(config.file);
         await jobUpload.upload({
           file: config.file,
           resourceType: config.resourceType,
           settings: config.settings,
+          autoProcess: config.autoProcess,
           abortSignal: config.abortSignal,
           onSuccess: async (jobId, containerKey) => {
+            if (config.skipValidationPolling) {
+              try {
+                const processingJob = await pollJobUntilProcessing({
+                  projectKey,
+                  jobId,
+                  importContainerKey: containerKey,
+                  abortSignal: config.abortSignal
+                });
+                const result = {
+                  containerKey,
+                  summary: {
+                    total: processingJob.summary?.total ?? 0,
+                    valid: processingJob.summary?.valid ?? 0,
+                    invalid: processingJob.summary?.invalid ?? 0,
+                    fieldsCount: processingJob.summary?.fieldsCount ?? 0,
+                    fields: processingJob.summary?.fields ?? [],
+                    ignoredFields: processingJob.summary?.ignoredFields ?? [],
+                    results: []
+                  },
+                  jobId,
+                  job: processingJob
+                };
+                setIsUploading(false);
+                config.onSuccess(result);
+              } catch (error) {
+                await safeDeleteContainer({
+                  projectKey,
+                  containerKey
+                });
+                resetState();
+                if (!(error instanceof PollingAbortedError)) {
+                  config.onError?.(error);
+                }
+              }
+              return;
+            }
             try {
               setValidationProgress({
                 processed: 0,

@@ -3102,4 +3192,4 @@ const useFileUpload = _ref2 => {
   };
 };
 
-export { ActiveDragDropArea, COLUMN_DELIMITERS, CT_API_DOCS_URL, DELIMITERS, DisabledDropArea, DropAreaWrapper, EnabledDropArea, FILE_IMPORT_JOB_POLLING_INTERVAL, FileDropArea, FileDroppedArea, FileIcon, HttpError, IMPORT_LEGACY_MAX_FILE_SIZE_MB, IMPORT_LEGACY_MAX_ROW_COUNT, IMPORT_MAX_FILE_SIZE_MB, IMPORT_MAX_ITEM_COUNT, IMPORT_TAG_KEYS, IMPORT_TAG_VALUES, ImportStates, InfoBox, InvalidResponseError, LockIcon, NoResourcesToExportError, PollingAbortedError, ProjectKeyNotAvailableError, QueryPredicateError, RESOURCE_TYPE_DOCUMENTATION_LINKS, RESOURCE_TYPE_TEMPLATE_DOWNLOAD_LINKS, TAG_KEY_SOURCE_FILE_UPLOAD, UnexpectedColumnError, UnexpectedOperationStateError, UnexpectedResourceTypeError, UploadSeparator, UploadSettings, UploadingModal, allAutomatedImportOperations, allAutomatedImportOperationsResponse, allFileUploadImportOperations, allFileUploadImportOperationsResponse, appendCsvOrJsonExtensionIfAbsent, assertCancelContainerResponse, assertExportOperationsDownloadFileResponse, assertFileImportJob, assertFileImportJobRecordsResponse, assertFileUploadResponse, assertImportContainer, assertImportContainerPagedResponse, assertImportOperationPagedResponse, assertImportSummary, assertListFileImportJobsResponse, assertPaginatedExportOperationResponse, assertProcessFileImportJobResponse, assertProcessFileResponse, assertResourceType, automatedImportContainerKey, automatedImports, cancelImportContainerByKey, checkIfFileUploadImport, convertFileSizeToKB, countJsonFileItems, countUniqueResourcesInCsv, createFileImportJob, createImportContainerForFileUpload, decodeFileNameFromImportContainerKey, deleteFileImportJob, deleteImportContainer, dropAreaStyles, encodeFileNameWithTimestampToContainerKey, exportOperationsCompleted, exportOperationsProcessing, extractErrorDescriptionFromValidationMessage, fetchExportOperations, fetchImportContainerByKey, fetchImportContainerDetails, fetchImportContainers, fetchImportOperations, fetchImportSummaries, fetchImportSummary, fetchUsingXhr, fetcher, fileUploadImportContainerKey, fileUploadMissingKeysResponse, formatErrorCode, formatKeys, formatQueryString, getCreateImportContainerURL, getDeleteImportContainerURL, getExportOperationsURL, getFileImportJob, getFileImportJobByIdURL, getFileImportJobDeleteURL, getFileImportJobFileType, getFileImportJobProcessURL, getFileImportJobRecords, getFileImportJobRecordsURL, getFileImportJobsListURL, getFileImportJobsURL, getFileUploadErrorsCount, getFileUploadURL, getImportContainerByKeyURL, getImportContainerTasksURL, getImportContainersURL, getImportOperationsURL, getImportState, getImportSummaryURL, getMissingRequiredFields, getProccessFileURL, getRowCount, getValidatedColumns, hasOwnProperty, hasRequiredFields, hasSingleKeyColumn, importContainers, importStatesMap, importsSummaries, invalidFileImportJobRecordsResponse, invalidFileImportJobValidated, invalidFileUploadResponse, isAbortError, isError, isImportJobInitializing, isImportJobProcessing, isImportJobQueued, isImportJobReady, isImportJobRejected, isImportJobTerminal, isImportJobValidated, isResourceType, listFileImportJobs, manualImports, mapFileUploadErrorsToUploadFileErrorRows, mapFormikErrors, mapUploadFileErrorsResponseToUploadFileErrorRows, pollJobUntilValidated, processFileImportJob, processFileImportJobResponse, processUploadedFile, shouldContinuePollingForImportValidation, successfulAutomatedImportOperations, successfulAutomatedImportOperationsResponse, successfulFileUploadImportOperations, successfulFileUploadImportOperationsResponse, toBytes, toImportApiResourceType, uploadFileForImport, useFetchExportOperations, useFetchFileImportJob, useFetchFileImportJobRecords, useFetchImportContainerDetails, useFetchImportOperations, useFetchImportSummaries, useFileImportJobUpload, useFileUpload, useImportContainerUpload, validFileImportJobProcessing, validFileImportJobQueued, validFileImportJobRecordsResponse, validFileImportJobValidated, validFileUploadResponse, validProcessFileResponse, validateDelimiter };
+export { ActiveDragDropArea, COLUMN_DELIMITERS, CT_API_DOCS_URL, DELIMITERS, DisabledDropArea, DropAreaWrapper, EnabledDropArea, FILE_IMPORT_JOB_POLLING_INTERVAL, FileDropArea, FileDroppedArea, FileIcon, HttpError, IMPORT_LEGACY_MAX_FILE_SIZE_MB, IMPORT_LEGACY_MAX_ROW_COUNT, IMPORT_MAX_FILE_SIZE_MB, IMPORT_MAX_ITEM_COUNT, IMPORT_TAG_KEYS, IMPORT_TAG_VALUES, ImportStates, InfoBox, InvalidResponseError, LockIcon, NoResourcesToExportError, PollingAbortedError, ProjectKeyNotAvailableError, QueryPredicateError, RESOURCE_TYPE_DOCUMENTATION_LINKS, RESOURCE_TYPE_TEMPLATE_DOWNLOAD_LINKS, TAG_KEY_SOURCE_FILE_UPLOAD, UnexpectedColumnError, UnexpectedOperationStateError, UnexpectedResourceTypeError, UploadSeparator, UploadSettings, UploadingModal, allAutomatedImportOperations, allAutomatedImportOperationsResponse, allFileUploadImportOperations, allFileUploadImportOperationsResponse, appendCsvOrJsonExtensionIfAbsent, assertCancelContainerResponse, assertExportOperationsDownloadFileResponse, assertFileImportJob, assertFileImportJobRecordsResponse, assertFileUploadResponse, assertImportContainer, assertImportContainerPagedResponse, assertImportOperationPagedResponse, assertImportSummary, assertListFileImportJobsResponse, assertPaginatedExportOperationResponse, assertProcessFileImportJobResponse, assertProcessFileResponse, assertResourceType, automatedImportContainerKey, automatedImports, cancelImportContainerByKey, checkIfFileUploadImport, convertFileSizeToKB, countJsonFileItems, countUniqueResourcesInCsv, createFileImportJob, createImportContainerForFileUpload, decodeFileNameFromImportContainerKey, deleteFileImportJob, deleteImportContainer, dropAreaStyles, encodeFileNameWithTimestampToContainerKey, exportOperationsCompleted, exportOperationsProcessing, extractErrorDescriptionFromValidationMessage, fetchExportOperations, fetchImportContainerByKey, fetchImportContainerDetails, fetchImportContainers, fetchImportOperations, fetchImportSummaries, fetchImportSummary, fetchUsingXhr, fetcher, fileUploadImportContainerKey, fileUploadMissingKeysResponse, formatErrorCode, formatKeys, formatQueryString, getCreateImportContainerURL, getDeleteImportContainerURL, getExportOperationsURL, getFileImportJob, getFileImportJobByIdURL, getFileImportJobDeleteURL, getFileImportJobFileType, getFileImportJobProcessURL, getFileImportJobRecords, getFileImportJobRecordsURL, getFileImportJobsListURL, getFileImportJobsURL, getFileUploadErrorsCount, getFileUploadURL, getImportContainerByKeyURL, getImportContainerTasksURL, getImportContainersURL, getImportOperationsURL, getImportState, getImportSummaryURL, getMissingRequiredFields, getProccessFileURL, getRowCount, getValidatedColumns, hasImportJobStartedProcessing, hasOwnProperty, hasRequiredFields, hasSingleKeyColumn, importContainers, importStatesMap, importsSummaries, invalidFileImportJobRecordsResponse, invalidFileImportJobValidated, invalidFileUploadResponse, isAbortError, isError, isImportJobInitializing, isImportJobProcessing, isImportJobQueued, isImportJobReady, isImportJobRejected, isImportJobTerminal, isImportJobValidated, isResourceType, listFileImportJobs, manualImports, mapFileUploadErrorsToUploadFileErrorRows, mapFormikErrors, mapUploadFileErrorsResponseToUploadFileErrorRows, pollJobUntilProcessing, pollJobUntilValidated, processFileImportJob, processFileImportJobResponse, processUploadedFile, shouldContinuePollingForImportValidation, successfulAutomatedImportOperations, successfulAutomatedImportOperationsResponse, successfulFileUploadImportOperations, successfulFileUploadImportOperationsResponse, toBytes, toImportApiResourceType, uploadFileForImport, useFetchExportOperations, useFetchFileImportJob, useFetchFileImportJobRecords, useFetchImportContainerDetails, useFetchImportOperations, useFetchImportSummaries, useFileImportJobUpload, useFileUpload, useImportContainerUpload, validFileImportJobProcessing, validFileImportJobQueued, validFileImportJobRecordsResponse, validFileImportJobValidated, validFileUploadResponse, validProcessFileResponse, validateDelimiter };
package/dist/declarations/src/@api/file-import-jobs.d.ts CHANGED

@@ -1,5 +1,5 @@
 import type { FileImportJob, CreateFileImportJobParameters, GetFileImportJobParameters, GetFileImportJobRecordsParameters, ProcessFileImportJobParameters, ProcessFileImportJobResponse, DeleteFileImportJobParameters, ListFileImportJobsParameters, ListFileImportJobsResponse, FileImportJobRecordsResponse } from "../@types/index.js";
-export declare function createFileImportJob({ projectKey, resourceType, importContainerKey, payload, onProgress, abortSignal, }: CreateFileImportJobParameters): Promise<FileImportJob>;
+export declare function createFileImportJob({ projectKey, resourceType, importContainerKey, payload, autoProcess, onProgress, abortSignal, }: CreateFileImportJobParameters): Promise<FileImportJob>;
 export declare function getFileImportJob({ projectKey, importContainerKey, jobId, }: GetFileImportJobParameters): Promise<FileImportJob>;
 export declare function getFileImportJobRecords({ projectKey, importContainerKey, jobId, limit, offset, isValid, }: GetFileImportJobRecordsParameters): Promise<FileImportJobRecordsResponse>;
 export declare function processFileImportJob({ projectKey, resourceType, importContainerKey, jobId, action, }: ProcessFileImportJobParameters): Promise<ProcessFileImportJobResponse>;
package/dist/declarations/src/@constants/import-limits.d.ts CHANGED

@@ -1,4 +1,4 @@
-export declare const IMPORT_MAX_FILE_SIZE_MB =
+export declare const IMPORT_MAX_FILE_SIZE_MB = 100;
 export declare const IMPORT_MAX_ITEM_COUNT = 500000;
 /** @deprecated Use IMPORT_MAX_FILE_SIZE_MB instead. Remove after migration. */
 export declare const IMPORT_LEGACY_MAX_FILE_SIZE_MB = 35;
package/dist/declarations/src/@hooks/use-file-import-job-upload.d.ts CHANGED

@@ -4,6 +4,7 @@ export type UseFileImportJobUploadConfig = {
     file: File;
     resourceType: ResourceTypeId;
     settings?: ExtendedImportContainerDraft['settings'];
+    autoProcess?: boolean;
     onSuccess: (jobId: string, importContainerKey: string) => void;
     onError?: (error: unknown) => void;
     onProgress?: (progress: number) => void;
package/dist/declarations/src/@hooks/use-file-upload.d.ts CHANGED

@@ -9,6 +9,8 @@ export type FileUploadConfig = {
     file: File;
     resourceType: ResourceTypeId;
     settings?: ExtendedImportContainerDraft['settings'];
+    autoProcess?: boolean;
+    skipValidationPolling?: boolean;
     onSuccess: (result: FileUploadResult) => void;
     onError?: (error: unknown) => void;
     onProgress?: (progress: number) => void;
package/dist/declarations/src/@utils/file-import-job-helpers.d.ts CHANGED

@@ -9,4 +9,5 @@ export declare function isImportJobInitializing(job?: FileImportJob): boolean;
 export declare function isImportJobReady(job?: FileImportJob): boolean;
 export declare function isImportJobRejected(job?: FileImportJob): boolean;
 export declare function isImportJobTerminal(job?: FileImportJob): boolean;
+export declare function hasImportJobStartedProcessing(job?: FileImportJob): boolean;
 export declare function shouldContinuePollingForImportValidation(job?: FileImportJob): boolean;
package/dist/declarations/src/@utils/poll-job-until-processing.d.ts ADDED

@@ -0,0 +1,10 @@
+import type { FileImportJob } from "../@types/index.js";
+export type PollJobUntilProcessingConfig = {
+    projectKey: string;
+    jobId: string;
+    importContainerKey: string;
+    pollingInterval?: number;
+    maxAttempts?: number;
+    abortSignal?: AbortSignal;
+};
+export declare const pollJobUntilProcessing: ({ projectKey, jobId, importContainerKey, pollingInterval, maxAttempts, abortSignal, }: PollJobUntilProcessingConfig) => Promise<FileImportJob>;
package/package.json CHANGED

@@ -1,6 +1,6 @@
 {
   "name": "@commercetools-frontend-extensions/operations",
-  "version": "3.1.0",
+  "version": "3.1.2",
   "license": "Proprietary",
   "publishConfig": {
     "access": "public"

@@ -31,7 +31,7 @@
     "@emotion/styled": "11.14.1",
     "@testing-library/react": "16.1.0",
     "@types/jest": "29.5.14",
-    "@types/papaparse": "5.5.
+    "@types/papaparse": "5.5.2",
     "@types/pluralize": "0.0.33",
     "@types/react": "19.2.0",
     "msw": "1.3.5",
package/src/@api/file-import-jobs.ts CHANGED

@@ -33,6 +33,7 @@ export function createFileImportJob({
   resourceType,
   importContainerKey,
   payload,
+  autoProcess = false,
   onProgress,
   abortSignal,
 }: CreateFileImportJobParameters): Promise<FileImportJob> {

@@ -48,6 +49,7 @@ export function createFileImportJob({
   formData.append('fileType', payload.fileType)
   formData.append('fileName', payload.fileName)
   formData.append('file', payload.file, payload.fileName)
+  formData.append('autoProcess', autoProcess ? 'true' : 'false')
 
   fetchUsingXhr({
     url,
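For the wire-level effect of the change above, here is a sketch of the multipart body that `createFileImportJob` now assembles. The field names come from the diff; the file name and contents are hypothetical:

```typescript
// The flag travels as a plain string field next to the uploaded file.
const formData = new FormData()
formData.append('fileType', 'csv') // hypothetical value
formData.append('fileName', 'products.csv') // hypothetical value
formData.append('file', new Blob(['key,name\nsku-1,Shirt']), 'products.csv')
formData.append('autoProcess', 'true') // new in 3.1.1: serialized boolean
```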
package/src/@hooks/use-file-import-job-upload.ts CHANGED

@@ -17,6 +17,7 @@ export type UseFileImportJobUploadConfig = {
   file: File
   resourceType: ResourceTypeId
   settings?: ExtendedImportContainerDraft['settings']
+  autoProcess?: boolean
   onSuccess: (jobId: string, importContainerKey: string) => void
   onError?: (error: unknown) => void
   onProgress?: (progress: number) => void

@@ -64,6 +65,7 @@ export const useFileImportJobUpload = ({
       fileName: config.file.name,
       file: config.file,
     },
+    autoProcess: config.autoProcess,
     onProgress: (uploadProgress) => {
       setProgress(uploadProgress)
       config.onProgress?.(uploadProgress)
package/src/@hooks/use-file-upload.ts CHANGED

@@ -4,7 +4,11 @@ import { useImportContainerUpload } from './use-import-container-upload'
 import { useFileImportJobUpload } from './use-file-import-job-upload'
 import { deleteImportContainer } from '../@api'
 import { HttpError, PollingAbortedError } from '../@errors'
-import {
+import {
+  pollJobUntilValidated,
+  pollJobUntilProcessing,
+  countUniqueResourcesInCsv,
+} from '../@utils'
 import type {
   ExtendedImportContainerDraft,
   FileUploadResult,

@@ -21,6 +25,8 @@ export type FileUploadConfig = {
   file: File
   resourceType: ResourceTypeId
   settings?: ExtendedImportContainerDraft['settings']
+  autoProcess?: boolean
+  skipValidationPolling?: boolean
   onSuccess: (result: FileUploadResult) => void
   onError?: (error: unknown) => void
   onProgress?: (progress: number) => void

@@ -54,7 +60,7 @@ export const useFileUpload = ({
   projectKey,
   useJobBasedFlow = false,
   pollingInterval = 5000,
-  maxPollingAttempts =
+  maxPollingAttempts = 200,
 }: FileUploadOptions) => {
   const [isUploading, setIsUploading] = React.useState(false)
   const [progress, setProgress] = React.useState(0)

@@ -81,14 +87,52 @@ export const useFileUpload = ({
 
     try {
       if (useJobBasedFlow) {
-        const totalResources =
+        const totalResources = config.skipValidationPolling
+          ? 0
+          : await countUniqueResourcesInCsv(config.file)
 
         await jobUpload.upload({
           file: config.file,
           resourceType: config.resourceType,
           settings: config.settings,
+          autoProcess: config.autoProcess,
           abortSignal: config.abortSignal,
           onSuccess: async (jobId, containerKey) => {
+            if (config.skipValidationPolling) {
+              try {
+                const processingJob = await pollJobUntilProcessing({
+                  projectKey,
+                  jobId,
+                  importContainerKey: containerKey,
+                  abortSignal: config.abortSignal,
+                })
+
+                const result: FileUploadResult = {
+                  containerKey,
+                  summary: {
+                    total: processingJob.summary?.total ?? 0,
+                    valid: processingJob.summary?.valid ?? 0,
+                    invalid: processingJob.summary?.invalid ?? 0,
+                    fieldsCount: processingJob.summary?.fieldsCount ?? 0,
+                    fields: processingJob.summary?.fields ?? [],
+                    ignoredFields: processingJob.summary?.ignoredFields ?? [],
+                    results: [],
+                  },
+                  jobId,
+                  job: processingJob,
+                }
+                setIsUploading(false)
+                config.onSuccess(result)
+              } catch (error) {
+                await safeDeleteContainer({ projectKey, containerKey })
+                resetState()
+                if (!(error instanceof PollingAbortedError)) {
+                  config.onError?.(error)
+                }
+              }
+              return
+            }
+
             try {
               setValidationProgress({
                 processed: 0,
package/src/@utils/file-import-job-helpers.ts CHANGED

@@ -39,6 +39,10 @@ export function isImportJobTerminal(job?: FileImportJob): boolean {
   return isImportJobValidated(job) || isImportJobRejected(job)
 }
 
+export function hasImportJobStartedProcessing(job?: FileImportJob): boolean {
+  return !isImportJobQueued(job)
+}
+
 export function shouldContinuePollingForImportValidation(
   job?: FileImportJob
 ): boolean {
package/src/@utils/index.ts CHANGED

package/src/@utils/poll-job-until-processing.ts ADDED
@@ -0,0 +1,72 @@
+import { getFileImportJob } from '../@api'
+import { PollingAbortedError } from '../@errors'
+import type { FileImportJob } from '../@types'
+import { hasImportJobStartedProcessing } from './file-import-job-helpers'
+
+export type PollJobUntilProcessingConfig = {
+  projectKey: string
+  jobId: string
+  importContainerKey: string
+  pollingInterval?: number
+  maxAttempts?: number
+  abortSignal?: AbortSignal
+}
+
+export const pollJobUntilProcessing = async ({
+  projectKey,
+  jobId,
+  importContainerKey,
+  pollingInterval = 1000,
+  maxAttempts = 60,
+  abortSignal,
+}: PollJobUntilProcessingConfig): Promise<FileImportJob> => {
+  let attempts = 0
+
+  while (attempts < maxAttempts) {
+    if (abortSignal?.aborted) {
+      throw new PollingAbortedError()
+    }
+
+    const job = await getFileImportJob({
+      projectKey,
+      importContainerKey,
+      jobId,
+    })
+
+    if (abortSignal?.aborted) {
+      throw new PollingAbortedError()
+    }
+
+    if (hasImportJobStartedProcessing(job)) {
+      return job
+    }
+
+    await new Promise<void>((resolve, reject) => {
+      let timeoutId: ReturnType<typeof setTimeout>
+
+      const onAbort = () => {
+        clearTimeout(timeoutId)
+        reject(new PollingAbortedError())
+      }
+
+      if (abortSignal?.aborted) {
+        reject(new PollingAbortedError())
+        return
+      }
+
+      timeoutId = setTimeout(() => {
+        abortSignal?.removeEventListener('abort', onAbort)
+        resolve()
+      }, pollingInterval)
+
+      abortSignal?.addEventListener('abort', onAbort)
+    })
+    attempts++
+  }
+
+  throw new Error(
+    `Job did not start processing after ${maxAttempts} attempts (${
+      (maxAttempts * pollingInterval) / 1000
+    }s)`
+  )
+}
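Because `pollJobUntilProcessing` is exported from the package root, it can also be driven directly, outside `useFileUpload`. A minimal sketch with hypothetical identifiers, relying on the defaults above (1000 ms interval, 60 attempts):

```typescript
import {
  pollJobUntilProcessing,
  PollingAbortedError,
} from '@commercetools-frontend-extensions/operations'

const abortController = new AbortController()

try {
  // All identifiers below are hypothetical placeholders.
  const job = await pollJobUntilProcessing({
    projectKey: 'my-project',
    jobId: 'job-id',
    importContainerKey: 'container-key',
    abortSignal: abortController.signal,
  })
  // `job` is the first snapshot in which the job is no longer queued.
  console.log('job left the queue', job)
} catch (error) {
  if (error instanceof PollingAbortedError) {
    // the caller cancelled via abortController.abort()
  } else {
    throw error // timed out after ~60s, or a job fetch failed
  }
}
```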