@commercetools-frontend-extensions/operations 3.1.1 → 3.1.2

This diff shows the changes between publicly released versions of this package as they appear in the supported public registries, and is provided for informational purposes only.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,11 @@
  # @commercetools-frontend-extensions/operations

+ ## 3.1.2
+
+ ### Patch Changes
+
+ - [#1678](https://github.com/commercetools/merchant-center-operations/pull/1678) [`482dfc3`](https://github.com/commercetools/merchant-center-operations/commit/482dfc367ab236be9767896da0404f34afb33dd2) Thanks [@yassinejebli](https://github.com/yassinejebli)! - feat: add `skipValidationPolling` to skip import validation for the new flow
+
  ## 3.1.1

  ### Patch Changes
package/README.md CHANGED
@@ -43,6 +43,7 @@ const { upload, isUploading, progress, validationProgress } = useFileUpload({
  - `resourceType` (required): The resource type
  - `settings` (optional): Import settings (format, decimal separator...)
  - `autoProcess` (optional): When `true`, the backend automatically starts processing after validation completes (job-based flow only). Default: `false`
+ - `skipValidationPolling` (optional): When `true`, skips full validation polling and returns once the job reaches `processing` state. Useful for fire-and-forget uploads with `autoProcess: true` (job-based flow only). Default: `false`
  - `abortSignal` (optional): AbortSignal for cancellation
  - `onSuccess` (required): Callback when upload completes
  - `onError` (optional): Callback for errors
@@ -73,6 +74,7 @@ await upload({
  }
  },
  autoProcess?: boolean, // job-based flow only, default: false
+ skipValidationPolling?: boolean, // job-based flow only, default: false
  abortSignal: abortController.signal,
  onSuccess: (result) => {
  // result.containerKey - Import container key
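
For context, the README changes above document the new option; the following is a minimal, hypothetical usage sketch (not part of the published package) of a fire-and-forget upload through `useFileUpload`. The `file` variable, the `'products'` resource type, and the elided hook options are assumptions for illustration only.

```ts
import { useFileUpload } from '@commercetools-frontend-extensions/operations';

// Inside a React component; hook options (project key, etc.) are elided here.
const { upload, isUploading } = useFileUpload({ /* ...hook options per the README... */ });

const abortController = new AbortController();

async function startImport(file: File) {
  await upload({
    file,
    resourceType: 'products', // assumed example value
    autoProcess: true, // backend starts processing once validation completes
    skipValidationPolling: true, // resolve as soon as the job reaches `processing`
    abortSignal: abortController.signal,
    onSuccess: (result) => {
      // result.containerKey - Import container key
      console.log('Import started in container', result.containerKey);
    },
    onError: (error) => {
      console.error('Upload failed', error);
    },
  });
}
```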
@@ -482,6 +482,9 @@ function isImportJobRejected(job) {
  function isImportJobTerminal(job) {
  return isImportJobValidated(job) || isImportJobRejected(job);
  }
+ function hasImportJobStartedProcessing(job) {
+   return !isImportJobQueued(job);
+ }
  function shouldContinuePollingForImportValidation(job) {
  if (!job) return true;
  return isImportJobQueued(job) || isImportJobProcessing(job);
@@ -735,6 +738,52 @@ const decodeFileNameFromImportContainerKey = importContainerKey => {
  }
  };

+ const pollJobUntilProcessing = async _ref => {
+   let projectKey = _ref.projectKey,
+     jobId = _ref.jobId,
+     importContainerKey = _ref.importContainerKey,
+     _ref$pollingInterval = _ref.pollingInterval,
+     pollingInterval = _ref$pollingInterval === void 0 ? 1000 : _ref$pollingInterval,
+     _ref$maxAttempts = _ref.maxAttempts,
+     maxAttempts = _ref$maxAttempts === void 0 ? 60 : _ref$maxAttempts,
+     abortSignal = _ref.abortSignal;
+   let attempts = 0;
+   while (attempts < maxAttempts) {
+     if (abortSignal?.aborted) {
+       throw new PollingAbortedError();
+     }
+     const job = await getFileImportJob({
+       projectKey,
+       importContainerKey,
+       jobId
+     });
+     if (abortSignal?.aborted) {
+       throw new PollingAbortedError();
+     }
+     if (hasImportJobStartedProcessing(job)) {
+       return job;
+     }
+     await new _Promise__default["default"]((resolve, reject) => {
+       let timeoutId;
+       const onAbort = () => {
+         clearTimeout(timeoutId);
+         reject(new PollingAbortedError());
+       };
+       if (abortSignal?.aborted) {
+         reject(new PollingAbortedError());
+         return;
+       }
+       timeoutId = _setTimeout__default["default"](() => {
+         abortSignal?.removeEventListener('abort', onAbort);
+         resolve();
+       }, pollingInterval);
+       abortSignal?.addEventListener('abort', onAbort);
+     });
+     attempts++;
+   }
+   throw new Error(`Job did not start processing after ${maxAttempts} attempts (${maxAttempts * pollingInterval / 1000}s)`);
+ };
+
  const pollJobUntilValidated = async _ref => {
  let projectKey = _ref.projectKey,
  jobId = _ref.jobId,
@@ -3009,7 +3058,7 @@ const useFileUpload = _ref2 => {
  setProgress(0);
  try {
  if (useJobBasedFlow) {
- const totalResources = await countUniqueResourcesInCsv(config.file);
+ const totalResources = config.skipValidationPolling ? 0 : await countUniqueResourcesInCsv(config.file);
  await jobUpload.upload({
  file: config.file,
  resourceType: config.resourceType,
@@ -3017,6 +3066,42 @@ const useFileUpload = _ref2 => {
  autoProcess: config.autoProcess,
  abortSignal: config.abortSignal,
  onSuccess: async (jobId, containerKey) => {
+ if (config.skipValidationPolling) {
+   try {
+     const processingJob = await pollJobUntilProcessing({
+       projectKey,
+       jobId,
+       importContainerKey: containerKey,
+       abortSignal: config.abortSignal
+     });
+     const result = {
+       containerKey,
+       summary: {
+         total: processingJob.summary?.total ?? 0,
+         valid: processingJob.summary?.valid ?? 0,
+         invalid: processingJob.summary?.invalid ?? 0,
+         fieldsCount: processingJob.summary?.fieldsCount ?? 0,
+         fields: processingJob.summary?.fields ?? [],
+         ignoredFields: processingJob.summary?.ignoredFields ?? [],
+         results: []
+       },
+       jobId,
+       job: processingJob
+     };
+     setIsUploading(false);
+     config.onSuccess(result);
+   } catch (error) {
+     await safeDeleteContainer({
+       projectKey,
+       containerKey
+     });
+     resetState();
+     if (!(error instanceof PollingAbortedError)) {
+       config.onError?.(error);
+     }
+   }
+   return;
+ }
  try {
  setValidationProgress({
  processed: 0,
@@ -3256,6 +3341,7 @@ exports.getMissingRequiredFields = getMissingRequiredFields;
  exports.getProccessFileURL = getProccessFileURL;
  exports.getRowCount = getRowCount;
  exports.getValidatedColumns = getValidatedColumns;
+ exports.hasImportJobStartedProcessing = hasImportJobStartedProcessing;
  exports.hasOwnProperty = hasOwnProperty;
  exports.hasRequiredFields = hasRequiredFields;
  exports.hasSingleKeyColumn = hasSingleKeyColumn;
@@ -3280,6 +3366,7 @@ exports.manualImports = manualImports;
  exports.mapFileUploadErrorsToUploadFileErrorRows = mapFileUploadErrorsToUploadFileErrorRows;
  exports.mapFormikErrors = mapFormikErrors;
  exports.mapUploadFileErrorsResponseToUploadFileErrorRows = mapUploadFileErrorsResponseToUploadFileErrorRows;
+ exports.pollJobUntilProcessing = pollJobUntilProcessing;
  exports.pollJobUntilValidated = pollJobUntilValidated;
  exports.processFileImportJob = processFileImportJob;
  exports.processFileImportJobResponse = processFileImportJobResponse;
@@ -482,6 +482,9 @@ function isImportJobRejected(job) {
  function isImportJobTerminal(job) {
  return isImportJobValidated(job) || isImportJobRejected(job);
  }
+ function hasImportJobStartedProcessing(job) {
+   return !isImportJobQueued(job);
+ }
  function shouldContinuePollingForImportValidation(job) {
  if (!job) return true;
  return isImportJobQueued(job) || isImportJobProcessing(job);
@@ -735,6 +738,52 @@ const decodeFileNameFromImportContainerKey = importContainerKey => {
  }
  };

+ const pollJobUntilProcessing = async _ref => {
+   let projectKey = _ref.projectKey,
+     jobId = _ref.jobId,
+     importContainerKey = _ref.importContainerKey,
+     _ref$pollingInterval = _ref.pollingInterval,
+     pollingInterval = _ref$pollingInterval === void 0 ? 1000 : _ref$pollingInterval,
+     _ref$maxAttempts = _ref.maxAttempts,
+     maxAttempts = _ref$maxAttempts === void 0 ? 60 : _ref$maxAttempts,
+     abortSignal = _ref.abortSignal;
+   let attempts = 0;
+   while (attempts < maxAttempts) {
+     if (abortSignal?.aborted) {
+       throw new PollingAbortedError();
+     }
+     const job = await getFileImportJob({
+       projectKey,
+       importContainerKey,
+       jobId
+     });
+     if (abortSignal?.aborted) {
+       throw new PollingAbortedError();
+     }
+     if (hasImportJobStartedProcessing(job)) {
+       return job;
+     }
+     await new _Promise__default["default"]((resolve, reject) => {
+       let timeoutId;
+       const onAbort = () => {
+         clearTimeout(timeoutId);
+         reject(new PollingAbortedError());
+       };
+       if (abortSignal?.aborted) {
+         reject(new PollingAbortedError());
+         return;
+       }
+       timeoutId = _setTimeout__default["default"](() => {
+         abortSignal?.removeEventListener('abort', onAbort);
+         resolve();
+       }, pollingInterval);
+       abortSignal?.addEventListener('abort', onAbort);
+     });
+     attempts++;
+   }
+   throw new Error(`Job did not start processing after ${maxAttempts} attempts (${maxAttempts * pollingInterval / 1000}s)`);
+ };
+
  const pollJobUntilValidated = async _ref => {
  let projectKey = _ref.projectKey,
  jobId = _ref.jobId,
@@ -3001,7 +3050,7 @@ const useFileUpload = _ref2 => {
  setProgress(0);
  try {
  if (useJobBasedFlow) {
- const totalResources = await countUniqueResourcesInCsv(config.file);
+ const totalResources = config.skipValidationPolling ? 0 : await countUniqueResourcesInCsv(config.file);
  await jobUpload.upload({
  file: config.file,
  resourceType: config.resourceType,
@@ -3009,6 +3058,42 @@ const useFileUpload = _ref2 => {
  autoProcess: config.autoProcess,
  abortSignal: config.abortSignal,
  onSuccess: async (jobId, containerKey) => {
+ if (config.skipValidationPolling) {
+   try {
+     const processingJob = await pollJobUntilProcessing({
+       projectKey,
+       jobId,
+       importContainerKey: containerKey,
+       abortSignal: config.abortSignal
+     });
+     const result = {
+       containerKey,
+       summary: {
+         total: processingJob.summary?.total ?? 0,
+         valid: processingJob.summary?.valid ?? 0,
+         invalid: processingJob.summary?.invalid ?? 0,
+         fieldsCount: processingJob.summary?.fieldsCount ?? 0,
+         fields: processingJob.summary?.fields ?? [],
+         ignoredFields: processingJob.summary?.ignoredFields ?? [],
+         results: []
+       },
+       jobId,
+       job: processingJob
+     };
+     setIsUploading(false);
+     config.onSuccess(result);
+   } catch (error) {
+     await safeDeleteContainer({
+       projectKey,
+       containerKey
+     });
+     resetState();
+     if (!(error instanceof PollingAbortedError)) {
+       config.onError?.(error);
+     }
+   }
+   return;
+ }
  try {
  setValidationProgress({
  processed: 0,
@@ -3248,6 +3333,7 @@ exports.getMissingRequiredFields = getMissingRequiredFields;
  exports.getProccessFileURL = getProccessFileURL;
  exports.getRowCount = getRowCount;
  exports.getValidatedColumns = getValidatedColumns;
+ exports.hasImportJobStartedProcessing = hasImportJobStartedProcessing;
  exports.hasOwnProperty = hasOwnProperty;
  exports.hasRequiredFields = hasRequiredFields;
  exports.hasSingleKeyColumn = hasSingleKeyColumn;
@@ -3272,6 +3358,7 @@ exports.manualImports = manualImports;
  exports.mapFileUploadErrorsToUploadFileErrorRows = mapFileUploadErrorsToUploadFileErrorRows;
  exports.mapFormikErrors = mapFormikErrors;
  exports.mapUploadFileErrorsResponseToUploadFileErrorRows = mapUploadFileErrorsResponseToUploadFileErrorRows;
+ exports.pollJobUntilProcessing = pollJobUntilProcessing;
  exports.pollJobUntilValidated = pollJobUntilValidated;
  exports.processFileImportJob = processFileImportJob;
  exports.processFileImportJobResponse = processFileImportJobResponse;
@@ -442,6 +442,9 @@ function isImportJobRejected(job) {
  function isImportJobTerminal(job) {
  return isImportJobValidated(job) || isImportJobRejected(job);
  }
+ function hasImportJobStartedProcessing(job) {
+   return !isImportJobQueued(job);
+ }
  function shouldContinuePollingForImportValidation(job) {
  if (!job) return true;
  return isImportJobQueued(job) || isImportJobProcessing(job);
@@ -695,6 +698,52 @@ const decodeFileNameFromImportContainerKey = importContainerKey => {
  }
  };

+ const pollJobUntilProcessing = async _ref => {
+   let projectKey = _ref.projectKey,
+     jobId = _ref.jobId,
+     importContainerKey = _ref.importContainerKey,
+     _ref$pollingInterval = _ref.pollingInterval,
+     pollingInterval = _ref$pollingInterval === void 0 ? 1000 : _ref$pollingInterval,
+     _ref$maxAttempts = _ref.maxAttempts,
+     maxAttempts = _ref$maxAttempts === void 0 ? 60 : _ref$maxAttempts,
+     abortSignal = _ref.abortSignal;
+   let attempts = 0;
+   while (attempts < maxAttempts) {
+     if (abortSignal?.aborted) {
+       throw new PollingAbortedError();
+     }
+     const job = await getFileImportJob({
+       projectKey,
+       importContainerKey,
+       jobId
+     });
+     if (abortSignal?.aborted) {
+       throw new PollingAbortedError();
+     }
+     if (hasImportJobStartedProcessing(job)) {
+       return job;
+     }
+     await new _Promise((resolve, reject) => {
+       let timeoutId;
+       const onAbort = () => {
+         clearTimeout(timeoutId);
+         reject(new PollingAbortedError());
+       };
+       if (abortSignal?.aborted) {
+         reject(new PollingAbortedError());
+         return;
+       }
+       timeoutId = _setTimeout(() => {
+         abortSignal?.removeEventListener('abort', onAbort);
+         resolve();
+       }, pollingInterval);
+       abortSignal?.addEventListener('abort', onAbort);
+     });
+     attempts++;
+   }
+   throw new Error(`Job did not start processing after ${maxAttempts} attempts (${maxAttempts * pollingInterval / 1000}s)`);
+ };
+
  const pollJobUntilValidated = async _ref => {
  let projectKey = _ref.projectKey,
  jobId = _ref.jobId,
@@ -2969,7 +3018,7 @@ const useFileUpload = _ref2 => {
  setProgress(0);
  try {
  if (useJobBasedFlow) {
- const totalResources = await countUniqueResourcesInCsv(config.file);
+ const totalResources = config.skipValidationPolling ? 0 : await countUniqueResourcesInCsv(config.file);
  await jobUpload.upload({
  file: config.file,
  resourceType: config.resourceType,
@@ -2977,6 +3026,42 @@ const useFileUpload = _ref2 => {
  autoProcess: config.autoProcess,
  abortSignal: config.abortSignal,
  onSuccess: async (jobId, containerKey) => {
+ if (config.skipValidationPolling) {
+   try {
+     const processingJob = await pollJobUntilProcessing({
+       projectKey,
+       jobId,
+       importContainerKey: containerKey,
+       abortSignal: config.abortSignal
+     });
+     const result = {
+       containerKey,
+       summary: {
+         total: processingJob.summary?.total ?? 0,
+         valid: processingJob.summary?.valid ?? 0,
+         invalid: processingJob.summary?.invalid ?? 0,
+         fieldsCount: processingJob.summary?.fieldsCount ?? 0,
+         fields: processingJob.summary?.fields ?? [],
+         ignoredFields: processingJob.summary?.ignoredFields ?? [],
+         results: []
+       },
+       jobId,
+       job: processingJob
+     };
+     setIsUploading(false);
+     config.onSuccess(result);
+   } catch (error) {
+     await safeDeleteContainer({
+       projectKey,
+       containerKey
+     });
+     resetState();
+     if (!(error instanceof PollingAbortedError)) {
+       config.onError?.(error);
+     }
+   }
+   return;
+ }
  try {
  setValidationProgress({
  processed: 0,
@@ -3107,4 +3192,4 @@ const useFileUpload = _ref2 => {
  };
  };

- export { ActiveDragDropArea, COLUMN_DELIMITERS, CT_API_DOCS_URL, DELIMITERS, DisabledDropArea, DropAreaWrapper, EnabledDropArea, FILE_IMPORT_JOB_POLLING_INTERVAL, FileDropArea, FileDroppedArea, FileIcon, HttpError, IMPORT_LEGACY_MAX_FILE_SIZE_MB, IMPORT_LEGACY_MAX_ROW_COUNT, IMPORT_MAX_FILE_SIZE_MB, IMPORT_MAX_ITEM_COUNT, IMPORT_TAG_KEYS, IMPORT_TAG_VALUES, ImportStates, InfoBox, InvalidResponseError, LockIcon, NoResourcesToExportError, PollingAbortedError, ProjectKeyNotAvailableError, QueryPredicateError, RESOURCE_TYPE_DOCUMENTATION_LINKS, RESOURCE_TYPE_TEMPLATE_DOWNLOAD_LINKS, TAG_KEY_SOURCE_FILE_UPLOAD, UnexpectedColumnError, UnexpectedOperationStateError, UnexpectedResourceTypeError, UploadSeparator, UploadSettings, UploadingModal, allAutomatedImportOperations, allAutomatedImportOperationsResponse, allFileUploadImportOperations, allFileUploadImportOperationsResponse, appendCsvOrJsonExtensionIfAbsent, assertCancelContainerResponse, assertExportOperationsDownloadFileResponse, assertFileImportJob, assertFileImportJobRecordsResponse, assertFileUploadResponse, assertImportContainer, assertImportContainerPagedResponse, assertImportOperationPagedResponse, assertImportSummary, assertListFileImportJobsResponse, assertPaginatedExportOperationResponse, assertProcessFileImportJobResponse, assertProcessFileResponse, assertResourceType, automatedImportContainerKey, automatedImports, cancelImportContainerByKey, checkIfFileUploadImport, convertFileSizeToKB, countJsonFileItems, countUniqueResourcesInCsv, createFileImportJob, createImportContainerForFileUpload, decodeFileNameFromImportContainerKey, deleteFileImportJob, deleteImportContainer, dropAreaStyles, encodeFileNameWithTimestampToContainerKey, exportOperationsCompleted, exportOperationsProcessing, extractErrorDescriptionFromValidationMessage, fetchExportOperations, fetchImportContainerByKey, fetchImportContainerDetails, fetchImportContainers, fetchImportOperations, fetchImportSummaries, fetchImportSummary, fetchUsingXhr, fetcher, fileUploadImportContainerKey, fileUploadMissingKeysResponse, formatErrorCode, formatKeys, formatQueryString, getCreateImportContainerURL, getDeleteImportContainerURL, getExportOperationsURL, getFileImportJob, getFileImportJobByIdURL, getFileImportJobDeleteURL, getFileImportJobFileType, getFileImportJobProcessURL, getFileImportJobRecords, getFileImportJobRecordsURL, getFileImportJobsListURL, getFileImportJobsURL, getFileUploadErrorsCount, getFileUploadURL, getImportContainerByKeyURL, getImportContainerTasksURL, getImportContainersURL, getImportOperationsURL, getImportState, getImportSummaryURL, getMissingRequiredFields, getProccessFileURL, getRowCount, getValidatedColumns, hasOwnProperty, hasRequiredFields, hasSingleKeyColumn, importContainers, importStatesMap, importsSummaries, invalidFileImportJobRecordsResponse, invalidFileImportJobValidated, invalidFileUploadResponse, isAbortError, isError, isImportJobInitializing, isImportJobProcessing, isImportJobQueued, isImportJobReady, isImportJobRejected, isImportJobTerminal, isImportJobValidated, isResourceType, listFileImportJobs, manualImports, mapFileUploadErrorsToUploadFileErrorRows, mapFormikErrors, mapUploadFileErrorsResponseToUploadFileErrorRows, pollJobUntilValidated, processFileImportJob, processFileImportJobResponse, processUploadedFile, shouldContinuePollingForImportValidation, successfulAutomatedImportOperations, successfulAutomatedImportOperationsResponse, successfulFileUploadImportOperations, successfulFileUploadImportOperationsResponse, toBytes, 
toImportApiResourceType, uploadFileForImport, useFetchExportOperations, useFetchFileImportJob, useFetchFileImportJobRecords, useFetchImportContainerDetails, useFetchImportOperations, useFetchImportSummaries, useFileImportJobUpload, useFileUpload, useImportContainerUpload, validFileImportJobProcessing, validFileImportJobQueued, validFileImportJobRecordsResponse, validFileImportJobValidated, validFileUploadResponse, validProcessFileResponse, validateDelimiter };
+ export { ActiveDragDropArea, COLUMN_DELIMITERS, CT_API_DOCS_URL, DELIMITERS, DisabledDropArea, DropAreaWrapper, EnabledDropArea, FILE_IMPORT_JOB_POLLING_INTERVAL, FileDropArea, FileDroppedArea, FileIcon, HttpError, IMPORT_LEGACY_MAX_FILE_SIZE_MB, IMPORT_LEGACY_MAX_ROW_COUNT, IMPORT_MAX_FILE_SIZE_MB, IMPORT_MAX_ITEM_COUNT, IMPORT_TAG_KEYS, IMPORT_TAG_VALUES, ImportStates, InfoBox, InvalidResponseError, LockIcon, NoResourcesToExportError, PollingAbortedError, ProjectKeyNotAvailableError, QueryPredicateError, RESOURCE_TYPE_DOCUMENTATION_LINKS, RESOURCE_TYPE_TEMPLATE_DOWNLOAD_LINKS, TAG_KEY_SOURCE_FILE_UPLOAD, UnexpectedColumnError, UnexpectedOperationStateError, UnexpectedResourceTypeError, UploadSeparator, UploadSettings, UploadingModal, allAutomatedImportOperations, allAutomatedImportOperationsResponse, allFileUploadImportOperations, allFileUploadImportOperationsResponse, appendCsvOrJsonExtensionIfAbsent, assertCancelContainerResponse, assertExportOperationsDownloadFileResponse, assertFileImportJob, assertFileImportJobRecordsResponse, assertFileUploadResponse, assertImportContainer, assertImportContainerPagedResponse, assertImportOperationPagedResponse, assertImportSummary, assertListFileImportJobsResponse, assertPaginatedExportOperationResponse, assertProcessFileImportJobResponse, assertProcessFileResponse, assertResourceType, automatedImportContainerKey, automatedImports, cancelImportContainerByKey, checkIfFileUploadImport, convertFileSizeToKB, countJsonFileItems, countUniqueResourcesInCsv, createFileImportJob, createImportContainerForFileUpload, decodeFileNameFromImportContainerKey, deleteFileImportJob, deleteImportContainer, dropAreaStyles, encodeFileNameWithTimestampToContainerKey, exportOperationsCompleted, exportOperationsProcessing, extractErrorDescriptionFromValidationMessage, fetchExportOperations, fetchImportContainerByKey, fetchImportContainerDetails, fetchImportContainers, fetchImportOperations, fetchImportSummaries, fetchImportSummary, fetchUsingXhr, fetcher, fileUploadImportContainerKey, fileUploadMissingKeysResponse, formatErrorCode, formatKeys, formatQueryString, getCreateImportContainerURL, getDeleteImportContainerURL, getExportOperationsURL, getFileImportJob, getFileImportJobByIdURL, getFileImportJobDeleteURL, getFileImportJobFileType, getFileImportJobProcessURL, getFileImportJobRecords, getFileImportJobRecordsURL, getFileImportJobsListURL, getFileImportJobsURL, getFileUploadErrorsCount, getFileUploadURL, getImportContainerByKeyURL, getImportContainerTasksURL, getImportContainersURL, getImportOperationsURL, getImportState, getImportSummaryURL, getMissingRequiredFields, getProccessFileURL, getRowCount, getValidatedColumns, hasImportJobStartedProcessing, hasOwnProperty, hasRequiredFields, hasSingleKeyColumn, importContainers, importStatesMap, importsSummaries, invalidFileImportJobRecordsResponse, invalidFileImportJobValidated, invalidFileUploadResponse, isAbortError, isError, isImportJobInitializing, isImportJobProcessing, isImportJobQueued, isImportJobReady, isImportJobRejected, isImportJobTerminal, isImportJobValidated, isResourceType, listFileImportJobs, manualImports, mapFileUploadErrorsToUploadFileErrorRows, mapFormikErrors, mapUploadFileErrorsResponseToUploadFileErrorRows, pollJobUntilProcessing, pollJobUntilValidated, processFileImportJob, processFileImportJobResponse, processUploadedFile, shouldContinuePollingForImportValidation, successfulAutomatedImportOperations, successfulAutomatedImportOperationsResponse, successfulFileUploadImportOperations, 
successfulFileUploadImportOperationsResponse, toBytes, toImportApiResourceType, uploadFileForImport, useFetchExportOperations, useFetchFileImportJob, useFetchFileImportJobRecords, useFetchImportContainerDetails, useFetchImportOperations, useFetchImportSummaries, useFileImportJobUpload, useFileUpload, useImportContainerUpload, validFileImportJobProcessing, validFileImportJobQueued, validFileImportJobRecordsResponse, validFileImportJobValidated, validFileUploadResponse, validProcessFileResponse, validateDelimiter };
@@ -10,6 +10,7 @@ export type FileUploadConfig = {
  resourceType: ResourceTypeId;
  settings?: ExtendedImportContainerDraft['settings'];
  autoProcess?: boolean;
+ skipValidationPolling?: boolean;
  onSuccess: (result: FileUploadResult) => void;
  onError?: (error: unknown) => void;
  onProgress?: (progress: number) => void;
@@ -9,4 +9,5 @@ export declare function isImportJobInitializing(job?: FileImportJob): boolean;
  export declare function isImportJobReady(job?: FileImportJob): boolean;
  export declare function isImportJobRejected(job?: FileImportJob): boolean;
  export declare function isImportJobTerminal(job?: FileImportJob): boolean;
+ export declare function hasImportJobStartedProcessing(job?: FileImportJob): boolean;
  export declare function shouldContinuePollingForImportValidation(job?: FileImportJob): boolean;
@@ -4,5 +4,6 @@ export * from "./file-upload.js";
  export * from "./form.js";
  export * from "./format.js";
  export * from "./import-container.js";
+ export * from "./poll-job-until-processing.js";
  export * from "./poll-job-until-validated.js";
  export * from "./url.js";
@@ -0,0 +1,10 @@
+ import type { FileImportJob } from "../@types/index.js";
+ export type PollJobUntilProcessingConfig = {
+   projectKey: string;
+   jobId: string;
+   importContainerKey: string;
+   pollingInterval?: number;
+   maxAttempts?: number;
+   abortSignal?: AbortSignal;
+ };
+ export declare const pollJobUntilProcessing: ({ projectKey, jobId, importContainerKey, pollingInterval, maxAttempts, abortSignal, }: PollJobUntilProcessingConfig) => Promise<FileImportJob>;
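
Based on this declaration and the implementation added above, a hypothetical direct call to the exported helper (outside the `useFileUpload` flow, not part of the published package) could look like the sketch below; the string values are placeholders, and the error handling mirrors the `PollingAbortedError` / timeout behaviour shown in the diff.

```ts
import {
  pollJobUntilProcessing,
  PollingAbortedError,
} from '@commercetools-frontend-extensions/operations';

const abortController = new AbortController();

try {
  // Resolves with the job as soon as it is no longer queued; the values below
  // match the defaults in the implementation (1s interval, 60 attempts).
  const job = await pollJobUntilProcessing({
    projectKey: 'my-project-key', // placeholder
    jobId: 'my-job-id', // placeholder
    importContainerKey: 'my-container-key', // placeholder
    pollingInterval: 1000,
    maxAttempts: 60,
    abortSignal: abortController.signal,
  });
  console.log('Job has started processing', job);
} catch (error) {
  if (error instanceof PollingAbortedError) {
    // Polling was aborted via the AbortSignal.
  } else {
    // The job did not leave the queued state within maxAttempts * pollingInterval.
    throw error;
  }
}
```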
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@commercetools-frontend-extensions/operations",
- "version": "3.1.1",
+ "version": "3.1.2",
  "license": "Proprietary",
  "publishConfig": {
  "access": "public"
@@ -31,7 +31,7 @@
  "@emotion/styled": "11.14.1",
  "@testing-library/react": "16.1.0",
  "@types/jest": "29.5.14",
- "@types/papaparse": "5.5.1",
+ "@types/papaparse": "5.5.2",
  "@types/pluralize": "0.0.33",
  "@types/react": "19.2.0",
  "msw": "1.3.5",
@@ -4,7 +4,11 @@ import { useImportContainerUpload } from './use-import-container-upload'
  import { useFileImportJobUpload } from './use-file-import-job-upload'
  import { deleteImportContainer } from '../@api'
  import { HttpError, PollingAbortedError } from '../@errors'
- import { pollJobUntilValidated, countUniqueResourcesInCsv } from '../@utils'
+ import {
+   pollJobUntilValidated,
+   pollJobUntilProcessing,
+   countUniqueResourcesInCsv,
+ } from '../@utils'
  import type {
  ExtendedImportContainerDraft,
  FileUploadResult,
@@ -22,6 +26,7 @@ export type FileUploadConfig = {
  resourceType: ResourceTypeId
  settings?: ExtendedImportContainerDraft['settings']
  autoProcess?: boolean
+ skipValidationPolling?: boolean
  onSuccess: (result: FileUploadResult) => void
  onError?: (error: unknown) => void
  onProgress?: (progress: number) => void
@@ -82,7 +87,9 @@ export const useFileUpload = ({

  try {
  if (useJobBasedFlow) {
- const totalResources = await countUniqueResourcesInCsv(config.file)
+ const totalResources = config.skipValidationPolling
+   ? 0
+   : await countUniqueResourcesInCsv(config.file)

  await jobUpload.upload({
  file: config.file,
@@ -91,6 +98,41 @@ export const useFileUpload = ({
  autoProcess: config.autoProcess,
  abortSignal: config.abortSignal,
  onSuccess: async (jobId, containerKey) => {
+ if (config.skipValidationPolling) {
+   try {
+     const processingJob = await pollJobUntilProcessing({
+       projectKey,
+       jobId,
+       importContainerKey: containerKey,
+       abortSignal: config.abortSignal,
+     })
+
+     const result: FileUploadResult = {
+       containerKey,
+       summary: {
+         total: processingJob.summary?.total ?? 0,
+         valid: processingJob.summary?.valid ?? 0,
+         invalid: processingJob.summary?.invalid ?? 0,
+         fieldsCount: processingJob.summary?.fieldsCount ?? 0,
+         fields: processingJob.summary?.fields ?? [],
+         ignoredFields: processingJob.summary?.ignoredFields ?? [],
+         results: [],
+       },
+       jobId,
+       job: processingJob,
+     }
+     setIsUploading(false)
+     config.onSuccess(result)
+   } catch (error) {
+     await safeDeleteContainer({ projectKey, containerKey })
+     resetState()
+     if (!(error instanceof PollingAbortedError)) {
+       config.onError?.(error)
+     }
+   }
+   return
+ }
+
  try {
  setValidationProgress({
  processed: 0,
@@ -39,6 +39,10 @@ export function isImportJobTerminal(job?: FileImportJob): boolean {
  return isImportJobValidated(job) || isImportJobRejected(job)
  }

+ export function hasImportJobStartedProcessing(job?: FileImportJob): boolean {
+   return !isImportJobQueued(job)
+ }
+
  export function shouldContinuePollingForImportValidation(
  job?: FileImportJob
  ): boolean {
@@ -4,5 +4,6 @@ export * from './file-upload'
  export * from './form'
  export * from './format'
  export * from './import-container'
+ export * from './poll-job-until-processing'
  export * from './poll-job-until-validated'
  export * from './url'
@@ -0,0 +1,72 @@
+ import { getFileImportJob } from '../@api'
+ import { PollingAbortedError } from '../@errors'
+ import type { FileImportJob } from '../@types'
+ import { hasImportJobStartedProcessing } from './file-import-job-helpers'
+
+ export type PollJobUntilProcessingConfig = {
+   projectKey: string
+   jobId: string
+   importContainerKey: string
+   pollingInterval?: number
+   maxAttempts?: number
+   abortSignal?: AbortSignal
+ }
+
+ export const pollJobUntilProcessing = async ({
+   projectKey,
+   jobId,
+   importContainerKey,
+   pollingInterval = 1000,
+   maxAttempts = 60,
+   abortSignal,
+ }: PollJobUntilProcessingConfig): Promise<FileImportJob> => {
+   let attempts = 0
+
+   while (attempts < maxAttempts) {
+     if (abortSignal?.aborted) {
+       throw new PollingAbortedError()
+     }
+
+     const job = await getFileImportJob({
+       projectKey,
+       importContainerKey,
+       jobId,
+     })
+
+     if (abortSignal?.aborted) {
+       throw new PollingAbortedError()
+     }
+
+     if (hasImportJobStartedProcessing(job)) {
+       return job
+     }
+
+     await new Promise<void>((resolve, reject) => {
+       let timeoutId: ReturnType<typeof setTimeout>
+
+       const onAbort = () => {
+         clearTimeout(timeoutId)
+         reject(new PollingAbortedError())
+       }
+
+       if (abortSignal?.aborted) {
+         reject(new PollingAbortedError())
+         return
+       }
+
+       timeoutId = setTimeout(() => {
+         abortSignal?.removeEventListener('abort', onAbort)
+         resolve()
+       }, pollingInterval)
+
+       abortSignal?.addEventListener('abort', onAbort)
+     })
+     attempts++
+   }
+
+   throw new Error(
+     `Job did not start processing after ${maxAttempts} attempts (${
+       (maxAttempts * pollingInterval) / 1000
+     }s)`
+   )
+ }