@commercetools-frontend-extensions/operations 3.0.0 → 3.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,19 @@
  # @commercetools-frontend-extensions/operations

+ ## 3.1.0
+
+ ### Minor Changes
+
+ - [#1669](https://github.com/commercetools/merchant-center-operations/pull/1669) [`a4a9c65`](https://github.com/commercetools/merchant-center-operations/commit/a4a9c6513e9a0b6da7d0681f044d7640ad8f0132) Thanks [@yassinejebli](https://github.com/yassinejebli)! - feat(useFileUpload): add `total` to `validationProgress` for tracking validation progress
+
+ The `validationProgress` object returned by `useFileUpload` now includes a `total` field representing the total number of unique resources in the CSV file (counted by unique keys). This allows consumers to display progress like "Validating X of Y resources" during the job-based validation flow.
+
+ - [#1669](https://github.com/commercetools/merchant-center-operations/pull/1669) [`a4a9c65`](https://github.com/commercetools/merchant-center-operations/commit/a4a9c6513e9a0b6da7d0681f044d7640ad8f0132) Thanks [@yassinejebli](https://github.com/yassinejebli)! - feat(import): server-side pagination for new import flow errors
+
+ ### Patch Changes
+
+ - [#1669](https://github.com/commercetools/merchant-center-operations/pull/1669) [`a4a9c65`](https://github.com/commercetools/merchant-center-operations/commit/a4a9c6513e9a0b6da7d0681f044d7640ad8f0132) Thanks [@yassinejebli](https://github.com/yassinejebli)! - feat: add `total` for tracking validation progress
+
  ## 3.0.0

  ### Major Changes
package/README.md CHANGED
@@ -2,6 +2,12 @@

  Shared functionality for import/export operations across multiple frontend applications and extensions.

+ ## Installation
+
+ ```bash
+ pnpm add @commercetools-frontend-extensions/operations
+ ```
+
  ## Hooks

  ### `useFileUpload`
@@ -27,7 +33,10 @@ const { upload, isUploading, progress, validationProgress } = useFileUpload({
  - `upload` - Function to start the upload
  - `isUploading` - Whether upload is in progress
  - `progress` - Upload progress (0-100)
- - `validationProgress` - `{ processed: number, isValidating: boolean }` (job-based flow only)
+ - `validationProgress` - `{ processed: number, total: number, isValidating: boolean }` (job-based flow only)
+ - `processed`: Number of resources validated so far (from the backend)
+ - `total`: Total number of unique resources in the file (counted by unique keys in the CSV)
+ - `isValidating`: Whether validation is in progress

  **Usage:**
  ```typescript
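The field list in the hunk above only describes the new `total` value abstractly. The sketch below (not part of the package README) shows how the three `validationProgress` fields can be combined into the "Validating X of Y resources" message mentioned in the changelog; the helper name and label wording are illustrative assumptions.

```typescript
// Sketch only: deriving a progress label from `validationProgress`.
// The type mirrors the documented `{ processed, total, isValidating }` shape.
type ValidationProgress = {
  processed: number;
  total: number;
  isValidating: boolean;
};

const formatValidationProgress = ({
  processed,
  total,
  isValidating,
}: ValidationProgress): string | null =>
  isValidating ? `Validating ${processed} of ${total} resources` : null;
```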
@@ -149,6 +158,55 @@ const { data, error, isLoading, refetch } = useFetchFileImportJob({

  ---

+ ### `useFetchFileImportJobRecords`
+
+ Fetches records (errors or valid entries) from a File Import Job with pagination support. Keeps previous data visible while loading new pages.
+
+ ```typescript
+ const { data, error, isLoading, refetch } = useFetchFileImportJobRecords({
+ projectKey,
+ importContainerKey,
+ jobId,
+ limit?: number,
+ offset?: number,
+ isValid?: boolean,
+ skip?: boolean
+ })
+ ```
+
+ **Parameters:**
+ - `projectKey`: The commercetools project key
+ - `importContainerKey`: The import container key
+ - `jobId`: The file import job ID
+ - `limit` (optional): Number of records to fetch per page
+ - `offset` (optional): Offset for pagination
+ - `isValid` (optional): Filter by valid (`true`) or invalid (`false`) records
+ - `skip` (optional): Skip fetching (useful for conditional fetching)
+
+ **Returns:**
+ - `data` - `{ results, total, limit, offset, count }` or `null`
+ - `error` - Error object if fetch failed
+ - `isLoading` - Whether fetch is in progress
+ - `refetch` - Function to manually trigger a refetch
+
+ **Usage example (paginated error table):**
+ ```typescript
+ const pagination = usePaginationState()
+ const offset = (pagination.page.value - 1) * pagination.perPage.value
+
+ const { data, isLoading } = useFetchFileImportJobRecords({
+ projectKey,
+ importContainerKey: containerKey,
+ jobId,
+ offset,
+ limit: pagination.perPage.value,
+ isValid: false, // Fetch only invalid records (errors)
+ })
+
+ ```
+
+ ---
+
  ## Helper Functions

  ```typescript
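The README usage above fetches a single page. As a small illustration (not from the package docs), the documented `{ results, total, limit, offset, count }` response shape can also drive pagination controls, e.g. computing the page count and whether a next page exists; the type and helper names below are assumptions.

```typescript
// Sketch: pagination metadata derived from the documented response shape
// returned by useFetchFileImportJobRecords.
type FileImportJobRecordsPage = {
  results: unknown[];
  total: number;
  limit: number;
  offset: number;
  count: number;
};

const getPageInfo = (data: FileImportJobRecordsPage | null, perPage: number) => {
  const total = data?.total ?? 0;
  const offset = data?.offset ?? 0;
  const count = data?.count ?? 0;
  return {
    totalPages: Math.max(1, Math.ceil(total / Math.max(1, perPage))),
    hasNextPage: offset + count < total,
  };
};
```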
@@ -2679,6 +2679,39 @@ const useFetchFileImportJob = _ref => {
  });
  };

+ const EMPTY_RESPONSE = {
+ results: [],
+ total: 0,
+ limit: 0,
+ offset: 0,
+ count: 0
+ };
+ const useFetchFileImportJobRecords = _ref => {
+ let projectKey = _ref.projectKey,
+ importContainerKey = _ref.importContainerKey,
+ jobId = _ref.jobId,
+ limit = _ref.limit,
+ offset = _ref.offset,
+ isValid = _ref.isValid,
+ _ref$skip = _ref.skip,
+ skip = _ref$skip === void 0 ? false : _ref$skip;
+ const shouldSkip = skip || !projectKey || !importContainerKey || !jobId;
+ const fetchData = React__default["default"].useCallback(() => {
+ if (shouldSkip) {
+ return _Promise__default["default"].resolve(EMPTY_RESPONSE);
+ }
+ return getFileImportJobRecords({
+ projectKey: projectKey,
+ importContainerKey: importContainerKey,
+ jobId: jobId,
+ limit,
+ offset,
+ isValid
+ });
+ }, [shouldSkip, projectKey, importContainerKey, jobId, limit, offset, isValid]);
+ return useFetch(fetchData);
+ };
+
  const useFetchImportContainerDetails = _ref => {
  let projectKey = _ref.projectKey,
  importContainerKey = _ref.importContainerKey,
@@ -2946,6 +2979,7 @@ const useFileUpload = _ref2 => {
  setProgress = _React$useState4[1];
  const _React$useState5 = React__default["default"].useState({
  processed: 0,
+ total: 0,
  isValidating: false
  }),
  _React$useState6 = _slicedToArray(_React$useState5, 2),
@@ -2962,6 +2996,7 @@ const useFileUpload = _ref2 => {
  setProgress(0);
  setValidationProgress({
  processed: 0,
+ total: 0,
  isValidating: false
  });
  }, []);
@@ -2970,6 +3005,7 @@ const useFileUpload = _ref2 => {
  setProgress(0);
  try {
  if (useJobBasedFlow) {
+ const totalResources = await countUniqueResourcesInCsv(config.file);
  await jobUpload.upload({
  file: config.file,
  resourceType: config.resourceType,
@@ -2979,6 +3015,7 @@ const useFileUpload = _ref2 => {
  try {
  setValidationProgress({
  processed: 0,
+ total: totalResources,
  isValidating: true
  });
  const validatedJob = await pollJobUntilValidated({
@@ -2992,6 +3029,7 @@ const useFileUpload = _ref2 => {
  const processed = job.summary?.total ?? 0;
  setValidationProgress({
  processed,
+ total: totalResources,
  isValidating: true
  });
  config.onValidationProgress?.(job);
@@ -3003,16 +3041,7 @@ const useFileUpload = _ref2 => {
  if (validatedJob.jobError) {
  throw new HttpError(400, validatedJob.jobError.message, validatedJob.jobError);
  }
- let results = [];
  if (validatedJob.summary.invalid > 0) {
- const recordsResponse = await getFileImportJobRecords({
- projectKey,
- importContainerKey: containerKey,
- jobId,
- limit: 500,
- isValid: false
- });
- results = recordsResponse.results;
  await safeDeleteContainer({
  projectKey,
  containerKey
@@ -3021,7 +3050,8 @@ const useFileUpload = _ref2 => {
  const result = {
  containerKey,
  summary: _objectSpread(_objectSpread({}, validatedJob.summary), {}, {
- results
+ // TODO: Remove this once the old flow is fully removed
+ results: []
  }),
  jobId,
  job: validatedJob
@@ -3029,6 +3059,7 @@ const useFileUpload = _ref2 => {
  setIsUploading(false);
  setValidationProgress({
  processed: 0,
+ total: 0,
  isValidating: false
  });
  config.onSuccess(result);
@@ -3258,6 +3289,7 @@ exports.toImportApiResourceType = toImportApiResourceType;
  exports.uploadFileForImport = uploadFileForImport;
  exports.useFetchExportOperations = useFetchExportOperations;
  exports.useFetchFileImportJob = useFetchFileImportJob;
+ exports.useFetchFileImportJobRecords = useFetchFileImportJobRecords;
  exports.useFetchImportContainerDetails = useFetchImportContainerDetails;
  exports.useFetchImportOperations = useFetchImportOperations;
  exports.useFetchImportSummaries = useFetchImportSummaries;
@@ -2671,6 +2671,39 @@ const useFetchFileImportJob = _ref => {
  });
  };

+ const EMPTY_RESPONSE = {
+ results: [],
+ total: 0,
+ limit: 0,
+ offset: 0,
+ count: 0
+ };
+ const useFetchFileImportJobRecords = _ref => {
+ let projectKey = _ref.projectKey,
+ importContainerKey = _ref.importContainerKey,
+ jobId = _ref.jobId,
+ limit = _ref.limit,
+ offset = _ref.offset,
+ isValid = _ref.isValid,
+ _ref$skip = _ref.skip,
+ skip = _ref$skip === void 0 ? false : _ref$skip;
+ const shouldSkip = skip || !projectKey || !importContainerKey || !jobId;
+ const fetchData = React__default["default"].useCallback(() => {
+ if (shouldSkip) {
+ return _Promise__default["default"].resolve(EMPTY_RESPONSE);
+ }
+ return getFileImportJobRecords({
+ projectKey: projectKey,
+ importContainerKey: importContainerKey,
+ jobId: jobId,
+ limit,
+ offset,
+ isValid
+ });
+ }, [shouldSkip, projectKey, importContainerKey, jobId, limit, offset, isValid]);
+ return useFetch(fetchData);
+ };
+
  const useFetchImportContainerDetails = _ref => {
  let projectKey = _ref.projectKey,
  importContainerKey = _ref.importContainerKey,
@@ -2938,6 +2971,7 @@ const useFileUpload = _ref2 => {
  setProgress = _React$useState4[1];
  const _React$useState5 = React__default["default"].useState({
  processed: 0,
+ total: 0,
  isValidating: false
  }),
  _React$useState6 = _slicedToArray(_React$useState5, 2),
@@ -2954,6 +2988,7 @@ const useFileUpload = _ref2 => {
  setProgress(0);
  setValidationProgress({
  processed: 0,
+ total: 0,
  isValidating: false
  });
  }, []);
@@ -2962,6 +2997,7 @@ const useFileUpload = _ref2 => {
  setProgress(0);
  try {
  if (useJobBasedFlow) {
+ const totalResources = await countUniqueResourcesInCsv(config.file);
  await jobUpload.upload({
  file: config.file,
  resourceType: config.resourceType,
@@ -2971,6 +3007,7 @@ const useFileUpload = _ref2 => {
  try {
  setValidationProgress({
  processed: 0,
+ total: totalResources,
  isValidating: true
  });
  const validatedJob = await pollJobUntilValidated({
@@ -2984,6 +3021,7 @@ const useFileUpload = _ref2 => {
  const processed = job.summary?.total ?? 0;
  setValidationProgress({
  processed,
+ total: totalResources,
  isValidating: true
  });
  config.onValidationProgress?.(job);
@@ -2995,16 +3033,7 @@ const useFileUpload = _ref2 => {
  if (validatedJob.jobError) {
  throw new HttpError(400, validatedJob.jobError.message, validatedJob.jobError);
  }
- let results = [];
  if (validatedJob.summary.invalid > 0) {
- const recordsResponse = await getFileImportJobRecords({
- projectKey,
- importContainerKey: containerKey,
- jobId,
- limit: 500,
- isValid: false
- });
- results = recordsResponse.results;
  await safeDeleteContainer({
  projectKey,
  containerKey
@@ -3013,7 +3042,8 @@ const useFileUpload = _ref2 => {
  const result = {
  containerKey,
  summary: _objectSpread(_objectSpread({}, validatedJob.summary), {}, {
- results
+ // TODO: Remove this once the old flow is fully removed
+ results: []
  }),
  jobId,
  job: validatedJob
@@ -3021,6 +3051,7 @@ const useFileUpload = _ref2 => {
  setIsUploading(false);
  setValidationProgress({
  processed: 0,
+ total: 0,
  isValidating: false
  });
  config.onSuccess(result);
@@ -3250,6 +3281,7 @@ exports.toImportApiResourceType = toImportApiResourceType;
  exports.uploadFileForImport = uploadFileForImport;
  exports.useFetchExportOperations = useFetchExportOperations;
  exports.useFetchFileImportJob = useFetchFileImportJob;
+ exports.useFetchFileImportJobRecords = useFetchFileImportJobRecords;
  exports.useFetchImportContainerDetails = useFetchImportContainerDetails;
  exports.useFetchImportOperations = useFetchImportOperations;
  exports.useFetchImportSummaries = useFetchImportSummaries;
@@ -2639,6 +2639,39 @@ const useFetchFileImportJob = _ref => {
  });
  };

+ const EMPTY_RESPONSE = {
+ results: [],
+ total: 0,
+ limit: 0,
+ offset: 0,
+ count: 0
+ };
+ const useFetchFileImportJobRecords = _ref => {
+ let projectKey = _ref.projectKey,
+ importContainerKey = _ref.importContainerKey,
+ jobId = _ref.jobId,
+ limit = _ref.limit,
+ offset = _ref.offset,
+ isValid = _ref.isValid,
+ _ref$skip = _ref.skip,
+ skip = _ref$skip === void 0 ? false : _ref$skip;
+ const shouldSkip = skip || !projectKey || !importContainerKey || !jobId;
+ const fetchData = React.useCallback(() => {
+ if (shouldSkip) {
+ return _Promise.resolve(EMPTY_RESPONSE);
+ }
+ return getFileImportJobRecords({
+ projectKey: projectKey,
+ importContainerKey: importContainerKey,
+ jobId: jobId,
+ limit,
+ offset,
+ isValid
+ });
+ }, [shouldSkip, projectKey, importContainerKey, jobId, limit, offset, isValid]);
+ return useFetch(fetchData);
+ };
+
  const useFetchImportContainerDetails = _ref => {
  let projectKey = _ref.projectKey,
  importContainerKey = _ref.importContainerKey,
@@ -2906,6 +2939,7 @@ const useFileUpload = _ref2 => {
  setProgress = _React$useState4[1];
  const _React$useState5 = React.useState({
  processed: 0,
+ total: 0,
  isValidating: false
  }),
  _React$useState6 = _slicedToArray(_React$useState5, 2),
@@ -2922,6 +2956,7 @@ const useFileUpload = _ref2 => {
  setProgress(0);
  setValidationProgress({
  processed: 0,
+ total: 0,
  isValidating: false
  });
  }, []);
@@ -2930,6 +2965,7 @@ const useFileUpload = _ref2 => {
  setProgress(0);
  try {
  if (useJobBasedFlow) {
+ const totalResources = await countUniqueResourcesInCsv(config.file);
  await jobUpload.upload({
  file: config.file,
  resourceType: config.resourceType,
@@ -2939,6 +2975,7 @@ const useFileUpload = _ref2 => {
  try {
  setValidationProgress({
  processed: 0,
+ total: totalResources,
  isValidating: true
  });
  const validatedJob = await pollJobUntilValidated({
@@ -2952,6 +2989,7 @@ const useFileUpload = _ref2 => {
  const processed = job.summary?.total ?? 0;
  setValidationProgress({
  processed,
+ total: totalResources,
  isValidating: true
  });
  config.onValidationProgress?.(job);
@@ -2963,16 +3001,7 @@ const useFileUpload = _ref2 => {
  if (validatedJob.jobError) {
  throw new HttpError(400, validatedJob.jobError.message, validatedJob.jobError);
  }
- let results = [];
  if (validatedJob.summary.invalid > 0) {
- const recordsResponse = await getFileImportJobRecords({
- projectKey,
- importContainerKey: containerKey,
- jobId,
- limit: 500,
- isValid: false
- });
- results = recordsResponse.results;
  await safeDeleteContainer({
  projectKey,
  containerKey
@@ -2981,7 +3010,8 @@ const useFileUpload = _ref2 => {
  const result = {
  containerKey,
  summary: _objectSpread(_objectSpread({}, validatedJob.summary), {}, {
- results
+ // TODO: Remove this once the old flow is fully removed
+ results: []
  }),
  jobId,
  job: validatedJob
@@ -2989,6 +3019,7 @@ const useFileUpload = _ref2 => {
  setIsUploading(false);
  setValidationProgress({
  processed: 0,
+ total: 0,
  isValidating: false
  });
  config.onSuccess(result);
@@ -3071,4 +3102,4 @@ const useFileUpload = _ref2 => {
  };
  };

- export { ActiveDragDropArea, COLUMN_DELIMITERS, CT_API_DOCS_URL, DELIMITERS, DisabledDropArea, DropAreaWrapper, EnabledDropArea, FILE_IMPORT_JOB_POLLING_INTERVAL, FileDropArea, FileDroppedArea, FileIcon, HttpError, IMPORT_LEGACY_MAX_FILE_SIZE_MB, IMPORT_LEGACY_MAX_ROW_COUNT, IMPORT_MAX_FILE_SIZE_MB, IMPORT_MAX_ITEM_COUNT, IMPORT_TAG_KEYS, IMPORT_TAG_VALUES, ImportStates, InfoBox, InvalidResponseError, LockIcon, NoResourcesToExportError, PollingAbortedError, ProjectKeyNotAvailableError, QueryPredicateError, RESOURCE_TYPE_DOCUMENTATION_LINKS, RESOURCE_TYPE_TEMPLATE_DOWNLOAD_LINKS, TAG_KEY_SOURCE_FILE_UPLOAD, UnexpectedColumnError, UnexpectedOperationStateError, UnexpectedResourceTypeError, UploadSeparator, UploadSettings, UploadingModal, allAutomatedImportOperations, allAutomatedImportOperationsResponse, allFileUploadImportOperations, allFileUploadImportOperationsResponse, appendCsvOrJsonExtensionIfAbsent, assertCancelContainerResponse, assertExportOperationsDownloadFileResponse, assertFileImportJob, assertFileImportJobRecordsResponse, assertFileUploadResponse, assertImportContainer, assertImportContainerPagedResponse, assertImportOperationPagedResponse, assertImportSummary, assertListFileImportJobsResponse, assertPaginatedExportOperationResponse, assertProcessFileImportJobResponse, assertProcessFileResponse, assertResourceType, automatedImportContainerKey, automatedImports, cancelImportContainerByKey, checkIfFileUploadImport, convertFileSizeToKB, countJsonFileItems, countUniqueResourcesInCsv, createFileImportJob, createImportContainerForFileUpload, decodeFileNameFromImportContainerKey, deleteFileImportJob, deleteImportContainer, dropAreaStyles, encodeFileNameWithTimestampToContainerKey, exportOperationsCompleted, exportOperationsProcessing, extractErrorDescriptionFromValidationMessage, fetchExportOperations, fetchImportContainerByKey, fetchImportContainerDetails, fetchImportContainers, fetchImportOperations, fetchImportSummaries, fetchImportSummary, fetchUsingXhr, fetcher, fileUploadImportContainerKey, fileUploadMissingKeysResponse, formatErrorCode, formatKeys, formatQueryString, getCreateImportContainerURL, getDeleteImportContainerURL, getExportOperationsURL, getFileImportJob, getFileImportJobByIdURL, getFileImportJobDeleteURL, getFileImportJobFileType, getFileImportJobProcessURL, getFileImportJobRecords, getFileImportJobRecordsURL, getFileImportJobsListURL, getFileImportJobsURL, getFileUploadErrorsCount, getFileUploadURL, getImportContainerByKeyURL, getImportContainerTasksURL, getImportContainersURL, getImportOperationsURL, getImportState, getImportSummaryURL, getMissingRequiredFields, getProccessFileURL, getRowCount, getValidatedColumns, hasOwnProperty, hasRequiredFields, hasSingleKeyColumn, importContainers, importStatesMap, importsSummaries, invalidFileImportJobRecordsResponse, invalidFileImportJobValidated, invalidFileUploadResponse, isAbortError, isError, isImportJobInitializing, isImportJobProcessing, isImportJobQueued, isImportJobReady, isImportJobRejected, isImportJobTerminal, isImportJobValidated, isResourceType, listFileImportJobs, manualImports, mapFileUploadErrorsToUploadFileErrorRows, mapFormikErrors, mapUploadFileErrorsResponseToUploadFileErrorRows, pollJobUntilValidated, processFileImportJob, processFileImportJobResponse, processUploadedFile, shouldContinuePollingForImportValidation, successfulAutomatedImportOperations, successfulAutomatedImportOperationsResponse, successfulFileUploadImportOperations, successfulFileUploadImportOperationsResponse, toBytes, 
toImportApiResourceType, uploadFileForImport, useFetchExportOperations, useFetchFileImportJob, useFetchImportContainerDetails, useFetchImportOperations, useFetchImportSummaries, useFileImportJobUpload, useFileUpload, useImportContainerUpload, validFileImportJobProcessing, validFileImportJobQueued, validFileImportJobRecordsResponse, validFileImportJobValidated, validFileUploadResponse, validProcessFileResponse, validateDelimiter };
+ export { ActiveDragDropArea, COLUMN_DELIMITERS, CT_API_DOCS_URL, DELIMITERS, DisabledDropArea, DropAreaWrapper, EnabledDropArea, FILE_IMPORT_JOB_POLLING_INTERVAL, FileDropArea, FileDroppedArea, FileIcon, HttpError, IMPORT_LEGACY_MAX_FILE_SIZE_MB, IMPORT_LEGACY_MAX_ROW_COUNT, IMPORT_MAX_FILE_SIZE_MB, IMPORT_MAX_ITEM_COUNT, IMPORT_TAG_KEYS, IMPORT_TAG_VALUES, ImportStates, InfoBox, InvalidResponseError, LockIcon, NoResourcesToExportError, PollingAbortedError, ProjectKeyNotAvailableError, QueryPredicateError, RESOURCE_TYPE_DOCUMENTATION_LINKS, RESOURCE_TYPE_TEMPLATE_DOWNLOAD_LINKS, TAG_KEY_SOURCE_FILE_UPLOAD, UnexpectedColumnError, UnexpectedOperationStateError, UnexpectedResourceTypeError, UploadSeparator, UploadSettings, UploadingModal, allAutomatedImportOperations, allAutomatedImportOperationsResponse, allFileUploadImportOperations, allFileUploadImportOperationsResponse, appendCsvOrJsonExtensionIfAbsent, assertCancelContainerResponse, assertExportOperationsDownloadFileResponse, assertFileImportJob, assertFileImportJobRecordsResponse, assertFileUploadResponse, assertImportContainer, assertImportContainerPagedResponse, assertImportOperationPagedResponse, assertImportSummary, assertListFileImportJobsResponse, assertPaginatedExportOperationResponse, assertProcessFileImportJobResponse, assertProcessFileResponse, assertResourceType, automatedImportContainerKey, automatedImports, cancelImportContainerByKey, checkIfFileUploadImport, convertFileSizeToKB, countJsonFileItems, countUniqueResourcesInCsv, createFileImportJob, createImportContainerForFileUpload, decodeFileNameFromImportContainerKey, deleteFileImportJob, deleteImportContainer, dropAreaStyles, encodeFileNameWithTimestampToContainerKey, exportOperationsCompleted, exportOperationsProcessing, extractErrorDescriptionFromValidationMessage, fetchExportOperations, fetchImportContainerByKey, fetchImportContainerDetails, fetchImportContainers, fetchImportOperations, fetchImportSummaries, fetchImportSummary, fetchUsingXhr, fetcher, fileUploadImportContainerKey, fileUploadMissingKeysResponse, formatErrorCode, formatKeys, formatQueryString, getCreateImportContainerURL, getDeleteImportContainerURL, getExportOperationsURL, getFileImportJob, getFileImportJobByIdURL, getFileImportJobDeleteURL, getFileImportJobFileType, getFileImportJobProcessURL, getFileImportJobRecords, getFileImportJobRecordsURL, getFileImportJobsListURL, getFileImportJobsURL, getFileUploadErrorsCount, getFileUploadURL, getImportContainerByKeyURL, getImportContainerTasksURL, getImportContainersURL, getImportOperationsURL, getImportState, getImportSummaryURL, getMissingRequiredFields, getProccessFileURL, getRowCount, getValidatedColumns, hasOwnProperty, hasRequiredFields, hasSingleKeyColumn, importContainers, importStatesMap, importsSummaries, invalidFileImportJobRecordsResponse, invalidFileImportJobValidated, invalidFileUploadResponse, isAbortError, isError, isImportJobInitializing, isImportJobProcessing, isImportJobQueued, isImportJobReady, isImportJobRejected, isImportJobTerminal, isImportJobValidated, isResourceType, listFileImportJobs, manualImports, mapFileUploadErrorsToUploadFileErrorRows, mapFormikErrors, mapUploadFileErrorsResponseToUploadFileErrorRows, pollJobUntilValidated, processFileImportJob, processFileImportJobResponse, processUploadedFile, shouldContinuePollingForImportValidation, successfulAutomatedImportOperations, successfulAutomatedImportOperationsResponse, successfulFileUploadImportOperations, successfulFileUploadImportOperationsResponse, toBytes, 
toImportApiResourceType, uploadFileForImport, useFetchExportOperations, useFetchFileImportJob, useFetchFileImportJobRecords, useFetchImportContainerDetails, useFetchImportOperations, useFetchImportSummaries, useFileImportJobUpload, useFileUpload, useImportContainerUpload, validFileImportJobProcessing, validFileImportJobQueued, validFileImportJobRecordsResponse, validFileImportJobValidated, validFileUploadResponse, validProcessFileResponse, validateDelimiter };
@@ -1,5 +1,6 @@
  export * from "./use-fetch-export-operations.js";
  export * from "./use-fetch-file-import-job.js";
+ export * from "./use-fetch-file-import-job-records.js";
  export * from "./use-fetch-import-container-details.js";
  export * from "./use-fetch-import-operations.js";
  export * from "./use-fetch-import-summaries.js";
@@ -0,0 +1,18 @@
+ import type { FileImportJobRecordsResponse } from "../@types/index.js";
+ type UseFetchFileImportJobRecordsConfig = {
+ projectKey?: string;
+ importContainerKey?: string;
+ jobId?: string;
+ limit?: number;
+ offset?: number;
+ isValid?: boolean;
+ skip?: boolean;
+ };
+ export declare const useFetchFileImportJobRecords: ({ projectKey, importContainerKey, jobId, limit, offset, isValid, skip, }: UseFetchFileImportJobRecordsConfig) => {
+ data: FileImportJobRecordsResponse | null;
+ error: Error | null;
+ isLoading: boolean;
+ refetch: () => void;
+ lastFetchTime: Date;
+ };
+ export {};
@@ -2,6 +2,7 @@ import type { ResourceTypeId } from '@commercetools/importapi-sdk';
  import type { ExtendedImportContainerDraft, FileUploadResult, FileImportJob } from "../@types/index.js";
  export type ValidationProgress = {
  processed: number;
+ total: number;
  isValidating: boolean;
  };
  export type FileUploadConfig = {
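The declaration above also shows that the hook's return value includes `lastFetchTime: Date`. As a small illustration (not from the package docs), a consumer could surface it as a "last refreshed" hint next to a manual `refetch` button; the label text is an assumption.

```typescript
// Sketch: formatting the `lastFetchTime` returned by useFetchFileImportJobRecords.
const formatLastRefreshed = (lastFetchTime: Date): string =>
  `Last refreshed at ${lastFetchTime.toLocaleTimeString()}`;
```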
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@commercetools-frontend-extensions/operations",
- "version": "3.0.0",
+ "version": "3.1.0",
  "license": "Proprietary",
  "publishConfig": {
  "access": "public"
@@ -18,14 +18,14 @@
  "react-dropzone": "14.3.8"
  },
  "devDependencies": {
- "@commercetools-frontend/actions-global": "24.11.0",
- "@commercetools-frontend/application-components": "24.11.0",
- "@commercetools-frontend/application-shell": "24.11.0",
- "@commercetools-frontend/application-shell-connectors": "24.11.0",
- "@commercetools-frontend/constants": "24.11.0",
- "@commercetools-frontend/jest-preset-mc-app": "24.11.0",
- "@commercetools-frontend/permissions": "24.11.0",
- "@commercetools-frontend/sentry": "24.11.0",
+ "@commercetools-frontend/actions-global": "24.12.0",
+ "@commercetools-frontend/application-components": "24.12.0",
+ "@commercetools-frontend/application-shell": "24.12.0",
+ "@commercetools-frontend/application-shell-connectors": "24.12.0",
+ "@commercetools-frontend/constants": "24.12.0",
+ "@commercetools-frontend/jest-preset-mc-app": "24.12.0",
+ "@commercetools-frontend/permissions": "24.12.0",
+ "@commercetools-frontend/sentry": "24.12.0",
  "@commercetools-frontend/ui-kit": "20.3.0",
  "@emotion/react": "11.14.0",
  "@emotion/styled": "11.14.1",
@@ -1,5 +1,6 @@
  export * from './use-fetch-export-operations'
  export * from './use-fetch-file-import-job'
+ export * from './use-fetch-file-import-job-records'
  export * from './use-fetch-import-container-details'
  export * from './use-fetch-import-operations'
  export * from './use-fetch-import-summaries'
@@ -0,0 +1,58 @@
+ import React from 'react'
+ import { getFileImportJobRecords } from '../@api'
+ import type { FileImportJobRecordsResponse } from '../@types'
+ import { useFetch } from './use-fetch'
+
+ type UseFetchFileImportJobRecordsConfig = {
+ projectKey?: string
+ importContainerKey?: string
+ jobId?: string
+ limit?: number
+ offset?: number
+ isValid?: boolean
+ skip?: boolean
+ }
+
+ const EMPTY_RESPONSE: FileImportJobRecordsResponse = {
+ results: [],
+ total: 0,
+ limit: 0,
+ offset: 0,
+ count: 0,
+ }
+
+ export const useFetchFileImportJobRecords = ({
+ projectKey,
+ importContainerKey,
+ jobId,
+ limit,
+ offset,
+ isValid,
+ skip = false,
+ }: UseFetchFileImportJobRecordsConfig) => {
+ const shouldSkip = skip || !projectKey || !importContainerKey || !jobId
+
+ const fetchData = React.useCallback(() => {
+ if (shouldSkip) {
+ return Promise.resolve(EMPTY_RESPONSE)
+ }
+ return getFileImportJobRecords({
+ projectKey: projectKey!,
+ importContainerKey: importContainerKey!,
+ jobId: jobId!,
+ limit,
+ offset,
+ isValid,
+ })
+ }, [
+ shouldSkip,
+ projectKey,
+ importContainerKey,
+ jobId,
+ limit,
+ offset,
+ isValid,
+ ])
+
+ return useFetch<FileImportJobRecordsResponse>(fetchData)
+ }
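The new hook above short-circuits to `EMPTY_RESPONSE` whenever `skip` is set or a required key is missing. The call-site sketch below (the wrapper name and the assumption that the hook is re-exported from the package entry point are illustrative) shows how that guard supports conditional fetching while a job ID is still unknown.

```typescript
// Sketch: conditional fetching with `skip` until `jobId` is known.
// Mirrors the guard inside useFetchFileImportJobRecords.
import { useFetchFileImportJobRecords } from '@commercetools-frontend-extensions/operations'

const useJobErrors = (projectKey: string, containerKey: string, jobId?: string) =>
  useFetchFileImportJobRecords({
    projectKey,
    importContainerKey: containerKey,
    jobId,
    isValid: false, // only invalid records (errors)
    skip: !jobId, // explicit at the call site, matching the internal guard
  })
```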
@@ -2,9 +2,9 @@ import React from 'react'
  import type { ResourceTypeId } from '@commercetools/importapi-sdk'
  import { useImportContainerUpload } from './use-import-container-upload'
  import { useFileImportJobUpload } from './use-file-import-job-upload'
- import { getFileImportJobRecords, deleteImportContainer } from '../@api'
+ import { deleteImportContainer } from '../@api'
  import { HttpError, PollingAbortedError } from '../@errors'
- import { pollJobUntilValidated } from '../@utils'
+ import { pollJobUntilValidated, countUniqueResourcesInCsv } from '../@utils'
  import type {
  ExtendedImportContainerDraft,
  FileUploadResult,
@@ -13,6 +13,7 @@ import type {

  export type ValidationProgress = {
  processed: number
+ total: number
  isValidating: boolean
  }

@@ -60,6 +61,7 @@ export const useFileUpload = ({
  const [validationProgress, setValidationProgress] =
  React.useState<ValidationProgress>({
  processed: 0,
+ total: 0,
  isValidating: false,
  })

@@ -69,7 +71,7 @@ export const useFileUpload = ({
  const resetState = React.useCallback(() => {
  setIsUploading(false)
  setProgress(0)
- setValidationProgress({ processed: 0, isValidating: false })
+ setValidationProgress({ processed: 0, total: 0, isValidating: false })
  }, [])

  const upload = React.useCallback(
@@ -79,6 +81,8 @@ export const useFileUpload = ({

  try {
  if (useJobBasedFlow) {
+ const totalResources = await countUniqueResourcesInCsv(config.file)
+
  await jobUpload.upload({
  file: config.file,
  resourceType: config.resourceType,
@@ -86,7 +90,11 @@ export const useFileUpload = ({
  abortSignal: config.abortSignal,
  onSuccess: async (jobId, containerKey) => {
  try {
- setValidationProgress({ processed: 0, isValidating: true })
+ setValidationProgress({
+ processed: 0,
+ total: totalResources,
+ isValidating: true,
+ })

  const validatedJob = await pollJobUntilValidated({
  projectKey,
@@ -97,7 +105,11 @@ export const useFileUpload = ({
  abortSignal: config.abortSignal,
  onJobUpdate: (job) => {
  const processed = job.summary?.total ?? 0
- setValidationProgress({ processed, isValidating: true })
+ setValidationProgress({
+ processed,
+ total: totalResources,
+ isValidating: true,
+ })
  config.onValidationProgress?.(job)
  },
  })
@@ -112,16 +124,7 @@ export const useFileUpload = ({
  )
  }

- let results: FileUploadResult['summary']['results'] = []
  if (validatedJob.summary.invalid > 0) {
- const recordsResponse = await getFileImportJobRecords({
- projectKey,
- importContainerKey: containerKey,
- jobId,
- limit: 500,
- isValid: false,
- })
- results = recordsResponse.results
  await safeDeleteContainer({ projectKey, containerKey })
  }

@@ -129,14 +132,19 @@ export const useFileUpload = ({
  containerKey,
  summary: {
  ...validatedJob.summary,
- results,
+ // TODO: Remove this once the old flow is fully removed
+ results: [],
  },
  jobId,
  job: validatedJob,
  }

  setIsUploading(false)
- setValidationProgress({ processed: 0, isValidating: false })
+ setValidationProgress({
+ processed: 0,
+ total: 0,
+ isValidating: false,
+ })
  config.onSuccess(result)
  } catch (error) {
  await safeDeleteContainer({ projectKey, containerKey })
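As the source hunks above show, the hook now derives `processed` from `job.summary?.total` during polling and also forwards the raw job to `config.onValidationProgress`. A minimal consumer-side callback sketch (the logging and the narrowed job shape are illustrative assumptions, not the package's API surface):

```typescript
// Sketch: a consumer callback for `onValidationProgress` in the job-based flow.
// Only `summary?.total` is read, matching what the hook reports as `processed`.
const onValidationProgress = (job: { summary?: { total?: number } }) => {
  console.info(`Backend has validated ${job.summary?.total ?? 0} resources so far`)
}
```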