@commercetools-frontend-extensions/operations 3.1.1 → 3.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,17 @@
1
1
  # @commercetools-frontend-extensions/operations
2
2
 
3
+ ## 3.2.0
4
+
5
+ ### Minor Changes
6
+
7
+ - [#1684](https://github.com/commercetools/merchant-center-operations/pull/1684) [`6dbe7fc`](https://github.com/commercetools/merchant-center-operations/commit/6dbe7fcff7440e42df01aaaf89dad93cb3bb3126) Thanks [@yassinejebli](https://github.com/yassinejebli)! - fix(import-details): fix progress calculation for new file import flow
8
+
9
+ ## 3.1.2
10
+
11
+ ### Patch Changes
12
+
13
+ - [#1678](https://github.com/commercetools/merchant-center-operations/pull/1678) [`482dfc3`](https://github.com/commercetools/merchant-center-operations/commit/482dfc367ab236be9767896da0404f34afb33dd2) Thanks [@yassinejebli](https://github.com/yassinejebli)! - feat: add `skipValidationPolling` to skip import validation for the new flow
14
+
3
15
  ## 3.1.1
4
16
 
5
17
  ### Patch Changes
package/README.md CHANGED
@@ -43,6 +43,7 @@ const { upload, isUploading, progress, validationProgress } = useFileUpload({
43
43
  - `resourceType` (required): The resource type
44
44
  - `settings` (optional): Import settings (format, decimal separator...)
45
45
  - `autoProcess` (optional): When `true`, the backend automatically starts processing after validation completes (job-based flow only). Default: `false`
46
+ - `skipValidationPolling` (optional): When `true`, skips full validation polling and returns once the job reaches the `processing` state. Useful for fire-and-forget uploads with `autoProcess: true` (job-based flow only). Default: `false`
46
47
  - `abortSignal` (optional): AbortSignal for cancellation
47
48
  - `onSuccess` (required): Callback when upload completes
48
49
  - `onError` (optional): Callback for errors
@@ -73,6 +74,7 @@ await upload({
73
74
  }
74
75
  },
75
76
  autoProcess?: boolean, // job-based flow only, default: false
77
+ skipValidationPolling?: boolean, // job-based flow only, default: false
76
78
  abortSignal: abortController.signal,
77
79
  onSuccess: (result) => {
78
80
  // result.containerKey - Import container key
@@ -22,8 +22,8 @@ var _inherits = require('@babel/runtime-corejs3/helpers/inherits');
22
22
  var _wrapNativeSuper = require('@babel/runtime-corejs3/helpers/wrapNativeSuper');
23
23
  var constants = require('@commercetools-frontend/constants');
24
24
  var _everyInstanceProperty = require('@babel/runtime-corejs3/core-js-stable/instance/every');
25
- var _Array$isArray = require('@babel/runtime-corejs3/core-js-stable/array/is-array');
26
25
  var pluralize = require('pluralize');
26
+ var _Array$isArray = require('@babel/runtime-corejs3/core-js-stable/array/is-array');
27
27
  var _reduceInstanceProperty = require('@babel/runtime-corejs3/core-js-stable/instance/reduce');
28
28
  var _flatMapInstanceProperty = require('@babel/runtime-corejs3/core-js-stable/instance/flat-map');
29
29
  var _mapInstanceProperty = require('@babel/runtime-corejs3/core-js-stable/instance/map');
@@ -371,14 +371,9 @@ function assertProcessFileImportJobResponse(maybeResponse) {
371
371
  throw new Error('Invalid Process File Import Job response');
372
372
  }
373
373
  function assertListFileImportJobsResponse(maybeResponse) {
374
- if (!_Array$isArray__default["default"](maybeResponse)) {
375
- throw new Error('Invalid List File Import Jobs response: expected an array');
376
- }
377
- if (maybeResponse.length > 0) {
378
- const requiredFields = ['id', 'fileName', 'importContainerKey', 'state'];
379
- if (!hasRequiredFields(maybeResponse[0], requiredFields)) {
380
- throw new Error('Invalid List File Import Jobs response: missing required fields');
381
- }
374
+ const requiredFields = ['results', 'total', 'limit', 'offset', 'count'];
375
+ if (!hasRequiredFields(maybeResponse, requiredFields)) {
376
+ throw new Error('Invalid List File Import Jobs response: missing required fields');
382
377
  }
383
378
  }
384
379
 
@@ -482,6 +477,9 @@ function isImportJobRejected(job) {
482
477
  function isImportJobTerminal(job) {
483
478
  return isImportJobValidated(job) || isImportJobRejected(job);
484
479
  }
480
+ function hasImportJobStartedProcessing(job) {
481
+ return !isImportJobQueued(job);
482
+ }
485
483
  function shouldContinuePollingForImportValidation(job) {
486
484
  if (!job) return true;
487
485
  return isImportJobQueued(job) || isImportJobProcessing(job);
@@ -735,6 +733,52 @@ const decodeFileNameFromImportContainerKey = importContainerKey => {
735
733
  }
736
734
  };
737
735
 
736
+ const pollJobUntilProcessing = async _ref => {
737
+ let projectKey = _ref.projectKey,
738
+ jobId = _ref.jobId,
739
+ importContainerKey = _ref.importContainerKey,
740
+ _ref$pollingInterval = _ref.pollingInterval,
741
+ pollingInterval = _ref$pollingInterval === void 0 ? 1000 : _ref$pollingInterval,
742
+ _ref$maxAttempts = _ref.maxAttempts,
743
+ maxAttempts = _ref$maxAttempts === void 0 ? 60 : _ref$maxAttempts,
744
+ abortSignal = _ref.abortSignal;
745
+ let attempts = 0;
746
+ while (attempts < maxAttempts) {
747
+ if (abortSignal?.aborted) {
748
+ throw new PollingAbortedError();
749
+ }
750
+ const job = await getFileImportJob({
751
+ projectKey,
752
+ importContainerKey,
753
+ jobId
754
+ });
755
+ if (abortSignal?.aborted) {
756
+ throw new PollingAbortedError();
757
+ }
758
+ if (hasImportJobStartedProcessing(job)) {
759
+ return job;
760
+ }
761
+ await new _Promise__default["default"]((resolve, reject) => {
762
+ let timeoutId;
763
+ const onAbort = () => {
764
+ clearTimeout(timeoutId);
765
+ reject(new PollingAbortedError());
766
+ };
767
+ if (abortSignal?.aborted) {
768
+ reject(new PollingAbortedError());
769
+ return;
770
+ }
771
+ timeoutId = _setTimeout__default["default"](() => {
772
+ abortSignal?.removeEventListener('abort', onAbort);
773
+ resolve();
774
+ }, pollingInterval);
775
+ abortSignal?.addEventListener('abort', onAbort);
776
+ });
777
+ attempts++;
778
+ }
779
+ throw new Error(`Job did not start processing after ${maxAttempts} attempts (${maxAttempts * pollingInterval / 1000}s)`);
780
+ };
781
+
738
782
  const pollJobUntilValidated = async _ref => {
739
783
  let projectKey = _ref.projectKey,
740
784
  jobId = _ref.jobId,
@@ -1111,10 +1155,46 @@ async function listFileImportJobs(_ref6) {
1111
1155
  assertListFileImportJobsResponse(response);
1112
1156
  return response;
1113
1157
  }
1158
+ /**
1159
+ * Gets the file import job info for an import container
1160
+ *
1161
+ * For the new file import job flow, import operations are created incrementally
1162
+ * during the 'initialising' state. The import summary total
1163
+ * reflects only the operations created so far, which can be misleading.
1164
+ *
1165
+ * This helper fetches the file import job (if it exists) to get:
1166
+ * - The true total from the job summary (known from initial CSV validation)
1167
+ * - Whether the job is still initializing (creating import operations)
1168
+ *
1169
+ * @returns Job info if found, null otherwise
1170
+ */
1171
+ async function getFileImportJobInfoForContainer(_ref7) {
1172
+ let projectKey = _ref7.projectKey,
1173
+ importContainerKey = _ref7.importContainerKey;
1174
+ try {
1175
+ const response = await listFileImportJobs({
1176
+ projectKey,
1177
+ importContainerKey,
1178
+ limit: 1
1179
+ });
1180
+ if (response.results.length > 0 && response.results[0].summary?.total != null) {
1181
+ const job = response.results[0];
1182
+ return {
1183
+ total: job.summary.total,
1184
+ isInitializing: job.state === 'initialising'
1185
+ };
1186
+ }
1187
+ return null;
1188
+ } catch {
1189
+ // Job might not exist (old flow)
1190
+ return null;
1191
+ }
1192
+ }
1114
1193
 
1115
1194
  function ownKeys$6(e, r) { var t = _Object$keys__default["default"](e); if (_Object$getOwnPropertySymbols__default["default"]) { var o = _Object$getOwnPropertySymbols__default["default"](e); r && (o = _filterInstanceProperty__default["default"](o).call(o, function (r) { return _Object$getOwnPropertyDescriptor__default["default"](e, r).enumerable; })), t.push.apply(t, o); } return t; }
1116
1195
  function _objectSpread$6(e) { for (var r = 1; r < arguments.length; r++) { var _context2, _context3; var t = null != arguments[r] ? arguments[r] : {}; r % 2 ? _forEachInstanceProperty__default["default"](_context2 = ownKeys$6(Object(t), !0)).call(_context2, function (r) { _defineProperty(e, r, t[r]); }) : _Object$getOwnPropertyDescriptors__default["default"] ? _Object$defineProperties__default["default"](e, _Object$getOwnPropertyDescriptors__default["default"](t)) : _forEachInstanceProperty__default["default"](_context3 = ownKeys$6(Object(t))).call(_context3, function (r) { _Object$defineProperty__default["default"](e, r, _Object$getOwnPropertyDescriptor__default["default"](t, r)); }); } return e; }
1117
1196
  function getImportState(importSummary) {
1197
+ let options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
1118
1198
  const processing = importSummary.states.processing > 0;
1119
1199
  if (processing) return ImportStates.Processing;
1120
1200
  const waitForUnresolvedReferences = importSummary.states.waitForMasterVariant > 0 || importSummary.states.unresolved > 0;
@@ -1123,6 +1203,11 @@ function getImportState(importSummary) {
1123
1203
  if (partiallyCompleted) return ImportStates.PartiallyCompleted;
1124
1204
  const noRunning = importSummary.total === 0;
1125
1205
  if (noRunning) return ImportStates.NoRunningImports;
1206
+
1207
+ // For the new flow: job is actively creating import operations (to show as Processing even if no operations exist yet)
1208
+ if (options.isJobInitializing) {
1209
+ return ImportStates.Processing;
1210
+ }
1126
1211
  const successfullyCompleted = importSummary.states.imported === importSummary.total || importSummary.states.deleted === importSummary.total;
1127
1212
  if (successfullyCompleted) return ImportStates.SuccessfullyCompleted;
1128
1213
  const failed = importSummary.states.rejected + importSummary.states.validationFailed === importSummary.total;
@@ -1271,12 +1356,33 @@ async function cancelImportContainerByKey(_ref8) {
1271
1356
  return response;
1272
1357
  }
1273
1358
  async function importContainerToContainerDetails(projectKey, importContainer) {
1274
- const importSummary = await fetchImportSummary({
1359
+ let importSummary = await fetchImportSummary({
1275
1360
  projectKey,
1276
1361
  importContainerKey: importContainer.key
1277
1362
  });
1278
- const importState = getImportState(importSummary);
1279
1363
  const isFileUploadImport = checkIfFileUploadImport(importContainer.tags);
1364
+
1365
+ // For the new file import job flow the import operations are created incrementally
1366
+ // The import summary total reflects only operations created so far
1367
+ // Only override total when job is actively initializing (creating operations)
1368
+ let isJobInitializing = false;
1369
+ if (isFileUploadImport) {
1370
+ const jobInfo = await getFileImportJobInfoForContainer({
1371
+ projectKey,
1372
+ importContainerKey: importContainer.key
1373
+ });
1374
+ if (jobInfo !== null) {
1375
+ isJobInitializing = jobInfo.isInitializing;
1376
+ if (isJobInitializing || importSummary.total > 0) {
1377
+ importSummary = _objectSpread$6(_objectSpread$6({}, importSummary), {}, {
1378
+ total: jobInfo.total
1379
+ });
1380
+ }
1381
+ }
1382
+ }
1383
+ const importState = getImportState(importSummary, {
1384
+ isJobInitializing
1385
+ });
1280
1386
  return {
1281
1387
  importContainer: importContainer,
1282
1388
  importState,
@@ -3009,7 +3115,7 @@ const useFileUpload = _ref2 => {
3009
3115
  setProgress(0);
3010
3116
  try {
3011
3117
  if (useJobBasedFlow) {
3012
- const totalResources = await countUniqueResourcesInCsv(config.file);
3118
+ const totalResources = config.skipValidationPolling ? 0 : await countUniqueResourcesInCsv(config.file);
3013
3119
  await jobUpload.upload({
3014
3120
  file: config.file,
3015
3121
  resourceType: config.resourceType,
@@ -3017,6 +3123,42 @@ const useFileUpload = _ref2 => {
3017
3123
  autoProcess: config.autoProcess,
3018
3124
  abortSignal: config.abortSignal,
3019
3125
  onSuccess: async (jobId, containerKey) => {
3126
+ if (config.skipValidationPolling) {
3127
+ try {
3128
+ const processingJob = await pollJobUntilProcessing({
3129
+ projectKey,
3130
+ jobId,
3131
+ importContainerKey: containerKey,
3132
+ abortSignal: config.abortSignal
3133
+ });
3134
+ const result = {
3135
+ containerKey,
3136
+ summary: {
3137
+ total: processingJob.summary?.total ?? 0,
3138
+ valid: processingJob.summary?.valid ?? 0,
3139
+ invalid: processingJob.summary?.invalid ?? 0,
3140
+ fieldsCount: processingJob.summary?.fieldsCount ?? 0,
3141
+ fields: processingJob.summary?.fields ?? [],
3142
+ ignoredFields: processingJob.summary?.ignoredFields ?? [],
3143
+ results: []
3144
+ },
3145
+ jobId,
3146
+ job: processingJob
3147
+ };
3148
+ setIsUploading(false);
3149
+ config.onSuccess(result);
3150
+ } catch (error) {
3151
+ await safeDeleteContainer({
3152
+ projectKey,
3153
+ containerKey
3154
+ });
3155
+ resetState();
3156
+ if (!(error instanceof PollingAbortedError)) {
3157
+ config.onError?.(error);
3158
+ }
3159
+ }
3160
+ return;
3161
+ }
3020
3162
  try {
3021
3163
  setValidationProgress({
3022
3164
  processed: 0,
@@ -3239,6 +3381,7 @@ exports.getFileImportJob = getFileImportJob;
3239
3381
  exports.getFileImportJobByIdURL = getFileImportJobByIdURL;
3240
3382
  exports.getFileImportJobDeleteURL = getFileImportJobDeleteURL;
3241
3383
  exports.getFileImportJobFileType = getFileImportJobFileType;
3384
+ exports.getFileImportJobInfoForContainer = getFileImportJobInfoForContainer;
3242
3385
  exports.getFileImportJobProcessURL = getFileImportJobProcessURL;
3243
3386
  exports.getFileImportJobRecords = getFileImportJobRecords;
3244
3387
  exports.getFileImportJobRecordsURL = getFileImportJobRecordsURL;
@@ -3256,6 +3399,7 @@ exports.getMissingRequiredFields = getMissingRequiredFields;
3256
3399
  exports.getProccessFileURL = getProccessFileURL;
3257
3400
  exports.getRowCount = getRowCount;
3258
3401
  exports.getValidatedColumns = getValidatedColumns;
3402
+ exports.hasImportJobStartedProcessing = hasImportJobStartedProcessing;
3259
3403
  exports.hasOwnProperty = hasOwnProperty;
3260
3404
  exports.hasRequiredFields = hasRequiredFields;
3261
3405
  exports.hasSingleKeyColumn = hasSingleKeyColumn;
@@ -3280,6 +3424,7 @@ exports.manualImports = manualImports;
3280
3424
  exports.mapFileUploadErrorsToUploadFileErrorRows = mapFileUploadErrorsToUploadFileErrorRows;
3281
3425
  exports.mapFormikErrors = mapFormikErrors;
3282
3426
  exports.mapUploadFileErrorsResponseToUploadFileErrorRows = mapUploadFileErrorsResponseToUploadFileErrorRows;
3427
+ exports.pollJobUntilProcessing = pollJobUntilProcessing;
3283
3428
  exports.pollJobUntilValidated = pollJobUntilValidated;
3284
3429
  exports.processFileImportJob = processFileImportJob;
3285
3430
  exports.processFileImportJobResponse = processFileImportJobResponse;
@@ -22,8 +22,8 @@ var _inherits = require('@babel/runtime-corejs3/helpers/inherits');
22
22
  var _wrapNativeSuper = require('@babel/runtime-corejs3/helpers/wrapNativeSuper');
23
23
  var constants = require('@commercetools-frontend/constants');
24
24
  var _everyInstanceProperty = require('@babel/runtime-corejs3/core-js-stable/instance/every');
25
- var _Array$isArray = require('@babel/runtime-corejs3/core-js-stable/array/is-array');
26
25
  var pluralize = require('pluralize');
26
+ var _Array$isArray = require('@babel/runtime-corejs3/core-js-stable/array/is-array');
27
27
  var _reduceInstanceProperty = require('@babel/runtime-corejs3/core-js-stable/instance/reduce');
28
28
  var _flatMapInstanceProperty = require('@babel/runtime-corejs3/core-js-stable/instance/flat-map');
29
29
  var _mapInstanceProperty = require('@babel/runtime-corejs3/core-js-stable/instance/map');
@@ -371,14 +371,9 @@ function assertProcessFileImportJobResponse(maybeResponse) {
371
371
  throw new Error('Invalid Process File Import Job response');
372
372
  }
373
373
  function assertListFileImportJobsResponse(maybeResponse) {
374
- if (!_Array$isArray__default["default"](maybeResponse)) {
375
- throw new Error('Invalid List File Import Jobs response: expected an array');
376
- }
377
- if (maybeResponse.length > 0) {
378
- const requiredFields = ['id', 'fileName', 'importContainerKey', 'state'];
379
- if (!hasRequiredFields(maybeResponse[0], requiredFields)) {
380
- throw new Error('Invalid List File Import Jobs response: missing required fields');
381
- }
374
+ const requiredFields = ['results', 'total', 'limit', 'offset', 'count'];
375
+ if (!hasRequiredFields(maybeResponse, requiredFields)) {
376
+ throw new Error('Invalid List File Import Jobs response: missing required fields');
382
377
  }
383
378
  }
384
379
 
@@ -482,6 +477,9 @@ function isImportJobRejected(job) {
482
477
  function isImportJobTerminal(job) {
483
478
  return isImportJobValidated(job) || isImportJobRejected(job);
484
479
  }
480
+ function hasImportJobStartedProcessing(job) {
481
+ return !isImportJobQueued(job);
482
+ }
485
483
  function shouldContinuePollingForImportValidation(job) {
486
484
  if (!job) return true;
487
485
  return isImportJobQueued(job) || isImportJobProcessing(job);
@@ -735,6 +733,52 @@ const decodeFileNameFromImportContainerKey = importContainerKey => {
735
733
  }
736
734
  };
737
735
 
736
+ const pollJobUntilProcessing = async _ref => {
737
+ let projectKey = _ref.projectKey,
738
+ jobId = _ref.jobId,
739
+ importContainerKey = _ref.importContainerKey,
740
+ _ref$pollingInterval = _ref.pollingInterval,
741
+ pollingInterval = _ref$pollingInterval === void 0 ? 1000 : _ref$pollingInterval,
742
+ _ref$maxAttempts = _ref.maxAttempts,
743
+ maxAttempts = _ref$maxAttempts === void 0 ? 60 : _ref$maxAttempts,
744
+ abortSignal = _ref.abortSignal;
745
+ let attempts = 0;
746
+ while (attempts < maxAttempts) {
747
+ if (abortSignal?.aborted) {
748
+ throw new PollingAbortedError();
749
+ }
750
+ const job = await getFileImportJob({
751
+ projectKey,
752
+ importContainerKey,
753
+ jobId
754
+ });
755
+ if (abortSignal?.aborted) {
756
+ throw new PollingAbortedError();
757
+ }
758
+ if (hasImportJobStartedProcessing(job)) {
759
+ return job;
760
+ }
761
+ await new _Promise__default["default"]((resolve, reject) => {
762
+ let timeoutId;
763
+ const onAbort = () => {
764
+ clearTimeout(timeoutId);
765
+ reject(new PollingAbortedError());
766
+ };
767
+ if (abortSignal?.aborted) {
768
+ reject(new PollingAbortedError());
769
+ return;
770
+ }
771
+ timeoutId = _setTimeout__default["default"](() => {
772
+ abortSignal?.removeEventListener('abort', onAbort);
773
+ resolve();
774
+ }, pollingInterval);
775
+ abortSignal?.addEventListener('abort', onAbort);
776
+ });
777
+ attempts++;
778
+ }
779
+ throw new Error(`Job did not start processing after ${maxAttempts} attempts (${maxAttempts * pollingInterval / 1000}s)`);
780
+ };
781
+
738
782
  const pollJobUntilValidated = async _ref => {
739
783
  let projectKey = _ref.projectKey,
740
784
  jobId = _ref.jobId,
@@ -1111,10 +1155,46 @@ async function listFileImportJobs(_ref6) {
1111
1155
  assertListFileImportJobsResponse(response);
1112
1156
  return response;
1113
1157
  }
1158
+ /**
1159
+ * Gets the file import job info for an import container
1160
+ *
1161
+ * For the new file import job flow, import operations are created incrementally
1162
+ * during the 'initialising' state. The import summary total
1163
+ * reflects only the operations created so far, which can be misleading.
1164
+ *
1165
+ * This helper fetches the file import job (if it exists) to get:
1166
+ * - The true total from the job summary (known from initial CSV validation)
1167
+ * - Whether the job is still initializing (creating import operations)
1168
+ *
1169
+ * @returns Job info if found, null otherwise
1170
+ */
1171
+ async function getFileImportJobInfoForContainer(_ref7) {
1172
+ let projectKey = _ref7.projectKey,
1173
+ importContainerKey = _ref7.importContainerKey;
1174
+ try {
1175
+ const response = await listFileImportJobs({
1176
+ projectKey,
1177
+ importContainerKey,
1178
+ limit: 1
1179
+ });
1180
+ if (response.results.length > 0 && response.results[0].summary?.total != null) {
1181
+ const job = response.results[0];
1182
+ return {
1183
+ total: job.summary.total,
1184
+ isInitializing: job.state === 'initialising'
1185
+ };
1186
+ }
1187
+ return null;
1188
+ } catch {
1189
+ // Job might not exist (old flow)
1190
+ return null;
1191
+ }
1192
+ }
1114
1193
 
1115
1194
  function ownKeys$6(e, r) { var t = _Object$keys__default["default"](e); if (_Object$getOwnPropertySymbols__default["default"]) { var o = _Object$getOwnPropertySymbols__default["default"](e); r && (o = _filterInstanceProperty__default["default"](o).call(o, function (r) { return _Object$getOwnPropertyDescriptor__default["default"](e, r).enumerable; })), t.push.apply(t, o); } return t; }
1116
1195
  function _objectSpread$6(e) { for (var r = 1; r < arguments.length; r++) { var _context2, _context3; var t = null != arguments[r] ? arguments[r] : {}; r % 2 ? _forEachInstanceProperty__default["default"](_context2 = ownKeys$6(Object(t), !0)).call(_context2, function (r) { _defineProperty(e, r, t[r]); }) : _Object$getOwnPropertyDescriptors__default["default"] ? _Object$defineProperties__default["default"](e, _Object$getOwnPropertyDescriptors__default["default"](t)) : _forEachInstanceProperty__default["default"](_context3 = ownKeys$6(Object(t))).call(_context3, function (r) { _Object$defineProperty__default["default"](e, r, _Object$getOwnPropertyDescriptor__default["default"](t, r)); }); } return e; }
1117
1196
  function getImportState(importSummary) {
1197
+ let options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
1118
1198
  const processing = importSummary.states.processing > 0;
1119
1199
  if (processing) return ImportStates.Processing;
1120
1200
  const waitForUnresolvedReferences = importSummary.states.waitForMasterVariant > 0 || importSummary.states.unresolved > 0;
@@ -1123,6 +1203,11 @@ function getImportState(importSummary) {
1123
1203
  if (partiallyCompleted) return ImportStates.PartiallyCompleted;
1124
1204
  const noRunning = importSummary.total === 0;
1125
1205
  if (noRunning) return ImportStates.NoRunningImports;
1206
+
1207
+ // For the new flow: job is actively creating import operations (to show as Processing even if no operations exist yet)
1208
+ if (options.isJobInitializing) {
1209
+ return ImportStates.Processing;
1210
+ }
1126
1211
  const successfullyCompleted = importSummary.states.imported === importSummary.total || importSummary.states.deleted === importSummary.total;
1127
1212
  if (successfullyCompleted) return ImportStates.SuccessfullyCompleted;
1128
1213
  const failed = importSummary.states.rejected + importSummary.states.validationFailed === importSummary.total;
@@ -1271,12 +1356,33 @@ async function cancelImportContainerByKey(_ref8) {
1271
1356
  return response;
1272
1357
  }
1273
1358
  async function importContainerToContainerDetails(projectKey, importContainer) {
1274
- const importSummary = await fetchImportSummary({
1359
+ let importSummary = await fetchImportSummary({
1275
1360
  projectKey,
1276
1361
  importContainerKey: importContainer.key
1277
1362
  });
1278
- const importState = getImportState(importSummary);
1279
1363
  const isFileUploadImport = checkIfFileUploadImport(importContainer.tags);
1364
+
1365
+ // For the new file import job flow the import operations are created incrementally
1366
+ // The import summary total reflects only operations created so far
1367
+ // Only override total when job is actively initializing (creating operations)
1368
+ let isJobInitializing = false;
1369
+ if (isFileUploadImport) {
1370
+ const jobInfo = await getFileImportJobInfoForContainer({
1371
+ projectKey,
1372
+ importContainerKey: importContainer.key
1373
+ });
1374
+ if (jobInfo !== null) {
1375
+ isJobInitializing = jobInfo.isInitializing;
1376
+ if (isJobInitializing || importSummary.total > 0) {
1377
+ importSummary = _objectSpread$6(_objectSpread$6({}, importSummary), {}, {
1378
+ total: jobInfo.total
1379
+ });
1380
+ }
1381
+ }
1382
+ }
1383
+ const importState = getImportState(importSummary, {
1384
+ isJobInitializing
1385
+ });
1280
1386
  return {
1281
1387
  importContainer: importContainer,
1282
1388
  importState,
@@ -3001,7 +3107,7 @@ const useFileUpload = _ref2 => {
3001
3107
  setProgress(0);
3002
3108
  try {
3003
3109
  if (useJobBasedFlow) {
3004
- const totalResources = await countUniqueResourcesInCsv(config.file);
3110
+ const totalResources = config.skipValidationPolling ? 0 : await countUniqueResourcesInCsv(config.file);
3005
3111
  await jobUpload.upload({
3006
3112
  file: config.file,
3007
3113
  resourceType: config.resourceType,
@@ -3009,6 +3115,42 @@ const useFileUpload = _ref2 => {
3009
3115
  autoProcess: config.autoProcess,
3010
3116
  abortSignal: config.abortSignal,
3011
3117
  onSuccess: async (jobId, containerKey) => {
3118
+ if (config.skipValidationPolling) {
3119
+ try {
3120
+ const processingJob = await pollJobUntilProcessing({
3121
+ projectKey,
3122
+ jobId,
3123
+ importContainerKey: containerKey,
3124
+ abortSignal: config.abortSignal
3125
+ });
3126
+ const result = {
3127
+ containerKey,
3128
+ summary: {
3129
+ total: processingJob.summary?.total ?? 0,
3130
+ valid: processingJob.summary?.valid ?? 0,
3131
+ invalid: processingJob.summary?.invalid ?? 0,
3132
+ fieldsCount: processingJob.summary?.fieldsCount ?? 0,
3133
+ fields: processingJob.summary?.fields ?? [],
3134
+ ignoredFields: processingJob.summary?.ignoredFields ?? [],
3135
+ results: []
3136
+ },
3137
+ jobId,
3138
+ job: processingJob
3139
+ };
3140
+ setIsUploading(false);
3141
+ config.onSuccess(result);
3142
+ } catch (error) {
3143
+ await safeDeleteContainer({
3144
+ projectKey,
3145
+ containerKey
3146
+ });
3147
+ resetState();
3148
+ if (!(error instanceof PollingAbortedError)) {
3149
+ config.onError?.(error);
3150
+ }
3151
+ }
3152
+ return;
3153
+ }
3012
3154
  try {
3013
3155
  setValidationProgress({
3014
3156
  processed: 0,
@@ -3231,6 +3373,7 @@ exports.getFileImportJob = getFileImportJob;
3231
3373
  exports.getFileImportJobByIdURL = getFileImportJobByIdURL;
3232
3374
  exports.getFileImportJobDeleteURL = getFileImportJobDeleteURL;
3233
3375
  exports.getFileImportJobFileType = getFileImportJobFileType;
3376
+ exports.getFileImportJobInfoForContainer = getFileImportJobInfoForContainer;
3234
3377
  exports.getFileImportJobProcessURL = getFileImportJobProcessURL;
3235
3378
  exports.getFileImportJobRecords = getFileImportJobRecords;
3236
3379
  exports.getFileImportJobRecordsURL = getFileImportJobRecordsURL;
@@ -3248,6 +3391,7 @@ exports.getMissingRequiredFields = getMissingRequiredFields;
3248
3391
  exports.getProccessFileURL = getProccessFileURL;
3249
3392
  exports.getRowCount = getRowCount;
3250
3393
  exports.getValidatedColumns = getValidatedColumns;
3394
+ exports.hasImportJobStartedProcessing = hasImportJobStartedProcessing;
3251
3395
  exports.hasOwnProperty = hasOwnProperty;
3252
3396
  exports.hasRequiredFields = hasRequiredFields;
3253
3397
  exports.hasSingleKeyColumn = hasSingleKeyColumn;
@@ -3272,6 +3416,7 @@ exports.manualImports = manualImports;
3272
3416
  exports.mapFileUploadErrorsToUploadFileErrorRows = mapFileUploadErrorsToUploadFileErrorRows;
3273
3417
  exports.mapFormikErrors = mapFormikErrors;
3274
3418
  exports.mapUploadFileErrorsResponseToUploadFileErrorRows = mapUploadFileErrorsResponseToUploadFileErrorRows;
3419
+ exports.pollJobUntilProcessing = pollJobUntilProcessing;
3275
3420
  exports.pollJobUntilValidated = pollJobUntilValidated;
3276
3421
  exports.processFileImportJob = processFileImportJob;
3277
3422
  exports.processFileImportJobResponse = processFileImportJobResponse;
@@ -18,8 +18,8 @@ import _inherits from '@babel/runtime-corejs3/helpers/esm/inherits';
18
18
  import _wrapNativeSuper from '@babel/runtime-corejs3/helpers/esm/wrapNativeSuper';
19
19
  import { MC_API_PROXY_TARGETS } from '@commercetools-frontend/constants';
20
20
  import _everyInstanceProperty from '@babel/runtime-corejs3/core-js-stable/instance/every';
21
- import _Array$isArray from '@babel/runtime-corejs3/core-js-stable/array/is-array';
22
21
  import { plural } from 'pluralize';
22
+ import _Array$isArray from '@babel/runtime-corejs3/core-js-stable/array/is-array';
23
23
  import _reduceInstanceProperty from '@babel/runtime-corejs3/core-js-stable/instance/reduce';
24
24
  import _flatMapInstanceProperty from '@babel/runtime-corejs3/core-js-stable/instance/flat-map';
25
25
  import _mapInstanceProperty from '@babel/runtime-corejs3/core-js-stable/instance/map';
@@ -331,14 +331,9 @@ function assertProcessFileImportJobResponse(maybeResponse) {
331
331
  throw new Error('Invalid Process File Import Job response');
332
332
  }
333
333
  function assertListFileImportJobsResponse(maybeResponse) {
334
- if (!_Array$isArray(maybeResponse)) {
335
- throw new Error('Invalid List File Import Jobs response: expected an array');
336
- }
337
- if (maybeResponse.length > 0) {
338
- const requiredFields = ['id', 'fileName', 'importContainerKey', 'state'];
339
- if (!hasRequiredFields(maybeResponse[0], requiredFields)) {
340
- throw new Error('Invalid List File Import Jobs response: missing required fields');
341
- }
334
+ const requiredFields = ['results', 'total', 'limit', 'offset', 'count'];
335
+ if (!hasRequiredFields(maybeResponse, requiredFields)) {
336
+ throw new Error('Invalid List File Import Jobs response: missing required fields');
342
337
  }
343
338
  }
344
339
 
@@ -442,6 +437,9 @@ function isImportJobRejected(job) {
442
437
  function isImportJobTerminal(job) {
443
438
  return isImportJobValidated(job) || isImportJobRejected(job);
444
439
  }
440
+ function hasImportJobStartedProcessing(job) {
441
+ return !isImportJobQueued(job);
442
+ }
445
443
  function shouldContinuePollingForImportValidation(job) {
446
444
  if (!job) return true;
447
445
  return isImportJobQueued(job) || isImportJobProcessing(job);
@@ -695,6 +693,52 @@ const decodeFileNameFromImportContainerKey = importContainerKey => {
695
693
  }
696
694
  };
697
695
 
696
+ const pollJobUntilProcessing = async _ref => {
697
+ let projectKey = _ref.projectKey,
698
+ jobId = _ref.jobId,
699
+ importContainerKey = _ref.importContainerKey,
700
+ _ref$pollingInterval = _ref.pollingInterval,
701
+ pollingInterval = _ref$pollingInterval === void 0 ? 1000 : _ref$pollingInterval,
702
+ _ref$maxAttempts = _ref.maxAttempts,
703
+ maxAttempts = _ref$maxAttempts === void 0 ? 60 : _ref$maxAttempts,
704
+ abortSignal = _ref.abortSignal;
705
+ let attempts = 0;
706
+ while (attempts < maxAttempts) {
707
+ if (abortSignal?.aborted) {
708
+ throw new PollingAbortedError();
709
+ }
710
+ const job = await getFileImportJob({
711
+ projectKey,
712
+ importContainerKey,
713
+ jobId
714
+ });
715
+ if (abortSignal?.aborted) {
716
+ throw new PollingAbortedError();
717
+ }
718
+ if (hasImportJobStartedProcessing(job)) {
719
+ return job;
720
+ }
721
+ await new _Promise((resolve, reject) => {
722
+ let timeoutId;
723
+ const onAbort = () => {
724
+ clearTimeout(timeoutId);
725
+ reject(new PollingAbortedError());
726
+ };
727
+ if (abortSignal?.aborted) {
728
+ reject(new PollingAbortedError());
729
+ return;
730
+ }
731
+ timeoutId = _setTimeout(() => {
732
+ abortSignal?.removeEventListener('abort', onAbort);
733
+ resolve();
734
+ }, pollingInterval);
735
+ abortSignal?.addEventListener('abort', onAbort);
736
+ });
737
+ attempts++;
738
+ }
739
+ throw new Error(`Job did not start processing after ${maxAttempts} attempts (${maxAttempts * pollingInterval / 1000}s)`);
740
+ };
741
+
698
742
  const pollJobUntilValidated = async _ref => {
699
743
  let projectKey = _ref.projectKey,
700
744
  jobId = _ref.jobId,
@@ -1071,10 +1115,46 @@ async function listFileImportJobs(_ref6) {
1071
1115
  assertListFileImportJobsResponse(response);
1072
1116
  return response;
1073
1117
  }
1118
+ /**
1119
+ * Gets the file import job info for an import container
1120
+ *
1121
+ * For the new file import job flow, import operations are created incrementally
1122
+ * during the 'initialising' state. The import summary total
1123
+ * reflects only the operations created so far, which can be misleading
1124
+ *
1125
+ * This helper fetches the file import job (if it exists) to get:
1126
+ * - The true total from the job summary (known from initial CSV validation)
1127
+ * - Whether the job is still initializing (creating import operations)
1128
+ *
1129
+ * @returns Job info if found, null otherwise
1130
+ */
1131
+ async function getFileImportJobInfoForContainer(_ref7) {
1132
+ let projectKey = _ref7.projectKey,
1133
+ importContainerKey = _ref7.importContainerKey;
1134
+ try {
1135
+ const response = await listFileImportJobs({
1136
+ projectKey,
1137
+ importContainerKey,
1138
+ limit: 1
1139
+ });
1140
+ if (response.results.length > 0 && response.results[0].summary?.total != null) {
1141
+ const job = response.results[0];
1142
+ return {
1143
+ total: job.summary.total,
1144
+ isInitializing: job.state === 'initialising'
1145
+ };
1146
+ }
1147
+ return null;
1148
+ } catch {
1149
+ // Job might not exist (old flow)
1150
+ return null;
1151
+ }
1152
+ }
1074
1153
 
1075
1154
  function ownKeys$6(e, r) { var t = _Object$keys(e); if (_Object$getOwnPropertySymbols) { var o = _Object$getOwnPropertySymbols(e); r && (o = _filterInstanceProperty(o).call(o, function (r) { return _Object$getOwnPropertyDescriptor(e, r).enumerable; })), t.push.apply(t, o); } return t; }
1076
1155
  function _objectSpread$6(e) { for (var r = 1; r < arguments.length; r++) { var _context2, _context3; var t = null != arguments[r] ? arguments[r] : {}; r % 2 ? _forEachInstanceProperty(_context2 = ownKeys$6(Object(t), !0)).call(_context2, function (r) { _defineProperty(e, r, t[r]); }) : _Object$getOwnPropertyDescriptors ? _Object$defineProperties(e, _Object$getOwnPropertyDescriptors(t)) : _forEachInstanceProperty(_context3 = ownKeys$6(Object(t))).call(_context3, function (r) { _Object$defineProperty(e, r, _Object$getOwnPropertyDescriptor(t, r)); }); } return e; }
1077
1156
  function getImportState(importSummary) {
1157
+ let options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
1078
1158
  const processing = importSummary.states.processing > 0;
1079
1159
  if (processing) return ImportStates.Processing;
1080
1160
  const waitForUnresolvedReferences = importSummary.states.waitForMasterVariant > 0 || importSummary.states.unresolved > 0;
@@ -1083,6 +1163,11 @@ function getImportState(importSummary) {
1083
1163
  if (partiallyCompleted) return ImportStates.PartiallyCompleted;
1084
1164
  const noRunning = importSummary.total === 0;
1085
1165
  if (noRunning) return ImportStates.NoRunningImports;
1166
+
1167
+ // For the new flow: job is actively creating import operations (to show as Processing even if no operations exist yet)
1168
+ if (options.isJobInitializing) {
1169
+ return ImportStates.Processing;
1170
+ }
1086
1171
  const successfullyCompleted = importSummary.states.imported === importSummary.total || importSummary.states.deleted === importSummary.total;
1087
1172
  if (successfullyCompleted) return ImportStates.SuccessfullyCompleted;
1088
1173
  const failed = importSummary.states.rejected + importSummary.states.validationFailed === importSummary.total;
@@ -1231,12 +1316,33 @@ async function cancelImportContainerByKey(_ref8) {
1231
1316
  return response;
1232
1317
  }
1233
1318
  async function importContainerToContainerDetails(projectKey, importContainer) {
1234
- const importSummary = await fetchImportSummary({
1319
+ let importSummary = await fetchImportSummary({
1235
1320
  projectKey,
1236
1321
  importContainerKey: importContainer.key
1237
1322
  });
1238
- const importState = getImportState(importSummary);
1239
1323
  const isFileUploadImport = checkIfFileUploadImport(importContainer.tags);
1324
+
1325
+ // For the new file import job flow the import operations are created incrementally
1326
+ // The import summary total reflects only operations created so far
1327
+ // Only override total when job is actively initializing (creating operations)
1328
+ let isJobInitializing = false;
1329
+ if (isFileUploadImport) {
1330
+ const jobInfo = await getFileImportJobInfoForContainer({
1331
+ projectKey,
1332
+ importContainerKey: importContainer.key
1333
+ });
1334
+ if (jobInfo !== null) {
1335
+ isJobInitializing = jobInfo.isInitializing;
1336
+ if (isJobInitializing || importSummary.total > 0) {
1337
+ importSummary = _objectSpread$6(_objectSpread$6({}, importSummary), {}, {
1338
+ total: jobInfo.total
1339
+ });
1340
+ }
1341
+ }
1342
+ }
1343
+ const importState = getImportState(importSummary, {
1344
+ isJobInitializing
1345
+ });
1240
1346
  return {
1241
1347
  importContainer: importContainer,
1242
1348
  importState,
@@ -2969,7 +3075,7 @@ const useFileUpload = _ref2 => {
2969
3075
  setProgress(0);
2970
3076
  try {
2971
3077
  if (useJobBasedFlow) {
2972
- const totalResources = await countUniqueResourcesInCsv(config.file);
3078
+ const totalResources = config.skipValidationPolling ? 0 : await countUniqueResourcesInCsv(config.file);
2973
3079
  await jobUpload.upload({
2974
3080
  file: config.file,
2975
3081
  resourceType: config.resourceType,
@@ -2977,6 +3083,42 @@ const useFileUpload = _ref2 => {
2977
3083
  autoProcess: config.autoProcess,
2978
3084
  abortSignal: config.abortSignal,
2979
3085
  onSuccess: async (jobId, containerKey) => {
3086
+ if (config.skipValidationPolling) {
3087
+ try {
3088
+ const processingJob = await pollJobUntilProcessing({
3089
+ projectKey,
3090
+ jobId,
3091
+ importContainerKey: containerKey,
3092
+ abortSignal: config.abortSignal
3093
+ });
3094
+ const result = {
3095
+ containerKey,
3096
+ summary: {
3097
+ total: processingJob.summary?.total ?? 0,
3098
+ valid: processingJob.summary?.valid ?? 0,
3099
+ invalid: processingJob.summary?.invalid ?? 0,
3100
+ fieldsCount: processingJob.summary?.fieldsCount ?? 0,
3101
+ fields: processingJob.summary?.fields ?? [],
3102
+ ignoredFields: processingJob.summary?.ignoredFields ?? [],
3103
+ results: []
3104
+ },
3105
+ jobId,
3106
+ job: processingJob
3107
+ };
3108
+ setIsUploading(false);
3109
+ config.onSuccess(result);
3110
+ } catch (error) {
3111
+ await safeDeleteContainer({
3112
+ projectKey,
3113
+ containerKey
3114
+ });
3115
+ resetState();
3116
+ if (!(error instanceof PollingAbortedError)) {
3117
+ config.onError?.(error);
3118
+ }
3119
+ }
3120
+ return;
3121
+ }
2980
3122
  try {
2981
3123
  setValidationProgress({
2982
3124
  processed: 0,
@@ -3107,4 +3249,4 @@ const useFileUpload = _ref2 => {
3107
3249
  };
3108
3250
  };
3109
3251
 
3110
- export { ActiveDragDropArea, COLUMN_DELIMITERS, CT_API_DOCS_URL, DELIMITERS, DisabledDropArea, DropAreaWrapper, EnabledDropArea, FILE_IMPORT_JOB_POLLING_INTERVAL, FileDropArea, FileDroppedArea, FileIcon, HttpError, IMPORT_LEGACY_MAX_FILE_SIZE_MB, IMPORT_LEGACY_MAX_ROW_COUNT, IMPORT_MAX_FILE_SIZE_MB, IMPORT_MAX_ITEM_COUNT, IMPORT_TAG_KEYS, IMPORT_TAG_VALUES, ImportStates, InfoBox, InvalidResponseError, LockIcon, NoResourcesToExportError, PollingAbortedError, ProjectKeyNotAvailableError, QueryPredicateError, RESOURCE_TYPE_DOCUMENTATION_LINKS, RESOURCE_TYPE_TEMPLATE_DOWNLOAD_LINKS, TAG_KEY_SOURCE_FILE_UPLOAD, UnexpectedColumnError, UnexpectedOperationStateError, UnexpectedResourceTypeError, UploadSeparator, UploadSettings, UploadingModal, allAutomatedImportOperations, allAutomatedImportOperationsResponse, allFileUploadImportOperations, allFileUploadImportOperationsResponse, appendCsvOrJsonExtensionIfAbsent, assertCancelContainerResponse, assertExportOperationsDownloadFileResponse, assertFileImportJob, assertFileImportJobRecordsResponse, assertFileUploadResponse, assertImportContainer, assertImportContainerPagedResponse, assertImportOperationPagedResponse, assertImportSummary, assertListFileImportJobsResponse, assertPaginatedExportOperationResponse, assertProcessFileImportJobResponse, assertProcessFileResponse, assertResourceType, automatedImportContainerKey, automatedImports, cancelImportContainerByKey, checkIfFileUploadImport, convertFileSizeToKB, countJsonFileItems, countUniqueResourcesInCsv, createFileImportJob, createImportContainerForFileUpload, decodeFileNameFromImportContainerKey, deleteFileImportJob, deleteImportContainer, dropAreaStyles, encodeFileNameWithTimestampToContainerKey, exportOperationsCompleted, exportOperationsProcessing, extractErrorDescriptionFromValidationMessage, fetchExportOperations, fetchImportContainerByKey, fetchImportContainerDetails, fetchImportContainers, fetchImportOperations, fetchImportSummaries, fetchImportSummary, fetchUsingXhr, 
fetcher, fileUploadImportContainerKey, fileUploadMissingKeysResponse, formatErrorCode, formatKeys, formatQueryString, getCreateImportContainerURL, getDeleteImportContainerURL, getExportOperationsURL, getFileImportJob, getFileImportJobByIdURL, getFileImportJobDeleteURL, getFileImportJobFileType, getFileImportJobProcessURL, getFileImportJobRecords, getFileImportJobRecordsURL, getFileImportJobsListURL, getFileImportJobsURL, getFileUploadErrorsCount, getFileUploadURL, getImportContainerByKeyURL, getImportContainerTasksURL, getImportContainersURL, getImportOperationsURL, getImportState, getImportSummaryURL, getMissingRequiredFields, getProccessFileURL, getRowCount, getValidatedColumns, hasOwnProperty, hasRequiredFields, hasSingleKeyColumn, importContainers, importStatesMap, importsSummaries, invalidFileImportJobRecordsResponse, invalidFileImportJobValidated, invalidFileUploadResponse, isAbortError, isError, isImportJobInitializing, isImportJobProcessing, isImportJobQueued, isImportJobReady, isImportJobRejected, isImportJobTerminal, isImportJobValidated, isResourceType, listFileImportJobs, manualImports, mapFileUploadErrorsToUploadFileErrorRows, mapFormikErrors, mapUploadFileErrorsResponseToUploadFileErrorRows, pollJobUntilValidated, processFileImportJob, processFileImportJobResponse, processUploadedFile, shouldContinuePollingForImportValidation, successfulAutomatedImportOperations, successfulAutomatedImportOperationsResponse, successfulFileUploadImportOperations, successfulFileUploadImportOperationsResponse, toBytes, toImportApiResourceType, uploadFileForImport, useFetchExportOperations, useFetchFileImportJob, useFetchFileImportJobRecords, useFetchImportContainerDetails, useFetchImportOperations, useFetchImportSummaries, useFileImportJobUpload, useFileUpload, useImportContainerUpload, validFileImportJobProcessing, validFileImportJobQueued, validFileImportJobRecordsResponse, validFileImportJobValidated, validFileUploadResponse, validProcessFileResponse, validateDelimiter 
};
3252
+ export { ActiveDragDropArea, COLUMN_DELIMITERS, CT_API_DOCS_URL, DELIMITERS, DisabledDropArea, DropAreaWrapper, EnabledDropArea, FILE_IMPORT_JOB_POLLING_INTERVAL, FileDropArea, FileDroppedArea, FileIcon, HttpError, IMPORT_LEGACY_MAX_FILE_SIZE_MB, IMPORT_LEGACY_MAX_ROW_COUNT, IMPORT_MAX_FILE_SIZE_MB, IMPORT_MAX_ITEM_COUNT, IMPORT_TAG_KEYS, IMPORT_TAG_VALUES, ImportStates, InfoBox, InvalidResponseError, LockIcon, NoResourcesToExportError, PollingAbortedError, ProjectKeyNotAvailableError, QueryPredicateError, RESOURCE_TYPE_DOCUMENTATION_LINKS, RESOURCE_TYPE_TEMPLATE_DOWNLOAD_LINKS, TAG_KEY_SOURCE_FILE_UPLOAD, UnexpectedColumnError, UnexpectedOperationStateError, UnexpectedResourceTypeError, UploadSeparator, UploadSettings, UploadingModal, allAutomatedImportOperations, allAutomatedImportOperationsResponse, allFileUploadImportOperations, allFileUploadImportOperationsResponse, appendCsvOrJsonExtensionIfAbsent, assertCancelContainerResponse, assertExportOperationsDownloadFileResponse, assertFileImportJob, assertFileImportJobRecordsResponse, assertFileUploadResponse, assertImportContainer, assertImportContainerPagedResponse, assertImportOperationPagedResponse, assertImportSummary, assertListFileImportJobsResponse, assertPaginatedExportOperationResponse, assertProcessFileImportJobResponse, assertProcessFileResponse, assertResourceType, automatedImportContainerKey, automatedImports, cancelImportContainerByKey, checkIfFileUploadImport, convertFileSizeToKB, countJsonFileItems, countUniqueResourcesInCsv, createFileImportJob, createImportContainerForFileUpload, decodeFileNameFromImportContainerKey, deleteFileImportJob, deleteImportContainer, dropAreaStyles, encodeFileNameWithTimestampToContainerKey, exportOperationsCompleted, exportOperationsProcessing, extractErrorDescriptionFromValidationMessage, fetchExportOperations, fetchImportContainerByKey, fetchImportContainerDetails, fetchImportContainers, fetchImportOperations, fetchImportSummaries, fetchImportSummary, fetchUsingXhr, 
fetcher, fileUploadImportContainerKey, fileUploadMissingKeysResponse, formatErrorCode, formatKeys, formatQueryString, getCreateImportContainerURL, getDeleteImportContainerURL, getExportOperationsURL, getFileImportJob, getFileImportJobByIdURL, getFileImportJobDeleteURL, getFileImportJobFileType, getFileImportJobInfoForContainer, getFileImportJobProcessURL, getFileImportJobRecords, getFileImportJobRecordsURL, getFileImportJobsListURL, getFileImportJobsURL, getFileUploadErrorsCount, getFileUploadURL, getImportContainerByKeyURL, getImportContainerTasksURL, getImportContainersURL, getImportOperationsURL, getImportState, getImportSummaryURL, getMissingRequiredFields, getProccessFileURL, getRowCount, getValidatedColumns, hasImportJobStartedProcessing, hasOwnProperty, hasRequiredFields, hasSingleKeyColumn, importContainers, importStatesMap, importsSummaries, invalidFileImportJobRecordsResponse, invalidFileImportJobValidated, invalidFileUploadResponse, isAbortError, isError, isImportJobInitializing, isImportJobProcessing, isImportJobQueued, isImportJobReady, isImportJobRejected, isImportJobTerminal, isImportJobValidated, isResourceType, listFileImportJobs, manualImports, mapFileUploadErrorsToUploadFileErrorRows, mapFormikErrors, mapUploadFileErrorsResponseToUploadFileErrorRows, pollJobUntilProcessing, pollJobUntilValidated, processFileImportJob, processFileImportJobResponse, processUploadedFile, shouldContinuePollingForImportValidation, successfulAutomatedImportOperations, successfulAutomatedImportOperationsResponse, successfulFileUploadImportOperations, successfulFileUploadImportOperationsResponse, toBytes, toImportApiResourceType, uploadFileForImport, useFetchExportOperations, useFetchFileImportJob, useFetchFileImportJobRecords, useFetchImportContainerDetails, useFetchImportOperations, useFetchImportSummaries, useFileImportJobUpload, useFileUpload, useImportContainerUpload, validFileImportJobProcessing, validFileImportJobQueued, validFileImportJobRecordsResponse, 
validFileImportJobValidated, validFileUploadResponse, validProcessFileResponse, validateDelimiter };
@@ -5,3 +5,24 @@ export declare function getFileImportJobRecords({ projectKey, importContainerKey
5
5
  export declare function processFileImportJob({ projectKey, resourceType, importContainerKey, jobId, action, }: ProcessFileImportJobParameters): Promise<ProcessFileImportJobResponse>;
6
6
  export declare function deleteFileImportJob({ projectKey, importContainerKey, jobId, }: DeleteFileImportJobParameters): Promise<void>;
7
7
  export declare function listFileImportJobs({ projectKey, importContainerKey, limit, offset, }: ListFileImportJobsParameters): Promise<ListFileImportJobsResponse>;
8
+ export type FileImportJobInfo = {
9
+ total: number;
10
+ isInitializing: boolean;
11
+ };
12
+ /**
13
+ * Gets the file import job info for an import container
14
+ *
15
+ * For the new file import job flow, import operations are created incrementally
16
+ * during the 'initialising' state. The import summary total
17
+ * reflects only the operations created so far, which can be misleading
18
+ *
19
+ * This helper fetches the file import job (if it exists) to get:
20
+ * - The true total from the job summary (known from initial CSV validation)
21
+ * - Whether the job is still initializing (creating import operations)
22
+ *
23
+ * @returns Job info if found, null otherwise
24
+ */
25
+ export declare function getFileImportJobInfoForContainer({ projectKey, importContainerKey, }: {
26
+ projectKey: string;
27
+ importContainerKey: string;
28
+ }): Promise<FileImportJobInfo | null>;
@@ -1,6 +1,9 @@
1
1
  import type { ImportContainer, ImportContainerPagedResponse } from '@commercetools/importapi-sdk';
2
2
  import { ImportStates, type ImportContainerQueryParams, type CancelContainerResponse, type ImportSummary, type ExtendedImportContainerDraft, type ImportContainerDetails, type ImportSummaries } from "../@types/index.js";
3
- export declare function getImportState(importSummary: ImportSummary): ImportStates;
3
+ type GetImportStateOptions = {
4
+ isJobInitializing?: boolean;
5
+ };
6
+ export declare function getImportState(importSummary: ImportSummary, options?: GetImportStateOptions): ImportStates;
4
7
  export declare function createImportContainerForFileUpload({ importContainerDraft, projectKey, }: {
5
8
  importContainerDraft: ExtendedImportContainerDraft;
6
9
  projectKey: string;
@@ -33,3 +36,4 @@ export declare function cancelImportContainerByKey({ projectKey, importContainer
33
36
  projectKey: string;
34
37
  importContainerKey: string;
35
38
  }): Promise<CancelContainerResponse>;
39
+ export {};
@@ -10,6 +10,7 @@ export type FileUploadConfig = {
10
10
  resourceType: ResourceTypeId;
11
11
  settings?: ExtendedImportContainerDraft['settings'];
12
12
  autoProcess?: boolean;
13
+ skipValidationPolling?: boolean;
13
14
  onSuccess: (result: FileUploadResult) => void;
14
15
  onError?: (error: unknown) => void;
15
16
  onProgress?: (progress: number) => void;
@@ -93,7 +93,13 @@ export interface ListFileImportJobsParameters {
93
93
  limit?: number;
94
94
  offset?: number;
95
95
  }
96
- export type ListFileImportJobsResponse = FileImportJob[];
96
+ export interface ListFileImportJobsResponse {
97
+ results: FileImportJob[];
98
+ total: number;
99
+ limit: number;
100
+ offset: number;
101
+ count: number;
102
+ }
97
103
  export declare function assertFileImportJob(maybeJob: unknown): asserts maybeJob is FileImportJob;
98
104
  export declare function assertFileImportJobRecordsResponse(maybeRecords: unknown): asserts maybeRecords is FileImportJobRecordsResponse;
99
105
  export declare function assertProcessFileImportJobResponse(maybeResponse: unknown): asserts maybeResponse is ProcessFileImportJobResponse;
@@ -9,4 +9,5 @@ export declare function isImportJobInitializing(job?: FileImportJob): boolean;
9
9
  export declare function isImportJobReady(job?: FileImportJob): boolean;
10
10
  export declare function isImportJobRejected(job?: FileImportJob): boolean;
11
11
  export declare function isImportJobTerminal(job?: FileImportJob): boolean;
12
+ export declare function hasImportJobStartedProcessing(job?: FileImportJob): boolean;
12
13
  export declare function shouldContinuePollingForImportValidation(job?: FileImportJob): boolean;
@@ -4,5 +4,6 @@ export * from "./file-upload.js";
4
4
  export * from "./form.js";
5
5
  export * from "./format.js";
6
6
  export * from "./import-container.js";
7
+ export * from "./poll-job-until-processing.js";
7
8
  export * from "./poll-job-until-validated.js";
8
9
  export * from "./url.js";
@@ -0,0 +1,10 @@
1
+ import type { FileImportJob } from "../@types/index.js";
2
+ export type PollJobUntilProcessingConfig = {
3
+ projectKey: string;
4
+ jobId: string;
5
+ importContainerKey: string;
6
+ pollingInterval?: number;
7
+ maxAttempts?: number;
8
+ abortSignal?: AbortSignal;
9
+ };
10
+ export declare const pollJobUntilProcessing: ({ projectKey, jobId, importContainerKey, pollingInterval, maxAttempts, abortSignal, }: PollJobUntilProcessingConfig) => Promise<FileImportJob>;
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@commercetools-frontend-extensions/operations",
3
- "version": "3.1.1",
3
+ "version": "3.2.0",
4
4
  "license": "Proprietary",
5
5
  "publishConfig": {
6
6
  "access": "public"
@@ -18,20 +18,20 @@
18
18
  "react-dropzone": "14.3.8"
19
19
  },
20
20
  "devDependencies": {
21
- "@commercetools-frontend/actions-global": "24.12.0",
22
- "@commercetools-frontend/application-components": "24.12.0",
23
- "@commercetools-frontend/application-shell": "24.12.0",
24
- "@commercetools-frontend/application-shell-connectors": "24.12.0",
25
- "@commercetools-frontend/constants": "24.12.0",
26
- "@commercetools-frontend/jest-preset-mc-app": "24.12.0",
27
- "@commercetools-frontend/permissions": "24.12.0",
28
- "@commercetools-frontend/sentry": "24.12.0",
21
+ "@commercetools-frontend/actions-global": "24.13.0",
22
+ "@commercetools-frontend/application-components": "24.13.0",
23
+ "@commercetools-frontend/application-shell": "24.13.0",
24
+ "@commercetools-frontend/application-shell-connectors": "24.13.0",
25
+ "@commercetools-frontend/constants": "24.13.0",
26
+ "@commercetools-frontend/jest-preset-mc-app": "24.13.0",
27
+ "@commercetools-frontend/permissions": "24.13.0",
28
+ "@commercetools-frontend/sentry": "24.13.0",
29
29
  "@commercetools-frontend/ui-kit": "20.3.0",
30
30
  "@emotion/react": "11.14.0",
31
31
  "@emotion/styled": "11.14.1",
32
32
  "@testing-library/react": "16.1.0",
33
33
  "@types/jest": "29.5.14",
34
- "@types/papaparse": "5.5.1",
34
+ "@types/papaparse": "5.5.2",
35
35
  "@types/pluralize": "0.0.33",
36
36
  "@types/react": "19.2.0",
37
37
  "msw": "1.3.5",
@@ -217,3 +217,53 @@ export async function listFileImportJobs({
217
217
  assertListFileImportJobsResponse(response)
218
218
  return response
219
219
  }
220
+
221
+ export type FileImportJobInfo = {
222
+ total: number
223
+ isInitializing: boolean
224
+ }
225
+
226
+ /**
227
+ * Gets the file import job info for an import container
228
+ *
229
+ * For the new file import job flow, import operations are created incrementally
230
+ * during the 'initialising' state. The import summary total
231
+ * reflects only the operations created so far, which can be misleading
232
+ *
233
+ * This helper fetches the file import job (if it exists) to get:
234
+ * - The true total from the job summary (known from initial CSV validation)
235
+ * - Whether the job is still initializing (creating import operations)
236
+ *
237
+ * @returns Job info if found, null otherwise
238
+ */
239
+ export async function getFileImportJobInfoForContainer({
240
+ projectKey,
241
+ importContainerKey,
242
+ }: {
243
+ projectKey: string
244
+ importContainerKey: string
245
+ }): Promise<FileImportJobInfo | null> {
246
+ try {
247
+ const response = await listFileImportJobs({
248
+ projectKey,
249
+ importContainerKey,
250
+ limit: 1,
251
+ })
252
+
253
+ if (
254
+ response.results.length > 0 &&
255
+ response.results[0].summary?.total != null
256
+ ) {
257
+ const job = response.results[0]
258
+ return {
259
+ total: job.summary.total,
260
+ isInitializing: job.state === 'initialising',
261
+ }
262
+ }
263
+
264
+ return null
265
+ } catch {
266
+ // Job might not exist (old flow)
267
+ return null
268
+ }
269
+ }
@@ -27,8 +27,16 @@ import {
27
27
  } from './urls'
28
28
  import { checkIfFileUploadImport } from '../@utils'
29
29
  import { fetcher } from './fetcher'
30
+ import { getFileImportJobInfoForContainer } from './file-import-jobs'
30
31
 
31
- export function getImportState(importSummary: ImportSummary): ImportStates {
32
+ type GetImportStateOptions = {
33
+ isJobInitializing?: boolean
34
+ }
35
+
36
+ export function getImportState(
37
+ importSummary: ImportSummary,
38
+ options: GetImportStateOptions = {}
39
+ ): ImportStates {
32
40
  const processing = importSummary.states.processing > 0
33
41
  if (processing) return ImportStates.Processing
34
42
 
@@ -48,6 +56,11 @@ export function getImportState(importSummary: ImportSummary): ImportStates {
48
56
  const noRunning = importSummary.total === 0
49
57
  if (noRunning) return ImportStates.NoRunningImports
50
58
 
59
+ // For the new flow: job is actively creating import operations (to show as Processing even if no operations exist yet)
60
+ if (options.isJobInitializing) {
61
+ return ImportStates.Processing
62
+ }
63
+
51
64
  const successfullyCompleted =
52
65
  importSummary.states.imported === importSummary.total ||
53
66
  importSummary.states.deleted === importSummary.total
@@ -240,13 +253,31 @@ async function importContainerToContainerDetails(
240
253
  projectKey: string,
241
254
  importContainer: ExtendedImportContainer
242
255
  ): Promise<ImportContainerDetails> {
243
- const importSummary = await fetchImportSummary({
256
+ let importSummary = await fetchImportSummary({
244
257
  projectKey,
245
258
  importContainerKey: importContainer.key,
246
259
  })
247
- const importState = getImportState(importSummary)
248
260
  const isFileUploadImport = checkIfFileUploadImport(importContainer.tags)
249
261
 
262
+ // For the new file import job flow the import operations are created incrementally
263
+ // The import summary total reflects only operations created so far
264
+ // Only override total when job is actively initializing (creating operations)
265
+ let isJobInitializing = false
266
+ if (isFileUploadImport) {
267
+ const jobInfo = await getFileImportJobInfoForContainer({
268
+ projectKey,
269
+ importContainerKey: importContainer.key,
270
+ })
271
+ if (jobInfo !== null) {
272
+ isJobInitializing = jobInfo.isInitializing
273
+ if (isJobInitializing || importSummary.total > 0) {
274
+ importSummary = { ...importSummary, total: jobInfo.total }
275
+ }
276
+ }
277
+ }
278
+
279
+ const importState = getImportState(importSummary, { isJobInitializing })
280
+
250
281
  return {
251
282
  importContainer: importContainer,
252
283
  importState,
@@ -4,7 +4,11 @@ import { useImportContainerUpload } from './use-import-container-upload'
4
4
  import { useFileImportJobUpload } from './use-file-import-job-upload'
5
5
  import { deleteImportContainer } from '../@api'
6
6
  import { HttpError, PollingAbortedError } from '../@errors'
7
- import { pollJobUntilValidated, countUniqueResourcesInCsv } from '../@utils'
7
+ import {
8
+ pollJobUntilValidated,
9
+ pollJobUntilProcessing,
10
+ countUniqueResourcesInCsv,
11
+ } from '../@utils'
8
12
  import type {
9
13
  ExtendedImportContainerDraft,
10
14
  FileUploadResult,
@@ -22,6 +26,7 @@ export type FileUploadConfig = {
22
26
  resourceType: ResourceTypeId
23
27
  settings?: ExtendedImportContainerDraft['settings']
24
28
  autoProcess?: boolean
29
+ skipValidationPolling?: boolean
25
30
  onSuccess: (result: FileUploadResult) => void
26
31
  onError?: (error: unknown) => void
27
32
  onProgress?: (progress: number) => void
@@ -82,7 +87,9 @@ export const useFileUpload = ({
82
87
 
83
88
  try {
84
89
  if (useJobBasedFlow) {
85
- const totalResources = await countUniqueResourcesInCsv(config.file)
90
+ const totalResources = config.skipValidationPolling
91
+ ? 0
92
+ : await countUniqueResourcesInCsv(config.file)
86
93
 
87
94
  await jobUpload.upload({
88
95
  file: config.file,
@@ -91,6 +98,41 @@ export const useFileUpload = ({
91
98
  autoProcess: config.autoProcess,
92
99
  abortSignal: config.abortSignal,
93
100
  onSuccess: async (jobId, containerKey) => {
101
+ if (config.skipValidationPolling) {
102
+ try {
103
+ const processingJob = await pollJobUntilProcessing({
104
+ projectKey,
105
+ jobId,
106
+ importContainerKey: containerKey,
107
+ abortSignal: config.abortSignal,
108
+ })
109
+
110
+ const result: FileUploadResult = {
111
+ containerKey,
112
+ summary: {
113
+ total: processingJob.summary?.total ?? 0,
114
+ valid: processingJob.summary?.valid ?? 0,
115
+ invalid: processingJob.summary?.invalid ?? 0,
116
+ fieldsCount: processingJob.summary?.fieldsCount ?? 0,
117
+ fields: processingJob.summary?.fields ?? [],
118
+ ignoredFields: processingJob.summary?.ignoredFields ?? [],
119
+ results: [],
120
+ },
121
+ jobId,
122
+ job: processingJob,
123
+ }
124
+ setIsUploading(false)
125
+ config.onSuccess(result)
126
+ } catch (error) {
127
+ await safeDeleteContainer({ projectKey, containerKey })
128
+ resetState()
129
+ if (!(error instanceof PollingAbortedError)) {
130
+ config.onError?.(error)
131
+ }
132
+ }
133
+ return
134
+ }
135
+
94
136
  try {
95
137
  setValidationProgress({
96
138
  processed: 0,
@@ -120,7 +120,13 @@ export interface ListFileImportJobsParameters {
120
120
  offset?: number
121
121
  }
122
122
 
123
- export type ListFileImportJobsResponse = FileImportJob[]
123
+ export interface ListFileImportJobsResponse {
124
+ results: FileImportJob[]
125
+ total: number
126
+ limit: number
127
+ offset: number
128
+ count: number
129
+ }
124
130
 
125
131
  export function assertFileImportJob(
126
132
  maybeJob: unknown
@@ -152,15 +158,10 @@ export function assertProcessFileImportJobResponse(
152
158
  export function assertListFileImportJobsResponse(
153
159
  maybeResponse: unknown
154
160
  ): asserts maybeResponse is ListFileImportJobsResponse {
155
- if (!Array.isArray(maybeResponse)) {
156
- throw new Error('Invalid List File Import Jobs response: expected an array')
157
- }
158
- if (maybeResponse.length > 0) {
159
- const requiredFields = ['id', 'fileName', 'importContainerKey', 'state']
160
- if (!hasRequiredFields(maybeResponse[0], requiredFields)) {
161
- throw new Error(
162
- 'Invalid List File Import Jobs response: missing required fields'
163
- )
164
- }
161
+ const requiredFields = ['results', 'total', 'limit', 'offset', 'count']
162
+ if (!hasRequiredFields(maybeResponse, requiredFields)) {
163
+ throw new Error(
164
+ 'Invalid List File Import Jobs response: missing required fields'
165
+ )
165
166
  }
166
167
  }
@@ -39,6 +39,10 @@ export function isImportJobTerminal(job?: FileImportJob): boolean {
39
39
  return isImportJobValidated(job) || isImportJobRejected(job)
40
40
  }
41
41
 
42
+ export function hasImportJobStartedProcessing(job?: FileImportJob): boolean {
43
+ return !isImportJobQueued(job)
44
+ }
45
+
42
46
  export function shouldContinuePollingForImportValidation(
43
47
  job?: FileImportJob
44
48
  ): boolean {
@@ -4,5 +4,6 @@ export * from './file-upload'
4
4
  export * from './form'
5
5
  export * from './format'
6
6
  export * from './import-container'
7
+ export * from './poll-job-until-processing'
7
8
  export * from './poll-job-until-validated'
8
9
  export * from './url'
@@ -0,0 +1,72 @@
1
+ import { getFileImportJob } from '../@api'
2
+ import { PollingAbortedError } from '../@errors'
3
+ import type { FileImportJob } from '../@types'
4
+ import { hasImportJobStartedProcessing } from './file-import-job-helpers'
5
+
6
+ export type PollJobUntilProcessingConfig = {
7
+ projectKey: string
8
+ jobId: string
9
+ importContainerKey: string
10
+ pollingInterval?: number
11
+ maxAttempts?: number
12
+ abortSignal?: AbortSignal
13
+ }
14
+
15
+ export const pollJobUntilProcessing = async ({
16
+ projectKey,
17
+ jobId,
18
+ importContainerKey,
19
+ pollingInterval = 1000,
20
+ maxAttempts = 60,
21
+ abortSignal,
22
+ }: PollJobUntilProcessingConfig): Promise<FileImportJob> => {
23
+ let attempts = 0
24
+
25
+ while (attempts < maxAttempts) {
26
+ if (abortSignal?.aborted) {
27
+ throw new PollingAbortedError()
28
+ }
29
+
30
+ const job = await getFileImportJob({
31
+ projectKey,
32
+ importContainerKey,
33
+ jobId,
34
+ })
35
+
36
+ if (abortSignal?.aborted) {
37
+ throw new PollingAbortedError()
38
+ }
39
+
40
+ if (hasImportJobStartedProcessing(job)) {
41
+ return job
42
+ }
43
+
44
+ await new Promise<void>((resolve, reject) => {
45
+ let timeoutId: ReturnType<typeof setTimeout>
46
+
47
+ const onAbort = () => {
48
+ clearTimeout(timeoutId)
49
+ reject(new PollingAbortedError())
50
+ }
51
+
52
+ if (abortSignal?.aborted) {
53
+ reject(new PollingAbortedError())
54
+ return
55
+ }
56
+
57
+ timeoutId = setTimeout(() => {
58
+ abortSignal?.removeEventListener('abort', onAbort)
59
+ resolve()
60
+ }, pollingInterval)
61
+
62
+ abortSignal?.addEventListener('abort', onAbort)
63
+ })
64
+ attempts++
65
+ }
66
+
67
+ throw new Error(
68
+ `Job did not start processing after ${maxAttempts} attempts (${
69
+ (maxAttempts * pollingInterval) / 1000
70
+ }s)`
71
+ )
72
+ }