@medusajs/core-flows 2.8.4-preview-20250529032055 → 2.8.4-preview-20250529090137

This diff shows the changes between two publicly released versions of the package, as published to a supported registry. It is provided for informational purposes only.
package/dist/product/steps/normalize-products-to-chunks.d.ts CHANGED
@@ -11,7 +11,11 @@ export declare const normalizeCsvToChunksStepId = "normalize-product-csv-to-chun
  * const data = normalizeCsvToChunksStep("products.csv")
  */
 export declare const normalizeCsvToChunksStep: import("@medusajs/framework/workflows-sdk").StepFunction<string, {
-    chunks: string[];
+    chunks: {
+        id: string;
+        toCreate: number;
+        toUpdate: number;
+    }[];
     summary: {
         toCreate: number;
         toUpdate: number;
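The declaration change above is the heart of this release: normalizeCsvToChunksStep no longer returns bare file keys but chunk descriptors that carry their own counts. A minimal TypeScript sketch (the type alias and function names are ours, not the package's) of how a consumer could derive the summary from the chunks alone, mirroring the reduce the compiled step performs:

```ts
// Output shape declared above; the alias name is hypothetical.
type NormalizeCsvToChunksOutput = {
  chunks: { id: string; toCreate: number; toUpdate: number }[]
  summary: { toCreate: number; toUpdate: number }
}

// The summary is just the element-wise sum of the per-chunk counts.
function summarize(
  chunks: NormalizeCsvToChunksOutput["chunks"]
): NormalizeCsvToChunksOutput["summary"] {
  return chunks.reduce(
    (acc, chunk) => ({
      toCreate: acc.toCreate + chunk.toCreate,
      toUpdate: acc.toUpdate + chunk.toUpdate,
    }),
    { toCreate: 0, toUpdate: 0 }
  )
}
```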
package/dist/product/steps/normalize-products-to-chunks.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"normalize-products-to-chunks.d.ts","sourceRoot":"","sources":["../../../src/product/steps/normalize-products-to-chunks.ts"],"names":[],"mappings":"AASA;;GAEG;AACH,MAAM,MAAM,8BAA8B,GAAG,MAAM,CAAA;AAEnD,eAAO,MAAM,0BAA0B,oCAAoC,CAAA;AAE3E;;;;;;GAMG;AACH,eAAO,MAAM,wBAAwB;;;;;;EA+CpC,CAAA"}
+ {"version":3,"file":"normalize-products-to-chunks.d.ts","sourceRoot":"","sources":["../../../src/product/steps/normalize-products-to-chunks.ts"],"names":[],"mappings":"AASA;;GAEG;AACH,MAAM,MAAM,8BAA8B,GAAG,MAAM,CAAA;AAEnD,eAAO,MAAM,0BAA0B,oCAAoC,CAAA;AA0K3E;;;;;;GAMG;AACH,eAAO,MAAM,wBAAwB;;YA5Ff,MAAM;kBAAY,MAAM;kBAAY,MAAM;;;kBA4GlB,MAAM;kBAAY,MAAM;;EAmBrE,CAAA"}
package/dist/product/steps/normalize-products-to-chunks.js CHANGED
@@ -1,42 +1,153 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.normalizeCsvToChunksStep = exports.normalizeCsvToChunksStepId = void 0;
+const csv_parse_1 = require("csv-parse");
 const utils_1 = require("@medusajs/framework/utils");
 const workflows_sdk_1 = require("@medusajs/framework/workflows-sdk");
-const utils_2 = require("../utils");
 exports.normalizeCsvToChunksStepId = "normalize-product-csv-to-chunks";
 /**
- * This step parses a CSV file holding products to import, returning the chunks
- * to be processed. Each chunk is written to a file using the file provider.
- *
- * @example
- * const data = normalizeCsvToChunksStep("products.csv")
+ * Processes a chunk of products by writing them to a file. Later the
+ * file will be processed after the import has been confirmed.
  */
-exports.normalizeCsvToChunksStep = (0, workflows_sdk_1.createStep)(exports.normalizeCsvToChunksStepId, async (fileKey, { container }) => {
-    const file = container.resolve(utils_1.Modules.FILE);
-    const contents = await file.getAsBuffer(fileKey);
-    const csvProducts = (0, utils_2.convertCsvToJson)(contents.toString("utf-8"));
-    const normalizer = new utils_1.CSVNormalizer(csvProducts);
-    const products = normalizer.proccess();
-    const create = Object.keys(products.toCreate).reduce((result, toCreateHandle) => {
+async function processChunk(file, fileKey, csvRows, currentRowNumber) {
+    const normalizer = new utils_1.CSVNormalizer(csvRows);
+    const products = normalizer.proccess(currentRowNumber);
+    let create = Object.keys(products.toCreate).reduce((result, toCreateHandle) => {
         result.push(utils_1.productValidators.CreateProduct.parse(products.toCreate[toCreateHandle]));
         return result;
     }, []);
-    const update = Object.keys(products.toUpdate).reduce((result, toUpdateId) => {
+    let update = Object.keys(products.toUpdate).reduce((result, toUpdateId) => {
         result.push(utils_1.productValidators.UpdateProduct.parse(products.toUpdate[toUpdateId]));
         return result;
     }, []);
+    const toCreate = create.length;
+    const toUpdate = update.length;
     const { id } = await file.createFiles({
         filename: `${fileKey}.json`,
         content: JSON.stringify({ create, update }),
         mimeType: "application/json",
     });
+    /**
+     * Release products from the memory
+     */
+    create = [];
+    update = [];
+    return {
+        id,
+        toCreate,
+        toUpdate,
+    };
+}
+/**
+ * Creates chunks by reading CSV rows from the stream
+ */
+async function createChunks(file, fileKey, stream) {
+    /**
+     * The row under process
+     */
+    let currentCSVRow = 0;
+    /**
+     * Number of rows to process in a chunk. The rows count might go a little
+     * up if there are more rows for the same product.
+     */
+    const rowsToRead = 1000;
+    /**
+     * Current count of processed rows for a given chunk.
+     */
+    let rowsReadSoFar = 0;
+    /**
+     * Validated chunks that have been written with the file
+     * provider
+     */
+    const chunks = [];
+    /**
+     * Currently collected rows to be processed as one chunk
+     */
+    let rows = [];
+    /**
+     * The unique value for the current row. We need this value to scan
+     * more rows after rowsToRead threshold has reached, but the upcoming
+     * rows are part of the same product.
+     */
+    let currentRowUniqueValue;
+    try {
+        for await (const row of stream) {
+            rowsReadSoFar++;
+            currentCSVRow++;
+            const normalizedRow = utils_1.CSVNormalizer.preProcess(row, currentCSVRow);
+            const rowValueValue = normalizedRow["product id"] || normalizedRow["product handle"];
+            /**
+             * Reached rows threshold
+             */
+            if (rowsReadSoFar > rowsToRead) {
+                /**
+                 * The current row unique value is not same as the previous row's
+                 * unique value. Hence we can break the chunk here and process
+                 * it.
+                 */
+                if (rowValueValue !== currentRowUniqueValue) {
+                    chunks.push(await processChunk(file, `${fileKey}-${chunks.length + 1}`, rows, currentCSVRow));
+                    /**
+                     * Reset for new row
+                     */
+                    rows = [normalizedRow];
+                    rowsReadSoFar = 0;
+                }
+                else {
+                    rows.push(normalizedRow);
+                }
+            }
+            else {
+                rows.push(normalizedRow);
+            }
+            currentRowUniqueValue = rowValueValue;
+        }
+        /**
+         * The file has finished and we have collected some rows that were
+         * under the chunk rows size threshold.
+         */
+        if (rows.length) {
+            chunks.push(await processChunk(file, `${fileKey}-${chunks.length + 1}`, rows, currentCSVRow));
+        }
+    }
+    catch (error) {
+        if (!stream.destroyed) {
+            stream.destroy();
+        }
+        /**
+         * Cleanup in case of an error
+         */
+        await file.deleteFiles(chunks.map((chunk) => chunk.id).concat(fileKey));
+        throw error;
+    }
+    return chunks;
+}
+/**
+ * This step parses a CSV file holding products to import, returning the chunks
+ * to be processed. Each chunk is written to a file using the file provider.
+ *
+ * @example
+ * const data = normalizeCsvToChunksStep("products.csv")
+ */
+exports.normalizeCsvToChunksStep = (0, workflows_sdk_1.createStep)(exports.normalizeCsvToChunksStepId, async (fileKey, { container }) => {
+    const file = container.resolve(utils_1.Modules.FILE);
+    const contents = await file.getDownloadStream(fileKey);
+    const chunks = await createChunks(file, fileKey, contents.pipe((0, csv_parse_1.parse)({
+        columns: true,
+        skip_empty_lines: true,
+    })));
+    const summary = chunks.reduce((result, chunk) => {
+        result.toCreate = result.toCreate + chunk.toCreate;
+        result.toUpdate = result.toUpdate + chunk.toUpdate;
+        return result;
+    }, { toCreate: 0, toUpdate: 0 });
+    /**
+     * Delete CSV file once we have the chunks
+     */
+    await file.deleteFiles(fileKey);
     return new workflows_sdk_1.StepResponse({
-        chunks: [id],
-        summary: {
-            toCreate: create.length,
-            toUpdate: update.length,
-        },
+        chunks,
+        summary,
     });
 });
 //# sourceMappingURL=normalize-products-to-chunks.js.map
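The interesting logic in this hunk is the chunk boundary rule in createChunks: it buffers roughly rowsToRead (1000) rows, but past that threshold it only cuts a chunk once the row's unique value (product id, falling back to product handle) differs from the previous row's, so the multiple CSV rows that make up one product are never split across two chunks. A standalone sketch of just that rule, with hypothetical names (chunkRows, Row) and the file writing elided:

```ts
type Row = Record<string, string>

// Buffer rows, cutting a chunk only at a product boundary once the
// size threshold is crossed; mirrors createChunks minus the file I/O.
async function* chunkRows(
  rows: AsyncIterable<Row>,
  limit = 1000
): AsyncGenerator<Row[]> {
  let buffer: Row[] = []
  let readSoFar = 0
  let previousKey: string | undefined
  for await (const row of rows) {
    readSoFar++
    const key = row["product id"] || row["product handle"]
    if (readSoFar > limit && key !== previousKey) {
      // Safe to cut: this row starts a different product.
      yield buffer
      buffer = [row]
      readSoFar = 0
    } else {
      buffer.push(row)
    }
    previousKey = key
  }
  // Flush whatever is left below the threshold.
  if (buffer.length) {
    yield buffer
  }
}
```

One consequence, visible in the compiled code as well, is that a chunk can grow past rowsToRead while consecutive rows belong to the same product.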
package/dist/product/steps/normalize-products-to-chunks.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"normalize-products-to-chunks.js","sourceRoot":"","sources":["../../../src/product/steps/normalize-products-to-chunks.ts"],"names":[],"mappings":";;;AACA,qDAIkC;AAClC,qEAA4E;AAC5E,oCAA2C;AAO9B,QAAA,0BAA0B,GAAG,iCAAiC,CAAA;AAE3E;;;;;;GAMG;AACU,QAAA,wBAAwB,GAAG,IAAA,0BAAU,EAChD,kCAA0B,EAC1B,KAAK,EAAE,OAAuC,EAAE,EAAE,SAAS,EAAE,EAAE,EAAE;IAC/D,MAAM,IAAI,GAAG,SAAS,CAAC,OAAO,CAAC,eAAO,CAAC,IAAI,CAAC,CAAA;IAC5C,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,WAAW,CAAC,OAAO,CAAC,CAAA;IAEhD,MAAM,WAAW,GAAG,IAAA,wBAAgB,EAElC,QAAQ,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,CAAA;IAE7B,MAAM,UAAU,GAAG,IAAI,qBAAa,CAAC,WAAW,CAAC,CAAA;IACjD,MAAM,QAAQ,GAAG,UAAU,CAAC,QAAQ,EAAE,CAAA;IAEtC,MAAM,MAAM,GAAG,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC,MAAM,CAElD,CAAC,MAAM,EAAE,cAAc,EAAE,EAAE;QAC3B,MAAM,CAAC,IAAI,CACT,yBAAiB,CAAC,aAAa,CAAC,KAAK,CACnC,QAAQ,CAAC,QAAQ,CAAC,cAAc,CAAC,CACF,CAClC,CAAA;QACD,OAAO,MAAM,CAAA;IACf,CAAC,EAAE,EAAE,CAAC,CAAA;IAEN,MAAM,MAAM,GAAG,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC,MAAM,CAElD,CAAC,MAAM,EAAE,UAAU,EAAE,EAAE;QACvB,MAAM,CAAC,IAAI,CACT,yBAAiB,CAAC,aAAa,CAAC,KAAK,CAAC,QAAQ,CAAC,QAAQ,CAAC,UAAU,CAAC,CAAC,CACrE,CAAA;QACD,OAAO,MAAM,CAAA;IACf,CAAC,EAAE,EAAE,CAAC,CAAA;IAEN,MAAM,EAAE,EAAE,EAAE,GAAG,MAAM,IAAI,CAAC,WAAW,CAAC;QACpC,QAAQ,EAAE,GAAG,OAAO,OAAO;QAC3B,OAAO,EAAE,IAAI,CAAC,SAAS,CAAC,EAAE,MAAM,EAAE,MAAM,EAAE,CAAC;QAC3C,QAAQ,EAAE,kBAAkB;KAC7B,CAAC,CAAA;IAEF,OAAO,IAAI,4BAAY,CAAC;QACtB,MAAM,EAAE,CAAC,EAAE,CAAC;QACZ,OAAO,EAAE;YACP,QAAQ,EAAE,MAAM,CAAC,MAAM;YACvB,QAAQ,EAAE,MAAM,CAAC,MAAM;SACxB;KACF,CAAC,CAAA;AACJ,CAAC,CACF,CAAA"}
+ {"version":3,"file":"normalize-products-to-chunks.js","sourceRoot":"","sources":["../../../src/product/steps/normalize-products-to-chunks.ts"],"names":[],"mappings":";;;AAAA,yCAAyC;AAEzC,qDAIkC;AAClC,qEAA4E;AAO/D,QAAA,0BAA0B,GAAG,iCAAiC,CAAA;AAE3E;;;GAGG;AACH,KAAK,UAAU,YAAY,CACzB,IAAwB,EACxB,OAAe,EACf,OAA2D,EAC3D,gBAAwB;IAExB,MAAM,UAAU,GAAG,IAAI,qBAAa,CAAC,OAAO,CAAC,CAAA;IAC7C,MAAM,QAAQ,GAAG,UAAU,CAAC,QAAQ,CAAC,gBAAgB,CAAC,CAAA;IAEtD,IAAI,MAAM,GAAG,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC,MAAM,CAEhD,CAAC,MAAM,EAAE,cAAc,EAAE,EAAE;QAC3B,MAAM,CAAC,IAAI,CACT,yBAAiB,CAAC,aAAa,CAAC,KAAK,CACnC,QAAQ,CAAC,QAAQ,CAAC,cAAc,CAAC,CACF,CAClC,CAAA;QACD,OAAO,MAAM,CAAA;IACf,CAAC,EAAE,EAAE,CAAC,CAAA;IAEN,IAAI,MAAM,GAAG,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC,MAAM,CAEhD,CAAC,MAAM,EAAE,UAAU,EAAE,EAAE;QACvB,MAAM,CAAC,IAAI,CACT,yBAAiB,CAAC,aAAa,CAAC,KAAK,CAAC,QAAQ,CAAC,QAAQ,CAAC,UAAU,CAAC,CAAC,CACrE,CAAA;QACD,OAAO,MAAM,CAAA;IACf,CAAC,EAAE,EAAE,CAAC,CAAA;IAEN,MAAM,QAAQ,GAAG,MAAM,CAAC,MAAM,CAAA;IAC9B,MAAM,QAAQ,GAAG,MAAM,CAAC,MAAM,CAAA;IAE9B,MAAM,EAAE,EAAE,EAAE,GAAG,MAAM,IAAI,CAAC,WAAW,CAAC;QACpC,QAAQ,EAAE,GAAG,OAAO,OAAO;QAC3B,OAAO,EAAE,IAAI,CAAC,SAAS,CAAC,EAAE,MAAM,EAAE,MAAM,EAAE,CAAC;QAC3C,QAAQ,EAAE,kBAAkB;KAC7B,CAAC,CAAA;IAEF;;OAEG;IACH,MAAM,GAAG,EAAE,CAAA;IACX,MAAM,GAAG,EAAE,CAAA;IAEX,OAAO;QACL,EAAE;QACF,QAAQ;QACR,QAAQ;KACT,CAAA;AACH,CAAC;AAED;;GAEG;AACH,KAAK,UAAU,YAAY,CACzB,IAAwB,EACxB,OAAe,EACf,MAAc;IAEd;;OAEG;IACH,IAAI,aAAa,GAAG,CAAC,CAAA;IAErB;;;OAGG;IACH,MAAM,UAAU,GAAG,IAAI,CAAA;IAEvB;;OAEG;IACH,IAAI,aAAa,GAAG,CAAC,CAAA;IAErB;;;OAGG;IACH,MAAM,MAAM,GAAyD,EAAE,CAAA;IAEvE;;OAEG;IACH,IAAI,IAAI,GAAuD,EAAE,CAAA;IAEjE;;;;OAIG;IACH,IAAI,qBAAyC,CAAA;IAE7C,IAAI,CAAC;QACH,IAAI,KAAK,EAAE,MAAM,GAAG,IAAI,MAAM,EAAE,CAAC;YAC/B,aAAa,EAAE,CAAA;YACf,aAAa,EAAE,CAAA;YACf,MAAM,aAAa,GAAG,qBAAa,CAAC,UAAU,CAAC,GAAG,EAAE,aAAa,CAAC,CAAA;YAClE,MAAM,aAAa,GACjB,aAAa,CAAC,YAAY,CAAC,IAAI,aAAa,CAAC,gBAAgB,CAAC,CAAA;YAEhE;;eAEG;YACH,IAAI,aAAa,GAAG,UAAU,EAAE,CAAC;gBAC/B;;;;mBAIG;gBACH,IAAI,aAAa,KAAK,qBAAqB,EAAE,CAAC;oBAC5C,MAAM,CAAC,IAAI,CACT,MAAM,YAAY,CAChB,IAAI,EACJ,GAAG,OAAO,IAAI,MAAM,CAAC,MAAM,GAAG,CAAC,EAAE,EACjC,IAAI,EACJ,aAAa,CACd,CACF,CAAA;oBAED;;uBAEG;oBACH,IAAI,GAAG,CAAC,aAAa,CAAC,CAAA;oBACtB,aAAa,GAAG,CAAC,CAAA;gBACnB,CAAC;qBAAM,CAAC;oBACN,IAAI,CAAC,IAAI,CAAC,aAAa,CAAC,CAAA;gBAC1B,CAAC;YACH,CAAC;iBAAM,CAAC;gBACN,IAAI,CAAC,IAAI,CAAC,aAAa,CAAC,CAAA;YAC1B,CAAC;YAED,qBAAqB,GAAG,aAAa,CAAA;QACvC,CAAC;QAED;;;WAGG;QACH,IAAI,IAAI,CAAC,MAAM,EAAE,CAAC;YAChB,MAAM,CAAC,IAAI,CACT,MAAM,YAAY,CAChB,IAAI,EACJ,GAAG,OAAO,IAAI,MAAM,CAAC,MAAM,GAAG,CAAC,EAAE,EACjC,IAAI,EACJ,aAAa,CACd,CACF,CAAA;QACH,CAAC;IACH,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACf,IAAI,CAAC,MAAM,CAAC,SAAS,EAAE,CAAC;YACtB,MAAM,CAAC,OAAO,EAAE,CAAA;QAClB,CAAC;QAED;;WAEG;QACH,MAAM,IAAI,CAAC,WAAW,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,KAAK,EAAE,EAAE,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,CAAA;QACvE,MAAM,KAAK,CAAA;IACb,CAAC;IAED,OAAO,MAAM,CAAA;AACf,CAAC;AAED;;;;;;GAMG;AACU,QAAA,wBAAwB,GAAG,IAAA,0BAAU,EAChD,kCAA0B,EAC1B,KAAK,EAAE,OAAuC,EAAE,EAAE,SAAS,EAAE,EAAE,EAAE;IAC/D,MAAM,IAAI,GAAG,SAAS,CAAC,OAAO,CAAC,eAAO,CAAC,IAAI,CAAC,CAAA;IAC5C,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,iBAAiB,CAAC,OAAO,CAAC,CAAA;IACtD,MAAM,MAAM,GAAG,MAAM,YAAY,CAC/B,IAAI,EACJ,OAAO,EACP,QAAQ,CAAC,IAAI,CACX,IAAA,iBAAK,EAAC;QACJ,OAAO,EAAE,IAAI;QACb,gBAAgB,EAAE,IAAI;KACvB,CAAC,CACH,CACF,CAAA;IAED,MAAM,OAAO,GAAG,MAAM,CAAC,MAAM,CAC3B,CAAC,MAAM,EAAE,KAAK,EAAE,EAAE;QAChB,MAAM,CAAC,QAAQ,GAAG,MAAM,CAAC,QAAQ,GAAG,KAAK,CAAC,QAAQ,CAAA;QAClD,MAAM,CAAC,QAAQ,GAAG,MAAM,CAAC,QAAQ,GAAG,KAAK,CAAC,QAAQ,CAAA;QAClD,OAAO,MAAM,CAAA;IACf,CAAC,E
ACD,EAAE,QAAQ,EAAE,CAAC,EAAE,QAAQ,EAAE,CAAC,EAAE,CAC7B,CAAA;IAED;;OAEG;IACH,MAAM,IAAI,CAAC,WAAW,CAAC,OAAO,CAAC,CAAA;IAE/B,OAAO,IAAI,4BAAY,CAAC;QACtB,MAAM;QACN,OAAO;KACR,CAAC,CAAA;AACJ,CAAC,CACF,CAAA"}
package/dist/product/steps/normalize-products.js CHANGED
@@ -14,7 +14,7 @@ exports.normalizeCsvStepId = "normalize-product-csv";
  */
 exports.normalizeCsvStep = (0, workflows_sdk_1.createStep)(exports.normalizeCsvStepId, async (fileContent) => {
     const csvProducts = (0, utils_2.convertCsvToJson)(fileContent);
-    const normalizer = new utils_1.CSVNormalizer(csvProducts);
+    const normalizer = new utils_1.CSVNormalizer(csvProducts.map((row, index) => utils_1.CSVNormalizer.preProcess(row, index + 1)));
     const products = normalizer.proccess();
     const create = Object.keys(products.toCreate).reduce((result, toCreateHandle) => {
         result.push(utils_1.productValidators.CreateProduct.parse(products.toCreate[toCreateHandle]));
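The non-streaming normalizeCsvStep now mirrors the streaming path by running every parsed row through CSVNormalizer.preProcess with a 1-based row number before constructing the normalizer. A small sketch of that call; the column names here are illustrative assumptions, not a schema:

```ts
import { CSVNormalizer } from "@medusajs/framework/utils"

// Illustrative rows; the "product handle"/"product title" keys are assumed.
const rows = [
  { "product handle": "shirt", "product title": "Shirt" },
  { "product handle": "mug", "product title": "Mug" },
]

// preProcess takes the 1-based CSV row number: index + 1 for an
// in-memory array, a running counter in the streaming variant.
const normalizer = new CSVNormalizer(
  rows.map((row, index) => CSVNormalizer.preProcess(row, index + 1))
)
const products = normalizer.proccess() // (sic) the framework's method name
```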
package/dist/product/steps/normalize-products.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"normalize-products.js","sourceRoot":"","sources":["../../../src/product/steps/normalize-products.ts"],"names":[],"mappings":";;;AACA,qDAA4E;AAC5E,qEAA4E;AAC5E,oCAA2C;AAO9B,QAAA,kBAAkB,GAAG,uBAAuB,CAAA;AACzD;;;;;;GAMG;AACU,QAAA,gBAAgB,GAAG,IAAA,0BAAU,EACxC,0BAAkB,EAClB,KAAK,EAAE,WAAyC,EAAE,EAAE;IAClD,MAAM,WAAW,GACf,IAAA,wBAAgB,EACd,WAAW,CACZ,CAAA;IACH,MAAM,UAAU,GAAG,IAAI,qBAAa,CAAC,WAAW,CAAC,CAAA;IACjD,MAAM,QAAQ,GAAG,UAAU,CAAC,QAAQ,EAAE,CAAA;IAEtC,MAAM,MAAM,GAAG,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC,MAAM,CAElD,CAAC,MAAM,EAAE,cAAc,EAAE,EAAE;QAC3B,MAAM,CAAC,IAAI,CACT,yBAAiB,CAAC,aAAa,CAAC,KAAK,CACnC,QAAQ,CAAC,QAAQ,CAAC,cAAc,CAAC,CACF,CAClC,CAAA;QACD,OAAO,MAAM,CAAA;IACf,CAAC,EAAE,EAAE,CAAC,CAAA;IAEN,MAAM,MAAM,GAAG,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC,MAAM,CAElD,CAAC,MAAM,EAAE,UAAU,EAAE,EAAE;QACvB,MAAM,CAAC,IAAI,CACT,yBAAiB,CAAC,aAAa,CAAC,KAAK,CAAC,QAAQ,CAAC,QAAQ,CAAC,UAAU,CAAC,CAAC,CACrE,CAAA;QACD,OAAO,MAAM,CAAA;IACf,CAAC,EAAE,EAAE,CAAC,CAAA;IAEN,OAAO,IAAI,4BAAY,CAAC;QACtB,MAAM;QACN,MAAM;KACP,CAAC,CAAA;AACJ,CAAC,CACF,CAAA"}
+ {"version":3,"file":"normalize-products.js","sourceRoot":"","sources":["../../../src/product/steps/normalize-products.ts"],"names":[],"mappings":";;;AACA,qDAA4E;AAC5E,qEAA4E;AAC5E,oCAA2C;AAO9B,QAAA,kBAAkB,GAAG,uBAAuB,CAAA;AACzD;;;;;;GAMG;AACU,QAAA,gBAAgB,GAAG,IAAA,0BAAU,EACxC,0BAAkB,EAClB,KAAK,EAAE,WAAyC,EAAE,EAAE;IAClD,MAAM,WAAW,GACf,IAAA,wBAAgB,EAA4C,WAAW,CAAC,CAAA;IAC1E,MAAM,UAAU,GAAG,IAAI,qBAAa,CAClC,WAAW,CAAC,GAAG,CAAC,CAAC,GAAG,EAAE,KAAK,EAAE,EAAE,CAAC,qBAAa,CAAC,UAAU,CAAC,GAAG,EAAE,KAAK,GAAG,CAAC,CAAC,CAAC,CAC1E,CAAA;IACD,MAAM,QAAQ,GAAG,UAAU,CAAC,QAAQ,EAAE,CAAA;IAEtC,MAAM,MAAM,GAAG,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC,MAAM,CAElD,CAAC,MAAM,EAAE,cAAc,EAAE,EAAE;QAC3B,MAAM,CAAC,IAAI,CACT,yBAAiB,CAAC,aAAa,CAAC,KAAK,CACnC,QAAQ,CAAC,QAAQ,CAAC,cAAc,CAAC,CACF,CAClC,CAAA;QACD,OAAO,MAAM,CAAA;IACf,CAAC,EAAE,EAAE,CAAC,CAAA;IAEN,MAAM,MAAM,GAAG,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC,MAAM,CAElD,CAAC,MAAM,EAAE,UAAU,EAAE,EAAE;QACvB,MAAM,CAAC,IAAI,CACT,yBAAiB,CAAC,aAAa,CAAC,KAAK,CAAC,QAAQ,CAAC,QAAQ,CAAC,UAAU,CAAC,CAAC,CACrE,CAAA;QACD,OAAO,MAAM,CAAA;IACf,CAAC,EAAE,EAAE,CAAC,CAAA;IAEN,OAAO,IAAI,4BAAY,CAAC;QACtB,MAAM;QACN,MAAM;KACP,CAAC,CAAA;AACJ,CAAC,CACF,CAAA"}
package/dist/product/steps/process-import-chunks.d.ts CHANGED
@@ -7,7 +7,9 @@ export declare const processImportChunksStepId = "process-import-chunks";
  * const data = parseProductCsvStep("products.csv")
  */
 export declare const processImportChunksStep: import("@medusajs/framework/workflows-sdk").StepFunction<{
-    chunks: string[];
+    chunks: {
+        id: string;
+    }[];
 }, {
     completed: boolean;
 }>;
package/dist/product/steps/process-import-chunks.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"process-import-chunks.d.ts","sourceRoot":"","sources":["../../../src/product/steps/process-import-chunks.ts"],"names":[],"mappings":"AAIA,eAAO,MAAM,yBAAyB,0BAA0B,CAAA;AAEhE;;;;;;GAMG;AACH,eAAO,MAAM,uBAAuB;YAEV,MAAM,EAAE;;;EAYjC,CAAA"}
+ {"version":3,"file":"process-import-chunks.d.ts","sourceRoot":"","sources":["../../../src/product/steps/process-import-chunks.ts"],"names":[],"mappings":"AAIA,eAAO,MAAM,yBAAyB,0BAA0B,CAAA;AAEhE;;;;;;GAMG;AACH,eAAO,MAAM,uBAAuB;YAEV;QAAE,EAAE,EAAE,MAAM,CAAA;KAAE,EAAE;;;EAqBzC,CAAA"}
package/dist/product/steps/process-import-chunks.js CHANGED
@@ -14,11 +14,21 @@ exports.processImportChunksStepId = "process-import-chunks";
  */
 exports.processImportChunksStep = (0, workflows_sdk_1.createStep)(exports.processImportChunksStepId, async (input, { container }) => {
     const file = container.resolve(utils_1.Modules.FILE);
-    for (let chunk of input.chunks) {
-        const contents = await file.getAsBuffer(chunk);
-        await (0, batch_products_1.batchProductsWorkflow)(container).run({
-            input: JSON.parse(contents.toString("utf-8")),
-        });
+    try {
+        for (let chunk of input.chunks) {
+            const contents = await file.getAsBuffer(chunk.id);
+            let products = JSON.parse(contents.toString("utf-8"));
+            await (0, batch_products_1.batchProductsWorkflow)(container).run({
+                input: products,
+            });
+            products = undefined;
+        }
+    }
+    finally {
+        /**
+         * Delete chunks regardless of the import status
+         */
+        await file.deleteFiles(input.chunks.map((chunk) => chunk.id));
     }
     return new workflows_sdk_1.StepResponse({ completed: true });
 });
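processImportChunksStep now wraps the import loop in try/finally, so the chunk files are deleted whether the batch workflows succeed or throw. The pattern in isolation, with hypothetical stand-ins narrowed from the file service calls the diff shows:

```ts
// Hypothetical stand-ins: a narrowed file service and a per-chunk runner.
type FileService = {
  getAsBuffer(id: string): Promise<Buffer>
  deleteFiles(ids: string[]): Promise<void>
}

async function importChunks(
  file: FileService,
  chunks: { id: string }[],
  runBatch: (input: unknown) => Promise<void>
): Promise<void> {
  try {
    for (const chunk of chunks) {
      const contents = await file.getAsBuffer(chunk.id)
      await runBatch(JSON.parse(contents.toString("utf-8")))
    }
  } finally {
    // Runs on success and on the first thrown error alike, so no
    // orphaned chunk files are left behind.
    await file.deleteFiles(chunks.map((chunk) => chunk.id))
  }
}
```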
package/dist/product/steps/process-import-chunks.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"process-import-chunks.js","sourceRoot":"","sources":["../../../src/product/steps/process-import-chunks.ts"],"names":[],"mappings":";;;AAAA,qDAAmD;AACnD,qEAA4E;AAC5E,gEAAmE;AAEtD,QAAA,yBAAyB,GAAG,uBAAuB,CAAA;AAEhE;;;;;;GAMG;AACU,QAAA,uBAAuB,GAAG,IAAA,0BAAU,EAC/C,iCAAyB,EACzB,KAAK,EAAE,KAA2B,EAAE,EAAE,SAAS,EAAE,EAAE,EAAE;IACnD,MAAM,IAAI,GAAG,SAAS,CAAC,OAAO,CAAC,eAAO,CAAC,IAAI,CAAC,CAAA;IAE5C,KAAK,IAAI,KAAK,IAAI,KAAK,CAAC,MAAM,EAAE,CAAC;QAC/B,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,WAAW,CAAC,KAAK,CAAC,CAAA;QAC9C,MAAM,IAAA,sCAAqB,EAAC,SAAS,CAAC,CAAC,GAAG,CAAC;YACzC,KAAK,EAAE,IAAI,CAAC,KAAK,CAAC,QAAQ,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC;SAC9C,CAAC,CAAA;IACJ,CAAC;IAED,OAAO,IAAI,4BAAY,CAAC,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAA;AAC9C,CAAC,CACF,CAAA"}
+ {"version":3,"file":"process-import-chunks.js","sourceRoot":"","sources":["../../../src/product/steps/process-import-chunks.ts"],"names":[],"mappings":";;;AAAA,qDAAmD;AACnD,qEAA4E;AAC5E,gEAAmE;AAEtD,QAAA,yBAAyB,GAAG,uBAAuB,CAAA;AAEhE;;;;;;GAMG;AACU,QAAA,uBAAuB,GAAG,IAAA,0BAAU,EAC/C,iCAAyB,EACzB,KAAK,EAAE,KAAmC,EAAE,EAAE,SAAS,EAAE,EAAE,EAAE;IAC3D,MAAM,IAAI,GAAG,SAAS,CAAC,OAAO,CAAC,eAAO,CAAC,IAAI,CAAC,CAAA;IAE5C,IAAI,CAAC;QACH,KAAK,IAAI,KAAK,IAAI,KAAK,CAAC,MAAM,EAAE,CAAC;YAC/B,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,WAAW,CAAC,KAAK,CAAC,EAAE,CAAC,CAAA;YACjD,IAAI,QAAQ,GAAG,IAAI,CAAC,KAAK,CAAC,QAAQ,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,CAAA;YACrD,MAAM,IAAA,sCAAqB,EAAC,SAAS,CAAC,CAAC,GAAG,CAAC;gBACzC,KAAK,EAAE,QAAQ;aAChB,CAAC,CAAA;YACF,QAAQ,GAAG,SAAS,CAAA;QACtB,CAAC;IACH,CAAC;YAAS,CAAC;QACT;;WAEG;QACH,MAAM,IAAI,CAAC,WAAW,CAAC,KAAK,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,KAAK,EAAE,EAAE,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC,CAAA;IAC/D,CAAC;IAED,OAAO,IAAI,4BAAY,CAAC,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAA;AAC9C,CAAC,CACF,CAAA"}
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@medusajs/core-flows",
-  "version": "2.8.4-preview-20250529032055",
+  "version": "2.8.4-preview-20250529090137",
   "description": "Set of workflow definitions for Medusa",
   "main": "dist/index.js",
   "exports": {
@@ -26,7 +26,7 @@
   "author": "Medusa",
   "license": "MIT",
   "devDependencies": {
-    "@medusajs/framework": "2.8.4-preview-20250529032055",
+    "@medusajs/framework": "2.8.4-preview-20250529090137",
     "@mikro-orm/core": "6.4.3",
     "@mikro-orm/knex": "6.4.3",
     "@mikro-orm/migrations": "6.4.3",
@@ -41,10 +41,11 @@
     "typescript": "^5.6.2"
   },
   "dependencies": {
+    "csv-parse": "^5.6.0",
     "json-2-csv": "^5.5.4"
   },
   "peerDependencies": {
-    "@medusajs/framework": "2.8.4-preview-20250529032055",
+    "@medusajs/framework": "2.8.4-preview-20250529090137",
     "awilix": "^8.0.1"
   },
   "scripts": {