@bulkimport/state-sequelize 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +56 -0
- package/dist/index.cjs +330 -0
- package/dist/index.cjs.map +1 -0
- package/dist/index.d.cts +36 -0
- package/dist/index.d.ts +36 -0
- package/dist/index.js +293 -0
- package/dist/index.js.map +1 -0
- package/package.json +61 -0
package/README.md
ADDED
|
@@ -0,0 +1,56 @@
|
|
|
1
|
+
# @bulkimport/state-sequelize
|
|
2
|
+
|
|
3
|
+
Sequelize-based `StateStore` adapter for [@bulkimport/core](https://www.npmjs.com/package/@bulkimport/core).
|
|
4
|
+
|
|
5
|
+
Persists import job state and processed records to any relational database supported by Sequelize v6 (PostgreSQL, MySQL, MariaDB, SQLite, MS SQL Server).
|
|
6
|
+
|
|
7
|
+
## Installation
|
|
8
|
+
|
|
9
|
+
```bash
|
|
10
|
+
npm install @bulkimport/state-sequelize
|
|
11
|
+
```
|
|
12
|
+
|
|
13
|
+
**Peer dependencies:** `@bulkimport/core` (>=0.1.0) and `sequelize` (^6.0.0) must be installed in your project.
|
|
14
|
+
|
|
15
|
+
## Usage
|
|
16
|
+
|
|
17
|
+
```typescript
|
|
18
|
+
import { BulkImport } from '@bulkimport/core';
|
|
19
|
+
import { SequelizeStateStore } from '@bulkimport/state-sequelize';
|
|
20
|
+
import { Sequelize } from 'sequelize';
|
|
21
|
+
|
|
22
|
+
// Use your existing Sequelize instance
|
|
23
|
+
const sequelize = new Sequelize('postgres://user:pass@localhost:5432/mydb');
|
|
24
|
+
|
|
25
|
+
// Create and initialize the store (creates tables if they don't exist)
|
|
26
|
+
const stateStore = new SequelizeStateStore(sequelize);
|
|
27
|
+
await stateStore.initialize();
|
|
28
|
+
|
|
29
|
+
// Pass it to BulkImport
|
|
30
|
+
const importer = new BulkImport({
|
|
31
|
+
schema: { fields: [/* ... */] },
|
|
32
|
+
source: mySource,
|
|
33
|
+
parser: myParser,
|
|
34
|
+
processor: myProcessor,
|
|
35
|
+
stateStore,
|
|
36
|
+
});
|
|
37
|
+
|
|
38
|
+
await importer.start();
|
|
39
|
+
```
|
|
40
|
+
|
|
41
|
+
## Database Tables
|
|
42
|
+
|
|
43
|
+
The adapter creates two tables:
|
|
44
|
+
|
|
45
|
+
- **`bulkimport_jobs`** - Import job state (status, config, batches as JSON)
|
|
46
|
+
- **`bulkimport_records`** - Individual processed records (status, raw/parsed data, errors)
|
|
47
|
+
|
|
48
|
+
Tables are created automatically when you call `initialize()`. The call is idempotent.
|
|
49
|
+
|
|
50
|
+
## Limitations
|
|
51
|
+
|
|
52
|
+
Schema fields containing non-serializable values (`customValidator`, `transform`, `pattern`) are stripped when saving to the database. When restoring a job, the consumer must re-inject these fields.
|
|
53
|
+
|
|
54
|
+
## License
|
|
55
|
+
|
|
56
|
+
MIT
|
package/dist/index.cjs
ADDED
|
@@ -0,0 +1,330 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __create = Object.create;
|
|
3
|
+
var __defProp = Object.defineProperty;
|
|
4
|
+
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
|
5
|
+
var __getOwnPropNames = Object.getOwnPropertyNames;
|
|
6
|
+
var __getProtoOf = Object.getPrototypeOf;
|
|
7
|
+
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
|
8
|
+
var __export = (target, all) => {
|
|
9
|
+
for (var name in all)
|
|
10
|
+
__defProp(target, name, { get: all[name], enumerable: true });
|
|
11
|
+
};
|
|
12
|
+
var __copyProps = (to, from, except, desc) => {
|
|
13
|
+
if (from && typeof from === "object" || typeof from === "function") {
|
|
14
|
+
for (let key of __getOwnPropNames(from))
|
|
15
|
+
if (!__hasOwnProp.call(to, key) && key !== except)
|
|
16
|
+
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
|
17
|
+
}
|
|
18
|
+
return to;
|
|
19
|
+
};
|
|
20
|
+
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
|
|
21
|
+
// If the importer is in node compatibility mode or this is not an ESM
|
|
22
|
+
// file that has been converted to a CommonJS file using a Babel-
|
|
23
|
+
// compatible transform (i.e. "__esModule" has not been set), then set
|
|
24
|
+
// "default" to the CommonJS "module.exports" for node compatibility.
|
|
25
|
+
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
|
|
26
|
+
mod
|
|
27
|
+
));
|
|
28
|
+
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
|
29
|
+
|
|
30
|
+
// src/index.ts
|
|
31
|
+
var index_exports = {};
|
|
32
|
+
__export(index_exports, {
|
|
33
|
+
SequelizeStateStore: () => SequelizeStateStore
|
|
34
|
+
});
|
|
35
|
+
module.exports = __toCommonJS(index_exports);
|
|
36
|
+
|
|
37
|
+
// src/models/JobModel.ts
var import_sequelize = require("sequelize");

/**
 * Define the `bulkimport_jobs` model on the given Sequelize instance.
 * One row per import job; `config` and `batches` are stored as JSON, and
 * the start/completion timestamps are stored as nullable BIGINT values.
 */
function defineJobModel(sequelize) {
  const { DataTypes } = import_sequelize;
  const attributes = {
    id: { type: DataTypes.STRING(36), primaryKey: true, allowNull: false },
    status: { type: DataTypes.STRING(20), allowNull: false },
    config: { type: DataTypes.JSON, allowNull: false },
    batches: { type: DataTypes.JSON, allowNull: false, defaultValue: [] },
    totalRecords: { type: DataTypes.INTEGER, allowNull: false, defaultValue: 0 },
    startedAt: { type: DataTypes.BIGINT, allowNull: true },
    completedAt: { type: DataTypes.BIGINT, allowNull: true }
  };
  const options = { tableName: "bulkimport_jobs", timestamps: false };
  return sequelize.define("BulkImportJob", attributes, options);
}
|
|
81
|
+
|
|
82
|
+
// src/models/RecordModel.ts
var import_sequelize2 = require("sequelize");

/**
 * Define the `bulkimport_records` model on the given Sequelize instance.
 * One row per processed record; raw/parsed payloads and validation errors
 * are stored as JSON. (jobId, recordIndex) is enforced unique so records
 * can be upserted idempotently.
 */
function defineRecordModel(sequelize) {
  const { DataTypes } = import_sequelize2;
  const attributes = {
    id: { type: DataTypes.INTEGER, primaryKey: true, autoIncrement: true },
    jobId: { type: DataTypes.STRING(36), allowNull: false },
    batchId: { type: DataTypes.STRING(36), allowNull: false },
    recordIndex: { type: DataTypes.INTEGER, allowNull: false },
    status: { type: DataTypes.STRING(10), allowNull: false },
    raw: { type: DataTypes.JSON, allowNull: false },
    parsed: { type: DataTypes.JSON, allowNull: false },
    errors: { type: DataTypes.JSON, allowNull: false, defaultValue: [] },
    processingError: { type: DataTypes.TEXT, allowNull: true }
  };
  const options = {
    tableName: "bulkimport_records",
    timestamps: false,
    indexes: [
      { fields: ["jobId", "status"] },
      { fields: ["jobId", "batchId"] },
      { unique: true, fields: ["jobId", "recordIndex"] }
    ]
  };
  return sequelize.define("BulkImportRecord", attributes, options);
}
|
|
138
|
+
|
|
139
|
+
// src/mappers/JobMapper.ts

/**
 * Return a copy of the job config whose schema fields keep only plain-data
 * properties (name/type/required plus defaultValue/separator/aliases when
 * present). Function- or RegExp-valued field properties are dropped so the
 * config can be stored as JSON.
 */
function stripNonSerializableFields(config) {
  const fields = config.schema.fields.map((f) => {
    const field = { name: f.name, type: f.type, required: f.required };
    for (const key of ["defaultValue", "separator", "aliases"]) {
      if (f[key] !== void 0) field[key] = f[key];
    }
    return field;
  });
  return { ...config, schema: { ...config.schema, fields } };
}

/** Flatten an in-memory job state into a `bulkimport_jobs` row. */
function toRow(state) {
  const batches = state.batches.map((b) => ({
    id: b.id,
    index: b.index,
    status: b.status,
    // per-record data lives in bulkimport_records, not in the job row
    records: [],
    processedCount: b.processedCount,
    failedCount: b.failedCount
  }));
  return {
    id: state.id,
    status: state.status,
    config: stripNonSerializableFields(state.config),
    batches,
    totalRecords: state.totalRecords,
    startedAt: state.startedAt ?? null,
    completedAt: state.completedAt ?? null
  };
}

/**
 * Rebuild a job state from a row. NULL timestamps are omitted from the
 * result rather than carried over; non-null values are coerced with Number()
 * because BIGINT columns may come back as strings.
 */
function toDomain(row) {
  const state = {
    id: row.id,
    config: row.config,
    status: row.status,
    batches: row.batches,
    totalRecords: row.totalRecords
  };
  if (row.startedAt !== null) state.startedAt = Number(row.startedAt);
  if (row.completedAt !== null) state.completedAt = Number(row.completedAt);
  return state;
}
|
|
200
|
+
|
|
201
|
+
// src/mappers/RecordMapper.ts

/** Flatten a processed record into a `bulkimport_records` row for (jobId, batchId). */
function toRow2(jobId, batchId, record) {
  const { index, status, raw, parsed, errors, processingError } = record;
  return {
    jobId,
    batchId,
    recordIndex: index,
    status,
    raw,
    parsed,
    errors,
    // the column is nullable; normalize a missing error to NULL
    processingError: processingError ?? null
  };
}

/**
 * Rebuild a processed record from a row. A NULL processingError is omitted
 * from the result instead of being carried over as a property.
 */
function toDomain2(row) {
  const record = {
    index: row.recordIndex,
    raw: row.raw,
    parsed: row.parsed,
    status: row.status,
    errors: row.errors
  };
  return row.processingError === null ? record : { ...record, processingError: row.processingError };
}
|
|
227
|
+
|
|
228
|
+
// src/SequelizeStateStore.ts
/**
 * Sequelize-based StateStore adapter for `@bulkimport/core`.
 *
 * Persists job state in `bulkimport_jobs` and per-record results in
 * `bulkimport_records`. Call `initialize()` after construction to create
 * the tables (idempotent via `Model.sync()`).
 *
 * Non-serializable schema fields are stripped when a job is saved (see
 * stripNonSerializableFields); consumers must re-inject them on restore.
 */
var SequelizeStateStore = class {
  /**
   * @param sequelize an already-configured Sequelize instance
   * @param _options reserved; not read by this implementation
   */
  constructor(sequelize, _options) {
    this.sequelize = sequelize;
    this.Job = defineJobModel(this.sequelize);
    this.Record = defineRecordModel(this.sequelize);
  }
  /** Create both tables if they do not already exist. */
  async initialize() {
    await this.Job.sync();
    await this.Record.sync();
  }
  /** Insert or replace the job row keyed by job id. */
  async saveJobState(job) {
    const row = toRow(job);
    await this.Job.upsert(row);
  }
  /** Load a job by id; null when no such row exists. */
  async getJobState(jobId) {
    const row = await this.Job.findByPk(jobId);
    if (!row) return null;
    return toDomain(row.get({ plain: true }));
  }
  /**
   * Update one batch's status/counters inside the job's JSON `batches`
   * column. Silently a no-op when the job does not exist; batches with
   * other ids are left untouched.
   */
  async updateBatchState(jobId, batchId, state) {
    const row = await this.Job.findByPk(jobId);
    if (!row) return;
    const plain = row.get({ plain: true });
    const batches = plain.batches;
    const updated = batches.map(
      (b) => b.id === batchId ? { ...b, status: state.status, processedCount: state.processedCount, failedCount: state.failedCount } : b
    );
    await row.update({ batches: updated });
  }
  /** Upsert a processed record, keyed by the unique (jobId, recordIndex) pair. */
  async saveProcessedRecord(jobId, batchId, record) {
    const row = toRow2(jobId, batchId, record);
    const existing = await this.Record.findOne({
      where: { jobId, recordIndex: record.index }
    });
    if (existing) {
      await existing.update(row);
    } else {
      await this.Record.create(row);
    }
  }
  /** Records with status 'failed' or 'invalid', ordered by record index. */
  async getFailedRecords(jobId) {
    // Fix: reuse the module-level require("sequelize") binding instead of
    // re-running a dynamic import("sequelize") on every call.
    const { Op } = import_sequelize;
    const rows = await this.Record.findAll({
      where: { jobId, status: { [Op.in]: ["failed", "invalid"] } },
      order: [["recordIndex", "ASC"]]
    });
    return rows.map((r) => toDomain2(r.get({ plain: true })));
  }
  /** Records with status 'pending' or 'valid', ordered by record index. */
  async getPendingRecords(jobId) {
    const { Op } = import_sequelize;
    const rows = await this.Record.findAll({
      where: { jobId, status: { [Op.in]: ["pending", "valid"] } },
      order: [["recordIndex", "ASC"]]
    });
    return rows.map((r) => toDomain2(r.get({ plain: true })));
  }
  /** Records with status 'processed', ordered by record index. */
  async getProcessedRecords(jobId) {
    const rows = await this.Record.findAll({
      where: { jobId, status: "processed" },
      order: [["recordIndex", "ASC"]]
    });
    return rows.map((r) => toDomain2(r.get({ plain: true })));
  }
  /**
   * Aggregate progress for a job from a GROUP BY over record statuses plus
   * the job row's own batch list and timestamps. Unknown jobs yield an
   * all-zero progress object.
   */
  async getProgress(jobId) {
    const jobRow = await this.Job.findByPk(jobId);
    const plain = jobRow ? jobRow.get({ plain: true }) : null;
    const { fn, col } = import_sequelize;
    const counts = await this.Record.findAll({
      attributes: ["status", [fn("COUNT", col("status")), "count"]],
      where: { jobId },
      group: ["status"],
      raw: true
    });
    const countMap = /* @__PURE__ */ new Map();
    for (const row of counts) {
      // COUNT() may come back as a string depending on the dialect/driver
      countMap.set(row.status, parseInt(row.count, 10));
    }
    const processed = countMap.get("processed") ?? 0;
    const failed = (countMap.get("failed") ?? 0) + (countMap.get("invalid") ?? 0);
    const totalRecords = plain?.totalRecords ?? 0;
    // pending is derived from the job's declared total, clamped at zero
    const pending = Math.max(0, totalRecords - processed - failed);
    const completed = processed + failed;
    const batches = plain?.batches ?? [];
    const completedBatches = batches.filter((b) => b.status === "COMPLETED").length;
    const elapsed = plain?.startedAt ? Date.now() - Number(plain.startedAt) : 0;
    return {
      totalRecords,
      processedRecords: processed,
      failedRecords: failed,
      pendingRecords: pending,
      percentage: totalRecords > 0 ? Math.round(completed / totalRecords * 100) : 0,
      currentBatch: completedBatches,
      totalBatches: batches.length,
      elapsedMs: elapsed
    };
  }
};
|
|
326
|
+
// Annotate the CommonJS export names for ESM import in node:
// (intentionally dead code: `0 && ...` never executes, but the literal
// `module.exports = { ... }` lets Node statically detect the named exports)
0 && (module.exports = {
  SequelizeStateStore
});
//# sourceMappingURL=index.cjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/index.ts","../src/models/JobModel.ts","../src/models/RecordModel.ts","../src/mappers/JobMapper.ts","../src/mappers/RecordMapper.ts","../src/SequelizeStateStore.ts"],"sourcesContent":["export { SequelizeStateStore } from './SequelizeStateStore.js';\nexport type { SequelizeStateStoreOptions } from './SequelizeStateStore.js';\n","import { DataTypes } from 'sequelize';\nimport type { Sequelize, ModelStatic, Model } from 'sequelize';\n\nexport interface JobRow {\n id: string;\n status: string;\n config: unknown;\n batches: unknown;\n totalRecords: number;\n startedAt: number | null;\n completedAt: number | null;\n}\n\nexport type JobModel = ModelStatic<Model>;\n\nexport function defineJobModel(sequelize: Sequelize): JobModel {\n return sequelize.define(\n 'BulkImportJob',\n {\n id: {\n type: DataTypes.STRING(36),\n primaryKey: true,\n allowNull: false,\n },\n status: {\n type: DataTypes.STRING(20),\n allowNull: false,\n },\n config: {\n type: DataTypes.JSON,\n allowNull: false,\n },\n batches: {\n type: DataTypes.JSON,\n allowNull: false,\n defaultValue: [],\n },\n totalRecords: {\n type: DataTypes.INTEGER,\n allowNull: false,\n defaultValue: 0,\n },\n startedAt: {\n type: DataTypes.BIGINT,\n allowNull: true,\n },\n completedAt: {\n type: DataTypes.BIGINT,\n allowNull: true,\n },\n },\n {\n tableName: 'bulkimport_jobs',\n timestamps: false,\n },\n );\n}\n","import { DataTypes } from 'sequelize';\nimport type { Sequelize, ModelStatic, Model } from 'sequelize';\n\nexport interface RecordRow {\n id?: number;\n jobId: string;\n batchId: string;\n recordIndex: number;\n status: string;\n raw: unknown;\n parsed: unknown;\n errors: unknown;\n processingError: string | null;\n}\n\nexport type RecordModel = ModelStatic<Model>;\n\nexport function defineRecordModel(sequelize: Sequelize): RecordModel {\n return sequelize.define(\n 'BulkImportRecord',\n {\n id: {\n type: DataTypes.INTEGER,\n primaryKey: true,\n autoIncrement: true,\n },\n jobId: {\n 
type: DataTypes.STRING(36),\n allowNull: false,\n },\n batchId: {\n type: DataTypes.STRING(36),\n allowNull: false,\n },\n recordIndex: {\n type: DataTypes.INTEGER,\n allowNull: false,\n },\n status: {\n type: DataTypes.STRING(10),\n allowNull: false,\n },\n raw: {\n type: DataTypes.JSON,\n allowNull: false,\n },\n parsed: {\n type: DataTypes.JSON,\n allowNull: false,\n },\n errors: {\n type: DataTypes.JSON,\n allowNull: false,\n defaultValue: [],\n },\n processingError: {\n type: DataTypes.TEXT,\n allowNull: true,\n },\n },\n {\n tableName: 'bulkimport_records',\n timestamps: false,\n indexes: [\n { fields: ['jobId', 'status'] },\n { fields: ['jobId', 'batchId'] },\n { unique: true, fields: ['jobId', 'recordIndex'] },\n ],\n },\n );\n}\n","import type { ImportJobState, ImportJobConfig } from '@bulkimport/core';\nimport type { JobRow } from '../models/JobModel.js';\n\ninterface SerializableFieldDefinition {\n readonly name: string;\n readonly type: string;\n readonly required: boolean;\n readonly defaultValue?: unknown;\n readonly separator?: string;\n readonly aliases?: readonly string[];\n}\n\nfunction stripNonSerializableFields(config: ImportJobConfig): object {\n const fields: SerializableFieldDefinition[] = config.schema.fields.map((f) => {\n const stripped: SerializableFieldDefinition = {\n name: f.name,\n type: f.type,\n required: f.required,\n };\n const result: Record<string, unknown> = { ...stripped };\n if (f.defaultValue !== undefined) result['defaultValue'] = f.defaultValue;\n if (f.separator !== undefined) result['separator'] = f.separator;\n if (f.aliases !== undefined) result['aliases'] = f.aliases;\n return result as unknown as SerializableFieldDefinition;\n });\n\n return {\n ...config,\n schema: {\n ...config.schema,\n fields,\n },\n };\n}\n\nexport function toRow(state: ImportJobState): JobRow {\n return {\n id: state.id,\n status: state.status,\n config: stripNonSerializableFields(state.config),\n batches: state.batches.map((b) => ({\n id: 
b.id,\n index: b.index,\n status: b.status,\n records: [],\n processedCount: b.processedCount,\n failedCount: b.failedCount,\n })),\n totalRecords: state.totalRecords,\n startedAt: state.startedAt ?? null,\n completedAt: state.completedAt ?? null,\n };\n}\n\nexport function toDomain(row: JobRow): ImportJobState {\n const config = row.config as ImportJobConfig;\n const batches = row.batches as ImportJobState['batches'];\n\n const base: ImportJobState = {\n id: row.id,\n config,\n status: row.status as ImportJobState['status'],\n batches,\n totalRecords: row.totalRecords,\n };\n\n if (row.startedAt !== null && row.completedAt !== null) {\n return { ...base, startedAt: Number(row.startedAt), completedAt: Number(row.completedAt) };\n }\n if (row.startedAt !== null) {\n return { ...base, startedAt: Number(row.startedAt) };\n }\n if (row.completedAt !== null) {\n return { ...base, completedAt: Number(row.completedAt) };\n }\n\n return base;\n}\n","import type { ProcessedRecord, RawRecord, ValidationError } from '@bulkimport/core';\nimport type { RecordRow } from '../models/RecordModel.js';\n\nexport function toRow(jobId: string, batchId: string, record: ProcessedRecord): RecordRow {\n return {\n jobId,\n batchId,\n recordIndex: record.index,\n status: record.status,\n raw: record.raw,\n parsed: record.parsed,\n errors: record.errors,\n processingError: record.processingError ?? 
null,\n };\n}\n\nexport function toDomain(row: RecordRow): ProcessedRecord {\n const result: ProcessedRecord = {\n index: row.recordIndex,\n raw: row.raw as RawRecord,\n parsed: row.parsed as RawRecord,\n status: row.status as ProcessedRecord['status'],\n errors: row.errors as readonly ValidationError[],\n };\n\n if (row.processingError !== null) {\n return { ...result, processingError: row.processingError };\n }\n\n return result;\n}\n","import type { Sequelize } from 'sequelize';\nimport type { StateStore, BatchState, ImportJobState, ImportProgress, ProcessedRecord } from '@bulkimport/core';\nimport { defineJobModel } from './models/JobModel.js';\nimport type { JobModel, JobRow } from './models/JobModel.js';\nimport { defineRecordModel } from './models/RecordModel.js';\nimport type { RecordModel, RecordRow } from './models/RecordModel.js';\nimport * as JobMapper from './mappers/JobMapper.js';\nimport * as RecordMapper from './mappers/RecordMapper.js';\n\nexport interface SequelizeStateStoreOptions {\n readonly tablePrefix?: string;\n}\n\n/**\n * Sequelize-based StateStore adapter for `@bulkimport/core`.\n *\n * Persists import job state and processed records to a relational database\n * using Sequelize v6. Supports any dialect supported by Sequelize (PostgreSQL,\n * MySQL, MariaDB, SQLite, MS SQL Server).\n *\n * Call `initialize()` after construction to create tables.\n *\n * **Limitation:** Non-serializable schema fields (`customValidator`, `transform`,\n * `pattern`) are stripped when saving. 
The consumer must re-inject them when\n * restoring a job from the database.\n */\nexport class SequelizeStateStore implements StateStore {\n private readonly sequelize: Sequelize;\n private readonly Job: JobModel;\n private readonly Record: RecordModel;\n\n constructor(sequelize: Sequelize, _options?: SequelizeStateStoreOptions) {\n this.sequelize = sequelize;\n this.Job = defineJobModel(this.sequelize);\n this.Record = defineRecordModel(this.sequelize);\n }\n\n async initialize(): Promise<void> {\n await this.Job.sync();\n await this.Record.sync();\n }\n\n async saveJobState(job: ImportJobState): Promise<void> {\n const row = JobMapper.toRow(job);\n await this.Job.upsert(row as unknown as Record<string, unknown>);\n }\n\n async getJobState(jobId: string): Promise<ImportJobState | null> {\n const row = await this.Job.findByPk(jobId);\n if (!row) return null;\n return JobMapper.toDomain(row.get({ plain: true }) as JobRow);\n }\n\n async updateBatchState(jobId: string, batchId: string, state: BatchState): Promise<void> {\n const row = await this.Job.findByPk(jobId);\n if (!row) return;\n\n const plain = row.get({ plain: true }) as JobRow;\n const batches = plain.batches as Array<{ id: string; status: string; processedCount: number; failedCount: number }>;\n\n const updated = batches.map((b) =>\n b.id === batchId\n ? 
{ ...b, status: state.status, processedCount: state.processedCount, failedCount: state.failedCount }\n : b,\n );\n\n await row.update({ batches: updated });\n }\n\n async saveProcessedRecord(jobId: string, batchId: string, record: ProcessedRecord): Promise<void> {\n const row = RecordMapper.toRow(jobId, batchId, record);\n\n const existing = await this.Record.findOne({\n where: { jobId, recordIndex: record.index },\n });\n\n if (existing) {\n await existing.update(row);\n } else {\n await this.Record.create(row as unknown as Record<string, unknown>);\n }\n }\n\n async getFailedRecords(jobId: string): Promise<readonly ProcessedRecord[]> {\n const { Op } = await import('sequelize');\n const rows = await this.Record.findAll({\n where: { jobId, status: { [Op.in]: ['failed', 'invalid'] } },\n order: [['recordIndex', 'ASC']],\n });\n return rows.map((r) => RecordMapper.toDomain(r.get({ plain: true }) as RecordRow));\n }\n\n async getPendingRecords(jobId: string): Promise<readonly ProcessedRecord[]> {\n const { Op } = await import('sequelize');\n const rows = await this.Record.findAll({\n where: { jobId, status: { [Op.in]: ['pending', 'valid'] } },\n order: [['recordIndex', 'ASC']],\n });\n return rows.map((r) => RecordMapper.toDomain(r.get({ plain: true }) as RecordRow));\n }\n\n async getProcessedRecords(jobId: string): Promise<readonly ProcessedRecord[]> {\n const rows = await this.Record.findAll({\n where: { jobId, status: 'processed' },\n order: [['recordIndex', 'ASC']],\n });\n return rows.map((r) => RecordMapper.toDomain(r.get({ plain: true }) as RecordRow));\n }\n\n async getProgress(jobId: string): Promise<ImportProgress> {\n const jobRow = await this.Job.findByPk(jobId);\n const plain = jobRow ? 
(jobRow.get({ plain: true }) as JobRow) : null;\n\n const { fn, col } = await import('sequelize');\n const counts = (await this.Record.findAll({\n attributes: ['status', [fn('COUNT', col('status')), 'count']],\n where: { jobId },\n group: ['status'],\n raw: true,\n })) as unknown as Array<{ status: string; count: string }>;\n\n const countMap = new Map<string, number>();\n for (const row of counts) {\n countMap.set(row.status, parseInt(row.count, 10));\n }\n\n const processed = countMap.get('processed') ?? 0;\n const failed = (countMap.get('failed') ?? 0) + (countMap.get('invalid') ?? 0);\n const totalRecords = plain?.totalRecords ?? 0;\n const pending = Math.max(0, totalRecords - processed - failed);\n const completed = processed + failed;\n\n const batches = (plain?.batches ?? []) as Array<{ status: string }>;\n const completedBatches = batches.filter((b) => b.status === 'COMPLETED').length;\n const elapsed = plain?.startedAt ? Date.now() - Number(plain.startedAt) : 0;\n\n return {\n totalRecords,\n processedRecords: processed,\n failedRecords: failed,\n pendingRecords: pending,\n percentage: totalRecords > 0 ? 
Math.round((completed / totalRecords) * 100) : 0,\n currentBatch: completedBatches,\n totalBatches: batches.length,\n elapsedMs: elapsed,\n };\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,uBAA0B;AAenB,SAAS,eAAe,WAAgC;AAC7D,SAAO,UAAU;AAAA,IACf;AAAA,IACA;AAAA,MACE,IAAI;AAAA,QACF,MAAM,2BAAU,OAAO,EAAE;AAAA,QACzB,YAAY;AAAA,QACZ,WAAW;AAAA,MACb;AAAA,MACA,QAAQ;AAAA,QACN,MAAM,2BAAU,OAAO,EAAE;AAAA,QACzB,WAAW;AAAA,MACb;AAAA,MACA,QAAQ;AAAA,QACN,MAAM,2BAAU;AAAA,QAChB,WAAW;AAAA,MACb;AAAA,MACA,SAAS;AAAA,QACP,MAAM,2BAAU;AAAA,QAChB,WAAW;AAAA,QACX,cAAc,CAAC;AAAA,MACjB;AAAA,MACA,cAAc;AAAA,QACZ,MAAM,2BAAU;AAAA,QAChB,WAAW;AAAA,QACX,cAAc;AAAA,MAChB;AAAA,MACA,WAAW;AAAA,QACT,MAAM,2BAAU;AAAA,QAChB,WAAW;AAAA,MACb;AAAA,MACA,aAAa;AAAA,QACX,MAAM,2BAAU;AAAA,QAChB,WAAW;AAAA,MACb;AAAA,IACF;AAAA,IACA;AAAA,MACE,WAAW;AAAA,MACX,YAAY;AAAA,IACd;AAAA,EACF;AACF;;;ACxDA,IAAAA,oBAA0B;AAiBnB,SAAS,kBAAkB,WAAmC;AACnE,SAAO,UAAU;AAAA,IACf;AAAA,IACA;AAAA,MACE,IAAI;AAAA,QACF,MAAM,4BAAU;AAAA,QAChB,YAAY;AAAA,QACZ,eAAe;AAAA,MACjB;AAAA,MACA,OAAO;AAAA,QACL,MAAM,4BAAU,OAAO,EAAE;AAAA,QACzB,WAAW;AAAA,MACb;AAAA,MACA,SAAS;AAAA,QACP,MAAM,4BAAU,OAAO,EAAE;AAAA,QACzB,WAAW;AAAA,MACb;AAAA,MACA,aAAa;AAAA,QACX,MAAM,4BAAU;AAAA,QAChB,WAAW;AAAA,MACb;AAAA,MACA,QAAQ;AAAA,QACN,MAAM,4BAAU,OAAO,EAAE;AAAA,QACzB,WAAW;AAAA,MACb;AAAA,MACA,KAAK;AAAA,QACH,MAAM,4BAAU;AAAA,QAChB,WAAW;AAAA,MACb;AAAA,MACA,QAAQ;AAAA,QACN,MAAM,4BAAU;AAAA,QAChB,WAAW;AAAA,MACb;AAAA,MACA,QAAQ;AAAA,QACN,MAAM,4BAAU;AAAA,QAChB,WAAW;AAAA,QACX,cAAc,CAAC;AAAA,MACjB;AAAA,MACA,iBAAiB;AAAA,QACf,MAAM,4BAAU;AAAA,QAChB,WAAW;AAAA,MACb;AAAA,IACF;AAAA,IACA;AAAA,MACE,WAAW;AAAA,MACX,YAAY;AAAA,MACZ,SAAS;AAAA,QACP,EAAE,QAAQ,CAAC,SAAS,QAAQ,EAAE;AAAA,QAC9B,EAAE,QAAQ,CAAC,SAAS,SAAS,EAAE;AAAA,QAC/B,EAAE,QAAQ,MAAM,QAAQ,CAAC,SAAS,aAAa,EAAE;AAAA,MACnD;AAAA,IACF;AAAA,EACF;AACF;;;AC1DA,SAAS,2BAA2B,QAAiC;AACnE,QAAM,SAAwC,OAAO,OAAO,OAAO,IAAI,CAAC,MAAM;AAC5E,UAAM,WAAwC;AAAA,MAC5C,MAAM,EAAE;AAAA,MACR,MAAM,EAAE;AAAA,MACR,UAAU,EAAE;AAAA,IACd;AACA,UAAM,SAAkC,EAAE,GA
AG,SAAS;AACtD,QAAI,EAAE,iBAAiB,OAAW,QAAO,cAAc,IAAI,EAAE;AAC7D,QAAI,EAAE,cAAc,OAAW,QAAO,WAAW,IAAI,EAAE;AACvD,QAAI,EAAE,YAAY,OAAW,QAAO,SAAS,IAAI,EAAE;AACnD,WAAO;AAAA,EACT,CAAC;AAED,SAAO;AAAA,IACL,GAAG;AAAA,IACH,QAAQ;AAAA,MACN,GAAG,OAAO;AAAA,MACV;AAAA,IACF;AAAA,EACF;AACF;AAEO,SAAS,MAAM,OAA+B;AACnD,SAAO;AAAA,IACL,IAAI,MAAM;AAAA,IACV,QAAQ,MAAM;AAAA,IACd,QAAQ,2BAA2B,MAAM,MAAM;AAAA,IAC/C,SAAS,MAAM,QAAQ,IAAI,CAAC,OAAO;AAAA,MACjC,IAAI,EAAE;AAAA,MACN,OAAO,EAAE;AAAA,MACT,QAAQ,EAAE;AAAA,MACV,SAAS,CAAC;AAAA,MACV,gBAAgB,EAAE;AAAA,MAClB,aAAa,EAAE;AAAA,IACjB,EAAE;AAAA,IACF,cAAc,MAAM;AAAA,IACpB,WAAW,MAAM,aAAa;AAAA,IAC9B,aAAa,MAAM,eAAe;AAAA,EACpC;AACF;AAEO,SAAS,SAAS,KAA6B;AACpD,QAAM,SAAS,IAAI;AACnB,QAAM,UAAU,IAAI;AAEpB,QAAM,OAAuB;AAAA,IAC3B,IAAI,IAAI;AAAA,IACR;AAAA,IACA,QAAQ,IAAI;AAAA,IACZ;AAAA,IACA,cAAc,IAAI;AAAA,EACpB;AAEA,MAAI,IAAI,cAAc,QAAQ,IAAI,gBAAgB,MAAM;AACtD,WAAO,EAAE,GAAG,MAAM,WAAW,OAAO,IAAI,SAAS,GAAG,aAAa,OAAO,IAAI,WAAW,EAAE;AAAA,EAC3F;AACA,MAAI,IAAI,cAAc,MAAM;AAC1B,WAAO,EAAE,GAAG,MAAM,WAAW,OAAO,IAAI,SAAS,EAAE;AAAA,EACrD;AACA,MAAI,IAAI,gBAAgB,MAAM;AAC5B,WAAO,EAAE,GAAG,MAAM,aAAa,OAAO,IAAI,WAAW,EAAE;AAAA,EACzD;AAEA,SAAO;AACT;;;AC1EO,SAASC,OAAM,OAAe,SAAiB,QAAoC;AACxF,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA,aAAa,OAAO;AAAA,IACpB,QAAQ,OAAO;AAAA,IACf,KAAK,OAAO;AAAA,IACZ,QAAQ,OAAO;AAAA,IACf,QAAQ,OAAO;AAAA,IACf,iBAAiB,OAAO,mBAAmB;AAAA,EAC7C;AACF;AAEO,SAASC,UAAS,KAAiC;AACxD,QAAM,SAA0B;AAAA,IAC9B,OAAO,IAAI;AAAA,IACX,KAAK,IAAI;AAAA,IACT,QAAQ,IAAI;AAAA,IACZ,QAAQ,IAAI;AAAA,IACZ,QAAQ,IAAI;AAAA,EACd;AAEA,MAAI,IAAI,oBAAoB,MAAM;AAChC,WAAO,EAAE,GAAG,QAAQ,iBAAiB,IAAI,gBAAgB;AAAA,EAC3D;AAEA,SAAO;AACT;;;ACJO,IAAM,sBAAN,MAAgD;AAAA,EAKrD,YAAY,WAAsB,UAAuC;AACvE,SAAK,YAAY;AACjB,SAAK,MAAM,eAAe,KAAK,SAAS;AACxC,SAAK,SAAS,kBAAkB,KAAK,SAAS;AAAA,EAChD;AAAA,EAEA,MAAM,aAA4B;AAChC,UAAM,KAAK,IAAI,KAAK;AACpB,UAAM,KAAK,OAAO,KAAK;AAAA,EACzB;AAAA,EAEA,MAAM,aAAa,KAAoC;AACrD,UAAM,MAAgB,MAAM,GAAG;AAC/B,UAAM,KAAK,IAAI,OAAO,GAAyC;AAAA,EACjE;AAAA,EAEA,MAAM,YAAY,OAA+C;AAC/D,UAAM,MAAM,MAAM,KAAK,IAAI,SAAS,KAAK;AACzC,QAAI,CAAC
,IAAK,QAAO;AACjB,WAAiB,SAAS,IAAI,IAAI,EAAE,OAAO,KAAK,CAAC,CAAW;AAAA,EAC9D;AAAA,EAEA,MAAM,iBAAiB,OAAe,SAAiB,OAAkC;AACvF,UAAM,MAAM,MAAM,KAAK,IAAI,SAAS,KAAK;AACzC,QAAI,CAAC,IAAK;AAEV,UAAM,QAAQ,IAAI,IAAI,EAAE,OAAO,KAAK,CAAC;AACrC,UAAM,UAAU,MAAM;AAEtB,UAAM,UAAU,QAAQ;AAAA,MAAI,CAAC,MAC3B,EAAE,OAAO,UACL,EAAE,GAAG,GAAG,QAAQ,MAAM,QAAQ,gBAAgB,MAAM,gBAAgB,aAAa,MAAM,YAAY,IACnG;AAAA,IACN;AAEA,UAAM,IAAI,OAAO,EAAE,SAAS,QAAQ,CAAC;AAAA,EACvC;AAAA,EAEA,MAAM,oBAAoB,OAAe,SAAiB,QAAwC;AAChG,UAAM,MAAmBC,OAAM,OAAO,SAAS,MAAM;AAErD,UAAM,WAAW,MAAM,KAAK,OAAO,QAAQ;AAAA,MACzC,OAAO,EAAE,OAAO,aAAa,OAAO,MAAM;AAAA,IAC5C,CAAC;AAED,QAAI,UAAU;AACZ,YAAM,SAAS,OAAO,GAAG;AAAA,IAC3B,OAAO;AACL,YAAM,KAAK,OAAO,OAAO,GAAyC;AAAA,IACpE;AAAA,EACF;AAAA,EAEA,MAAM,iBAAiB,OAAoD;AACzE,UAAM,EAAE,GAAG,IAAI,MAAM,OAAO,WAAW;AACvC,UAAM,OAAO,MAAM,KAAK,OAAO,QAAQ;AAAA,MACrC,OAAO,EAAE,OAAO,QAAQ,EAAE,CAAC,GAAG,EAAE,GAAG,CAAC,UAAU,SAAS,EAAE,EAAE;AAAA,MAC3D,OAAO,CAAC,CAAC,eAAe,KAAK,CAAC;AAAA,IAChC,CAAC;AACD,WAAO,KAAK,IAAI,CAAC,MAAmBC,UAAS,EAAE,IAAI,EAAE,OAAO,KAAK,CAAC,CAAc,CAAC;AAAA,EACnF;AAAA,EAEA,MAAM,kBAAkB,OAAoD;AAC1E,UAAM,EAAE,GAAG,IAAI,MAAM,OAAO,WAAW;AACvC,UAAM,OAAO,MAAM,KAAK,OAAO,QAAQ;AAAA,MACrC,OAAO,EAAE,OAAO,QAAQ,EAAE,CAAC,GAAG,EAAE,GAAG,CAAC,WAAW,OAAO,EAAE,EAAE;AAAA,MAC1D,OAAO,CAAC,CAAC,eAAe,KAAK,CAAC;AAAA,IAChC,CAAC;AACD,WAAO,KAAK,IAAI,CAAC,MAAmBA,UAAS,EAAE,IAAI,EAAE,OAAO,KAAK,CAAC,CAAc,CAAC;AAAA,EACnF;AAAA,EAEA,MAAM,oBAAoB,OAAoD;AAC5E,UAAM,OAAO,MAAM,KAAK,OAAO,QAAQ;AAAA,MACrC,OAAO,EAAE,OAAO,QAAQ,YAAY;AAAA,MACpC,OAAO,CAAC,CAAC,eAAe,KAAK,CAAC;AAAA,IAChC,CAAC;AACD,WAAO,KAAK,IAAI,CAAC,MAAmBA,UAAS,EAAE,IAAI,EAAE,OAAO,KAAK,CAAC,CAAc,CAAC;AAAA,EACnF;AAAA,EAEA,MAAM,YAAY,OAAwC;AACxD,UAAM,SAAS,MAAM,KAAK,IAAI,SAAS,KAAK;AAC5C,UAAM,QAAQ,SAAU,OAAO,IAAI,EAAE,OAAO,KAAK,CAAC,IAAe;AAEjE,UAAM,EAAE,IAAI,IAAI,IAAI,MAAM,OAAO,WAAW;AAC5C,UAAM,SAAU,MAAM,KAAK,OAAO,QAAQ;AAAA,MACxC,YAAY,CAAC,UAAU,CAAC,GAAG,SAAS,IAAI,QAAQ,CAAC,GAAG,OAAO,CAAC;AAAA,MAC5D,OAAO,EAAE,MAAM;AAAA,MACf,OAAO,CAAC,QAAQ;AAAA,MAChB,KAAK;AAAA,IACP,CAAC;AAED,UAAM,WAAW,oBAAI,IAAoB;A
ACzC,eAAW,OAAO,QAAQ;AACxB,eAAS,IAAI,IAAI,QAAQ,SAAS,IAAI,OAAO,EAAE,CAAC;AAAA,IAClD;AAEA,UAAM,YAAY,SAAS,IAAI,WAAW,KAAK;AAC/C,UAAM,UAAU,SAAS,IAAI,QAAQ,KAAK,MAAM,SAAS,IAAI,SAAS,KAAK;AAC3E,UAAM,eAAe,OAAO,gBAAgB;AAC5C,UAAM,UAAU,KAAK,IAAI,GAAG,eAAe,YAAY,MAAM;AAC7D,UAAM,YAAY,YAAY;AAE9B,UAAM,UAAW,OAAO,WAAW,CAAC;AACpC,UAAM,mBAAmB,QAAQ,OAAO,CAAC,MAAM,EAAE,WAAW,WAAW,EAAE;AACzE,UAAM,UAAU,OAAO,YAAY,KAAK,IAAI,IAAI,OAAO,MAAM,SAAS,IAAI;AAE1E,WAAO;AAAA,MACL;AAAA,MACA,kBAAkB;AAAA,MAClB,eAAe;AAAA,MACf,gBAAgB;AAAA,MAChB,YAAY,eAAe,IAAI,KAAK,MAAO,YAAY,eAAgB,GAAG,IAAI;AAAA,MAC9E,cAAc;AAAA,MACd,cAAc,QAAQ;AAAA,MACtB,WAAW;AAAA,IACb;AAAA,EACF;AACF;","names":["import_sequelize","toRow","toDomain","toRow","toDomain"]}
|
package/dist/index.d.cts
ADDED
|
@@ -0,0 +1,36 @@
|
|
|
1
|
+
import { Sequelize } from 'sequelize';
|
|
2
|
+
import { StateStore, ImportJobState, BatchState, ProcessedRecord, ImportProgress } from '@bulkimport/core';
|
|
3
|
+
|
|
4
|
+
interface SequelizeStateStoreOptions {
    /**
     * Optional prefix for generated table names.
     * NOTE(review): currently unused — the constructor accepts `_options` but
     * never reads it; tables are always `bulkimport_jobs` / `bulkimport_records`.
     */
    readonly tablePrefix?: string;
}
|
|
7
|
+
/**
|
|
8
|
+
* Sequelize-based StateStore adapter for `@bulkimport/core`.
|
|
9
|
+
*
|
|
10
|
+
* Persists import job state and processed records to a relational database
|
|
11
|
+
* using Sequelize v6. Supports any dialect supported by Sequelize (PostgreSQL,
|
|
12
|
+
* MySQL, MariaDB, SQLite, MS SQL Server).
|
|
13
|
+
*
|
|
14
|
+
* Call `initialize()` after construction to create tables.
|
|
15
|
+
*
|
|
16
|
+
* **Limitation:** Non-serializable schema fields (`customValidator`, `transform`,
|
|
17
|
+
* `pattern`) are stripped when saving. The consumer must re-inject them when
|
|
18
|
+
* restoring a job from the database.
|
|
19
|
+
*/
|
|
20
|
+
declare class SequelizeStateStore implements StateStore {
    /** Sequelize connection the models are bound to. */
    private readonly sequelize;
    /** Model for the `bulkimport_jobs` table (one row per import job). */
    private readonly Job;
    /** Model for the `bulkimport_records` table (one row per processed record). */
    private readonly Record;
    /** Defines both models on the given connection; `_options` is accepted but currently unused. */
    constructor(sequelize: Sequelize, _options?: SequelizeStateStoreOptions);
    /** Syncs both models, creating the backing tables; call before other methods. */
    initialize(): Promise<void>;
    /** Inserts or updates the persisted job state (upsert keyed by job id). */
    saveJobState(job: ImportJobState): Promise<void>;
    /** Loads a job by id; resolves to `null` when no such job exists. */
    getJobState(jobId: string): Promise<ImportJobState | null>;
    /** Updates status and counters of one batch inside the job's batches JSON. */
    updateBatchState(jobId: string, batchId: string, state: BatchState): Promise<void>;
    /** Saves one record, replacing any prior row with the same (jobId, index). */
    saveProcessedRecord(jobId: string, batchId: string, record: ProcessedRecord): Promise<void>;
    /** Records with status `failed` or `invalid`, ordered by record index. */
    getFailedRecords(jobId: string): Promise<readonly ProcessedRecord[]>;
    /** Records with status `pending` or `valid`, ordered by record index. */
    getPendingRecords(jobId: string): Promise<readonly ProcessedRecord[]>;
    /** Records with status `processed`, ordered by record index. */
    getProcessedRecords(jobId: string): Promise<readonly ProcessedRecord[]>;
    /** Aggregated progress computed from per-status record counts and job metadata. */
    getProgress(jobId: string): Promise<ImportProgress>;
}
|
|
35
|
+
|
|
36
|
+
export { SequelizeStateStore, type SequelizeStateStoreOptions };
|
package/dist/index.d.ts
ADDED
|
@@ -0,0 +1,36 @@
|
|
|
1
|
+
import { Sequelize } from 'sequelize';
|
|
2
|
+
import { StateStore, ImportJobState, BatchState, ProcessedRecord, ImportProgress } from '@bulkimport/core';
|
|
3
|
+
|
|
4
|
+
interface SequelizeStateStoreOptions {
    /**
     * Optional prefix for generated table names.
     * NOTE(review): currently unused — the constructor accepts `_options` but
     * never reads it; tables are always `bulkimport_jobs` / `bulkimport_records`.
     */
    readonly tablePrefix?: string;
}
|
|
7
|
+
/**
|
|
8
|
+
* Sequelize-based StateStore adapter for `@bulkimport/core`.
|
|
9
|
+
*
|
|
10
|
+
* Persists import job state and processed records to a relational database
|
|
11
|
+
* using Sequelize v6. Supports any dialect supported by Sequelize (PostgreSQL,
|
|
12
|
+
* MySQL, MariaDB, SQLite, MS SQL Server).
|
|
13
|
+
*
|
|
14
|
+
* Call `initialize()` after construction to create tables.
|
|
15
|
+
*
|
|
16
|
+
* **Limitation:** Non-serializable schema fields (`customValidator`, `transform`,
|
|
17
|
+
* `pattern`) are stripped when saving. The consumer must re-inject them when
|
|
18
|
+
* restoring a job from the database.
|
|
19
|
+
*/
|
|
20
|
+
declare class SequelizeStateStore implements StateStore {
    /** Sequelize connection the models are bound to. */
    private readonly sequelize;
    /** Model for the `bulkimport_jobs` table (one row per import job). */
    private readonly Job;
    /** Model for the `bulkimport_records` table (one row per processed record). */
    private readonly Record;
    /** Defines both models on the given connection; `_options` is accepted but currently unused. */
    constructor(sequelize: Sequelize, _options?: SequelizeStateStoreOptions);
    /** Syncs both models, creating the backing tables; call before other methods. */
    initialize(): Promise<void>;
    /** Inserts or updates the persisted job state (upsert keyed by job id). */
    saveJobState(job: ImportJobState): Promise<void>;
    /** Loads a job by id; resolves to `null` when no such job exists. */
    getJobState(jobId: string): Promise<ImportJobState | null>;
    /** Updates status and counters of one batch inside the job's batches JSON. */
    updateBatchState(jobId: string, batchId: string, state: BatchState): Promise<void>;
    /** Saves one record, replacing any prior row with the same (jobId, index). */
    saveProcessedRecord(jobId: string, batchId: string, record: ProcessedRecord): Promise<void>;
    /** Records with status `failed` or `invalid`, ordered by record index. */
    getFailedRecords(jobId: string): Promise<readonly ProcessedRecord[]>;
    /** Records with status `pending` or `valid`, ordered by record index. */
    getPendingRecords(jobId: string): Promise<readonly ProcessedRecord[]>;
    /** Records with status `processed`, ordered by record index. */
    getProcessedRecords(jobId: string): Promise<readonly ProcessedRecord[]>;
    /** Aggregated progress computed from per-status record counts and job metadata. */
    getProgress(jobId: string): Promise<ImportProgress>;
}
|
|
35
|
+
|
|
36
|
+
export { SequelizeStateStore, type SequelizeStateStoreOptions };
|
package/dist/index.js
ADDED
|
@@ -0,0 +1,293 @@
|
|
|
1
|
+
// src/models/JobModel.ts
|
|
2
|
+
import { DataTypes } from "sequelize";
|
|
3
|
+
// Defines the Sequelize model backing the `bulkimport_jobs` table.
// One row per import job; the (stripped) job config and per-batch summaries
// are stored as JSON columns, timestamps as nullable BIGINT epoch millis.
function defineJobModel(sequelize) {
  const required = (type) => ({ type, allowNull: false });
  const optional = (type) => ({ type, allowNull: true });

  const attributes = {
    id: { ...required(DataTypes.STRING(36)), primaryKey: true },
    status: required(DataTypes.STRING(20)),
    config: required(DataTypes.JSON),
    batches: { ...required(DataTypes.JSON), defaultValue: [] },
    totalRecords: { ...required(DataTypes.INTEGER), defaultValue: 0 },
    startedAt: optional(DataTypes.BIGINT),
    completedAt: optional(DataTypes.BIGINT)
  };

  return sequelize.define("BulkImportJob", attributes, {
    tableName: "bulkimport_jobs",
    timestamps: false
  });
}
|
|
45
|
+
|
|
46
|
+
// src/models/RecordModel.ts
|
|
47
|
+
import { DataTypes as DataTypes2 } from "sequelize";
|
|
48
|
+
// Defines the Sequelize model backing the `bulkimport_records` table.
// One row per processed record; rows are uniquely keyed by (jobId, recordIndex)
// and additionally indexed for the per-status and per-batch lookups.
function defineRecordModel(sequelize) {
  const requiredCol = (type) => ({ type, allowNull: false });

  const attributes = {
    id: { type: DataTypes2.INTEGER, primaryKey: true, autoIncrement: true },
    jobId: requiredCol(DataTypes2.STRING(36)),
    batchId: requiredCol(DataTypes2.STRING(36)),
    recordIndex: requiredCol(DataTypes2.INTEGER),
    status: requiredCol(DataTypes2.STRING(10)),
    raw: requiredCol(DataTypes2.JSON),
    parsed: requiredCol(DataTypes2.JSON),
    errors: { ...requiredCol(DataTypes2.JSON), defaultValue: [] },
    processingError: { type: DataTypes2.TEXT, allowNull: true }
  };

  return sequelize.define("BulkImportRecord", attributes, {
    tableName: "bulkimport_records",
    timestamps: false,
    indexes: [
      { fields: ["jobId", "status"] },
      { fields: ["jobId", "batchId"] },
      { unique: true, fields: ["jobId", "recordIndex"] }
    ]
  });
}
|
|
102
|
+
|
|
103
|
+
// src/mappers/JobMapper.ts
|
|
104
|
+
// Strips schema-field properties that cannot survive JSON serialization
// (e.g. customValidator, transform, pattern), keeping only plain-data keys.
// Optional keys (defaultValue, separator, aliases) are copied only when they
// are present, so absent keys stay absent in the stored config. Falsy but
// defined values (0, '', false) are preserved.
function stripNonSerializableFields(config) {
  const serializeField = (field) => {
    const plain = {
      name: field.name,
      type: field.type,
      required: field.required
    };
    if (field.defaultValue !== undefined) plain["defaultValue"] = field.defaultValue;
    if (field.separator !== undefined) plain["separator"] = field.separator;
    if (field.aliases !== undefined) plain["aliases"] = field.aliases;
    return plain;
  };

  return {
    ...config,
    schema: {
      ...config.schema,
      fields: config.schema.fields.map(serializeField)
    }
  };
}
|
|
125
|
+
// Maps an in-memory ImportJobState to a flat row for the jobs table.
// Each batch's `records` array is intentionally emptied: individual records
// are persisted in the records table, so only per-batch status and counters
// are kept here. Missing timestamps are stored as SQL NULL.
function toRow(state) {
  const summarizeBatch = (batch) => ({
    id: batch.id,
    index: batch.index,
    status: batch.status,
    records: [],
    processedCount: batch.processedCount,
    failedCount: batch.failedCount
  });

  return {
    id: state.id,
    status: state.status,
    config: stripNonSerializableFields(state.config),
    batches: state.batches.map(summarizeBatch),
    totalRecords: state.totalRecords,
    startedAt: state.startedAt ?? null,
    completedAt: state.completedAt ?? null
  };
}
|
|
143
|
+
// Rehydrates an ImportJobState from a jobs-table row. BIGINT timestamp
// columns may come back as strings, hence the Number(...) coercion; NULL
// columns are omitted from the result entirely rather than set to undefined.
function toDomain(row) {
  const state = {
    id: row.id,
    config: row.config,
    status: row.status,
    batches: row.batches,
    totalRecords: row.totalRecords
  };

  if (row.startedAt !== null) {
    state.startedAt = Number(row.startedAt);
  }
  if (row.completedAt !== null) {
    state.completedAt = Number(row.completedAt);
  }
  return state;
}
|
|
164
|
+
|
|
165
|
+
// src/mappers/RecordMapper.ts
|
|
166
|
+
// Maps a ProcessedRecord plus its owning job/batch ids to a records-table row.
// A missing processingError is stored as SQL NULL, never as undefined.
function toRow2(jobId, batchId, record) {
  const { index, status, raw, parsed, errors, processingError } = record;
  return {
    jobId,
    batchId,
    recordIndex: index,
    status,
    raw,
    parsed,
    errors,
    processingError: processingError ?? null
  };
}
|
|
178
|
+
// Rehydrates a ProcessedRecord from a records-table row. A NULL
// processingError column is omitted from the result entirely so the domain
// object carries the key only when an error actually occurred.
function toDomain2(row) {
  const record = {
    index: row.recordIndex,
    raw: row.raw,
    parsed: row.parsed,
    status: row.status,
    errors: row.errors
  };
  return row.processingError === null
    ? record
    : { ...record, processingError: row.processingError };
}
|
|
191
|
+
|
|
192
|
+
// src/SequelizeStateStore.ts
|
|
193
|
+
// Runtime implementation of the Sequelize-backed StateStore.
// Job-level state (stripped config, batch summaries, timestamps) lives in the
// jobs table; per-record outcomes live in the records table.
var SequelizeStateStore = class {
  // Binds both models to the given connection.
  // NOTE(review): `_options` (tablePrefix) is accepted but never read.
  constructor(sequelize, _options) {
    this.sequelize = sequelize;
    this.Job = defineJobModel(this.sequelize);
    this.Record = defineRecordModel(this.sequelize);
  }
  // Syncs both models (creates the tables); call before other methods so the
  // tables exist.
  async initialize() {
    await this.Job.sync();
    await this.Record.sync();
  }
  // Inserts or updates the full job row, keyed by the job id.
  async saveJobState(job) {
    const row = toRow(job);
    await this.Job.upsert(row);
  }
  // Loads a job by primary key; returns null when the job is unknown.
  async getJobState(jobId) {
    const row = await this.Job.findByPk(jobId);
    if (!row) return null;
    return toDomain(row.get({ plain: true }));
  }
  // Replaces status/counters of the matching batch inside the job's batches
  // JSON column. Silently a no-op when the job does not exist.
  // NOTE(review): read-modify-write of the JSON column is not atomic under
  // concurrent writers — confirm callers serialize updates per job.
  async updateBatchState(jobId, batchId, state) {
    const row = await this.Job.findByPk(jobId);
    if (!row) return;

    const plain = row.get({ plain: true });
    const batches = plain.batches;

    const updated = batches.map(
      (b) => b.id === batchId
        ? { ...b, status: state.status, processedCount: state.processedCount, failedCount: state.failedCount }
        : b
    );

    await row.update({ batches: updated });
  }
  // Upserts one record by (jobId, recordIndex): updates the existing row if
  // present, otherwise inserts. The unique index on (jobId, recordIndex)
  // makes a racing duplicate insert fail rather than double-insert.
  async saveProcessedRecord(jobId, batchId, record) {
    const row = toRow2(jobId, batchId, record);

    const existing = await this.Record.findOne({
      where: { jobId, recordIndex: record.index }
    });

    if (existing) {
      await existing.update(row);
    } else {
      await this.Record.create(row);
    }
  }
  // Records whose status is "failed" or "invalid", ordered by record index.
  async getFailedRecords(jobId) {
    const { Op } = await import("sequelize");
    const rows = await this.Record.findAll({
      where: { jobId, status: { [Op.in]: ["failed", "invalid"] } },
      order: [["recordIndex", "ASC"]]
    });
    return rows.map((r) => toDomain2(r.get({ plain: true })));
  }
  // Records whose status is "pending" or "valid", ordered by record index.
  async getPendingRecords(jobId) {
    const { Op } = await import("sequelize");
    const rows = await this.Record.findAll({
      where: { jobId, status: { [Op.in]: ["pending", "valid"] } },
      order: [["recordIndex", "ASC"]]
    });
    return rows.map((r) => toDomain2(r.get({ plain: true })));
  }
  // Records whose status is "processed", ordered by record index.
  async getProcessedRecords(jobId) {
    const rows = await this.Record.findAll({
      where: { jobId, status: "processed" },
      order: [["recordIndex", "ASC"]]
    });
    return rows.map((r) => toDomain2(r.get({ plain: true })));
  }
  // Aggregates progress from a GROUP BY status count over the records table
  // plus metadata (totalRecords, batches, startedAt) from the job row.
  // Works even when the job row is missing (all totals fall back to 0/empty).
  async getProgress(jobId) {
    const jobRow = await this.Job.findByPk(jobId);
    const plain = jobRow ? jobRow.get({ plain: true }) : null;

    const { fn, col } = await import("sequelize");
    const counts = await this.Record.findAll({
      attributes: ["status", [fn("COUNT", col("status")), "count"]],
      where: { jobId },
      group: ["status"],
      raw: true
    });

    // COUNT may be returned as a string depending on the dialect/driver,
    // hence the parseInt.
    const countMap = /* @__PURE__ */ new Map();
    for (const row of counts) {
      countMap.set(row.status, parseInt(row.count, 10));
    }

    const processed = countMap.get("processed") ?? 0;
    // "failed" and "invalid" both count as failures, matching getFailedRecords.
    const failed = (countMap.get("failed") ?? 0) + (countMap.get("invalid") ?? 0);
    const totalRecords = plain?.totalRecords ?? 0;
    // Pending is derived (never negative), not queried.
    const pending = Math.max(0, totalRecords - processed - failed);
    const completed = processed + failed;

    const batches = plain?.batches ?? [];
    const completedBatches = batches.filter((b) => b.status === "COMPLETED").length;
    // NOTE(review): truthy check means a startedAt of 0 reads as "not started".
    const elapsed = plain?.startedAt ? Date.now() - Number(plain.startedAt) : 0;

    return {
      totalRecords,
      processedRecords: processed,
      failedRecords: failed,
      pendingRecords: pending,
      // Percentage counts failures as completed work, not just successes.
      percentage: totalRecords > 0 ? Math.round(completed / totalRecords * 100) : 0,
      currentBatch: completedBatches,
      totalBatches: batches.length,
      elapsedMs: elapsed
    };
  }
};
|
|
290
|
+
// Sole public export of this bundle; the model and mapper helpers above
// remain internal.
export {
  SequelizeStateStore
};
|
|
293
|
+
//# sourceMappingURL=index.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/models/JobModel.ts","../src/models/RecordModel.ts","../src/mappers/JobMapper.ts","../src/mappers/RecordMapper.ts","../src/SequelizeStateStore.ts"],"sourcesContent":["import { DataTypes } from 'sequelize';\nimport type { Sequelize, ModelStatic, Model } from 'sequelize';\n\nexport interface JobRow {\n id: string;\n status: string;\n config: unknown;\n batches: unknown;\n totalRecords: number;\n startedAt: number | null;\n completedAt: number | null;\n}\n\nexport type JobModel = ModelStatic<Model>;\n\nexport function defineJobModel(sequelize: Sequelize): JobModel {\n return sequelize.define(\n 'BulkImportJob',\n {\n id: {\n type: DataTypes.STRING(36),\n primaryKey: true,\n allowNull: false,\n },\n status: {\n type: DataTypes.STRING(20),\n allowNull: false,\n },\n config: {\n type: DataTypes.JSON,\n allowNull: false,\n },\n batches: {\n type: DataTypes.JSON,\n allowNull: false,\n defaultValue: [],\n },\n totalRecords: {\n type: DataTypes.INTEGER,\n allowNull: false,\n defaultValue: 0,\n },\n startedAt: {\n type: DataTypes.BIGINT,\n allowNull: true,\n },\n completedAt: {\n type: DataTypes.BIGINT,\n allowNull: true,\n },\n },\n {\n tableName: 'bulkimport_jobs',\n timestamps: false,\n },\n );\n}\n","import { DataTypes } from 'sequelize';\nimport type { Sequelize, ModelStatic, Model } from 'sequelize';\n\nexport interface RecordRow {\n id?: number;\n jobId: string;\n batchId: string;\n recordIndex: number;\n status: string;\n raw: unknown;\n parsed: unknown;\n errors: unknown;\n processingError: string | null;\n}\n\nexport type RecordModel = ModelStatic<Model>;\n\nexport function defineRecordModel(sequelize: Sequelize): RecordModel {\n return sequelize.define(\n 'BulkImportRecord',\n {\n id: {\n type: DataTypes.INTEGER,\n primaryKey: true,\n autoIncrement: true,\n },\n jobId: {\n type: DataTypes.STRING(36),\n allowNull: false,\n },\n batchId: {\n type: DataTypes.STRING(36),\n allowNull: false,\n },\n recordIndex: {\n type: 
DataTypes.INTEGER,\n allowNull: false,\n },\n status: {\n type: DataTypes.STRING(10),\n allowNull: false,\n },\n raw: {\n type: DataTypes.JSON,\n allowNull: false,\n },\n parsed: {\n type: DataTypes.JSON,\n allowNull: false,\n },\n errors: {\n type: DataTypes.JSON,\n allowNull: false,\n defaultValue: [],\n },\n processingError: {\n type: DataTypes.TEXT,\n allowNull: true,\n },\n },\n {\n tableName: 'bulkimport_records',\n timestamps: false,\n indexes: [\n { fields: ['jobId', 'status'] },\n { fields: ['jobId', 'batchId'] },\n { unique: true, fields: ['jobId', 'recordIndex'] },\n ],\n },\n );\n}\n","import type { ImportJobState, ImportJobConfig } from '@bulkimport/core';\nimport type { JobRow } from '../models/JobModel.js';\n\ninterface SerializableFieldDefinition {\n readonly name: string;\n readonly type: string;\n readonly required: boolean;\n readonly defaultValue?: unknown;\n readonly separator?: string;\n readonly aliases?: readonly string[];\n}\n\nfunction stripNonSerializableFields(config: ImportJobConfig): object {\n const fields: SerializableFieldDefinition[] = config.schema.fields.map((f) => {\n const stripped: SerializableFieldDefinition = {\n name: f.name,\n type: f.type,\n required: f.required,\n };\n const result: Record<string, unknown> = { ...stripped };\n if (f.defaultValue !== undefined) result['defaultValue'] = f.defaultValue;\n if (f.separator !== undefined) result['separator'] = f.separator;\n if (f.aliases !== undefined) result['aliases'] = f.aliases;\n return result as unknown as SerializableFieldDefinition;\n });\n\n return {\n ...config,\n schema: {\n ...config.schema,\n fields,\n },\n };\n}\n\nexport function toRow(state: ImportJobState): JobRow {\n return {\n id: state.id,\n status: state.status,\n config: stripNonSerializableFields(state.config),\n batches: state.batches.map((b) => ({\n id: b.id,\n index: b.index,\n status: b.status,\n records: [],\n processedCount: b.processedCount,\n failedCount: b.failedCount,\n })),\n totalRecords: 
state.totalRecords,\n startedAt: state.startedAt ?? null,\n completedAt: state.completedAt ?? null,\n };\n}\n\nexport function toDomain(row: JobRow): ImportJobState {\n const config = row.config as ImportJobConfig;\n const batches = row.batches as ImportJobState['batches'];\n\n const base: ImportJobState = {\n id: row.id,\n config,\n status: row.status as ImportJobState['status'],\n batches,\n totalRecords: row.totalRecords,\n };\n\n if (row.startedAt !== null && row.completedAt !== null) {\n return { ...base, startedAt: Number(row.startedAt), completedAt: Number(row.completedAt) };\n }\n if (row.startedAt !== null) {\n return { ...base, startedAt: Number(row.startedAt) };\n }\n if (row.completedAt !== null) {\n return { ...base, completedAt: Number(row.completedAt) };\n }\n\n return base;\n}\n","import type { ProcessedRecord, RawRecord, ValidationError } from '@bulkimport/core';\nimport type { RecordRow } from '../models/RecordModel.js';\n\nexport function toRow(jobId: string, batchId: string, record: ProcessedRecord): RecordRow {\n return {\n jobId,\n batchId,\n recordIndex: record.index,\n status: record.status,\n raw: record.raw,\n parsed: record.parsed,\n errors: record.errors,\n processingError: record.processingError ?? 
null,\n };\n}\n\nexport function toDomain(row: RecordRow): ProcessedRecord {\n const result: ProcessedRecord = {\n index: row.recordIndex,\n raw: row.raw as RawRecord,\n parsed: row.parsed as RawRecord,\n status: row.status as ProcessedRecord['status'],\n errors: row.errors as readonly ValidationError[],\n };\n\n if (row.processingError !== null) {\n return { ...result, processingError: row.processingError };\n }\n\n return result;\n}\n","import type { Sequelize } from 'sequelize';\nimport type { StateStore, BatchState, ImportJobState, ImportProgress, ProcessedRecord } from '@bulkimport/core';\nimport { defineJobModel } from './models/JobModel.js';\nimport type { JobModel, JobRow } from './models/JobModel.js';\nimport { defineRecordModel } from './models/RecordModel.js';\nimport type { RecordModel, RecordRow } from './models/RecordModel.js';\nimport * as JobMapper from './mappers/JobMapper.js';\nimport * as RecordMapper from './mappers/RecordMapper.js';\n\nexport interface SequelizeStateStoreOptions {\n readonly tablePrefix?: string;\n}\n\n/**\n * Sequelize-based StateStore adapter for `@bulkimport/core`.\n *\n * Persists import job state and processed records to a relational database\n * using Sequelize v6. Supports any dialect supported by Sequelize (PostgreSQL,\n * MySQL, MariaDB, SQLite, MS SQL Server).\n *\n * Call `initialize()` after construction to create tables.\n *\n * **Limitation:** Non-serializable schema fields (`customValidator`, `transform`,\n * `pattern`) are stripped when saving. 
The consumer must re-inject them when\n * restoring a job from the database.\n */\nexport class SequelizeStateStore implements StateStore {\n private readonly sequelize: Sequelize;\n private readonly Job: JobModel;\n private readonly Record: RecordModel;\n\n constructor(sequelize: Sequelize, _options?: SequelizeStateStoreOptions) {\n this.sequelize = sequelize;\n this.Job = defineJobModel(this.sequelize);\n this.Record = defineRecordModel(this.sequelize);\n }\n\n async initialize(): Promise<void> {\n await this.Job.sync();\n await this.Record.sync();\n }\n\n async saveJobState(job: ImportJobState): Promise<void> {\n const row = JobMapper.toRow(job);\n await this.Job.upsert(row as unknown as Record<string, unknown>);\n }\n\n async getJobState(jobId: string): Promise<ImportJobState | null> {\n const row = await this.Job.findByPk(jobId);\n if (!row) return null;\n return JobMapper.toDomain(row.get({ plain: true }) as JobRow);\n }\n\n async updateBatchState(jobId: string, batchId: string, state: BatchState): Promise<void> {\n const row = await this.Job.findByPk(jobId);\n if (!row) return;\n\n const plain = row.get({ plain: true }) as JobRow;\n const batches = plain.batches as Array<{ id: string; status: string; processedCount: number; failedCount: number }>;\n\n const updated = batches.map((b) =>\n b.id === batchId\n ? 
{ ...b, status: state.status, processedCount: state.processedCount, failedCount: state.failedCount }\n : b,\n );\n\n await row.update({ batches: updated });\n }\n\n async saveProcessedRecord(jobId: string, batchId: string, record: ProcessedRecord): Promise<void> {\n const row = RecordMapper.toRow(jobId, batchId, record);\n\n const existing = await this.Record.findOne({\n where: { jobId, recordIndex: record.index },\n });\n\n if (existing) {\n await existing.update(row);\n } else {\n await this.Record.create(row as unknown as Record<string, unknown>);\n }\n }\n\n async getFailedRecords(jobId: string): Promise<readonly ProcessedRecord[]> {\n const { Op } = await import('sequelize');\n const rows = await this.Record.findAll({\n where: { jobId, status: { [Op.in]: ['failed', 'invalid'] } },\n order: [['recordIndex', 'ASC']],\n });\n return rows.map((r) => RecordMapper.toDomain(r.get({ plain: true }) as RecordRow));\n }\n\n async getPendingRecords(jobId: string): Promise<readonly ProcessedRecord[]> {\n const { Op } = await import('sequelize');\n const rows = await this.Record.findAll({\n where: { jobId, status: { [Op.in]: ['pending', 'valid'] } },\n order: [['recordIndex', 'ASC']],\n });\n return rows.map((r) => RecordMapper.toDomain(r.get({ plain: true }) as RecordRow));\n }\n\n async getProcessedRecords(jobId: string): Promise<readonly ProcessedRecord[]> {\n const rows = await this.Record.findAll({\n where: { jobId, status: 'processed' },\n order: [['recordIndex', 'ASC']],\n });\n return rows.map((r) => RecordMapper.toDomain(r.get({ plain: true }) as RecordRow));\n }\n\n async getProgress(jobId: string): Promise<ImportProgress> {\n const jobRow = await this.Job.findByPk(jobId);\n const plain = jobRow ? 
(jobRow.get({ plain: true }) as JobRow) : null;\n\n const { fn, col } = await import('sequelize');\n const counts = (await this.Record.findAll({\n attributes: ['status', [fn('COUNT', col('status')), 'count']],\n where: { jobId },\n group: ['status'],\n raw: true,\n })) as unknown as Array<{ status: string; count: string }>;\n\n const countMap = new Map<string, number>();\n for (const row of counts) {\n countMap.set(row.status, parseInt(row.count, 10));\n }\n\n const processed = countMap.get('processed') ?? 0;\n const failed = (countMap.get('failed') ?? 0) + (countMap.get('invalid') ?? 0);\n const totalRecords = plain?.totalRecords ?? 0;\n const pending = Math.max(0, totalRecords - processed - failed);\n const completed = processed + failed;\n\n const batches = (plain?.batches ?? []) as Array<{ status: string }>;\n const completedBatches = batches.filter((b) => b.status === 'COMPLETED').length;\n const elapsed = plain?.startedAt ? Date.now() - Number(plain.startedAt) : 0;\n\n return {\n totalRecords,\n processedRecords: processed,\n failedRecords: failed,\n pendingRecords: pending,\n percentage: totalRecords > 0 ? 
Math.round((completed / totalRecords) * 100) : 0,\n currentBatch: completedBatches,\n totalBatches: batches.length,\n elapsedMs: elapsed,\n };\n }\n}\n"],"mappings":";AAAA,SAAS,iBAAiB;AAenB,SAAS,eAAe,WAAgC;AAC7D,SAAO,UAAU;AAAA,IACf;AAAA,IACA;AAAA,MACE,IAAI;AAAA,QACF,MAAM,UAAU,OAAO,EAAE;AAAA,QACzB,YAAY;AAAA,QACZ,WAAW;AAAA,MACb;AAAA,MACA,QAAQ;AAAA,QACN,MAAM,UAAU,OAAO,EAAE;AAAA,QACzB,WAAW;AAAA,MACb;AAAA,MACA,QAAQ;AAAA,QACN,MAAM,UAAU;AAAA,QAChB,WAAW;AAAA,MACb;AAAA,MACA,SAAS;AAAA,QACP,MAAM,UAAU;AAAA,QAChB,WAAW;AAAA,QACX,cAAc,CAAC;AAAA,MACjB;AAAA,MACA,cAAc;AAAA,QACZ,MAAM,UAAU;AAAA,QAChB,WAAW;AAAA,QACX,cAAc;AAAA,MAChB;AAAA,MACA,WAAW;AAAA,QACT,MAAM,UAAU;AAAA,QAChB,WAAW;AAAA,MACb;AAAA,MACA,aAAa;AAAA,QACX,MAAM,UAAU;AAAA,QAChB,WAAW;AAAA,MACb;AAAA,IACF;AAAA,IACA;AAAA,MACE,WAAW;AAAA,MACX,YAAY;AAAA,IACd;AAAA,EACF;AACF;;;ACxDA,SAAS,aAAAA,kBAAiB;AAiBnB,SAAS,kBAAkB,WAAmC;AACnE,SAAO,UAAU;AAAA,IACf;AAAA,IACA;AAAA,MACE,IAAI;AAAA,QACF,MAAMA,WAAU;AAAA,QAChB,YAAY;AAAA,QACZ,eAAe;AAAA,MACjB;AAAA,MACA,OAAO;AAAA,QACL,MAAMA,WAAU,OAAO,EAAE;AAAA,QACzB,WAAW;AAAA,MACb;AAAA,MACA,SAAS;AAAA,QACP,MAAMA,WAAU,OAAO,EAAE;AAAA,QACzB,WAAW;AAAA,MACb;AAAA,MACA,aAAa;AAAA,QACX,MAAMA,WAAU;AAAA,QAChB,WAAW;AAAA,MACb;AAAA,MACA,QAAQ;AAAA,QACN,MAAMA,WAAU,OAAO,EAAE;AAAA,QACzB,WAAW;AAAA,MACb;AAAA,MACA,KAAK;AAAA,QACH,MAAMA,WAAU;AAAA,QAChB,WAAW;AAAA,MACb;AAAA,MACA,QAAQ;AAAA,QACN,MAAMA,WAAU;AAAA,QAChB,WAAW;AAAA,MACb;AAAA,MACA,QAAQ;AAAA,QACN,MAAMA,WAAU;AAAA,QAChB,WAAW;AAAA,QACX,cAAc,CAAC;AAAA,MACjB;AAAA,MACA,iBAAiB;AAAA,QACf,MAAMA,WAAU;AAAA,QAChB,WAAW;AAAA,MACb;AAAA,IACF;AAAA,IACA;AAAA,MACE,WAAW;AAAA,MACX,YAAY;AAAA,MACZ,SAAS;AAAA,QACP,EAAE,QAAQ,CAAC,SAAS,QAAQ,EAAE;AAAA,QAC9B,EAAE,QAAQ,CAAC,SAAS,SAAS,EAAE;AAAA,QAC/B,EAAE,QAAQ,MAAM,QAAQ,CAAC,SAAS,aAAa,EAAE;AAAA,MACnD;AAAA,IACF;AAAA,EACF;AACF;;;AC1DA,SAAS,2BAA2B,QAAiC;AACnE,QAAM,SAAwC,OAAO,OAAO,OAAO,IAAI,CAAC,MAAM;AAC5E,UAAM,WAAwC;AAAA,MAC5C,MAAM,EAAE;AAAA,MACR,MAAM,EAAE;AAAA,MACR,UAAU,EAAE;AAAA,IACd;AACA,UAAM,SAAkC,EAAE,GAAG,SAAS;AACtD,QAAI,EAAE,iBAAiB,OAAW,QAAO,cAAc,IAAI,EA
AE;AAC7D,QAAI,EAAE,cAAc,OAAW,QAAO,WAAW,IAAI,EAAE;AACvD,QAAI,EAAE,YAAY,OAAW,QAAO,SAAS,IAAI,EAAE;AACnD,WAAO;AAAA,EACT,CAAC;AAED,SAAO;AAAA,IACL,GAAG;AAAA,IACH,QAAQ;AAAA,MACN,GAAG,OAAO;AAAA,MACV;AAAA,IACF;AAAA,EACF;AACF;AAEO,SAAS,MAAM,OAA+B;AACnD,SAAO;AAAA,IACL,IAAI,MAAM;AAAA,IACV,QAAQ,MAAM;AAAA,IACd,QAAQ,2BAA2B,MAAM,MAAM;AAAA,IAC/C,SAAS,MAAM,QAAQ,IAAI,CAAC,OAAO;AAAA,MACjC,IAAI,EAAE;AAAA,MACN,OAAO,EAAE;AAAA,MACT,QAAQ,EAAE;AAAA,MACV,SAAS,CAAC;AAAA,MACV,gBAAgB,EAAE;AAAA,MAClB,aAAa,EAAE;AAAA,IACjB,EAAE;AAAA,IACF,cAAc,MAAM;AAAA,IACpB,WAAW,MAAM,aAAa;AAAA,IAC9B,aAAa,MAAM,eAAe;AAAA,EACpC;AACF;AAEO,SAAS,SAAS,KAA6B;AACpD,QAAM,SAAS,IAAI;AACnB,QAAM,UAAU,IAAI;AAEpB,QAAM,OAAuB;AAAA,IAC3B,IAAI,IAAI;AAAA,IACR;AAAA,IACA,QAAQ,IAAI;AAAA,IACZ;AAAA,IACA,cAAc,IAAI;AAAA,EACpB;AAEA,MAAI,IAAI,cAAc,QAAQ,IAAI,gBAAgB,MAAM;AACtD,WAAO,EAAE,GAAG,MAAM,WAAW,OAAO,IAAI,SAAS,GAAG,aAAa,OAAO,IAAI,WAAW,EAAE;AAAA,EAC3F;AACA,MAAI,IAAI,cAAc,MAAM;AAC1B,WAAO,EAAE,GAAG,MAAM,WAAW,OAAO,IAAI,SAAS,EAAE;AAAA,EACrD;AACA,MAAI,IAAI,gBAAgB,MAAM;AAC5B,WAAO,EAAE,GAAG,MAAM,aAAa,OAAO,IAAI,WAAW,EAAE;AAAA,EACzD;AAEA,SAAO;AACT;;;AC1EO,SAASC,OAAM,OAAe,SAAiB,QAAoC;AACxF,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA,aAAa,OAAO;AAAA,IACpB,QAAQ,OAAO;AAAA,IACf,KAAK,OAAO;AAAA,IACZ,QAAQ,OAAO;AAAA,IACf,QAAQ,OAAO;AAAA,IACf,iBAAiB,OAAO,mBAAmB;AAAA,EAC7C;AACF;AAEO,SAASC,UAAS,KAAiC;AACxD,QAAM,SAA0B;AAAA,IAC9B,OAAO,IAAI;AAAA,IACX,KAAK,IAAI;AAAA,IACT,QAAQ,IAAI;AAAA,IACZ,QAAQ,IAAI;AAAA,IACZ,QAAQ,IAAI;AAAA,EACd;AAEA,MAAI,IAAI,oBAAoB,MAAM;AAChC,WAAO,EAAE,GAAG,QAAQ,iBAAiB,IAAI,gBAAgB;AAAA,EAC3D;AAEA,SAAO;AACT;;;ACJO,IAAM,sBAAN,MAAgD;AAAA,EAKrD,YAAY,WAAsB,UAAuC;AACvE,SAAK,YAAY;AACjB,SAAK,MAAM,eAAe,KAAK,SAAS;AACxC,SAAK,SAAS,kBAAkB,KAAK,SAAS;AAAA,EAChD;AAAA,EAEA,MAAM,aAA4B;AAChC,UAAM,KAAK,IAAI,KAAK;AACpB,UAAM,KAAK,OAAO,KAAK;AAAA,EACzB;AAAA,EAEA,MAAM,aAAa,KAAoC;AACrD,UAAM,MAAgB,MAAM,GAAG;AAC/B,UAAM,KAAK,IAAI,OAAO,GAAyC;AAAA,EACjE;AAAA,EAEA,MAAM,YAAY,OAA+C;AAC/D,UAAM,MAAM,MAAM,KAAK,IAAI,SAAS,KAAK;AACzC,QAAI,CAAC,IAAK,QAAO;AACjB,WAAiB,SAAS,IAAI,IAAI,EAAE,OAAO,KAAK,
CAAC,CAAW;AAAA,EAC9D;AAAA,EAEA,MAAM,iBAAiB,OAAe,SAAiB,OAAkC;AACvF,UAAM,MAAM,MAAM,KAAK,IAAI,SAAS,KAAK;AACzC,QAAI,CAAC,IAAK;AAEV,UAAM,QAAQ,IAAI,IAAI,EAAE,OAAO,KAAK,CAAC;AACrC,UAAM,UAAU,MAAM;AAEtB,UAAM,UAAU,QAAQ;AAAA,MAAI,CAAC,MAC3B,EAAE,OAAO,UACL,EAAE,GAAG,GAAG,QAAQ,MAAM,QAAQ,gBAAgB,MAAM,gBAAgB,aAAa,MAAM,YAAY,IACnG;AAAA,IACN;AAEA,UAAM,IAAI,OAAO,EAAE,SAAS,QAAQ,CAAC;AAAA,EACvC;AAAA,EAEA,MAAM,oBAAoB,OAAe,SAAiB,QAAwC;AAChG,UAAM,MAAmBC,OAAM,OAAO,SAAS,MAAM;AAErD,UAAM,WAAW,MAAM,KAAK,OAAO,QAAQ;AAAA,MACzC,OAAO,EAAE,OAAO,aAAa,OAAO,MAAM;AAAA,IAC5C,CAAC;AAED,QAAI,UAAU;AACZ,YAAM,SAAS,OAAO,GAAG;AAAA,IAC3B,OAAO;AACL,YAAM,KAAK,OAAO,OAAO,GAAyC;AAAA,IACpE;AAAA,EACF;AAAA,EAEA,MAAM,iBAAiB,OAAoD;AACzE,UAAM,EAAE,GAAG,IAAI,MAAM,OAAO,WAAW;AACvC,UAAM,OAAO,MAAM,KAAK,OAAO,QAAQ;AAAA,MACrC,OAAO,EAAE,OAAO,QAAQ,EAAE,CAAC,GAAG,EAAE,GAAG,CAAC,UAAU,SAAS,EAAE,EAAE;AAAA,MAC3D,OAAO,CAAC,CAAC,eAAe,KAAK,CAAC;AAAA,IAChC,CAAC;AACD,WAAO,KAAK,IAAI,CAAC,MAAmBC,UAAS,EAAE,IAAI,EAAE,OAAO,KAAK,CAAC,CAAc,CAAC;AAAA,EACnF;AAAA,EAEA,MAAM,kBAAkB,OAAoD;AAC1E,UAAM,EAAE,GAAG,IAAI,MAAM,OAAO,WAAW;AACvC,UAAM,OAAO,MAAM,KAAK,OAAO,QAAQ;AAAA,MACrC,OAAO,EAAE,OAAO,QAAQ,EAAE,CAAC,GAAG,EAAE,GAAG,CAAC,WAAW,OAAO,EAAE,EAAE;AAAA,MAC1D,OAAO,CAAC,CAAC,eAAe,KAAK,CAAC;AAAA,IAChC,CAAC;AACD,WAAO,KAAK,IAAI,CAAC,MAAmBA,UAAS,EAAE,IAAI,EAAE,OAAO,KAAK,CAAC,CAAc,CAAC;AAAA,EACnF;AAAA,EAEA,MAAM,oBAAoB,OAAoD;AAC5E,UAAM,OAAO,MAAM,KAAK,OAAO,QAAQ;AAAA,MACrC,OAAO,EAAE,OAAO,QAAQ,YAAY;AAAA,MACpC,OAAO,CAAC,CAAC,eAAe,KAAK,CAAC;AAAA,IAChC,CAAC;AACD,WAAO,KAAK,IAAI,CAAC,MAAmBA,UAAS,EAAE,IAAI,EAAE,OAAO,KAAK,CAAC,CAAc,CAAC;AAAA,EACnF;AAAA,EAEA,MAAM,YAAY,OAAwC;AACxD,UAAM,SAAS,MAAM,KAAK,IAAI,SAAS,KAAK;AAC5C,UAAM,QAAQ,SAAU,OAAO,IAAI,EAAE,OAAO,KAAK,CAAC,IAAe;AAEjE,UAAM,EAAE,IAAI,IAAI,IAAI,MAAM,OAAO,WAAW;AAC5C,UAAM,SAAU,MAAM,KAAK,OAAO,QAAQ;AAAA,MACxC,YAAY,CAAC,UAAU,CAAC,GAAG,SAAS,IAAI,QAAQ,CAAC,GAAG,OAAO,CAAC;AAAA,MAC5D,OAAO,EAAE,MAAM;AAAA,MACf,OAAO,CAAC,QAAQ;AAAA,MAChB,KAAK;AAAA,IACP,CAAC;AAED,UAAM,WAAW,oBAAI,IAAoB;AACzC,eAAW,OAAO,QAAQ;AACxB,eAAS,IAAI,IAAI,QAAQ,SAAS,IA
AI,OAAO,EAAE,CAAC;AAAA,IAClD;AAEA,UAAM,YAAY,SAAS,IAAI,WAAW,KAAK;AAC/C,UAAM,UAAU,SAAS,IAAI,QAAQ,KAAK,MAAM,SAAS,IAAI,SAAS,KAAK;AAC3E,UAAM,eAAe,OAAO,gBAAgB;AAC5C,UAAM,UAAU,KAAK,IAAI,GAAG,eAAe,YAAY,MAAM;AAC7D,UAAM,YAAY,YAAY;AAE9B,UAAM,UAAW,OAAO,WAAW,CAAC;AACpC,UAAM,mBAAmB,QAAQ,OAAO,CAAC,MAAM,EAAE,WAAW,WAAW,EAAE;AACzE,UAAM,UAAU,OAAO,YAAY,KAAK,IAAI,IAAI,OAAO,MAAM,SAAS,IAAI;AAE1E,WAAO;AAAA,MACL;AAAA,MACA,kBAAkB;AAAA,MAClB,eAAe;AAAA,MACf,gBAAgB;AAAA,MAChB,YAAY,eAAe,IAAI,KAAK,MAAO,YAAY,eAAgB,GAAG,IAAI;AAAA,MAC9E,cAAc;AAAA,MACd,cAAc,QAAQ;AAAA,MACtB,WAAW;AAAA,IACb;AAAA,EACF;AACF;","names":["DataTypes","toRow","toDomain","toRow","toDomain"]}
|
package/package.json
ADDED
|
@@ -0,0 +1,61 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@bulkimport/state-sequelize",
|
|
3
|
+
"version": "0.1.0",
|
|
4
|
+
"description": "Sequelize-based StateStore adapter for @bulkimport/core",
|
|
5
|
+
"type": "module",
|
|
6
|
+
"exports": {
|
|
7
|
+
".": {
|
|
8
|
+
"types": "./dist/index.d.ts",
|
|
9
|
+
"import": "./dist/index.js",
|
|
10
|
+
"require": "./dist/index.cjs"
|
|
11
|
+
}
|
|
12
|
+
},
|
|
13
|
+
"main": "./dist/index.cjs",
|
|
14
|
+
"module": "./dist/index.js",
|
|
15
|
+
"types": "./dist/index.d.ts",
|
|
16
|
+
"files": [
|
|
17
|
+
"dist",
|
|
18
|
+
"LICENSE"
|
|
19
|
+
],
|
|
20
|
+
"scripts": {
|
|
21
|
+
"build": "tsup",
|
|
22
|
+
"test": "vitest run",
|
|
23
|
+
"test:watch": "vitest",
|
|
24
|
+
"typecheck": "tsc --noEmit",
|
|
25
|
+
"lint": "eslint src/ tests/"
|
|
26
|
+
},
|
|
27
|
+
"keywords": [
|
|
28
|
+
"bulkimport",
|
|
29
|
+
"sequelize",
|
|
30
|
+
"state-store",
|
|
31
|
+
"persistence",
|
|
32
|
+
"database"
|
|
33
|
+
],
|
|
34
|
+
"license": "MIT",
|
|
35
|
+
"author": "Víctor Garcia <vgpastor@ingenierosweb.co>",
|
|
36
|
+
"repository": {
|
|
37
|
+
"type": "git",
|
|
38
|
+
"url": "https://github.com/vgpastor/bulkimport",
|
|
39
|
+
"directory": "packages/state-sequelize"
|
|
40
|
+
},
|
|
41
|
+
"engines": {
|
|
42
|
+
"node": ">=16.7.0"
|
|
43
|
+
},
|
|
44
|
+
"peerDependencies": {
|
|
45
|
+
"@bulkimport/core": ">=0.1.0",
|
|
46
|
+
"sequelize": "^6.0.0"
|
|
47
|
+
},
|
|
48
|
+
"devDependencies": {
|
|
49
|
+
"@bulkimport/core": "file:../../",
|
|
50
|
+
"@types/node": "^22.13.4",
|
|
51
|
+
"@typescript-eslint/eslint-plugin": "^8.24.1",
|
|
52
|
+
"@typescript-eslint/parser": "^8.24.1",
|
|
53
|
+
"eslint": "^9.20.0",
|
|
54
|
+
"eslint-config-prettier": "^10.0.1",
|
|
55
|
+
"sequelize": "^6.37.0",
|
|
56
|
+
"sqlite3": "^5.1.7",
|
|
57
|
+
"tsup": "^8.3.6",
|
|
58
|
+
"typescript": "^5.7.3",
|
|
59
|
+
"vitest": "^3.0.6"
|
|
60
|
+
}
|
|
61
|
+
}
|