@batchactions/state-sequelize 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +109 -0
- package/dist/index.cjs +651 -0
- package/dist/index.cjs.map +1 -0
- package/dist/index.d.cts +52 -0
- package/dist/index.d.ts +52 -0
- package/dist/index.js +614 -0
- package/dist/index.js.map +1 -0
- package/package.json +61 -0
package/dist/index.js
ADDED
|
@@ -0,0 +1,614 @@
|
|
|
1
|
+
// src/models/JobModel.ts
|
|
2
|
+
import { DataTypes } from "sequelize";
|
|
3
|
+
/**
 * Defines the Sequelize model for batch-import jobs.
 * Backed by table `bulkimport_jobs`; timestamps are managed by the
 * application (startedAt/completedAt as epoch-millis BIGINTs), not Sequelize.
 */
function defineJobModel(sequelize) {
  // Column definitions: the job id is an externally supplied 36-char id
  // (UUID-sized), and batch summaries are denormalized into a JSON column.
  const attributes = {
    id: { type: DataTypes.STRING(36), primaryKey: true, allowNull: false },
    status: { type: DataTypes.STRING(20), allowNull: false },
    config: { type: DataTypes.JSON, allowNull: false },
    batches: { type: DataTypes.JSON, allowNull: false, defaultValue: [] },
    totalRecords: { type: DataTypes.INTEGER, allowNull: false, defaultValue: 0 },
    startedAt: { type: DataTypes.BIGINT, allowNull: true },
    completedAt: { type: DataTypes.BIGINT, allowNull: true },
    distributed: { type: DataTypes.BOOLEAN, allowNull: false, defaultValue: false }
  };
  const options = {
    tableName: "bulkimport_jobs",
    timestamps: false
  };
  return sequelize.define("BatchActionsJob", attributes, options);
}
|
|
50
|
+
|
|
51
|
+
// src/models/RecordModel.ts
|
|
52
|
+
import { DataTypes as DataTypes2 } from "sequelize";
|
|
53
|
+
/**
 * Defines the Sequelize model for individual imported records.
 * Backed by table `bulkimport_records`; each row belongs to a job and a
 * batch, and (jobId, recordIndex) is unique so re-saves can upsert safely.
 */
function defineRecordModel(sequelize) {
  const attributes = {
    id: { type: DataTypes2.INTEGER, primaryKey: true, autoIncrement: true },
    jobId: { type: DataTypes2.STRING(36), allowNull: false },
    batchId: { type: DataTypes2.STRING(36), allowNull: false },
    recordIndex: { type: DataTypes2.INTEGER, allowNull: false },
    status: { type: DataTypes2.STRING(10), allowNull: false },
    raw: { type: DataTypes2.JSON, allowNull: false },
    parsed: { type: DataTypes2.JSON, allowNull: false },
    errors: { type: DataTypes2.JSON, allowNull: false, defaultValue: [] },
    processingError: { type: DataTypes2.TEXT, allowNull: true }
  };
  const options = {
    tableName: "bulkimport_records",
    timestamps: false,
    // Indexes match the query patterns in SequelizeStateStore:
    // status scans per job, per-batch reads, and unique per-job record slots.
    indexes: [
      { fields: ["jobId", "status"] },
      { fields: ["jobId", "batchId"] },
      { unique: true, fields: ["jobId", "recordIndex"] }
    ]
  };
  return sequelize.define("BatchActionsRecord", attributes, options);
}
|
|
107
|
+
|
|
108
|
+
// src/models/BatchModel.ts
|
|
109
|
+
import { DataTypes as DataTypes3 } from "sequelize";
|
|
110
|
+
/**
 * Defines the Sequelize model for work batches.
 * Backed by table `bulkimport_batches`; rows carry claim bookkeeping
 * (workerId/claimedAt) and a `version` counter used for optimistic locking
 * when workers race to claim or release a batch.
 */
function defineBatchModel(sequelize) {
  const attributes = {
    id: { type: DataTypes3.STRING(36), primaryKey: true, allowNull: false },
    jobId: { type: DataTypes3.STRING(36), allowNull: false },
    batchIndex: { type: DataTypes3.INTEGER, allowNull: false },
    status: { type: DataTypes3.STRING(20), allowNull: false, defaultValue: "PENDING" },
    workerId: { type: DataTypes3.STRING(128), allowNull: true },
    claimedAt: { type: DataTypes3.BIGINT, allowNull: true },
    recordStartIndex: { type: DataTypes3.INTEGER, allowNull: false, defaultValue: 0 },
    recordEndIndex: { type: DataTypes3.INTEGER, allowNull: false, defaultValue: 0 },
    processedCount: { type: DataTypes3.INTEGER, allowNull: false, defaultValue: 0 },
    failedCount: { type: DataTypes3.INTEGER, allowNull: false, defaultValue: 0 },
    version: { type: DataTypes3.INTEGER, allowNull: false, defaultValue: 0 }
  };
  const options = {
    tableName: "bulkimport_batches",
    timestamps: false,
    indexes: [{ fields: ["jobId", "status"] }, { unique: true, fields: ["jobId", "batchIndex"] }]
  };
  return sequelize.define("BatchActionsBatch", attributes, options);
}
|
|
173
|
+
|
|
174
|
+
// src/utils/parseJson.ts
|
|
175
|
+
/**
 * Normalizes a JSON-column value: some SQL dialects return JSON columns as
 * raw strings, others as already-parsed values. Strings are parsed; anything
 * else is returned unchanged.
 */
function parseJson(value) {
  return typeof value === "string" ? JSON.parse(value) : value;
}
|
|
181
|
+
|
|
182
|
+
// src/mappers/JobMapper.ts
|
|
183
|
+
// src/mappers/JobMapper.ts
/**
 * Returns a copy of a job config whose schema fields are reduced to plain
 * JSON-serializable properties (name/type/required plus the optional
 * defaultValue/separator/aliases). Anything else on a field — e.g. function
 * values — is dropped so the config can be stored in a JSON column.
 * Configs without an array-valued `schema.fields` are returned untouched.
 */
function stripNonSerializableFields(config) {
  const schema = config.schema;
  // Array.isArray already rejects every falsy `fields` value.
  if (!schema || !Array.isArray(schema.fields)) return config;
  const fields = schema.fields.map((field) => {
    const sanitized = {
      name: typeof field["name"] === "string" ? field["name"] : "",
      type: typeof field["type"] === "string" ? field["type"] : "",
      required: typeof field["required"] === "boolean" ? field["required"] : false
    };
    // Optional pass-through properties, copied only when explicitly present.
    for (const key of ["defaultValue", "separator", "aliases"]) {
      if (field[key] !== void 0) sanitized[key] = field[key];
    }
    return sanitized;
  });
  return { ...config, schema: { ...schema, fields } };
}
|
|
206
|
+
/**
 * Maps an in-memory job state to its persistence row.
 * Batch summaries are stored without their records (records live in their
 * own table), and the config is sanitized for JSON storage. Missing
 * startedAt/completedAt become NULL; missing `distributed` becomes false.
 */
function toRow(state) {
  const batchSummaries = state.batches.map((batch) => ({
    id: batch.id,
    index: batch.index,
    status: batch.status,
    records: [],
    processedCount: batch.processedCount,
    failedCount: batch.failedCount
  }));
  return {
    id: state.id,
    status: state.status,
    config: stripNonSerializableFields(state.config),
    batches: batchSummaries,
    totalRecords: state.totalRecords,
    startedAt: state.startedAt ?? null,
    completedAt: state.completedAt ?? null,
    distributed: state.distributed ?? false
  };
}
|
|
225
|
+
/**
 * Maps a plain job row back to the domain shape.
 * JSON columns are normalized via parseJson, BIGINT timestamps are coerced
 * to numbers, and optional keys (startedAt/completedAt/distributed) are
 * only present on the result when the row actually carries them.
 */
function toDomain(row) {
  const state = {
    id: row.id,
    config: parseJson(row.config),
    status: row.status,
    batches: parseJson(row.batches),
    totalRecords: row.totalRecords
  };
  // BIGINTs may surface as strings depending on dialect; Number() normalizes.
  if (row.startedAt !== null) {
    state["startedAt"] = Number(row.startedAt);
  }
  if (row.completedAt !== null) {
    state["completedAt"] = Number(row.completedAt);
  }
  if (row.distributed) {
    state["distributed"] = true;
  }
  return state;
}
|
|
247
|
+
|
|
248
|
+
// src/mappers/RecordMapper.ts
|
|
249
|
+
// src/mappers/RecordMapper.ts
/**
 * Maps a domain record to its persistence row for a given job and batch.
 * A missing processingError is stored as NULL.
 */
function toRow2(jobId, batchId, record) {
  const { index, status, raw, parsed, errors, processingError } = record;
  return {
    jobId,
    batchId,
    recordIndex: index,
    status,
    raw,
    parsed,
    errors,
    processingError: processingError ?? null
  };
}
|
|
261
|
+
/**
 * Maps a plain record row back to the domain shape.
 * JSON columns are normalized via parseJson; `processingError` appears on
 * the result only when the column is non-NULL.
 */
function toDomain2(row) {
  const record = {
    index: row.recordIndex,
    raw: parseJson(row.raw),
    parsed: parseJson(row.parsed),
    status: row.status,
    errors: parseJson(row.errors)
  };
  return row.processingError === null ? record : { ...record, processingError: row.processingError };
}
|
|
274
|
+
|
|
275
|
+
// src/SequelizeStateStore.ts
|
|
276
|
+
// src/SequelizeStateStore.ts
/**
 * Sequelize-backed state store for batch-import jobs.
 *
 * Storage layout (see the model definitions above):
 *  - Job rows carry the job config plus a DENORMALIZED JSON copy of batch
 *    summaries in the `batches` column.
 *  - Record rows hold one row per imported record, unique on (jobId, recordIndex).
 *  - Batch rows are the authoritative per-batch claim state, guarded by an
 *    optimistic-lock `version` column.
 * Several methods therefore write batch state twice: once to the batches
 * table and once into the job row's JSON copy.
 */
var SequelizeStateStore = class {
  /**
   * Defines the three models against the given Sequelize instance.
   * Tables are not created here — call initialize() for that.
   * `_options` is accepted but currently unused.
   */
  constructor(sequelize, _options) {
    this.sequelize = sequelize;
    this.Job = defineJobModel(this.sequelize);
    this.Record = defineRecordModel(this.sequelize);
    this.Batch = defineBatchModel(this.sequelize);
  }
  /** Creates the backing tables if they do not exist (Model.sync per model). */
  async initialize() {
    await this.Job.sync();
    await this.Record.sync();
    await this.Batch.sync();
  }
  // ── StateStore methods ──────────────────────────────────────────────
  /** Inserts or replaces the full job row (config, status, batch summaries). */
  async saveJobState(job) {
    const row = toRow(job);
    await this.Job.upsert(row);
  }
  /** Loads a job by id and maps it to the domain shape; null when absent. */
  async getJobState(jobId) {
    const row = await this.Job.findByPk(jobId);
    if (!row) return null;
    return toDomain(row.get({ plain: true }));
  }
  /**
   * Updates one batch's status/counters in both places they are stored:
   * the job row's `batches` JSON and the batches table. No-op when the job
   * is missing; the table update is skipped when no batch row matches.
   * NOTE(review): the two writes are not wrapped in a transaction, so a
   * failure between them can leave the copies inconsistent — confirm callers
   * tolerate that.
   */
  async updateBatchState(jobId, batchId, state) {
    const row = await this.Job.findByPk(jobId);
    if (!row) return;
    const plain = row.get({ plain: true });
    const batches = parseJson(plain.batches);
    const updated = batches.map(
      (b) => b.id === batchId ? { ...b, status: state.status, processedCount: state.processedCount, failedCount: state.failedCount } : b
    );
    await row.update({ batches: updated });
    const batchRow = await this.Batch.findByPk(batchId);
    if (batchRow) {
      await batchRow.update({
        status: state.status,
        processedCount: state.processedCount,
        failedCount: state.failedCount
      });
    }
  }
  /**
   * Saves one processed record, updating in place when a row already exists
   * for (jobId, record.index) and inserting otherwise.
   * NOTE(review): find-then-create is not atomic; concurrent saves of the
   * same index could race on the unique (jobId, recordIndex) index.
   */
  async saveProcessedRecord(jobId, batchId, record) {
    const row = toRow2(jobId, batchId, record);
    const existing = await this.Record.findOne({
      where: { jobId, recordIndex: record.index }
    });
    if (existing) {
      await existing.update(row);
    } else {
      await this.Record.create(row);
    }
  }
  /** Returns the job's "failed" and "invalid" records, ordered by record index. */
  async getFailedRecords(jobId) {
    const { Op } = await import("sequelize");
    const rows = await this.Record.findAll({
      where: { jobId, status: { [Op.in]: ["failed", "invalid"] } },
      order: [["recordIndex", "ASC"]]
    });
    return rows.map((r) => toDomain2(r.get({ plain: true })));
  }
  /** Returns the job's "pending" and "valid" records, ordered by record index. */
  async getPendingRecords(jobId) {
    const { Op } = await import("sequelize");
    const rows = await this.Record.findAll({
      where: { jobId, status: { [Op.in]: ["pending", "valid"] } },
      order: [["recordIndex", "ASC"]]
    });
    return rows.map((r) => toDomain2(r.get({ plain: true })));
  }
  /** Returns the job's successfully processed records, ordered by record index. */
  async getProcessedRecords(jobId) {
    const rows = await this.Record.findAll({
      where: { jobId, status: "processed" },
      order: [["recordIndex", "ASC"]]
    });
    return rows.map((r) => toDomain2(r.get({ plain: true })));
  }
  /**
   * Computes a progress snapshot for a job from a GROUP BY over record
   * statuses plus the job row's totals and batch summaries.
   * Works even when the job row is missing (all totals fall back to 0/empty).
   * `percentage` counts both processed and failed records as "done".
   */
  async getProgress(jobId) {
    const jobRow = await this.Job.findByPk(jobId);
    const plain = jobRow ? jobRow.get({ plain: true }) : null;
    const { fn, col } = await import("sequelize");
    const counts = await this.Record.findAll({
      attributes: ["status", [fn("COUNT", col("status")), "count"]],
      where: { jobId },
      group: ["status"],
      raw: true
    });
    const countMap = /* @__PURE__ */ new Map();
    for (const row of counts) {
      // COUNT() may come back as a string depending on dialect.
      countMap.set(row.status, parseInt(row.count, 10));
    }
    const processed = countMap.get("processed") ?? 0;
    const failed = (countMap.get("failed") ?? 0) + (countMap.get("invalid") ?? 0);
    const totalRecords = plain?.totalRecords ?? 0;
    const pending = Math.max(0, totalRecords - processed - failed);
    const completed = processed + failed;
    const batches = plain ? parseJson(plain.batches) : [];
    const completedBatches = batches.filter((b) => b.status === "COMPLETED").length;
    const elapsed = plain?.startedAt ? Date.now() - Number(plain.startedAt) : 0;
    return {
      totalRecords,
      processedRecords: processed,
      failedRecords: failed,
      pendingRecords: pending,
      percentage: totalRecords > 0 ? Math.round(completed / totalRecords * 100) : 0,
      currentBatch: completedBatches,
      totalBatches: batches.length,
      elapsedMs: elapsed
    };
  }
  // ── DistributedStateStore methods ───────────────────────────────────
  /**
   * Attempts to claim the lowest-index PENDING batch of a job for a worker.
   * Returns { claimed: false, reason } when the job is missing, not in
   * PROCESSING, or has no claimable batch; otherwise { claimed: true,
   * reservation } describing the record range the worker should process.
   *
   * Claiming runs in a transaction: the candidate row is read with
   * `lock: true`, then flipped to PROCESSING via an UPDATE guarded by the
   * optimistic `version` column — if another worker got there first the
   * guarded UPDATE affects 0 rows and the claim fails. The job row's JSON
   * batch copy is updated in the same transaction.
   * NOTE(review): the job's PROCESSING status is checked before the
   * transaction opens, so a concurrent finalization could slip in between —
   * confirm that is acceptable.
   */
  async claimBatch(jobId, workerId) {
    const jobRow = await this.Job.findByPk(jobId);
    if (!jobRow) {
      return { claimed: false, reason: "JOB_NOT_FOUND" };
    }
    const plain = jobRow.get({ plain: true });
    if (plain.status !== "PROCESSING") {
      return { claimed: false, reason: "JOB_NOT_PROCESSING" };
    }
    return await this.sequelize.transaction(async (transaction) => {
      const pendingBatch = await this.Batch.findOne({
        where: { jobId, status: "PENDING" },
        order: [["batchIndex", "ASC"]],
        transaction,
        lock: true
      });
      if (!pendingBatch) {
        return { claimed: false, reason: "NO_PENDING_BATCHES" };
      }
      const batchPlain = pendingBatch.get({ plain: true });
      const now = Date.now();
      // Optimistic claim: only succeeds if nobody bumped `version` since the read.
      const [affectedRows] = await this.Batch.update(
        {
          status: "PROCESSING",
          workerId,
          claimedAt: now,
          version: batchPlain.version + 1
        },
        {
          where: {
            id: batchPlain.id,
            version: batchPlain.version
          },
          transaction
        }
      );
      if (affectedRows === 0) {
        return { claimed: false, reason: "NO_PENDING_BATCHES" };
      }
      // Keep the job row's denormalized batch summary in sync.
      const jobBatches = parseJson(plain.batches);
      const updatedBatches = jobBatches.map((b) => b.id === batchPlain.id ? { ...b, status: "PROCESSING" } : b);
      await jobRow.update({ batches: updatedBatches }, { transaction });
      return {
        claimed: true,
        reservation: {
          jobId,
          batchId: batchPlain.id,
          batchIndex: batchPlain.batchIndex,
          workerId,
          claimedAt: now,
          recordStartIndex: batchPlain.recordStartIndex,
          recordEndIndex: batchPlain.recordEndIndex
        }
      };
    });
  }
  /**
   * Returns a batch claimed by `workerId` to the PENDING pool (e.g. on
   * worker shutdown). Silently does nothing when no batch matches the
   * (batchId, jobId, workerId) triple. Uses the same lock + version-guarded
   * UPDATE pattern as claimBatch and syncs the job row's JSON copy.
   */
  async releaseBatch(jobId, batchId, workerId) {
    await this.sequelize.transaction(async (transaction) => {
      const batch = await this.Batch.findOne({
        where: { id: batchId, jobId, workerId },
        transaction,
        lock: true
      });
      if (!batch) return;
      const batchPlain = batch.get({ plain: true });
      await this.Batch.update(
        {
          status: "PENDING",
          workerId: null,
          claimedAt: null,
          version: batchPlain.version + 1
        },
        {
          where: { id: batchId, version: batchPlain.version },
          transaction
        }
      );
      const jobRow = await this.Job.findByPk(jobId, { transaction });
      if (jobRow) {
        const plain = jobRow.get({ plain: true });
        const jobBatches = parseJson(plain.batches);
        const updatedBatches = jobBatches.map((b) => b.id === batchId ? { ...b, status: "PENDING" } : b);
        await jobRow.update({ batches: updatedBatches }, { transaction });
      }
    });
  }
  /**
   * Requeues PROCESSING batches whose claim is older than `timeoutMs`
   * (presumed dead workers). Each stale batch is reset to PENDING via a
   * version-guarded UPDATE; batches that changed concurrently are skipped.
   * Returns the number of batches actually reclaimed.
   * NOTE(review): the job-row JSON sync marks ALL stale candidates PENDING,
   * including any whose guarded UPDATE affected 0 rows — the batches table
   * stays authoritative, but the JSON copy may briefly disagree.
   */
  async reclaimStaleBatches(jobId, timeoutMs) {
    const cutoff = Date.now() - timeoutMs;
    const { Op } = await import("sequelize");
    return await this.sequelize.transaction(async (transaction) => {
      const staleBatches = await this.Batch.findAll({
        where: {
          jobId,
          status: "PROCESSING",
          claimedAt: { [Op.lt]: cutoff }
        },
        transaction,
        lock: true
      });
      if (staleBatches.length === 0) return 0;
      let reclaimed = 0;
      for (const batch of staleBatches) {
        const batchPlain = batch.get({ plain: true });
        const [affected] = await this.Batch.update(
          {
            status: "PENDING",
            workerId: null,
            claimedAt: null,
            version: batchPlain.version + 1
          },
          {
            where: { id: batchPlain.id, version: batchPlain.version },
            transaction
          }
        );
        reclaimed += affected;
      }
      if (reclaimed > 0) {
        const reclaimedIds = new Set(staleBatches.map((b) => b.get({ plain: true }).id));
        const jobRow = await this.Job.findByPk(jobId, { transaction });
        if (jobRow) {
          const plain = jobRow.get({ plain: true });
          const jobBatches = parseJson(plain.batches);
          const updatedBatches = jobBatches.map((b) => reclaimedIds.has(b.id) ? { ...b, status: "PENDING" } : b);
          await jobRow.update({ batches: updatedBatches }, { transaction });
        }
      }
      return reclaimed;
    });
  }
  /**
   * Bulk-inserts a batch's records. Plain bulkCreate (no upsert semantics),
   * so re-saving the same (jobId, recordIndex) pairs will hit the unique index.
   */
  async saveBatchRecords(jobId, batchId, records) {
    const rows = records.map((r) => toRow2(jobId, batchId, r));
    await this.Record.bulkCreate(rows);
  }
  /** Returns all records of one batch, ordered by record index. */
  async getBatchRecords(jobId, batchId) {
    const rows = await this.Record.findAll({
      where: { jobId, batchId },
      order: [["recordIndex", "ASC"]]
    });
    return rows.map((r) => toDomain2(r.get({ plain: true })));
  }
  /**
   * Summarizes a job's batch statuses via GROUP BY on the batches table.
   * `isComplete` means at least one batch exists and none is PENDING or
   * PROCESSING. Mirrors getDistributedStatusInTransaction, minus the
   * transaction parameter.
   */
  async getDistributedStatus(jobId) {
    const { fn, col } = await import("sequelize");
    const counts = await this.Batch.findAll({
      attributes: ["status", [fn("COUNT", col("status")), "count"]],
      where: { jobId },
      group: ["status"],
      raw: true
    });
    const countMap = /* @__PURE__ */ new Map();
    let total = 0;
    for (const row of counts) {
      const count = parseInt(row.count, 10);
      countMap.set(row.status, count);
      total += count;
    }
    const completed = countMap.get("COMPLETED") ?? 0;
    const failed = countMap.get("FAILED") ?? 0;
    const processing = countMap.get("PROCESSING") ?? 0;
    const pending = countMap.get("PENDING") ?? 0;
    return {
      jobId,
      totalBatches: total,
      completedBatches: completed,
      failedBatches: failed,
      processingBatches: processing,
      pendingBatches: pending,
      isComplete: total > 0 && pending === 0 && processing === 0
    };
  }
  /**
   * Finalizes a PROCESSING job whose batches are all terminal: sets status
   * to FAILED if any batch failed, otherwise COMPLETED, and stamps
   * completedAt. The conditional UPDATE (status must still be PROCESSING)
   * makes the flip race-safe — only one caller observes `true`. Returns
   * false when the job is missing, not PROCESSING, or not yet complete.
   */
  async tryFinalizeJob(jobId) {
    return await this.sequelize.transaction(async (transaction) => {
      const jobRow = await this.Job.findByPk(jobId, { transaction, lock: true });
      if (!jobRow) return false;
      const plain = jobRow.get({ plain: true });
      if (plain.status !== "PROCESSING") return false;
      const status = await this.getDistributedStatusInTransaction(jobId, transaction);
      if (!status.isComplete) return false;
      const finalStatus = status.failedBatches > 0 ? "FAILED" : "COMPLETED";
      const [affectedRows] = await this.Job.update(
        {
          status: finalStatus,
          completedAt: Date.now()
        },
        {
          where: { id: jobId, status: "PROCESSING" },
          transaction
        }
      );
      return affectedRows > 0;
    });
  }
  /**
   * Internal helper to get distributed status within an existing transaction.
   * Avoids creating a nested transaction in tryFinalizeJob.
   * Same computation as getDistributedStatus, but every query joins the
   * caller's transaction so the counts are consistent with its locks.
   */
  async getDistributedStatusInTransaction(jobId, transaction) {
    const { fn, col } = await import("sequelize");
    const counts = await this.Batch.findAll({
      attributes: ["status", [fn("COUNT", col("status")), "count"]],
      where: { jobId },
      group: ["status"],
      raw: true,
      transaction
    });
    const countMap = /* @__PURE__ */ new Map();
    let total = 0;
    for (const row of counts) {
      const count = parseInt(row.count, 10);
      countMap.set(row.status, count);
      total += count;
    }
    const completed = countMap.get("COMPLETED") ?? 0;
    const failed = countMap.get("FAILED") ?? 0;
    const processing = countMap.get("PROCESSING") ?? 0;
    const pending = countMap.get("PENDING") ?? 0;
    return {
      jobId,
      totalBatches: total,
      completedBatches: completed,
      failedBatches: failed,
      processingBatches: processing,
      pendingBatches: pending,
      isComplete: total > 0 && pending === 0 && processing === 0
    };
  }
};
|
|
611
|
+
export {
|
|
612
|
+
SequelizeStateStore
|
|
613
|
+
};
|
|
614
|
+
//# sourceMappingURL=index.js.map
|