@batchactions/distributed 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +329 -0
- package/dist/index.cjs +510 -0
- package/dist/index.cjs.map +1 -0
- package/dist/index.d.cts +143 -0
- package/dist/index.d.ts +143 -0
- package/dist/index.js +489 -0
- package/dist/index.js.map +1 -0
- package/package.json +62 -0
package/dist/index.cjs
ADDED
|
@@ -0,0 +1,510 @@
|
|
|
1
|
+
"use strict";
// esbuild-generated CommonJS interop helpers (restyled; behavior unchanged).
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Install a lazy, enumerable getter on `target` for every entry in `all`.
// Values in `all` are thunks, so the exported bindings stay live.
var __export = (target, all) => {
  for (const name in all) {
    __defProp(target, name, { get: all[name], enumerable: true });
  }
};
// Copy own properties of `from` onto `to` as live getters, skipping `except`
// and anything `to` already owns; preserves each property's enumerability.
var __copyProps = (to, from, except, desc) => {
  if (from && (typeof from === "object" || typeof from === "function")) {
    for (const key of __getOwnPropNames(from)) {
      if (__hasOwnProp.call(to, key) || key === except) continue;
      desc = __getOwnPropDesc(from, key);
      __defProp(to, key, {
        get: () => from[key],
        enumerable: !desc || desc.enumerable
      });
    }
  }
  return to;
};
// Wrap an ES-module namespace as a CommonJS exports object flagged with
// `__esModule` so downstream interop code can detect it.
var __toCommonJS = (mod) => {
  const cjsNs = __defProp({}, "__esModule", { value: true });
  return __copyProps(cjsNs, mod);
};
|
|
19
|
+
|
|
20
|
+
// src/index.ts
// Public export surface of the bundle. `__export` installs *lazy* getters,
// so `import_core4` (a hoisted `var` assigned near the bottom of this file)
// is only dereferenced when a consumer reads `isDistributedStateStore` —
// by which time its `require("@batchactions/core")` assignment has run.
var index_exports = {};
__export(index_exports, {
  DistributedImport: () => DistributedImport,
  isDistributedStateStore: () => import_core4.isDistributedStateStore
});
module.exports = __toCommonJS(index_exports);
|
|
27
|
+
|
|
28
|
+
// src/DistributedImport.ts
|
|
29
|
+
var import_core3 = require("@batchactions/core");
|
|
30
|
+
|
|
31
|
+
// src/PrepareDistributedImport.ts
|
|
32
|
+
var import_core = require("@batchactions/core");
|
|
33
|
+
var import_import = require("@batchactions/import");
|
|
34
|
+
var PrepareDistributedImport = class {
  /**
   * Phase-1 use case: stream a data source, split it into fixed-size
   * batches, and persist job + batch state so independent workers can
   * later claim batches atomically.
   *
   * @param schema - Schema definition passed to the SchemaValidator.
   * @param stateStore - Must implement DistributedStateStore (checked below).
   * @param eventBus - Bus on which `distributed:prepared` is emitted.
   * @param batchSize - Number of records per batch.
   * @throws Error if `stateStore` lacks distributed-claiming support.
   */
  constructor(schema, stateStore, eventBus, batchSize) {
    this.schema = schema;
    // Fail fast: an in-memory store cannot do atomic cross-worker claiming.
    if (!(0, import_core.isDistributedStateStore)(stateStore)) {
      throw new Error(
        "Distributed processing requires a DistributedStateStore implementation (e.g. SequelizeStateStore). The InMemoryStateStore does not support distributed batch claiming."
      );
    }
    this.stateStore = stateStore;
    this.eventBus = eventBus;
    this.validator = new import_import.SchemaValidator(schema);
    this.batchSize = batchSize;
  }
  /**
   * Materialize every record from `source` into the state store, grouped
   * into batches, then save the aggregate job state and emit
   * `distributed:prepared`.
   *
   * Note the ordering: per-batch records are persisted first, the job
   * state (with all batch metadata) is saved once at the end.
   *
   * @param source - Data source exposing an async `read()` chunk stream.
   * @param parser - Parser turning chunks into raw records.
   * @returns `{ jobId, totalRecords, totalBatches }`.
   */
  async execute(source, parser) {
    const jobId = crypto.randomUUID();
    const splitter = new import_core.BatchSplitter(this.batchSize);
    const batches = [];
    let totalRecords = 0;
    for await (const { records: rawRecords, batchIndex } of splitter.split(this.streamRecords(source, parser))) {
      const batchId = crypto.randomUUID();
      // Inclusive record-index range covered by this batch (0 if empty).
      const recordStartIndex = rawRecords[0]?.index ?? 0;
      const recordEndIndex = rawRecords[rawRecords.length - 1]?.index ?? 0;
      const batch = {
        // Metadata only — the records themselves are persisted separately
        // below via saveBatchRecords, hence the empty array here.
        ...(0, import_core.createBatch)(batchId, batchIndex, []),
        recordStartIndex,
        recordEndIndex
      };
      batches.push(batch);
      await this.stateStore.saveBatchRecords(jobId, batchId, rawRecords);
      totalRecords += rawRecords.length;
    }
    await this.stateStore.saveJobState({
      id: jobId,
      config: {
        schema: this.schema,
        batchSize: this.batchSize,
        // NOTE(review): hard-coded here regardless of caller configuration;
        // workers apply their own continueOnError at processing time.
        continueOnError: true
      },
      status: "PROCESSING",
      batches,
      totalRecords,
      startedAt: Date.now(),
      distributed: true
    });
    this.eventBus.emit({
      type: "distributed:prepared",
      jobId,
      totalRecords,
      totalBatches: batches.length,
      timestamp: Date.now()
    });
    return { jobId, totalRecords, totalBatches: batches.length };
  }
  /**
   * Async generator yielding pending records from the parsed source with a
   * monotonically increasing index. When the validator is configured with
   * `skipEmptyRows`, empty rows are dropped and consume no index.
   */
  async *streamRecords(source, parser) {
    let recordIndex = 0;
    for await (const chunk of source.read()) {
      for await (const raw of parser.parse(chunk)) {
        if (this.validator.skipEmptyRows && this.validator.isEmptyRow(raw)) {
          continue;
        }
        yield (0, import_core.createPendingRecord)(recordIndex, raw);
        recordIndex++;
      }
    }
  }
};
|
|
100
|
+
|
|
101
|
+
// src/ProcessDistributedBatch.ts
|
|
102
|
+
var import_core2 = require("@batchactions/core");
|
|
103
|
+
var import_import2 = require("@batchactions/import");
|
|
104
|
+
var ProcessDistributedBatch = class {
  /**
   * Phase-2 use case: atomically claim one batch of a prepared job, then
   * validate and process each of its records (with hooks, duplicate
   * checking, and retry), persisting per-record results and emitting
   * lifecycle events throughout.
   *
   * @param config - Batch config: schema, stateStore (must be a
   *   DistributedStateStore), continueOnError, maxRetries, retryDelayMs,
   *   hooks, duplicateChecker.
   * @param eventBus - Bus for all emitted domain events.
   * @throws Error if `config.stateStore` lacks distributed-claiming support.
   */
  constructor(config, eventBus) {
    this.config = config;
    if (!(0, import_core2.isDistributedStateStore)(config.stateStore)) {
      throw new Error("Distributed processing requires a DistributedStateStore implementation.");
    }
    this.stateStore = config.stateStore;
    this.validator = new import_import2.SchemaValidator(config.schema);
    this.eventBus = eventBus;
    // Defaults: keep going on record errors, no retries, 1s base backoff.
    this.continueOnError = config.continueOnError ?? true;
    this.maxRetries = config.maxRetries ?? 0;
    this.retryDelayMs = config.retryDelayMs ?? 1e3;
    this.hooks = config.hooks ?? null;
    this.duplicateChecker = config.duplicateChecker ?? null;
  }
  /**
   * Claim and process the next available batch of `jobId`.
   *
   * Returns `{ claimed: false, ... }` immediately when no batch could be
   * claimed. Otherwise processes every record of the claimed batch,
   * updates the batch state (COMPLETED/FAILED), attempts to finalize the
   * job, and returns counts plus `jobComplete`.
   *
   * @param jobId - Job ID returned by the prepare phase.
   * @param processor - Callback `(parsed, context)` invoked per valid record.
   * @param workerId - Identifier of the worker claiming the batch.
   */
  async execute(jobId, processor, workerId) {
    const claimResult = await this.stateStore.claimBatch(jobId, workerId);
    if (!claimResult.claimed) {
      return {
        claimed: false,
        processedCount: 0,
        failedCount: 0,
        jobComplete: false,
        jobId
      };
    }
    const { reservation } = claimResult;
    const { batchId, batchIndex } = reservation;
    this.eventBus.emit({
      type: "batch:claimed",
      jobId,
      batchId,
      batchIndex,
      workerId,
      timestamp: Date.now()
    });
    const records = await this.stateStore.getBatchRecords(jobId, batchId);
    this.eventBus.emit({
      type: "batch:started",
      jobId,
      batchId,
      batchIndex,
      recordCount: records.length,
      timestamp: Date.now()
    });
    let processedCount = 0;
    let failedCount = 0;
    let batchFailed = false;
    try {
      for (const record of records) {
        // Defensive re-check: empty rows are also skipped at prepare time.
        if (this.validator.skipEmptyRows && this.validator.isEmptyRow(record.raw)) {
          continue;
        }
        // Context passed to hooks. NOTE(review): the AbortSignal is created
        // fresh per record and nothing here ever aborts it.
        const hookCtx = {
          jobId,
          batchId,
          batchIndex,
          recordIndex: record.index,
          totalRecords: records.length,
          signal: new AbortController().signal
        };
        // Pipeline per record:
        // resolveAliases -> beforeValidate hook -> applyTransforms ->
        // validate -> duplicate check -> afterValidate hook ->
        // beforeProcess hook -> processor (with retry) -> afterProcess hook.
        let aliased = this.validator.resolveAliases(record.raw);
        if (this.hooks?.beforeValidate) {
          try {
            aliased = await this.hooks.beforeValidate(aliased, hookCtx);
          } catch (hookError) {
            const errorMsg = hookError instanceof Error ? hookError.message : String(hookError);
            await this.handleRecordFailure(record, jobId, batchId, `beforeValidate hook failed: ${errorMsg}`);
            failedCount++;
            // Throwing aborts the whole batch (caught below as batchFailed).
            if (!this.continueOnError) throw new Error(errorMsg);
            continue;
          }
        }
        const transformed = this.validator.applyTransforms(aliased);
        const validation = this.validator.validate(transformed);
        const externalDupErrors = [];
        // External duplicate check only runs on schema-valid records; a
        // checker *failure* is itself recorded as a duplicate-class error.
        if (this.duplicateChecker && validation.errors.length === 0) {
          try {
            const dupResult = await this.duplicateChecker.check(transformed, hookCtx);
            if (dupResult.isDuplicate) {
              externalDupErrors.push({
                field: "_external",
                message: `Duplicate record found${dupResult.existingId ? ` (existing ID: ${dupResult.existingId})` : ""}`,
                code: "EXTERNAL_DUPLICATE",
                value: void 0
              });
            }
          } catch (checkerError) {
            const errorMsg = checkerError instanceof Error ? checkerError.message : String(checkerError);
            externalDupErrors.push({
              field: "_external",
              message: `Duplicate check failed: ${errorMsg}`,
              code: "EXTERNAL_DUPLICATE",
              value: void 0
            });
          }
        }
        let allErrors = [...validation.errors, ...externalDupErrors];
        // afterValidate may rewrite the error list entirely (its returned
        // record's `errors` replaces `allErrors`).
        if (this.hooks?.afterValidate) {
          try {
            const tempRecord = allErrors.length > 0 ? (0, import_core2.markRecordInvalid)(record, allErrors) : (0, import_core2.markRecordValid)(record, transformed);
            const modifiedRecord = await this.hooks.afterValidate(tempRecord, hookCtx);
            allErrors = [...modifiedRecord.errors];
          } catch (hookError) {
            const errorMsg = hookError instanceof Error ? hookError.message : String(hookError);
            await this.handleRecordFailure(record, jobId, batchId, `afterValidate hook failed: ${errorMsg}`);
            failedCount++;
            if (!this.continueOnError) throw new Error(errorMsg);
            continue;
          }
        }
        // hasErrors presumably distinguishes hard errors from warnings —
        // warnings alone do not block processing (see getWarnings below).
        if ((0, import_core2.hasErrors)(allErrors)) {
          const invalidRecord = (0, import_core2.markRecordInvalid)(record, allErrors);
          failedCount++;
          await this.stateStore.saveProcessedRecord(jobId, batchId, invalidRecord);
          this.eventBus.emit({
            type: "record:failed",
            jobId,
            batchId,
            recordIndex: record.index,
            error: allErrors.map((e) => e.message).join("; "),
            record: invalidRecord,
            timestamp: Date.now()
          });
          if (!this.continueOnError) throw new Error(`Validation failed for record ${String(record.index)}`);
          continue;
        }
        const warnings = (0, import_core2.getWarnings)(allErrors);
        const validRecord = (0, import_core2.markRecordValid)(record, transformed, warnings.length > 0 ? warnings : void 0);
        // Separate (identical-shaped) context handed to the processor.
        const context = {
          jobId,
          batchId,
          batchIndex,
          recordIndex: record.index,
          totalRecords: records.length,
          signal: new AbortController().signal
        };
        let parsedForProcessor = validRecord.parsed;
        if (this.hooks?.beforeProcess) {
          try {
            parsedForProcessor = await this.hooks.beforeProcess(parsedForProcessor, hookCtx);
          } catch (hookError) {
            const errorMsg = hookError instanceof Error ? hookError.message : String(hookError);
            await this.handleRecordFailure(record, jobId, batchId, `beforeProcess hook failed: ${errorMsg}`);
            failedCount++;
            if (!this.continueOnError) throw new Error(errorMsg);
            continue;
          }
        }
        const recordForProcessor = { ...validRecord, parsed: parsedForProcessor };
        const result = await this.executeWithRetry(recordForProcessor, context, processor, jobId, batchId);
        if (result.success) {
          processedCount++;
          const processedRecord = {
            ...recordForProcessor,
            status: "processed",
            retryCount: result.attempts - 1
          };
          await this.stateStore.saveProcessedRecord(jobId, batchId, processedRecord);
          this.eventBus.emit({
            type: "record:processed",
            jobId,
            batchId,
            recordIndex: record.index,
            timestamp: Date.now()
          });
          if (this.hooks?.afterProcess) {
            try {
              await this.hooks.afterProcess(processedRecord, hookCtx);
            } catch (hookError) {
              const errorMsg = hookError instanceof Error ? hookError.message : String(hookError);
              // Roll the success back: the record is re-saved as failed and
              // the counters are adjusted accordingly.
              processedCount--;
              const failedAfterHook = (0, import_core2.markRecordFailed)(recordForProcessor, `afterProcess hook failed: ${errorMsg}`);
              failedCount++;
              await this.stateStore.saveProcessedRecord(jobId, batchId, failedAfterHook);
              this.eventBus.emit({
                type: "record:failed",
                jobId,
                batchId,
                recordIndex: record.index,
                error: errorMsg,
                record: failedAfterHook,
                timestamp: Date.now()
              });
              if (!this.continueOnError) throw new Error(errorMsg);
            }
          }
        } else {
          const failedRecord = (0, import_core2.markRecordFailed)(validRecord, result.error);
          const failedWithRetries = { ...failedRecord, retryCount: result.attempts - 1 };
          failedCount++;
          await this.stateStore.saveProcessedRecord(jobId, batchId, failedWithRetries);
          this.eventBus.emit({
            type: "record:failed",
            jobId,
            batchId,
            recordIndex: record.index,
            error: result.error,
            record: failedWithRetries,
            timestamp: Date.now()
          });
          if (!this.continueOnError) throw new Error(result.error);
        }
      }
    } catch {
      // Any throw above (continueOnError=false paths) aborts the batch.
      // The error object is intentionally discarded here: the triggering
      // failure was already persisted and emitted as a record:failed event.
      batchFailed = true;
    }
    const batchStatus = batchFailed ? "FAILED" : "COMPLETED";
    await this.stateStore.updateBatchState(jobId, batchId, {
      batchId,
      status: batchStatus,
      processedCount,
      failedCount
    });
    if (batchFailed) {
      this.eventBus.emit({
        type: "batch:failed",
        jobId,
        batchId,
        batchIndex,
        error: `Batch failed with ${String(failedCount)} errors`,
        timestamp: Date.now()
      });
    } else {
      this.eventBus.emit({
        type: "batch:completed",
        jobId,
        batchId,
        batchIndex,
        processedCount,
        failedCount,
        totalCount: records.length,
        timestamp: Date.now()
      });
    }
    // tryFinalizeJob is expected to return true for exactly one worker
    // (the finalizer), which then emits the single job:completed event.
    const jobComplete = await this.stateStore.tryFinalizeJob(jobId);
    if (jobComplete) {
      const status = await this.stateStore.getDistributedStatus(jobId);
      this.eventBus.emit({
        type: "job:completed",
        jobId,
        summary: {
          // NOTE(review): summary counts are in *batches*, not records,
          // and skipped/elapsedMs are placeholders (0) here.
          total: status.totalBatches,
          processed: status.completedBatches,
          failed: status.failedBatches,
          skipped: 0,
          elapsedMs: 0
        },
        timestamp: Date.now()
      });
    }
    return {
      claimed: true,
      batchId,
      batchIndex,
      processedCount,
      failedCount,
      jobComplete,
      jobId
    };
  }
  /**
   * Run `processor` for one record with up to `maxRetries` retries and
   * exponential backoff (retryDelayMs * 2^(attempt-1)). Emits
   * `record:retried` before each re-attempt.
   *
   * @returns `{ success, attempts }` on success, or
   *   `{ success: false, attempts, error }` after exhausting attempts.
   */
  async executeWithRetry(validRecord, context, processor, jobId, batchId) {
    const maxAttempts = 1 + this.maxRetries;
    let lastError = "";
    for (let attempt = 1; attempt <= maxAttempts; attempt++) {
      try {
        await processor(validRecord.parsed, context);
        return { success: true, attempts: attempt };
      } catch (error) {
        lastError = error instanceof Error ? error.message : String(error);
        if (attempt < maxAttempts) {
          this.eventBus.emit({
            type: "record:retried",
            jobId,
            batchId,
            recordIndex: validRecord.index,
            attempt,
            maxRetries: this.maxRetries,
            error: lastError,
            timestamp: Date.now()
          });
          const delay = this.retryDelayMs * Math.pow(2, attempt - 1);
          await this.sleep(delay);
        }
      }
    }
    return { success: false, attempts: maxAttempts, error: lastError };
  }
  /**
   * Persist a record as failed with `errorMsg` and emit `record:failed`.
   * Shared by the hook-failure paths in execute().
   */
  async handleRecordFailure(record, jobId, batchId, errorMsg) {
    const failedRecord = (0, import_core2.markRecordFailed)(record, errorMsg);
    await this.stateStore.saveProcessedRecord(jobId, batchId, failedRecord);
    this.eventBus.emit({
      type: "record:failed",
      jobId,
      batchId,
      recordIndex: record.index,
      error: errorMsg,
      record: failedRecord,
      timestamp: Date.now()
    });
  }
  // Promise-based delay used by the retry backoff.
  sleep(ms) {
    return new Promise((resolve) => {
      setTimeout(resolve, ms);
    });
  }
};
|
|
411
|
+
|
|
412
|
+
// src/DistributedImport.ts
|
|
413
|
+
var DistributedImport = class {
  /**
   * Facade for two-phase distributed imports: `prepare()` materializes a
   * job into the state store; `processWorkerBatch()` lets workers claim
   * and process batches until the job is finalized.
   *
   * @param config - Import configuration. `config.stateStore` must
   *   implement `DistributedStateStore`, otherwise the constructor throws.
   */
  constructor(config) {
    const { stateStore } = config;
    if (!(0, import_core3.isDistributedStateStore)(stateStore)) {
      throw new Error(
        "DistributedImport requires a DistributedStateStore implementation (e.g. SequelizeStateStore). The InMemoryStateStore does not support distributed batch claiming."
      );
    }
    this.config = config;
    this.eventBus = new import_core3.EventBus();
  }
  /**
   * Phase 1: prepare the job for distributed processing.
   *
   * Streams the whole source, persists every record via the state store,
   * and creates batch metadata. Run this once, from a single orchestrator.
   *
   * @param source - Data source to read from.
   * @param parser - Parser for the source format.
   * @returns Preparation result: `{ jobId, totalRecords, totalBatches }`.
   */
  async prepare(source, parser) {
    const { schema, stateStore, batchSize } = this.config;
    const prepare = new PrepareDistributedImport(
      schema,
      stateStore,
      this.eventBus,
      batchSize ?? 100
    );
    return prepare.execute(source, parser);
  }
  /**
   * Phase 2: claim and process the next available batch.
   *
   * First reclaims batches stuck in PROCESSING longer than
   * `staleBatchTimeoutMs` (default 900000 ms), then atomically claims an
   * unclaimed batch, loads its records, validates and processes them.
   * Returns immediately with `claimed: false` when nothing is available.
   *
   * Call from each worker in a loop until `claimed` is `false` or
   * `jobComplete` is `true`.
   *
   * @param jobId - The job ID returned by `prepare()`.
   * @param processor - Invoked per valid record; must be idempotent.
   * @param workerId - Unique worker identifier (e.g. Lambda request ID).
   * @returns Batch details, counts, and completion status.
   */
  async processWorkerBatch(jobId, processor, workerId) {
    const store = this.config.stateStore;
    if ((0, import_core3.isDistributedStateStore)(store)) {
      const staleTimeout = this.config.staleBatchTimeoutMs ?? 9e5;
      await store.reclaimStaleBatches(jobId, staleTimeout);
    }
    const batchConfig = {
      schema: this.config.schema,
      stateStore: this.config.stateStore,
      continueOnError: this.config.continueOnError,
      maxRetries: this.config.maxRetries,
      retryDelayMs: this.config.retryDelayMs,
      hooks: this.config.hooks,
      duplicateChecker: this.config.duplicateChecker
    };
    const worker = new ProcessDistributedBatch(batchConfig, this.eventBus);
    return worker.execute(jobId, processor, workerId);
  }
  /**
   * Subscribe to one domain event type.
   *
   * Events are local to this instance — each worker has its own bus. The
   * `job:completed` event fires only in the worker that finalizes the job.
   * @returns `this`, for chaining.
   */
  on(type, handler) {
    this.eventBus.on(type, handler);
    return this;
  }
  /** Subscribe to every domain event. @returns `this`, for chaining. */
  onAny(handler) {
    this.eventBus.onAny(handler);
    return this;
  }
  /** Remove a wildcard subscription. @returns `this`, for chaining. */
  offAny(handler) {
    this.eventBus.offAny(handler);
    return this;
  }
};
|
|
502
|
+
|
|
503
|
+
// src/index.ts
|
|
504
|
+
var import_core4 = require("@batchactions/core");
|
|
505
|
+
// Annotate the CommonJS export names for ESM import in node:
// The `0 &&` prefix makes this dead code — it never runs. It exists only so
// Node's static lexer can detect the named exports of this CJS bundle when
// it is imported from an ES module.
0 && (module.exports = {
  DistributedImport,
  isDistributedStateStore
});
|
|
510
|
+
//# sourceMappingURL=index.cjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/index.ts","../src/DistributedImport.ts","../src/PrepareDistributedImport.ts","../src/ProcessDistributedBatch.ts"],"sourcesContent":["// Main facade\nexport { DistributedImport } from './DistributedImport.js';\nexport type { DistributedImportConfig } from './DistributedImport.js';\n\n// Use case result types\nexport type { PrepareResult } from './PrepareDistributedImport.js';\nexport type { DistributedBatchResult, DistributedBatchConfig } from './ProcessDistributedBatch.js';\n\n// Re-export core distributed types for convenience\nexport type {\n DistributedStateStore,\n BatchReservation,\n ClaimBatchResult,\n ClaimBatchFailureReason,\n DistributedJobStatus,\n} from '@batchactions/core';\nexport { isDistributedStateStore } from '@batchactions/core';\n","import type {\n DataSource,\n StateStore,\n RecordProcessorFn,\n JobHooks,\n DomainEvent,\n EventType,\n EventPayload,\n} from '@batchactions/core';\nimport { EventBus, isDistributedStateStore } from '@batchactions/core';\nimport type { SchemaDefinition, SourceParser, DuplicateChecker } from '@batchactions/import';\nimport { PrepareDistributedImport } from './PrepareDistributedImport.js';\nimport type { PrepareResult } from './PrepareDistributedImport.js';\nimport { ProcessDistributedBatch } from './ProcessDistributedBatch.js';\nimport type { DistributedBatchResult } from './ProcessDistributedBatch.js';\n\n/** Configuration for distributed import processing. */\nexport interface DistributedImportConfig {\n /** Schema definition for validation. */\n readonly schema: SchemaDefinition;\n /** Number of records per batch. Default: 100. */\n readonly batchSize?: number;\n /** Whether to continue processing on record errors. Default: true. */\n readonly continueOnError?: boolean;\n /**\n * State store that implements `DistributedStateStore`.\n * Required — must support atomic batch claiming.\n */\n readonly stateStore: StateStore;\n /** Maximum retry attempts for processor failures. 
Default: 0. */\n readonly maxRetries?: number;\n /** Base delay in ms for retry backoff. Default: 1000. */\n readonly retryDelayMs?: number;\n /** Optional lifecycle hooks. */\n readonly hooks?: JobHooks;\n /** Optional external duplicate detection. */\n readonly duplicateChecker?: DuplicateChecker;\n /**\n * Timeout in ms for stale batch reclamation. Default: 900000 (15 min).\n * Batches stuck in PROCESSING longer than this are reclaimed for other workers.\n */\n readonly staleBatchTimeoutMs?: number;\n}\n\n/**\n * Facade for distributed parallel batch processing.\n *\n * Two-phase processing model:\n * 1. **Prepare** (single orchestrator): streams the source file, materializes\n * records in the StateStore, and registers batch boundaries.\n * 2. **Process** (N parallel workers): each worker calls `processWorkerBatch()`\n * in a loop to claim and process batches until none remain.\n *\n * @example\n * ```typescript\n * // === Orchestrator Lambda ===\n * const di = new DistributedImport(config);\n * const { jobId, totalBatches } = await di.prepare(source, parser);\n * // Fan out: send { jobId } to N worker Lambdas via SQS\n *\n * // === Worker Lambda ===\n * const di = new DistributedImport(config);\n * const workerId = context.awsRequestId;\n * while (true) {\n * const result = await di.processWorkerBatch(jobId, processor, workerId);\n * if (!result.claimed || result.jobComplete) break;\n * }\n * ```\n */\nexport class DistributedImport {\n private readonly config: DistributedImportConfig;\n private readonly eventBus: EventBus;\n\n constructor(config: DistributedImportConfig) {\n if (!isDistributedStateStore(config.stateStore)) {\n throw new Error(\n 'DistributedImport requires a DistributedStateStore implementation ' +\n '(e.g. SequelizeStateStore). 
The InMemoryStateStore does not support ' +\n 'distributed batch claiming.',\n );\n }\n this.config = config;\n this.eventBus = new EventBus();\n }\n\n /**\n * Phase 1: Prepare the job for distributed processing.\n *\n * Streams the entire source, materializes all records in the StateStore,\n * and creates batch metadata. Call this from a single orchestrator.\n *\n * @param source - Data source to read from.\n * @param parser - Parser for the source format.\n * @returns Preparation result with jobId, totalRecords, totalBatches.\n */\n async prepare(source: DataSource, parser: SourceParser): Promise<PrepareResult> {\n const useCase = new PrepareDistributedImport(\n this.config.schema,\n this.config.stateStore,\n this.eventBus,\n this.config.batchSize ?? 100,\n );\n return useCase.execute(source, parser);\n }\n\n /**\n * Phase 2: Claim and process the next available batch.\n *\n * Atomically claims an unclaimed batch, loads its records from the\n * StateStore, validates and processes them. Returns immediately if\n * no batches are available.\n *\n * Before claiming, reclaims any stale batches that have been stuck\n * in PROCESSING longer than `staleBatchTimeoutMs`.\n *\n * Call this from each worker in a loop until `claimed` is `false`\n * or `jobComplete` is `true`.\n *\n * @param jobId - The job ID returned by `prepare()`.\n * @param processor - Callback invoked for each valid record. Must be idempotent.\n * @param workerId - Unique identifier for this worker (e.g. Lambda request ID).\n * @returns Result with batch details, counts, and completion status.\n */\n async processWorkerBatch(\n jobId: string,\n processor: RecordProcessorFn,\n workerId: string,\n ): Promise<DistributedBatchResult> {\n // Reclaim stale batches before claiming\n const timeoutMs = this.config.staleBatchTimeoutMs ?? 
900_000;\n const store = this.config.stateStore;\n if (isDistributedStateStore(store)) {\n await store.reclaimStaleBatches(jobId, timeoutMs);\n }\n\n const useCase = new ProcessDistributedBatch(\n {\n schema: this.config.schema,\n stateStore: this.config.stateStore,\n continueOnError: this.config.continueOnError,\n maxRetries: this.config.maxRetries,\n retryDelayMs: this.config.retryDelayMs,\n hooks: this.config.hooks,\n duplicateChecker: this.config.duplicateChecker,\n },\n this.eventBus,\n );\n return useCase.execute(jobId, processor, workerId);\n }\n\n /**\n * Subscribe to a specific domain event type.\n *\n * Events are local to this `DistributedImport` instance. Each worker\n * has its own event bus. The `job:completed` event is only emitted\n * by the worker that finalizes the job (exactly-once).\n */\n on<T extends EventType>(type: T, handler: (event: EventPayload<T>) => void): this {\n this.eventBus.on(type, handler);\n return this;\n }\n\n /** Subscribe to all domain events. */\n onAny(handler: (event: DomainEvent) => void): this {\n this.eventBus.onAny(handler);\n return this;\n }\n\n /** Unsubscribe a wildcard handler. */\n offAny(handler: (event: DomainEvent) => void): this {\n this.eventBus.offAny(handler);\n return this;\n }\n}\n","import type { DataSource, StateStore, DistributedStateStore, ProcessedRecord, Batch } from '@batchactions/core';\nimport type { EventBus } from '@batchactions/core';\nimport { BatchSplitter, createPendingRecord, createBatch, isDistributedStateStore } from '@batchactions/core';\nimport type { SchemaDefinition, SourceParser } from '@batchactions/import';\nimport { SchemaValidator } from '@batchactions/import';\n\n/** Result of the prepare phase for distributed processing. */\nexport interface PrepareResult {\n /** Unique job identifier. Use this to dispatch workers. */\n readonly jobId: string;\n /** Total number of records found in the source. */\n readonly totalRecords: number;\n /** Total number of batches created. 
*/\n readonly totalBatches: number;\n}\n\n/**\n * Phase 1 of distributed processing: stream the source, materialize records\n * in the StateStore, and register batch boundaries.\n *\n * After this phase, multiple workers can call `ProcessDistributedBatch`\n * to claim and process individual batches in parallel.\n */\nexport class PrepareDistributedImport {\n private readonly stateStore: DistributedStateStore;\n private readonly eventBus: EventBus;\n private readonly validator: SchemaValidator;\n private readonly batchSize: number;\n\n constructor(\n private readonly schema: SchemaDefinition,\n stateStore: StateStore,\n eventBus: EventBus,\n batchSize: number,\n ) {\n if (!isDistributedStateStore(stateStore)) {\n throw new Error(\n 'Distributed processing requires a DistributedStateStore implementation ' +\n '(e.g. SequelizeStateStore). The InMemoryStateStore does not support distributed batch claiming.',\n );\n }\n this.stateStore = stateStore;\n this.eventBus = eventBus;\n this.validator = new SchemaValidator(schema);\n this.batchSize = batchSize;\n }\n\n async execute(source: DataSource, parser: SourceParser): Promise<PrepareResult> {\n const jobId = crypto.randomUUID();\n const splitter = new BatchSplitter(this.batchSize);\n const batches: Batch[] = [];\n let totalRecords = 0;\n\n for await (const { records: rawRecords, batchIndex } of splitter.split(this.streamRecords(source, parser))) {\n const batchId = crypto.randomUUID();\n const recordStartIndex = rawRecords[0]?.index ?? 0;\n const recordEndIndex = rawRecords[rawRecords.length - 1]?.index ?? 
0;\n\n const batch: Batch = {\n ...createBatch(batchId, batchIndex, []),\n recordStartIndex,\n recordEndIndex,\n };\n batches.push(batch);\n\n await this.stateStore.saveBatchRecords(jobId, batchId, rawRecords);\n totalRecords += rawRecords.length;\n }\n\n await this.stateStore.saveJobState({\n id: jobId,\n config: {\n schema: this.schema as unknown as Record<string, unknown>,\n batchSize: this.batchSize,\n continueOnError: true,\n },\n status: 'PROCESSING',\n batches,\n totalRecords,\n startedAt: Date.now(),\n distributed: true,\n });\n\n this.eventBus.emit({\n type: 'distributed:prepared',\n jobId,\n totalRecords,\n totalBatches: batches.length,\n timestamp: Date.now(),\n });\n\n return { jobId, totalRecords, totalBatches: batches.length };\n }\n\n private async *streamRecords(source: DataSource, parser: SourceParser): AsyncIterable<ProcessedRecord> {\n let recordIndex = 0;\n\n for await (const chunk of source.read()) {\n for await (const raw of parser.parse(chunk)) {\n if (this.validator.skipEmptyRows && this.validator.isEmptyRow(raw)) {\n continue;\n }\n yield createPendingRecord(recordIndex, raw);\n recordIndex++;\n }\n }\n }\n}\n","import type {\n StateStore,\n DistributedStateStore,\n ProcessedRecord,\n RecordProcessorFn,\n ProcessingContext,\n HookContext,\n JobHooks,\n ValidationError,\n} from '@batchactions/core';\nimport type { EventBus } from '@batchactions/core';\nimport {\n markRecordValid,\n markRecordInvalid,\n markRecordFailed,\n hasErrors,\n getWarnings,\n isDistributedStateStore,\n} from '@batchactions/core';\nimport type { SchemaDefinition, DuplicateChecker } from '@batchactions/import';\nimport { SchemaValidator } from '@batchactions/import';\n\n/** Result of processing a single distributed batch. */\nexport interface DistributedBatchResult {\n /** Whether a batch was successfully claimed. */\n readonly claimed: boolean;\n /** Batch ID that was processed (only if claimed). 
*/\n readonly batchId?: string;\n /** Batch index that was processed (only if claimed). */\n readonly batchIndex?: number;\n /** Records successfully processed in this batch. */\n readonly processedCount: number;\n /** Records that failed in this batch. */\n readonly failedCount: number;\n /** Whether this worker finalized the entire job. */\n readonly jobComplete: boolean;\n /** The job identifier. */\n readonly jobId: string;\n}\n\n/** Configuration for the distributed batch processor. */\nexport interface DistributedBatchConfig {\n readonly schema: SchemaDefinition;\n readonly stateStore: StateStore;\n readonly continueOnError?: boolean;\n readonly maxRetries?: number;\n readonly retryDelayMs?: number;\n readonly hooks?: JobHooks;\n readonly duplicateChecker?: DuplicateChecker;\n}\n\n/**\n * Phase 2 of distributed processing: claim and process a single batch.\n *\n * Each worker creates an instance and calls `execute()` in a loop until\n * no more batches are available (`claimed: false`) or the job is complete.\n */\nexport class ProcessDistributedBatch {\n private readonly stateStore: DistributedStateStore;\n private readonly validator: SchemaValidator;\n private readonly eventBus: EventBus;\n private readonly continueOnError: boolean;\n private readonly maxRetries: number;\n private readonly retryDelayMs: number;\n private readonly hooks: JobHooks | null;\n private readonly duplicateChecker: DuplicateChecker | null;\n\n constructor(\n private readonly config: DistributedBatchConfig,\n eventBus: EventBus,\n ) {\n if (!isDistributedStateStore(config.stateStore)) {\n throw new Error('Distributed processing requires a DistributedStateStore implementation.');\n }\n this.stateStore = config.stateStore;\n this.validator = new SchemaValidator(config.schema);\n this.eventBus = eventBus;\n this.continueOnError = config.continueOnError ?? true;\n this.maxRetries = config.maxRetries ?? 0;\n this.retryDelayMs = config.retryDelayMs ?? 1000;\n this.hooks = config.hooks ?? 
null;\n this.duplicateChecker = config.duplicateChecker ?? null;\n }\n\n async execute(jobId: string, processor: RecordProcessorFn, workerId: string): Promise<DistributedBatchResult> {\n const claimResult = await this.stateStore.claimBatch(jobId, workerId);\n\n if (!claimResult.claimed) {\n return {\n claimed: false,\n processedCount: 0,\n failedCount: 0,\n jobComplete: false,\n jobId,\n };\n }\n\n const { reservation } = claimResult;\n const { batchId, batchIndex } = reservation;\n\n this.eventBus.emit({\n type: 'batch:claimed',\n jobId,\n batchId,\n batchIndex,\n workerId,\n timestamp: Date.now(),\n });\n\n const records = await this.stateStore.getBatchRecords(jobId, batchId);\n\n this.eventBus.emit({\n type: 'batch:started',\n jobId,\n batchId,\n batchIndex,\n recordCount: records.length,\n timestamp: Date.now(),\n });\n\n let processedCount = 0;\n let failedCount = 0;\n let batchFailed = false;\n\n try {\n for (const record of records) {\n if (this.validator.skipEmptyRows && this.validator.isEmptyRow(record.raw)) {\n continue;\n }\n\n const hookCtx: HookContext = {\n jobId,\n batchId,\n batchIndex,\n recordIndex: record.index,\n totalRecords: records.length,\n signal: new AbortController().signal,\n };\n\n // --- beforeValidate hook ---\n let aliased = this.validator.resolveAliases(record.raw);\n if (this.hooks?.beforeValidate) {\n try {\n aliased = await this.hooks.beforeValidate(aliased, hookCtx);\n } catch (hookError) {\n const errorMsg = hookError instanceof Error ? 
hookError.message : String(hookError);\n await this.handleRecordFailure(record, jobId, batchId, `beforeValidate hook failed: ${errorMsg}`);\n failedCount++;\n if (!this.continueOnError) throw new Error(errorMsg);\n continue;\n }\n }\n\n // --- Validation ---\n const transformed = this.validator.applyTransforms(aliased);\n const validation = this.validator.validate(transformed);\n\n // Note: in distributed mode, in-memory uniqueness is skipped.\n // Use DuplicateChecker for cross-worker duplicate detection.\n\n // --- External duplicate check ---\n const externalDupErrors: ValidationError[] = [];\n if (this.duplicateChecker && validation.errors.length === 0) {\n try {\n const dupResult = await this.duplicateChecker.check(transformed, hookCtx);\n if (dupResult.isDuplicate) {\n externalDupErrors.push({\n field: '_external',\n message: `Duplicate record found${dupResult.existingId ? ` (existing ID: ${dupResult.existingId})` : ''}`,\n code: 'EXTERNAL_DUPLICATE',\n value: undefined,\n });\n }\n } catch (checkerError) {\n const errorMsg = checkerError instanceof Error ? checkerError.message : String(checkerError);\n externalDupErrors.push({\n field: '_external',\n message: `Duplicate check failed: ${errorMsg}`,\n code: 'EXTERNAL_DUPLICATE',\n value: undefined,\n });\n }\n }\n\n let allErrors = [...validation.errors, ...externalDupErrors];\n\n // --- afterValidate hook ---\n if (this.hooks?.afterValidate) {\n try {\n const tempRecord =\n allErrors.length > 0 ? markRecordInvalid(record, allErrors) : markRecordValid(record, transformed);\n const modifiedRecord = await this.hooks.afterValidate(tempRecord, hookCtx);\n allErrors = [...modifiedRecord.errors];\n } catch (hookError) {\n const errorMsg = hookError instanceof Error ? 
hookError.message : String(hookError);\n await this.handleRecordFailure(record, jobId, batchId, `afterValidate hook failed: ${errorMsg}`);\n failedCount++;\n if (!this.continueOnError) throw new Error(errorMsg);\n continue;\n }\n }\n\n // --- Validation result ---\n if (hasErrors(allErrors)) {\n const invalidRecord = markRecordInvalid(record, allErrors);\n failedCount++;\n await this.stateStore.saveProcessedRecord(jobId, batchId, invalidRecord);\n this.eventBus.emit({\n type: 'record:failed',\n jobId,\n batchId,\n recordIndex: record.index,\n error: allErrors.map((e) => e.message).join('; '),\n record: invalidRecord,\n timestamp: Date.now(),\n });\n if (!this.continueOnError) throw new Error(`Validation failed for record ${String(record.index)}`);\n continue;\n }\n\n // --- Warnings (non-blocking) ---\n const warnings = getWarnings(allErrors);\n const validRecord = markRecordValid(record, transformed, warnings.length > 0 ? warnings : undefined);\n const context: ProcessingContext = {\n jobId,\n batchId,\n batchIndex,\n recordIndex: record.index,\n totalRecords: records.length,\n signal: new AbortController().signal,\n };\n\n // --- beforeProcess hook ---\n let parsedForProcessor = validRecord.parsed;\n if (this.hooks?.beforeProcess) {\n try {\n parsedForProcessor = await this.hooks.beforeProcess(parsedForProcessor, hookCtx);\n } catch (hookError) {\n const errorMsg = hookError instanceof Error ? 
hookError.message : String(hookError);\n await this.handleRecordFailure(record, jobId, batchId, `beforeProcess hook failed: ${errorMsg}`);\n failedCount++;\n if (!this.continueOnError) throw new Error(errorMsg);\n continue;\n }\n }\n\n // --- Process with retry ---\n const recordForProcessor: ProcessedRecord = { ...validRecord, parsed: parsedForProcessor };\n const result = await this.executeWithRetry(recordForProcessor, context, processor, jobId, batchId);\n\n if (result.success) {\n processedCount++;\n const processedRecord: ProcessedRecord = {\n ...recordForProcessor,\n status: 'processed',\n retryCount: result.attempts - 1,\n };\n await this.stateStore.saveProcessedRecord(jobId, batchId, processedRecord);\n this.eventBus.emit({\n type: 'record:processed',\n jobId,\n batchId,\n recordIndex: record.index,\n timestamp: Date.now(),\n });\n\n // --- afterProcess hook ---\n if (this.hooks?.afterProcess) {\n try {\n await this.hooks.afterProcess(processedRecord, hookCtx);\n } catch (hookError) {\n const errorMsg = hookError instanceof Error ? 
hookError.message : String(hookError);\n processedCount--;\n const failedAfterHook = markRecordFailed(recordForProcessor, `afterProcess hook failed: ${errorMsg}`);\n failedCount++;\n await this.stateStore.saveProcessedRecord(jobId, batchId, failedAfterHook);\n this.eventBus.emit({\n type: 'record:failed',\n jobId,\n batchId,\n recordIndex: record.index,\n error: errorMsg,\n record: failedAfterHook,\n timestamp: Date.now(),\n });\n if (!this.continueOnError) throw new Error(errorMsg);\n }\n }\n } else {\n const failedRecord = markRecordFailed(validRecord, result.error);\n const failedWithRetries: ProcessedRecord = { ...failedRecord, retryCount: result.attempts - 1 };\n failedCount++;\n await this.stateStore.saveProcessedRecord(jobId, batchId, failedWithRetries);\n this.eventBus.emit({\n type: 'record:failed',\n jobId,\n batchId,\n recordIndex: record.index,\n error: result.error,\n record: failedWithRetries,\n timestamp: Date.now(),\n });\n if (!this.continueOnError) throw new Error(result.error);\n }\n }\n } catch {\n batchFailed = true;\n }\n\n // Update batch state\n const batchStatus = batchFailed ? 
'FAILED' : 'COMPLETED';\n await this.stateStore.updateBatchState(jobId, batchId, {\n batchId,\n status: batchStatus,\n processedCount,\n failedCount,\n });\n\n if (batchFailed) {\n this.eventBus.emit({\n type: 'batch:failed',\n jobId,\n batchId,\n batchIndex,\n error: `Batch failed with ${String(failedCount)} errors`,\n timestamp: Date.now(),\n });\n } else {\n this.eventBus.emit({\n type: 'batch:completed',\n jobId,\n batchId,\n batchIndex,\n processedCount,\n failedCount,\n totalCount: records.length,\n timestamp: Date.now(),\n });\n }\n\n // Try to finalize the job (exactly-once)\n const jobComplete = await this.stateStore.tryFinalizeJob(jobId);\n\n if (jobComplete) {\n const status = await this.stateStore.getDistributedStatus(jobId);\n this.eventBus.emit({\n type: 'job:completed',\n jobId,\n summary: {\n total: status.totalBatches,\n processed: status.completedBatches,\n failed: status.failedBatches,\n skipped: 0,\n elapsedMs: 0,\n },\n timestamp: Date.now(),\n });\n }\n\n return {\n claimed: true,\n batchId,\n batchIndex,\n processedCount,\n failedCount,\n jobComplete,\n jobId,\n };\n }\n\n private async executeWithRetry(\n validRecord: ProcessedRecord,\n context: ProcessingContext,\n processor: RecordProcessorFn,\n jobId: string,\n batchId: string,\n ): Promise<{ success: true; attempts: number } | { success: false; attempts: number; error: string }> {\n const maxAttempts = 1 + this.maxRetries;\n let lastError = '';\n\n for (let attempt = 1; attempt <= maxAttempts; attempt++) {\n try {\n await processor(validRecord.parsed, context);\n return { success: true, attempts: attempt };\n } catch (error) {\n lastError = error instanceof Error ? 
error.message : String(error);\n\n if (attempt < maxAttempts) {\n this.eventBus.emit({\n type: 'record:retried',\n jobId,\n batchId,\n recordIndex: validRecord.index,\n attempt,\n maxRetries: this.maxRetries,\n error: lastError,\n timestamp: Date.now(),\n });\n\n const delay = this.retryDelayMs * Math.pow(2, attempt - 1);\n await this.sleep(delay);\n }\n }\n }\n\n return { success: false, attempts: maxAttempts, error: lastError };\n }\n\n private async handleRecordFailure(\n record: ProcessedRecord,\n jobId: string,\n batchId: string,\n errorMsg: string,\n ): Promise<void> {\n const failedRecord = markRecordFailed(record, errorMsg);\n await this.stateStore.saveProcessedRecord(jobId, batchId, failedRecord);\n this.eventBus.emit({\n type: 'record:failed',\n jobId,\n batchId,\n recordIndex: record.index,\n error: errorMsg,\n record: failedRecord,\n timestamp: Date.now(),\n });\n }\n\n private sleep(ms: number): Promise<void> {\n return new Promise((resolve) => {\n setTimeout(resolve, ms);\n });\n 
}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACSA,IAAAA,eAAkD;;;ACPlD,kBAAyF;AAEzF,oBAAgC;AAmBzB,IAAM,2BAAN,MAA+B;AAAA,EAMpC,YACmB,QACjB,YACA,UACA,WACA;AAJiB;AAKjB,QAAI,KAAC,qCAAwB,UAAU,GAAG;AACxC,YAAM,IAAI;AAAA,QACR;AAAA,MAEF;AAAA,IACF;AACA,SAAK,aAAa;AAClB,SAAK,WAAW;AAChB,SAAK,YAAY,IAAI,8BAAgB,MAAM;AAC3C,SAAK,YAAY;AAAA,EACnB;AAAA,EAEA,MAAM,QAAQ,QAAoB,QAA8C;AAC9E,UAAM,QAAQ,OAAO,WAAW;AAChC,UAAM,WAAW,IAAI,0BAAc,KAAK,SAAS;AACjD,UAAM,UAAmB,CAAC;AAC1B,QAAI,eAAe;AAEnB,qBAAiB,EAAE,SAAS,YAAY,WAAW,KAAK,SAAS,MAAM,KAAK,cAAc,QAAQ,MAAM,CAAC,GAAG;AAC1G,YAAM,UAAU,OAAO,WAAW;AAClC,YAAM,mBAAmB,WAAW,CAAC,GAAG,SAAS;AACjD,YAAM,iBAAiB,WAAW,WAAW,SAAS,CAAC,GAAG,SAAS;AAEnE,YAAM,QAAe;AAAA,QACnB,OAAG,yBAAY,SAAS,YAAY,CAAC,CAAC;AAAA,QACtC;AAAA,QACA;AAAA,MACF;AACA,cAAQ,KAAK,KAAK;AAElB,YAAM,KAAK,WAAW,iBAAiB,OAAO,SAAS,UAAU;AACjE,sBAAgB,WAAW;AAAA,IAC7B;AAEA,UAAM,KAAK,WAAW,aAAa;AAAA,MACjC,IAAI;AAAA,MACJ,QAAQ;AAAA,QACN,QAAQ,KAAK;AAAA,QACb,WAAW,KAAK;AAAA,QAChB,iBAAiB;AAAA,MACnB;AAAA,MACA,QAAQ;AAAA,MACR;AAAA,MACA;AAAA,MACA,WAAW,KAAK,IAAI;AAAA,MACpB,aAAa;AAAA,IACf,CAAC;AAED,SAAK,SAAS,KAAK;AAAA,MACjB,MAAM;AAAA,MACN;AAAA,MACA;AAAA,MACA,cAAc,QAAQ;AAAA,MACtB,WAAW,KAAK,IAAI;AAAA,IACtB,CAAC;AAED,WAAO,EAAE,OAAO,cAAc,cAAc,QAAQ,OAAO;AAAA,EAC7D;AAAA,EAEA,OAAe,cAAc,QAAoB,QAAsD;AACrG,QAAI,cAAc;AAElB,qBAAiB,SAAS,OAAO,KAAK,GAAG;AACvC,uBAAiB,OAAO,OAAO,MAAM,KAAK,GAAG;AAC3C,YAAI,KAAK,UAAU,iBAAiB,KAAK,UAAU,WAAW,GAAG,GAAG;AAClE;AAAA,QACF;AACA,kBAAM,iCAAoB,aAAa,GAAG;AAC1C;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACF;;;AChGA,IAAAC,eAOO;AAEP,IAAAC,iBAAgC;AAqCzB,IAAM,0BAAN,MAA8B;AAAA,EAUnC,YACmB,QACjB,UACA;AAFiB;AAGjB,QAAI,KAAC,sCAAwB,OAAO,UAAU,GAAG;AAC/C,YAAM,IAAI,MAAM,yEAAyE;AAAA,IAC3F;AACA,SAAK,aAAa,OAAO;AACzB,SAAK,YAAY,IAAI,+BAAgB,OAAO,MAAM;AAClD,SAAK,WAAW;AAChB,SAAK,kBAAkB,OAAO,mBAAmB;AACjD,SAAK,aAAa,OAAO,cAAc;AACvC,SAAK,eAAe,OAAO,gBAAgB;AAC3C,SAAK,QAAQ,OAAO,SAAS;AAC7B,SAAK,mBAAmB,OAAO,oBAAoB;AAAA,EACrD;AAAA,EAEA,MAAM,QAAQ,OAAe,WAA8B,UAAmD;AAC5G,UAAM,cAAc,MAAM,KAAK,WAAW,WAAW,OAAO,QAAQ;AAEpE,QAAI,CAAC,YA
AY,SAAS;AACxB,aAAO;AAAA,QACL,SAAS;AAAA,QACT,gBAAgB;AAAA,QAChB,aAAa;AAAA,QACb,aAAa;AAAA,QACb;AAAA,MACF;AAAA,IACF;AAEA,UAAM,EAAE,YAAY,IAAI;AACxB,UAAM,EAAE,SAAS,WAAW,IAAI;AAEhC,SAAK,SAAS,KAAK;AAAA,MACjB,MAAM;AAAA,MACN;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,WAAW,KAAK,IAAI;AAAA,IACtB,CAAC;AAED,UAAM,UAAU,MAAM,KAAK,WAAW,gBAAgB,OAAO,OAAO;AAEpE,SAAK,SAAS,KAAK;AAAA,MACjB,MAAM;AAAA,MACN;AAAA,MACA;AAAA,MACA;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,WAAW,KAAK,IAAI;AAAA,IACtB,CAAC;AAED,QAAI,iBAAiB;AACrB,QAAI,cAAc;AAClB,QAAI,cAAc;AAElB,QAAI;AACF,iBAAW,UAAU,SAAS;AAC5B,YAAI,KAAK,UAAU,iBAAiB,KAAK,UAAU,WAAW,OAAO,GAAG,GAAG;AACzE;AAAA,QACF;AAEA,cAAM,UAAuB;AAAA,UAC3B;AAAA,UACA;AAAA,UACA;AAAA,UACA,aAAa,OAAO;AAAA,UACpB,cAAc,QAAQ;AAAA,UACtB,QAAQ,IAAI,gBAAgB,EAAE;AAAA,QAChC;AAGA,YAAI,UAAU,KAAK,UAAU,eAAe,OAAO,GAAG;AACtD,YAAI,KAAK,OAAO,gBAAgB;AAC9B,cAAI;AACF,sBAAU,MAAM,KAAK,MAAM,eAAe,SAAS,OAAO;AAAA,UAC5D,SAAS,WAAW;AAClB,kBAAM,WAAW,qBAAqB,QAAQ,UAAU,UAAU,OAAO,SAAS;AAClF,kBAAM,KAAK,oBAAoB,QAAQ,OAAO,SAAS,+BAA+B,QAAQ,EAAE;AAChG;AACA,gBAAI,CAAC,KAAK,gBAAiB,OAAM,IAAI,MAAM,QAAQ;AACnD;AAAA,UACF;AAAA,QACF;AAGA,cAAM,cAAc,KAAK,UAAU,gBAAgB,OAAO;AAC1D,cAAM,aAAa,KAAK,UAAU,SAAS,WAAW;AAMtD,cAAM,oBAAuC,CAAC;AAC9C,YAAI,KAAK,oBAAoB,WAAW,OAAO,WAAW,GAAG;AAC3D,cAAI;AACF,kBAAM,YAAY,MAAM,KAAK,iBAAiB,MAAM,aAAa,OAAO;AACxE,gBAAI,UAAU,aAAa;AACzB,gCAAkB,KAAK;AAAA,gBACrB,OAAO;AAAA,gBACP,SAAS,yBAAyB,UAAU,aAAa,kBAAkB,UAAU,UAAU,MAAM,EAAE;AAAA,gBACvG,MAAM;AAAA,gBACN,OAAO;AAAA,cACT,CAAC;AAAA,YACH;AAAA,UACF,SAAS,cAAc;AACrB,kBAAM,WAAW,wBAAwB,QAAQ,aAAa,UAAU,OAAO,YAAY;AAC3F,8BAAkB,KAAK;AAAA,cACrB,OAAO;AAAA,cACP,SAAS,2BAA2B,QAAQ;AAAA,cAC5C,MAAM;AAAA,cACN,OAAO;AAAA,YACT,CAAC;AAAA,UACH;AAAA,QACF;AAEA,YAAI,YAAY,CAAC,GAAG,WAAW,QAAQ,GAAG,iBAAiB;AAG3D,YAAI,KAAK,OAAO,eAAe;AAC7B,cAAI;AACF,kBAAM,aACJ,UAAU,SAAS,QAAI,gCAAkB,QAAQ,SAAS,QAAI,8BAAgB,QAAQ,WAAW;AACnG,kBAAM,iBAAiB,MAAM,KAAK,MAAM,cAAc,YAAY,OAAO;AACzE,wBAAY,CAAC,GAAG,eAAe,MAAM;AAAA,UACvC,SAAS,WAAW;AAClB,kBAAM,WAAW,qBAAqB,QAAQ,UAAU,UAAU,OAAO,SAAS;AAClF,kBAAM,KAAK,oBAAoB,QAAQ,OAAO,SAAS,8BAA8B,QAAQ,
EAAE;AAC/F;AACA,gBAAI,CAAC,KAAK,gBAAiB,OAAM,IAAI,MAAM,QAAQ;AACnD;AAAA,UACF;AAAA,QACF;AAGA,gBAAI,wBAAU,SAAS,GAAG;AACxB,gBAAM,oBAAgB,gCAAkB,QAAQ,SAAS;AACzD;AACA,gBAAM,KAAK,WAAW,oBAAoB,OAAO,SAAS,aAAa;AACvE,eAAK,SAAS,KAAK;AAAA,YACjB,MAAM;AAAA,YACN;AAAA,YACA;AAAA,YACA,aAAa,OAAO;AAAA,YACpB,OAAO,UAAU,IAAI,CAAC,MAAM,EAAE,OAAO,EAAE,KAAK,IAAI;AAAA,YAChD,QAAQ;AAAA,YACR,WAAW,KAAK,IAAI;AAAA,UACtB,CAAC;AACD,cAAI,CAAC,KAAK,gBAAiB,OAAM,IAAI,MAAM,gCAAgC,OAAO,OAAO,KAAK,CAAC,EAAE;AACjG;AAAA,QACF;AAGA,cAAM,eAAW,0BAAY,SAAS;AACtC,cAAM,kBAAc,8BAAgB,QAAQ,aAAa,SAAS,SAAS,IAAI,WAAW,MAAS;AACnG,cAAM,UAA6B;AAAA,UACjC;AAAA,UACA;AAAA,UACA;AAAA,UACA,aAAa,OAAO;AAAA,UACpB,cAAc,QAAQ;AAAA,UACtB,QAAQ,IAAI,gBAAgB,EAAE;AAAA,QAChC;AAGA,YAAI,qBAAqB,YAAY;AACrC,YAAI,KAAK,OAAO,eAAe;AAC7B,cAAI;AACF,iCAAqB,MAAM,KAAK,MAAM,cAAc,oBAAoB,OAAO;AAAA,UACjF,SAAS,WAAW;AAClB,kBAAM,WAAW,qBAAqB,QAAQ,UAAU,UAAU,OAAO,SAAS;AAClF,kBAAM,KAAK,oBAAoB,QAAQ,OAAO,SAAS,8BAA8B,QAAQ,EAAE;AAC/F;AACA,gBAAI,CAAC,KAAK,gBAAiB,OAAM,IAAI,MAAM,QAAQ;AACnD;AAAA,UACF;AAAA,QACF;AAGA,cAAM,qBAAsC,EAAE,GAAG,aAAa,QAAQ,mBAAmB;AACzF,cAAM,SAAS,MAAM,KAAK,iBAAiB,oBAAoB,SAAS,WAAW,OAAO,OAAO;AAEjG,YAAI,OAAO,SAAS;AAClB;AACA,gBAAM,kBAAmC;AAAA,YACvC,GAAG;AAAA,YACH,QAAQ;AAAA,YACR,YAAY,OAAO,WAAW;AAAA,UAChC;AACA,gBAAM,KAAK,WAAW,oBAAoB,OAAO,SAAS,eAAe;AACzE,eAAK,SAAS,KAAK;AAAA,YACjB,MAAM;AAAA,YACN;AAAA,YACA;AAAA,YACA,aAAa,OAAO;AAAA,YACpB,WAAW,KAAK,IAAI;AAAA,UACtB,CAAC;AAGD,cAAI,KAAK,OAAO,cAAc;AAC5B,gBAAI;AACF,oBAAM,KAAK,MAAM,aAAa,iBAAiB,OAAO;AAAA,YACxD,SAAS,WAAW;AAClB,oBAAM,WAAW,qBAAqB,QAAQ,UAAU,UAAU,OAAO,SAAS;AAClF;AACA,oBAAM,sBAAkB,+BAAiB,oBAAoB,6BAA6B,QAAQ,EAAE;AACpG;AACA,oBAAM,KAAK,WAAW,oBAAoB,OAAO,SAAS,eAAe;AACzE,mBAAK,SAAS,KAAK;AAAA,gBACjB,MAAM;AAAA,gBACN;AAAA,gBACA;AAAA,gBACA,aAAa,OAAO;AAAA,gBACpB,OAAO;AAAA,gBACP,QAAQ;AAAA,gBACR,WAAW,KAAK,IAAI;AAAA,cACtB,CAAC;AACD,kBAAI,CAAC,KAAK,gBAAiB,OAAM,IAAI,MAAM,QAAQ;AAAA,YACrD;AAAA,UACF;AAAA,QACF,OAAO;AACL,gBAAM,mBAAe,+BAAiB,aAAa,OAAO,KAAK;AAC/D,gBAAM,oBAAqC,EAAE,GAAG,cAAc,YAAY,OAAO,WAAW,EAAE;AAC9F;AACA,gBAAM,KAAK,WAAW,oBA
AoB,OAAO,SAAS,iBAAiB;AAC3E,eAAK,SAAS,KAAK;AAAA,YACjB,MAAM;AAAA,YACN;AAAA,YACA;AAAA,YACA,aAAa,OAAO;AAAA,YACpB,OAAO,OAAO;AAAA,YACd,QAAQ;AAAA,YACR,WAAW,KAAK,IAAI;AAAA,UACtB,CAAC;AACD,cAAI,CAAC,KAAK,gBAAiB,OAAM,IAAI,MAAM,OAAO,KAAK;AAAA,QACzD;AAAA,MACF;AAAA,IACF,QAAQ;AACN,oBAAc;AAAA,IAChB;AAGA,UAAM,cAAc,cAAc,WAAW;AAC7C,UAAM,KAAK,WAAW,iBAAiB,OAAO,SAAS;AAAA,MACrD;AAAA,MACA,QAAQ;AAAA,MACR;AAAA,MACA;AAAA,IACF,CAAC;AAED,QAAI,aAAa;AACf,WAAK,SAAS,KAAK;AAAA,QACjB,MAAM;AAAA,QACN;AAAA,QACA;AAAA,QACA;AAAA,QACA,OAAO,qBAAqB,OAAO,WAAW,CAAC;AAAA,QAC/C,WAAW,KAAK,IAAI;AAAA,MACtB,CAAC;AAAA,IACH,OAAO;AACL,WAAK,SAAS,KAAK;AAAA,QACjB,MAAM;AAAA,QACN;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA,YAAY,QAAQ;AAAA,QACpB,WAAW,KAAK,IAAI;AAAA,MACtB,CAAC;AAAA,IACH;AAGA,UAAM,cAAc,MAAM,KAAK,WAAW,eAAe,KAAK;AAE9D,QAAI,aAAa;AACf,YAAM,SAAS,MAAM,KAAK,WAAW,qBAAqB,KAAK;AAC/D,WAAK,SAAS,KAAK;AAAA,QACjB,MAAM;AAAA,QACN;AAAA,QACA,SAAS;AAAA,UACP,OAAO,OAAO;AAAA,UACd,WAAW,OAAO;AAAA,UAClB,QAAQ,OAAO;AAAA,UACf,SAAS;AAAA,UACT,WAAW;AAAA,QACb;AAAA,QACA,WAAW,KAAK,IAAI;AAAA,MACtB,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,MACL,SAAS;AAAA,MACT;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAc,iBACZ,aACA,SACA,WACA,OACA,SACoG;AACpG,UAAM,cAAc,IAAI,KAAK;AAC7B,QAAI,YAAY;AAEhB,aAAS,UAAU,GAAG,WAAW,aAAa,WAAW;AACvD,UAAI;AACF,cAAM,UAAU,YAAY,QAAQ,OAAO;AAC3C,eAAO,EAAE,SAAS,MAAM,UAAU,QAAQ;AAAA,MAC5C,SAAS,OAAO;AACd,oBAAY,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAEjE,YAAI,UAAU,aAAa;AACzB,eAAK,SAAS,KAAK;AAAA,YACjB,MAAM;AAAA,YACN;AAAA,YACA;AAAA,YACA,aAAa,YAAY;AAAA,YACzB;AAAA,YACA,YAAY,KAAK;AAAA,YACjB,OAAO;AAAA,YACP,WAAW,KAAK,IAAI;AAAA,UACtB,CAAC;AAED,gBAAM,QAAQ,KAAK,eAAe,KAAK,IAAI,GAAG,UAAU,CAAC;AACzD,gBAAM,KAAK,MAAM,KAAK;AAAA,QACxB;AAAA,MACF;AAAA,IACF;AAEA,WAAO,EAAE,SAAS,OAAO,UAAU,aAAa,OAAO,UAAU;AAAA,EACnE;AAAA,EAEA,MAAc,oBACZ,QACA,OACA,SACA,UACe;AACf,UAAM,mBAAe,+BAAiB,QAAQ,QAAQ;AACtD,UAAM,KAAK,WAAW,oBAAoB,OAAO,SAAS,YAAY;AACtE,SAAK,SAAS,KAAK;AAAA,MACjB,MAAM;AAAA,MACN;AAAA,MACA;AAAA,MACA,aAAa,OAAO;AAAA,MACpB,OAAO;AAAA,MACP,QAAQ
;AAAA,MACR,WAAW,KAAK,IAAI;AAAA,IACtB,CAAC;AAAA,EACH;AAAA,EAEQ,MAAM,IAA2B;AACvC,WAAO,IAAI,QAAQ,CAAC,YAAY;AAC9B,iBAAW,SAAS,EAAE;AAAA,IACxB,CAAC;AAAA,EACH;AACF;;;AF3WO,IAAM,oBAAN,MAAwB;AAAA,EAI7B,YAAY,QAAiC;AAC3C,QAAI,KAAC,sCAAwB,OAAO,UAAU,GAAG;AAC/C,YAAM,IAAI;AAAA,QACR;AAAA,MAGF;AAAA,IACF;AACA,SAAK,SAAS;AACd,SAAK,WAAW,IAAI,sBAAS;AAAA,EAC/B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYA,MAAM,QAAQ,QAAoB,QAA8C;AAC9E,UAAM,UAAU,IAAI;AAAA,MAClB,KAAK,OAAO;AAAA,MACZ,KAAK,OAAO;AAAA,MACZ,KAAK;AAAA,MACL,KAAK,OAAO,aAAa;AAAA,IAC3B;AACA,WAAO,QAAQ,QAAQ,QAAQ,MAAM;AAAA,EACvC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAoBA,MAAM,mBACJ,OACA,WACA,UACiC;AAEjC,UAAM,YAAY,KAAK,OAAO,uBAAuB;AACrD,UAAM,QAAQ,KAAK,OAAO;AAC1B,YAAI,sCAAwB,KAAK,GAAG;AAClC,YAAM,MAAM,oBAAoB,OAAO,SAAS;AAAA,IAClD;AAEA,UAAM,UAAU,IAAI;AAAA,MAClB;AAAA,QACE,QAAQ,KAAK,OAAO;AAAA,QACpB,YAAY,KAAK,OAAO;AAAA,QACxB,iBAAiB,KAAK,OAAO;AAAA,QAC7B,YAAY,KAAK,OAAO;AAAA,QACxB,cAAc,KAAK,OAAO;AAAA,QAC1B,OAAO,KAAK,OAAO;AAAA,QACnB,kBAAkB,KAAK,OAAO;AAAA,MAChC;AAAA,MACA,KAAK;AAAA,IACP;AACA,WAAO,QAAQ,QAAQ,OAAO,WAAW,QAAQ;AAAA,EACnD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,GAAwB,MAAS,SAAiD;AAChF,SAAK,SAAS,GAAG,MAAM,OAAO;AAC9B,WAAO;AAAA,EACT;AAAA;AAAA,EAGA,MAAM,SAA6C;AACjD,SAAK,SAAS,MAAM,OAAO;AAC3B,WAAO;AAAA,EACT;AAAA;AAAA,EAGA,OAAO,SAA6C;AAClD,SAAK,SAAS,OAAO,OAAO;AAC5B,WAAO;AAAA,EACT;AACF;;;AD7JA,IAAAC,eAAwC;","names":["import_core","import_core","import_import","import_core"]}
|