@coji/durably 0.14.0 → 0.15.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/{index-CDCdrLgw.d.ts → index-CXH4ozmK.d.ts} +100 -16
- package/dist/index.d.ts +6 -4
- package/dist/index.js +1000 -599
- package/dist/index.js.map +1 -1
- package/dist/plugins/index.d.ts +1 -1
- package/docs/llms.md +99 -12
- package/package.json +5 -5
package/dist/index.js
CHANGED
|
@@ -3,7 +3,7 @@ import {
|
|
|
3
3
|
} from "./chunk-L42OCQEV.js";
|
|
4
4
|
|
|
5
5
|
// src/durably.ts
|
|
6
|
-
import { Kysely } from "kysely";
|
|
6
|
+
import { Kysely, sql as sql5 } from "kysely";
|
|
7
7
|
import { monotonicFactory as monotonicFactory2 } from "ulidx";
|
|
8
8
|
|
|
9
9
|
// src/errors.ts
|
|
@@ -52,6 +52,482 @@ function toError(error) {
|
|
|
52
52
|
return error instanceof Error ? error : new Error(String(error));
|
|
53
53
|
}
|
|
54
54
|
|
|
55
|
+
// src/events.ts
|
|
56
|
+
var DOMAIN_EVENT_TYPE_VALUES = [
|
|
57
|
+
"run:trigger",
|
|
58
|
+
"run:coalesced",
|
|
59
|
+
"run:complete",
|
|
60
|
+
"run:fail",
|
|
61
|
+
"run:cancel",
|
|
62
|
+
"run:delete"
|
|
63
|
+
];
|
|
64
|
+
var DOMAIN_EVENT_TYPES = new Set(
|
|
65
|
+
DOMAIN_EVENT_TYPE_VALUES
|
|
66
|
+
);
|
|
67
|
+
function isDomainEvent(event) {
|
|
68
|
+
return DOMAIN_EVENT_TYPES.has(event.type);
|
|
69
|
+
}
|
|
70
|
+
function createEventEmitter() {
|
|
71
|
+
const listeners = /* @__PURE__ */ new Map();
|
|
72
|
+
let sequence = 0;
|
|
73
|
+
let errorHandler = null;
|
|
74
|
+
return {
|
|
75
|
+
on(type, listener) {
|
|
76
|
+
if (!listeners.has(type)) {
|
|
77
|
+
listeners.set(type, /* @__PURE__ */ new Set());
|
|
78
|
+
}
|
|
79
|
+
const typeListeners = listeners.get(type);
|
|
80
|
+
typeListeners?.add(listener);
|
|
81
|
+
return () => {
|
|
82
|
+
typeListeners?.delete(listener);
|
|
83
|
+
};
|
|
84
|
+
},
|
|
85
|
+
onError(handler) {
|
|
86
|
+
errorHandler = handler;
|
|
87
|
+
},
|
|
88
|
+
emit(event) {
|
|
89
|
+
sequence++;
|
|
90
|
+
const fullEvent = {
|
|
91
|
+
...event,
|
|
92
|
+
timestamp: (/* @__PURE__ */ new Date()).toISOString(),
|
|
93
|
+
sequence
|
|
94
|
+
};
|
|
95
|
+
const typeListeners = listeners.get(event.type);
|
|
96
|
+
if (!typeListeners) {
|
|
97
|
+
return;
|
|
98
|
+
}
|
|
99
|
+
const reportError = (error) => errorHandler?.(toError(error), fullEvent);
|
|
100
|
+
for (const listener of typeListeners) {
|
|
101
|
+
try {
|
|
102
|
+
const result = listener(fullEvent);
|
|
103
|
+
if (result != null && typeof result.then === "function") {
|
|
104
|
+
;
|
|
105
|
+
result.catch(reportError);
|
|
106
|
+
}
|
|
107
|
+
} catch (error) {
|
|
108
|
+
reportError(error);
|
|
109
|
+
}
|
|
110
|
+
}
|
|
111
|
+
}
|
|
112
|
+
};
|
|
113
|
+
}
|
|
114
|
+
|
|
115
|
+
// src/job.ts
|
|
116
|
+
import { prettifyError } from "zod";
|
|
117
|
+
var DEFAULT_WAIT_POLLING_INTERVAL_MS = 1e3;
|
|
118
|
+
var noop = () => {
|
|
119
|
+
};
|
|
120
|
+
function validateJobInputOrThrow(schema, input, context) {
|
|
121
|
+
const result = schema.safeParse(input);
|
|
122
|
+
if (!result.success) {
|
|
123
|
+
const prefix = context ? `${context}: ` : "";
|
|
124
|
+
throw new ValidationError(
|
|
125
|
+
`${prefix}Invalid input: ${prettifyError(result.error)}`
|
|
126
|
+
);
|
|
127
|
+
}
|
|
128
|
+
return result.data;
|
|
129
|
+
}
|
|
130
|
+
function createJobRegistry() {
|
|
131
|
+
const jobs = /* @__PURE__ */ new Map();
|
|
132
|
+
return {
|
|
133
|
+
set(job) {
|
|
134
|
+
jobs.set(job.name, job);
|
|
135
|
+
},
|
|
136
|
+
get(name) {
|
|
137
|
+
return jobs.get(name);
|
|
138
|
+
},
|
|
139
|
+
has(name) {
|
|
140
|
+
return jobs.has(name);
|
|
141
|
+
}
|
|
142
|
+
};
|
|
143
|
+
}
|
|
144
|
+
function waitForRunCompletion(runId, storage, eventEmitter, options, timeoutMessagePrefix = "waitForRun") {
|
|
145
|
+
return new Promise((resolve, reject) => {
|
|
146
|
+
let timeoutId;
|
|
147
|
+
let pollIntervalId;
|
|
148
|
+
let resolved = false;
|
|
149
|
+
let pollInFlight = false;
|
|
150
|
+
const unsubscribes = [];
|
|
151
|
+
const pollingMs = options?.pollingIntervalMs ?? DEFAULT_WAIT_POLLING_INTERVAL_MS;
|
|
152
|
+
if (!Number.isFinite(pollingMs) || pollingMs <= 0) {
|
|
153
|
+
throw new ValidationError(
|
|
154
|
+
"pollingIntervalMs must be a positive finite number"
|
|
155
|
+
);
|
|
156
|
+
}
|
|
157
|
+
const cleanup = () => {
|
|
158
|
+
if (resolved) return;
|
|
159
|
+
resolved = true;
|
|
160
|
+
for (const unsub of unsubscribes) unsub();
|
|
161
|
+
if (timeoutId !== void 0) {
|
|
162
|
+
clearTimeout(timeoutId);
|
|
163
|
+
timeoutId = void 0;
|
|
164
|
+
}
|
|
165
|
+
if (pollIntervalId !== void 0) {
|
|
166
|
+
clearInterval(pollIntervalId);
|
|
167
|
+
pollIntervalId = void 0;
|
|
168
|
+
}
|
|
169
|
+
};
|
|
170
|
+
const settleFromStorage = (run) => {
|
|
171
|
+
if (resolved) return;
|
|
172
|
+
if (!run) {
|
|
173
|
+
cleanup();
|
|
174
|
+
reject(new NotFoundError(`Run not found: ${runId}`));
|
|
175
|
+
return;
|
|
176
|
+
}
|
|
177
|
+
if (run.status === "completed") {
|
|
178
|
+
cleanup();
|
|
179
|
+
resolve(run);
|
|
180
|
+
return;
|
|
181
|
+
}
|
|
182
|
+
if (run.status === "failed") {
|
|
183
|
+
cleanup();
|
|
184
|
+
reject(new Error(run.error || "Run failed"));
|
|
185
|
+
return;
|
|
186
|
+
}
|
|
187
|
+
if (run.status === "cancelled") {
|
|
188
|
+
cleanup();
|
|
189
|
+
reject(new CancelledError(runId));
|
|
190
|
+
return;
|
|
191
|
+
}
|
|
192
|
+
};
|
|
193
|
+
const poll = () => {
|
|
194
|
+
if (resolved || pollInFlight) return;
|
|
195
|
+
pollInFlight = true;
|
|
196
|
+
void storage.getRun(runId).then((run) => {
|
|
197
|
+
if (resolved) return;
|
|
198
|
+
settleFromStorage(run);
|
|
199
|
+
}).catch((err) => {
|
|
200
|
+
if (resolved) return;
|
|
201
|
+
cleanup();
|
|
202
|
+
reject(toError(err));
|
|
203
|
+
}).finally(() => {
|
|
204
|
+
pollInFlight = false;
|
|
205
|
+
});
|
|
206
|
+
};
|
|
207
|
+
unsubscribes.push(
|
|
208
|
+
eventEmitter.on("run:complete", (event) => {
|
|
209
|
+
if (event.runId !== runId || resolved) return;
|
|
210
|
+
cleanup();
|
|
211
|
+
storage.getRun(runId).then((run) => {
|
|
212
|
+
if (run) resolve(run);
|
|
213
|
+
else reject(new NotFoundError(`Run not found: ${runId}`));
|
|
214
|
+
}).catch((err) => reject(toError(err)));
|
|
215
|
+
})
|
|
216
|
+
);
|
|
217
|
+
unsubscribes.push(
|
|
218
|
+
eventEmitter.on("run:fail", (event) => {
|
|
219
|
+
if (event.runId !== runId || resolved) return;
|
|
220
|
+
cleanup();
|
|
221
|
+
reject(new Error(event.error));
|
|
222
|
+
})
|
|
223
|
+
);
|
|
224
|
+
unsubscribes.push(
|
|
225
|
+
eventEmitter.on("run:cancel", (event) => {
|
|
226
|
+
if (event.runId !== runId || resolved) return;
|
|
227
|
+
cleanup();
|
|
228
|
+
reject(new CancelledError(runId));
|
|
229
|
+
})
|
|
230
|
+
);
|
|
231
|
+
if (options?.onProgress) {
|
|
232
|
+
const onProgress = options.onProgress;
|
|
233
|
+
unsubscribes.push(
|
|
234
|
+
eventEmitter.on("run:progress", (event) => {
|
|
235
|
+
if (event.runId !== runId || resolved) return;
|
|
236
|
+
void Promise.resolve(onProgress(event.progress)).catch(noop);
|
|
237
|
+
})
|
|
238
|
+
);
|
|
239
|
+
}
|
|
240
|
+
if (options?.onLog) {
|
|
241
|
+
const onLog = options.onLog;
|
|
242
|
+
unsubscribes.push(
|
|
243
|
+
eventEmitter.on("log:write", (event) => {
|
|
244
|
+
if (event.runId !== runId || resolved) return;
|
|
245
|
+
const { level, message, data, stepName } = event;
|
|
246
|
+
void Promise.resolve(onLog({ level, message, data, stepName })).catch(
|
|
247
|
+
noop
|
|
248
|
+
);
|
|
249
|
+
})
|
|
250
|
+
);
|
|
251
|
+
}
|
|
252
|
+
storage.getRun(runId).then((currentRun) => {
|
|
253
|
+
if (resolved) return;
|
|
254
|
+
if (!currentRun) {
|
|
255
|
+
cleanup();
|
|
256
|
+
reject(new NotFoundError(`Run not found: ${runId}`));
|
|
257
|
+
return;
|
|
258
|
+
}
|
|
259
|
+
if (currentRun.status === "completed") {
|
|
260
|
+
cleanup();
|
|
261
|
+
resolve(currentRun);
|
|
262
|
+
return;
|
|
263
|
+
}
|
|
264
|
+
if (currentRun.status === "failed") {
|
|
265
|
+
cleanup();
|
|
266
|
+
reject(new Error(currentRun.error || "Run failed"));
|
|
267
|
+
return;
|
|
268
|
+
}
|
|
269
|
+
if (currentRun.status === "cancelled") {
|
|
270
|
+
cleanup();
|
|
271
|
+
reject(new CancelledError(runId));
|
|
272
|
+
return;
|
|
273
|
+
}
|
|
274
|
+
pollIntervalId = setInterval(poll, pollingMs);
|
|
275
|
+
}).catch((error) => {
|
|
276
|
+
if (resolved) return;
|
|
277
|
+
cleanup();
|
|
278
|
+
reject(toError(error));
|
|
279
|
+
});
|
|
280
|
+
if (options?.timeout !== void 0) {
|
|
281
|
+
timeoutId = setTimeout(() => {
|
|
282
|
+
if (!resolved) {
|
|
283
|
+
cleanup();
|
|
284
|
+
reject(
|
|
285
|
+
new Error(
|
|
286
|
+
`${timeoutMessagePrefix} timeout after ${options.timeout}ms`
|
|
287
|
+
)
|
|
288
|
+
);
|
|
289
|
+
}
|
|
290
|
+
}, options.timeout);
|
|
291
|
+
}
|
|
292
|
+
});
|
|
293
|
+
}
|
|
294
|
+
function createJobHandle(jobDef, storage, eventEmitter, registry, labelsSchema, pollingIntervalMs) {
|
|
295
|
+
const existingJob = registry.get(jobDef.name);
|
|
296
|
+
if (existingJob) {
|
|
297
|
+
if (existingJob.jobDef === jobDef) {
|
|
298
|
+
return existingJob.handle;
|
|
299
|
+
}
|
|
300
|
+
throw new Error(
|
|
301
|
+
`Job "${jobDef.name}" is already registered with a different definition`
|
|
302
|
+
);
|
|
303
|
+
}
|
|
304
|
+
const inputSchema = jobDef.input;
|
|
305
|
+
const outputSchema = jobDef.output;
|
|
306
|
+
function validateCoalesceOption(coalesce, concurrencyKey, context) {
|
|
307
|
+
if (coalesce === void 0) return;
|
|
308
|
+
const suffix = context ? ` ${context}` : "";
|
|
309
|
+
if (coalesce !== "skip") {
|
|
310
|
+
throw new ValidationError(
|
|
311
|
+
`Invalid coalesce value${suffix}: '${coalesce}'. Valid values: 'skip'`
|
|
312
|
+
);
|
|
313
|
+
}
|
|
314
|
+
if (!concurrencyKey) {
|
|
315
|
+
throw new ValidationError(`coalesce requires concurrencyKey${suffix}`);
|
|
316
|
+
}
|
|
317
|
+
}
|
|
318
|
+
function emitDispositionEvent(disposition, run, input, labels) {
|
|
319
|
+
if (disposition === "created") {
|
|
320
|
+
eventEmitter.emit({
|
|
321
|
+
type: "run:trigger",
|
|
322
|
+
runId: run.id,
|
|
323
|
+
jobName: jobDef.name,
|
|
324
|
+
input,
|
|
325
|
+
labels: run.labels
|
|
326
|
+
});
|
|
327
|
+
} else if (disposition === "coalesced") {
|
|
328
|
+
eventEmitter.emit({
|
|
329
|
+
type: "run:coalesced",
|
|
330
|
+
runId: run.id,
|
|
331
|
+
jobName: jobDef.name,
|
|
332
|
+
labels: run.labels,
|
|
333
|
+
skippedInput: input,
|
|
334
|
+
skippedLabels: labels ?? {}
|
|
335
|
+
});
|
|
336
|
+
}
|
|
337
|
+
}
|
|
338
|
+
const handle = {
|
|
339
|
+
name: jobDef.name,
|
|
340
|
+
async trigger(input, options) {
|
|
341
|
+
validateCoalesceOption(options?.coalesce, options?.concurrencyKey);
|
|
342
|
+
const validatedInput = validateJobInputOrThrow(inputSchema, input);
|
|
343
|
+
const validatedLabels = labelsSchema && options?.labels ? validateJobInputOrThrow(labelsSchema, options.labels, "labels") : options?.labels;
|
|
344
|
+
const { run, disposition } = await storage.enqueue({
|
|
345
|
+
jobName: jobDef.name,
|
|
346
|
+
input: validatedInput,
|
|
347
|
+
idempotencyKey: options?.idempotencyKey,
|
|
348
|
+
concurrencyKey: options?.concurrencyKey,
|
|
349
|
+
labels: validatedLabels,
|
|
350
|
+
coalesce: options?.coalesce
|
|
351
|
+
});
|
|
352
|
+
emitDispositionEvent(
|
|
353
|
+
disposition,
|
|
354
|
+
run,
|
|
355
|
+
validatedInput,
|
|
356
|
+
validatedLabels
|
|
357
|
+
);
|
|
358
|
+
return { ...run, disposition };
|
|
359
|
+
},
|
|
360
|
+
async triggerAndWait(input, options) {
|
|
361
|
+
const run = await this.trigger(input, options);
|
|
362
|
+
const completedRun = await waitForRunCompletion(
|
|
363
|
+
run.id,
|
|
364
|
+
storage,
|
|
365
|
+
eventEmitter,
|
|
366
|
+
{
|
|
367
|
+
...options,
|
|
368
|
+
pollingIntervalMs: options?.pollingIntervalMs ?? pollingIntervalMs
|
|
369
|
+
},
|
|
370
|
+
"triggerAndWait"
|
|
371
|
+
);
|
|
372
|
+
return {
|
|
373
|
+
id: run.id,
|
|
374
|
+
output: completedRun.output,
|
|
375
|
+
disposition: run.disposition
|
|
376
|
+
};
|
|
377
|
+
},
|
|
378
|
+
async batchTrigger(inputs) {
|
|
379
|
+
if (inputs.length === 0) {
|
|
380
|
+
return [];
|
|
381
|
+
}
|
|
382
|
+
const normalized = inputs.map((item) => {
|
|
383
|
+
if (item && typeof item === "object" && "input" in item) {
|
|
384
|
+
return item;
|
|
385
|
+
}
|
|
386
|
+
return { input: item, options: void 0 };
|
|
387
|
+
});
|
|
388
|
+
const validated = [];
|
|
389
|
+
for (let i = 0; i < normalized.length; i++) {
|
|
390
|
+
const opts = normalized[i].options;
|
|
391
|
+
validateCoalesceOption(
|
|
392
|
+
opts?.coalesce,
|
|
393
|
+
opts?.concurrencyKey,
|
|
394
|
+
`at index ${i}`
|
|
395
|
+
);
|
|
396
|
+
const validatedInput = validateJobInputOrThrow(
|
|
397
|
+
inputSchema,
|
|
398
|
+
normalized[i].input,
|
|
399
|
+
`at index ${i}`
|
|
400
|
+
);
|
|
401
|
+
const validatedLabels = labelsSchema && opts?.labels ? validateJobInputOrThrow(
|
|
402
|
+
labelsSchema,
|
|
403
|
+
opts.labels,
|
|
404
|
+
`labels at index ${i}`
|
|
405
|
+
) : opts?.labels;
|
|
406
|
+
validated.push({
|
|
407
|
+
input: validatedInput,
|
|
408
|
+
options: opts ? { ...opts, labels: validatedLabels } : opts
|
|
409
|
+
});
|
|
410
|
+
}
|
|
411
|
+
const results = await storage.enqueueMany(
|
|
412
|
+
validated.map((v) => ({
|
|
413
|
+
jobName: jobDef.name,
|
|
414
|
+
input: v.input,
|
|
415
|
+
idempotencyKey: v.options?.idempotencyKey,
|
|
416
|
+
concurrencyKey: v.options?.concurrencyKey,
|
|
417
|
+
labels: v.options?.labels,
|
|
418
|
+
coalesce: v.options?.coalesce
|
|
419
|
+
}))
|
|
420
|
+
);
|
|
421
|
+
for (let i = 0; i < results.length; i++) {
|
|
422
|
+
emitDispositionEvent(
|
|
423
|
+
results[i].disposition,
|
|
424
|
+
results[i].run,
|
|
425
|
+
validated[i].input,
|
|
426
|
+
validated[i].options?.labels
|
|
427
|
+
);
|
|
428
|
+
}
|
|
429
|
+
return results.map(
|
|
430
|
+
(r) => ({
|
|
431
|
+
...r.run,
|
|
432
|
+
disposition: r.disposition
|
|
433
|
+
})
|
|
434
|
+
);
|
|
435
|
+
},
|
|
436
|
+
async getRun(id) {
|
|
437
|
+
const run = await storage.getRun(id);
|
|
438
|
+
if (!run || run.jobName !== jobDef.name) {
|
|
439
|
+
return null;
|
|
440
|
+
}
|
|
441
|
+
return run;
|
|
442
|
+
},
|
|
443
|
+
async getRuns(filter) {
|
|
444
|
+
const runs = await storage.getRuns({
|
|
445
|
+
...filter,
|
|
446
|
+
jobName: jobDef.name
|
|
447
|
+
});
|
|
448
|
+
return runs;
|
|
449
|
+
}
|
|
450
|
+
};
|
|
451
|
+
registry.set({
|
|
452
|
+
name: jobDef.name,
|
|
453
|
+
inputSchema,
|
|
454
|
+
outputSchema,
|
|
455
|
+
labelsSchema,
|
|
456
|
+
fn: jobDef.run,
|
|
457
|
+
jobDef,
|
|
458
|
+
handle
|
|
459
|
+
});
|
|
460
|
+
return handle;
|
|
461
|
+
}
|
|
462
|
+
|
|
463
|
+
// src/migrations.ts
|
|
464
|
+
import { sql } from "kysely";
|
|
465
|
+
var migrations = [
|
|
466
|
+
{
|
|
467
|
+
version: 1,
|
|
468
|
+
up: async (db) => {
|
|
469
|
+
await db.schema.createTable("durably_runs").ifNotExists().addColumn("id", "text", (col) => col.primaryKey()).addColumn("job_name", "text", (col) => col.notNull()).addColumn("input", "text", (col) => col.notNull()).addColumn("status", "text", (col) => col.notNull()).addColumn("idempotency_key", "text").addColumn("concurrency_key", "text").addColumn("labels", "text", (col) => col.notNull().defaultTo("{}")).addColumn(
|
|
470
|
+
"current_step_index",
|
|
471
|
+
"integer",
|
|
472
|
+
(col) => col.notNull().defaultTo(0)
|
|
473
|
+
).addColumn(
|
|
474
|
+
"completed_step_count",
|
|
475
|
+
"integer",
|
|
476
|
+
(col) => col.notNull().defaultTo(0)
|
|
477
|
+
).addColumn("progress", "text").addColumn("output", "text").addColumn("error", "text").addColumn("lease_owner", "text").addColumn("lease_expires_at", "text").addColumn(
|
|
478
|
+
"lease_generation",
|
|
479
|
+
"integer",
|
|
480
|
+
(col) => col.notNull().defaultTo(0)
|
|
481
|
+
).addColumn("started_at", "text").addColumn("completed_at", "text").addColumn("created_at", "text", (col) => col.notNull()).addColumn("updated_at", "text", (col) => col.notNull()).execute();
|
|
482
|
+
await db.schema.createIndex("idx_durably_runs_job_idempotency").ifNotExists().on("durably_runs").columns(["job_name", "idempotency_key"]).unique().execute();
|
|
483
|
+
await db.schema.createIndex("idx_durably_runs_status_concurrency").ifNotExists().on("durably_runs").columns(["status", "concurrency_key"]).execute();
|
|
484
|
+
await db.schema.createIndex("idx_durably_runs_status_created").ifNotExists().on("durably_runs").columns(["status", "created_at"]).execute();
|
|
485
|
+
await db.schema.createIndex("idx_durably_runs_status_lease_expires").ifNotExists().on("durably_runs").columns(["status", "lease_expires_at"]).execute();
|
|
486
|
+
await db.schema.createIndex("idx_durably_runs_job_created").ifNotExists().on("durably_runs").columns(["job_name", "created_at"]).execute();
|
|
487
|
+
await db.schema.createIndex("idx_durably_runs_status_completed").ifNotExists().on("durably_runs").columns(["status", "completed_at"]).execute();
|
|
488
|
+
await db.schema.createTable("durably_run_labels").ifNotExists().addColumn("run_id", "text", (col) => col.notNull()).addColumn("key", "text", (col) => col.notNull()).addColumn("value", "text", (col) => col.notNull()).execute();
|
|
489
|
+
await db.schema.createIndex("idx_durably_run_labels_pk").ifNotExists().on("durably_run_labels").columns(["run_id", "key"]).unique().execute();
|
|
490
|
+
await db.schema.createIndex("idx_durably_run_labels_key_value").ifNotExists().on("durably_run_labels").columns(["key", "value"]).execute();
|
|
491
|
+
await db.schema.createTable("durably_steps").ifNotExists().addColumn("id", "text", (col) => col.primaryKey()).addColumn("run_id", "text", (col) => col.notNull()).addColumn("name", "text", (col) => col.notNull()).addColumn("index", "integer", (col) => col.notNull()).addColumn("status", "text", (col) => col.notNull()).addColumn("output", "text").addColumn("error", "text").addColumn("started_at", "text", (col) => col.notNull()).addColumn("completed_at", "text").execute();
|
|
492
|
+
await db.schema.createIndex("idx_durably_steps_run_index").ifNotExists().on("durably_steps").columns(["run_id", "index"]).execute();
|
|
493
|
+
await sql`
|
|
494
|
+
CREATE UNIQUE INDEX IF NOT EXISTS idx_durably_steps_completed_unique
|
|
495
|
+
ON durably_steps(run_id, name) WHERE status = 'completed'
|
|
496
|
+
`.execute(db);
|
|
497
|
+
await db.schema.createTable("durably_logs").ifNotExists().addColumn("id", "text", (col) => col.primaryKey()).addColumn("run_id", "text", (col) => col.notNull()).addColumn("step_name", "text").addColumn("level", "text", (col) => col.notNull()).addColumn("message", "text", (col) => col.notNull()).addColumn("data", "text").addColumn("created_at", "text", (col) => col.notNull()).execute();
|
|
498
|
+
await db.schema.createIndex("idx_durably_logs_run_created").ifNotExists().on("durably_logs").columns(["run_id", "created_at"]).execute();
|
|
499
|
+
await db.schema.createTable("durably_schema_versions").ifNotExists().addColumn("version", "integer", (col) => col.primaryKey()).addColumn("applied_at", "text", (col) => col.notNull()).execute();
|
|
500
|
+
await sql`
|
|
501
|
+
CREATE UNIQUE INDEX IF NOT EXISTS idx_durably_runs_pending_concurrency
|
|
502
|
+
ON durably_runs (job_name, concurrency_key)
|
|
503
|
+
WHERE status = 'pending' AND concurrency_key IS NOT NULL
|
|
504
|
+
`.execute(db);
|
|
505
|
+
}
|
|
506
|
+
}
|
|
507
|
+
];
|
|
508
|
+
async function getCurrentVersion(db) {
|
|
509
|
+
try {
|
|
510
|
+
const result = await db.selectFrom("durably_schema_versions").select("version").orderBy("version", "desc").limit(1).executeTakeFirst();
|
|
511
|
+
return result?.version ?? 0;
|
|
512
|
+
} catch {
|
|
513
|
+
return 0;
|
|
514
|
+
}
|
|
515
|
+
}
|
|
516
|
+
async function runMigrations(db) {
|
|
517
|
+
const currentVersion = await getCurrentVersion(db);
|
|
518
|
+
for (const migration of migrations) {
|
|
519
|
+
if (migration.version > currentVersion) {
|
|
520
|
+
await db.transaction().execute(async (trx) => {
|
|
521
|
+
await migration.up(trx);
|
|
522
|
+
await trx.insertInto("durably_schema_versions").values({
|
|
523
|
+
version: migration.version,
|
|
524
|
+
applied_at: (/* @__PURE__ */ new Date()).toISOString()
|
|
525
|
+
}).execute();
|
|
526
|
+
});
|
|
527
|
+
}
|
|
528
|
+
}
|
|
529
|
+
}
|
|
530
|
+
|
|
55
531
|
// src/context.ts
|
|
56
532
|
var LEASE_LOST = "lease-lost";
|
|
57
533
|
function createStepContext(run, jobName, leaseGeneration, storage, eventEmitter) {
|
|
@@ -250,335 +726,155 @@ function createStepContext(run, jobName, leaseGeneration, storage, eventEmitter)
|
|
|
250
726
|
};
|
|
251
727
|
}
|
|
252
728
|
|
|
253
|
-
// src/
|
|
254
|
-
function
|
|
255
|
-
|
|
256
|
-
let sequence = 0;
|
|
257
|
-
let errorHandler = null;
|
|
258
|
-
return {
|
|
259
|
-
on(type, listener) {
|
|
260
|
-
if (!listeners.has(type)) {
|
|
261
|
-
listeners.set(type, /* @__PURE__ */ new Set());
|
|
262
|
-
}
|
|
263
|
-
const typeListeners = listeners.get(type);
|
|
264
|
-
typeListeners?.add(listener);
|
|
265
|
-
return () => {
|
|
266
|
-
typeListeners?.delete(listener);
|
|
267
|
-
};
|
|
268
|
-
},
|
|
269
|
-
onError(handler) {
|
|
270
|
-
errorHandler = handler;
|
|
271
|
-
},
|
|
272
|
-
emit(event) {
|
|
273
|
-
sequence++;
|
|
274
|
-
const fullEvent = {
|
|
275
|
-
...event,
|
|
276
|
-
timestamp: (/* @__PURE__ */ new Date()).toISOString(),
|
|
277
|
-
sequence
|
|
278
|
-
};
|
|
279
|
-
const typeListeners = listeners.get(event.type);
|
|
280
|
-
if (!typeListeners) {
|
|
281
|
-
return;
|
|
282
|
-
}
|
|
283
|
-
const reportError = (error) => errorHandler?.(toError(error), fullEvent);
|
|
284
|
-
for (const listener of typeListeners) {
|
|
285
|
-
try {
|
|
286
|
-
const result = listener(fullEvent);
|
|
287
|
-
if (result != null && typeof result.then === "function") {
|
|
288
|
-
;
|
|
289
|
-
result.catch(reportError);
|
|
290
|
-
}
|
|
291
|
-
} catch (error) {
|
|
292
|
-
reportError(error);
|
|
293
|
-
}
|
|
294
|
-
}
|
|
295
|
-
}
|
|
296
|
-
};
|
|
297
|
-
}
|
|
298
|
-
|
|
299
|
-
// src/job.ts
|
|
300
|
-
import { prettifyError } from "zod";
|
|
301
|
-
var noop = () => {
|
|
302
|
-
};
|
|
303
|
-
function validateJobInputOrThrow(schema, input, context) {
|
|
304
|
-
const result = schema.safeParse(input);
|
|
305
|
-
if (!result.success) {
|
|
306
|
-
const prefix = context ? `${context}: ` : "";
|
|
307
|
-
throw new ValidationError(
|
|
308
|
-
`${prefix}Invalid input: ${prettifyError(result.error)}`
|
|
309
|
-
);
|
|
310
|
-
}
|
|
311
|
-
return result.data;
|
|
312
|
-
}
|
|
313
|
-
function createJobRegistry() {
|
|
314
|
-
const jobs = /* @__PURE__ */ new Map();
|
|
315
|
-
return {
|
|
316
|
-
set(job) {
|
|
317
|
-
jobs.set(job.name, job);
|
|
318
|
-
},
|
|
319
|
-
get(name) {
|
|
320
|
-
return jobs.get(name);
|
|
321
|
-
},
|
|
322
|
-
has(name) {
|
|
323
|
-
return jobs.has(name);
|
|
324
|
-
}
|
|
325
|
-
};
|
|
326
|
-
}
|
|
327
|
-
function createJobHandle(jobDef, storage, eventEmitter, registry, labelsSchema) {
|
|
328
|
-
const existingJob = registry.get(jobDef.name);
|
|
329
|
-
if (existingJob) {
|
|
330
|
-
if (existingJob.jobDef === jobDef) {
|
|
331
|
-
return existingJob.handle;
|
|
332
|
-
}
|
|
333
|
-
throw new Error(
|
|
334
|
-
`Job "${jobDef.name}" is already registered with a different definition`
|
|
335
|
-
);
|
|
336
|
-
}
|
|
337
|
-
const inputSchema = jobDef.input;
|
|
338
|
-
const outputSchema = jobDef.output;
|
|
339
|
-
const handle = {
|
|
340
|
-
name: jobDef.name,
|
|
341
|
-
async trigger(input, options) {
|
|
342
|
-
const validatedInput = validateJobInputOrThrow(inputSchema, input);
|
|
343
|
-
if (labelsSchema && options?.labels) {
|
|
344
|
-
validateJobInputOrThrow(labelsSchema, options.labels, "labels");
|
|
345
|
-
}
|
|
346
|
-
const run = await storage.enqueue({
|
|
347
|
-
jobName: jobDef.name,
|
|
348
|
-
input: validatedInput,
|
|
349
|
-
idempotencyKey: options?.idempotencyKey,
|
|
350
|
-
concurrencyKey: options?.concurrencyKey,
|
|
351
|
-
labels: options?.labels
|
|
352
|
-
});
|
|
353
|
-
eventEmitter.emit({
|
|
354
|
-
type: "run:trigger",
|
|
355
|
-
runId: run.id,
|
|
356
|
-
jobName: jobDef.name,
|
|
357
|
-
input: validatedInput,
|
|
358
|
-
labels: run.labels
|
|
359
|
-
});
|
|
360
|
-
return run;
|
|
361
|
-
},
|
|
362
|
-
async triggerAndWait(input, options) {
|
|
363
|
-
const run = await this.trigger(input, options);
|
|
364
|
-
return new Promise((resolve, reject) => {
|
|
365
|
-
let timeoutId;
|
|
366
|
-
let resolved = false;
|
|
367
|
-
const unsubscribes = [];
|
|
368
|
-
const cleanup = () => {
|
|
369
|
-
if (resolved) return;
|
|
370
|
-
resolved = true;
|
|
371
|
-
for (const unsub of unsubscribes) unsub();
|
|
372
|
-
if (timeoutId) {
|
|
373
|
-
clearTimeout(timeoutId);
|
|
374
|
-
}
|
|
375
|
-
};
|
|
376
|
-
unsubscribes.push(
|
|
377
|
-
eventEmitter.on("run:complete", (event) => {
|
|
378
|
-
if (event.runId === run.id && !resolved) {
|
|
379
|
-
cleanup();
|
|
380
|
-
resolve({
|
|
381
|
-
id: run.id,
|
|
382
|
-
output: event.output
|
|
383
|
-
});
|
|
384
|
-
}
|
|
385
|
-
})
|
|
386
|
-
);
|
|
387
|
-
unsubscribes.push(
|
|
388
|
-
eventEmitter.on("run:fail", (event) => {
|
|
389
|
-
if (event.runId === run.id && !resolved) {
|
|
390
|
-
cleanup();
|
|
391
|
-
reject(new Error(event.error));
|
|
392
|
-
}
|
|
393
|
-
})
|
|
394
|
-
);
|
|
395
|
-
if (options?.onProgress) {
|
|
396
|
-
const onProgress = options.onProgress;
|
|
397
|
-
unsubscribes.push(
|
|
398
|
-
eventEmitter.on("run:progress", (event) => {
|
|
399
|
-
if (event.runId === run.id && !resolved) {
|
|
400
|
-
void Promise.resolve(onProgress(event.progress)).catch(noop);
|
|
401
|
-
}
|
|
402
|
-
})
|
|
403
|
-
);
|
|
404
|
-
}
|
|
405
|
-
if (options?.onLog) {
|
|
406
|
-
const onLog = options.onLog;
|
|
407
|
-
unsubscribes.push(
|
|
408
|
-
eventEmitter.on("log:write", (event) => {
|
|
409
|
-
if (event.runId === run.id && !resolved) {
|
|
410
|
-
const { level, message, data, stepName } = event;
|
|
411
|
-
void Promise.resolve(
|
|
412
|
-
onLog({ level, message, data, stepName })
|
|
413
|
-
).catch(noop);
|
|
414
|
-
}
|
|
415
|
-
})
|
|
416
|
-
);
|
|
417
|
-
}
|
|
418
|
-
storage.getRun(run.id).then((currentRun) => {
|
|
419
|
-
if (resolved || !currentRun) return;
|
|
420
|
-
if (currentRun.status === "completed") {
|
|
421
|
-
cleanup();
|
|
422
|
-
resolve({
|
|
423
|
-
id: run.id,
|
|
424
|
-
output: currentRun.output
|
|
425
|
-
});
|
|
426
|
-
} else if (currentRun.status === "failed") {
|
|
427
|
-
cleanup();
|
|
428
|
-
reject(new Error(currentRun.error || "Run failed"));
|
|
429
|
-
}
|
|
430
|
-
}).catch((error) => {
|
|
431
|
-
if (resolved) return;
|
|
432
|
-
cleanup();
|
|
433
|
-
reject(toError(error));
|
|
434
|
-
});
|
|
435
|
-
if (options?.timeout !== void 0) {
|
|
436
|
-
timeoutId = setTimeout(() => {
|
|
437
|
-
if (!resolved) {
|
|
438
|
-
cleanup();
|
|
439
|
-
reject(
|
|
440
|
-
new Error(`triggerAndWait timeout after ${options.timeout}ms`)
|
|
441
|
-
);
|
|
442
|
-
}
|
|
443
|
-
}, options.timeout);
|
|
444
|
-
}
|
|
445
|
-
});
|
|
446
|
-
},
|
|
447
|
-
async batchTrigger(inputs) {
|
|
448
|
-
if (inputs.length === 0) {
|
|
449
|
-
return [];
|
|
450
|
-
}
|
|
451
|
-
const normalized = inputs.map((item) => {
|
|
452
|
-
if (item && typeof item === "object" && "input" in item) {
|
|
453
|
-
return item;
|
|
454
|
-
}
|
|
455
|
-
return { input: item, options: void 0 };
|
|
456
|
-
});
|
|
457
|
-
const validated = [];
|
|
458
|
-
for (let i = 0; i < normalized.length; i++) {
|
|
459
|
-
const validatedInput = validateJobInputOrThrow(
|
|
460
|
-
inputSchema,
|
|
461
|
-
normalized[i].input,
|
|
462
|
-
`at index ${i}`
|
|
463
|
-
);
|
|
464
|
-
if (labelsSchema && normalized[i].options?.labels) {
|
|
465
|
-
validateJobInputOrThrow(
|
|
466
|
-
labelsSchema,
|
|
467
|
-
normalized[i].options?.labels,
|
|
468
|
-
`labels at index ${i}`
|
|
469
|
-
);
|
|
470
|
-
}
|
|
471
|
-
validated.push({
|
|
472
|
-
input: validatedInput,
|
|
473
|
-
options: normalized[i].options
|
|
474
|
-
});
|
|
475
|
-
}
|
|
476
|
-
const runs = await storage.enqueueMany(
|
|
477
|
-
validated.map((v) => ({
|
|
478
|
-
jobName: jobDef.name,
|
|
479
|
-
input: v.input,
|
|
480
|
-
idempotencyKey: v.options?.idempotencyKey,
|
|
481
|
-
concurrencyKey: v.options?.concurrencyKey,
|
|
482
|
-
labels: v.options?.labels
|
|
483
|
-
}))
|
|
484
|
-
);
|
|
485
|
-
for (let i = 0; i < runs.length; i++) {
|
|
486
|
-
eventEmitter.emit({
|
|
487
|
-
type: "run:trigger",
|
|
488
|
-
runId: runs[i].id,
|
|
489
|
-
jobName: jobDef.name,
|
|
490
|
-
input: validated[i].input,
|
|
491
|
-
labels: runs[i].labels
|
|
492
|
-
});
|
|
493
|
-
}
|
|
494
|
-
return runs;
|
|
495
|
-
},
|
|
496
|
-
async getRun(id) {
|
|
497
|
-
const run = await storage.getRun(id);
|
|
498
|
-
if (!run || run.jobName !== jobDef.name) {
|
|
499
|
-
return null;
|
|
500
|
-
}
|
|
501
|
-
return run;
|
|
502
|
-
},
|
|
503
|
-
async getRuns(filter) {
|
|
504
|
-
const runs = await storage.getRuns({
|
|
505
|
-
...filter,
|
|
506
|
-
jobName: jobDef.name
|
|
507
|
-
});
|
|
508
|
-
return runs;
|
|
509
|
-
}
|
|
510
|
-
};
|
|
511
|
-
registry.set({
|
|
512
|
-
name: jobDef.name,
|
|
513
|
-
inputSchema,
|
|
514
|
-
outputSchema,
|
|
515
|
-
labelsSchema,
|
|
516
|
-
fn: jobDef.run,
|
|
517
|
-
jobDef,
|
|
518
|
-
handle
|
|
519
|
-
});
|
|
520
|
-
return handle;
|
|
521
|
-
}
|
|
522
|
-
|
|
523
|
-
// src/migrations.ts
|
|
524
|
-
import { sql } from "kysely";
|
|
525
|
-
var migrations = [
|
|
526
|
-
{
|
|
527
|
-
version: 1,
|
|
528
|
-
up: async (db) => {
|
|
529
|
-
await db.schema.createTable("durably_runs").ifNotExists().addColumn("id", "text", (col) => col.primaryKey()).addColumn("job_name", "text", (col) => col.notNull()).addColumn("input", "text", (col) => col.notNull()).addColumn("status", "text", (col) => col.notNull()).addColumn("idempotency_key", "text").addColumn("concurrency_key", "text").addColumn("labels", "text", (col) => col.notNull().defaultTo("{}")).addColumn(
|
|
530
|
-
"current_step_index",
|
|
531
|
-
"integer",
|
|
532
|
-
(col) => col.notNull().defaultTo(0)
|
|
533
|
-
).addColumn(
|
|
534
|
-
"completed_step_count",
|
|
535
|
-
"integer",
|
|
536
|
-
(col) => col.notNull().defaultTo(0)
|
|
537
|
-
).addColumn("progress", "text").addColumn("output", "text").addColumn("error", "text").addColumn("lease_owner", "text").addColumn("lease_expires_at", "text").addColumn(
|
|
538
|
-
"lease_generation",
|
|
539
|
-
"integer",
|
|
540
|
-
(col) => col.notNull().defaultTo(0)
|
|
541
|
-
).addColumn("started_at", "text").addColumn("completed_at", "text").addColumn("created_at", "text", (col) => col.notNull()).addColumn("updated_at", "text", (col) => col.notNull()).execute();
|
|
542
|
-
await db.schema.createIndex("idx_durably_runs_job_idempotency").ifNotExists().on("durably_runs").columns(["job_name", "idempotency_key"]).unique().execute();
|
|
543
|
-
await db.schema.createIndex("idx_durably_runs_status_concurrency").ifNotExists().on("durably_runs").columns(["status", "concurrency_key"]).execute();
|
|
544
|
-
await db.schema.createIndex("idx_durably_runs_status_created").ifNotExists().on("durably_runs").columns(["status", "created_at"]).execute();
|
|
545
|
-
await db.schema.createIndex("idx_durably_runs_status_lease_expires").ifNotExists().on("durably_runs").columns(["status", "lease_expires_at"]).execute();
|
|
546
|
-
await db.schema.createIndex("idx_durably_runs_job_created").ifNotExists().on("durably_runs").columns(["job_name", "created_at"]).execute();
|
|
547
|
-
await db.schema.createIndex("idx_durably_runs_status_completed").ifNotExists().on("durably_runs").columns(["status", "completed_at"]).execute();
|
|
548
|
-
await db.schema.createTable("durably_run_labels").ifNotExists().addColumn("run_id", "text", (col) => col.notNull()).addColumn("key", "text", (col) => col.notNull()).addColumn("value", "text", (col) => col.notNull()).execute();
|
|
549
|
-
await db.schema.createIndex("idx_durably_run_labels_pk").ifNotExists().on("durably_run_labels").columns(["run_id", "key"]).unique().execute();
|
|
550
|
-
await db.schema.createIndex("idx_durably_run_labels_key_value").ifNotExists().on("durably_run_labels").columns(["key", "value"]).execute();
|
|
551
|
-
await db.schema.createTable("durably_steps").ifNotExists().addColumn("id", "text", (col) => col.primaryKey()).addColumn("run_id", "text", (col) => col.notNull()).addColumn("name", "text", (col) => col.notNull()).addColumn("index", "integer", (col) => col.notNull()).addColumn("status", "text", (col) => col.notNull()).addColumn("output", "text").addColumn("error", "text").addColumn("started_at", "text", (col) => col.notNull()).addColumn("completed_at", "text").execute();
|
|
552
|
-
await db.schema.createIndex("idx_durably_steps_run_index").ifNotExists().on("durably_steps").columns(["run_id", "index"]).execute();
|
|
553
|
-
await sql`
|
|
554
|
-
CREATE UNIQUE INDEX IF NOT EXISTS idx_durably_steps_completed_unique
|
|
555
|
-
ON durably_steps(run_id, name) WHERE status = 'completed'
|
|
556
|
-
`.execute(db);
|
|
557
|
-
await db.schema.createTable("durably_logs").ifNotExists().addColumn("id", "text", (col) => col.primaryKey()).addColumn("run_id", "text", (col) => col.notNull()).addColumn("step_name", "text").addColumn("level", "text", (col) => col.notNull()).addColumn("message", "text", (col) => col.notNull()).addColumn("data", "text").addColumn("created_at", "text", (col) => col.notNull()).execute();
|
|
558
|
-
await db.schema.createIndex("idx_durably_logs_run_created").ifNotExists().on("durably_logs").columns(["run_id", "created_at"]).execute();
|
|
559
|
-
await db.schema.createTable("durably_schema_versions").ifNotExists().addColumn("version", "integer", (col) => col.primaryKey()).addColumn("applied_at", "text", (col) => col.notNull()).execute();
|
|
560
|
-
}
|
|
561
|
-
}
|
|
562
|
-
];
|
|
563
|
-
async function getCurrentVersion(db) {
|
|
564
|
-
try {
|
|
565
|
-
const result = await db.selectFrom("durably_schema_versions").select("version").orderBy("version", "desc").limit(1).executeTakeFirst();
|
|
566
|
-
return result?.version ?? 0;
|
|
567
|
-
} catch {
|
|
568
|
-
return 0;
|
|
569
|
-
}
|
|
729
|
+
// src/runtime.ts
|
|
730
|
+
/**
 * Render the injected clock's current time as an ISO-8601 string.
 * @param {{now: () => number}} clock - time source (real or test clock)
 * @returns {string} ISO timestamp for clock.now()
 */
function isoNow(clock) {
  const millis = clock.now();
  return new Date(millis).toISOString();
}
|
|
571
|
-
async function
|
|
572
|
-
const
|
|
573
|
-
|
|
574
|
-
|
|
575
|
-
|
|
576
|
-
|
|
577
|
-
|
|
578
|
-
|
|
579
|
-
|
|
580
|
-
|
|
733
|
+
/**
 * Execute a leased run to completion under lease supervision.
 *
 * In order: builds a step context bound to this run's lease generation, arms a
 * deadline timer that aborts lease ownership at lease expiry, renews the lease
 * on an interval (re-arming the deadline on success, aborting ownership and
 * emitting worker:error on loss), runs the job function, optionally validates
 * its output via job.outputSchema, and persists completion/failure guarded by
 * the lease generation.
 *
 * @param run - the leased run record (id, input, labels, lease fields)
 * @param job - registered job definition ({ fn, outputSchema? })
 * @param config - { leaseMs, leaseRenewIntervalMs, preserveSteps }
 * @param environment - { storage, eventEmitter, clock }
 * @returns a result descriptor: { kind: "completed" | "failed" | "cancelled" | "lease-lost" }
 */
async function executeRun(run, job, config, environment) {
  const { storage, eventEmitter, clock } = environment;
  // Step context is pinned to this lease generation; abortLeaseOwnership makes
  // in-flight step work observe lease loss.
  const { step, abortLeaseOwnership, dispose } = createStepContext(
    run,
    run.jobName,
    run.leaseGeneration,
    storage,
    eventEmitter
  );
  let leaseDeadlineTimer = null;
  // (Re)arm a one-shot timer that aborts ownership exactly when the lease expires.
  const scheduleLeaseDeadline = (leaseExpiresAt) => {
    if (leaseDeadlineTimer) {
      clock.clearTimeout(leaseDeadlineTimer);
      leaseDeadlineTimer = null;
    }
    if (!leaseExpiresAt) {
      return;
    }
    const delay = Math.max(0, Date.parse(leaseExpiresAt) - clock.now());
    leaseDeadlineTimer = clock.setTimeout(() => {
      abortLeaseOwnership();
    }, delay);
  };
  scheduleLeaseDeadline(run.leaseExpiresAt);
  // Periodic lease renewal; a failed renewal means another worker may own the run.
  const leaseTimer = clock.setInterval(() => {
    const now = isoNow(clock);
    storage.renewLease(run.id, run.leaseGeneration, now, config.leaseMs).then((renewed) => {
      if (!renewed) {
        abortLeaseOwnership();
        eventEmitter.emit({
          type: "worker:error",
          error: `Lease renewal lost ownership for run ${run.id}`,
          context: "lease-renewal",
          runId: run.id
        });
        return;
      }
      // New expiry is renewal time + lease duration; push the deadline out.
      const renewedLeaseExpiresAt = new Date(
        Date.parse(now) + config.leaseMs
      ).toISOString();
      scheduleLeaseDeadline(renewedLeaseExpiresAt);
      eventEmitter.emit({
        type: "run:lease-renewed",
        runId: run.id,
        jobName: run.jobName,
        leaseOwner: run.leaseOwner ?? "",
        leaseExpiresAt: renewedLeaseExpiresAt,
        labels: run.labels
      });
    }).catch((error) => {
      // Renewal I/O failure is reported but does not abort the run here.
      eventEmitter.emit({
        type: "worker:error",
        error: getErrorMessage(error),
        context: "lease-renewal",
        runId: run.id
      });
    });
  }, config.leaseRenewIntervalMs);
  const started = clock.now();
  let reachedTerminalState = false;
  try {
    eventEmitter.emit({
      type: "run:leased",
      runId: run.id,
      jobName: run.jobName,
      input: run.input,
      leaseOwner: run.leaseOwner ?? "",
      leaseExpiresAt: run.leaseExpiresAt ?? isoNow(clock),
      labels: run.labels
    });
    const output = await job.fn(step, run.input);
    // Optional output validation; a schema mismatch fails the run.
    if (job.outputSchema) {
      const parseResult = job.outputSchema.safeParse(output);
      if (!parseResult.success) {
        throw new Error(`Invalid output: ${parseResult.error.message}`);
      }
    }
    const completedAt = isoNow(clock);
    // completeRun is guarded by leaseGeneration: false means another worker took over.
    const completed = await storage.completeRun(
      run.id,
      run.leaseGeneration,
      output,
      completedAt
    );
    if (completed) {
      reachedTerminalState = true;
      eventEmitter.emit({
        type: "run:complete",
        runId: run.id,
        jobName: run.jobName,
        output,
        duration: clock.now() - started,
        labels: run.labels
      });
      return { kind: "completed" };
    }
    eventEmitter.emit({
      type: "worker:error",
      error: `Lease lost before completing run ${run.id}`,
      context: "run-completion"
    });
    return { kind: "lease-lost" };
  } catch (error) {
    // Lease loss and cancellation are expected control-flow outcomes, not failures.
    if (error instanceof LeaseLostError) {
      return { kind: "lease-lost" };
    }
    if (error instanceof CancelledError) {
      return { kind: "cancelled" };
    }
    const errorMessage = getErrorMessage(error);
    const completedAt = isoNow(clock);
    // failRun is also lease-generation guarded.
    const failed = await storage.failRun(
      run.id,
      run.leaseGeneration,
      errorMessage,
      completedAt
    );
    if (failed) {
      reachedTerminalState = true;
      // Best-effort attribution: report the first step recorded as failed.
      const steps = await storage.getSteps(run.id);
      const failedStep = steps.find((entry) => entry.status === "failed");
      eventEmitter.emit({
        type: "run:fail",
        runId: run.id,
        jobName: run.jobName,
        error: errorMessage,
        failedStepName: failedStep?.name ?? "unknown",
        labels: run.labels
      });
      return { kind: "failed" };
    }
    eventEmitter.emit({
      type: "worker:error",
      error: `Lease lost before recording failure for run ${run.id}`,
      context: "run-failure"
    });
    return { kind: "lease-lost" };
  } finally {
    // Always tear down timers and the step context, regardless of outcome.
    clock.clearInterval(leaseTimer);
    if (leaseDeadlineTimer) {
      clock.clearTimeout(leaseDeadlineTimer);
    }
    dispose();
    // Step rows are only purged once the run is truly terminal and the
    // configuration does not ask to keep them.
    if (!config.preserveSteps && reachedTerminalState) {
      await storage.deleteSteps(run.id);
    }
  }
}
|
|
@@ -752,7 +1048,11 @@ function toClientRun(run) {
|
|
|
752
1048
|
updatedAt,
|
|
753
1049
|
...clientRun
|
|
754
1050
|
} = run;
|
|
755
|
-
return
|
|
1051
|
+
return {
|
|
1052
|
+
...clientRun,
|
|
1053
|
+
isTerminal: TERMINAL_STATUSES.includes(run.status),
|
|
1054
|
+
isActive: run.status === "pending" || run.status === "leased"
|
|
1055
|
+
};
|
|
756
1056
|
}
|
|
757
1057
|
function createWriteMutex() {
|
|
758
1058
|
let queue = Promise.resolve();
|
|
@@ -771,6 +1071,29 @@ function createWriteMutex() {
|
|
|
771
1071
|
}
|
|
772
1072
|
};
|
|
773
1073
|
}
|
|
1074
|
+
/**
 * Heuristically decide whether a storage error is a unique-constraint
 * violation, across dialects: Postgres error code 23505, a driver-provided
 * `constraint` property, or a message mentioning "unique constraint".
 * @param {unknown} err - value thrown by the database driver
 * @returns {boolean} true when the error looks like a unique violation
 */
function isUniqueViolation(err) {
  if (!(err instanceof Error)) {
    return false;
  }
  // NOTE(review): any truthy `constraint` property is treated as a unique
  // violation — confirm drivers never set it for other constraint classes.
  const code = err.code;
  const hasConstraintName = Boolean(err.constraint);
  return code === "23505" || hasConstraintName || /unique constraint/i.test(err.message);
}
|
|
1082
|
+
/**
 * Classify a unique-constraint violation by which key collided.
 * Prefers the driver's `constraint` name; falls back to scanning the message.
 * @param {unknown} err - value thrown by the database driver
 * @returns {"idempotency"|"pending_concurrency"|null} the violated key class,
 *   or null when it cannot be determined
 */
function parseUniqueViolation(err) {
  if (!(err instanceof Error)) {
    return null;
  }
  const constraintName = err.constraint;
  if (constraintName) {
    if (constraintName.includes("idempotency")) {
      return "idempotency";
    }
    if (constraintName.includes("pending_concurrency")) {
      return "pending_concurrency";
    }
  }
  // No recognizable constraint name: fall back to the message text.
  const message = err.message;
  if (/unique constraint/i.test(message)) {
    if (message.includes("idempotency_key")) {
      return "idempotency";
    }
    if (message.includes("concurrency_key")) {
      return "pending_concurrency";
    }
  }
  return null;
}
|
|
774
1097
|
function createKyselyStore(db, backend = "generic") {
|
|
775
1098
|
const withWriteLock = createWriteMutex();
|
|
776
1099
|
async function cascadeDeleteRuns(trx, ids) {
|
|
@@ -796,99 +1119,114 @@ function createKyselyStore(db, backend = "generic") {
|
|
|
796
1119
|
}).where("id", "=", runId).where("status", "=", "leased").where("lease_generation", "=", leaseGeneration).executeTakeFirst();
|
|
797
1120
|
return Number(result.numUpdatedRows) > 0;
|
|
798
1121
|
}
|
|
799
|
-
|
|
800
|
-
|
|
801
|
-
|
|
1122
|
+
/**
 * Look up the oldest pending run for a (jobName, concurrencyKey) pair.
 * Ordering by created_at then id makes the pick deterministic.
 * @param {object} queryDb - Kysely database or transaction handle
 * @param {string} jobName
 * @param {string} concurrencyKey
 * @returns the first matching row, or undefined (per executeTakeFirst)
 */
function findPendingByConcurrencyKey(queryDb, jobName, concurrencyKey) {
  let pendingQuery = queryDb.selectFrom("durably_runs").selectAll();
  pendingQuery = pendingQuery.where("job_name", "=", jobName);
  pendingQuery = pendingQuery.where("concurrency_key", "=", concurrencyKey);
  pendingQuery = pendingQuery.where("status", "=", "pending");
  pendingQuery = pendingQuery.orderBy("created_at", "asc").orderBy("id", "asc");
  return pendingQuery.limit(1).executeTakeFirst();
}
|
|
1125
|
+
/**
 * Insert a new run, optionally inside an existing transaction.
 *
 * Flow:
 *  1. If an idempotency key is given and a run already exists for it, return
 *     that run with disposition "idempotent".
 *  2. Otherwise insert the run row plus its label rows under a SAVEPOINT so a
 *     constraint failure does not poison the enclosing transaction.
 *  3. On a unique violation: re-check idempotency; for concurrency-key
 *     conflicts with coalesce: "skip", return the existing pending run as
 *     "coalesced", retrying the whole enqueue once if no pending run is found;
 *     without coalesce: "skip", throw a ConflictError.
 *
 * @param trx - active transaction handle, or null to open a fresh one
 * @param input - enqueue request (jobName, input, idempotencyKey, concurrencyKey, labels, coalesce)
 * @param retried - internal flag: true on the single recursive retry
 * @returns {run, disposition: "created" | "idempotent" | "coalesced"}
 */
async function enqueueInTx(trx, input, retried = false) {
  const queryDb = trx ?? db;
  const now = (/* @__PURE__ */ new Date()).toISOString();
  // Fast path: an existing run with the same idempotency key wins outright.
  if (input.idempotencyKey) {
    const existing = await queryDb.selectFrom("durably_runs").selectAll().where("job_name", "=", input.jobName).where("idempotency_key", "=", input.idempotencyKey).executeTakeFirst();
    if (existing) {
      return { run: rowToRun(existing), disposition: "idempotent" };
    }
  }
  validateLabels(input.labels);
  const id = ulid();
  // Fresh pending run row; lease fields start empty, generation 0.
  const row = {
    id,
    job_name: input.jobName,
    input: JSON.stringify(input.input),
    status: "pending",
    idempotency_key: input.idempotencyKey ?? null,
    concurrency_key: input.concurrencyKey ?? null,
    current_step_index: 0,
    completed_step_count: 0,
    progress: null,
    output: null,
    error: null,
    labels: JSON.stringify(input.labels ?? {}),
    lease_owner: null,
    lease_expires_at: null,
    lease_generation: 0,
    started_at: null,
    completed_at: null,
    created_at: now,
    updated_at: now
  };
  // Insert row + labels under a savepoint so a constraint violation can be
  // rolled back without aborting the caller's transaction.
  const doInsert = async (insertDb) => {
    await sql4`SAVEPOINT sp_enqueue`.execute(insertDb);
    try {
      await insertDb.insertInto("durably_runs").values(row).execute();
      await insertLabelRows(insertDb, id, input.labels);
      await sql4`RELEASE SAVEPOINT sp_enqueue`.execute(insertDb);
    } catch (err) {
      await sql4`ROLLBACK TO SAVEPOINT sp_enqueue`.execute(insertDb);
      throw err;
    }
  };
  try {
    if (trx) {
      await doInsert(trx);
    } else {
      await db.transaction().execute(doInsert);
    }
    return { run: rowToRun(row), disposition: "created" };
  } catch (err) {
    // Anything other than a unique violation is not ours to handle.
    if (!isUniqueViolation(err)) throw err;
    const violation = parseUniqueViolation(err);
    // A concurrent writer may have inserted the idempotent run after our
    // pre-check; return it rather than failing.
    if (input.idempotencyKey) {
      const idempotent = await queryDb.selectFrom("durably_runs").selectAll().where("job_name", "=", input.jobName).where("idempotency_key", "=", input.idempotencyKey).executeTakeFirst();
      if (idempotent) {
        return { run: rowToRun(idempotent), disposition: "idempotent" };
      }
    }
    // violation === null: the dialect could not be classified, so fall through
    // to the concurrency-key handling when a key was supplied.
    if ((violation === "pending_concurrency" || violation === null) && input.concurrencyKey) {
      if (input.coalesce === "skip") {
        const pending = await findPendingByConcurrencyKey(
          queryDb,
          input.jobName,
          input.concurrencyKey
        );
        if (pending) {
          return { run: rowToRun(pending), disposition: "coalesced" };
        }
        // The conflicting pending run vanished between insert and lookup:
        // retry the whole enqueue exactly once.
        if (!retried) {
          return enqueueInTx(trx, input, true);
        }
        const lastChance = await findPendingByConcurrencyKey(
          queryDb,
          input.jobName,
          input.concurrencyKey
        );
        if (lastChance) {
          return { run: rowToRun(lastChance), disposition: "coalesced" };
        }
        throw new ConflictError(
          `Conflict after retry for concurrency key "${input.concurrencyKey}" in job "${input.jobName}". Concurrent modification detected.`
        );
      }
      throw new ConflictError(
        `A pending run already exists for concurrency key "${input.concurrencyKey}" in job "${input.jobName}". Use coalesce: 'skip' to return the existing run instead.`
      );
    }
    throw err;
  }
}
|
|
1216
|
+
const store = {
|
|
1217
|
+
async enqueue(input) {
|
|
1218
|
+
return enqueueInTx(null, input);
|
|
836
1219
|
},
|
|
837
1220
|
async enqueueMany(inputs) {
|
|
838
1221
|
if (inputs.length === 0) {
|
|
839
1222
|
return [];
|
|
840
1223
|
}
|
|
841
|
-
return
|
|
842
|
-
const
|
|
843
|
-
const runs = [];
|
|
1224
|
+
return db.transaction().execute(async (trx) => {
|
|
1225
|
+
const results = [];
|
|
844
1226
|
for (const input of inputs) {
|
|
845
|
-
|
|
846
|
-
}
|
|
847
|
-
const allLabelRows = [];
|
|
848
|
-
for (const input of inputs) {
|
|
849
|
-
if (input.idempotencyKey) {
|
|
850
|
-
const existing = await trx.selectFrom("durably_runs").selectAll().where("job_name", "=", input.jobName).where("idempotency_key", "=", input.idempotencyKey).executeTakeFirst();
|
|
851
|
-
if (existing) {
|
|
852
|
-
runs.push(existing);
|
|
853
|
-
continue;
|
|
854
|
-
}
|
|
855
|
-
}
|
|
856
|
-
const id = ulid();
|
|
857
|
-
if (input.labels) {
|
|
858
|
-
for (const [key, value] of Object.entries(input.labels)) {
|
|
859
|
-
allLabelRows.push({ run_id: id, key, value });
|
|
860
|
-
}
|
|
861
|
-
}
|
|
862
|
-
runs.push({
|
|
863
|
-
id,
|
|
864
|
-
job_name: input.jobName,
|
|
865
|
-
input: JSON.stringify(input.input),
|
|
866
|
-
status: "pending",
|
|
867
|
-
idempotency_key: input.idempotencyKey ?? null,
|
|
868
|
-
concurrency_key: input.concurrencyKey ?? null,
|
|
869
|
-
current_step_index: 0,
|
|
870
|
-
completed_step_count: 0,
|
|
871
|
-
progress: null,
|
|
872
|
-
output: null,
|
|
873
|
-
error: null,
|
|
874
|
-
labels: JSON.stringify(input.labels ?? {}),
|
|
875
|
-
lease_owner: null,
|
|
876
|
-
lease_expires_at: null,
|
|
877
|
-
lease_generation: 0,
|
|
878
|
-
started_at: null,
|
|
879
|
-
completed_at: null,
|
|
880
|
-
created_at: now,
|
|
881
|
-
updated_at: now
|
|
882
|
-
});
|
|
883
|
-
}
|
|
884
|
-
const newRuns = runs.filter((r) => r.created_at === now);
|
|
885
|
-
if (newRuns.length > 0) {
|
|
886
|
-
await trx.insertInto("durably_runs").values(newRuns).execute();
|
|
887
|
-
if (allLabelRows.length > 0) {
|
|
888
|
-
await trx.insertInto("durably_run_labels").values(allLabelRows).execute();
|
|
889
|
-
}
|
|
1227
|
+
results.push(await enqueueInTx(trx, input));
|
|
890
1228
|
}
|
|
891
|
-
return
|
|
1229
|
+
return results;
|
|
892
1230
|
});
|
|
893
1231
|
},
|
|
894
1232
|
async getRun(runId) {
|
|
@@ -898,7 +1236,13 @@ function createKyselyStore(db, backend = "generic") {
|
|
|
898
1236
|
async getRuns(filter) {
|
|
899
1237
|
let query = db.selectFrom("durably_runs").selectAll();
|
|
900
1238
|
if (filter?.status) {
|
|
901
|
-
|
|
1239
|
+
if (Array.isArray(filter.status)) {
|
|
1240
|
+
if (filter.status.length > 0) {
|
|
1241
|
+
query = query.where("status", "in", filter.status);
|
|
1242
|
+
}
|
|
1243
|
+
} else {
|
|
1244
|
+
query = query.where("status", "=", filter.status);
|
|
1245
|
+
}
|
|
902
1246
|
}
|
|
903
1247
|
if (filter?.jobName) {
|
|
904
1248
|
if (Array.isArray(filter.jobName)) {
|
|
@@ -992,13 +1336,54 @@ function createKyselyStore(db, backend = "generic") {
|
|
|
992
1336
|
return Number(result.numUpdatedRows) > 0;
|
|
993
1337
|
},
|
|
994
1338
|
/**
 * Reclaim leased runs whose lease has expired, in two phases:
 *  1. Bulk-fail expired runs that would collide with an existing pending run
 *     for the same (job_name, concurrency_key) — resetting them to pending
 *     would violate the pending-concurrency uniqueness.
 *  2. Reset the remaining expired runs back to "pending" one-by-one, each
 *     under a SAVEPOINT; if a reset still trips a unique violation, mark that
 *     run failed instead.
 * @param now - ISO timestamp used both as the expiry cutoff and the update time
 * @returns total number of runs transitioned (failed + reset)
 */
async releaseExpiredLeases(now) {
  // Phase 1: fail expired leased runs that already have a sibling pending run.
  const conflicting = await db.updateTable("durably_runs").set({
    status: "failed",
    error: "Lease expired; pending run already exists",
    lease_owner: null,
    lease_expires_at: null,
    completed_at: now,
    updated_at: now
  }).where("status", "=", "leased").where("lease_expires_at", "is not", null).where("lease_expires_at", "<=", now).where(
    ({ exists, selectFrom }) => exists(
      selectFrom("durably_runs as other").select(sql4.lit(1).as("one")).whereRef("other.job_name", "=", "durably_runs.job_name").whereRef(
        "other.concurrency_key",
        "=",
        "durably_runs.concurrency_key"
      ).where("other.status", "=", "pending").whereRef("other.id", "<>", "durably_runs.id")
    )
  ).executeTakeFirst();
  let count = Number(conflicting.numUpdatedRows);
  // Phase 2: whatever is still leased-and-expired can be reset to pending.
  const remaining = await db.selectFrom("durably_runs").select("id").where("status", "=", "leased").where("lease_expires_at", "is not", null).where("lease_expires_at", "<=", now).execute();
  if (remaining.length > 0) {
    await db.transaction().execute(async (trx) => {
      for (const row of remaining) {
        try {
          // Per-row savepoint: one failing reset must not poison the batch.
          await sql4`SAVEPOINT sp_release`.execute(trx);
          const reset = await trx.updateTable("durably_runs").set({
            status: "pending",
            lease_owner: null,
            lease_expires_at: null,
            updated_at: now
          }).where("id", "=", row.id).where("status", "=", "leased").where("lease_expires_at", "<=", now).executeTakeFirst();
          await sql4`RELEASE SAVEPOINT sp_release`.execute(trx);
          count += Number(reset.numUpdatedRows);
        } catch (err) {
          await sql4`ROLLBACK TO SAVEPOINT sp_release`.execute(trx);
          if (!isUniqueViolation(err)) throw err;
          // The reset collided with a pending-run uniqueness constraint:
          // fail this run instead of resetting it.
          const failed = await trx.updateTable("durably_runs").set({
            status: "failed",
            error: "Lease expired; pending run already exists",
            lease_owner: null,
            lease_expires_at: null,
            completed_at: now,
            updated_at: now
          }).where("id", "=", row.id).where("status", "=", "leased").executeTakeFirst();
          count += Number(failed.numUpdatedRows);
        }
      }
    });
  }
  return count;
},
|
|
1003
1388
|
async completeRun(runId, leaseGeneration, output, completedAt) {
|
|
1004
1389
|
return terminateRun(runId, leaseGeneration, completedAt, {
|
|
@@ -1123,39 +1508,79 @@ function createKyselyStore(db, backend = "generic") {
|
|
|
1123
1508
|
|
|
1124
1509
|
// src/worker.ts
|
|
1125
1510
|
function createWorker(config, processOne, onIdle) {
|
|
1511
|
+
const maxConcurrentRuns = config.maxConcurrentRuns;
|
|
1126
1512
|
let running = false;
|
|
1127
1513
|
let pollingTimeout = null;
|
|
1128
|
-
let
|
|
1129
|
-
let stopResolver = null;
|
|
1514
|
+
let activeCount = 0;
|
|
1130
1515
|
let activeWorkerId;
|
|
1131
|
-
|
|
1516
|
+
const activePromises = /* @__PURE__ */ new Set();
|
|
1517
|
+
let idleMaintenanceInFlight = null;
|
|
1518
|
+
// Arm (or re-arm) the single delayed-poll timer; when it fires, try to fill
// worker slots again. No-op once the worker has been stopped.
function scheduleDelayedPoll() {
  if (!running) {
    return;
  }
  // At most one delayed poll may be pending at a time.
  if (pollingTimeout) {
    clearTimeout(pollingTimeout);
    pollingTimeout = null;
  }
  pollingTimeout = setTimeout(() => {
    pollingTimeout = null;
    // `running` may have flipped while the timer was pending.
    if (running) {
      fillSlots();
    }
  }, config.pollingIntervalMs);
}
|
|
1533
|
+
// Run the optional onIdle maintenance hook, swallowing its errors, and track
// the in-flight promise so stop() can await it before returning.
async function runIdleMaintenanceSafe() {
  if (!onIdle) {
    return;
  }
  const cycle = (async () => {
    try {
      await onIdle();
    } catch {
      // Maintenance failures are deliberately ignored: they must not kill
      // the worker loop.
    }
  })();
  idleMaintenanceInFlight = cycle;
  try {
    await cycle;
  } finally {
    // Clear the tracker only if a newer cycle has not already replaced it.
    if (idleMaintenanceInFlight === cycle) {
      idleMaintenanceInFlight = null;
    }
  }
}
|
|
1552
|
+
// One worker-slot iteration: attempt to process a single run, then either keep
// the pipeline full (work was found) or fall back to delayed polling (idle).
async function processSlotCycle() {
  try {
    const didProcess = await processOne({ workerId: activeWorkerId });
    // This slot is free again, whatever the outcome.
    activeCount--;
    if (didProcess && running) {
      // Work was found; immediately try to occupy free slots again.
      fillSlots();
    } else if (!didProcess && running) {
      // Queue looked empty; run idle maintenance once all slots have drained,
      // then wait one polling interval before trying again.
      if (activeCount === 0) {
        await runIdleMaintenanceSafe();
      }
      scheduleDelayedPoll();
    }
  } catch (err) {
    activeCount--;
    if (running) {
      fillSlots();
    }
    // Rethrow so the failure is observable; fillSlots attaches a catch handler
    // to the returned promise.
    throw err;
  }
}
|
|
1572
|
+
// Launch processing cycles until every available concurrency slot is occupied
// (up to maxConcurrentRuns) or the worker is stopped.
function fillSlots() {
  if (!running) {
    return;
  }
  while (running && activeCount < maxConcurrentRuns) {
    // Reserve the slot before the async cycle starts so concurrent calls
    // cannot over-fill.
    activeCount++;
    const p = processSlotCycle();
    // Track the promise so stop() can await outstanding work.
    activePromises.add(p);
    void p.finally(() => {
      activePromises.delete(p);
    }).catch(() => {
      // processSlotCycle rethrows its errors; suppress the rejection here to
      // avoid unhandled-rejection noise.
    });
  }
}
|
|
1161
1586
|
return {
|
|
@@ -1168,7 +1593,7 @@ function createWorker(config, processOne, onIdle) {
|
|
|
1168
1593
|
}
|
|
1169
1594
|
activeWorkerId = options?.workerId;
|
|
1170
1595
|
running = true;
|
|
1171
|
-
|
|
1596
|
+
fillSlots();
|
|
1172
1597
|
},
|
|
1173
1598
|
async stop() {
|
|
1174
1599
|
if (!running) {
|
|
@@ -1179,11 +1604,14 @@ function createWorker(config, processOne, onIdle) {
|
|
|
1179
1604
|
clearTimeout(pollingTimeout);
|
|
1180
1605
|
pollingTimeout = null;
|
|
1181
1606
|
}
|
|
1182
|
-
|
|
1183
|
-
|
|
1184
|
-
|
|
1185
|
-
|
|
1607
|
+
const pending = [...activePromises];
|
|
1608
|
+
if (idleMaintenanceInFlight) {
|
|
1609
|
+
pending.push(idleMaintenanceInFlight);
|
|
1610
|
+
}
|
|
1611
|
+
if (pending.length === 0) {
|
|
1612
|
+
return;
|
|
1186
1613
|
}
|
|
1614
|
+
await Promise.allSettled(pending);
|
|
1187
1615
|
}
|
|
1188
1616
|
};
|
|
1189
1617
|
}
|
|
@@ -1191,10 +1619,20 @@ function createWorker(config, processOne, onIdle) {
|
|
|
1191
1619
|
// src/durably.ts
|
|
1192
1620
|
// Default runtime configuration, applied when createDurably() options omit a value.
var DEFAULTS = {
  pollingIntervalMs: 1e3,
  // one concurrent run unless overridden (bounds checked by validateMaxConcurrentRuns)
  maxConcurrentRuns: 1,
  // renew an active run's lease every 5s
  leaseRenewIntervalMs: 5e3,
  // each lease lasts 30s from grant/renewal
  leaseMs: 3e4,
  // when false, step rows are deleted once a run reaches a terminal state
  preserveSteps: false
};
|
|
1627
|
+
// Hard upper bound for the maxConcurrentRuns option.
var MAX_CONCURRENT_RUNS = 1e3;
/**
 * Validate the maxConcurrentRuns option: must be a safe integer in
 * [1, MAX_CONCURRENT_RUNS].
 * @param {number} value - candidate option value
 * @returns {number} the value, unchanged, when valid
 * @throws {ValidationError} when the value is out of range or not an integer
 */
function validateMaxConcurrentRuns(value) {
  const withinRange = Number.isSafeInteger(value) && value >= 1 && value <= MAX_CONCURRENT_RUNS;
  if (withinRange) {
    return value;
  }
  throw new ValidationError(
    `maxConcurrentRuns must be between 1 and ${MAX_CONCURRENT_RUNS}`
  );
}
|
|
1198
1636
|
function parseDuration(value) {
|
|
1199
1637
|
const match = value.match(/^(\d+)(d|h|m)$/);
|
|
1200
1638
|
if (!match) {
|
|
@@ -1212,6 +1650,14 @@ function parseDuration(value) {
|
|
|
1212
1650
|
return num * multipliers[unit];
|
|
1213
1651
|
}
|
|
1214
1652
|
var PURGE_INTERVAL_MS = 6e4;
|
|
1653
|
+
var CHECKPOINT_INTERVAL_MS = 6e4;
|
|
1654
|
+
// Wall-clock implementation of the clock interface used by the runtime;
// delegates straight to the platform timer APIs.
var realClock = {
  now() {
    return Date.now();
  },
  setTimeout(fn, ms) {
    return globalThis.setTimeout(fn, ms);
  },
  clearTimeout(id) {
    return globalThis.clearTimeout(id);
  },
  setInterval(fn, ms) {
    return globalThis.setInterval(fn, ms);
  },
  clearInterval(id) {
    return globalThis.clearInterval(id);
  }
};
|
|
1215
1661
|
var ulid2 = monotonicFactory2();
|
|
1216
1662
|
var BROWSER_SINGLETON_REGISTRY_KEY = "__durablyBrowserSingletonRegistry";
|
|
1217
1663
|
var BROWSER_LOCAL_DIALECT_KEY = "__durablyBrowserLocalKey";
|
|
@@ -1281,7 +1727,7 @@ function createDurablyInstance(state, jobs) {
|
|
|
1281
1727
|
}
|
|
1282
1728
|
return run;
|
|
1283
1729
|
}
|
|
1284
|
-
async function
|
|
1730
|
+
async function executeRun2(run, _workerId) {
|
|
1285
1731
|
const job = jobRegistry.get(run.jobName);
|
|
1286
1732
|
if (!job) {
|
|
1287
1733
|
await storage.failRun(
|
|
@@ -1292,146 +1738,16 @@ function createDurablyInstance(state, jobs) {
|
|
|
1292
1738
|
);
|
|
1293
1739
|
return;
|
|
1294
1740
|
}
|
|
1295
|
-
|
|
1741
|
+
await executeRun(
|
|
1296
1742
|
run,
|
|
1297
|
-
|
|
1298
|
-
|
|
1299
|
-
|
|
1300
|
-
|
|
1743
|
+
job,
|
|
1744
|
+
{
|
|
1745
|
+
leaseMs: state.leaseMs,
|
|
1746
|
+
leaseRenewIntervalMs: state.leaseRenewIntervalMs,
|
|
1747
|
+
preserveSteps: state.preserveSteps
|
|
1748
|
+
},
|
|
1749
|
+
{ storage, eventEmitter, clock: realClock }
|
|
1301
1750
|
);
|
|
1302
|
-
let leaseDeadlineTimer = null;
|
|
1303
|
-
const scheduleLeaseDeadline = (leaseExpiresAt) => {
|
|
1304
|
-
if (leaseDeadlineTimer) {
|
|
1305
|
-
clearTimeout(leaseDeadlineTimer);
|
|
1306
|
-
leaseDeadlineTimer = null;
|
|
1307
|
-
}
|
|
1308
|
-
if (!leaseExpiresAt) {
|
|
1309
|
-
return;
|
|
1310
|
-
}
|
|
1311
|
-
const delay = Math.max(0, Date.parse(leaseExpiresAt) - Date.now());
|
|
1312
|
-
leaseDeadlineTimer = setTimeout(() => {
|
|
1313
|
-
abortLeaseOwnership();
|
|
1314
|
-
}, delay);
|
|
1315
|
-
};
|
|
1316
|
-
scheduleLeaseDeadline(run.leaseExpiresAt);
|
|
1317
|
-
const leaseTimer = setInterval(() => {
|
|
1318
|
-
const now = (/* @__PURE__ */ new Date()).toISOString();
|
|
1319
|
-
storage.renewLease(run.id, run.leaseGeneration, now, state.leaseMs).then((renewed) => {
|
|
1320
|
-
if (!renewed) {
|
|
1321
|
-
abortLeaseOwnership();
|
|
1322
|
-
eventEmitter.emit({
|
|
1323
|
-
type: "worker:error",
|
|
1324
|
-
error: `Lease renewal lost ownership for run ${run.id}`,
|
|
1325
|
-
context: "lease-renewal",
|
|
1326
|
-
runId: run.id
|
|
1327
|
-
});
|
|
1328
|
-
return;
|
|
1329
|
-
}
|
|
1330
|
-
const renewedLeaseExpiresAt = new Date(
|
|
1331
|
-
Date.parse(now) + state.leaseMs
|
|
1332
|
-
).toISOString();
|
|
1333
|
-
scheduleLeaseDeadline(renewedLeaseExpiresAt);
|
|
1334
|
-
eventEmitter.emit({
|
|
1335
|
-
type: "run:lease-renewed",
|
|
1336
|
-
runId: run.id,
|
|
1337
|
-
jobName: run.jobName,
|
|
1338
|
-
leaseOwner: workerId,
|
|
1339
|
-
leaseExpiresAt: renewedLeaseExpiresAt,
|
|
1340
|
-
labels: run.labels
|
|
1341
|
-
});
|
|
1342
|
-
}).catch((error) => {
|
|
1343
|
-
eventEmitter.emit({
|
|
1344
|
-
type: "worker:error",
|
|
1345
|
-
error: getErrorMessage(error),
|
|
1346
|
-
context: "lease-renewal",
|
|
1347
|
-
runId: run.id
|
|
1348
|
-
});
|
|
1349
|
-
});
|
|
1350
|
-
}, state.leaseRenewIntervalMs);
|
|
1351
|
-
const started = Date.now();
|
|
1352
|
-
let reachedTerminalState = false;
|
|
1353
|
-
try {
|
|
1354
|
-
eventEmitter.emit({
|
|
1355
|
-
type: "run:leased",
|
|
1356
|
-
runId: run.id,
|
|
1357
|
-
jobName: run.jobName,
|
|
1358
|
-
input: run.input,
|
|
1359
|
-
leaseOwner: workerId,
|
|
1360
|
-
leaseExpiresAt: run.leaseExpiresAt ?? (/* @__PURE__ */ new Date()).toISOString(),
|
|
1361
|
-
labels: run.labels
|
|
1362
|
-
});
|
|
1363
|
-
const output = await job.fn(step, run.input);
|
|
1364
|
-
if (job.outputSchema) {
|
|
1365
|
-
const parseResult = job.outputSchema.safeParse(output);
|
|
1366
|
-
if (!parseResult.success) {
|
|
1367
|
-
throw new Error(`Invalid output: ${parseResult.error.message}`);
|
|
1368
|
-
}
|
|
1369
|
-
}
|
|
1370
|
-
const completedAt = (/* @__PURE__ */ new Date()).toISOString();
|
|
1371
|
-
const completed = await storage.completeRun(
|
|
1372
|
-
run.id,
|
|
1373
|
-
run.leaseGeneration,
|
|
1374
|
-
output,
|
|
1375
|
-
completedAt
|
|
1376
|
-
);
|
|
1377
|
-
if (completed) {
|
|
1378
|
-
reachedTerminalState = true;
|
|
1379
|
-
eventEmitter.emit({
|
|
1380
|
-
type: "run:complete",
|
|
1381
|
-
runId: run.id,
|
|
1382
|
-
jobName: run.jobName,
|
|
1383
|
-
output,
|
|
1384
|
-
duration: Date.now() - started,
|
|
1385
|
-
labels: run.labels
|
|
1386
|
-
});
|
|
1387
|
-
} else {
|
|
1388
|
-
eventEmitter.emit({
|
|
1389
|
-
type: "worker:error",
|
|
1390
|
-
error: `Lease lost before completing run ${run.id}`,
|
|
1391
|
-
context: "run-completion"
|
|
1392
|
-
});
|
|
1393
|
-
}
|
|
1394
|
-
} catch (error) {
|
|
1395
|
-
if (error instanceof LeaseLostError || error instanceof CancelledError) {
|
|
1396
|
-
return;
|
|
1397
|
-
}
|
|
1398
|
-
const errorMessage = getErrorMessage(error);
|
|
1399
|
-
const completedAt = (/* @__PURE__ */ new Date()).toISOString();
|
|
1400
|
-
const failed = await storage.failRun(
|
|
1401
|
-
run.id,
|
|
1402
|
-
run.leaseGeneration,
|
|
1403
|
-
errorMessage,
|
|
1404
|
-
completedAt
|
|
1405
|
-
);
|
|
1406
|
-
if (failed) {
|
|
1407
|
-
reachedTerminalState = true;
|
|
1408
|
-
const steps = await storage.getSteps(run.id);
|
|
1409
|
-
const failedStep = steps.find((entry) => entry.status === "failed");
|
|
1410
|
-
eventEmitter.emit({
|
|
1411
|
-
type: "run:fail",
|
|
1412
|
-
runId: run.id,
|
|
1413
|
-
jobName: run.jobName,
|
|
1414
|
-
error: errorMessage,
|
|
1415
|
-
failedStepName: failedStep?.name ?? "unknown",
|
|
1416
|
-
labels: run.labels
|
|
1417
|
-
});
|
|
1418
|
-
} else {
|
|
1419
|
-
eventEmitter.emit({
|
|
1420
|
-
type: "worker:error",
|
|
1421
|
-
error: `Lease lost before recording failure for run ${run.id}`,
|
|
1422
|
-
context: "run-failure"
|
|
1423
|
-
});
|
|
1424
|
-
}
|
|
1425
|
-
} finally {
|
|
1426
|
-
clearInterval(leaseTimer);
|
|
1427
|
-
if (leaseDeadlineTimer) {
|
|
1428
|
-
clearTimeout(leaseDeadlineTimer);
|
|
1429
|
-
}
|
|
1430
|
-
dispose();
|
|
1431
|
-
if (!state.preserveSteps && reachedTerminalState) {
|
|
1432
|
-
await storage.deleteSteps(run.id);
|
|
1433
|
-
}
|
|
1434
|
-
}
|
|
1435
1751
|
}
|
|
1436
1752
|
const durably = {
|
|
1437
1753
|
db,
|
|
@@ -1455,7 +1771,8 @@ function createDurablyInstance(state, jobs) {
|
|
|
1455
1771
|
storage,
|
|
1456
1772
|
eventEmitter,
|
|
1457
1773
|
jobRegistry,
|
|
1458
|
-
state.labelsSchema
|
|
1774
|
+
state.labelsSchema,
|
|
1775
|
+
state.pollingIntervalMs
|
|
1459
1776
|
);
|
|
1460
1777
|
newHandles[key] = handle;
|
|
1461
1778
|
}
|
|
@@ -1467,6 +1784,19 @@ function createDurablyInstance(state, jobs) {
|
|
|
1467
1784
|
},
|
|
1468
1785
|
getRun: storage.getRun.bind(storage),
|
|
1469
1786
|
getRuns: storage.getRuns.bind(storage),
|
|
1787
|
+
async waitForRun(runId, options) {
|
|
1788
|
+
const run = await waitForRunCompletion(
|
|
1789
|
+
runId,
|
|
1790
|
+
storage,
|
|
1791
|
+
eventEmitter,
|
|
1792
|
+
{
|
|
1793
|
+
...options,
|
|
1794
|
+
pollingIntervalMs: options?.pollingIntervalMs ?? state.pollingIntervalMs
|
|
1795
|
+
},
|
|
1796
|
+
"waitForRun"
|
|
1797
|
+
);
|
|
1798
|
+
return run;
|
|
1799
|
+
},
|
|
1470
1800
|
use(plugin) {
|
|
1471
1801
|
plugin.install(durably);
|
|
1472
1802
|
},
|
|
@@ -1598,7 +1928,7 @@ function createDurablyInstance(state, jobs) {
|
|
|
1598
1928
|
run.input,
|
|
1599
1929
|
`Cannot retrigger run ${runId}`
|
|
1600
1930
|
);
|
|
1601
|
-
const nextRun = await storage.enqueue({
|
|
1931
|
+
const { run: nextRun } = await storage.enqueue({
|
|
1602
1932
|
jobName: run.jobName,
|
|
1603
1933
|
input: validatedInput,
|
|
1604
1934
|
concurrencyKey: run.concurrencyKey ?? void 0,
|
|
@@ -1671,7 +2001,7 @@ function createDurablyInstance(state, jobs) {
|
|
|
1671
2001
|
if (!run) {
|
|
1672
2002
|
return false;
|
|
1673
2003
|
}
|
|
1674
|
-
await
|
|
2004
|
+
await executeRun2(run, workerId);
|
|
1675
2005
|
return true;
|
|
1676
2006
|
},
|
|
1677
2007
|
async processUntilIdle(options) {
|
|
@@ -1699,8 +2029,10 @@ function createDurablyInstance(state, jobs) {
|
|
|
1699
2029
|
if (state.migrating) {
|
|
1700
2030
|
return state.migrating;
|
|
1701
2031
|
}
|
|
1702
|
-
state.migrating = runMigrations(db).then(() => {
|
|
2032
|
+
state.migrating = runMigrations(db).then(async () => {
|
|
1703
2033
|
state.migrated = true;
|
|
2034
|
+
await state.probeWalCheckpoint?.();
|
|
2035
|
+
state.probeWalCheckpoint = null;
|
|
1704
2036
|
}).finally(() => {
|
|
1705
2037
|
state.migrating = null;
|
|
1706
2038
|
});
|
|
@@ -1714,8 +2046,10 @@ function createDurablyInstance(state, jobs) {
|
|
|
1714
2046
|
return durably;
|
|
1715
2047
|
}
|
|
1716
2048
|
function createDurably(options) {
|
|
2049
|
+
const maxConcurrentRuns = options.maxConcurrentRuns !== void 0 ? validateMaxConcurrentRuns(options.maxConcurrentRuns) : DEFAULTS.maxConcurrentRuns;
|
|
1717
2050
|
const config = {
|
|
1718
2051
|
pollingIntervalMs: options.pollingIntervalMs ?? DEFAULTS.pollingIntervalMs,
|
|
2052
|
+
maxConcurrentRuns,
|
|
1719
2053
|
leaseRenewIntervalMs: options.leaseRenewIntervalMs ?? DEFAULTS.leaseRenewIntervalMs,
|
|
1720
2054
|
leaseMs: options.leaseMs ?? DEFAULTS.leaseMs,
|
|
1721
2055
|
preserveSteps: options.preserveSteps ?? DEFAULTS.preserveSteps,
|
|
@@ -1738,18 +2072,38 @@ function createDurably(options) {
|
|
|
1738
2072
|
const eventEmitter = createEventEmitter();
|
|
1739
2073
|
const jobRegistry = createJobRegistry();
|
|
1740
2074
|
let lastPurgeAt = 0;
|
|
2075
|
+
let lastCheckpointAt = 0;
|
|
1741
2076
|
const runIdleMaintenance = async () => {
|
|
1742
2077
|
try {
|
|
1743
|
-
const
|
|
1744
|
-
await storage.releaseExpiredLeases(
|
|
2078
|
+
const nowMs = Date.now();
|
|
2079
|
+
await storage.releaseExpiredLeases(new Date(nowMs).toISOString());
|
|
1745
2080
|
if (config.retainRunsMs !== null) {
|
|
1746
|
-
|
|
1747
|
-
|
|
1748
|
-
|
|
1749
|
-
const cutoff = new Date(purgeNow - config.retainRunsMs).toISOString();
|
|
2081
|
+
if (nowMs - lastPurgeAt >= PURGE_INTERVAL_MS) {
|
|
2082
|
+
lastPurgeAt = nowMs;
|
|
2083
|
+
const cutoff = new Date(nowMs - config.retainRunsMs).toISOString();
|
|
1750
2084
|
await storage.purgeRuns({ olderThan: cutoff, limit: 100 });
|
|
1751
2085
|
}
|
|
1752
2086
|
}
|
|
2087
|
+
if (state.walCheckpointSupported) {
|
|
2088
|
+
if (nowMs - lastCheckpointAt >= CHECKPOINT_INTERVAL_MS) {
|
|
2089
|
+
lastCheckpointAt = nowMs;
|
|
2090
|
+
try {
|
|
2091
|
+
const result = await sql5`PRAGMA wal_checkpoint(TRUNCATE)`.execute(
|
|
2092
|
+
db
|
|
2093
|
+
);
|
|
2094
|
+
const row = result.rows[0];
|
|
2095
|
+
if (row?.busy !== 0) {
|
|
2096
|
+
lastCheckpointAt = nowMs - CHECKPOINT_INTERVAL_MS / 2;
|
|
2097
|
+
}
|
|
2098
|
+
} catch (checkpointError) {
|
|
2099
|
+
eventEmitter.emit({
|
|
2100
|
+
type: "worker:error",
|
|
2101
|
+
error: getErrorMessage(checkpointError),
|
|
2102
|
+
context: "wal-checkpoint"
|
|
2103
|
+
});
|
|
2104
|
+
}
|
|
2105
|
+
}
|
|
2106
|
+
}
|
|
1753
2107
|
} catch (error) {
|
|
1754
2108
|
eventEmitter.emit({
|
|
1755
2109
|
type: "worker:error",
|
|
@@ -1760,7 +2114,10 @@ function createDurably(options) {
|
|
|
1760
2114
|
};
|
|
1761
2115
|
let processOneImpl = null;
|
|
1762
2116
|
const worker = createWorker(
|
|
1763
|
-
{
|
|
2117
|
+
{
|
|
2118
|
+
pollingIntervalMs: config.pollingIntervalMs,
|
|
2119
|
+
maxConcurrentRuns: config.maxConcurrentRuns
|
|
2120
|
+
},
|
|
1764
2121
|
(runtimeOptions) => {
|
|
1765
2122
|
if (!processOneImpl) {
|
|
1766
2123
|
throw new Error("Durably runtime is not initialized");
|
|
@@ -1779,12 +2136,27 @@ function createDurably(options) {
|
|
|
1779
2136
|
preserveSteps: config.preserveSteps,
|
|
1780
2137
|
migrating: null,
|
|
1781
2138
|
migrated: false,
|
|
2139
|
+
walCheckpointSupported: false,
|
|
2140
|
+
probeWalCheckpoint: null,
|
|
1782
2141
|
leaseMs: config.leaseMs,
|
|
1783
2142
|
leaseRenewIntervalMs: config.leaseRenewIntervalMs,
|
|
2143
|
+
pollingIntervalMs: config.pollingIntervalMs,
|
|
1784
2144
|
retainRunsMs: config.retainRunsMs,
|
|
1785
2145
|
releaseBrowserSingleton,
|
|
1786
2146
|
runIdleMaintenance
|
|
1787
2147
|
};
|
|
2148
|
+
if (backend === "generic" && !isBrowserLikeEnvironment()) {
|
|
2149
|
+
state.probeWalCheckpoint = async () => {
|
|
2150
|
+
try {
|
|
2151
|
+
const result = await sql5`PRAGMA wal_checkpoint(PASSIVE)`.execute(db);
|
|
2152
|
+
const row = result.rows[0];
|
|
2153
|
+
if (row && row.log !== -1) {
|
|
2154
|
+
state.walCheckpointSupported = true;
|
|
2155
|
+
}
|
|
2156
|
+
} catch {
|
|
2157
|
+
}
|
|
2158
|
+
};
|
|
2159
|
+
}
|
|
1788
2160
|
const instance = createDurablyInstance(
|
|
1789
2161
|
state,
|
|
1790
2162
|
{}
|
|
@@ -2051,15 +2423,27 @@ function parseLabelsFromParams(searchParams) {
|
|
|
2051
2423
|
}
|
|
2052
2424
|
function parseRunFilter(url) {
|
|
2053
2425
|
const jobNames = url.searchParams.getAll("jobName");
|
|
2054
|
-
const
|
|
2426
|
+
const statusParams = url.searchParams.getAll("status");
|
|
2055
2427
|
const limitParam = url.searchParams.get("limit");
|
|
2056
2428
|
const offsetParam = url.searchParams.get("offset");
|
|
2057
2429
|
const labels = parseLabelsFromParams(url.searchParams);
|
|
2058
|
-
|
|
2059
|
-
|
|
2060
|
-
|
|
2061
|
-
|
|
2062
|
-
|
|
2430
|
+
let status;
|
|
2431
|
+
if (statusParams.length > 0) {
|
|
2432
|
+
for (const s of statusParams) {
|
|
2433
|
+
if (s === "") {
|
|
2434
|
+
return errorResponse(
|
|
2435
|
+
`Invalid status: empty value. Must be one of: ${VALID_STATUSES.join(", ")}`,
|
|
2436
|
+
400
|
|
2437
|
+
);
|
|
2438
|
+
}
|
|
2439
|
+
if (!VALID_STATUSES_SET.has(s)) {
|
|
2440
|
+
return errorResponse(
|
|
2441
|
+
`Invalid status: ${s}. Must be one of: ${VALID_STATUSES.join(", ")}`,
|
|
2442
|
+
400
|
|
2443
|
+
);
|
|
2444
|
+
}
|
|
2445
|
+
}
|
|
2446
|
+
status = statusParams.length === 1 ? statusParams[0] : statusParams;
|
|
2063
2447
|
}
|
|
2064
2448
|
let limit;
|
|
2065
2449
|
if (limitParam) {
|
|
@@ -2080,7 +2464,7 @@ function parseRunFilter(url) {
|
|
|
2080
2464
|
}
|
|
2081
2465
|
return {
|
|
2082
2466
|
jobName: jobNames.length > 0 ? jobNames : void 0,
|
|
2083
|
-
status
|
|
2467
|
+
status,
|
|
2084
2468
|
labels,
|
|
2085
2469
|
limit,
|
|
2086
2470
|
offset
|
|
@@ -2150,9 +2534,13 @@ function createDurablyHandler(durably, options) {
|
|
|
2150
2534
|
const run = await job.trigger(body.input, {
|
|
2151
2535
|
idempotencyKey: body.idempotencyKey,
|
|
2152
2536
|
concurrencyKey: body.concurrencyKey,
|
|
2153
|
-
labels: body.labels
|
|
2537
|
+
labels: body.labels,
|
|
2538
|
+
coalesce: body.coalesce
|
|
2154
2539
|
});
|
|
2155
|
-
const response = {
|
|
2540
|
+
const response = {
|
|
2541
|
+
runId: run.id,
|
|
2542
|
+
disposition: run.disposition
|
|
2543
|
+
};
|
|
2156
2544
|
return jsonResponse(response);
|
|
2157
2545
|
});
|
|
2158
2546
|
}
|
|
@@ -2267,6 +2655,18 @@ function createDurablyHandler(durably, options) {
|
|
|
2267
2655
|
});
|
|
2268
2656
|
}
|
|
2269
2657
|
}),
|
|
2658
|
+
durably.on("run:coalesced", (event) => {
|
|
2659
|
+
if (matchesFilter(event.jobName, event.labels)) {
|
|
2660
|
+
ctrl.enqueue({
|
|
2661
|
+
type: "run:coalesced",
|
|
2662
|
+
runId: event.runId,
|
|
2663
|
+
jobName: event.jobName,
|
|
2664
|
+
labels: event.labels,
|
|
2665
|
+
skippedInput: event.skippedInput,
|
|
2666
|
+
skippedLabels: event.skippedLabels
|
|
2667
|
+
});
|
|
2668
|
+
}
|
|
2669
|
+
}),
|
|
2270
2670
|
durably.on("run:leased", (event) => {
|
|
2271
2671
|
if (matchesFilter(event.jobName, event.labels)) {
|
|
2272
2672
|
ctrl.enqueue({
|
|
@@ -2445,6 +2845,7 @@ export {
|
|
|
2445
2845
|
createDurablyHandler,
|
|
2446
2846
|
createKyselyStore,
|
|
2447
2847
|
defineJob,
|
|
2848
|
+
isDomainEvent,
|
|
2448
2849
|
toClientRun,
|
|
2449
2850
|
withLogPersistence
|
|
2450
2851
|
};
|