@bratsos/workflow-engine 0.1.0 → 0.2.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +274 -513
- package/dist/{chunk-7IITBLFY.js → chunk-NYKMT46J.js} +268 -25
- package/dist/chunk-NYKMT46J.js.map +1 -0
- package/dist/chunk-SPXBCZLB.js +17 -0
- package/dist/chunk-SPXBCZLB.js.map +1 -0
- package/dist/chunk-WZ533CPU.js +1108 -0
- package/dist/chunk-WZ533CPU.js.map +1 -0
- package/dist/{client-5vz5Vv4A.d.ts → client-D4PoxADF.d.ts} +3 -143
- package/dist/client.d.ts +3 -2
- package/dist/{index-DmR3E8D7.d.ts → index-DAzCfO1R.d.ts} +20 -1
- package/dist/index.d.ts +234 -601
- package/dist/index.js +46 -2034
- package/dist/index.js.map +1 -1
- package/dist/{interface-Cv22wvLG.d.ts → interface-MMqhfQQK.d.ts} +69 -2
- package/dist/kernel/index.d.ts +26 -0
- package/dist/kernel/index.js +3 -0
- package/dist/kernel/index.js.map +1 -0
- package/dist/kernel/testing/index.d.ts +44 -0
- package/dist/kernel/testing/index.js +85 -0
- package/dist/kernel/testing/index.js.map +1 -0
- package/dist/persistence/index.d.ts +2 -2
- package/dist/persistence/index.js +2 -1
- package/dist/persistence/prisma/index.d.ts +2 -2
- package/dist/persistence/prisma/index.js +2 -1
- package/dist/plugins-CPC-X0rR.d.ts +421 -0
- package/dist/ports-tU3rzPXJ.d.ts +245 -0
- package/dist/stage-BPw7m9Wx.d.ts +144 -0
- package/dist/testing/index.d.ts +23 -1
- package/dist/testing/index.js +156 -13
- package/dist/testing/index.js.map +1 -1
- package/package.json +11 -1
- package/skills/workflow-engine/SKILL.md +234 -348
- package/skills/workflow-engine/references/03-runtime-setup.md +111 -426
- package/skills/workflow-engine/references/05-persistence-setup.md +32 -0
- package/skills/workflow-engine/references/07-testing-patterns.md +141 -474
- package/skills/workflow-engine/references/08-common-patterns.md +125 -428
- package/dist/chunk-7IITBLFY.js.map +0 -1
|
@@ -0,0 +1,1108 @@
|
|
|
1
|
+
import { z } from 'zod';
|
|
2
|
+
|
|
3
|
+
// src/core/types.ts
// NOTE(review): this zod schema is evaluated but never assigned — the binding
// was most likely dropped by the bundler's tree-shaking while the (side-effect
// free) expression was kept. It appears to describe a persisted batch-polling /
// suspended-stage state (batch id plus poll timing) — confirm against the
// original src/core/types.ts before relying on this description.
z.object({
  batchId: z.string(),
  statusUrl: z.string().optional(),
  apiKey: z.string().optional(),
  submittedAt: z.string(),
  // ISO date string
  pollInterval: z.number(),
  // milliseconds
  maxWaitTime: z.number(),
  // milliseconds
  metadata: z.record(z.string(), z.unknown()).optional()
});
|
|
16
|
+
/**
 * Type guard: a stage execution result counts as "suspended" only when it
 * carries a `suspended` property whose value is exactly `true`.
 *
 * @param {object} result - Value returned by a stage's execute().
 * @returns {boolean} true when the stage suspended itself for later polling.
 */
function isSuspendedResult(result) {
  if (!("suspended" in result)) {
    return false;
  }
  return result.suspended === true;
}
|
|
19
|
+
|
|
20
|
+
// src/kernel/errors.ts
|
|
21
|
+
// src/kernel/errors.ts
/**
 * Raised when a command with the same idempotency key is already being
 * processed, so the duplicate submission must be rejected rather than retried.
 */
var IdempotencyInProgressError = class extends Error {
  /**
   * @param {string} key - The idempotency key that collided.
   * @param {string} commandType - The command type being deduplicated.
   */
  constructor(key, commandType) {
    const description = `Command "${commandType}" with idempotency key "${key}" is already in progress`;
    super(description);
    this.name = "IdempotencyInProgressError";
    this.key = key;
    this.commandType = commandType;
  }
};
|
|
31
|
+
|
|
32
|
+
// src/kernel/helpers/load-workflow-context.ts
|
|
33
|
+
/**
 * Rebuilds the accumulated workflow context from all COMPLETED stages of a run,
 * keyed by stageId. Large outputs stored as blob artifacts (marked by an
 * `_artifactKey` pointer in outputData) are re-hydrated from the blob store;
 * inline object outputs are used as-is; anything else is skipped.
 *
 * @param {string} workflowRunId - Run whose completed stages are loaded.
 * @param {object} deps - Needs persistence.getStagesByRun and blobStore.get.
 * @returns {Promise<object>} stageId -> stage output map.
 */
async function loadWorkflowContext(workflowRunId, deps) {
  const context = {};
  const completed = await deps.persistence.getStagesByRun(workflowRunId, {
    status: "COMPLETED",
    orderBy: "asc"
  });
  for (const stage of completed) {
    const data = stage.outputData;
    const artifactKey = data?._artifactKey;
    if (artifactKey) {
      // Output was offloaded to the blob store; fetch it back.
      context[stage.stageId] = await deps.blobStore.get(artifactKey);
      continue;
    }
    if (data && typeof data === "object") {
      context[stage.stageId] = data;
    }
  }
  return context;
}
|
|
51
|
+
|
|
52
|
+
// src/kernel/helpers/save-stage-output.ts
|
|
53
|
+
// src/kernel/helpers/save-stage-output.ts
/**
 * Persists a stage's output to the blob store under the canonical
 * `workflow-v2/<type>/<run>/<stage>/output.json` key and returns that key so
 * the caller can record it as an artifact pointer.
 *
 * @returns {Promise<string>} The blob key the output was written to.
 */
async function saveStageOutput(runId, workflowType, stageId, output, deps) {
  const artifactKey = ["workflow-v2", workflowType, runId, stageId, "output.json"].join("/");
  await deps.blobStore.put(artifactKey, output);
  return artifactKey;
}
|
|
58
|
+
|
|
59
|
+
// src/kernel/helpers/create-storage-shim.ts
|
|
60
|
+
// src/kernel/helpers/create-storage-shim.ts
/**
 * Builds the storage facade handed to stage code. All operations delegate to
 * deps.blobStore; getStageKey derives the canonical artifact key for a stage
 * of this run (defaulting to the stage's output.json).
 */
function createStorageShim(workflowRunId, workflowType, deps) {
  return {
    save: async (key, data) => {
      await deps.blobStore.put(key, data);
    },
    load: async (key) => deps.blobStore.get(key),
    exists: async (key) => deps.blobStore.has(key),
    delete: async (key) => deps.blobStore.delete(key),
    /**
     * @param {string} stageId - Stage the key belongs to.
     * @param {string} [suffix] - Optional file name; falls back to output.json.
     */
    getStageKey(stageId, suffix) {
      const base = `workflow-v2/${workflowType}/${workflowRunId}/${stageId}`;
      // Note: an empty-string suffix also falls back to output.json.
      return suffix ? `${base}/${suffix}` : `${base}/output.json`;
    }
  };
}
|
|
80
|
+
|
|
81
|
+
// src/kernel/handlers/job-execute.ts
|
|
82
|
+
// src/kernel/handlers/job-execute.ts
/**
 * Picks the input for a stage: the run's original input for stages in the
 * first execution group (index 0), otherwise the previous stage's output from
 * the workflow context when one exists, falling back to the run input.
 */
function resolveStageInput(workflow, stageId, workflowRun, workflowContext) {
  if (workflow.getExecutionGroupIndex(stageId) === 0) {
    return workflowRun.input;
  }
  const previousStageId = workflow.getPreviousStageId(stageId);
  if (!previousStageId) {
    return workflowRun.input;
  }
  const carried = workflowContext[previousStageId];
  return carried === void 0 ? workflowRun.input : carried;
}
|
|
91
|
+
/**
 * Wraps domain events into transactional-outbox rows: the full event becomes
 * the payload, its type and timestamp are lifted into indexed columns, and the
 * run id / causation id tie the row back to the command that produced it.
 */
function toOutboxEvents(workflowRunId, causationId, events) {
  const rows = [];
  for (const event of events) {
    rows.push({
      workflowRunId,
      eventType: event.type,
      payload: event,
      causationId,
      occurredAt: event.timestamp
    });
  }
  return rows;
}
|
|
100
|
+
/**
 * Executes a single workflow stage end-to-end.
 *
 * Flow: resolve workflow + stage from the registry, load the run and its
 * accumulated context, mark the stage RUNNING (with a stage:started outbox
 * event, in one transaction), run the stage, then — depending on the result —
 * persist a SUSPENDED, COMPLETED, or FAILED outcome together with its outbox
 * events in a second transaction. Buffered progress events are flushed into
 * the same outbox batch as the terminal event.
 *
 * Returns { outcome: "suspended" | "completed" | "failed", ... } with an empty
 * _events array: all events from this handler go through the outbox, not the
 * in-memory channel.
 *
 * Throws (rather than returning "failed") when the workflow, stage, or run
 * record cannot be found, and rethrows the original error if recording the
 * failure itself fails.
 */
async function handleJobExecute(command, deps) {
  const { workflowRunId, workflowId, stageId, config } = command;
  const startTime = deps.clock.now().getTime();
  // Causation id links outbox events back to the triggering command; falls
  // back to a random UUID when the command carried no idempotency key.
  const causationId = command.idempotencyKey ?? crypto.randomUUID();
  const workflow = deps.registry.getWorkflow(workflowId);
  if (!workflow)
    throw new Error(`Workflow ${workflowId} not found in registry`);
  const stageDef = workflow.getStage(stageId);
  if (!stageDef)
    throw new Error(`Stage ${stageId} not found in workflow ${workflowId}`);
  const workflowRun = await deps.persistence.getRun(workflowRunId);
  if (!workflowRun) throw new Error(`WorkflowRun ${workflowRunId} not found`);
  const workflowContext = await loadWorkflowContext(workflowRunId, deps);
  // Transaction 1: upsert the stage record to RUNNING and enqueue the
  // stage:started event atomically. Upsert (not create) because a suspended
  // stage re-executes through this same path.
  const stageRecord = await deps.persistence.withTransaction(async (tx) => {
    const record = await tx.upsertStage({
      workflowRunId,
      stageId,
      create: {
        workflowRunId,
        stageId,
        stageName: stageDef.name,
        stageNumber: workflow.getStageIndex(stageId) + 1,
        executionGroup: workflow.getExecutionGroupIndex(stageId),
        status: "RUNNING",
        startedAt: deps.clock.now(),
        config
      },
      update: {
        status: "RUNNING",
        startedAt: deps.clock.now()
      }
    });
    await tx.appendOutboxEvents(
      toOutboxEvents(workflowRunId, causationId, [
        {
          type: "stage:started",
          timestamp: deps.clock.now(),
          workflowRunId,
          stageId,
          stageName: stageDef.name,
          stageNumber: record.stageNumber
        }
      ])
    );
    return record;
  });
  // Progress updates are buffered and only written with the terminal event,
  // so they never hit the outbox if the process dies mid-stage.
  const progressEvents = [];
  try {
    const rawInput = resolveStageInput(
      workflow,
      stageId,
      workflowRun,
      workflowContext
    );
    const validatedInput = stageDef.inputSchema.parse(rawInput);
    let stageConfig = config[stageId] || {};
    try {
      if (stageDef.configSchema) {
        stageConfig = stageDef.configSchema.parse(stageConfig);
      }
    } catch {
      // Deliberate: an invalid per-stage config falls back to the raw value
      // rather than failing the stage.
    }
    // Best-effort structured logging; a logging failure never fails the stage.
    const logFn = async (level, message, meta) => {
      await deps.persistence.createLog({
        workflowRunId,
        workflowStageId: stageRecord.id,
        level,
        message,
        metadata: meta
      }).catch(() => {
      });
    };
    // Execution context handed to user stage code.
    const context = {
      workflowRunId,
      stageId,
      stageNumber: stageRecord.stageNumber,
      stageName: stageDef.name,
      stageRecordId: stageRecord.id,
      input: validatedInput,
      config: stageConfig,
      // Present only when resuming a previously SUSPENDED stage.
      resumeState: stageRecord.suspendedState,
      onProgress: (update) => {
        progressEvents.push({
          type: "stage:progress",
          timestamp: deps.clock.now(),
          workflowRunId,
          stageId,
          progress: update.progress,
          message: update.message,
          details: update.details
        });
      },
      // log and onLog are aliases for the same best-effort logger.
      onLog: logFn,
      log: logFn,
      storage: createStorageShim(workflowRunId, workflowRun.workflowType, deps),
      workflowContext
    };
    const result = await stageDef.execute(context);
    if (isSuspendedResult(result)) {
      // Stage asked to be parked and polled later (e.g. awaiting an external
      // batch job). Compute when the poller should next check it.
      const { state, pollConfig, metrics } = result;
      const nextPollAt = new Date(
        pollConfig.nextPollAt?.getTime() ?? deps.clock.now().getTime() + (pollConfig.pollInterval || 6e4)
      );
      // Transaction 2a: persist SUSPENDED state + buffered progress +
      // stage:suspended event atomically.
      await deps.persistence.withTransaction(async (tx) => {
        await tx.updateStage(stageRecord.id, {
          status: "SUSPENDED",
          suspendedState: state,
          nextPollAt,
          pollInterval: pollConfig.pollInterval,
          maxWaitUntil: pollConfig.maxWaitTime ? new Date(deps.clock.now().getTime() + pollConfig.maxWaitTime) : void 0,
          metrics
        });
        const suspendedEvent = {
          type: "stage:suspended",
          timestamp: deps.clock.now(),
          workflowRunId,
          stageId,
          stageName: stageDef.name,
          nextPollAt
        };
        await tx.appendOutboxEvents(
          toOutboxEvents(workflowRunId, causationId, [
            ...progressEvents,
            suspendedEvent
          ])
        );
      });
      return { outcome: "suspended", nextPollAt, _events: [] };
    } else {
      const duration = deps.clock.now().getTime() - startTime;
      // Output goes to the blob store first; only the artifact pointer is
      // stored in the stage row. Note the blob write is outside the
      // transaction — an orphaned blob is possible if the commit fails.
      const outputKey = await saveStageOutput(
        workflowRunId,
        workflowRun.workflowType,
        stageId,
        result.output,
        deps
      );
      // Transaction 2b: mark COMPLETED + flush progress + stage:completed.
      await deps.persistence.withTransaction(async (tx) => {
        await tx.updateStage(stageRecord.id, {
          status: "COMPLETED",
          completedAt: deps.clock.now(),
          duration,
          outputData: { _artifactKey: outputKey },
          metrics: result.metrics,
          embeddingInfo: result.embeddings
        });
        const completedEvent = {
          type: "stage:completed",
          timestamp: deps.clock.now(),
          workflowRunId,
          stageId,
          stageName: stageDef.name,
          duration
        };
        await tx.appendOutboxEvents(
          toOutboxEvents(workflowRunId, causationId, [
            ...progressEvents,
            completedEvent
          ])
        );
      });
      return {
        outcome: "completed",
        output: result.output,
        _events: []
      };
    }
  } catch (error) {
    const errorMessage = error instanceof Error ? error.message : String(error);
    const duration = deps.clock.now().getTime() - startTime;
    try {
      // Transaction 2c: mark FAILED + flush progress + stage:failed.
      await deps.persistence.withTransaction(async (tx) => {
        await tx.updateStage(stageRecord.id, {
          status: "FAILED",
          completedAt: deps.clock.now(),
          duration,
          errorMessage
        });
        const failedEvent = {
          type: "stage:failed",
          timestamp: deps.clock.now(),
          workflowRunId,
          stageId,
          stageName: stageDef.name,
          error: errorMessage
        };
        await tx.appendOutboxEvents(
          toOutboxEvents(workflowRunId, causationId, [
            ...progressEvents,
            failedEvent
          ])
        );
      });
    } catch {
      // If we cannot even record the failure, surface the ORIGINAL error so
      // the job transport retries the stage.
      throw error;
    }
    // Best-effort error log; ignore logging failures.
    await deps.persistence.createLog({
      workflowRunId,
      workflowStageId: stageRecord.id,
      level: "ERROR",
      message: errorMessage
    }).catch(() => {
    });
    return { outcome: "failed", error: errorMessage, _events: [] };
  }
}
|
|
306
|
+
|
|
307
|
+
// src/kernel/handlers/lease-reap-stale.ts
|
|
308
|
+
// src/kernel/handlers/lease-reap-stale.ts
/**
 * Releases job leases that have been held longer than the stale threshold
 * (e.g. a worker died mid-job). Delegates entirely to the job transport and
 * emits no domain events.
 *
 * @returns {Promise<{released: number, _events: []}>} Count of released jobs.
 */
async function handleLeaseReapStale(command, deps) {
  const releasedCount = await deps.jobTransport.releaseStaleJobs(
    command.staleThresholdMs
  );
  return { released: releasedCount, _events: [] };
}
|
|
314
|
+
|
|
315
|
+
// src/kernel/handlers/outbox-flush.ts
|
|
316
|
+
// src/kernel/handlers/outbox-flush.ts
/**
 * Publishes a batch of pending outbox events to the event sink. Events that
 * fail to emit get their retry count bumped and, once the sink's retry limit
 * (default 3) is reached, are moved to the dead-letter queue. Successfully
 * emitted events are marked published in a single batch at the end.
 *
 * @returns {Promise<{published: number, _events: []}>}
 */
async function handleOutboxFlush(command, deps) {
  const batchSize = command.maxEvents ?? 100;
  const pending = await deps.persistence.getUnpublishedOutboxEvents(batchSize);
  const retryLimit = deps.eventSink.maxRetries ?? 3;
  const published = [];
  for (const record of pending) {
    try {
      await deps.eventSink.emit(record.payload);
      published.push(record.id);
    } catch {
      // Emit failed: count the attempt; dead-letter once the limit is hit.
      const attempts = await deps.persistence.incrementOutboxRetryCount(
        record.id
      );
      if (attempts >= retryLimit) {
        await deps.persistence.moveOutboxEventToDLQ(record.id);
      }
    }
  }
  if (published.length > 0) {
    await deps.persistence.markOutboxEventsPublished(published);
  }
  return { published: published.length, _events: [] };
}
|
|
339
|
+
|
|
340
|
+
// src/kernel/handlers/plugin-replay-dlq.ts
|
|
341
|
+
// src/kernel/handlers/plugin-replay-dlq.ts
/**
 * Re-queues up to maxEvents (default 100) dead-lettered outbox events for
 * another delivery attempt. Delegates to the persistence layer.
 *
 * @returns {Promise<{replayed: number, _events: []}>}
 */
async function handlePluginReplayDLQ(command, deps) {
  const batchSize = command.maxEvents ?? 100;
  const replayedCount = await deps.persistence.replayDLQEvents(batchSize);
  return { replayed: replayedCount, _events: [] };
}
|
|
346
|
+
|
|
347
|
+
// src/kernel/handlers/run-cancel.ts
|
|
348
|
+
// src/kernel/handlers/run-cancel.ts
// Run statuses that can no longer be cancelled.
var TERMINAL_STATUSES = /* @__PURE__ */ new Set(["COMPLETED", "FAILED", "CANCELLED"]);
/**
 * Cancels a workflow run. A missing run or one already in a terminal status
 * is a no-op ({ cancelled: false }); otherwise the run is marked CANCELLED
 * and a workflow:cancelled event is returned on the in-memory channel.
 */
async function handleRunCancel(command, deps) {
  const { workflowRunId, reason } = command;
  const run = await deps.persistence.getRun(workflowRunId);
  if (!run || TERMINAL_STATUSES.has(run.status)) {
    return { cancelled: false, _events: [] };
  }
  await deps.persistence.updateRun(workflowRunId, {
    status: "CANCELLED",
    completedAt: deps.clock.now()
  });
  const cancelledEvent = {
    type: "workflow:cancelled",
    timestamp: deps.clock.now(),
    workflowRunId,
    reason
  };
  return { cancelled: true, _events: [cancelledEvent] };
}
|
|
370
|
+
|
|
371
|
+
// src/kernel/handlers/run-claim-pending.ts
|
|
372
|
+
// src/kernel/handlers/run-claim-pending.ts
/**
 * Claims up to maxClaims (default 10) PENDING runs and starts each one:
 * creates PENDING stage records for execution group 1 and enqueues their jobs.
 * Runs whose workflow is unknown or has an empty first execution group are
 * marked FAILED (with a structured error in the run output, a best-effort
 * error log, and a workflow:failed event) and skipped.
 *
 * @returns {Promise<{claimed: Array, _events: Array}>} claimed run summaries
 *   and the workflow:started / workflow:failed events produced.
 */
async function handleRunClaimPending(command, deps) {
  const limit = command.maxClaims ?? 10;
  const claimed = [];
  const events = [];
  // Shared failure path: mark the run FAILED, log it (best-effort), and
  // record a workflow:failed event.
  const failRun = async (run, code, message) => {
    const failedAt = deps.clock.now();
    await deps.persistence.updateRun(run.id, {
      status: "FAILED",
      completedAt: failedAt,
      output: {
        error: {
          code,
          message,
          workerId: command.workerId
        }
      }
    });
    await deps.persistence.createLog({
      workflowRunId: run.id,
      level: "ERROR",
      message,
      metadata: {
        workerId: command.workerId,
        code
      }
    }).catch(() => {
    });
    events.push({
      type: "workflow:failed",
      timestamp: failedAt,
      workflowRunId: run.id,
      error: message
    });
  };
  for (let slot = 0; slot < limit; slot++) {
    const run = await deps.persistence.claimNextPendingRun();
    if (!run) break;
    const workflow = deps.registry.getWorkflow(run.workflowId);
    if (!workflow) {
      await failRun(
        run,
        "WORKFLOW_NOT_FOUND",
        `Workflow ${run.workflowId} not found in registry`
      );
      continue;
    }
    const firstGroup = workflow.getStagesInExecutionGroup(1);
    if (firstGroup.length === 0) {
      await failRun(
        run,
        "EMPTY_STAGE_GRAPH",
        `Workflow ${run.workflowId} has no stages in execution group 1`
      );
      continue;
    }
    // Persist PENDING stage rows before enqueueing any jobs.
    for (const stage of firstGroup) {
      await deps.persistence.createStage({
        workflowRunId: run.id,
        stageId: stage.id,
        stageName: stage.name,
        stageNumber: workflow.getStageIndex(stage.id) + 1,
        executionGroup: 1,
        status: "PENDING",
        config: run.config?.[stage.id] || {}
      });
    }
    const jobIds = await deps.jobTransport.enqueueParallel(
      firstGroup.map((stage) => ({
        workflowRunId: run.id,
        workflowId: run.workflowId,
        stageId: stage.id,
        priority: run.priority,
        payload: { config: run.config || {} }
      }))
    );
    events.push({
      type: "workflow:started",
      timestamp: deps.clock.now(),
      workflowRunId: run.id
    });
    claimed.push({
      workflowRunId: run.id,
      workflowId: run.workflowId,
      jobIds
    });
  }
  return { claimed, _events: events };
}
|
|
478
|
+
|
|
479
|
+
// src/kernel/handlers/run-create.ts
|
|
480
|
+
// src/kernel/handlers/run-create.ts
/**
 * Creates a new workflow run in PENDING state after validating the input
 * against the workflow's input schema and the merged (defaults + overrides)
 * config against the workflow's config validator.
 *
 * @throws {Error} when the workflow is unknown, the input fails schema
 *   validation, or the merged config is invalid.
 * @returns {Promise<{workflowRunId: string, status: "PENDING", _events: Array}>}
 */
async function handleRunCreate(command, deps) {
  const { workflowId, input } = command;
  const workflow = deps.registry.getWorkflow(workflowId);
  if (!workflow) {
    throw new Error(`Workflow ${workflowId} not found in registry`);
  }
  // Validate up front; the parse result is discarded — the raw input is what
  // gets persisted.
  try {
    workflow.inputSchema.parse(input);
  } catch (error) {
    throw new Error(`Invalid workflow input: ${error}`);
  }
  // Caller-supplied config keys win over workflow defaults.
  const config = {
    ...(workflow.getDefaultConfig?.() ?? {}),
    ...command.config
  };
  const validation = workflow.validateConfig(config);
  if (!validation.valid) {
    const detail = validation.errors
      .map((item) => `${item.stageId}: ${item.error}`)
      .join(", ");
    throw new Error(`Invalid workflow config: ${detail}`);
  }
  const run = await deps.persistence.createRun({
    workflowId,
    workflowName: workflow.name,
    workflowType: workflowId,
    input,
    config,
    priority: command.priority ?? 5,
    metadata: command.metadata
  });
  const createdEvent = {
    type: "workflow:created",
    timestamp: deps.clock.now(),
    workflowRunId: run.id,
    workflowId
  };
  return { workflowRunId: run.id, status: "PENDING", _events: [createdEvent] };
}
|
|
520
|
+
|
|
521
|
+
// src/kernel/handlers/run-rerun-from.ts
|
|
522
|
+
// src/kernel/handlers/run-rerun-from.ts
/**
 * Reruns a COMPLETED or FAILED workflow run starting from a given stage:
 * deletes that stage's execution group and every later group (including their
 * blob artifacts, best-effort), flips the run back to RUNNING, recreates
 * PENDING stage records for the target group, and enqueues their jobs.
 *
 * Returns the run id, the stage rerun started from, the stageIds of the
 * deleted stage records, and a workflow:started event.
 *
 * Throws when the run is missing, not in a rerunnable state, the workflow or
 * stage is unknown, or (for groups > 1) no prior stages exist to feed input
 * to the target group.
 */
async function handleRunRerunFrom(command, deps) {
  const { workflowRunId, fromStageId } = command;
  const events = [];
  const run = await deps.persistence.getRun(workflowRunId);
  if (!run) throw new Error(`WorkflowRun ${workflowRunId} not found`);
  if (run.status !== "COMPLETED" && run.status !== "FAILED") {
    throw new Error(
      `Cannot rerun workflow in ${run.status} state. Must be COMPLETED or FAILED.`
    );
  }
  const workflow = deps.registry.getWorkflow(run.workflowId);
  if (!workflow)
    throw new Error(`Workflow ${run.workflowId} not found in registry`);
  const stageDef = workflow.getStage(fromStageId);
  if (!stageDef) {
    throw new Error(
      `Stage ${fromStageId} not found in workflow ${run.workflowId}`
    );
  }
  const targetGroup = workflow.getExecutionGroupIndex(fromStageId);
  const existingStages = await deps.persistence.getStagesByRun(workflowRunId);
  // For any group after the first, earlier groups must have run so the target
  // group has upstream output to consume.
  if (targetGroup > 1) {
    const priorStages = existingStages.filter(
      (s) => s.executionGroup < targetGroup
    );
    if (priorStages.length === 0) {
      throw new Error(
        `Cannot rerun from stage ${fromStageId}: previous stages have not been executed`
      );
    }
  }
  // Everything at or after the target group is discarded and re-executed.
  const stagesToDelete = existingStages.filter(
    (s) => s.executionGroup >= targetGroup
  );
  const deletedStageIds = stagesToDelete.map((s) => s.stageId);
  // Delete blob artifacts first (best-effort: a missing blob must not block
  // the rerun), then remove the stage rows. Note the two passes are separate
  // and not transactional — a crash between them can leave orphaned rows or
  // blobs.
  for (const stage of stagesToDelete) {
    const outputRef = stage.outputData;
    if (outputRef?._artifactKey) {
      await deps.blobStore.delete(outputRef._artifactKey).catch(() => {
      });
    }
  }
  for (const stage of stagesToDelete) {
    await deps.persistence.deleteStage(stage.id);
  }
  await deps.persistence.updateRun(workflowRunId, {
    status: "RUNNING",
    completedAt: null
  });
  // Recreate PENDING records for the target group and enqueue their jobs.
  const targetStages = workflow.getStagesInExecutionGroup(targetGroup);
  for (const stage of targetStages) {
    await deps.persistence.createStage({
      workflowRunId,
      stageId: stage.id,
      stageName: stage.name,
      stageNumber: workflow.getStageIndex(stage.id) + 1,
      executionGroup: targetGroup,
      status: "PENDING",
      config: run.config?.[stage.id] || {}
    });
  }
  await deps.jobTransport.enqueueParallel(
    targetStages.map((stage) => ({
      workflowRunId,
      workflowId: run.workflowId,
      stageId: stage.id,
      priority: run.priority,
      payload: { config: run.config || {} }
    }))
  );
  events.push({
    type: "workflow:started",
    timestamp: deps.clock.now(),
    workflowRunId
  });
  return {
    workflowRunId,
    fromStageId,
    deletedStages: deletedStageIds,
    _events: events
  };
}
|
|
604
|
+
|
|
605
|
+
// src/kernel/handlers/run-transition.ts
|
|
606
|
+
// Run statuses that are final: handleRunTransition never advances a run in
// one of these states. (Duplicate of the Set in run-cancel; the "2" suffix
// comes from bundler scope-hoisting.)
var TERMINAL_STATUSES2 = /* @__PURE__ */ new Set(["COMPLETED", "FAILED", "CANCELLED"]);
// Stage statuses that mean work is still outstanding for the run, so no
// transition should happen yet.
var ACTIVE_STATUSES = /* @__PURE__ */ new Set(["RUNNING", "PENDING", "SUSPENDED"]);
|
|
608
|
+
/**
 * Starts one execution group of a run: persists a PENDING stage record for
 * every stage in the group, then enqueues all of their jobs in parallel.
 *
 * @returns {Promise<Array>} Job ids returned by the transport.
 */
async function enqueueExecutionGroup(run, workflow, groupIndex, deps) {
  const groupStages = workflow.getStagesInExecutionGroup(groupIndex);
  // Stage rows are written before any job exists, so a worker picking up a
  // job always finds its record.
  for (const stage of groupStages) {
    await deps.persistence.createStage({
      workflowRunId: run.id,
      stageId: stage.id,
      stageName: stage.name,
      stageNumber: workflow.getStageIndex(stage.id) + 1,
      executionGroup: groupIndex,
      status: "PENDING",
      config: run.config?.[stage.id] || {}
    });
  }
  const jobs = groupStages.map((stage) => ({
    workflowRunId: run.id,
    workflowId: run.workflowId,
    stageId: stage.id,
    priority: run.priority,
    payload: { config: run.config || {} }
  }));
  return deps.jobTransport.enqueueParallel(jobs);
}
|
|
631
|
+
/**
 * State machine step for a run, invoked after stage completions. Decides one
 * of four actions:
 *  - "noop": run missing / terminal / workflow unknown / stages still active;
 *  - "advanced": no stage rows yet (start group 1) or the next execution
 *    group exists (start it);
 *  - "failed": no active stages and at least one FAILED stage;
 *  - "completed": all groups done — aggregate metrics and finish the run.
 */
async function handleRunTransition(command, deps) {
  const events = [];
  const run = await deps.persistence.getRun(command.workflowRunId);
  if (!run) {
    return { action: "noop", _events: [] };
  }
  if (TERMINAL_STATUSES2.has(run.status)) {
    return { action: "noop", _events: [] };
  }
  const workflow = deps.registry.getWorkflow(run.workflowId);
  if (!workflow) {
    // Unknown workflow: silently no-op (the claim handler is responsible for
    // failing such runs).
    return { action: "noop", _events: [] };
  }
  const stages = await deps.persistence.getStagesByRun(command.workflowRunId);
  if (stages.length === 0) {
    // Nothing started yet: kick off execution group 1.
    await enqueueExecutionGroup(run, workflow, 1, deps);
    events.push({
      type: "workflow:started",
      timestamp: deps.clock.now(),
      workflowRunId: run.id
    });
    return { action: "advanced", nextGroup: 1, _events: events };
  }
  // Wait until every stage of the current group has settled.
  const hasActive = stages.some((s) => ACTIVE_STATUSES.has(s.status));
  if (hasActive) {
    return { action: "noop", _events: [] };
  }
  // Any failed stage fails the whole run.
  const failedStage = stages.find((s) => s.status === "FAILED");
  if (failedStage) {
    await deps.persistence.updateRun(command.workflowRunId, {
      status: "FAILED",
      completedAt: deps.clock.now()
    });
    events.push({
      type: "workflow:failed",
      timestamp: deps.clock.now(),
      workflowRunId: command.workflowRunId,
      error: failedStage.errorMessage || "Stage failed"
    });
    return { action: "failed", _events: events };
  }
  // Highest execution group that has stage records so far.
  const maxGroup = stages.reduce(
    (max, s) => s.executionGroup > max ? s.executionGroup : max,
    0
  );
  const nextGroupStages = workflow.getStagesInExecutionGroup(maxGroup + 1);
  if (nextGroupStages.length > 0) {
    // More groups remain: start the next one.
    await enqueueExecutionGroup(run, workflow, maxGroup + 1, deps);
    return {
      action: "advanced",
      nextGroup: maxGroup + 1,
      _events: events
    };
  }
  // No further groups: the run is complete. Roll up per-stage metrics.
  let totalCost = 0;
  let totalTokens = 0;
  for (const stage of stages) {
    const metrics = stage.metrics;
    if (metrics) {
      totalCost += metrics.totalCost ?? 0;
      totalTokens += metrics.totalTokens ?? 0;
    }
  }
  // Duration is wall-clock from run creation, not sum of stage durations.
  const duration = deps.clock.now().getTime() - run.createdAt.getTime();
  await deps.persistence.updateRun(command.workflowRunId, {
    status: "COMPLETED",
    completedAt: deps.clock.now(),
    duration,
    totalCost,
    totalTokens
  });
  events.push({
    type: "workflow:completed",
    timestamp: deps.clock.now(),
    workflowRunId: command.workflowRunId,
    duration,
    totalCost,
    totalTokens
  });
  return { action: "completed", _events: events };
}
|
|
712
|
+
|
|
713
|
+
// src/kernel/handlers/stage-poll-suspended.ts
|
|
714
|
+
async function handleStagePollSuspended(command, deps) {
|
|
715
|
+
const events = [];
|
|
716
|
+
const maxChecks = command.maxChecks ?? 50;
|
|
717
|
+
const suspendedStages = await deps.persistence.getSuspendedStages(
|
|
718
|
+
deps.clock.now()
|
|
719
|
+
);
|
|
720
|
+
const stagesToCheck = suspendedStages.slice(0, maxChecks);
|
|
721
|
+
let checked = 0;
|
|
722
|
+
let resumed = 0;
|
|
723
|
+
let failed = 0;
|
|
724
|
+
const resumedWorkflowRunIds = /* @__PURE__ */ new Set();
|
|
725
|
+
for (const stageRecord of stagesToCheck) {
|
|
726
|
+
checked++;
|
|
727
|
+
const run = await deps.persistence.getRun(stageRecord.workflowRunId);
|
|
728
|
+
if (!run) continue;
|
|
729
|
+
const workflow = deps.registry.getWorkflow(run.workflowId);
|
|
730
|
+
if (!workflow) {
|
|
731
|
+
await deps.persistence.updateStage(stageRecord.id, {
|
|
732
|
+
status: "FAILED",
|
|
733
|
+
completedAt: deps.clock.now(),
|
|
734
|
+
errorMessage: `Workflow ${run.workflowId} not found in registry`
|
|
735
|
+
});
|
|
736
|
+
failed++;
|
|
737
|
+
events.push({
|
|
738
|
+
type: "stage:failed",
|
|
739
|
+
timestamp: deps.clock.now(),
|
|
740
|
+
workflowRunId: stageRecord.workflowRunId,
|
|
741
|
+
stageId: stageRecord.stageId,
|
|
742
|
+
stageName: stageRecord.stageName,
|
|
743
|
+
error: `Workflow ${run.workflowId} not found in registry`
|
|
744
|
+
});
|
|
745
|
+
continue;
|
|
746
|
+
}
|
|
747
|
+
const stageDef = workflow.getStage(stageRecord.stageId);
|
|
748
|
+
if (!stageDef || !stageDef.checkCompletion) {
|
|
749
|
+
const errorMsg = !stageDef ? `Stage ${stageRecord.stageId} not found in workflow ${run.workflowId}` : `Stage ${stageRecord.stageId} does not support checkCompletion`;
|
|
750
|
+
await deps.persistence.updateStage(stageRecord.id, {
|
|
751
|
+
status: "FAILED",
|
|
752
|
+
completedAt: deps.clock.now(),
|
|
753
|
+
errorMessage: errorMsg
|
|
754
|
+
});
|
|
755
|
+
failed++;
|
|
756
|
+
events.push({
|
|
757
|
+
type: "stage:failed",
|
|
758
|
+
timestamp: deps.clock.now(),
|
|
759
|
+
workflowRunId: stageRecord.workflowRunId,
|
|
760
|
+
stageId: stageRecord.stageId,
|
|
761
|
+
stageName: stageRecord.stageName,
|
|
762
|
+
error: errorMsg
|
|
763
|
+
});
|
|
764
|
+
continue;
|
|
765
|
+
}
|
|
766
|
+
const storage = createStorageShim(
|
|
767
|
+
stageRecord.workflowRunId,
|
|
768
|
+
run.workflowType,
|
|
769
|
+
deps
|
|
770
|
+
);
|
|
771
|
+
const logFn = async (level, message, meta) => {
|
|
772
|
+
await deps.persistence.createLog({
|
|
773
|
+
workflowRunId: stageRecord.workflowRunId,
|
|
774
|
+
workflowStageId: stageRecord.id,
|
|
775
|
+
level,
|
|
776
|
+
message,
|
|
777
|
+
metadata: meta
|
|
778
|
+
}).catch(() => {
|
|
779
|
+
});
|
|
780
|
+
};
|
|
781
|
+
const checkContext = {
|
|
782
|
+
workflowRunId: run.id,
|
|
783
|
+
stageId: stageRecord.stageId,
|
|
784
|
+
stageRecordId: stageRecord.id,
|
|
785
|
+
config: stageRecord.config || {},
|
|
786
|
+
log: logFn,
|
|
787
|
+
onLog: logFn,
|
|
788
|
+
storage
|
|
789
|
+
};
|
|
790
|
+
try {
|
|
791
|
+
const checkResult = await stageDef.checkCompletion(
|
|
792
|
+
stageRecord.suspendedState,
|
|
793
|
+
checkContext
|
|
794
|
+
);
|
|
795
|
+
if (checkResult.error) {
|
|
796
|
+
await deps.persistence.updateStage(stageRecord.id, {
|
|
797
|
+
status: "FAILED",
|
|
798
|
+
completedAt: deps.clock.now(),
|
|
799
|
+
errorMessage: checkResult.error,
|
|
800
|
+
nextPollAt: null
|
|
801
|
+
});
|
|
802
|
+
await deps.persistence.updateRun(stageRecord.workflowRunId, {
|
|
803
|
+
status: "FAILED",
|
|
804
|
+
completedAt: deps.clock.now()
|
|
805
|
+
});
|
|
806
|
+
failed++;
|
|
807
|
+
events.push({
|
|
808
|
+
type: "stage:failed",
|
|
809
|
+
timestamp: deps.clock.now(),
|
|
810
|
+
workflowRunId: stageRecord.workflowRunId,
|
|
811
|
+
stageId: stageRecord.stageId,
|
|
812
|
+
stageName: stageRecord.stageName,
|
|
813
|
+
error: checkResult.error
|
|
814
|
+
});
|
|
815
|
+
events.push({
|
|
816
|
+
type: "workflow:failed",
|
|
817
|
+
timestamp: deps.clock.now(),
|
|
818
|
+
workflowRunId: stageRecord.workflowRunId,
|
|
819
|
+
error: checkResult.error
|
|
820
|
+
});
|
|
821
|
+
} else if (checkResult.ready) {
|
|
822
|
+
let outputRef;
|
|
823
|
+
if (checkResult.output !== void 0) {
|
|
824
|
+
let validatedOutput = checkResult.output;
|
|
825
|
+
try {
|
|
826
|
+
validatedOutput = stageDef.outputSchema.parse(checkResult.output);
|
|
827
|
+
} catch {
|
|
828
|
+
}
|
|
829
|
+
const outputKey = await saveStageOutput(
|
|
830
|
+
stageRecord.workflowRunId,
|
|
831
|
+
run.workflowType,
|
|
832
|
+
stageRecord.stageId,
|
|
833
|
+
validatedOutput,
|
|
834
|
+
deps
|
|
835
|
+
);
|
|
836
|
+
outputRef = { _artifactKey: outputKey };
|
|
837
|
+
}
|
|
838
|
+
const duration = deps.clock.now().getTime() - (stageRecord.startedAt?.getTime() ?? deps.clock.now().getTime());
|
|
839
|
+
await deps.persistence.updateStage(stageRecord.id, {
|
|
840
|
+
status: "COMPLETED",
|
|
841
|
+
completedAt: deps.clock.now(),
|
|
842
|
+
duration,
|
|
843
|
+
outputData: outputRef,
|
|
844
|
+
nextPollAt: null,
|
|
845
|
+
metrics: checkResult.metrics,
|
|
846
|
+
embeddingInfo: checkResult.embeddings
|
|
847
|
+
});
|
|
848
|
+
resumed++;
|
|
849
|
+
resumedWorkflowRunIds.add(stageRecord.workflowRunId);
|
|
850
|
+
events.push({
|
|
851
|
+
type: "stage:completed",
|
|
852
|
+
timestamp: deps.clock.now(),
|
|
853
|
+
workflowRunId: stageRecord.workflowRunId,
|
|
854
|
+
stageId: stageRecord.stageId,
|
|
855
|
+
stageName: stageRecord.stageName,
|
|
856
|
+
duration
|
|
857
|
+
});
|
|
858
|
+
} else {
|
|
859
|
+
const pollInterval = checkResult.nextCheckIn ?? stageRecord.pollInterval ?? 6e4;
|
|
860
|
+
const nextPollAt = new Date(deps.clock.now().getTime() + pollInterval);
|
|
861
|
+
await deps.persistence.updateStage(stageRecord.id, {
|
|
862
|
+
nextPollAt
|
|
863
|
+
});
|
|
864
|
+
}
|
|
865
|
+
} catch (error) {
|
|
866
|
+
const errorMessage = error instanceof Error ? error.message : String(error);
|
|
867
|
+
await deps.persistence.updateStage(stageRecord.id, {
|
|
868
|
+
status: "FAILED",
|
|
869
|
+
completedAt: deps.clock.now(),
|
|
870
|
+
errorMessage,
|
|
871
|
+
nextPollAt: null
|
|
872
|
+
});
|
|
873
|
+
await deps.persistence.updateRun(stageRecord.workflowRunId, {
|
|
874
|
+
status: "FAILED",
|
|
875
|
+
completedAt: deps.clock.now()
|
|
876
|
+
});
|
|
877
|
+
failed++;
|
|
878
|
+
events.push({
|
|
879
|
+
type: "stage:failed",
|
|
880
|
+
timestamp: deps.clock.now(),
|
|
881
|
+
workflowRunId: stageRecord.workflowRunId,
|
|
882
|
+
stageId: stageRecord.stageId,
|
|
883
|
+
stageName: stageRecord.stageName,
|
|
884
|
+
error: errorMessage
|
|
885
|
+
});
|
|
886
|
+
events.push({
|
|
887
|
+
type: "workflow:failed",
|
|
888
|
+
timestamp: deps.clock.now(),
|
|
889
|
+
workflowRunId: stageRecord.workflowRunId,
|
|
890
|
+
error: errorMessage
|
|
891
|
+
});
|
|
892
|
+
}
|
|
893
|
+
}
|
|
894
|
+
return {
|
|
895
|
+
checked,
|
|
896
|
+
resumed,
|
|
897
|
+
failed,
|
|
898
|
+
resumedWorkflowRunIds: [...resumedWorkflowRunIds],
|
|
899
|
+
_events: events
|
|
900
|
+
};
|
|
901
|
+
}
|
|
902
|
+
|
|
903
|
+
// src/kernel/kernel.ts
|
|
904
|
+
/**
 * Extracts the caller-supplied idempotency key from a kernel command.
 *
 * Only `run.create` and `job.execute` commands carry an idempotency key;
 * every other command type yields `undefined` (no dedup applies).
 *
 * @param {object} command - A kernel command with a `type` discriminant.
 * @returns {string | undefined} The idempotency key, if the command type supports one.
 */
function getIdempotencyKey(command) {
  switch (command.type) {
    case "run.create":
    case "job.execute":
      return command.idempotencyKey;
    default:
      return void 0;
  }
}
|
|
909
|
+
/**
 * Builds the workflow kernel: a single `dispatch(command)` entry point that
 * routes commands to their handlers, wraps state-mutating commands in a
 * persistence transaction, appends emitted events to the outbox, and
 * enforces idempotency for `run.create` / `job.execute` commands.
 *
 * @param {object} config - Kernel ports: persistence, blobStore, jobTransport,
 *   eventSink, scheduler, clock, registry.
 * @returns {{ dispatch: (command: object) => Promise<object> }}
 */
function createKernel(config) {
  const {
    persistence,
    blobStore,
    jobTransport,
    eventSink,
    scheduler,
    clock,
    registry
  } = config;
  // Shared dependency bag passed to every handler.
  const deps = {
    persistence,
    blobStore,
    jobTransport,
    eventSink,
    scheduler,
    clock,
    registry
  };
  async function dispatch(command) {
    // outbox.flush and plugin.replayDLQ run OUTSIDE a transaction and without
    // idempotency bookkeeping; their internal `_events` are stripped before
    // the result is returned to the caller.
    if (command.type === "outbox.flush") {
      const result = await handleOutboxFlush(
        command,
        deps
      );
      const { _events: _, ...publicResult } = result;
      return publicResult;
    }
    if (command.type === "plugin.replayDLQ") {
      const result = await handlePluginReplayDLQ(
        command,
        deps
      );
      const { _events: _, ...publicResult } = result;
      return publicResult;
    }
    if (command.type === "job.execute") {
      // job.execute gets its own idempotency path (non-transactional):
      // acquire key -> run handler -> complete key, releasing on failure.
      const jobCommand = command;
      const jobIdempotencyKey = jobCommand.idempotencyKey;
      let jobIdempotencyAcquired = false;
      if (jobIdempotencyKey) {
        const acquired = await persistence.acquireIdempotencyKey(
          jobIdempotencyKey,
          command.type
        );
        if (acquired.status === "replay") {
          // Command already completed earlier: replay the stored result.
          return acquired.result;
        }
        if (acquired.status === "in_progress") {
          // Another dispatch holds the key; surface a retryable error.
          throw new IdempotencyInProgressError(jobIdempotencyKey, command.type);
        }
        jobIdempotencyAcquired = true;
      }
      try {
        const result = await handleJobExecute(jobCommand, deps);
        const { _events: _, ...publicResult } = result;
        if (jobIdempotencyKey && jobIdempotencyAcquired) {
          // Persist the public result so a duplicate dispatch replays it.
          await persistence.completeIdempotencyKey(
            jobIdempotencyKey,
            command.type,
            publicResult
          );
        }
        return publicResult;
      } catch (error) {
        if (jobIdempotencyKey && jobIdempotencyAcquired) {
          // Best-effort release so the key can be retried; the original
          // error is what propagates, not any release failure.
          await persistence.releaseIdempotencyKey(jobIdempotencyKey, command.type).catch(() => {
          });
        }
        throw error;
      }
    }
    // All remaining command types: same idempotency protocol, but the
    // handler runs inside a persistence transaction.
    const idempotencyKey = getIdempotencyKey(command);
    let idempotencyAcquired = false;
    if (idempotencyKey) {
      const acquired = await persistence.acquireIdempotencyKey(
        idempotencyKey,
        command.type
      );
      if (acquired.status === "replay") {
        return acquired.result;
      }
      if (acquired.status === "in_progress") {
        throw new IdempotencyInProgressError(idempotencyKey, command.type);
      }
      idempotencyAcquired = true;
    }
    try {
      const publicResult = await persistence.withTransaction(async (tx) => {
        // Handlers see the transactional persistence handle, not the raw one.
        const txDeps = { ...deps, persistence: tx };
        let result;
        switch (command.type) {
          case "run.create":
            result = await handleRunCreate(command, txDeps);
            break;
          case "run.claimPending":
            result = await handleRunClaimPending(
              command,
              txDeps
            );
            break;
          case "run.transition":
            result = await handleRunTransition(
              command,
              txDeps
            );
            break;
          case "run.cancel":
            result = await handleRunCancel(command, txDeps);
            break;
          case "run.rerunFrom":
            result = await handleRunRerunFrom(
              command,
              txDeps
            );
            break;
          case "stage.pollSuspended":
            result = await handleStagePollSuspended(
              command,
              txDeps
            );
            break;
          case "lease.reapStale":
            result = await handleLeaseReapStale(
              command,
              txDeps
            );
            break;
          default: {
            // Exhaustiveness guard: unknown command types fail loudly.
            const _exhaustive = command;
            throw new Error(
              `Unknown command type: ${_exhaustive.type}`
            );
          }
        }
        // Append handler-emitted events to the transactional outbox so
        // event publication commits atomically with the state change.
        const events = result._events;
        if (events.length > 0) {
          // Reuse the idempotency key as causation id when present so
          // replays correlate; otherwise mint a fresh UUID.
          const causationId = idempotencyKey ?? crypto.randomUUID();
          const outboxEvents = events.map(
            (event) => ({
              workflowRunId: event.workflowRunId,
              eventType: event.type,
              payload: event,
              causationId,
              occurredAt: event.timestamp
            })
          );
          await tx.appendOutboxEvents(outboxEvents);
        }
        // Strip internal `_events` before the result leaves the kernel.
        const { _events: _, ...stripped } = result;
        return stripped;
      });
      if (idempotencyKey && idempotencyAcquired) {
        // Completion happens AFTER the transaction commits: a crash between
        // commit and completion leaves the key acquirable again.
        await persistence.completeIdempotencyKey(
          idempotencyKey,
          command.type,
          publicResult
        );
      }
      return publicResult;
    } catch (error) {
      if (idempotencyKey && idempotencyAcquired) {
        // Best-effort release; the handler error takes precedence.
        await persistence.releaseIdempotencyKey(idempotencyKey, command.type).catch(() => {
        });
      }
      throw error;
    }
  }
  return { dispatch };
}
|
|
1079
|
+
|
|
1080
|
+
// src/kernel/plugins.ts
|
|
1081
|
+
/**
 * Identity helper for declaring a plugin definition.
 *
 * Exists purely so call sites read as intentional plugin declarations
 * (and, in TypeScript source, get inference on the definition shape);
 * the definition object is returned unchanged.
 *
 * @param {object} definition - The plugin definition.
 * @returns {object} The same definition object, untouched.
 */
function definePlugin(definition) {
  const plugin = definition;
  return plugin;
}
|
|
1084
|
+
/**
 * Creates a plugin runner that fans events out to subscribed plugins.
 *
 * Each plugin declares the event types it listens to via its `on` array.
 * `emit` invokes matching plugins sequentially, in registration order,
 * awaiting each handler before calling the next.
 *
 * @param {{ plugins: Array<object>, maxRetries?: number }} config
 * @returns {{ maxRetries: number, emit: (event: object) => Promise<void> }}
 */
function createPluginRunner(config) {
  const { plugins, maxRetries = 3 } = config;
  // Index: event type -> plugins subscribed to that type (registration order).
  const subscribers = new Map();
  for (const plugin of plugins) {
    for (const eventType of plugin.on) {
      if (!subscribers.has(eventType)) {
        subscribers.set(eventType, []);
      }
      subscribers.get(eventType).push(plugin);
    }
  }
  return {
    maxRetries,
    async emit(event) {
      const listeners = subscribers.get(event.type) ?? [];
      for (const listener of listeners) {
        await listener.handle(event);
      }
    }
  };
}
|
|
1105
|
+
|
|
1106
|
+
export { IdempotencyInProgressError, createKernel, createPluginRunner, definePlugin, loadWorkflowContext, saveStageOutput };
|
|
1107
|
+
//# sourceMappingURL=chunk-WZ533CPU.js.map
|