pg-workflows 0.0.1-claimed → 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +22 -0
- package/README.md +430 -0
- package/dist/index.cjs +1050 -0
- package/dist/index.d.cts +202 -0
- package/dist/index.d.ts +202 -0
- package/dist/index.js +1004 -0
- package/dist/index.js.map +16 -0
- package/package.json +83 -7
package/dist/index.cjs
ADDED
|
@@ -0,0 +1,1050 @@
|
|
|
1
|
+
// Bundler-generated (Bun) CommonJS interop runtime helpers.
var import_node_module = require("node:module");
var __create = Object.create;
var __getProtoOf = Object.getPrototypeOf;
var __defProp = Object.defineProperty;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Wraps a CommonJS module so it can be consumed like an ES module:
// copies every own property as a live getter and, unless the module is
// already flagged __esModule, exposes the module itself as `default`.
var __toESM = (mod, isNodeMode, target) => {
  target = mod != null ? __create(__getProtoOf(mod)) : {};
  const to = isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target;
  for (let key of __getOwnPropNames(mod))
    if (!__hasOwnProp.call(to, key))
      __defProp(to, key, {
        get: () => mod[key],
        enumerable: true
      });
  return to;
};
// Memoizes __toCommonJS conversions so each namespace object is wrapped once.
var __moduleCache = /* @__PURE__ */ new WeakMap;
// Converts an ES-module namespace into a CommonJS exports object:
// marks it __esModule and mirrors each property as a live getter,
// preserving the source property's enumerability when a descriptor exists.
var __toCommonJS = (from) => {
  var entry = __moduleCache.get(from), desc;
  if (entry)
    return entry;
  entry = __defProp({}, "__esModule", { value: true });
  if (from && typeof from === "object" || typeof from === "function")
    __getOwnPropNames(from).map((key) => !__hasOwnProp.call(entry, key) && __defProp(entry, key, {
      get: () => from[key],
      enumerable: true
    }));
  __moduleCache.set(from, entry);
  return entry;
};
// Installs live re-export bindings on `target`; `all` maps export names to
// getter thunks. The setter allows later reassignment of a live binding.
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, {
      get: all[name],
      enumerable: true,
      configurable: true,
      set: (newValue) => all[name] = () => newValue
    });
};
|
|
42
|
+
|
|
43
|
+
// src/index.ts
// Public module surface: register each export as a live getter, then
// publish the namespace through the CommonJS `module.exports` object.
var exports_src = {};
__export(exports_src, {
  workflow: () => workflow,
  WorkflowStatus: () => WorkflowStatus,
  WorkflowRunNotFoundError: () => WorkflowRunNotFoundError,
  WorkflowEngineError: () => WorkflowEngineError,
  WorkflowEngine: () => WorkflowEngine,
  StepType: () => StepType
});
module.exports = __toCommonJS(exports_src);
|
|
54
|
+
|
|
55
|
+
// src/definition.ts
|
|
56
|
+
/**
 * Builds a workflow definition object suitable for registration with the
 * WorkflowEngine.
 *
 * @param {string} id - Unique workflow identifier.
 * @param {Function} handler - Async workflow body receiving the run context.
 * @param {object} [options] - Optional settings.
 * @param {object} [options.inputSchema] - Schema describing the run input.
 * @param {number} [options.timeout] - Run timeout in milliseconds.
 * @param {number} [options.retries] - Maximum retry attempts.
 * @returns {{id: string, handler: Function, inputSchema: (object|undefined), timeout: (number|undefined), retries: (number|undefined)}}
 */
function workflow(id, handler, options = {}) {
  const { inputSchema, timeout, retries } = options;
  return { id, handler, inputSchema, timeout, retries };
}
|
|
65
|
+
// src/engine.ts
|
|
66
|
+
var import_merge = __toESM(require("lodash/merge"));
|
|
67
|
+
|
|
68
|
+
// src/ast-parser.ts
|
|
69
|
+
var ts = __toESM(require("typescript"));
|
|
70
|
+
/**
 * Statically analyzes a workflow handler's source (via handler.toString())
 * with the TypeScript compiler API to discover the steps it declares.
 *
 * Finds `step.run(...)`, `step.waitFor(...)` and `step.pause(...)` call
 * sites and records, per step: its id, the call kind, whether the call is
 * nested inside a conditional or a loop, and whether the id is dynamic
 * (template expression or arbitrary expression rather than a plain string).
 *
 * @param {Function} handler - The workflow handler to inspect.
 * @returns {{steps: Array<object>}} Discovered steps, in source order.
 * @throws {Error} When two step calls share the same id.
 */
function parseWorkflowHandler(handler) {
  const handlerSource = handler.toString();
  const sourceFile = ts.createSourceFile("handler.ts", handlerSource, ts.ScriptTarget.Latest, true);
  const steps = new Map;
  // True when `node` has an if / ternary / switch(-case) ancestor.
  function isInConditional(node) {
    let current = node.parent;
    while (current) {
      if (ts.isIfStatement(current) || ts.isConditionalExpression(current) || ts.isSwitchStatement(current) || ts.isCaseClause(current)) {
        return true;
      }
      current = current.parent;
    }
    return false;
  }
  // True when `node` has any loop-statement ancestor (for / for-in /
  // for-of / while / do-while).
  function isInLoop(node) {
    let current = node.parent;
    while (current) {
      if (ts.isForStatement(current) || ts.isForInStatement(current) || ts.isForOfStatement(current) || ts.isWhileStatement(current) || ts.isDoStatement(current)) {
        return true;
      }
      current = current.parent;
    }
    return false;
  }
  // Resolves a step call's first argument to an id. Plain string literals
  // are static; template expressions are flattened with `${...}`
  // placeholders and flagged dynamic; any other expression uses its raw
  // source text and is also flagged dynamic.
  function extractStepId(arg) {
    if (ts.isStringLiteral(arg) || ts.isNoSubstitutionTemplateLiteral(arg)) {
      return { id: arg.text, isDynamic: false };
    }
    if (ts.isTemplateExpression(arg)) {
      let templateStr = arg.head.text;
      for (const span of arg.templateSpans) {
        templateStr += `\${...}`;
        templateStr += span.literal.text;
      }
      return { id: templateStr, isDynamic: true };
    }
    return { id: arg.getText(sourceFile), isDynamic: true };
  }
  // Recursive AST walk; only `step.<run|waitFor|pause>(...)` calls with at
  // least one argument are recorded.
  function visit(node) {
    if (ts.isCallExpression(node) && ts.isPropertyAccessExpression(node.expression)) {
      const propertyAccess = node.expression;
      const objectName = propertyAccess.expression.getText(sourceFile);
      const methodName = propertyAccess.name.text;
      if (objectName === "step" && (methodName === "run" || methodName === "waitFor" || methodName === "pause")) {
        const firstArg = node.arguments[0];
        if (firstArg) {
          const { id, isDynamic } = extractStepId(firstArg);
          const stepDefinition = {
            id,
            type: methodName,
            conditional: isInConditional(node),
            loop: isInLoop(node),
            isDynamic
          };
          // Duplicate ids would make timeline entries collide.
          if (steps.has(id)) {
            throw new Error(`Duplicate step ID detected: '${id}'. Step IDs must be unique within a workflow.`);
          }
          steps.set(id, stepDefinition);
        }
      }
    }
    ts.forEachChild(node, visit);
  }
  visit(sourceFile);
  return { steps: Array.from(steps.values()) };
}
|
|
136
|
+
|
|
137
|
+
// src/db/migration.ts
|
|
138
|
+
/**
 * Idempotently provisions the `workflow_runs` table and its indexes.
 * Checks information_schema first so an existing installation is left
 * untouched — this is create-only; it does not alter an existing schema.
 *
 * @param {{executeSql: Function}} db - Handle exposing executeSql(query, values).
 */
async function runMigrations(db) {
  const tableExistsResult = await db.executeSql(`
    SELECT EXISTS (
      SELECT FROM information_schema.tables
      WHERE table_schema = 'public'
      AND table_name = 'workflow_runs'
    );
  `, []);
  if (!tableExistsResult.rows[0]?.exists) {
    await db.executeSql(`
      CREATE TABLE workflow_runs (
        id varchar(32) PRIMARY KEY NOT NULL,
        created_at timestamp with time zone DEFAULT now() NOT NULL,
        updated_at timestamp with time zone DEFAULT now() NOT NULL,
        resource_id varchar(32),
        workflow_id varchar(32) NOT NULL,
        status text DEFAULT 'pending' NOT NULL,
        input jsonb NOT NULL,
        output jsonb,
        error text,
        current_step_id varchar(256) NOT NULL,
        timeline jsonb DEFAULT '{}'::jsonb NOT NULL,
        paused_at timestamp with time zone,
        resumed_at timestamp with time zone,
        completed_at timestamp with time zone,
        timeout_at timestamp with time zone,
        retry_count integer DEFAULT 0 NOT NULL,
        max_retries integer DEFAULT 0 NOT NULL,
        job_id varchar(256)
      );
    `, []);
    // Secondary indexes for the common lookup/pagination paths.
    await db.executeSql(`
      CREATE INDEX workflow_runs_workflow_id_idx ON workflow_runs USING btree (workflow_id);
    `, []);
    await db.executeSql(`
      CREATE INDEX workflow_runs_created_at_idx ON workflow_runs USING btree (created_at);
    `, []);
    await db.executeSql(`
      CREATE INDEX workflow_runs_resource_id_idx ON workflow_runs USING btree (resource_id);
    `, []);
  }
}
|
|
180
|
+
|
|
181
|
+
// src/db/queries.ts
|
|
182
|
+
var import_ksuid = __toESM(require("ksuid"));
|
|
183
|
+
/**
 * Generates a KSUID-based identifier, optionally namespaced with a prefix
 * (e.g. "run" -> "run_2AbC...").
 *
 * @param {string} [prefix] - Optional namespace prepended with an underscore.
 * @returns {string} The generated identifier.
 */
function generateKSUID(prefix) {
  const namespace = prefix ? `${prefix}_` : "";
  return namespace + import_ksuid.default.randomSync().string;
}
|
|
186
|
+
/**
 * Maps a raw snake_case `workflow_runs` row to the camelCase domain shape.
 * JSON columns delivered as strings are parsed; timestamps become Dates
 * (nullable timestamps map null/undefined to null). String `output` values
 * that do not look like a JSON object/array are kept verbatim.
 *
 * @param {object} row - A row as returned by the database driver.
 * @returns {object} The workflow run in domain shape.
 */
function mapRowToWorkflowRun(row) {
  const dateOrNull = (value) => (value ? new Date(value) : null);
  const parseIfString = (value) => (typeof value === "string" ? JSON.parse(value) : value);
  let output;
  if (typeof row.output === "string") {
    const trimmed = row.output.trim();
    // Only parse when the payload looks like a JSON object or array;
    // other strings are passed through untouched.
    output = trimmed.startsWith("{") || trimmed.startsWith("[") ? JSON.parse(row.output) : row.output;
  } else {
    output = row.output ?? null;
  }
  return {
    id: row.id,
    createdAt: new Date(row.created_at),
    updatedAt: new Date(row.updated_at),
    resourceId: row.resource_id,
    workflowId: row.workflow_id,
    status: row.status,
    input: parseIfString(row.input),
    output,
    error: row.error,
    currentStepId: row.current_step_id,
    timeline: parseIfString(row.timeline),
    pausedAt: dateOrNull(row.paused_at),
    resumedAt: dateOrNull(row.resumed_at),
    completedAt: dateOrNull(row.completed_at),
    timeoutAt: dateOrNull(row.timeout_at),
    retryCount: row.retry_count,
    maxRetries: row.max_retries,
    jobId: row.job_id
  };
}
|
|
208
|
+
/**
 * Inserts a new `workflow_runs` row with a generated "run_"-prefixed KSUID,
 * an empty timeline and a zero retry count, and returns the inserted row in
 * domain shape.
 *
 * @param {object} params - resourceId (optional), workflowId, currentStepId,
 *   status, input (JSON-serialized before insert), maxRetries, timeoutAt.
 * @param {{executeSql: Function}} db - Database handle.
 * @returns {Promise<object>} The inserted run, camelCase-mapped.
 * @throws {Error} When INSERT ... RETURNING yields no row.
 */
async function insertWorkflowRun({
  resourceId,
  workflowId,
  currentStepId,
  status,
  input,
  maxRetries,
  timeoutAt
}, db) {
  const runId = generateKSUID("run");
  const now = new Date;
  const result = await db.executeSql(`INSERT INTO workflow_runs (
    id,
    resource_id,
    workflow_id,
    current_step_id,
    status,
    input,
    max_retries,
    timeout_at,
    created_at,
    updated_at,
    timeline,
    retry_count
  )
  VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12)
  RETURNING *`, [
    runId,
    resourceId ?? null,
    workflowId,
    currentStepId,
    status,
    JSON.stringify(input),
    maxRetries,
    timeoutAt,
    now,
    now,
    "{}",
    0
  ]);
  const insertedRun = result.rows[0];
  if (!insertedRun) {
    throw new Error("Failed to insert workflow run");
  }
  return mapRowToWorkflowRun(insertedRun);
}
|
|
254
|
+
/**
 * Fetches a single workflow run by id, optionally scoped to a resource.
 * Pass `exclusiveLock: true` to append FOR UPDATE (row-level lock; only
 * meaningful inside a transaction). Returns null when no row matches.
 *
 * @param {{runId: string, resourceId?: string}} params
 * @param {{exclusiveLock?: boolean, db: {executeSql: Function}}} options
 * @returns {Promise<object|null>} The run in domain shape, or null.
 */
async function getWorkflowRun({
  runId,
  resourceId
}, { exclusiveLock = false, db }) {
  const lockSuffix = exclusiveLock ? "FOR UPDATE" : "";
  let result;
  if (resourceId) {
    result = await db.executeSql(`SELECT * FROM workflow_runs
    WHERE id = $1 AND resource_id = $2
    ${lockSuffix}`, [runId, resourceId]);
  } else {
    result = await db.executeSql(`SELECT * FROM workflow_runs
    WHERE id = $1
    ${lockSuffix}`, [runId]);
  }
  const row = result.rows[0];
  return row ? mapRowToWorkflowRun(row) : null;
}
|
|
270
|
+
/**
 * Applies a partial update to a `workflow_runs` row and returns the updated
 * row in domain shape, or null when no row matched (e.g. wrong resourceId).
 *
 * Improvement over the previous version: the ten copy-pasted
 * `if (data.x !== undefined)` blocks are replaced with a data-driven column
 * map, keeping the same column order (and therefore the same parameter
 * numbering) as before.
 *
 * @param {{runId: string, resourceId?: string, data: object}} params -
 *   `data` may contain: status, currentStepId, timeline, pausedAt,
 *   resumedAt, completedAt, output, error, retryCount, jobId. Only keys
 *   that are not `undefined` are written; timeline/output are
 *   JSON-serialized.
 * @param {{executeSql: Function}} db - Database handle.
 * @returns {Promise<object|null>} Updated run or null.
 */
async function updateWorkflowRun({
  runId,
  resourceId,
  data
}, db) {
  const now = new Date;
  // [data key, column name, optional serializer] — order is significant:
  // it fixes the $n parameter positions, matching the previous behavior.
  const COLUMN_MAP = [
    ["status", "status"],
    ["currentStepId", "current_step_id"],
    ["timeline", "timeline", JSON.stringify],
    ["pausedAt", "paused_at"],
    ["resumedAt", "resumed_at"],
    ["completedAt", "completed_at"],
    ["output", "output", JSON.stringify],
    ["error", "error"],
    ["retryCount", "retry_count"],
    ["jobId", "job_id"]
  ];
  const updates = ["updated_at = $1"];
  const values = [now];
  let paramIndex = 2;
  for (const [key, column, serialize] of COLUMN_MAP) {
    if (data[key] !== undefined) {
      updates.push(`${column} = $${paramIndex}`);
      values.push(serialize ? serialize(data[key]) : data[key]);
      paramIndex++;
    }
  }
  const whereClause = resourceId ? `WHERE id = $${paramIndex} AND resource_id = $${paramIndex + 1}` : `WHERE id = $${paramIndex}`;
  values.push(runId);
  if (resourceId) {
    values.push(resourceId);
  }
  const query = `
    UPDATE workflow_runs
    SET ${updates.join(", ")}
    ${whereClause}
    RETURNING *
  `;
  const result = await db.executeSql(query, values);
  const run = result.rows[0];
  if (!run) {
    return null;
  }
  return mapRowToWorkflowRun(run);
}
|
|
347
|
+
/**
 * Lists workflow runs with optional filters (resourceId, statuses,
 * workflowId) and cursor-based pagination (startingAfter / endingBefore
 * are run ids whose created_at anchors the page). Results are ordered by
 * created_at DESC; `limit` is clamped to [1, 100].
 *
 * BUG FIX: the clamped limit is now used consistently. Previously the SQL
 * LIMIT used the clamped value while `hasMore` and the slice used the raw
 * `limit`, so a caller passing limit > 100 would receive the extra
 * sentinel row and `hasMore` would always be false.
 *
 * @returns {Promise<{items: Array, nextCursor: ?string, prevCursor: ?string, hasMore: boolean, hasPrev: boolean}>}
 */
async function getWorkflowRuns({
  resourceId,
  startingAfter,
  endingBefore,
  limit = 20,
  statuses,
  workflowId
}, db) {
  const conditions = [];
  const values = [];
  let paramIndex = 1;
  if (resourceId) {
    conditions.push(`resource_id = $${paramIndex}`);
    values.push(resourceId);
    paramIndex++;
  }
  if (statuses && statuses.length > 0) {
    conditions.push(`status = ANY($${paramIndex})`);
    values.push(statuses);
    paramIndex++;
  }
  if (workflowId) {
    conditions.push(`workflow_id = $${paramIndex}`);
    values.push(workflowId);
    paramIndex++;
  }
  // Cursor pagination: resolve the cursor row's created_at, then filter
  // relative to it. Silently ignores cursors that no longer exist.
  if (startingAfter) {
    const cursorResult = await db.executeSql("SELECT created_at FROM workflow_runs WHERE id = $1 LIMIT 1", [startingAfter]);
    if (cursorResult.rows[0]?.created_at) {
      conditions.push(`created_at < $${paramIndex}`);
      values.push(typeof cursorResult.rows[0].created_at === "string" ? new Date(cursorResult.rows[0].created_at) : cursorResult.rows[0].created_at);
      paramIndex++;
    }
  }
  if (endingBefore) {
    const cursorResult = await db.executeSql("SELECT created_at FROM workflow_runs WHERE id = $1 LIMIT 1", [endingBefore]);
    if (cursorResult.rows[0]?.created_at) {
      conditions.push(`created_at > $${paramIndex}`);
      values.push(typeof cursorResult.rows[0].created_at === "string" ? new Date(cursorResult.rows[0].created_at) : cursorResult.rows[0].created_at);
      paramIndex++;
    }
  }
  const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
  // Clamp once; fetch one extra row as a sentinel for hasMore detection.
  const effectiveLimit = Math.min(Math.max(limit, 1), 100);
  const query = `
    SELECT * FROM workflow_runs
    ${whereClause}
    ORDER BY created_at DESC
    LIMIT $${paramIndex}
  `;
  values.push(effectiveLimit + 1);
  const result = await db.executeSql(query, values);
  const rows = result.rows;
  const hasMore = rows.length > effectiveLimit;
  const rawItems = hasMore ? rows.slice(0, effectiveLimit) : rows;
  const items = rawItems.map((row) => mapRowToWorkflowRun(row));
  const hasPrev = !!endingBefore;
  const nextCursor = hasMore && items.length > 0 ? items[items.length - 1]?.id ?? null : null;
  const prevCursor = hasPrev && items.length > 0 ? items[0]?.id ?? null : null;
  return { items, nextCursor, prevCursor, hasMore, hasPrev };
}
|
|
408
|
+
/**
 * Runs `callback` inside a Postgres transaction on `db`: BEGIN, callback,
 * COMMIT; on callback/COMMIT failure, ROLLBACK and rethrow the original
 * error.
 *
 * BUG FIX: previously a failing ROLLBACK replaced (masked) the original
 * error, and a failing BEGIN triggered a pointless ROLLBACK. Now BEGIN
 * failures propagate directly and ROLLBACK failures are swallowed so the
 * root cause is always what the caller sees.
 *
 * @param {{executeSql: Function}} db - Database handle.
 * @param {(db: object) => Promise<*>} callback - Transaction body.
 * @returns {Promise<*>} Whatever the callback resolves to.
 */
async function withPostgresTransaction(db, callback) {
  await db.executeSql("BEGIN", []);
  try {
    const result = await callback(db);
    await db.executeSql("COMMIT", []);
    return result;
  } catch (error) {
    try {
      await db.executeSql("ROLLBACK", []);
    } catch {
      // Intentionally ignored: the original error must not be masked by a
      // rollback failure (e.g. a dropped connection).
    }
    throw error;
  }
}
|
|
419
|
+
|
|
420
|
+
// src/error.ts
|
|
421
|
+
/**
 * Base error for all engine failures. Carries the workflow id and run id
 * (when known) plus an optional underlying cause.
 */
class WorkflowEngineError extends Error {
  workflowId;
  runId;
  cause;
  /**
   * @param {string} message - Human-readable description.
   * @param {string} [workflowId] - Workflow the failure relates to.
   * @param {string} [runId] - Run the failure relates to.
   * @param {*} [cause] - Underlying error, if any.
   */
  constructor(message, workflowId, runId, cause = undefined) {
    super(message);
    this.name = "WorkflowEngineError";
    this.workflowId = workflowId;
    this.runId = runId;
    this.cause = cause;
    // V8-only: trim this constructor from the captured stack.
    Error.captureStackTrace?.(this, WorkflowEngineError);
  }
}
|
|
436
|
+
|
|
437
|
+
/**
 * Thrown when a workflow run id cannot be resolved (optionally scoped to a
 * workflow). Inherits workflowId/runId fields from WorkflowEngineError.
 */
class WorkflowRunNotFoundError extends WorkflowEngineError {
  constructor(runId, workflowId) {
    super("Workflow run not found", workflowId, runId);
    this.name = "WorkflowRunNotFoundError";
  }
}
|
|
443
|
+
|
|
444
|
+
// src/types.ts
|
|
445
|
+
// Lifecycle states of a workflow run (persisted in workflow_runs.status).
var WorkflowStatus;
(function (statuses) {
  statuses["PENDING"] = "pending";
  statuses["RUNNING"] = "running";
  statuses["PAUSED"] = "paused";
  statuses["COMPLETED"] = "completed";
  statuses["FAILED"] = "failed";
  statuses["CANCELLED"] = "cancelled";
})(WorkflowStatus || (WorkflowStatus = {}));
|
|
454
|
+
// Kinds of step calls a workflow handler can make.
var StepType;
(function (types) {
  types["PAUSE"] = "pause";
  types["RUN"] = "run";
  types["WAIT_FOR"] = "waitFor";
  types["WAIT_UNTIL"] = "waitUntil";
})(StepType || (StepType = {}));
|
|
461
|
+
|
|
462
|
+
// src/engine.ts
// Internal event name used to signal a paused run that it may resume.
var PAUSE_EVENT_NAME = "__internal_pause";
// pg-boss queue to which all workflow-run jobs are published.
var WORKFLOW_RUN_QUEUE_NAME = "workflow-run";
var LOG_PREFIX = "[WorkflowEngine]";
// Icons used when logging a workflow's parsed step list.
var StepTypeToIcon = {
  ["run" /* RUN */]: "λ",
  ["waitFor" /* WAIT_FOR */]: "○",
  ["pause" /* PAUSE */]: "⏸",
  ["waitUntil" /* WAIT_UNTIL */]: "⏲"
};
// BUG FIX: the default logger previously routed `log` to console.warn and
// accepted only one argument, silently dropping the structured metadata
// the engine passes as a second argument (e.g. log("Started...", { runId })).
var defaultLogger = {
  log: (message, ...meta) => console.log(message, ...meta),
  error: (message, error) => console.error(message, error)
};
// Job visibility timeout in seconds; falls back to 5 minutes when the env
// var is unset or not a valid integer (guards against NaN from parseInt).
var defaultExpireInSeconds = (() => {
  const parsed = Number.parseInt(process.env.WORKFLOW_RUN_EXPIRE_IN_SECONDS ?? "", 10);
  return Number.isFinite(parsed) ? parsed : 5 * 60;
})();
|
|
477
|
+
|
|
478
|
+
// Orchestrates workflow runs on top of pg-boss: run state is persisted in
// the Postgres `workflow_runs` table and execution is scheduled via queue
// jobs on the "workflow-run" queue.
class WorkflowEngine {
  boss; // pg-boss instance used for queueing and DB access
  db; // database handle exposing executeSql(query, values)
  unregisteredWorkflows = new Map; // definitions supplied before start()
  _started = false; // guards double start / lazy start in startWorkflow
  workflows = new Map; // workflowId -> definition plus parsed steps
  logger;
  /**
   * @param {object} [options]
   * @param {Array} [options.workflows] - Definitions registered on start().
   * @param {object} [options.logger] - Custom logger ({ log, error }).
   * @param {object} options.boss - Required pg-boss instance.
   * @throws {WorkflowEngineError} When no pg-boss instance is provided.
   */
  constructor({
    workflows,
    logger,
    boss
  } = {}) {
    this.logger = this.buildLogger(logger ?? defaultLogger);
    if (workflows) {
      // Held until start(); registration needs handler parsing + logging.
      this.unregisteredWorkflows = new Map(workflows.map((workflow2) => [workflow2.id, workflow2]));
    }
    if (!boss) {
      throw new WorkflowEngineError("PgBoss instance is required in constructor");
    }
    this.boss = boss;
    this.db = boss.getDb();
  }
|
|
500
|
+
  /**
   * Boots the engine: starts pg-boss, runs table migrations, registers any
   * workflows supplied to the constructor and creates the run queue.
   * When `asEngine` is true, also spawns polling workers (count taken from
   * WORKFLOW_RUN_WORKERS, default 3). No-op when already started.
   *
   * @param {boolean} [asEngine=true] - False for client-only mode (no workers).
   * @param {{batchSize?: number}} [options] - Jobs fetched per poll.
   */
  async start(asEngine = true, { batchSize } = { batchSize: 1 }) {
    if (this._started) {
      return;
    }
    await this.boss.start();
    await runMigrations(this.boss.getDb());
    if (this.unregisteredWorkflows.size > 0) {
      for (const workflow2 of this.unregisteredWorkflows.values()) {
        await this.registerWorkflow(workflow2);
      }
    }
    await this.boss.createQueue(WORKFLOW_RUN_QUEUE_NAME);
    const numWorkers = +(process.env.WORKFLOW_RUN_WORKERS ?? 3);
    if (asEngine) {
      for (let i = 0; i < numWorkers; i++) {
        await this.boss.work(WORKFLOW_RUN_QUEUE_NAME, { pollingIntervalSeconds: 0.5, batchSize }, (job) => this.handleWorkflowRun(job));
        this.logger.log(`Worker ${i + 1}/${numWorkers} started for queue ${WORKFLOW_RUN_QUEUE_NAME}`);
      }
    }
    this._started = true;
    this.logger.log("Workflow engine started!");
  }
|
|
522
|
+
  /** Stops pg-boss and marks the engine as not started. */
  async stop() {
    await this.boss.stop();
    this._started = false;
    this.logger.log("Workflow engine stopped");
  }
|
|
527
|
+
  /**
   * Parses the workflow handler's source to extract its step calls and
   * stores the definition; logs the parsed step list with per-step tags.
   *
   * @param {object} definition - A definition created by `workflow(...)`.
   * @returns {this} For chaining.
   * @throws {WorkflowEngineError} If the workflow id is already registered.
   */
  async registerWorkflow(definition) {
    if (this.workflows.has(definition.id)) {
      throw new WorkflowEngineError(`Workflow ${definition.id} is already registered`, definition.id);
    }
    const { steps } = parseWorkflowHandler(definition.handler);
    this.workflows.set(definition.id, {
      ...definition,
      steps
    });
    this.logger.log(`Registered workflow "${definition.id}" with steps:`);
    for (const step of steps.values()) {
      const tags = [];
      if (step.conditional)
        tags.push("[conditional]");
      if (step.loop)
        tags.push("[loop]");
      if (step.isDynamic)
        tags.push("[dynamic]");
      this.logger.log(` └─ (${StepTypeToIcon[step.type]}) ${step.id} ${tags.join(" ")}`);
    }
    return this;
  }
|
|
549
|
+
  /** Removes a single workflow definition. Existing runs are unaffected. */
  async unregisterWorkflow(workflowId) {
    this.workflows.delete(workflowId);
    return this;
  }
|
|
553
|
+
  /** Removes every registered workflow definition. */
  async unregisterAllWorkflows() {
    this.workflows.clear();
    return this;
  }
|
|
557
|
+
  /**
   * Creates and enqueues a new run of a registered workflow.
   * Lazily boots the engine in client mode (no workers) if not started.
   * Inside a transaction: inserts the run row (status RUNNING, pointing at
   * the first parsed step) and publishes a job to the run queue.
   *
   * @param {object} params - resourceId, workflowId, input, options
   *   (timeout, retries, batchSize, expireInSeconds).
   * @returns {Promise<object>} The inserted run.
   * @throws {WorkflowEngineError} For unknown workflows or ones with no steps.
   */
  async startWorkflow({
    resourceId,
    workflowId,
    input,
    options
  }) {
    if (!this._started) {
      await this.start(false, { batchSize: options?.batchSize ?? 1 });
    }
    const workflow2 = this.workflows.get(workflowId);
    if (!workflow2) {
      throw new WorkflowEngineError(`Unknown workflow ${workflowId}`);
    }
    if (workflow2.steps.length === 0 || !workflow2.steps[0]) {
      throw new WorkflowEngineError(`Workflow ${workflowId} has no steps`, workflowId);
    }
    const initialStepId = workflow2.steps[0]?.id;
    const run = await withPostgresTransaction(this.boss.getDb(), async (db) => {
      // Run-level timeout: explicit option wins over the definition default.
      const timeoutAt = options?.timeout ? new Date(Date.now() + options.timeout) : workflow2.timeout ? new Date(Date.now() + workflow2.timeout) : null;
      // NOTE(review): inserts via this.boss.getDb() rather than the
      // transaction callback's `db` argument — they appear to be the same
      // handle here (withPostgresTransaction passes its input through);
      // confirm before relying on transactional atomicity.
      const insertedRun = await insertWorkflowRun({
        resourceId,
        workflowId,
        currentStepId: initialStepId,
        status: "running" /* RUNNING */,
        input,
        maxRetries: options?.retries ?? workflow2.retries ?? 0,
        timeoutAt
      }, this.boss.getDb());
      const job = {
        runId: insertedRun.id,
        resourceId,
        workflowId,
        input
      };
      await this.boss.send(WORKFLOW_RUN_QUEUE_NAME, job, {
        startAfter: new Date,
        expireInSeconds: options?.expireInSeconds ?? defaultExpireInSeconds
      });
      return insertedRun;
    });
    this.logger.log("Started workflow run", {
      runId: run.id,
      workflowId
    });
    return run;
  }
|
|
603
|
+
  /**
   * Marks a run as PAUSED and stamps paused_at. This does not interrupt an
   * in-flight handler; the engine consults the status between steps.
   *
   * @returns {Promise<object>} The updated run.
   */
  async pauseWorkflow({
    runId,
    resourceId
  }) {
    await this.checkIfHasStarted();
    const run = await this.updateRun({
      runId,
      resourceId,
      data: {
        status: "paused" /* PAUSED */,
        pausedAt: new Date
      }
    });
    this.logger.log("Paused workflow run", {
      runId,
      workflowId: run.workflowId
    });
    return run;
  }
|
|
622
|
+
  /**
   * Resumes a paused run by emitting the internal pause event; the job
   * handler treats that event as permission to continue from the current
   * step.
   *
   * @returns {Promise<object>} The run as returned by triggerEvent.
   */
  async resumeWorkflow({
    runId,
    resourceId,
    options
  }) {
    await this.checkIfHasStarted();
    return this.triggerEvent({
      runId,
      resourceId,
      eventName: PAUSE_EVENT_NAME,
      data: {},
      options
    });
  }
|
|
636
|
+
  /**
   * Sets a run's status to CANCELLED; the job handler skips cancelled runs.
   *
   * @returns {Promise<object>} The updated run.
   */
  async cancelWorkflow({
    runId,
    resourceId
  }) {
    await this.checkIfHasStarted();
    const run = await this.updateRun({
      runId,
      resourceId,
      data: {
        status: "cancelled" /* CANCELLED */
      }
    });
    this.logger.log(`cancelled workflow run with id ${runId}`);
    return run;
  }
|
|
651
|
+
async triggerEvent({
|
|
652
|
+
runId,
|
|
653
|
+
resourceId,
|
|
654
|
+
eventName,
|
|
655
|
+
data,
|
|
656
|
+
options
|
|
657
|
+
}) {
|
|
658
|
+
await this.checkIfHasStarted();
|
|
659
|
+
const run = await this.getRun({ runId, resourceId });
|
|
660
|
+
const job = {
|
|
661
|
+
runId: run.id,
|
|
662
|
+
resourceId,
|
|
663
|
+
workflowId: run.workflowId,
|
|
664
|
+
input: run.input,
|
|
665
|
+
event: {
|
|
666
|
+
name: eventName,
|
|
667
|
+
data
|
|
668
|
+
}
|
|
669
|
+
};
|
|
670
|
+
this.boss.send(WORKFLOW_RUN_QUEUE_NAME, job, {
|
|
671
|
+
expireInSeconds: options?.expireInSeconds ?? defaultExpireInSeconds
|
|
672
|
+
});
|
|
673
|
+
this.logger.log(`event ${eventName} sent for workflow run with id ${runId}`);
|
|
674
|
+
return run;
|
|
675
|
+
}
|
|
676
|
+
  /**
   * Loads a run or throws. `exclusiveLock` appends FOR UPDATE (use inside
   * a transaction); `db` overrides the default handle.
   *
   * @throws {WorkflowRunNotFoundError} When no matching run exists.
   */
  async getRun({ runId, resourceId }, { exclusiveLock = false, db } = {}) {
    const run = await getWorkflowRun({ runId, resourceId }, { exclusiveLock, db: db ?? this.db });
    if (!run) {
      throw new WorkflowRunNotFoundError(runId);
    }
    return run;
  }
|
|
683
|
+
  /**
   * Applies a partial update to a run row (see updateWorkflowRun for the
   * accepted `data` keys).
   *
   * @throws {WorkflowRunNotFoundError} When no matching row was updated.
   */
  async updateRun({
    runId,
    resourceId,
    data
  }, { db } = {}) {
    const run = await updateWorkflowRun({ runId, resourceId, data }, db ?? this.db);
    if (!run) {
      throw new WorkflowRunNotFoundError(runId);
    }
    return run;
  }
|
|
694
|
+
  /**
   * Returns the run augmented with progress information:
   * - completedSteps: timeline entries that already carry an output
   * - completionPercentage: 100 when COMPLETED; fraction of steps with
   *   output for FAILED/CANCELLED; otherwise the current step's position
   *   in the statically parsed step list, falling back to the timeline
   *   entry count when the current step id is not in that list (dynamic
   *   step ids)
   * - totalSteps: number of statically parsed steps
   *
   * @throws {WorkflowEngineError} If the run's workflow is not registered.
   */
  async checkProgress({
    runId,
    resourceId
  }) {
    const run = await this.getRun({ runId, resourceId });
    const workflow2 = this.workflows.get(run.workflowId);
    if (!workflow2) {
      throw new WorkflowEngineError(`Workflow ${run.workflowId} not found`, run.workflowId, runId);
    }
    const steps = workflow2?.steps ?? [];
    let completionPercentage = 0;
    let completedSteps = 0;
    if (steps.length > 0) {
      // A step counts as completed once its timeline entry has an output.
      completedSteps = Object.values(run.timeline).filter((step) => typeof step === "object" && step !== null && ("output" in step) && step.output !== undefined).length;
      if (run.status === "completed" /* COMPLETED */) {
        completionPercentage = 100;
      } else if (run.status === "failed" /* FAILED */ || run.status === "cancelled" /* CANCELLED */) {
        completionPercentage = Math.min(completedSteps / steps.length * 100, 100);
      } else {
        const currentStepIndex = steps.findIndex((step) => step.id === run.currentStepId);
        if (currentStepIndex >= 0) {
          completionPercentage = currentStepIndex / steps.length * 100;
        } else {
          // Dynamic step ids never appear in the parsed list; approximate
          // progress by how many timeline entries exist.
          const completedSteps2 = Object.keys(run.timeline).length;
          completionPercentage = Math.min(completedSteps2 / steps.length * 100, 100);
        }
      }
    }
    return {
      ...run,
      completedSteps,
      // Rounded to two decimal places.
      completionPercentage: Math.round(completionPercentage * 100) / 100,
      totalSteps: steps.length
    };
  }
|
|
729
|
+
async handleWorkflowRun([job]) {
|
|
730
|
+
const { runId, resourceId, workflowId, input, event } = job?.data ?? {};
|
|
731
|
+
if (!runId) {
|
|
732
|
+
throw new WorkflowEngineError("Invalid workflow run job, missing runId", workflowId);
|
|
733
|
+
}
|
|
734
|
+
if (!resourceId) {
|
|
735
|
+
throw new WorkflowEngineError("Invalid workflow run job, missing resourceId", workflowId);
|
|
736
|
+
}
|
|
737
|
+
if (!workflowId) {
|
|
738
|
+
throw new WorkflowEngineError("Invalid workflow run job, missing workflowId", undefined, runId);
|
|
739
|
+
}
|
|
740
|
+
const workflow2 = this.workflows.get(workflowId);
|
|
741
|
+
if (!workflow2) {
|
|
742
|
+
throw new WorkflowEngineError(`Workflow ${workflowId} not found`, workflowId, runId);
|
|
743
|
+
}
|
|
744
|
+
this.logger.log("Processing workflow run...", {
|
|
745
|
+
runId,
|
|
746
|
+
workflowId
|
|
747
|
+
});
|
|
748
|
+
let run = await this.getRun({ runId, resourceId });
|
|
749
|
+
try {
|
|
750
|
+
if (run.status === "cancelled" /* CANCELLED */) {
|
|
751
|
+
this.logger.log(`Workflow run ${runId} is cancelled, skipping`);
|
|
752
|
+
return;
|
|
753
|
+
}
|
|
754
|
+
if (!run.currentStepId) {
|
|
755
|
+
throw new WorkflowEngineError("Missing current step id", workflowId, runId);
|
|
756
|
+
}
|
|
757
|
+
if (run.status === "paused" /* PAUSED */) {
|
|
758
|
+
const waitForStepEntry = run.timeline[`${run.currentStepId}-wait-for`];
|
|
759
|
+
const waitForStep = waitForStepEntry && typeof waitForStepEntry === "object" && "waitFor" in waitForStepEntry ? waitForStepEntry : null;
|
|
760
|
+
const currentStepEntry = run.timeline[run.currentStepId];
|
|
761
|
+
const currentStep = currentStepEntry && typeof currentStepEntry === "object" && "output" in currentStepEntry ? currentStepEntry : null;
|
|
762
|
+
const waitFor = waitForStep?.waitFor;
|
|
763
|
+
const hasCurrentStepOutput = currentStep?.output !== undefined;
|
|
764
|
+
if (waitFor && waitFor.eventName === event?.name && !hasCurrentStepOutput) {
|
|
765
|
+
run = await this.updateRun({
|
|
766
|
+
runId,
|
|
767
|
+
resourceId,
|
|
768
|
+
data: {
|
|
769
|
+
status: "running" /* RUNNING */,
|
|
770
|
+
pausedAt: null,
|
|
771
|
+
resumedAt: new Date,
|
|
772
|
+
timeline: import_merge.default(run.timeline, {
|
|
773
|
+
[run.currentStepId]: {
|
|
774
|
+
output: event?.data ?? {},
|
|
775
|
+
timestamp: new Date
|
|
776
|
+
}
|
|
777
|
+
}),
|
|
778
|
+
jobId: job?.id
|
|
779
|
+
}
|
|
780
|
+
});
|
|
781
|
+
} else {
|
|
782
|
+
run = await this.updateRun({
|
|
783
|
+
runId,
|
|
784
|
+
resourceId,
|
|
785
|
+
data: {
|
|
786
|
+
status: "running" /* RUNNING */,
|
|
787
|
+
pausedAt: null,
|
|
788
|
+
resumedAt: new Date,
|
|
789
|
+
jobId: job?.id
|
|
790
|
+
}
|
|
791
|
+
});
|
|
792
|
+
}
|
|
793
|
+
}
|
|
794
|
+
const context = {
|
|
795
|
+
input: run.input,
|
|
796
|
+
workflowId: run.workflowId,
|
|
797
|
+
runId: run.id,
|
|
798
|
+
timeline: run.timeline,
|
|
799
|
+
logger: this.logger,
|
|
800
|
+
step: {
|
|
801
|
+
run: async (stepId, handler) => {
|
|
802
|
+
if (!run) {
|
|
803
|
+
throw new WorkflowEngineError("Missing workflow run", workflowId, runId);
|
|
804
|
+
}
|
|
805
|
+
return this.runStep({
|
|
806
|
+
stepId,
|
|
807
|
+
run,
|
|
808
|
+
handler
|
|
809
|
+
});
|
|
810
|
+
},
|
|
811
|
+
waitFor: async (stepId, { eventName, timeout }) => {
|
|
812
|
+
if (!run) {
|
|
813
|
+
throw new WorkflowEngineError("Missing workflow run", workflowId, runId);
|
|
814
|
+
}
|
|
815
|
+
return this.waitForEvent({
|
|
816
|
+
run,
|
|
817
|
+
stepId,
|
|
818
|
+
eventName,
|
|
819
|
+
timeout
|
|
820
|
+
});
|
|
821
|
+
},
|
|
822
|
+
waitUntil: async ({ date }) => {
|
|
823
|
+
return this.waitUntil(runId, date);
|
|
824
|
+
},
|
|
825
|
+
pause: async (stepId) => {
|
|
826
|
+
if (!run) {
|
|
827
|
+
throw new WorkflowEngineError("Missing workflow run", workflowId, runId);
|
|
828
|
+
}
|
|
829
|
+
return this.pauseStep({
|
|
830
|
+
stepId,
|
|
831
|
+
run
|
|
832
|
+
});
|
|
833
|
+
}
|
|
834
|
+
}
|
|
835
|
+
};
|
|
836
|
+
const result = await workflow2.handler(context);
|
|
837
|
+
run = await this.getRun({ runId, resourceId });
|
|
838
|
+
if (run.status === "running" /* RUNNING */ && run.currentStepId === workflow2.steps[workflow2.steps.length - 1]?.id) {
|
|
839
|
+
const normalizedResult = result === undefined ? {} : result;
|
|
840
|
+
await this.updateRun({
|
|
841
|
+
runId,
|
|
842
|
+
resourceId,
|
|
843
|
+
data: {
|
|
844
|
+
status: "completed" /* COMPLETED */,
|
|
845
|
+
output: normalizedResult,
|
|
846
|
+
completedAt: new Date,
|
|
847
|
+
jobId: job?.id
|
|
848
|
+
}
|
|
849
|
+
});
|
|
850
|
+
this.logger.log("Workflow run completed.", {
|
|
851
|
+
runId,
|
|
852
|
+
workflowId
|
|
853
|
+
});
|
|
854
|
+
}
|
|
855
|
+
} catch (error) {
|
|
856
|
+
if (run.retryCount < run.maxRetries) {
|
|
857
|
+
await this.updateRun({
|
|
858
|
+
runId,
|
|
859
|
+
resourceId,
|
|
860
|
+
data: {
|
|
861
|
+
retryCount: run.retryCount + 1,
|
|
862
|
+
jobId: job?.id
|
|
863
|
+
}
|
|
864
|
+
});
|
|
865
|
+
const retryDelay = 2 ** run.retryCount * 1000;
|
|
866
|
+
const pgBossJob = {
|
|
867
|
+
runId,
|
|
868
|
+
resourceId,
|
|
869
|
+
workflowId,
|
|
870
|
+
input
|
|
871
|
+
};
|
|
872
|
+
await this.boss?.send("workflow-run", pgBossJob, { retryDelay });
|
|
873
|
+
return;
|
|
874
|
+
}
|
|
875
|
+
await this.updateRun({
|
|
876
|
+
runId,
|
|
877
|
+
resourceId,
|
|
878
|
+
data: {
|
|
879
|
+
status: "failed" /* FAILED */,
|
|
880
|
+
error: error instanceof Error ? error.message : String(error),
|
|
881
|
+
jobId: job?.id
|
|
882
|
+
}
|
|
883
|
+
});
|
|
884
|
+
throw error;
|
|
885
|
+
}
|
|
886
|
+
}
|
|
887
|
+
async runStep({
  stepId,
  run,
  handler
}) {
  // Execute a single workflow step with replay semantics: the whole step
  // runs inside a transaction holding an exclusive lock on the run row,
  // and a previously recorded output is returned instead of re-running
  // the handler.
  return withPostgresTransaction(this.db, async (db) => {
    // Re-read the run under the exclusive lock; the caller-supplied `run`
    // may be stale by the time this worker acquires the lock.
    const persistedRun = await this.getRun({ runId: run.id, resourceId: run.resourceId ?? undefined }, {
      exclusiveLock: true,
      db
    });
    // Cancelled/paused/failed runs skip the step entirely (resolves to
    // undefined rather than throwing).
    if (persistedRun.status === "cancelled" /* CANCELLED */ || persistedRun.status === "paused" /* PAUSED */ || persistedRun.status === "failed" /* FAILED */) {
      this.logger.log(`Step ${stepId} skipped, workflow run is ${persistedRun.status}`, {
        runId: run.id,
        workflowId: run.workflowId
      });
      return;
    }
    try {
      let result;
      // A timeline entry only counts as a step result when it is an object
      // carrying an `output` key; other shapes (e.g. "<step>-wait-for"
      // markers) are ignored.
      const timelineStepEntry = persistedRun.timeline[stepId];
      const timelineStep = timelineStepEntry && typeof timelineStepEntry === "object" && "output" in timelineStepEntry ? timelineStepEntry : null;
      if (timelineStep?.output !== undefined) {
        // Replay path: the step already produced output on a previous
        // execution of this run.
        result = timelineStep.output;
      } else {
        // Record which step is executing before invoking the handler, so a
        // crash mid-step leaves `currentStepId` pointing at this step.
        await this.updateRun({
          runId: run.id,
          resourceId: run.resourceId ?? undefined,
          data: {
            currentStepId: stepId
          }
        }, { db });
        this.logger.log(`Running step ${stepId}...`, {
          runId: run.id,
          workflowId: run.workflowId
        });
        // NOTE: the user handler runs inside the open transaction, so a
        // slow handler holds the row lock for its full duration.
        result = await handler();
        // Persist the output; undefined is normalized to {} so the replay
        // check (`output !== undefined`) recognizes a completed step.
        // NOTE(review): the merge starts from `run.timeline` (the caller's
        // copy) rather than `persistedRun.timeline` read under the lock —
        // confirm these cannot diverge, otherwise locked-in updates could
        // be overwritten here.
        run = await this.updateRun({
          runId: run.id,
          resourceId: run.resourceId ?? undefined,
          data: {
            timeline: import_merge.default(run.timeline, {
              [stepId]: {
                output: result === undefined ? {} : result,
                timestamp: new Date
              }
            })
          }
        }, { db });
      }
      // Mirror the normalization used when persisting: undefined -> {}.
      const finalResult = result === undefined ? {} : result;
      return finalResult;
    } catch (error) {
      // A step failure fails the whole run; persist the error (message plus
      // stack when available) before rethrowing to the caller.
      this.logger.error(`Step ${stepId} failed:`, error, {
        runId: run.id,
        workflowId: run.workflowId
      });
      await this.updateRun({
        runId: run.id,
        resourceId: run.resourceId ?? undefined,
        data: {
          status: "failed" /* FAILED */,
          error: error instanceof Error ? `${error.message}
${error.stack}` : String(error)
        }
      }, { db });
      throw error;
    }
  });
}
async waitForEvent({
|
|
957
|
+
run,
|
|
958
|
+
stepId,
|
|
959
|
+
eventName,
|
|
960
|
+
timeout
|
|
961
|
+
}) {
|
|
962
|
+
const persistedRun = await this.getRun({
|
|
963
|
+
runId: run.id,
|
|
964
|
+
resourceId: run.resourceId ?? undefined
|
|
965
|
+
});
|
|
966
|
+
if (persistedRun.status === "cancelled" /* CANCELLED */ || persistedRun.status === "paused" /* PAUSED */ || persistedRun.status === "failed" /* FAILED */) {
|
|
967
|
+
this.logger.log(`Step ${stepId} skipped, workflow run is ${persistedRun.status}`, {
|
|
968
|
+
runId: run.id,
|
|
969
|
+
workflowId: run.workflowId
|
|
970
|
+
});
|
|
971
|
+
return;
|
|
972
|
+
}
|
|
973
|
+
const timelineStepCheckEntry = persistedRun.timeline[stepId];
|
|
974
|
+
const timelineStepCheck = timelineStepCheckEntry && typeof timelineStepCheckEntry === "object" && "output" in timelineStepCheckEntry ? timelineStepCheckEntry : null;
|
|
975
|
+
if (timelineStepCheck?.output !== undefined) {
|
|
976
|
+
return timelineStepCheck.output;
|
|
977
|
+
}
|
|
978
|
+
await this.updateRun({
|
|
979
|
+
runId: run.id,
|
|
980
|
+
resourceId: run.resourceId ?? undefined,
|
|
981
|
+
data: {
|
|
982
|
+
status: "paused" /* PAUSED */,
|
|
983
|
+
currentStepId: stepId,
|
|
984
|
+
timeline: import_merge.default(run.timeline, {
|
|
985
|
+
[`${stepId}-wait-for`]: {
|
|
986
|
+
waitFor: {
|
|
987
|
+
eventName,
|
|
988
|
+
timeout
|
|
989
|
+
},
|
|
990
|
+
timestamp: new Date
|
|
991
|
+
}
|
|
992
|
+
}),
|
|
993
|
+
pausedAt: new Date
|
|
994
|
+
}
|
|
995
|
+
});
|
|
996
|
+
this.logger.log(`Running step ${stepId}, waiting for event ${eventName}...`, {
|
|
997
|
+
runId: run.id,
|
|
998
|
+
workflowId: run.workflowId
|
|
999
|
+
});
|
|
1000
|
+
}
|
|
1001
|
+
async pauseStep({ stepId, run }) {
|
|
1002
|
+
await this.waitForEvent({
|
|
1003
|
+
run,
|
|
1004
|
+
stepId,
|
|
1005
|
+
eventName: PAUSE_EVENT_NAME
|
|
1006
|
+
});
|
|
1007
|
+
}
|
|
1008
|
+
async waitUntil(runId, _date) {
|
|
1009
|
+
throw new WorkflowEngineError("Not implemented yet", undefined, runId);
|
|
1010
|
+
}
|
|
1011
|
+
async checkIfHasStarted() {
|
|
1012
|
+
if (!this._started) {
|
|
1013
|
+
throw new WorkflowEngineError("Workflow engine not started");
|
|
1014
|
+
}
|
|
1015
|
+
}
|
|
1016
|
+
buildLogger(logger) {
|
|
1017
|
+
return {
|
|
1018
|
+
log: (message, context) => {
|
|
1019
|
+
const { runId, workflowId } = context ?? {};
|
|
1020
|
+
const parts = [LOG_PREFIX, workflowId, runId].filter(Boolean).join(" ");
|
|
1021
|
+
logger.log(`${parts}: ${message}`);
|
|
1022
|
+
},
|
|
1023
|
+
error: (message, error, context) => {
|
|
1024
|
+
const { runId, workflowId } = context ?? {};
|
|
1025
|
+
const parts = [LOG_PREFIX, workflowId, runId].filter(Boolean).join(" ");
|
|
1026
|
+
logger.error(`${parts}: ${message}`, error);
|
|
1027
|
+
}
|
|
1028
|
+
};
|
|
1029
|
+
}
|
|
1030
|
+
async getRuns({
|
|
1031
|
+
resourceId,
|
|
1032
|
+
startingAfter,
|
|
1033
|
+
endingBefore,
|
|
1034
|
+
limit = 20,
|
|
1035
|
+
statuses,
|
|
1036
|
+
workflowId
|
|
1037
|
+
}) {
|
|
1038
|
+
return getWorkflowRuns({
|
|
1039
|
+
resourceId,
|
|
1040
|
+
startingAfter,
|
|
1041
|
+
endingBefore,
|
|
1042
|
+
limit,
|
|
1043
|
+
statuses,
|
|
1044
|
+
workflowId
|
|
1045
|
+
}, this.db);
|
|
1046
|
+
}
|
|
1047
|
+
}
|
|
1048
|
+
|
|
1049
|
+
//# debugId=4D6E0D137080AC1364756E2164756E21
|
|
1050
|
+
//# sourceMappingURL=index.js.map
|