pg-workflows 0.7.0 → 0.8.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +83 -765
- package/dist/client.entry.cjs +822 -0
- package/dist/client.entry.d.cts +227 -0
- package/dist/client.entry.d.ts +227 -0
- package/dist/client.entry.js +13 -0
- package/dist/client.entry.js.map +16 -0
- package/dist/index.cjs +710 -356
- package/dist/index.d.cts +123 -11
- package/dist/index.d.ts +123 -11
- package/dist/index.js +75 -450
- package/dist/index.js.map +12 -10
- package/dist/shared/chunk-8n9chg7z.js +753 -0
- package/dist/shared/chunk-8n9chg7z.js.map +16 -0
- package/package.json +11 -1
package/dist/index.cjs
CHANGED
|
@@ -66,193 +66,38 @@ var exports_src = {};
|
|
|
66
66
|
__export(exports_src, {
|
|
67
67
|
workflow: () => workflow,
|
|
68
68
|
parseDuration: () => parseDuration,
|
|
69
|
+
createWorkflowRef: () => createWorkflowRef,
|
|
69
70
|
WorkflowStatus: () => WorkflowStatus,
|
|
70
71
|
WorkflowRunNotFoundError: () => WorkflowRunNotFoundError,
|
|
71
72
|
WorkflowEngineError: () => WorkflowEngineError,
|
|
72
73
|
WorkflowEngine: () => WorkflowEngine,
|
|
74
|
+
WorkflowClient: () => WorkflowClient,
|
|
73
75
|
StepType: () => StepType
|
|
74
76
|
});
|
|
75
77
|
module.exports = __toCommonJS(exports_src);
|
|
76
78
|
|
|
77
|
-
// src/
|
|
78
|
-
function createWorkflowFactory(plugins = []) {
|
|
79
|
-
const factory = (id, handler, { inputSchema, timeout, retries } = {}) => ({
|
|
80
|
-
id,
|
|
81
|
-
handler,
|
|
82
|
-
inputSchema,
|
|
83
|
-
timeout,
|
|
84
|
-
retries,
|
|
85
|
-
plugins: plugins.length > 0 ? plugins : undefined
|
|
86
|
-
});
|
|
87
|
-
factory.use = (plugin) => createWorkflowFactory([
|
|
88
|
-
...plugins,
|
|
89
|
-
plugin
|
|
90
|
-
]);
|
|
91
|
-
return factory;
|
|
92
|
-
}
|
|
93
|
-
var workflow = createWorkflowFactory();
|
|
94
|
-
// src/duration.ts
|
|
95
|
-
var import_parse_duration = __toESM(require("parse-duration"));
|
|
96
|
-
|
|
97
|
-
// src/error.ts
|
|
98
|
-
class WorkflowEngineError extends Error {
|
|
99
|
-
workflowId;
|
|
100
|
-
runId;
|
|
101
|
-
cause;
|
|
102
|
-
issues;
|
|
103
|
-
constructor(message, workflowId, runId, cause = undefined, issues) {
|
|
104
|
-
super(message);
|
|
105
|
-
this.workflowId = workflowId;
|
|
106
|
-
this.runId = runId;
|
|
107
|
-
this.cause = cause;
|
|
108
|
-
this.issues = issues;
|
|
109
|
-
this.name = "WorkflowEngineError";
|
|
110
|
-
if (Error.captureStackTrace) {
|
|
111
|
-
Error.captureStackTrace(this, WorkflowEngineError);
|
|
112
|
-
}
|
|
113
|
-
}
|
|
114
|
-
}
|
|
115
|
-
|
|
116
|
-
class WorkflowRunNotFoundError extends WorkflowEngineError {
|
|
117
|
-
constructor(runId, workflowId) {
|
|
118
|
-
super("Workflow run not found", workflowId, runId);
|
|
119
|
-
this.name = "WorkflowRunNotFoundError";
|
|
120
|
-
}
|
|
121
|
-
}
|
|
122
|
-
|
|
123
|
-
// src/duration.ts
|
|
124
|
-
var MS_PER_SECOND = 1000;
|
|
125
|
-
var MS_PER_MINUTE = 60 * MS_PER_SECOND;
|
|
126
|
-
var MS_PER_HOUR = 60 * MS_PER_MINUTE;
|
|
127
|
-
var MS_PER_DAY = 24 * MS_PER_HOUR;
|
|
128
|
-
var MS_PER_WEEK = 7 * MS_PER_DAY;
|
|
129
|
-
function parseDuration(duration) {
|
|
130
|
-
if (typeof duration === "string") {
|
|
131
|
-
if (duration.trim() === "") {
|
|
132
|
-
throw new WorkflowEngineError("Invalid duration: empty string");
|
|
133
|
-
}
|
|
134
|
-
const ms2 = import_parse_duration.default(duration);
|
|
135
|
-
if (ms2 == null || ms2 <= 0) {
|
|
136
|
-
throw new WorkflowEngineError(`Invalid duration: "${duration}"`);
|
|
137
|
-
}
|
|
138
|
-
return ms2;
|
|
139
|
-
}
|
|
140
|
-
const { weeks = 0, days = 0, hours = 0, minutes = 0, seconds = 0 } = duration;
|
|
141
|
-
const ms = weeks * MS_PER_WEEK + days * MS_PER_DAY + hours * MS_PER_HOUR + minutes * MS_PER_MINUTE + seconds * MS_PER_SECOND;
|
|
142
|
-
if (ms <= 0) {
|
|
143
|
-
throw new WorkflowEngineError("Invalid duration: must be a positive value");
|
|
144
|
-
}
|
|
145
|
-
return ms;
|
|
146
|
-
}
|
|
147
|
-
// src/engine.ts
|
|
79
|
+
// src/client.ts
|
|
148
80
|
var import_es_toolkit = require("es-toolkit");
|
|
149
81
|
var import_pg = __toESM(require("pg"));
|
|
150
82
|
var import_pg_boss = require("pg-boss");
|
|
151
83
|
|
|
152
|
-
// src/
|
|
153
|
-
var
|
|
154
|
-
|
|
155
|
-
|
|
156
|
-
var WorkflowStatus;
|
|
157
|
-
((WorkflowStatus2) => {
|
|
158
|
-
WorkflowStatus2["PENDING"] = "pending";
|
|
159
|
-
WorkflowStatus2["RUNNING"] = "running";
|
|
160
|
-
WorkflowStatus2["PAUSED"] = "paused";
|
|
161
|
-
WorkflowStatus2["COMPLETED"] = "completed";
|
|
162
|
-
WorkflowStatus2["FAILED"] = "failed";
|
|
163
|
-
WorkflowStatus2["CANCELLED"] = "cancelled";
|
|
164
|
-
})(WorkflowStatus ||= {});
|
|
165
|
-
var StepType;
|
|
166
|
-
((StepType2) => {
|
|
167
|
-
StepType2["PAUSE"] = "pause";
|
|
168
|
-
StepType2["RUN"] = "run";
|
|
169
|
-
StepType2["WAIT_FOR"] = "waitFor";
|
|
170
|
-
StepType2["WAIT_UNTIL"] = "waitUntil";
|
|
171
|
-
StepType2["DELAY"] = "delay";
|
|
172
|
-
StepType2["POLL"] = "poll";
|
|
173
|
-
})(StepType ||= {});
|
|
174
|
-
|
|
175
|
-
// src/ast-parser.ts
|
|
176
|
-
function parseWorkflowHandler(handler) {
|
|
177
|
-
const handlerSource = handler.toString();
|
|
178
|
-
const sourceFile = ts.createSourceFile("handler.ts", handlerSource, ts.ScriptTarget.Latest, true);
|
|
179
|
-
const steps = new Map;
|
|
180
|
-
function isInConditional(node) {
|
|
181
|
-
let current = node.parent;
|
|
182
|
-
while (current) {
|
|
183
|
-
if (ts.isIfStatement(current) || ts.isConditionalExpression(current) || ts.isSwitchStatement(current) || ts.isCaseClause(current)) {
|
|
184
|
-
return true;
|
|
185
|
-
}
|
|
186
|
-
current = current.parent;
|
|
187
|
-
}
|
|
188
|
-
return false;
|
|
189
|
-
}
|
|
190
|
-
function isInLoop(node) {
|
|
191
|
-
let current = node.parent;
|
|
192
|
-
while (current) {
|
|
193
|
-
if (ts.isForStatement(current) || ts.isForInStatement(current) || ts.isForOfStatement(current) || ts.isWhileStatement(current) || ts.isDoStatement(current)) {
|
|
194
|
-
return true;
|
|
195
|
-
}
|
|
196
|
-
current = current.parent;
|
|
197
|
-
}
|
|
198
|
-
return false;
|
|
199
|
-
}
|
|
200
|
-
function extractStepId(arg) {
|
|
201
|
-
if (ts.isStringLiteral(arg) || ts.isNoSubstitutionTemplateLiteral(arg)) {
|
|
202
|
-
return { id: arg.text, isDynamic: false };
|
|
203
|
-
}
|
|
204
|
-
if (ts.isTemplateExpression(arg)) {
|
|
205
|
-
let templateStr = arg.head.text;
|
|
206
|
-
for (const span of arg.templateSpans) {
|
|
207
|
-
templateStr += `\${...}`;
|
|
208
|
-
templateStr += span.literal.text;
|
|
209
|
-
}
|
|
210
|
-
return { id: templateStr, isDynamic: true };
|
|
211
|
-
}
|
|
212
|
-
return { id: arg.getText(sourceFile), isDynamic: true };
|
|
213
|
-
}
|
|
214
|
-
function visit(node) {
|
|
215
|
-
if (ts.isCallExpression(node) && ts.isPropertyAccessExpression(node.expression)) {
|
|
216
|
-
const propertyAccess = node.expression;
|
|
217
|
-
const objectName = propertyAccess.expression.getText(sourceFile);
|
|
218
|
-
const methodName = propertyAccess.name.text;
|
|
219
|
-
if (objectName === "step" && (methodName === "run" || methodName === "waitFor" || methodName === "pause" || methodName === "waitUntil" || methodName === "delay" || methodName === "sleep" || methodName === "poll")) {
|
|
220
|
-
const firstArg = node.arguments[0];
|
|
221
|
-
if (firstArg) {
|
|
222
|
-
const { id, isDynamic } = extractStepId(firstArg);
|
|
223
|
-
const stepType = methodName === "sleep" ? "delay" /* DELAY */ : methodName;
|
|
224
|
-
const stepDefinition = {
|
|
225
|
-
id,
|
|
226
|
-
type: stepType,
|
|
227
|
-
conditional: isInConditional(node),
|
|
228
|
-
loop: isInLoop(node),
|
|
229
|
-
isDynamic
|
|
230
|
-
};
|
|
231
|
-
if (steps.has(id)) {
|
|
232
|
-
throw new Error(`Duplicate step ID detected: '${id}'. Step IDs must be unique within a workflow.`);
|
|
233
|
-
}
|
|
234
|
-
steps.set(id, stepDefinition);
|
|
235
|
-
}
|
|
236
|
-
}
|
|
237
|
-
}
|
|
238
|
-
ts.forEachChild(node, visit);
|
|
239
|
-
}
|
|
240
|
-
visit(sourceFile);
|
|
241
|
-
return { steps: Array.from(steps.values()) };
|
|
242
|
-
}
|
|
84
|
+
// src/constants.ts
|
|
85
|
+
var PAUSE_EVENT_NAME = "__internal_pause";
|
|
86
|
+
var WORKFLOW_RUN_QUEUE_NAME = "workflow-run";
|
|
87
|
+
var DEFAULT_PGBOSS_SCHEMA = "pgboss_v12_pgworkflow";
|
|
243
88
|
|
|
244
89
|
// src/db/migration.ts
|
|
90
|
+
var MIGRATION_LOCK_ID = 738291645;
|
|
91
|
+
var CURRENT_SCHEMA_VERSION = 2;
|
|
245
92
|
async function runMigrations(db) {
|
|
246
|
-
|
|
247
|
-
|
|
248
|
-
|
|
249
|
-
|
|
250
|
-
|
|
251
|
-
|
|
252
|
-
|
|
253
|
-
|
|
254
|
-
await db.executeSql(`
|
|
255
|
-
CREATE TABLE workflow_runs (
|
|
93
|
+
if (await isSchemaUpToDate(db)) {
|
|
94
|
+
return;
|
|
95
|
+
}
|
|
96
|
+
const currentVersion = await getCurrentVersion(db);
|
|
97
|
+
const commands = [];
|
|
98
|
+
if (currentVersion < 1) {
|
|
99
|
+
commands.push(`
|
|
100
|
+
CREATE TABLE IF NOT EXISTS workflow_runs (
|
|
256
101
|
id varchar(32) PRIMARY KEY NOT NULL,
|
|
257
102
|
created_at timestamp with time zone DEFAULT now() NOT NULL,
|
|
258
103
|
updated_at timestamp with time zone DEFAULT now() NOT NULL,
|
|
@@ -270,30 +115,67 @@ async function runMigrations(db) {
|
|
|
270
115
|
timeout_at timestamp with time zone,
|
|
271
116
|
retry_count integer DEFAULT 0 NOT NULL,
|
|
272
117
|
max_retries integer DEFAULT 0 NOT NULL,
|
|
273
|
-
job_id varchar(256)
|
|
274
|
-
|
|
275
|
-
|
|
276
|
-
|
|
277
|
-
|
|
278
|
-
|
|
279
|
-
|
|
280
|
-
CREATE INDEX
|
|
281
|
-
|
|
282
|
-
|
|
283
|
-
CREATE
|
|
284
|
-
|
|
118
|
+
job_id varchar(256)
|
|
119
|
+
)
|
|
120
|
+
`);
|
|
121
|
+
commands.push(`
|
|
122
|
+
CREATE INDEX IF NOT EXISTS workflow_runs_created_at_idx ON workflow_runs USING btree (created_at)
|
|
123
|
+
`);
|
|
124
|
+
commands.push(`
|
|
125
|
+
CREATE INDEX IF NOT EXISTS workflow_runs_resource_id_created_at_idx ON workflow_runs USING btree (resource_id, created_at DESC)
|
|
126
|
+
`);
|
|
127
|
+
commands.push(`
|
|
128
|
+
CREATE INDEX IF NOT EXISTS workflow_runs_status_created_at_idx ON workflow_runs USING btree (status, created_at DESC)
|
|
129
|
+
`);
|
|
130
|
+
commands.push(`
|
|
131
|
+
CREATE INDEX IF NOT EXISTS workflow_runs_workflow_id_created_at_idx ON workflow_runs USING btree (workflow_id, created_at DESC)
|
|
132
|
+
`);
|
|
133
|
+
commands.push(`
|
|
134
|
+
CREATE INDEX IF NOT EXISTS workflow_runs_resource_id_workflow_id_created_at_idx ON workflow_runs USING btree (resource_id, workflow_id, created_at DESC)
|
|
135
|
+
`);
|
|
136
|
+
}
|
|
137
|
+
if (currentVersion < 2) {
|
|
138
|
+
commands.push("DROP INDEX IF EXISTS workflow_runs_workflow_id_idx");
|
|
139
|
+
commands.push("DROP INDEX IF EXISTS workflow_runs_resource_id_idx");
|
|
140
|
+
commands.push("ALTER TABLE workflow_runs ADD COLUMN IF NOT EXISTS idempotency_key varchar(256)");
|
|
141
|
+
commands.push(`
|
|
142
|
+
CREATE UNIQUE INDEX IF NOT EXISTS workflow_runs_idempotency_key_idx ON workflow_runs (idempotency_key) WHERE idempotency_key IS NOT NULL
|
|
143
|
+
`);
|
|
144
|
+
}
|
|
145
|
+
if (currentVersion === 0) {
|
|
146
|
+
commands.push(`INSERT INTO workflow_schema_version (version) VALUES (${CURRENT_SCHEMA_VERSION})`);
|
|
285
147
|
} else {
|
|
286
|
-
|
|
287
|
-
|
|
288
|
-
|
|
289
|
-
|
|
290
|
-
|
|
291
|
-
|
|
292
|
-
|
|
293
|
-
|
|
294
|
-
|
|
295
|
-
|
|
296
|
-
|
|
148
|
+
commands.push(`UPDATE workflow_schema_version SET version = ${CURRENT_SCHEMA_VERSION}`);
|
|
149
|
+
}
|
|
150
|
+
if (commands.length === 0) {
|
|
151
|
+
return;
|
|
152
|
+
}
|
|
153
|
+
const sql = [
|
|
154
|
+
"BEGIN",
|
|
155
|
+
"SET LOCAL lock_timeout = '30s'",
|
|
156
|
+
"SET LOCAL idle_in_transaction_session_timeout = '30s'",
|
|
157
|
+
`SELECT pg_advisory_xact_lock(${MIGRATION_LOCK_ID})`,
|
|
158
|
+
"CREATE TABLE IF NOT EXISTS workflow_schema_version (version integer NOT NULL)",
|
|
159
|
+
...commands,
|
|
160
|
+
"COMMIT"
|
|
161
|
+
].join(`;
|
|
162
|
+
`);
|
|
163
|
+
await db.executeSql(sql, []);
|
|
164
|
+
}
|
|
165
|
+
async function isSchemaUpToDate(db) {
|
|
166
|
+
try {
|
|
167
|
+
const result = await db.executeSql("SELECT version FROM workflow_schema_version LIMIT 1", []);
|
|
168
|
+
return (result.rows[0]?.version ?? 0) >= CURRENT_SCHEMA_VERSION;
|
|
169
|
+
} catch {
|
|
170
|
+
return false;
|
|
171
|
+
}
|
|
172
|
+
}
|
|
173
|
+
async function getCurrentVersion(db) {
|
|
174
|
+
try {
|
|
175
|
+
const result = await db.executeSql("SELECT version FROM workflow_schema_version LIMIT 1", []);
|
|
176
|
+
return result.rows[0]?.version ?? 0;
|
|
177
|
+
} catch {
|
|
178
|
+
return 0;
|
|
297
179
|
}
|
|
298
180
|
}
|
|
299
181
|
|
|
@@ -471,123 +353,578 @@ async function updateWorkflowRun({
|
|
|
471
353
|
if (expectedStatuses && expectedStatuses.length > 0) {
|
|
472
354
|
whereClause += ` AND status = ANY($${paramIndex - 1})`;
|
|
473
355
|
}
|
|
474
|
-
const query = `
|
|
475
|
-
UPDATE workflow_runs
|
|
476
|
-
SET ${updates.join(", ")}
|
|
477
|
-
${whereClause}
|
|
478
|
-
RETURNING *
|
|
479
|
-
`;
|
|
480
|
-
const result = await db.executeSql(query, values);
|
|
481
|
-
const run = result.rows[0];
|
|
482
|
-
if (!run) {
|
|
483
|
-
return null;
|
|
356
|
+
const query = `
|
|
357
|
+
UPDATE workflow_runs
|
|
358
|
+
SET ${updates.join(", ")}
|
|
359
|
+
${whereClause}
|
|
360
|
+
RETURNING *
|
|
361
|
+
`;
|
|
362
|
+
const result = await db.executeSql(query, values);
|
|
363
|
+
const run = result.rows[0];
|
|
364
|
+
if (!run) {
|
|
365
|
+
return null;
|
|
366
|
+
}
|
|
367
|
+
return mapRowToWorkflowRun(run);
|
|
368
|
+
}
|
|
369
|
+
async function getWorkflowRuns({
|
|
370
|
+
resourceId,
|
|
371
|
+
startingAfter,
|
|
372
|
+
endingBefore,
|
|
373
|
+
limit = 20,
|
|
374
|
+
statuses,
|
|
375
|
+
workflowId
|
|
376
|
+
}, db) {
|
|
377
|
+
const conditions = [];
|
|
378
|
+
const values = [];
|
|
379
|
+
let paramIndex = 1;
|
|
380
|
+
if (resourceId) {
|
|
381
|
+
conditions.push(`resource_id = $${paramIndex}`);
|
|
382
|
+
values.push(resourceId);
|
|
383
|
+
paramIndex++;
|
|
384
|
+
}
|
|
385
|
+
if (statuses && statuses.length > 0) {
|
|
386
|
+
conditions.push(`status = ANY($${paramIndex})`);
|
|
387
|
+
values.push(statuses);
|
|
388
|
+
paramIndex++;
|
|
389
|
+
}
|
|
390
|
+
if (workflowId) {
|
|
391
|
+
conditions.push(`workflow_id = $${paramIndex}`);
|
|
392
|
+
values.push(workflowId);
|
|
393
|
+
paramIndex++;
|
|
394
|
+
}
|
|
395
|
+
const cursorIds = [startingAfter, endingBefore].filter(Boolean);
|
|
396
|
+
if (cursorIds.length > 0) {
|
|
397
|
+
const cursorResult = await db.executeSql("SELECT id, created_at FROM workflow_runs WHERE id = ANY($1)", [cursorIds]);
|
|
398
|
+
const cursorMap = new Map;
|
|
399
|
+
for (const row of cursorResult.rows) {
|
|
400
|
+
cursorMap.set(row.id, typeof row.created_at === "string" ? new Date(row.created_at) : row.created_at);
|
|
401
|
+
}
|
|
402
|
+
if (startingAfter) {
|
|
403
|
+
const cursor = cursorMap.get(startingAfter);
|
|
404
|
+
if (cursor) {
|
|
405
|
+
conditions.push(`created_at < $${paramIndex}`);
|
|
406
|
+
values.push(cursor);
|
|
407
|
+
paramIndex++;
|
|
408
|
+
}
|
|
409
|
+
}
|
|
410
|
+
if (endingBefore) {
|
|
411
|
+
const cursor = cursorMap.get(endingBefore);
|
|
412
|
+
if (cursor) {
|
|
413
|
+
conditions.push(`created_at > $${paramIndex}`);
|
|
414
|
+
values.push(cursor);
|
|
415
|
+
paramIndex++;
|
|
416
|
+
}
|
|
417
|
+
}
|
|
418
|
+
}
|
|
419
|
+
const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
|
|
420
|
+
const actualLimit = Math.min(Math.max(limit, 1), 100) + 1;
|
|
421
|
+
const isBackward = !!endingBefore && !startingAfter;
|
|
422
|
+
const query = `
|
|
423
|
+
SELECT * FROM workflow_runs
|
|
424
|
+
${whereClause}
|
|
425
|
+
ORDER BY created_at ${isBackward ? "ASC" : "DESC"}
|
|
426
|
+
LIMIT $${paramIndex}
|
|
427
|
+
`;
|
|
428
|
+
values.push(actualLimit);
|
|
429
|
+
const result = await db.executeSql(query, values);
|
|
430
|
+
const rows = result.rows;
|
|
431
|
+
const hasExtraRow = rows.length > (limit ?? 20);
|
|
432
|
+
const rawItems = hasExtraRow ? rows.slice(0, limit) : rows;
|
|
433
|
+
if (isBackward) {
|
|
434
|
+
rawItems.reverse();
|
|
435
|
+
}
|
|
436
|
+
const items = rawItems.map((row) => mapRowToWorkflowRun(row));
|
|
437
|
+
const hasMore = isBackward ? items.length > 0 : hasExtraRow;
|
|
438
|
+
const hasPrev = isBackward ? hasExtraRow : !!startingAfter && items.length > 0;
|
|
439
|
+
const nextCursor = hasMore && items.length > 0 ? items[items.length - 1]?.id ?? null : null;
|
|
440
|
+
const prevCursor = hasPrev && items.length > 0 ? items[0]?.id ?? null : null;
|
|
441
|
+
return { items, nextCursor, prevCursor, hasMore, hasPrev };
|
|
442
|
+
}
|
|
443
|
+
async function withPostgresTransaction(db, callback, pool) {
|
|
444
|
+
let txDb;
|
|
445
|
+
let release;
|
|
446
|
+
if (pool) {
|
|
447
|
+
const client = await pool.connect();
|
|
448
|
+
txDb = {
|
|
449
|
+
executeSql: (text, values) => client.query(text, values)
|
|
450
|
+
};
|
|
451
|
+
release = () => client.release();
|
|
452
|
+
} else {
|
|
453
|
+
txDb = db;
|
|
454
|
+
}
|
|
455
|
+
try {
|
|
456
|
+
await txDb.executeSql("BEGIN", []);
|
|
457
|
+
const result = await callback(txDb);
|
|
458
|
+
await txDb.executeSql("COMMIT", []);
|
|
459
|
+
return result;
|
|
460
|
+
} catch (error) {
|
|
461
|
+
await txDb.executeSql("ROLLBACK", []);
|
|
462
|
+
throw error;
|
|
463
|
+
} finally {
|
|
464
|
+
release?.();
|
|
465
|
+
}
|
|
466
|
+
}
|
|
467
|
+
|
|
468
|
+
// src/error.ts
|
|
469
|
+
class WorkflowEngineError extends Error {
|
|
470
|
+
workflowId;
|
|
471
|
+
runId;
|
|
472
|
+
cause;
|
|
473
|
+
issues;
|
|
474
|
+
constructor(message, workflowId, runId, cause = undefined, issues) {
|
|
475
|
+
super(message);
|
|
476
|
+
this.workflowId = workflowId;
|
|
477
|
+
this.runId = runId;
|
|
478
|
+
this.cause = cause;
|
|
479
|
+
this.issues = issues;
|
|
480
|
+
this.name = "WorkflowEngineError";
|
|
481
|
+
if (Error.captureStackTrace) {
|
|
482
|
+
Error.captureStackTrace(this, WorkflowEngineError);
|
|
483
|
+
}
|
|
484
|
+
}
|
|
485
|
+
}
|
|
486
|
+
|
|
487
|
+
class WorkflowRunNotFoundError extends WorkflowEngineError {
|
|
488
|
+
constructor(runId, workflowId) {
|
|
489
|
+
super("Workflow run not found", workflowId, runId);
|
|
490
|
+
this.name = "WorkflowRunNotFoundError";
|
|
491
|
+
}
|
|
492
|
+
}
|
|
493
|
+
|
|
494
|
+
// src/types.ts
|
|
495
|
+
var WorkflowStatus;
|
|
496
|
+
((WorkflowStatus2) => {
|
|
497
|
+
WorkflowStatus2["PENDING"] = "pending";
|
|
498
|
+
WorkflowStatus2["RUNNING"] = "running";
|
|
499
|
+
WorkflowStatus2["PAUSED"] = "paused";
|
|
500
|
+
WorkflowStatus2["COMPLETED"] = "completed";
|
|
501
|
+
WorkflowStatus2["FAILED"] = "failed";
|
|
502
|
+
WorkflowStatus2["CANCELLED"] = "cancelled";
|
|
503
|
+
})(WorkflowStatus ||= {});
|
|
504
|
+
var StepType;
|
|
505
|
+
((StepType2) => {
|
|
506
|
+
StepType2["PAUSE"] = "pause";
|
|
507
|
+
StepType2["RUN"] = "run";
|
|
508
|
+
StepType2["WAIT_FOR"] = "waitFor";
|
|
509
|
+
StepType2["WAIT_UNTIL"] = "waitUntil";
|
|
510
|
+
StepType2["DELAY"] = "delay";
|
|
511
|
+
StepType2["POLL"] = "poll";
|
|
512
|
+
})(StepType ||= {});
|
|
513
|
+
|
|
514
|
+
// src/client.ts
|
|
515
|
+
var LOG_PREFIX = "[WorkflowClient]";
|
|
516
|
+
var defaultLogger = {
|
|
517
|
+
log: (_message) => console.warn(_message),
|
|
518
|
+
error: (message, error) => console.error(message, error)
|
|
519
|
+
};
|
|
520
|
+
var defaultExpireInSeconds = process.env.WORKFLOW_RUN_EXPIRE_IN_SECONDS ? Number.parseInt(process.env.WORKFLOW_RUN_EXPIRE_IN_SECONDS, 10) : 5 * 60;
|
|
521
|
+
|
|
522
|
+
class WorkflowClient {
|
|
523
|
+
boss;
|
|
524
|
+
db;
|
|
525
|
+
pool;
|
|
526
|
+
_ownsPool = false;
|
|
527
|
+
_started = false;
|
|
528
|
+
logger;
|
|
529
|
+
constructor({ logger, ...connectionOptions }) {
|
|
530
|
+
this.logger = logger ?? defaultLogger;
|
|
531
|
+
if ("pool" in connectionOptions && connectionOptions.pool) {
|
|
532
|
+
this.pool = connectionOptions.pool;
|
|
533
|
+
} else if ("connectionString" in connectionOptions && connectionOptions.connectionString) {
|
|
534
|
+
this.pool = new import_pg.default.Pool({ connectionString: connectionOptions.connectionString });
|
|
535
|
+
this._ownsPool = true;
|
|
536
|
+
} else {
|
|
537
|
+
throw new WorkflowEngineError("Either pool or connectionString must be provided");
|
|
538
|
+
}
|
|
539
|
+
const db = {
|
|
540
|
+
executeSql: (text, values) => this.pool.query(text, values)
|
|
541
|
+
};
|
|
542
|
+
this.boss = new import_pg_boss.PgBoss({ db, schema: DEFAULT_PGBOSS_SCHEMA });
|
|
543
|
+
this.db = db;
|
|
544
|
+
}
|
|
545
|
+
async start() {
|
|
546
|
+
if (this._started) {
|
|
547
|
+
return;
|
|
548
|
+
}
|
|
549
|
+
await this.boss.start();
|
|
550
|
+
this.db = this.boss.getDb();
|
|
551
|
+
await runMigrations(this.db);
|
|
552
|
+
await this.boss.createQueue(WORKFLOW_RUN_QUEUE_NAME);
|
|
553
|
+
this._started = true;
|
|
554
|
+
this.logger.log(`${LOG_PREFIX} Client started`);
|
|
555
|
+
}
|
|
556
|
+
async stop() {
|
|
557
|
+
await this.boss.stop();
|
|
558
|
+
if (this._ownsPool) {
|
|
559
|
+
await this.pool.end();
|
|
560
|
+
}
|
|
561
|
+
this._started = false;
|
|
562
|
+
this.logger.log(`${LOG_PREFIX} Client stopped`);
|
|
563
|
+
}
|
|
564
|
+
async startWorkflow(refOrParams, inputArg, optionsArg) {
|
|
565
|
+
await this.ensureStarted();
|
|
566
|
+
let workflowId;
|
|
567
|
+
let input;
|
|
568
|
+
let resourceId;
|
|
569
|
+
let options;
|
|
570
|
+
if (typeof refOrParams === "function" && "id" in refOrParams) {
|
|
571
|
+
const ref = refOrParams;
|
|
572
|
+
workflowId = ref.id;
|
|
573
|
+
input = inputArg;
|
|
574
|
+
options = optionsArg;
|
|
575
|
+
resourceId = optionsArg?.resourceId;
|
|
576
|
+
if (ref.inputSchema) {
|
|
577
|
+
const result = await ref.inputSchema["~standard"].validate(input);
|
|
578
|
+
if (result.issues) {
|
|
579
|
+
throw new WorkflowEngineError(JSON.stringify(result.issues), workflowId, undefined, undefined, result.issues);
|
|
580
|
+
}
|
|
581
|
+
}
|
|
582
|
+
} else {
|
|
583
|
+
const params = refOrParams;
|
|
584
|
+
workflowId = params.workflowId;
|
|
585
|
+
input = params.input;
|
|
586
|
+
resourceId = params.resourceId;
|
|
587
|
+
options = params.options;
|
|
588
|
+
}
|
|
589
|
+
const run = await withPostgresTransaction(this.db, async (_db) => {
|
|
590
|
+
const timeoutAt = options?.timeout ? new Date(Date.now() + options.timeout) : null;
|
|
591
|
+
const { run: insertedRun, created } = await insertWorkflowRun({
|
|
592
|
+
resourceId,
|
|
593
|
+
workflowId,
|
|
594
|
+
currentStepId: "__start__",
|
|
595
|
+
status: "running" /* RUNNING */,
|
|
596
|
+
input,
|
|
597
|
+
maxRetries: options?.retries ?? 0,
|
|
598
|
+
timeoutAt
|
|
599
|
+
}, _db);
|
|
600
|
+
if (created) {
|
|
601
|
+
const job = {
|
|
602
|
+
runId: insertedRun.id,
|
|
603
|
+
resourceId,
|
|
604
|
+
workflowId,
|
|
605
|
+
input
|
|
606
|
+
};
|
|
607
|
+
await this.boss.send(WORKFLOW_RUN_QUEUE_NAME, job, {
|
|
608
|
+
startAfter: new Date,
|
|
609
|
+
expireInSeconds: options?.expireInSeconds ?? defaultExpireInSeconds
|
|
610
|
+
});
|
|
611
|
+
}
|
|
612
|
+
return insertedRun;
|
|
613
|
+
}, this.pool);
|
|
614
|
+
this.logger.log(`${LOG_PREFIX} Started workflow run ${run.id} for ${workflowId}`);
|
|
615
|
+
return run;
|
|
616
|
+
}
|
|
617
|
+
async triggerEvent({
|
|
618
|
+
runId,
|
|
619
|
+
resourceId,
|
|
620
|
+
eventName,
|
|
621
|
+
data,
|
|
622
|
+
options
|
|
623
|
+
}) {
|
|
624
|
+
await this.ensureStarted();
|
|
625
|
+
const run = await this.getRun({ runId, resourceId });
|
|
626
|
+
const job = {
|
|
627
|
+
runId: run.id,
|
|
628
|
+
resourceId: resourceId ?? run.resourceId ?? undefined,
|
|
629
|
+
workflowId: run.workflowId,
|
|
630
|
+
input: run.input,
|
|
631
|
+
event: {
|
|
632
|
+
name: eventName,
|
|
633
|
+
data
|
|
634
|
+
}
|
|
635
|
+
};
|
|
636
|
+
await this.boss.send(WORKFLOW_RUN_QUEUE_NAME, job, {
|
|
637
|
+
expireInSeconds: options?.expireInSeconds ?? defaultExpireInSeconds
|
|
638
|
+
});
|
|
639
|
+
this.logger.log(`${LOG_PREFIX} Event ${eventName} sent for workflow run ${runId}`);
|
|
640
|
+
return run;
|
|
641
|
+
}
|
|
642
|
+
async pauseWorkflow({
|
|
643
|
+
runId,
|
|
644
|
+
resourceId
|
|
645
|
+
}) {
|
|
646
|
+
await this.ensureStarted();
|
|
647
|
+
const run = await updateWorkflowRun({
|
|
648
|
+
runId,
|
|
649
|
+
resourceId,
|
|
650
|
+
data: {
|
|
651
|
+
status: "paused" /* PAUSED */,
|
|
652
|
+
pausedAt: new Date
|
|
653
|
+
},
|
|
654
|
+
expectedStatuses: ["running" /* RUNNING */, "pending" /* PENDING */]
|
|
655
|
+
}, this.db);
|
|
656
|
+
if (!run) {
|
|
657
|
+
throw new WorkflowRunNotFoundError(runId);
|
|
658
|
+
}
|
|
659
|
+
this.logger.log(`${LOG_PREFIX} Paused workflow run ${runId}`);
|
|
660
|
+
return run;
|
|
661
|
+
}
|
|
662
|
+
async resumeWorkflow({
|
|
663
|
+
runId,
|
|
664
|
+
resourceId,
|
|
665
|
+
options
|
|
666
|
+
}) {
|
|
667
|
+
await this.ensureStarted();
|
|
668
|
+
const current = await this.getRun({ runId, resourceId });
|
|
669
|
+
if (current.status !== "paused" /* PAUSED */) {
|
|
670
|
+
throw new WorkflowEngineError(`Cannot resume workflow run in '${current.status}' status, must be 'paused'`, current.workflowId, runId);
|
|
671
|
+
}
|
|
672
|
+
return this.triggerEvent({
|
|
673
|
+
runId,
|
|
674
|
+
resourceId,
|
|
675
|
+
eventName: PAUSE_EVENT_NAME,
|
|
676
|
+
data: {},
|
|
677
|
+
options
|
|
678
|
+
});
|
|
679
|
+
}
|
|
680
|
+
async fastForwardWorkflow({
|
|
681
|
+
runId,
|
|
682
|
+
resourceId,
|
|
683
|
+
data
|
|
684
|
+
}) {
|
|
685
|
+
await this.ensureStarted();
|
|
686
|
+
const run = await this.getRun({ runId, resourceId });
|
|
687
|
+
if (run.status !== "paused" /* PAUSED */) {
|
|
688
|
+
return run;
|
|
689
|
+
}
|
|
690
|
+
const stepId = run.currentStepId;
|
|
691
|
+
const waitForEntry = run.timeline[`${stepId}-wait-for`];
|
|
692
|
+
if (!waitForEntry || typeof waitForEntry !== "object" || !("waitFor" in waitForEntry)) {
|
|
693
|
+
return run;
|
|
694
|
+
}
|
|
695
|
+
const { eventName, timeoutEvent, skipOutput } = waitForEntry.waitFor;
|
|
696
|
+
if (eventName === PAUSE_EVENT_NAME) {
|
|
697
|
+
return this.resumeWorkflow({ runId, resourceId });
|
|
698
|
+
}
|
|
699
|
+
if (skipOutput && timeoutEvent) {
|
|
700
|
+
await withPostgresTransaction(this.db, async (db) => {
|
|
701
|
+
const freshRun = await getWorkflowRun({ runId, resourceId }, { exclusiveLock: true, db });
|
|
702
|
+
if (!freshRun)
|
|
703
|
+
throw new WorkflowRunNotFoundError(runId);
|
|
704
|
+
return updateWorkflowRun({
|
|
705
|
+
runId,
|
|
706
|
+
resourceId,
|
|
707
|
+
data: {
|
|
708
|
+
timeline: import_es_toolkit.merge(freshRun.timeline, {
|
|
709
|
+
[stepId]: {
|
|
710
|
+
output: data ?? {},
|
|
711
|
+
timestamp: new Date
|
|
712
|
+
}
|
|
713
|
+
})
|
|
714
|
+
}
|
|
715
|
+
}, db);
|
|
716
|
+
}, this.pool);
|
|
717
|
+
return this.triggerEvent({ runId, resourceId, eventName: timeoutEvent });
|
|
718
|
+
}
|
|
719
|
+
if (eventName) {
|
|
720
|
+
return this.triggerEvent({ runId, resourceId, eventName, data: data ?? {} });
|
|
721
|
+
}
|
|
722
|
+
if (timeoutEvent) {
|
|
723
|
+
return this.triggerEvent({ runId, resourceId, eventName: timeoutEvent, data: data ?? {} });
|
|
724
|
+
}
|
|
725
|
+
return run;
|
|
726
|
+
}
|
|
727
|
+
async cancelWorkflow({
|
|
728
|
+
runId,
|
|
729
|
+
resourceId
|
|
730
|
+
}) {
|
|
731
|
+
await this.ensureStarted();
|
|
732
|
+
const run = await updateWorkflowRun({
|
|
733
|
+
runId,
|
|
734
|
+
resourceId,
|
|
735
|
+
data: {
|
|
736
|
+
status: "cancelled" /* CANCELLED */
|
|
737
|
+
},
|
|
738
|
+
expectedStatuses: ["pending" /* PENDING */, "running" /* RUNNING */, "paused" /* PAUSED */]
|
|
739
|
+
}, this.db);
|
|
740
|
+
if (!run) {
|
|
741
|
+
throw new WorkflowRunNotFoundError(runId);
|
|
742
|
+
}
|
|
743
|
+
this.logger.log(`${LOG_PREFIX} Cancelled workflow run ${runId}`);
|
|
744
|
+
return run;
|
|
745
|
+
}
|
|
746
|
+
async getRun({
|
|
747
|
+
runId,
|
|
748
|
+
resourceId
|
|
749
|
+
}) {
|
|
750
|
+
await this.ensureStarted();
|
|
751
|
+
const run = await getWorkflowRun({ runId, resourceId }, { db: this.db });
|
|
752
|
+
if (!run) {
|
|
753
|
+
throw new WorkflowRunNotFoundError(runId);
|
|
754
|
+
}
|
|
755
|
+
return run;
|
|
756
|
+
}
|
|
757
|
+
async checkProgress({
|
|
758
|
+
runId,
|
|
759
|
+
resourceId
|
|
760
|
+
}) {
|
|
761
|
+
const run = await this.getRun({ runId, resourceId });
|
|
762
|
+
const completedSteps = Object.values(run.timeline).filter((entry) => typeof entry === "object" && entry !== null && ("output" in entry) && entry.output !== undefined).length;
|
|
763
|
+
const totalSteps = run.status === "completed" /* COMPLETED */ ? completedSteps : 0;
|
|
764
|
+
const completionPercentage = run.status === "completed" /* COMPLETED */ ? 100 : run.status === "failed" /* FAILED */ || run.status === "cancelled" /* CANCELLED */ ? 0 : 0;
|
|
765
|
+
return {
|
|
766
|
+
...run,
|
|
767
|
+
completedSteps,
|
|
768
|
+
completionPercentage,
|
|
769
|
+
totalSteps
|
|
770
|
+
};
|
|
771
|
+
}
|
|
772
|
+
async getRuns({
|
|
773
|
+
resourceId,
|
|
774
|
+
startingAfter,
|
|
775
|
+
endingBefore,
|
|
776
|
+
limit = 20,
|
|
777
|
+
statuses,
|
|
778
|
+
workflowId
|
|
779
|
+
}) {
|
|
780
|
+
await this.ensureStarted();
|
|
781
|
+
return getWorkflowRuns({
|
|
782
|
+
resourceId,
|
|
783
|
+
startingAfter,
|
|
784
|
+
endingBefore,
|
|
785
|
+
limit,
|
|
786
|
+
statuses,
|
|
787
|
+
workflowId
|
|
788
|
+
}, this.db);
|
|
789
|
+
}
|
|
790
|
+
async ensureStarted() {
|
|
791
|
+
if (!this._started) {
|
|
792
|
+
await this.start();
|
|
793
|
+
}
|
|
484
794
|
}
|
|
485
|
-
return mapRowToWorkflowRun(run);
|
|
486
795
|
}
|
|
487
|
-
|
|
488
|
-
|
|
489
|
-
|
|
490
|
-
|
|
491
|
-
|
|
492
|
-
|
|
493
|
-
|
|
494
|
-
|
|
495
|
-
|
|
496
|
-
|
|
497
|
-
|
|
498
|
-
|
|
499
|
-
|
|
500
|
-
|
|
501
|
-
|
|
502
|
-
|
|
503
|
-
|
|
504
|
-
|
|
505
|
-
|
|
506
|
-
|
|
796
|
+
// src/definition.ts
|
|
797
|
+
function createWorkflowRef(id, options) {
|
|
798
|
+
const ref = (handler, defineOptions) => ({
|
|
799
|
+
id,
|
|
800
|
+
handler,
|
|
801
|
+
inputSchema: options?.inputSchema,
|
|
802
|
+
timeout: defineOptions?.timeout,
|
|
803
|
+
retries: defineOptions?.retries
|
|
804
|
+
});
|
|
805
|
+
Object.defineProperty(ref, "id", { value: id, enumerable: true });
|
|
806
|
+
Object.defineProperty(ref, "inputSchema", { value: options?.inputSchema, enumerable: true });
|
|
807
|
+
return ref;
|
|
808
|
+
}
|
|
809
|
+
function createWorkflowFactory(plugins = []) {
|
|
810
|
+
const factory = (id, handler, { inputSchema, timeout, retries } = {}) => ({
|
|
811
|
+
id,
|
|
812
|
+
handler,
|
|
813
|
+
inputSchema,
|
|
814
|
+
timeout,
|
|
815
|
+
retries,
|
|
816
|
+
plugins: plugins.length > 0 ? plugins : undefined
|
|
817
|
+
});
|
|
818
|
+
factory.use = (plugin) => createWorkflowFactory([
|
|
819
|
+
...plugins,
|
|
820
|
+
plugin
|
|
821
|
+
]);
|
|
822
|
+
factory.ref = createWorkflowRef;
|
|
823
|
+
return factory;
|
|
824
|
+
}
|
|
825
|
+
var workflow = createWorkflowFactory();
|
|
826
|
+
// src/duration.ts
var import_parse_duration = __toESM(require("parse-duration"));
var MS_PER_SECOND = 1000;
var MS_PER_MINUTE = 60 * MS_PER_SECOND;
var MS_PER_HOUR = 60 * MS_PER_MINUTE;
var MS_PER_DAY = 24 * MS_PER_HOUR;
var MS_PER_WEEK = 7 * MS_PER_DAY;
/**
 * Normalizes a duration into positive milliseconds.
 *
 * Accepts either a human-readable string (delegated to the parse-duration
 * package) or an object of { weeks, days, hours, minutes, seconds } parts.
 * Throws WorkflowEngineError for empty/unparseable strings and for any
 * result that is not strictly positive.
 */
function parseDuration(duration) {
  if (typeof duration !== "string") {
    const { weeks = 0, days = 0, hours = 0, minutes = 0, seconds = 0 } = duration;
    const total = weeks * MS_PER_WEEK + days * MS_PER_DAY + hours * MS_PER_HOUR + minutes * MS_PER_MINUTE + seconds * MS_PER_SECOND;
    if (total <= 0) {
      throw new WorkflowEngineError("Invalid duration: must be a positive value");
    }
    return total;
  }
  if (duration.trim() === "") {
    throw new WorkflowEngineError("Invalid duration: empty string");
  }
  const parsed = import_parse_duration.default(duration);
  // parse-duration returns null/undefined for unparseable input.
  if (parsed == null || parsed <= 0) {
    throw new WorkflowEngineError(`Invalid duration: "${duration}"`);
  }
  return parsed;
}
|
|
851
|
+
// src/engine.ts
|
|
852
|
+
var import_es_toolkit2 = require("es-toolkit");
|
|
853
|
+
var import_pg2 = __toESM(require("pg"));
|
|
854
|
+
var import_pg_boss2 = require("pg-boss");
|
|
855
|
+
|
|
856
|
+
// src/ast-parser.ts
var ts = __toESM(require("typescript"));
/**
 * Statically analyzes a workflow handler (via Function#toString) using the
 * TypeScript compiler API and returns the `step.*` calls it contains.
 *
 * Each discovered step records its id, type (with `sleep` normalized to
 * `delay`), whether it sits inside a conditional or a loop, and whether its
 * id is dynamic (non-literal). Throws on duplicate step ids.
 */
function parseWorkflowHandler(handler) {
  const sourceFile = ts.createSourceFile("handler.ts", handler.toString(), ts.ScriptTarget.Latest, true);
  const steps = new Map;
  const conditionalChecks = [
    (n) => ts.isIfStatement(n),
    (n) => ts.isConditionalExpression(n),
    (n) => ts.isSwitchStatement(n),
    (n) => ts.isCaseClause(n)
  ];
  const loopChecks = [
    (n) => ts.isForStatement(n),
    (n) => ts.isForInStatement(n),
    (n) => ts.isForOfStatement(n),
    (n) => ts.isWhileStatement(n),
    (n) => ts.isDoStatement(n)
  ];
  // True when any ancestor of `node` satisfies one of `checks`.
  function hasAncestor(node, checks) {
    for (let current = node.parent; current; current = current.parent) {
      if (checks.some((check) => check(current))) {
        return true;
      }
    }
    return false;
  }
  // Derives the step id from the first argument of a step.* call.
  // Literal strings are static; template expressions and any other
  // expression are flagged as dynamic.
  function extractStepId(arg) {
    if (ts.isStringLiteral(arg) || ts.isNoSubstitutionTemplateLiteral(arg)) {
      return { id: arg.text, isDynamic: false };
    }
    if (ts.isTemplateExpression(arg)) {
      const pieces = [arg.head.text];
      for (const span of arg.templateSpans) {
        pieces.push(`\${...}`, span.literal.text);
      }
      return { id: pieces.join(""), isDynamic: true };
    }
    return { id: arg.getText(sourceFile), isDynamic: true };
  }
  const stepMethods = new Set(["run", "waitFor", "pause", "waitUntil", "delay", "sleep", "poll"]);
  function visit(node) {
    if (ts.isCallExpression(node) && ts.isPropertyAccessExpression(node.expression)) {
      const access = node.expression;
      const receiver = access.expression.getText(sourceFile);
      const method = access.name.text;
      if (receiver === "step" && stepMethods.has(method)) {
        const [firstArg] = node.arguments;
        if (firstArg) {
          const { id, isDynamic } = extractStepId(firstArg);
          if (steps.has(id)) {
            throw new Error(`Duplicate step ID detected: '${id}'. Step IDs must be unique within a workflow.`);
          }
          steps.set(id, {
            id,
            type: method === "sleep" ? "delay" /* DELAY */ : method,
            conditional: hasAncestor(node, conditionalChecks),
            loop: hasAncestor(node, loopChecks),
            isDynamic
          });
        }
      }
    }
    ts.forEachChild(node, visit);
  }
  visit(sourceFile);
  return { steps: Array.from(steps.values()) };
}
|
|
585
925
|
|
|
586
926
|
// src/engine.ts
|
|
587
|
-
var
|
|
588
|
-
var WORKFLOW_RUN_QUEUE_NAME = "workflow-run";
|
|
589
|
-
var LOG_PREFIX = "[WorkflowEngine]";
|
|
590
|
-
var DEFAULT_PGBOSS_SCHEMA = "pgboss_v12_pgworkflow";
|
|
927
|
+
// Prefix prepended to every engine log/error line (see buildLogger).
var LOG_PREFIX2 = "[WorkflowEngine]";
|
|
591
928
|
var StepTypeToIcon = {
|
|
592
929
|
["run" /* RUN */]: "λ",
|
|
593
930
|
["waitFor" /* WAIT_FOR */]: "○",
|
|
@@ -596,11 +933,11 @@ var StepTypeToIcon = {
|
|
|
596
933
|
["delay" /* DELAY */]: "⏱",
|
|
597
934
|
["poll" /* POLL */]: "↻"
|
|
598
935
|
};
|
|
599
|
-
var
|
|
936
|
+
// Fallback logger used when the engine is constructed without one.
// NOTE(review): `log` deliberately routes through console.warn rather than
// console.log — presumably to surface engine chatter on stderr; confirm.
var defaultLogger2 = {
  log(_message) {
    console.warn(_message);
  },
  error(message, error) {
    console.error(message, error);
  }
};
|
|
603
|
-
var
|
|
940
|
+
// Default pg-boss job expiry in seconds, overridable via the
// WORKFLOW_RUN_EXPIRE_IN_SECONDS env var. The original ternary forwarded
// Number.parseInt's result unchecked, so a malformed value (e.g. "abc")
// propagated NaN — and "0"/negatives propagated a non-positive expiry —
// straight into boss.send options. Validate and fall back to 5 minutes.
var defaultExpireInSeconds2 = (() => {
  const raw = process.env.WORKFLOW_RUN_EXPIRE_IN_SECONDS;
  const parsed = raw ? Number.parseInt(raw, 10) : Number.NaN;
  return Number.isInteger(parsed) && parsed > 0 ? parsed : 5 * 60;
})();
|
|
604
941
|
|
|
605
942
|
class WorkflowEngine {
|
|
606
943
|
boss;
|
|
@@ -612,11 +949,11 @@ class WorkflowEngine {
|
|
|
612
949
|
workflows = new Map;
|
|
613
950
|
logger;
|
|
614
951
|
constructor({ workflows, logger, boss, ...connectionOptions }) {
|
|
615
|
-
this.logger = this.buildLogger(logger ??
|
|
952
|
+
this.logger = this.buildLogger(logger ?? defaultLogger2);
|
|
616
953
|
if ("pool" in connectionOptions && connectionOptions.pool) {
|
|
617
954
|
this.pool = connectionOptions.pool;
|
|
618
955
|
} else if ("connectionString" in connectionOptions && connectionOptions.connectionString) {
|
|
619
|
-
this.pool = new
|
|
956
|
+
this.pool = new import_pg2.default.Pool({ connectionString: connectionOptions.connectionString });
|
|
620
957
|
this._ownsPool = true;
|
|
621
958
|
} else {
|
|
622
959
|
throw new WorkflowEngineError("Either pool or connectionString must be provided");
|
|
@@ -630,7 +967,7 @@ class WorkflowEngine {
|
|
|
630
967
|
if (boss) {
|
|
631
968
|
this.boss = boss;
|
|
632
969
|
} else {
|
|
633
|
-
this.boss = new
|
|
970
|
+
this.boss = new import_pg_boss2.PgBoss({ db, schema: DEFAULT_PGBOSS_SCHEMA });
|
|
634
971
|
}
|
|
635
972
|
this.db = this.boss.getDb();
|
|
636
973
|
}
|
|
@@ -694,13 +1031,26 @@ class WorkflowEngine {
|
|
|
694
1031
|
this.workflows.clear();
|
|
695
1032
|
return this;
|
|
696
1033
|
}
|
|
697
|
-
async startWorkflow({
|
|
698
|
-
|
|
699
|
-
|
|
700
|
-
|
|
701
|
-
idempotencyKey
|
|
702
|
-
options
|
|
703
|
-
|
|
1034
|
+
async startWorkflow(refOrParams, inputArg, optionsArg) {
|
|
1035
|
+
let workflowId;
|
|
1036
|
+
let input;
|
|
1037
|
+
let resourceId;
|
|
1038
|
+
let idempotencyKey;
|
|
1039
|
+
let options;
|
|
1040
|
+
if (typeof refOrParams === "function" && "id" in refOrParams) {
|
|
1041
|
+
workflowId = refOrParams.id;
|
|
1042
|
+
input = inputArg;
|
|
1043
|
+
options = optionsArg;
|
|
1044
|
+
resourceId = optionsArg?.resourceId;
|
|
1045
|
+
idempotencyKey = optionsArg?.idempotencyKey;
|
|
1046
|
+
} else {
|
|
1047
|
+
const params = refOrParams;
|
|
1048
|
+
workflowId = params.workflowId;
|
|
1049
|
+
input = params.input;
|
|
1050
|
+
resourceId = params.resourceId;
|
|
1051
|
+
idempotencyKey = params.idempotencyKey;
|
|
1052
|
+
options = params.options;
|
|
1053
|
+
}
|
|
704
1054
|
if (!this._started) {
|
|
705
1055
|
await this.start(false, { batchSize: options?.batchSize ?? 1 });
|
|
706
1056
|
}
|
|
@@ -741,7 +1091,7 @@ class WorkflowEngine {
|
|
|
741
1091
|
};
|
|
742
1092
|
await this.boss.send(WORKFLOW_RUN_QUEUE_NAME, job, {
|
|
743
1093
|
startAfter: new Date,
|
|
744
|
-
expireInSeconds: options?.expireInSeconds ??
|
|
1094
|
+
expireInSeconds: options?.expireInSeconds ?? defaultExpireInSeconds2
|
|
745
1095
|
});
|
|
746
1096
|
}
|
|
747
1097
|
return insertedRun;
|
|
@@ -816,7 +1166,7 @@ class WorkflowEngine {
|
|
|
816
1166
|
runId,
|
|
817
1167
|
resourceId,
|
|
818
1168
|
data: {
|
|
819
|
-
timeline:
|
|
1169
|
+
timeline: import_es_toolkit2.merge(freshRun.timeline, {
|
|
820
1170
|
[stepId]: {
|
|
821
1171
|
output: data ?? {},
|
|
822
1172
|
timestamp: new Date
|
|
@@ -872,7 +1222,7 @@ class WorkflowEngine {
|
|
|
872
1222
|
}
|
|
873
1223
|
};
|
|
874
1224
|
await this.boss.send(WORKFLOW_RUN_QUEUE_NAME, job, {
|
|
875
|
-
expireInSeconds: options?.expireInSeconds ??
|
|
1225
|
+
expireInSeconds: options?.expireInSeconds ?? defaultExpireInSeconds2
|
|
876
1226
|
});
|
|
877
1227
|
this.logger.log(`event ${eventName} sent for workflow run with id ${runId}`);
|
|
878
1228
|
return run;
|
|
@@ -951,27 +1301,29 @@ class WorkflowEngine {
|
|
|
951
1301
|
return run.resourceId ?? undefined;
|
|
952
1302
|
}
|
|
953
1303
|
async handleWorkflowRun([job]) {
|
|
954
|
-
const { runId, resourceId, workflowId, input, event } = job?.data ?? {};
|
|
955
|
-
|
|
956
|
-
|
|
957
|
-
}
|
|
958
|
-
if (!workflowId) {
|
|
959
|
-
throw new WorkflowEngineError("Invalid workflow run job, missing workflowId", undefined, runId);
|
|
960
|
-
}
|
|
961
|
-
const workflow2 = this.workflows.get(workflowId);
|
|
962
|
-
if (!workflow2) {
|
|
963
|
-
throw new WorkflowEngineError(`Workflow ${workflowId} not found`, workflowId, runId);
|
|
964
|
-
}
|
|
965
|
-
this.logger.log("Processing workflow run...", {
|
|
966
|
-
runId,
|
|
967
|
-
workflowId
|
|
968
|
-
});
|
|
969
|
-
let run = await this.getRun({ runId });
|
|
970
|
-
if (run.workflowId !== workflowId) {
|
|
971
|
-
throw new WorkflowEngineError(`Workflow run ${runId} does not match job workflowId ${workflowId}`, workflowId, runId);
|
|
972
|
-
}
|
|
973
|
-
const scopedResourceId = this.resolveScopedResourceId(resourceId, run);
|
|
1304
|
+
const { runId = "", resourceId, workflowId = "", input, event } = job?.data ?? {};
|
|
1305
|
+
let run;
|
|
1306
|
+
let scopedResourceId;
|
|
974
1307
|
try {
|
|
1308
|
+
if (!runId) {
|
|
1309
|
+
throw new WorkflowEngineError("Invalid workflow run job, missing runId", workflowId);
|
|
1310
|
+
}
|
|
1311
|
+
if (!workflowId) {
|
|
1312
|
+
throw new WorkflowEngineError("Invalid workflow run job, missing workflowId", undefined, runId);
|
|
1313
|
+
}
|
|
1314
|
+
const workflow2 = this.workflows.get(workflowId);
|
|
1315
|
+
if (!workflow2) {
|
|
1316
|
+
throw new WorkflowEngineError(`Workflow ${workflowId} not found`, workflowId, runId);
|
|
1317
|
+
}
|
|
1318
|
+
this.logger.log("Processing workflow run...", {
|
|
1319
|
+
runId,
|
|
1320
|
+
workflowId
|
|
1321
|
+
});
|
|
1322
|
+
run = await this.getRun({ runId });
|
|
1323
|
+
if (run.workflowId !== workflowId) {
|
|
1324
|
+
throw new WorkflowEngineError(`Workflow run ${runId} does not match job workflowId ${workflowId}`, workflowId, runId);
|
|
1325
|
+
}
|
|
1326
|
+
scopedResourceId = this.resolveScopedResourceId(resourceId, run);
|
|
975
1327
|
if (run.status === "cancelled" /* CANCELLED */) {
|
|
976
1328
|
this.logger.log(`Workflow run ${runId} is cancelled, skipping`);
|
|
977
1329
|
return;
|
|
@@ -1002,7 +1354,7 @@ class WorkflowEngine {
|
|
|
1002
1354
|
resumedAt: new Date,
|
|
1003
1355
|
jobId: job?.id,
|
|
1004
1356
|
...skipOutput ? {} : {
|
|
1005
|
-
timeline:
|
|
1357
|
+
timeline: import_es_toolkit2.merge(lockedRun.timeline, {
|
|
1006
1358
|
[lockedRun.currentStepId]: {
|
|
1007
1359
|
output: event?.data ?? {},
|
|
1008
1360
|
...isTimeout ? { timedOut: true } : {},
|
|
@@ -1115,7 +1467,7 @@ class WorkflowEngine {
|
|
|
1115
1467
|
});
|
|
1116
1468
|
}
|
|
1117
1469
|
} catch (error) {
|
|
1118
|
-
if (run.retryCount < run.maxRetries) {
|
|
1470
|
+
if (run && run.retryCount < run.maxRetries) {
|
|
1119
1471
|
await this.updateRun({
|
|
1120
1472
|
runId,
|
|
1121
1473
|
resourceId: scopedResourceId,
|
|
@@ -1133,19 +1485,21 @@ class WorkflowEngine {
|
|
|
1133
1485
|
};
|
|
1134
1486
|
await this.boss?.send("workflow-run", pgBossJob, {
|
|
1135
1487
|
startAfter: new Date(Date.now() + retryDelay),
|
|
1136
|
-
expireInSeconds:
|
|
1488
|
+
expireInSeconds: defaultExpireInSeconds2
|
|
1137
1489
|
});
|
|
1138
1490
|
return;
|
|
1139
1491
|
}
|
|
1140
|
-
|
|
1141
|
-
|
|
1142
|
-
|
|
1143
|
-
|
|
1144
|
-
|
|
1145
|
-
|
|
1146
|
-
|
|
1147
|
-
|
|
1148
|
-
|
|
1492
|
+
if (runId) {
|
|
1493
|
+
await this.updateRun({
|
|
1494
|
+
runId,
|
|
1495
|
+
resourceId: scopedResourceId,
|
|
1496
|
+
data: {
|
|
1497
|
+
status: "failed" /* FAILED */,
|
|
1498
|
+
error: error instanceof Error ? error.message : String(error),
|
|
1499
|
+
jobId: job?.id
|
|
1500
|
+
}
|
|
1501
|
+
});
|
|
1502
|
+
}
|
|
1149
1503
|
throw error;
|
|
1150
1504
|
}
|
|
1151
1505
|
}
|
|
@@ -1198,7 +1552,7 @@ class WorkflowEngine {
|
|
|
1198
1552
|
runId: run.id,
|
|
1199
1553
|
resourceId: run.resourceId ?? undefined,
|
|
1200
1554
|
data: {
|
|
1201
|
-
timeline:
|
|
1555
|
+
timeline: import_es_toolkit2.merge(persistedRun.timeline, {
|
|
1202
1556
|
[stepId]: {
|
|
1203
1557
|
output,
|
|
1204
1558
|
timestamp: new Date
|
|
@@ -1252,7 +1606,7 @@ ${error.stack}` : String(error)
|
|
|
1252
1606
|
status: "paused" /* PAUSED */,
|
|
1253
1607
|
currentStepId: stepId,
|
|
1254
1608
|
pausedAt: new Date,
|
|
1255
|
-
timeline:
|
|
1609
|
+
timeline: import_es_toolkit2.merge(freshRun.timeline, {
|
|
1256
1610
|
[`${stepId}-wait-for`]: {
|
|
1257
1611
|
waitFor: { eventName, timeoutEvent },
|
|
1258
1612
|
timestamp: new Date
|
|
@@ -1272,7 +1626,7 @@ ${error.stack}` : String(error)
|
|
|
1272
1626
|
};
|
|
1273
1627
|
await this.boss.send(WORKFLOW_RUN_QUEUE_NAME, job, {
|
|
1274
1628
|
startAfter: timeoutDate.getTime() <= Date.now() ? new Date : timeoutDate,
|
|
1275
|
-
expireInSeconds:
|
|
1629
|
+
expireInSeconds: defaultExpireInSeconds2
|
|
1276
1630
|
});
|
|
1277
1631
|
} catch (error) {
|
|
1278
1632
|
await this.updateRun({
|
|
@@ -1313,7 +1667,7 @@ ${error.stack}` : String(error)
|
|
|
1313
1667
|
resourceId: run.resourceId ?? undefined,
|
|
1314
1668
|
data: {
|
|
1315
1669
|
currentStepId: stepId,
|
|
1316
|
-
timeline:
|
|
1670
|
+
timeline: import_es_toolkit2.merge(freshRun.timeline, {
|
|
1317
1671
|
[stepId]: { output: {}, timedOut: true, timestamp: new Date }
|
|
1318
1672
|
})
|
|
1319
1673
|
}
|
|
@@ -1334,7 +1688,7 @@ ${error.stack}` : String(error)
|
|
|
1334
1688
|
resourceId: run.resourceId ?? undefined,
|
|
1335
1689
|
data: {
|
|
1336
1690
|
currentStepId: stepId,
|
|
1337
|
-
timeline:
|
|
1691
|
+
timeline: import_es_toolkit2.merge(freshRun.timeline, {
|
|
1338
1692
|
[stepId]: { output: {}, timedOut: true, timestamp: new Date }
|
|
1339
1693
|
})
|
|
1340
1694
|
}
|
|
@@ -1352,7 +1706,7 @@ ${error.stack}` : String(error)
|
|
|
1352
1706
|
resourceId: run.resourceId ?? undefined,
|
|
1353
1707
|
data: {
|
|
1354
1708
|
currentStepId: stepId,
|
|
1355
|
-
timeline:
|
|
1709
|
+
timeline: import_es_toolkit2.merge(freshRun.timeline, {
|
|
1356
1710
|
[stepId]: { output: result, timestamp: new Date }
|
|
1357
1711
|
})
|
|
1358
1712
|
}
|
|
@@ -1370,7 +1724,7 @@ ${error.stack}` : String(error)
|
|
|
1370
1724
|
status: "paused" /* PAUSED */,
|
|
1371
1725
|
currentStepId: stepId,
|
|
1372
1726
|
pausedAt: new Date,
|
|
1373
|
-
timeline:
|
|
1727
|
+
timeline: import_es_toolkit2.merge(freshRun.timeline, {
|
|
1374
1728
|
[`${stepId}-poll`]: { startedAt: startedAt.toISOString() },
|
|
1375
1729
|
[`${stepId}-wait-for`]: {
|
|
1376
1730
|
waitFor: { timeoutEvent: pollEvent, skipOutput: true },
|
|
@@ -1389,7 +1743,7 @@ ${error.stack}` : String(error)
|
|
|
1389
1743
|
event: { name: pollEvent, data: {} }
|
|
1390
1744
|
}, {
|
|
1391
1745
|
startAfter: new Date(Date.now() + intervalMs),
|
|
1392
|
-
expireInSeconds:
|
|
1746
|
+
expireInSeconds: defaultExpireInSeconds2
|
|
1393
1747
|
});
|
|
1394
1748
|
} catch (error) {
|
|
1395
1749
|
await this.updateRun({
|
|
@@ -1414,12 +1768,12 @@ ${error.stack}` : String(error)
|
|
|
1414
1768
|
return {
|
|
1415
1769
|
log: (message, context) => {
|
|
1416
1770
|
const { runId, workflowId } = context ?? {};
|
|
1417
|
-
const parts = [
|
|
1771
|
+
const parts = [LOG_PREFIX2, workflowId, runId].filter(Boolean).join(" ");
|
|
1418
1772
|
logger.log(`${parts}: ${message}`);
|
|
1419
1773
|
},
|
|
1420
1774
|
error: (message, error, context) => {
|
|
1421
1775
|
const { runId, workflowId } = context ?? {};
|
|
1422
|
-
const parts = [
|
|
1776
|
+
const parts = [LOG_PREFIX2, workflowId, runId].filter(Boolean).join(" ");
|
|
1423
1777
|
logger.error(`${parts}: ${message}`, error);
|
|
1424
1778
|
}
|
|
1425
1779
|
};
|
|
@@ -1443,5 +1797,5 @@ ${error.stack}` : String(error)
|
|
|
1443
1797
|
}
|
|
1444
1798
|
}
|
|
1445
1799
|
|
|
1446
|
-
//# debugId=
|
|
1800
|
+
//# debugId=BE8A12714B5354A264756E2164756E21
|
|
1447
1801
|
//# sourceMappingURL=index.js.map
|