pg-workflows 0.7.1 → 0.8.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +83 -765
- package/dist/client.entry.cjs +822 -0
- package/dist/client.entry.d.cts +227 -0
- package/dist/client.entry.d.ts +227 -0
- package/dist/client.entry.js +13 -0
- package/dist/client.entry.js.map +16 -0
- package/dist/index.cjs +640 -323
- package/dist/index.d.cts +123 -11
- package/dist/index.d.ts +123 -11
- package/dist/index.js +75 -487
- package/dist/index.js.map +12 -10
- package/dist/shared/chunk-8n9chg7z.js +753 -0
- package/dist/shared/chunk-8n9chg7z.js.map +16 -0
- package/package.json +11 -1
|
@@ -0,0 +1,822 @@
|
|
|
1
|
+
// ---------------------------------------------------------------------------
// Bundler-generated CommonJS/ESM interop prelude (auto-generated output;
// do not hand-edit — regenerate from src/ instead).
// ---------------------------------------------------------------------------
var import_node_module = require("node:module");
var __create = Object.create;
var __getProtoOf = Object.getPrototypeOf;
var __defProp = Object.defineProperty;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Generic property reader. Bound per-key to a module object so exported
// getters always reflect the module's current (live) binding.
function __accessProp(key) {
  return this[key];
}
// Memoization stores for __toESM, split by interop mode so the same CJS
// module can be wrapped once per mode and reused on later imports.
var __toESMCache_node;
var __toESMCache_esm;
// Wraps a CommonJS export object in an ESM-shaped namespace: a `default`
// property (unless the module is already flagged __esModule and not in node
// mode) plus live getters for every own key of the original module.
var __toESM = (mod, isNodeMode, target) => {
  // Primitives (and null) cannot be WeakMap keys, so they bypass the cache.
  var canCache = mod != null && typeof mod === "object";
  if (canCache) {
    var cache = isNodeMode ? __toESMCache_node ??= new WeakMap : __toESMCache_esm ??= new WeakMap;
    var cached = cache.get(mod);
    if (cached)
      return cached;
  }
  // Preserve the prototype chain of the wrapped module.
  target = mod != null ? __create(__getProtoOf(mod)) : {};
  const to = isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target;
  for (let key of __getOwnPropNames(mod))
    if (!__hasOwnProp.call(to, key))
      __defProp(to, key, {
        get: __accessProp.bind(mod, key),
        enumerable: true
      });
  if (canCache)
    cache.set(mod, to);
  return to;
};
|
|
33
|
+
// Converts an ESM-style export map into a CommonJS exports object:
// sets the __esModule marker and installs live getters for every own key.
// Results are memoized per source object in __moduleCache.
var __toCommonJS = (from) => {
  var entry = (__moduleCache ??= new WeakMap).get(from), desc;
  if (entry)
    return entry;
  entry = __defProp({}, "__esModule", { value: true });
  if (from && typeof from === "object" || typeof from === "function") {
    for (var key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(entry, key))
        __defProp(entry, key, {
          get: __accessProp.bind(from, key),
          // Mirror the source descriptor's enumerability; default to
          // enumerable when no descriptor is found.
          enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable
        });
  }
  __moduleCache.set(from, entry);
  return entry;
};
// Lazily created memo for __toCommonJS (declared after first use; `var`
// hoisting makes this safe).
var __moduleCache;
|
|
50
|
+
// Identity wrapper used by __exportSetter to turn a plain value back into a
// getter function.
var __returnValue = (v) => v;
// Setter installed on export maps: reassigning an export swaps its getter in
// `all` (bound as `this`) for one returning the new value, keeping the
// getter-based export map consistent.
function __exportSetter(name, newValue) {
  this[name] = __returnValue.bind(null, newValue);
}
// Installs live getter/setter accessor pairs for each entry of `all` onto
// `target` (the module's export object).
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, {
      get: all[name],
      enumerable: true,
      configurable: true,
      set: __exportSetter.bind(all, name)
    });
};
|
|
63
|
+
|
|
64
|
+
// src/client.entry.ts
// Public surface of the client entry point. __export installs live getters so
// the CommonJS exports track the underlying bindings defined later in this file.
var exports_client_entry = {};
__export(exports_client_entry, {
  createWorkflowRef: () => createWorkflowRef,
  WorkflowStatus: () => WorkflowStatus,
  WorkflowClient: () => WorkflowClient
});
module.exports = __toCommonJS(exports_client_entry);
|
|
72
|
+
|
|
73
|
+
// src/client.ts
var import_es_toolkit = require("es-toolkit");
var import_pg = __toESM(require("pg"));
var import_pg_boss = require("pg-boss");

// src/constants.ts
// Reserved event name used internally by pause/resume; workflows waiting on
// this event are resumed by WorkflowClient.resumeWorkflow.
var PAUSE_EVENT_NAME = "__internal_pause";
// pg-boss queue that carries all workflow-run jobs.
var WORKFLOW_RUN_QUEUE_NAME = "workflow-run";
// Dedicated pg-boss schema so this library does not collide with other
// pg-boss installations in the same database.
var DEFAULT_PGBOSS_SCHEMA = "pgboss_v12_pgworkflow";

// src/db/migration.ts
// Arbitrary fixed key for pg_advisory_xact_lock; must stay stable across
// versions so concurrent processes serialize on the same lock.
var MIGRATION_LOCK_ID = 738291645;
// Bump when adding a new `currentVersion < N` migration branch below.
var CURRENT_SCHEMA_VERSION = 2;
|
|
86
|
+
// Applies the versioned schema migrations for the workflow tables.
// All DDL statements plus the version bump are executed as one
// multi-statement batch wrapped in BEGIN/COMMIT and serialized across
// processes by pg_advisory_xact_lock(MIGRATION_LOCK_ID).
//
// NOTE(review): the version is read BEFORE the advisory lock is taken, so two
// processes can both build `commands`; safety relies on every statement being
// replayable (IF NOT EXISTS / partial index guards) — confirm when adding
// non-idempotent statements.
async function runMigrations(db) {
  // Fast path: schema already current — no lock, no transaction.
  if (await isSchemaUpToDate(db)) {
    return;
  }
  const currentVersion = await getCurrentVersion(db);
  const commands = [];
  if (currentVersion < 1) {
    // v1: base workflow_runs table.
    commands.push(`
      CREATE TABLE IF NOT EXISTS workflow_runs (
        id varchar(32) PRIMARY KEY NOT NULL,
        created_at timestamp with time zone DEFAULT now() NOT NULL,
        updated_at timestamp with time zone DEFAULT now() NOT NULL,
        resource_id varchar(32),
        workflow_id varchar(32) NOT NULL,
        status text DEFAULT 'pending' NOT NULL,
        input jsonb NOT NULL,
        output jsonb,
        error text,
        current_step_id varchar(256) NOT NULL,
        timeline jsonb DEFAULT '{}'::jsonb NOT NULL,
        paused_at timestamp with time zone,
        resumed_at timestamp with time zone,
        completed_at timestamp with time zone,
        timeout_at timestamp with time zone,
        retry_count integer DEFAULT 0 NOT NULL,
        max_retries integer DEFAULT 0 NOT NULL,
        job_id varchar(256)
      )
    `);
    // v1: listing indexes — all lead with or include created_at to serve the
    // cursor pagination in getWorkflowRuns.
    commands.push(`
      CREATE INDEX IF NOT EXISTS workflow_runs_created_at_idx ON workflow_runs USING btree (created_at)
    `);
    commands.push(`
      CREATE INDEX IF NOT EXISTS workflow_runs_resource_id_created_at_idx ON workflow_runs USING btree (resource_id, created_at DESC)
    `);
    commands.push(`
      CREATE INDEX IF NOT EXISTS workflow_runs_status_created_at_idx ON workflow_runs USING btree (status, created_at DESC)
    `);
    commands.push(`
      CREATE INDEX IF NOT EXISTS workflow_runs_workflow_id_created_at_idx ON workflow_runs USING btree (workflow_id, created_at DESC)
    `);
    commands.push(`
      CREATE INDEX IF NOT EXISTS workflow_runs_resource_id_workflow_id_created_at_idx ON workflow_runs USING btree (resource_id, workflow_id, created_at DESC)
    `);
  }
  if (currentVersion < 2) {
    // v2: drop single-column indexes superseded by the composite ones above,
    // and add idempotency support. The unique index is partial: NULL keys are
    // exempt from uniqueness, so runs without a key are unconstrained.
    commands.push("DROP INDEX IF EXISTS workflow_runs_workflow_id_idx");
    commands.push("DROP INDEX IF EXISTS workflow_runs_resource_id_idx");
    commands.push("ALTER TABLE workflow_runs ADD COLUMN IF NOT EXISTS idempotency_key varchar(256)");
    commands.push(`
      CREATE UNIQUE INDEX IF NOT EXISTS workflow_runs_idempotency_key_idx ON workflow_runs (idempotency_key) WHERE idempotency_key IS NOT NULL
    `);
  }
  // Record the new version: INSERT on a fresh install, UPDATE otherwise.
  if (currentVersion === 0) {
    commands.push(`INSERT INTO workflow_schema_version (version) VALUES (${CURRENT_SCHEMA_VERSION})`);
  } else {
    commands.push(`UPDATE workflow_schema_version SET version = ${CURRENT_SCHEMA_VERSION}`);
  }
  if (commands.length === 0) {
    return;
  }
  // Single batch: timeouts guard against a stuck lock holder; the advisory
  // xact lock is released automatically at COMMIT/ROLLBACK.
  const sql = [
    "BEGIN",
    "SET LOCAL lock_timeout = '30s'",
    "SET LOCAL idle_in_transaction_session_timeout = '30s'",
    `SELECT pg_advisory_xact_lock(${MIGRATION_LOCK_ID})`,
    "CREATE TABLE IF NOT EXISTS workflow_schema_version (version integer NOT NULL)",
    ...commands,
    "COMMIT"
  ].join(`;
`);
  await db.executeSql(sql, []);
}
|
|
159
|
+
// Reports whether the recorded schema version is at least
// CURRENT_SCHEMA_VERSION. Any query failure (typically the version table not
// existing yet) is treated as "not up to date".
async function isSchemaUpToDate(db) {
  let recorded = 0;
  try {
    const result = await db.executeSql("SELECT version FROM workflow_schema_version LIMIT 1", []);
    recorded = result.rows[0]?.version ?? 0;
  } catch {
    return false;
  }
  return recorded >= CURRENT_SCHEMA_VERSION;
}
|
|
167
|
+
// Reads the recorded schema version. A missing table or any query failure is
// reported as version 0 (fresh install).
async function getCurrentVersion(db) {
  try {
    const { rows } = await db.executeSql("SELECT version FROM workflow_schema_version LIMIT 1", []);
    const [first] = rows;
    return first?.version ?? 0;
  } catch {
    return 0;
  }
}
|
|
175
|
+
|
|
176
|
+
// src/db/queries.ts
var import_ksuid = __toESM(require("ksuid"));
// Generates a K-sortable unique id, optionally prefixed (e.g. "run" ->
// "run_2D9..."). KSUIDs sort lexicographically by creation time, which the
// created_at-ordered pagination relies on indirectly.
function generateKSUID(prefix) {
  return `${prefix ? `${prefix}_` : ""}${import_ksuid.default.randomSync().string}`;
}
|
|
181
|
+
// Converts a raw workflow_runs row (snake_case, driver-typed columns) into
// the camelCase WorkflowRun object used by the public API.
function mapRowToWorkflowRun(row) {
  // jsonb columns may arrive pre-decoded or as raw text depending on driver.
  const decodeJson = (value) => typeof value === "string" ? JSON.parse(value) : value;
  const asDate = (value) => value ? new Date(value) : null;
  // `output` is special: only text that looks like a JSON object/array is
  // parsed; any other string passes through untouched.
  let output;
  if (typeof row.output === "string") {
    const looksJson = row.output.trim().startsWith("{") || row.output.trim().startsWith("[");
    output = looksJson ? JSON.parse(row.output) : row.output;
  } else {
    output = row.output ?? null;
  }
  return {
    id: row.id,
    createdAt: new Date(row.created_at),
    updatedAt: new Date(row.updated_at),
    resourceId: row.resource_id,
    workflowId: row.workflow_id,
    status: row.status,
    input: decodeJson(row.input),
    output,
    error: row.error,
    currentStepId: row.current_step_id,
    timeline: decodeJson(row.timeline),
    pausedAt: asDate(row.paused_at),
    resumedAt: asDate(row.resumed_at),
    completedAt: asDate(row.completed_at),
    timeoutAt: asDate(row.timeout_at),
    retryCount: row.retry_count,
    maxRetries: row.max_retries,
    jobId: row.job_id,
    idempotencyKey: row.idempotency_key
  };
}
|
|
204
|
+
// Inserts a new workflow run, honoring an optional idempotency key.
// Returns { run, created }: created=false means an existing run with the same
// key was returned instead of inserting a duplicate.
//
// NOTE(review): ON CONFLICT ... DO NOTHING + fallback SELECT can race under
// READ COMMITTED — a concurrent uncommitted insert makes the SELECT come back
// empty and this throws the "existing run not found" error; confirm whether
// callers retry.
async function insertWorkflowRun({
  resourceId,
  workflowId,
  currentStepId,
  status,
  input,
  maxRetries,
  timeoutAt,
  idempotencyKey
}, db) {
  const runId = generateKSUID("run");
  const now = new Date;
  // The conflict target matches the partial unique index on idempotency_key
  // (NULL keys never conflict, so keyless inserts always succeed).
  const result = await db.executeSql(`INSERT INTO workflow_runs (
    id,
    resource_id,
    workflow_id,
    current_step_id,
    status,
    input,
    max_retries,
    timeout_at,
    created_at,
    updated_at,
    timeline,
    retry_count,
    idempotency_key
  )
  VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13)
  ON CONFLICT (idempotency_key) WHERE idempotency_key IS NOT NULL DO NOTHING
  RETURNING *`, [
    runId,
    resourceId ?? null,
    workflowId,
    currentStepId,
    status,
    JSON.stringify(input),
    maxRetries,
    timeoutAt,
    now,
    now,
    "{}",
    0,
    idempotencyKey ?? null
  ]);
  if (result.rows[0]) {
    return { run: mapRowToWorkflowRun(result.rows[0]), created: true };
  }
  // No row returned => the key already exists; fetch the winner.
  const existing = await db.executeSql("SELECT * FROM workflow_runs WHERE idempotency_key = $1", [
    idempotencyKey
  ]);
  if (!existing.rows[0]) {
    throw new Error(`Idempotency conflict: existing run not found for key "${idempotencyKey}"`);
  }
  return { run: mapRowToWorkflowRun(existing.rows[0]), created: false };
}
|
|
259
|
+
// Fetches a single workflow run by id, optionally scoped to a resource.
// When `exclusiveLock` is set, FOR UPDATE is appended so the caller's open
// transaction holds a row lock until commit/rollback.
async function getWorkflowRun({
  runId,
  resourceId
}, { exclusiveLock = false, db }) {
  const lockSuffix = exclusiveLock ? "FOR UPDATE" : "";
  let result;
  if (resourceId) {
    result = await db.executeSql(`SELECT * FROM workflow_runs
    WHERE id = $1 AND resource_id = $2
    ${lockSuffix}`, [runId, resourceId]);
  } else {
    result = await db.executeSql(`SELECT * FROM workflow_runs
    WHERE id = $1
    ${lockSuffix}`, [runId]);
  }
  const row = result.rows[0];
  return row ? mapRowToWorkflowRun(row) : null;
}
|
|
275
|
+
// Partially updates a workflow run with an optimistic status guard.
// Only fields explicitly present (!== undefined) in `data` are written, so a
// field can be set to null but never accidentally cleared. Returns the
// updated run, or null when no row matched (unknown id / wrong resource /
// status not in `expectedStatuses`).
//
// Parameter-index bookkeeping is ORDER-SENSITIVE: values are pushed in the
// exact sequence referenced by the generated SQL ($1 = updated_at, then one
// slot per present field, then id, optional resource_id, optional statuses).
async function updateWorkflowRun({
  runId,
  resourceId,
  data,
  expectedStatuses
}, db) {
  const now = new Date;
  // updated_at is always written and is always $1.
  const updates = ["updated_at = $1"];
  const values = [now];
  let paramIndex = 2;
  if (data.status !== undefined) {
    updates.push(`status = $${paramIndex}`);
    values.push(data.status);
    paramIndex++;
  }
  if (data.currentStepId !== undefined) {
    updates.push(`current_step_id = $${paramIndex}`);
    values.push(data.currentStepId);
    paramIndex++;
  }
  if (data.timeline !== undefined) {
    // Serialized here; decoded again by mapRowToWorkflowRun on read.
    updates.push(`timeline = $${paramIndex}`);
    values.push(JSON.stringify(data.timeline));
    paramIndex++;
  }
  if (data.pausedAt !== undefined) {
    updates.push(`paused_at = $${paramIndex}`);
    values.push(data.pausedAt);
    paramIndex++;
  }
  if (data.resumedAt !== undefined) {
    updates.push(`resumed_at = $${paramIndex}`);
    values.push(data.resumedAt);
    paramIndex++;
  }
  if (data.completedAt !== undefined) {
    updates.push(`completed_at = $${paramIndex}`);
    values.push(data.completedAt);
    paramIndex++;
  }
  if (data.output !== undefined) {
    updates.push(`output = $${paramIndex}`);
    values.push(JSON.stringify(data.output));
    paramIndex++;
  }
  if (data.error !== undefined) {
    updates.push(`error = $${paramIndex}`);
    values.push(data.error);
    paramIndex++;
  }
  if (data.retryCount !== undefined) {
    updates.push(`retry_count = $${paramIndex}`);
    values.push(data.retryCount);
    paramIndex++;
  }
  if (data.jobId !== undefined) {
    updates.push(`job_id = $${paramIndex}`);
    values.push(data.jobId);
    paramIndex++;
  }
  // WHERE-clause parameters: id, then (optionally) resource_id, then
  // (optionally) the expected-statuses array.
  values.push(runId);
  const idParam = paramIndex;
  paramIndex++;
  if (resourceId) {
    values.push(resourceId);
    paramIndex++;
  }
  if (expectedStatuses && expectedStatuses.length > 0) {
    values.push(expectedStatuses);
    paramIndex++;
  }
  let whereClause = resourceId ? `WHERE id = $${idParam} AND resource_id = $${idParam + 1}` : `WHERE id = $${idParam}`;
  if (expectedStatuses && expectedStatuses.length > 0) {
    // paramIndex was advanced past the statuses slot above, so the statuses
    // array always sits at paramIndex - 1 regardless of resourceId.
    whereClause += ` AND status = ANY($${paramIndex - 1})`;
  }
  const query = `
    UPDATE workflow_runs
    SET ${updates.join(", ")}
    ${whereClause}
    RETURNING *
  `;
  const result = await db.executeSql(query, values);
  const run = result.rows[0];
  if (!run) {
    return null;
  }
  return mapRowToWorkflowRun(run);
}
|
|
363
|
+
// Cursor-paginated listing of workflow runs, newest first.
// `startingAfter` / `endingBefore` are run ids; their created_at timestamps
// are resolved once and used as keyset cursors. Passing only `endingBefore`
// pages backward (query ascending, then reverse to keep newest-first order).
//
// BUG FIX: the requested `limit` was previously clamped to [1, 100] only when
// computing the SQL LIMIT, while `hasExtraRow` and the slice used the raw
// caller value — for limit > 100 the function returned 101 rows and reported
// hasMore=false. The clamp is now computed once (`pageSize`) and used
// consistently for the LIMIT, the extra-row probe, and the slice.
async function getWorkflowRuns({
  resourceId,
  startingAfter,
  endingBefore,
  limit = 20,
  statuses,
  workflowId
}, db) {
  const conditions = [];
  const values = [];
  let paramIndex = 1;
  if (resourceId) {
    conditions.push(`resource_id = $${paramIndex}`);
    values.push(resourceId);
    paramIndex++;
  }
  if (statuses && statuses.length > 0) {
    conditions.push(`status = ANY($${paramIndex})`);
    values.push(statuses);
    paramIndex++;
  }
  if (workflowId) {
    conditions.push(`workflow_id = $${paramIndex}`);
    values.push(workflowId);
    paramIndex++;
  }
  // Resolve cursor ids to created_at timestamps in a single query.
  const cursorIds = [startingAfter, endingBefore].filter(Boolean);
  if (cursorIds.length > 0) {
    const cursorResult = await db.executeSql("SELECT id, created_at FROM workflow_runs WHERE id = ANY($1)", [cursorIds]);
    const cursorMap = new Map;
    for (const row of cursorResult.rows) {
      cursorMap.set(row.id, typeof row.created_at === "string" ? new Date(row.created_at) : row.created_at);
    }
    // Unknown cursor ids are silently ignored (no condition added).
    if (startingAfter) {
      const cursor = cursorMap.get(startingAfter);
      if (cursor) {
        conditions.push(`created_at < $${paramIndex}`);
        values.push(cursor);
        paramIndex++;
      }
    }
    if (endingBefore) {
      const cursor = cursorMap.get(endingBefore);
      if (cursor) {
        conditions.push(`created_at > $${paramIndex}`);
        values.push(cursor);
        paramIndex++;
      }
    }
  }
  const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
  // Effective page size, clamped to [1, 100]; fetch one extra row to detect
  // whether another page exists.
  const pageSize = Math.min(Math.max(limit ?? 20, 1), 100);
  const actualLimit = pageSize + 1;
  const isBackward = !!endingBefore && !startingAfter;
  const query = `
    SELECT * FROM workflow_runs
    ${whereClause}
    ORDER BY created_at ${isBackward ? "ASC" : "DESC"}
    LIMIT $${paramIndex}
  `;
  values.push(actualLimit);
  const result = await db.executeSql(query, values);
  const rows = result.rows;
  const hasExtraRow = rows.length > pageSize;
  const rawItems = hasExtraRow ? rows.slice(0, pageSize) : rows;
  if (isBackward) {
    // Restore newest-first ordering after the ascending backward query.
    rawItems.reverse();
  }
  const items = rawItems.map((row) => mapRowToWorkflowRun(row));
  const hasMore = isBackward ? items.length > 0 : hasExtraRow;
  const hasPrev = isBackward ? hasExtraRow : !!startingAfter && items.length > 0;
  const nextCursor = hasMore && items.length > 0 ? items[items.length - 1]?.id ?? null : null;
  const prevCursor = hasPrev && items.length > 0 ? items[0]?.id ?? null : null;
  return { items, nextCursor, prevCursor, hasMore, hasPrev };
}
|
|
437
|
+
// Runs `callback` inside a BEGIN/COMMIT transaction, rolling back on error.
// When a `pool` is supplied, a dedicated client is checked out so every
// statement runs on the same physical connection; the client is always
// released afterwards. Without a pool, `db` is used as-is.
async function withPostgresTransaction(db, callback, pool) {
  let txDb = db;
  let release;
  if (pool) {
    const client = await pool.connect();
    txDb = {
      executeSql: (text, values) => client.query(text, values)
    };
    release = () => client.release();
  }
  try {
    await txDb.executeSql("BEGIN", []);
    const result = await callback(txDb);
    await txDb.executeSql("COMMIT", []);
    return result;
  } catch (error) {
    await txDb.executeSql("ROLLBACK", []);
    throw error;
  } finally {
    release?.();
  }
}
|
|
461
|
+
|
|
462
|
+
// src/error.ts
// Base error for the workflow engine. Carries optional workflow/run context,
// the underlying cause, and structured validation issues (for input-schema
// failures).
class WorkflowEngineError extends Error {
  workflowId;
  runId;
  cause;
  issues;
  constructor(message, workflowId, runId, cause = undefined, issues) {
    super(message);
    this.name = "WorkflowEngineError";
    this.workflowId = workflowId;
    this.runId = runId;
    this.cause = cause;
    this.issues = issues;
    // V8-only: trim this constructor frame from the captured stack.
    Error.captureStackTrace?.(this, WorkflowEngineError);
  }
}
|
480
|
+
|
|
481
|
+
// Raised when a run id (optionally scoped by resource) matches no stored run.
class WorkflowRunNotFoundError extends WorkflowEngineError {
  constructor(runId, workflowId) {
    const message = "Workflow run not found";
    super(message, workflowId, runId);
    this.name = "WorkflowRunNotFoundError";
  }
}
|
|
487
|
+
|
|
488
|
+
// src/types.ts
// Runtime enum of workflow run lifecycle statuses (mirrors the TS enum).
var WorkflowStatus;
(function (status) {
  status["PENDING"] = "pending";
  status["RUNNING"] = "running";
  status["PAUSED"] = "paused";
  status["COMPLETED"] = "completed";
  status["FAILED"] = "failed";
  status["CANCELLED"] = "cancelled";
})(WorkflowStatus ||= {});
|
|
498
|
+
// Runtime enum of the step primitives a workflow handler can execute.
var StepType;
(function (step) {
  step["PAUSE"] = "pause";
  step["RUN"] = "run";
  step["WAIT_FOR"] = "waitFor";
  step["WAIT_UNTIL"] = "waitUntil";
  step["DELAY"] = "delay";
  step["POLL"] = "poll";
})(StepType ||= {});
|
|
507
|
+
|
|
508
|
+
// src/client.ts
var LOG_PREFIX = "[WorkflowClient]";
// Fallback logger when none is injected via the constructor.
// NOTE(review): `log` routes through console.warn, not console.log —
// presumably intentional so lifecycle messages surface in environments that
// filter stdout; confirm against src/client.ts.
var defaultLogger = {
  log: (_message) => console.warn(_message),
  error: (message, error) => console.error(message, error)
};
// Default pg-boss job expiry in seconds; overridable via env, 5 minutes otherwise.
var defaultExpireInSeconds = process.env.WORKFLOW_RUN_EXPIRE_IN_SECONDS ? Number.parseInt(process.env.WORKFLOW_RUN_EXPIRE_IN_SECONDS, 10) : 5 * 60;
|
|
515
|
+
|
|
516
|
+
// Client-side API for starting, inspecting and controlling workflow runs.
// Owns (or borrows) a pg Pool, a pg-boss instance for job dispatch, and a
// thin `db.executeSql` adapter shared with the query helpers above.
class WorkflowClient {
  boss;
  db;
  pool;
  // True only when this client created the pool (connectionString path) and
  // must therefore close it on stop().
  _ownsPool = false;
  _started = false;
  logger;
  constructor({ logger, ...connectionOptions }) {
    this.logger = logger ?? defaultLogger;
    // Accept either an existing pg Pool or a connection string (from which a
    // pool is created and owned by this client).
    if ("pool" in connectionOptions && connectionOptions.pool) {
      this.pool = connectionOptions.pool;
    } else if ("connectionString" in connectionOptions && connectionOptions.connectionString) {
      this.pool = new import_pg.default.Pool({ connectionString: connectionOptions.connectionString });
      this._ownsPool = true;
    } else {
      throw new WorkflowEngineError("Either pool or connectionString must be provided");
    }
    const db = {
      executeSql: (text, values) => this.pool.query(text, values)
    };
    this.boss = new import_pg_boss.PgBoss({ db, schema: DEFAULT_PGBOSS_SCHEMA });
    this.db = db;
  }
  // Starts pg-boss, runs schema migrations and ensures the run queue exists.
  // Idempotent; also invoked lazily by ensureStarted().
  async start() {
    if (this._started) {
      return;
    }
    await this.boss.start();
    // After start, use pg-boss's own db adapter for subsequent queries.
    this.db = this.boss.getDb();
    await runMigrations(this.db);
    await this.boss.createQueue(WORKFLOW_RUN_QUEUE_NAME);
    this._started = true;
    this.logger.log(`${LOG_PREFIX} Client started`);
  }
  // Stops pg-boss and closes the pool when this client owns it.
  async stop() {
    await this.boss.stop();
    if (this._ownsPool) {
      await this.pool.end();
    }
    this._started = false;
    this.logger.log(`${LOG_PREFIX} Client stopped`);
  }
  // Starts a new workflow run. Accepts either a workflow ref (callable with
  // an `id`, created by createWorkflowRef) plus (input, options), or a single
  // params object { workflowId, input, resourceId, options }.
  // The run row insert and the job enqueue happen in one transaction.
  async startWorkflow(refOrParams, inputArg, optionsArg) {
    await this.ensureStarted();
    let workflowId;
    let input;
    let resourceId;
    let options;
    if (typeof refOrParams === "function" && "id" in refOrParams) {
      const ref = refOrParams;
      workflowId = ref.id;
      input = inputArg;
      options = optionsArg;
      resourceId = optionsArg?.resourceId;
      // Validate input via the ref's Standard Schema, if one was attached.
      if (ref.inputSchema) {
        const result = await ref.inputSchema["~standard"].validate(input);
        if (result.issues) {
          throw new WorkflowEngineError(JSON.stringify(result.issues), workflowId, undefined, undefined, result.issues);
        }
      }
    } else {
      const params = refOrParams;
      workflowId = params.workflowId;
      input = params.input;
      resourceId = params.resourceId;
      options = params.options;
    }
    const run = await withPostgresTransaction(this.db, async (_db) => {
      const timeoutAt = options?.timeout ? new Date(Date.now() + options.timeout) : null;
      // NOTE(review): options.idempotencyKey is not forwarded here even
      // though insertWorkflowRun supports it — confirm whether idempotent
      // starts are reachable through this entry point.
      const { run: insertedRun, created } = await insertWorkflowRun({
        resourceId,
        workflowId,
        currentStepId: "__start__",
        status: "running" /* RUNNING */,
        input,
        maxRetries: options?.retries ?? 0,
        timeoutAt
      }, _db);
      // Only enqueue a job for newly created rows (idempotent replays skip).
      if (created) {
        const job = {
          runId: insertedRun.id,
          resourceId,
          workflowId,
          input
        };
        await this.boss.send(WORKFLOW_RUN_QUEUE_NAME, job, {
          startAfter: new Date,
          expireInSeconds: options?.expireInSeconds ?? defaultExpireInSeconds
        });
      }
      return insertedRun;
    }, this.pool);
    this.logger.log(`${LOG_PREFIX} Started workflow run ${run.id} for ${workflowId}`);
    return run;
  }
  // Delivers a named event to a run by enqueuing a job that re-executes the
  // workflow with the event payload attached. Throws if the run is unknown.
  // NOTE(review): no status check here — events can be sent to runs in any
  // state; the worker side presumably decides whether to act on them.
  async triggerEvent({
    runId,
    resourceId,
    eventName,
    data,
    options
  }) {
    await this.ensureStarted();
    const run = await this.getRun({ runId, resourceId });
    const job = {
      runId: run.id,
      resourceId: resourceId ?? run.resourceId ?? undefined,
      workflowId: run.workflowId,
      input: run.input,
      event: {
        name: eventName,
        data
      }
    };
    await this.boss.send(WORKFLOW_RUN_QUEUE_NAME, job, {
      expireInSeconds: options?.expireInSeconds ?? defaultExpireInSeconds
    });
    this.logger.log(`${LOG_PREFIX} Event ${eventName} sent for workflow run ${runId}`);
    return run;
  }
  // Marks a RUNNING/PENDING run as PAUSED. The status guard in
  // updateWorkflowRun makes this a no-match (-> not-found error) for runs in
  // any other state.
  async pauseWorkflow({
    runId,
    resourceId
  }) {
    await this.ensureStarted();
    const run = await updateWorkflowRun({
      runId,
      resourceId,
      data: {
        status: "paused" /* PAUSED */,
        pausedAt: new Date
      },
      expectedStatuses: ["running" /* RUNNING */, "pending" /* PENDING */]
    }, this.db);
    if (!run) {
      throw new WorkflowRunNotFoundError(runId);
    }
    this.logger.log(`${LOG_PREFIX} Paused workflow run ${runId}`);
    return run;
  }
  // Resumes a PAUSED run by delivering the internal pause event the worker
  // is waiting on. Throws if the run is not currently paused.
  async resumeWorkflow({
    runId,
    resourceId,
    options
  }) {
    await this.ensureStarted();
    const current = await this.getRun({ runId, resourceId });
    if (current.status !== "paused" /* PAUSED */) {
      throw new WorkflowEngineError(`Cannot resume workflow run in '${current.status}' status, must be 'paused'`, current.workflowId, runId);
    }
    return this.triggerEvent({
      runId,
      resourceId,
      eventName: PAUSE_EVENT_NAME,
      data: {},
      options
    });
  }
  // Skips past the waitFor/pause the run is currently blocked on:
  // - internal pause  -> plain resume
  // - skipOutput+timeoutEvent -> write `data` as the step's output under a
  //   row lock, then fire the timeout event
  // - otherwise fire the awaited event (or its timeout event) with `data`.
  // Non-paused runs, or paused runs with no recognizable wait entry, are
  // returned unchanged.
  async fastForwardWorkflow({
    runId,
    resourceId,
    data
  }) {
    await this.ensureStarted();
    const run = await this.getRun({ runId, resourceId });
    if (run.status !== "paused" /* PAUSED */) {
      return run;
    }
    const stepId = run.currentStepId;
    // Wait metadata is recorded in the timeline under "<stepId>-wait-for".
    const waitForEntry = run.timeline[`${stepId}-wait-for`];
    if (!waitForEntry || typeof waitForEntry !== "object" || !("waitFor" in waitForEntry)) {
      return run;
    }
    const { eventName, timeoutEvent, skipOutput } = waitForEntry.waitFor;
    if (eventName === PAUSE_EVENT_NAME) {
      return this.resumeWorkflow({ runId, resourceId });
    }
    if (skipOutput && timeoutEvent) {
      // Re-read under FOR UPDATE so the timeline merge cannot race with the
      // worker, then persist the caller-provided output for the skipped step.
      await withPostgresTransaction(this.db, async (db) => {
        const freshRun = await getWorkflowRun({ runId, resourceId }, { exclusiveLock: true, db });
        if (!freshRun)
          throw new WorkflowRunNotFoundError(runId);
        return updateWorkflowRun({
          runId,
          resourceId,
          data: {
            timeline: import_es_toolkit.merge(freshRun.timeline, {
              [stepId]: {
                output: data ?? {},
                timestamp: new Date
              }
            })
          }
        }, db);
      }, this.pool);
      return this.triggerEvent({ runId, resourceId, eventName: timeoutEvent });
    }
    if (eventName) {
      return this.triggerEvent({ runId, resourceId, eventName, data: data ?? {} });
    }
    if (timeoutEvent) {
      return this.triggerEvent({ runId, resourceId, eventName: timeoutEvent, data: data ?? {} });
    }
    return run;
  }
  // Cancels a PENDING/RUNNING/PAUSED run (terminal states cannot be
  // cancelled; they fail the status guard and raise not-found).
  async cancelWorkflow({
    runId,
    resourceId
  }) {
    await this.ensureStarted();
    const run = await updateWorkflowRun({
      runId,
      resourceId,
      data: {
        status: "cancelled" /* CANCELLED */
      },
      expectedStatuses: ["pending" /* PENDING */, "running" /* RUNNING */, "paused" /* PAUSED */]
    }, this.db);
    if (!run) {
      throw new WorkflowRunNotFoundError(runId);
    }
    this.logger.log(`${LOG_PREFIX} Cancelled workflow run ${runId}`);
    return run;
  }
  // Fetches a single run; throws WorkflowRunNotFoundError when absent.
  async getRun({
    runId,
    resourceId
  }) {
    await this.ensureStarted();
    const run = await getWorkflowRun({ runId, resourceId }, { db: this.db });
    if (!run) {
      throw new WorkflowRunNotFoundError(runId);
    }
    return run;
  }
  // Returns the run augmented with coarse progress counters derived from the
  // timeline. totalSteps is only known once the run completed.
  // (ensureStarted is reached indirectly via getRun.)
  async checkProgress({
    runId,
    resourceId
  }) {
    const run = await this.getRun({ runId, resourceId });
    // A timeline entry counts as a completed step when it recorded an output.
    const completedSteps = Object.values(run.timeline).filter((entry) => typeof entry === "object" && entry !== null && ("output" in entry) && entry.output !== undefined).length;
    const totalSteps = run.status === "completed" /* COMPLETED */ ? completedSteps : 0;
    // NOTE(review): both arms of the inner ternary yield 0, so percentage is
    // 100 when completed and 0 otherwise — the failed/cancelled distinction
    // is dead code; confirm intended percentages upstream.
    const completionPercentage = run.status === "completed" /* COMPLETED */ ? 100 : run.status === "failed" /* FAILED */ || run.status === "cancelled" /* CANCELLED */ ? 0 : 0;
    return {
      ...run,
      completedSteps,
      completionPercentage,
      totalSteps
    };
  }
  // Cursor-paginated run listing; thin pass-through to getWorkflowRuns.
  async getRuns({
    resourceId,
    startingAfter,
    endingBefore,
    limit = 20,
    statuses,
    workflowId
  }) {
    await this.ensureStarted();
    return getWorkflowRuns({
      resourceId,
      startingAfter,
      endingBefore,
      limit,
      statuses,
      workflowId
    }, this.db);
  }
  // Lazily starts the client on first use of any public method.
  async ensureStarted() {
    if (!this._started) {
      await this.start();
    }
  }
}
|
|
790
|
+
// src/definition.ts
// Creates a lightweight workflow reference: a callable that, given a handler,
// builds a full workflow definition — while itself exposing `id` and
// `inputSchema` so clients can start/validate the workflow without importing
// its implementation.
function createWorkflowRef(id, options) {
  const inputSchema = options?.inputSchema;
  const ref = (handler, defineOptions) => {
    const definition = {
      id,
      handler,
      inputSchema,
      timeout: defineOptions?.timeout,
      retries: defineOptions?.retries
    };
    return definition;
  };
  // Surface id/inputSchema on the function itself for client-side use.
  Object.defineProperty(ref, "id", { value: id, enumerable: true });
  Object.defineProperty(ref, "inputSchema", { value: inputSchema, enumerable: true });
  return ref;
}
|
|
803
|
+
// Builds the `workflow(id, handler, options)` definition factory.
// `.use(plugin)` is immutable: it returns a NEW factory with the plugin
// appended, leaving the original untouched. `.ref` exposes createWorkflowRef
// for handler-less references.
function createWorkflowFactory(plugins = []) {
  const factory = (id, handler, { inputSchema, timeout, retries } = {}) => {
    const definition = {
      id,
      handler,
      inputSchema,
      timeout,
      retries,
      // Omit the key entirely when no plugins are registered.
      plugins: plugins.length > 0 ? plugins : undefined
    };
    return definition;
  };
  factory.use = (plugin) => createWorkflowFactory([...plugins, plugin]);
  factory.ref = createWorkflowRef;
  return factory;
}
// Default plugin-free factory exported to consumers.
var workflow = createWorkflowFactory();
|
|
820
|
+
|
|
821
|
+
//# debugId=3BB2ABF6C479FA4D64756E2164756E21
|
|
822
|
+
//# sourceMappingURL=client.entry.js.map
|