pg-workflows 0.8.3 → 0.9.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/client.entry.cjs +27 -4
- package/dist/client.entry.js +1 -1
- package/dist/client.entry.js.map +6 -6
- package/dist/index.cjs +93 -37
- package/dist/index.d.cts +13 -2
- package/dist/index.d.ts +13 -2
- package/dist/index.js +71 -35
- package/dist/index.js.map +7 -7
- package/dist/shared/{chunk-2xy8z3xp.js → chunk-fr76gdwj.js} +34 -11
- package/dist/shared/chunk-fr76gdwj.js.map +16 -0
- package/package.json +1 -1
- package/dist/shared/chunk-2xy8z3xp.js.map +0 -16
package/dist/index.d.ts
CHANGED
|
@@ -304,8 +304,9 @@ declare class WorkflowEngine {
|
|
|
304
304
|
workflows: Map<string, WorkflowInternalDefinition>;
|
|
305
305
|
private logger;
|
|
306
306
|
constructor({ workflows, logger, boss,...connectionOptions }: WorkflowEngineOptions);
|
|
307
|
-
start(asEngine?: boolean, { batchSize }?: {
|
|
307
|
+
start(asEngine?: boolean, { batchSize, heartbeatSeconds }?: {
|
|
308
308
|
batchSize?: number;
|
|
309
|
+
heartbeatSeconds?: number;
|
|
309
310
|
}): Promise<void>;
|
|
310
311
|
stop(): Promise<void>;
|
|
311
312
|
registerWorkflow(definition: WorkflowDefinition<InputParameters>): Promise<WorkflowEngine>;
|
|
@@ -375,6 +376,14 @@ declare class WorkflowEngine {
|
|
|
375
376
|
*/
|
|
376
377
|
private resolveScopedResourceId;
|
|
377
378
|
private handleWorkflowRun;
|
|
379
|
+
/**
|
|
380
|
+
* Reconciles workflow runs whose retries pg-boss has exhausted (handler
|
|
381
|
+
* threw on the final attempt, or worker died and missed the heartbeat
|
|
382
|
+
* past the retry budget). The DLQ entry tells us the run is unrecoverable;
|
|
383
|
+
* we mark it FAILED with whatever error message the catch block last
|
|
384
|
+
* persisted, falling back to a worker-death message.
|
|
385
|
+
*/
|
|
386
|
+
private handleWorkflowRunDlq;
|
|
378
387
|
private getCachedStepEntry;
|
|
379
388
|
private getWaitForStepEntry;
|
|
380
389
|
private runStep;
|
|
@@ -398,6 +407,8 @@ declare class WorkflowEngine {
|
|
|
398
407
|
}>;
|
|
399
408
|
}
|
|
400
409
|
import { StandardSchemaV1 as StandardSchemaV12 } from "@standard-schema/spec";
|
|
410
|
+
declare function validateWorkflowId(workflowId: string): void;
|
|
411
|
+
declare function validateResourceId(resourceId: string | undefined | null): void;
|
|
401
412
|
declare class WorkflowEngineError extends Error {
|
|
402
413
|
readonly workflowId?: string | undefined;
|
|
403
414
|
readonly runId?: string | undefined;
|
|
@@ -408,4 +419,4 @@ declare class WorkflowEngineError extends Error {
|
|
|
408
419
|
declare class WorkflowRunNotFoundError extends WorkflowEngineError {
|
|
409
420
|
constructor(runId?: string, workflowId?: string);
|
|
410
421
|
}
|
|
411
|
-
export { workflow, parseDuration, createWorkflowRef, WorkflowStatus, WorkflowRunProgress, WorkflowRunNotFoundError, WorkflowRef, WorkflowPlugin, WorkflowOptions, WorkflowLogger, WorkflowInternalLoggerContext, WorkflowInternalLogger, WorkflowInternalDefinition, WorkflowFactory, WorkflowEngineOptions, WorkflowEngineError, WorkflowEngine, WorkflowDefinition, WorkflowContext, WorkflowClientOptions, WorkflowClient, StepType, StepInternalDefinition, StepBaseContext, StartWorkflowOptions, InputParameters, InferInputParameters, DurationObject, Duration };
|
|
422
|
+
export { workflow, validateWorkflowId, validateResourceId, parseDuration, createWorkflowRef, WorkflowStatus, WorkflowRunProgress, WorkflowRunNotFoundError, WorkflowRef, WorkflowPlugin, WorkflowOptions, WorkflowLogger, WorkflowInternalLoggerContext, WorkflowInternalLogger, WorkflowInternalDefinition, WorkflowFactory, WorkflowEngineOptions, WorkflowEngineError, WorkflowEngine, WorkflowDefinition, WorkflowContext, WorkflowClientOptions, WorkflowClient, StepType, StepInternalDefinition, StepBaseContext, StartWorkflowOptions, InputParameters, InferInputParameters, DurationObject, Duration };
|
package/dist/index.js
CHANGED
|
@@ -2,6 +2,7 @@ import {
|
|
|
2
2
|
DEFAULT_PGBOSS_SCHEMA,
|
|
3
3
|
PAUSE_EVENT_NAME,
|
|
4
4
|
StepType,
|
|
5
|
+
WORKFLOW_RUN_DLQ_QUEUE_NAME,
|
|
5
6
|
WORKFLOW_RUN_QUEUE_NAME,
|
|
6
7
|
WorkflowClient,
|
|
7
8
|
WorkflowEngineError,
|
|
@@ -13,9 +14,11 @@ import {
|
|
|
13
14
|
insertWorkflowRun,
|
|
14
15
|
runMigrations,
|
|
15
16
|
updateWorkflowRun,
|
|
17
|
+
validateResourceId,
|
|
18
|
+
validateWorkflowId,
|
|
16
19
|
withPostgresTransaction,
|
|
17
20
|
workflow
|
|
18
|
-
} from "./shared/chunk-2xy8z3xp.js";
|
|
21
|
+
} from "./shared/chunk-fr76gdwj.js";
|
|
19
22
|
// src/duration.ts
|
|
20
23
|
import parse from "parse-duration";
|
|
21
24
|
var MS_PER_SECOND = 1000;
|
|
@@ -131,6 +134,12 @@ var defaultLogger = {
|
|
|
131
134
|
error: (message, error) => console.error(message, error)
|
|
132
135
|
};
|
|
133
136
|
var defaultExpireInSeconds = process.env.WORKFLOW_RUN_EXPIRE_IN_SECONDS ? Number.parseInt(process.env.WORKFLOW_RUN_EXPIRE_IN_SECONDS, 10) : 5 * 60;
|
|
137
|
+
var retrySendOptions = (maxRetries) => ({
|
|
138
|
+
retryLimit: maxRetries,
|
|
139
|
+
retryBackoff: true,
|
|
140
|
+
retryDelay: 1
|
|
141
|
+
});
|
|
142
|
+
var defaultHeartbeatSeconds = process.env.WORKFLOW_RUN_HEARTBEAT_SECONDS ? Number.parseInt(process.env.WORKFLOW_RUN_HEARTBEAT_SECONDS, 10) : 30;
|
|
134
143
|
|
|
135
144
|
class WorkflowEngine {
|
|
136
145
|
boss;
|
|
@@ -164,7 +173,10 @@ class WorkflowEngine {
|
|
|
164
173
|
}
|
|
165
174
|
this.db = this.boss.getDb();
|
|
166
175
|
}
|
|
167
|
-
async start(asEngine = true, {
|
|
176
|
+
async start(asEngine = true, {
|
|
177
|
+
batchSize = 1,
|
|
178
|
+
heartbeatSeconds = defaultHeartbeatSeconds
|
|
179
|
+
} = {}) {
|
|
168
180
|
if (this._started) {
|
|
169
181
|
return;
|
|
170
182
|
}
|
|
@@ -175,13 +187,21 @@ class WorkflowEngine {
|
|
|
175
187
|
await this.registerWorkflow(workflow2);
|
|
176
188
|
}
|
|
177
189
|
}
|
|
178
|
-
|
|
190
|
+
const mainQueueOptions = {
|
|
191
|
+
retryLimit: 0,
|
|
192
|
+
deadLetter: WORKFLOW_RUN_DLQ_QUEUE_NAME,
|
|
193
|
+
heartbeatSeconds
|
|
194
|
+
};
|
|
195
|
+
await this.boss.createQueue(WORKFLOW_RUN_DLQ_QUEUE_NAME, { retryLimit: 0 });
|
|
196
|
+
await this.boss.createQueue(WORKFLOW_RUN_QUEUE_NAME, mainQueueOptions);
|
|
197
|
+
await this.boss.updateQueue(WORKFLOW_RUN_QUEUE_NAME, mainQueueOptions);
|
|
179
198
|
const numWorkers = +(process.env.WORKFLOW_RUN_WORKERS ?? 3);
|
|
180
199
|
if (asEngine) {
|
|
181
|
-
|
|
182
|
-
await this.boss.work(WORKFLOW_RUN_QUEUE_NAME, { pollingIntervalSeconds: 0.5, batchSize }, (job) => this.handleWorkflowRun(job));
|
|
200
|
+
await Promise.all(Array.from({ length: numWorkers }, (_, i) => this.boss.work(WORKFLOW_RUN_QUEUE_NAME, { pollingIntervalSeconds: 0.5, batchSize, includeMetadata: true }, (jobs) => this.handleWorkflowRun(jobs)).then(() => {
|
|
183
201
|
this.logger.log(`Worker ${i + 1}/${numWorkers} started for queue ${WORKFLOW_RUN_QUEUE_NAME}`);
|
|
184
|
-
}
|
|
202
|
+
})));
|
|
203
|
+
await this.boss.work(WORKFLOW_RUN_DLQ_QUEUE_NAME, { pollingIntervalSeconds: 0.5, batchSize: 1 }, (jobs) => this.handleWorkflowRunDlq(jobs));
|
|
204
|
+
this.logger.log(`Worker started for queue ${WORKFLOW_RUN_DLQ_QUEUE_NAME}`);
|
|
185
205
|
}
|
|
186
206
|
this._started = true;
|
|
187
207
|
this.logger.log("Workflow engine started!");
|
|
@@ -244,6 +264,8 @@ class WorkflowEngine {
|
|
|
244
264
|
idempotencyKey = params.idempotencyKey;
|
|
245
265
|
options = params.options;
|
|
246
266
|
}
|
|
267
|
+
validateWorkflowId(workflowId);
|
|
268
|
+
validateResourceId(resourceId);
|
|
247
269
|
if (!this._started) {
|
|
248
270
|
await this.start(false, { batchSize: options?.batchSize ?? 1 });
|
|
249
271
|
}
|
|
@@ -284,7 +306,8 @@ class WorkflowEngine {
|
|
|
284
306
|
};
|
|
285
307
|
await this.boss.send(WORKFLOW_RUN_QUEUE_NAME, job, {
|
|
286
308
|
startAfter: new Date,
|
|
287
|
-
expireInSeconds: options?.expireInSeconds ?? defaultExpireInSeconds
|
|
309
|
+
expireInSeconds: options?.expireInSeconds ?? defaultExpireInSeconds,
|
|
310
|
+
...retrySendOptions(insertedRun.maxRetries)
|
|
288
311
|
});
|
|
289
312
|
}
|
|
290
313
|
return insertedRun;
|
|
@@ -415,7 +438,8 @@ class WorkflowEngine {
|
|
|
415
438
|
}
|
|
416
439
|
};
|
|
417
440
|
await this.boss.send(WORKFLOW_RUN_QUEUE_NAME, job, {
|
|
418
|
-
expireInSeconds: options?.expireInSeconds ?? defaultExpireInSeconds
|
|
441
|
+
expireInSeconds: options?.expireInSeconds ?? defaultExpireInSeconds,
|
|
442
|
+
...retrySendOptions(run.maxRetries)
|
|
419
443
|
});
|
|
420
444
|
this.logger.log(`event ${eventName} sent for workflow run with id ${runId}`);
|
|
421
445
|
return run;
|
|
@@ -494,7 +518,7 @@ class WorkflowEngine {
|
|
|
494
518
|
return run.resourceId ?? undefined;
|
|
495
519
|
}
|
|
496
520
|
async handleWorkflowRun([job]) {
|
|
497
|
-
const { runId = "", resourceId, workflowId = "", input } = job?.data ?? {};
|
|
521
|
+
const { runId = "", resourceId, workflowId = "", event } = job?.data ?? {};
|
|
498
522
|
let run;
|
|
499
523
|
let scopedResourceId;
|
|
500
524
|
try {
|
|
@@ -517,6 +541,14 @@ class WorkflowEngine {
|
|
|
517
541
|
throw new WorkflowEngineError(`Workflow run ${runId} does not match job workflowId ${workflowId}`, workflowId, runId);
|
|
518
542
|
}
|
|
519
543
|
scopedResourceId = this.resolveScopedResourceId(resourceId, run);
|
|
544
|
+
if (job?.retryCount !== undefined && run.retryCount !== job.retryCount) {
|
|
545
|
+
await this.updateRun({
|
|
546
|
+
runId,
|
|
547
|
+
resourceId: scopedResourceId,
|
|
548
|
+
data: { retryCount: job.retryCount }
|
|
549
|
+
});
|
|
550
|
+
run = { ...run, retryCount: job.retryCount };
|
|
551
|
+
}
|
|
520
552
|
if (run.status === "cancelled" /* CANCELLED */) {
|
|
521
553
|
this.logger.log(`Workflow run ${runId} is cancelled, skipping`);
|
|
522
554
|
return;
|
|
@@ -660,34 +692,11 @@ class WorkflowEngine {
|
|
|
660
692
|
});
|
|
661
693
|
}
|
|
662
694
|
} catch (error) {
|
|
663
|
-
if (run && run.retryCount < run.maxRetries) {
|
|
664
|
-
await this.updateRun({
|
|
665
|
-
runId,
|
|
666
|
-
resourceId: scopedResourceId,
|
|
667
|
-
data: {
|
|
668
|
-
retryCount: run.retryCount + 1,
|
|
669
|
-
jobId: job?.id
|
|
670
|
-
}
|
|
671
|
-
});
|
|
672
|
-
const retryDelay = 2 ** run.retryCount * 1000;
|
|
673
|
-
const pgBossJob = {
|
|
674
|
-
runId,
|
|
675
|
-
resourceId: scopedResourceId,
|
|
676
|
-
workflowId,
|
|
677
|
-
input
|
|
678
|
-
};
|
|
679
|
-
await this.boss?.send("workflow-run", pgBossJob, {
|
|
680
|
-
startAfter: new Date(Date.now() + retryDelay),
|
|
681
|
-
expireInSeconds: defaultExpireInSeconds
|
|
682
|
-
});
|
|
683
|
-
return;
|
|
684
|
-
}
|
|
685
695
|
if (runId) {
|
|
686
696
|
await this.updateRun({
|
|
687
697
|
runId,
|
|
688
698
|
resourceId: scopedResourceId,
|
|
689
699
|
data: {
|
|
690
|
-
status: "failed" /* FAILED */,
|
|
691
700
|
error: error instanceof Error ? error.message : String(error),
|
|
692
701
|
jobId: job?.id
|
|
693
702
|
}
|
|
@@ -696,6 +705,26 @@ class WorkflowEngine {
|
|
|
696
705
|
throw error;
|
|
697
706
|
}
|
|
698
707
|
}
|
|
708
|
+
async handleWorkflowRunDlq([job]) {
|
|
709
|
+
const { runId } = job?.data ?? {};
|
|
710
|
+
if (!runId)
|
|
711
|
+
return;
|
|
712
|
+
const run = await getWorkflowRun({ runId }, { db: this.db });
|
|
713
|
+
if (!run || run.status !== "running" /* RUNNING */)
|
|
714
|
+
return;
|
|
715
|
+
await this.updateRun({
|
|
716
|
+
runId,
|
|
717
|
+
resourceId: run.resourceId ?? undefined,
|
|
718
|
+
data: {
|
|
719
|
+
status: "failed" /* FAILED */,
|
|
720
|
+
error: run.error ?? "Workflow run worker died or job expired before completion"
|
|
721
|
+
}
|
|
722
|
+
});
|
|
723
|
+
this.logger.log("Marked stuck workflow run as failed", {
|
|
724
|
+
runId,
|
|
725
|
+
workflowId: run.workflowId
|
|
726
|
+
});
|
|
727
|
+
}
|
|
699
728
|
getCachedStepEntry(timeline, stepId) {
|
|
700
729
|
const stepEntry = timeline[stepId];
|
|
701
730
|
return stepEntry && typeof stepEntry === "object" && "output" in stepEntry ? stepEntry : null;
|
|
@@ -819,7 +848,8 @@ ${error.stack}` : String(error)
|
|
|
819
848
|
};
|
|
820
849
|
await this.boss.send(WORKFLOW_RUN_QUEUE_NAME, job, {
|
|
821
850
|
startAfter: timeoutDate.getTime() <= Date.now() ? new Date : timeoutDate,
|
|
822
|
-
expireInSeconds: defaultExpireInSeconds
|
|
851
|
+
expireInSeconds: defaultExpireInSeconds,
|
|
852
|
+
...retrySendOptions(run.maxRetries)
|
|
823
853
|
});
|
|
824
854
|
} catch (error) {
|
|
825
855
|
await this.updateRun({
|
|
@@ -936,7 +966,8 @@ ${error.stack}` : String(error)
|
|
|
936
966
|
event: { name: pollEvent, data: {} }
|
|
937
967
|
}, {
|
|
938
968
|
startAfter: new Date(Date.now() + intervalMs),
|
|
939
|
-
expireInSeconds: defaultExpireInSeconds
|
|
969
|
+
expireInSeconds: defaultExpireInSeconds,
|
|
970
|
+
...retrySendOptions(run.maxRetries)
|
|
940
971
|
});
|
|
941
972
|
} catch (error) {
|
|
942
973
|
await this.updateRun({
|
|
@@ -979,6 +1010,9 @@ ${error.stack}` : String(error)
|
|
|
979
1010
|
statuses,
|
|
980
1011
|
workflowId
|
|
981
1012
|
}) {
|
|
1013
|
+
if (workflowId)
|
|
1014
|
+
validateWorkflowId(workflowId);
|
|
1015
|
+
validateResourceId(resourceId);
|
|
982
1016
|
return getWorkflowRuns({
|
|
983
1017
|
resourceId,
|
|
984
1018
|
startingAfter,
|
|
@@ -991,6 +1025,8 @@ ${error.stack}` : String(error)
|
|
|
991
1025
|
}
|
|
992
1026
|
export {
|
|
993
1027
|
workflow,
|
|
1028
|
+
validateWorkflowId,
|
|
1029
|
+
validateResourceId,
|
|
994
1030
|
parseDuration,
|
|
995
1031
|
createWorkflowRef,
|
|
996
1032
|
WorkflowStatus,
|
|
@@ -1001,5 +1037,5 @@ export {
|
|
|
1001
1037
|
StepType
|
|
1002
1038
|
};
|
|
1003
1039
|
|
|
1004
|
-
//# debugId=
|
|
1040
|
+
//# debugId=FC991C83D3B1165A64756E2164756E21
|
|
1005
1041
|
//# sourceMappingURL=index.js.map
|