pg-workflows 0.8.3 → 0.10.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/client.entry.cjs +73 -14
- package/dist/client.entry.d.cts +37 -10
- package/dist/client.entry.d.ts +37 -10
- package/dist/client.entry.js +1 -1
- package/dist/client.entry.js.map +9 -9
- package/dist/index.cjs +455 -113
- package/dist/index.d.cts +80 -21
- package/dist/index.d.ts +80 -21
- package/dist/index.js +390 -101
- package/dist/index.js.map +11 -11
- package/dist/shared/{chunk-2xy8z3xp.js → chunk-nygamc7b.js} +80 -21
- package/dist/shared/chunk-nygamc7b.js.map +16 -0
- package/package.json +1 -1
- package/dist/shared/chunk-2xy8z3xp.js.map +0 -16
package/dist/index.cjs
CHANGED
|
@@ -65,6 +65,8 @@ var __export = (target, all) => {
|
|
|
65
65
|
var exports_src = {};
|
|
66
66
|
__export(exports_src, {
|
|
67
67
|
workflow: () => workflow,
|
|
68
|
+
validateWorkflowId: () => validateWorkflowId,
|
|
69
|
+
validateResourceId: () => validateResourceId,
|
|
68
70
|
parseDuration: () => parseDuration,
|
|
69
71
|
createWorkflowRef: () => createWorkflowRef,
|
|
70
72
|
WorkflowStatus: () => WorkflowStatus,
|
|
@@ -84,11 +86,19 @@ var import_pg_boss = require("pg-boss");
|
|
|
84
86
|
// src/constants.ts
|
|
85
87
|
var PAUSE_EVENT_NAME = "__internal_pause";
|
|
86
88
|
var WORKFLOW_RUN_QUEUE_NAME = "workflow-run";
|
|
89
|
+
var WORKFLOW_RUN_DLQ_QUEUE_NAME = "workflow_run_dlq";
|
|
87
90
|
var DEFAULT_PGBOSS_SCHEMA = "pgboss_v12_pgworkflow";
|
|
91
|
+
var MAX_WORKFLOW_ID_LENGTH = 256;
|
|
92
|
+
var MAX_RESOURCE_ID_LENGTH = 256;
|
|
93
|
+
var INVOKE_CHILD_WORKFLOW_TIMELINE_SUFFIX = "invoke-child-workflow";
|
|
94
|
+
var WAIT_FOR_TIMELINE_SUFFIX = "wait-for";
|
|
95
|
+
var invokeChildWorkflowTimelineKey = (stepId) => `${stepId}-${INVOKE_CHILD_WORKFLOW_TIMELINE_SUFFIX}`;
|
|
96
|
+
var waitForTimelineKey = (stepId) => `${stepId}-${WAIT_FOR_TIMELINE_SUFFIX}`;
|
|
97
|
+
var isInvokeChildWorkflowTimelineEntry = (entry) => !!entry && typeof entry === "object" && ("invokeChildWorkflow" in entry);
|
|
88
98
|
|
|
89
99
|
// src/db/migration.ts
|
|
90
100
|
var MIGRATION_LOCK_ID = 738291645;
|
|
91
|
-
var CURRENT_SCHEMA_VERSION =
|
|
101
|
+
var CURRENT_SCHEMA_VERSION = 4;
|
|
92
102
|
async function runMigrations(db) {
|
|
93
103
|
if (await isSchemaUpToDate(db)) {
|
|
94
104
|
return;
|
|
@@ -101,8 +111,8 @@ async function runMigrations(db) {
|
|
|
101
111
|
id varchar(32) PRIMARY KEY NOT NULL,
|
|
102
112
|
created_at timestamp with time zone DEFAULT now() NOT NULL,
|
|
103
113
|
updated_at timestamp with time zone DEFAULT now() NOT NULL,
|
|
104
|
-
resource_id varchar(
|
|
105
|
-
workflow_id varchar(
|
|
114
|
+
resource_id varchar(256),
|
|
115
|
+
workflow_id varchar(256) NOT NULL,
|
|
106
116
|
status text DEFAULT 'pending' NOT NULL,
|
|
107
117
|
input jsonb NOT NULL,
|
|
108
118
|
output jsonb,
|
|
@@ -142,6 +152,15 @@ async function runMigrations(db) {
|
|
|
142
152
|
CREATE UNIQUE INDEX IF NOT EXISTS workflow_runs_idempotency_key_idx ON workflow_runs (idempotency_key) WHERE idempotency_key IS NOT NULL
|
|
143
153
|
`);
|
|
144
154
|
}
|
|
155
|
+
if (currentVersion < 3) {
|
|
156
|
+
commands.push("ALTER TABLE workflow_runs ALTER COLUMN resource_id TYPE varchar(256)");
|
|
157
|
+
commands.push("ALTER TABLE workflow_runs ALTER COLUMN workflow_id TYPE varchar(256)");
|
|
158
|
+
}
|
|
159
|
+
if (currentVersion < 4) {
|
|
160
|
+
commands.push("ALTER TABLE workflow_runs ADD COLUMN IF NOT EXISTS parent_run_id varchar(32)");
|
|
161
|
+
commands.push("ALTER TABLE workflow_runs ADD COLUMN IF NOT EXISTS parent_step_id varchar(256)");
|
|
162
|
+
commands.push("ALTER TABLE workflow_runs ADD COLUMN IF NOT EXISTS parent_resource_id varchar(256)");
|
|
163
|
+
}
|
|
145
164
|
if (currentVersion === 0) {
|
|
146
165
|
commands.push(`INSERT INTO workflow_schema_version (version) VALUES (${CURRENT_SCHEMA_VERSION})`);
|
|
147
166
|
} else {
|
|
@@ -204,7 +223,10 @@ function mapRowToWorkflowRun(row) {
|
|
|
204
223
|
retryCount: row.retry_count,
|
|
205
224
|
maxRetries: row.max_retries,
|
|
206
225
|
jobId: row.job_id,
|
|
207
|
-
idempotencyKey: row.idempotency_key
|
|
226
|
+
idempotencyKey: row.idempotency_key,
|
|
227
|
+
parentRunId: row.parent_run_id,
|
|
228
|
+
parentStepId: row.parent_step_id,
|
|
229
|
+
parentResourceId: row.parent_resource_id
|
|
208
230
|
};
|
|
209
231
|
}
|
|
210
232
|
async function insertWorkflowRun({
|
|
@@ -215,7 +237,10 @@ async function insertWorkflowRun({
|
|
|
215
237
|
input,
|
|
216
238
|
maxRetries,
|
|
217
239
|
timeoutAt,
|
|
218
|
-
idempotencyKey
|
|
240
|
+
idempotencyKey,
|
|
241
|
+
parentRunId,
|
|
242
|
+
parentStepId,
|
|
243
|
+
parentResourceId
|
|
219
244
|
}, db) {
|
|
220
245
|
const runId = generateKSUID("run");
|
|
221
246
|
const now = new Date;
|
|
@@ -232,9 +257,12 @@ async function insertWorkflowRun({
|
|
|
232
257
|
updated_at,
|
|
233
258
|
timeline,
|
|
234
259
|
retry_count,
|
|
235
|
-
idempotency_key
|
|
260
|
+
idempotency_key,
|
|
261
|
+
parent_run_id,
|
|
262
|
+
parent_step_id,
|
|
263
|
+
parent_resource_id
|
|
236
264
|
)
|
|
237
|
-
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13)
|
|
265
|
+
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16)
|
|
238
266
|
ON CONFLICT (idempotency_key) WHERE idempotency_key IS NOT NULL DO NOTHING
|
|
239
267
|
RETURNING *`, [
|
|
240
268
|
runId,
|
|
@@ -249,7 +277,10 @@ async function insertWorkflowRun({
|
|
|
249
277
|
now,
|
|
250
278
|
"{}",
|
|
251
279
|
0,
|
|
252
|
-
idempotencyKey ?? null
|
|
280
|
+
idempotencyKey ?? null,
|
|
281
|
+
parentRunId ?? null,
|
|
282
|
+
parentStepId ?? null,
|
|
283
|
+
parentResourceId ?? null
|
|
253
284
|
]);
|
|
254
285
|
if (result.rows[0]) {
|
|
255
286
|
return { run: mapRowToWorkflowRun(result.rows[0]), created: true };
|
|
@@ -466,6 +497,17 @@ async function withPostgresTransaction(db, callback, pool) {
|
|
|
466
497
|
}
|
|
467
498
|
|
|
468
499
|
// src/error.ts
|
|
500
|
+
function validateWorkflowId(workflowId) {
|
|
501
|
+
if (workflowId.length > MAX_WORKFLOW_ID_LENGTH) {
|
|
502
|
+
throw new WorkflowEngineError(`workflowId exceeds maximum length of ${MAX_WORKFLOW_ID_LENGTH} characters (got ${workflowId.length})`, workflowId);
|
|
503
|
+
}
|
|
504
|
+
}
|
|
505
|
+
function validateResourceId(resourceId) {
|
|
506
|
+
if (resourceId != null && resourceId.length > MAX_RESOURCE_ID_LENGTH) {
|
|
507
|
+
throw new WorkflowEngineError(`resourceId exceeds maximum length of ${MAX_RESOURCE_ID_LENGTH} characters (got ${resourceId.length})`);
|
|
508
|
+
}
|
|
509
|
+
}
|
|
510
|
+
|
|
469
511
|
class WorkflowEngineError extends Error {
|
|
470
512
|
workflowId;
|
|
471
513
|
runId;
|
|
@@ -509,6 +551,7 @@ var StepType;
|
|
|
509
551
|
StepType2["WAIT_UNTIL"] = "waitUntil";
|
|
510
552
|
StepType2["DELAY"] = "delay";
|
|
511
553
|
StepType2["POLL"] = "poll";
|
|
554
|
+
StepType2["INVOKE_CHILD_WORKFLOW"] = "invokeChildWorkflow";
|
|
512
555
|
})(StepType ||= {});
|
|
513
556
|
|
|
514
557
|
// src/client.ts
|
|
@@ -593,6 +636,8 @@ class WorkflowClient {
|
|
|
593
636
|
idempotencyKey = params.idempotencyKey;
|
|
594
637
|
options = params.options;
|
|
595
638
|
}
|
|
639
|
+
validateWorkflowId(workflowId);
|
|
640
|
+
validateResourceId(resourceId);
|
|
596
641
|
const run = await withPostgresTransaction(this.db, async (_db) => {
|
|
597
642
|
const timeoutAt = options?.timeout ? new Date(Date.now() + options.timeout) : null;
|
|
598
643
|
const { run: insertedRun, created } = await insertWorkflowRun({
|
|
@@ -614,7 +659,8 @@ class WorkflowClient {
|
|
|
614
659
|
};
|
|
615
660
|
await this.boss.send(WORKFLOW_RUN_QUEUE_NAME, job, {
|
|
616
661
|
startAfter: new Date,
|
|
617
|
-
expireInSeconds: options?.expireInSeconds ?? defaultExpireInSeconds
|
|
662
|
+
expireInSeconds: options?.expireInSeconds ?? defaultExpireInSeconds,
|
|
663
|
+
db: _db
|
|
618
664
|
});
|
|
619
665
|
}
|
|
620
666
|
return insertedRun;
|
|
@@ -677,6 +723,11 @@ class WorkflowClient {
|
|
|
677
723
|
if (current.status !== "paused" /* PAUSED */) {
|
|
678
724
|
throw new WorkflowEngineError(`Cannot resume workflow run in '${current.status}' status, must be 'paused'`, current.workflowId, runId);
|
|
679
725
|
}
|
|
726
|
+
const currentStepId = current.currentStepId;
|
|
727
|
+
const currentStepTimelineEntry = current.timeline[invokeChildWorkflowTimelineKey(currentStepId)];
|
|
728
|
+
if (isInvokeChildWorkflowTimelineEntry(currentStepTimelineEntry)) {
|
|
729
|
+
return current;
|
|
730
|
+
}
|
|
680
731
|
return this.triggerEvent({
|
|
681
732
|
runId,
|
|
682
733
|
resourceId,
|
|
@@ -695,8 +746,12 @@ class WorkflowClient {
|
|
|
695
746
|
if (run.status !== "paused" /* PAUSED */) {
|
|
696
747
|
return run;
|
|
697
748
|
}
|
|
698
|
-
const
|
|
699
|
-
const
|
|
749
|
+
const currentStepId = run.currentStepId;
|
|
750
|
+
const currentStepTimelineEntry = run.timeline[invokeChildWorkflowTimelineKey(currentStepId)];
|
|
751
|
+
if (isInvokeChildWorkflowTimelineEntry(currentStepTimelineEntry)) {
|
|
752
|
+
return run;
|
|
753
|
+
}
|
|
754
|
+
const waitForEntry = run.timeline[waitForTimelineKey(currentStepId)];
|
|
700
755
|
if (!waitForEntry || typeof waitForEntry !== "object" || !("waitFor" in waitForEntry)) {
|
|
701
756
|
return run;
|
|
702
757
|
}
|
|
@@ -714,7 +769,7 @@ class WorkflowClient {
|
|
|
714
769
|
resourceId,
|
|
715
770
|
data: {
|
|
716
771
|
timeline: import_es_toolkit.merge(freshRun.timeline, {
|
|
717
|
-
[
|
|
772
|
+
[currentStepId]: {
|
|
718
773
|
output: data ?? {},
|
|
719
774
|
timestamp: new Date
|
|
720
775
|
}
|
|
@@ -786,6 +841,9 @@ class WorkflowClient {
|
|
|
786
841
|
workflowId
|
|
787
842
|
}) {
|
|
788
843
|
await this.ensureStarted();
|
|
844
|
+
if (workflowId)
|
|
845
|
+
validateWorkflowId(workflowId);
|
|
846
|
+
validateResourceId(resourceId);
|
|
789
847
|
return getWorkflowRuns({
|
|
790
848
|
resourceId,
|
|
791
849
|
startingAfter,
|
|
@@ -811,7 +869,10 @@ function createWorkflowRef(id, options) {
|
|
|
811
869
|
retries: defineOptions?.retries
|
|
812
870
|
});
|
|
813
871
|
Object.defineProperty(ref, "id", { value: id, enumerable: true });
|
|
814
|
-
Object.defineProperty(ref, "inputSchema", {
|
|
872
|
+
Object.defineProperty(ref, "inputSchema", {
|
|
873
|
+
value: options?.inputSchema,
|
|
874
|
+
enumerable: true
|
|
875
|
+
});
|
|
815
876
|
return ref;
|
|
816
877
|
}
|
|
817
878
|
function createWorkflowFactory(plugins = []) {
|
|
@@ -906,7 +967,7 @@ function parseWorkflowHandler(handler) {
|
|
|
906
967
|
const propertyAccess = node.expression;
|
|
907
968
|
const objectName = propertyAccess.expression.getText(sourceFile);
|
|
908
969
|
const methodName = propertyAccess.name.text;
|
|
909
|
-
if (objectName === "step" && (methodName === "run" || methodName === "waitFor" || methodName === "pause" || methodName === "waitUntil" || methodName === "delay" || methodName === "sleep" || methodName === "poll")) {
|
|
970
|
+
if (objectName === "step" && (methodName === "run" || methodName === "waitFor" || methodName === "pause" || methodName === "waitUntil" || methodName === "delay" || methodName === "sleep" || methodName === "poll" || methodName === "invokeChildWorkflow")) {
|
|
910
971
|
const firstArg = node.arguments[0];
|
|
911
972
|
if (firstArg) {
|
|
912
973
|
const { id, isDynamic } = extractStepId(firstArg);
|
|
@@ -939,13 +1000,21 @@ var StepTypeToIcon = {
|
|
|
939
1000
|
["pause" /* PAUSE */]: "⏸",
|
|
940
1001
|
["waitUntil" /* WAIT_UNTIL */]: "⏲",
|
|
941
1002
|
["delay" /* DELAY */]: "⏱",
|
|
942
|
-
["poll" /* POLL */]: "↻"
|
|
1003
|
+
["poll" /* POLL */]: "↻",
|
|
1004
|
+
["invokeChildWorkflow" /* INVOKE_CHILD_WORKFLOW */]: "↪"
|
|
943
1005
|
};
|
|
944
1006
|
var defaultLogger2 = {
|
|
945
1007
|
log: (_message) => console.warn(_message),
|
|
946
1008
|
error: (message, error) => console.error(message, error)
|
|
947
1009
|
};
|
|
948
1010
|
var defaultExpireInSeconds2 = process.env.WORKFLOW_RUN_EXPIRE_IN_SECONDS ? Number.parseInt(process.env.WORKFLOW_RUN_EXPIRE_IN_SECONDS, 10) : 5 * 60;
|
|
1011
|
+
var retrySendOptions = (maxRetries) => ({
|
|
1012
|
+
retryLimit: maxRetries,
|
|
1013
|
+
retryBackoff: true,
|
|
1014
|
+
retryDelay: 1
|
|
1015
|
+
});
|
|
1016
|
+
var getInvokeChildWorkflowEventName = (childRunId) => `__invoke_child_workflow_completed:${childRunId}`;
|
|
1017
|
+
var defaultHeartbeatSeconds = process.env.WORKFLOW_RUN_HEARTBEAT_SECONDS ? Number.parseInt(process.env.WORKFLOW_RUN_HEARTBEAT_SECONDS, 10) : 30;
|
|
949
1018
|
|
|
950
1019
|
class WorkflowEngine {
|
|
951
1020
|
boss;
|
|
@@ -979,7 +1048,10 @@ class WorkflowEngine {
|
|
|
979
1048
|
}
|
|
980
1049
|
this.db = this.boss.getDb();
|
|
981
1050
|
}
|
|
982
|
-
async start(asEngine = true, {
|
|
1051
|
+
async start(asEngine = true, {
|
|
1052
|
+
batchSize = 1,
|
|
1053
|
+
heartbeatSeconds = defaultHeartbeatSeconds
|
|
1054
|
+
} = {}) {
|
|
983
1055
|
if (this._started) {
|
|
984
1056
|
return;
|
|
985
1057
|
}
|
|
@@ -990,13 +1062,21 @@ class WorkflowEngine {
|
|
|
990
1062
|
await this.registerWorkflow(workflow2);
|
|
991
1063
|
}
|
|
992
1064
|
}
|
|
993
|
-
|
|
1065
|
+
const mainQueueOptions = {
|
|
1066
|
+
retryLimit: 0,
|
|
1067
|
+
deadLetter: WORKFLOW_RUN_DLQ_QUEUE_NAME,
|
|
1068
|
+
heartbeatSeconds
|
|
1069
|
+
};
|
|
1070
|
+
await this.boss.createQueue(WORKFLOW_RUN_DLQ_QUEUE_NAME, { retryLimit: 0 });
|
|
1071
|
+
await this.boss.createQueue(WORKFLOW_RUN_QUEUE_NAME, mainQueueOptions);
|
|
1072
|
+
await this.boss.updateQueue(WORKFLOW_RUN_QUEUE_NAME, mainQueueOptions);
|
|
994
1073
|
const numWorkers = +(process.env.WORKFLOW_RUN_WORKERS ?? 3);
|
|
995
1074
|
if (asEngine) {
|
|
996
|
-
|
|
997
|
-
await this.boss.work(WORKFLOW_RUN_QUEUE_NAME, { pollingIntervalSeconds: 0.5, batchSize }, (job) => this.handleWorkflowRun(job));
|
|
1075
|
+
await Promise.all(Array.from({ length: numWorkers }, (_, i) => this.boss.work(WORKFLOW_RUN_QUEUE_NAME, { pollingIntervalSeconds: 0.5, batchSize, includeMetadata: true }, (jobs) => this.handleWorkflowRun(jobs)).then(() => {
|
|
998
1076
|
this.logger.log(`Worker ${i + 1}/${numWorkers} started for queue ${WORKFLOW_RUN_QUEUE_NAME}`);
|
|
999
|
-
}
|
|
1077
|
+
})));
|
|
1078
|
+
await this.boss.work(WORKFLOW_RUN_DLQ_QUEUE_NAME, { pollingIntervalSeconds: 0.5, batchSize: 1 }, (jobs) => this.handleWorkflowRunDlq(jobs));
|
|
1079
|
+
this.logger.log(`Worker started for queue ${WORKFLOW_RUN_DLQ_QUEUE_NAME}`);
|
|
1000
1080
|
}
|
|
1001
1081
|
this._started = true;
|
|
1002
1082
|
this.logger.log("Workflow engine started!");
|
|
@@ -1039,29 +1119,57 @@ class WorkflowEngine {
|
|
|
1039
1119
|
this.workflows.clear();
|
|
1040
1120
|
return this;
|
|
1041
1121
|
}
|
|
1042
|
-
|
|
1043
|
-
let workflowId;
|
|
1044
|
-
let input;
|
|
1045
|
-
let resourceId;
|
|
1046
|
-
let idempotencyKey;
|
|
1047
|
-
let options;
|
|
1122
|
+
resolveWorkflowRunParameters(refOrParams, inputArg, optionsArg) {
|
|
1048
1123
|
if (typeof refOrParams === "function" && "id" in refOrParams) {
|
|
1049
|
-
|
|
1050
|
-
|
|
1051
|
-
|
|
1052
|
-
|
|
1053
|
-
|
|
1054
|
-
|
|
1055
|
-
|
|
1056
|
-
workflowId = params.workflowId;
|
|
1057
|
-
input = params.input;
|
|
1058
|
-
resourceId = params.resourceId;
|
|
1059
|
-
idempotencyKey = params.idempotencyKey;
|
|
1060
|
-
options = params.options;
|
|
1124
|
+
return {
|
|
1125
|
+
workflowId: refOrParams.id,
|
|
1126
|
+
input: inputArg,
|
|
1127
|
+
options: optionsArg,
|
|
1128
|
+
resourceId: optionsArg?.resourceId,
|
|
1129
|
+
idempotencyKey: optionsArg?.idempotencyKey
|
|
1130
|
+
};
|
|
1061
1131
|
}
|
|
1132
|
+
const params = refOrParams;
|
|
1133
|
+
return {
|
|
1134
|
+
workflowId: params.workflowId,
|
|
1135
|
+
input: params.input,
|
|
1136
|
+
resourceId: params.resourceId ?? params.options?.resourceId,
|
|
1137
|
+
idempotencyKey: params.idempotencyKey ?? params.options?.idempotencyKey,
|
|
1138
|
+
options: params.options
|
|
1139
|
+
};
|
|
1140
|
+
}
|
|
1141
|
+
async startWorkflow(refOrParams, inputArg, optionsArg) {
|
|
1142
|
+
const { workflowId, input, resourceId, idempotencyKey, options } = this.resolveWorkflowRunParameters(refOrParams, inputArg, optionsArg);
|
|
1062
1143
|
if (!this._started) {
|
|
1063
1144
|
await this.start(false, { batchSize: options?.batchSize ?? 1 });
|
|
1064
1145
|
}
|
|
1146
|
+
const { run } = await this.createWorkflowRun({
|
|
1147
|
+
workflowId,
|
|
1148
|
+
input,
|
|
1149
|
+
resourceId,
|
|
1150
|
+
idempotencyKey,
|
|
1151
|
+
options
|
|
1152
|
+
});
|
|
1153
|
+
this.logger.log("Started workflow run", {
|
|
1154
|
+
runId: run.id,
|
|
1155
|
+
workflowId
|
|
1156
|
+
});
|
|
1157
|
+
return run;
|
|
1158
|
+
}
|
|
1159
|
+
async createWorkflowRun({
|
|
1160
|
+
workflowId,
|
|
1161
|
+
input,
|
|
1162
|
+
resourceId,
|
|
1163
|
+
idempotencyKey,
|
|
1164
|
+
options,
|
|
1165
|
+
parentRunId,
|
|
1166
|
+
parentStepId,
|
|
1167
|
+
parentResourceId,
|
|
1168
|
+
enqueue = true,
|
|
1169
|
+
db
|
|
1170
|
+
}) {
|
|
1171
|
+
validateWorkflowId(workflowId);
|
|
1172
|
+
validateResourceId(resourceId);
|
|
1065
1173
|
const workflow2 = this.workflows.get(workflowId);
|
|
1066
1174
|
if (!workflow2) {
|
|
1067
1175
|
throw new WorkflowEngineError(`Unknown workflow ${workflowId}`);
|
|
@@ -1078,37 +1186,60 @@ class WorkflowEngine {
|
|
|
1078
1186
|
}
|
|
1079
1187
|
}
|
|
1080
1188
|
const initialStepId = workflow2.steps[0]?.id ?? "__start__";
|
|
1081
|
-
const
|
|
1082
|
-
|
|
1083
|
-
|
|
1084
|
-
|
|
1085
|
-
|
|
1086
|
-
|
|
1087
|
-
|
|
1088
|
-
|
|
1089
|
-
|
|
1090
|
-
|
|
1091
|
-
|
|
1092
|
-
|
|
1093
|
-
|
|
1094
|
-
|
|
1095
|
-
|
|
1096
|
-
|
|
1097
|
-
|
|
1098
|
-
|
|
1099
|
-
};
|
|
1100
|
-
await this.boss.send(WORKFLOW_RUN_QUEUE_NAME, job, {
|
|
1101
|
-
startAfter: new Date,
|
|
1102
|
-
expireInSeconds: options?.expireInSeconds ?? defaultExpireInSeconds2
|
|
1103
|
-
});
|
|
1189
|
+
const timeoutAt = options?.timeout ? new Date(Date.now() + options.timeout) : workflow2.timeout ? new Date(Date.now() + workflow2.timeout) : null;
|
|
1190
|
+
const insertRun = async (targetDb) => await insertWorkflowRun({
|
|
1191
|
+
resourceId,
|
|
1192
|
+
workflowId,
|
|
1193
|
+
currentStepId: initialStepId,
|
|
1194
|
+
status: "running" /* RUNNING */,
|
|
1195
|
+
input,
|
|
1196
|
+
maxRetries: options?.retries ?? workflow2.retries ?? 0,
|
|
1197
|
+
timeoutAt,
|
|
1198
|
+
idempotencyKey,
|
|
1199
|
+
parentRunId,
|
|
1200
|
+
parentStepId,
|
|
1201
|
+
parentResourceId
|
|
1202
|
+
}, targetDb);
|
|
1203
|
+
const insertAndEnqueue = async (targetDb) => {
|
|
1204
|
+
const result = await insertRun(targetDb);
|
|
1205
|
+
if (enqueue && result.created) {
|
|
1206
|
+
await this.enqueueWorkflowRun(result.run, options, targetDb);
|
|
1104
1207
|
}
|
|
1105
|
-
return
|
|
1106
|
-
}
|
|
1107
|
-
this.
|
|
1208
|
+
return result;
|
|
1209
|
+
};
|
|
1210
|
+
const { run, created } = db ? await insertAndEnqueue(db) : await withPostgresTransaction(this.boss.getDb(), insertAndEnqueue, this.pool);
|
|
1211
|
+
return { run, created };
|
|
1212
|
+
}
|
|
1213
|
+
async enqueueWorkflowRun(run, options, db) {
|
|
1214
|
+
const job = {
|
|
1108
1215
|
runId: run.id,
|
|
1109
|
-
|
|
1216
|
+
resourceId: run.resourceId ?? undefined,
|
|
1217
|
+
workflowId: run.workflowId,
|
|
1218
|
+
input: run.input
|
|
1219
|
+
};
|
|
1220
|
+
await this.boss.send(WORKFLOW_RUN_QUEUE_NAME, job, {
|
|
1221
|
+
startAfter: new Date,
|
|
1222
|
+
expireInSeconds: options?.expireInSeconds ?? defaultExpireInSeconds2,
|
|
1223
|
+
...retrySendOptions(run.maxRetries),
|
|
1224
|
+
...db ? { db } : {}
|
|
1225
|
+
});
|
|
1226
|
+
}
|
|
1227
|
+
async notifyParentOfChildTerminalRun(childRun) {
|
|
1228
|
+
if (!childRun.parentRunId || !childRun.parentStepId) {
|
|
1229
|
+
return;
|
|
1230
|
+
}
|
|
1231
|
+
const parentRun = await getWorkflowRun({
|
|
1232
|
+
runId: childRun.parentRunId,
|
|
1233
|
+
resourceId: childRun.parentResourceId ?? undefined
|
|
1234
|
+
}, { db: this.db });
|
|
1235
|
+
if (!parentRun || parentRun.status === "completed" /* COMPLETED */ || parentRun.status === "failed" /* FAILED */ || parentRun.status === "cancelled" /* CANCELLED */) {
|
|
1236
|
+
return;
|
|
1237
|
+
}
|
|
1238
|
+
await this.triggerEvent({
|
|
1239
|
+
runId: parentRun.id,
|
|
1240
|
+
resourceId: parentRun.resourceId ?? undefined,
|
|
1241
|
+
eventName: getInvokeChildWorkflowEventName(childRun.id)
|
|
1110
1242
|
});
|
|
1111
|
-
return run;
|
|
1112
1243
|
}
|
|
1113
1244
|
async pauseWorkflow({
|
|
1114
1245
|
runId,
|
|
@@ -1140,6 +1271,9 @@ class WorkflowEngine {
|
|
|
1140
1271
|
if (current.status !== "paused" /* PAUSED */) {
|
|
1141
1272
|
throw new WorkflowEngineError(`Cannot resume workflow run in '${current.status}' status, must be 'paused'`, current.workflowId, runId);
|
|
1142
1273
|
}
|
|
1274
|
+
if (this.getInvokeChildWorkflowStepEntry(current.timeline, current.currentStepId)) {
|
|
1275
|
+
return current;
|
|
1276
|
+
}
|
|
1143
1277
|
return this.triggerEvent({
|
|
1144
1278
|
runId,
|
|
1145
1279
|
resourceId,
|
|
@@ -1159,6 +1293,9 @@ class WorkflowEngine {
|
|
|
1159
1293
|
return run;
|
|
1160
1294
|
}
|
|
1161
1295
|
const stepId = run.currentStepId;
|
|
1296
|
+
if (this.getInvokeChildWorkflowStepEntry(run.timeline, stepId)) {
|
|
1297
|
+
return run;
|
|
1298
|
+
}
|
|
1162
1299
|
const waitForStep = this.getWaitForStepEntry(run.timeline, stepId);
|
|
1163
1300
|
if (!waitForStep) {
|
|
1164
1301
|
return run;
|
|
@@ -1207,6 +1344,7 @@ class WorkflowEngine {
|
|
|
1207
1344
|
expectedStatuses: ["pending" /* PENDING */, "running" /* RUNNING */, "paused" /* PAUSED */]
|
|
1208
1345
|
});
|
|
1209
1346
|
this.logger.log(`cancelled workflow run with id ${runId}`);
|
|
1347
|
+
await this.notifyParentOfChildTerminalRun(run);
|
|
1210
1348
|
return run;
|
|
1211
1349
|
}
|
|
1212
1350
|
async triggerEvent({
|
|
@@ -1230,7 +1368,8 @@ class WorkflowEngine {
|
|
|
1230
1368
|
}
|
|
1231
1369
|
};
|
|
1232
1370
|
await this.boss.send(WORKFLOW_RUN_QUEUE_NAME, job, {
|
|
1233
|
-
expireInSeconds: options?.expireInSeconds ?? defaultExpireInSeconds2
|
|
1371
|
+
expireInSeconds: options?.expireInSeconds ?? defaultExpireInSeconds2,
|
|
1372
|
+
...retrySendOptions(run.maxRetries)
|
|
1234
1373
|
});
|
|
1235
1374
|
this.logger.log(`event ${eventName} sent for workflow run with id ${runId}`);
|
|
1236
1375
|
return run;
|
|
@@ -1309,7 +1448,7 @@ class WorkflowEngine {
|
|
|
1309
1448
|
return run.resourceId ?? undefined;
|
|
1310
1449
|
}
|
|
1311
1450
|
async handleWorkflowRun([job]) {
|
|
1312
|
-
const { runId = "", resourceId, workflowId = "",
|
|
1451
|
+
const { runId = "", resourceId, workflowId = "", event } = job?.data ?? {};
|
|
1313
1452
|
let run;
|
|
1314
1453
|
let scopedResourceId;
|
|
1315
1454
|
try {
|
|
@@ -1332,6 +1471,14 @@ class WorkflowEngine {
|
|
|
1332
1471
|
throw new WorkflowEngineError(`Workflow run ${runId} does not match job workflowId ${workflowId}`, workflowId, runId);
|
|
1333
1472
|
}
|
|
1334
1473
|
scopedResourceId = this.resolveScopedResourceId(resourceId, run);
|
|
1474
|
+
if (job?.retryCount !== undefined && run.retryCount !== job.retryCount) {
|
|
1475
|
+
await this.updateRun({
|
|
1476
|
+
runId,
|
|
1477
|
+
resourceId: scopedResourceId,
|
|
1478
|
+
data: { retryCount: job.retryCount }
|
|
1479
|
+
});
|
|
1480
|
+
run = { ...run, retryCount: job.retryCount };
|
|
1481
|
+
}
|
|
1335
1482
|
if (run.status === "cancelled" /* CANCELLED */) {
|
|
1336
1483
|
this.logger.log(`Workflow run ${runId} is cancelled, skipping`);
|
|
1337
1484
|
return;
|
|
@@ -1435,6 +1582,22 @@ class WorkflowEngine {
|
|
|
1435
1582
|
}
|
|
1436
1583
|
const timeoutMs = options?.timeout ? parseDuration(options.timeout) : undefined;
|
|
1437
1584
|
return this.pollStep({ run, stepId, conditionFn, intervalMs, timeoutMs });
|
|
1585
|
+
},
|
|
1586
|
+
invokeChildWorkflow: async (stepId, refOrParams, inputArg, optionsArg) => {
|
|
1587
|
+
if (!run) {
|
|
1588
|
+
throw new WorkflowEngineError("Missing workflow run", workflowId, runId);
|
|
1589
|
+
}
|
|
1590
|
+
const resolvedChildCall = this.resolveWorkflowRunParameters(refOrParams, inputArg, optionsArg);
|
|
1591
|
+
const childWorkflowInvocation = {
|
|
1592
|
+
run,
|
|
1593
|
+
stepId,
|
|
1594
|
+
workflowId: resolvedChildCall.workflowId,
|
|
1595
|
+
input: resolvedChildCall.input,
|
|
1596
|
+
options: resolvedChildCall.options,
|
|
1597
|
+
resourceId: resolvedChildCall.resourceId,
|
|
1598
|
+
idempotencyKey: resolvedChildCall.idempotencyKey
|
|
1599
|
+
};
|
|
1600
|
+
return this.invokeChildWorkflowStep(childWorkflowInvocation);
|
|
1438
1601
|
}
|
|
1439
1602
|
};
|
|
1440
1603
|
let step = { ...baseStep };
|
|
@@ -1459,7 +1622,7 @@ class WorkflowEngine {
|
|
|
1459
1622
|
const shouldComplete = run.status === "running" /* RUNNING */ && (noParsedSteps || isLastParsedStep || hasPluginSteps && result !== undefined);
|
|
1460
1623
|
if (shouldComplete) {
|
|
1461
1624
|
const normalizedResult = result === undefined ? {} : result;
|
|
1462
|
-
await this.updateRun({
|
|
1625
|
+
const completedRun = await this.updateRun({
|
|
1463
1626
|
runId,
|
|
1464
1627
|
resourceId: scopedResourceId,
|
|
1465
1628
|
data: {
|
|
@@ -1469,56 +1632,244 @@ class WorkflowEngine {
|
|
|
1469
1632
|
jobId: job?.id
|
|
1470
1633
|
}
|
|
1471
1634
|
});
|
|
1635
|
+
await this.notifyParentOfChildTerminalRun(completedRun);
|
|
1472
1636
|
this.logger.log("Workflow run completed.", {
|
|
1473
1637
|
runId,
|
|
1474
1638
|
workflowId
|
|
1475
1639
|
});
|
|
1476
1640
|
}
|
|
1477
1641
|
} catch (error) {
|
|
1478
|
-
if (run && run.retryCount < run.maxRetries) {
|
|
1479
|
-
await this.updateRun({
|
|
1480
|
-
runId,
|
|
1481
|
-
resourceId: scopedResourceId,
|
|
1482
|
-
data: {
|
|
1483
|
-
retryCount: run.retryCount + 1,
|
|
1484
|
-
jobId: job?.id
|
|
1485
|
-
}
|
|
1486
|
-
});
|
|
1487
|
-
const retryDelay = 2 ** run.retryCount * 1000;
|
|
1488
|
-
const pgBossJob = {
|
|
1489
|
-
runId,
|
|
1490
|
-
resourceId: scopedResourceId,
|
|
1491
|
-
workflowId,
|
|
1492
|
-
input
|
|
1493
|
-
};
|
|
1494
|
-
await this.boss?.send("workflow-run", pgBossJob, {
|
|
1495
|
-
startAfter: new Date(Date.now() + retryDelay),
|
|
1496
|
-
expireInSeconds: defaultExpireInSeconds2
|
|
1497
|
-
});
|
|
1498
|
-
return;
|
|
1499
|
-
}
|
|
1500
1642
|
if (runId) {
|
|
1501
|
-
await this.updateRun({
|
|
1643
|
+
const updatedRun = await this.updateRun({
|
|
1502
1644
|
runId,
|
|
1503
1645
|
resourceId: scopedResourceId,
|
|
1504
1646
|
data: {
|
|
1505
|
-
status: "failed" /* FAILED */,
|
|
1506
1647
|
error: error instanceof Error ? error.message : String(error),
|
|
1507
1648
|
jobId: job?.id
|
|
1508
1649
|
}
|
|
1509
1650
|
});
|
|
1651
|
+
if (updatedRun.status === "completed" /* COMPLETED */ || updatedRun.status === "failed" /* FAILED */ || updatedRun.status === "cancelled" /* CANCELLED */) {
|
|
1652
|
+
await this.notifyParentOfChildTerminalRun(updatedRun);
|
|
1653
|
+
}
|
|
1510
1654
|
}
|
|
1511
1655
|
throw error;
|
|
1512
1656
|
}
|
|
1513
1657
|
}
|
|
1658
|
+
async handleWorkflowRunDlq([job]) {
|
|
1659
|
+
const { runId } = job?.data ?? {};
|
|
1660
|
+
if (!runId)
|
|
1661
|
+
return;
|
|
1662
|
+
const run = await getWorkflowRun({ runId }, { db: this.db });
|
|
1663
|
+
if (!run || run.status !== "running" /* RUNNING */)
|
|
1664
|
+
return;
|
|
1665
|
+
const failedRun = await this.updateRun({
|
|
1666
|
+
runId,
|
|
1667
|
+
resourceId: run.resourceId ?? undefined,
|
|
1668
|
+
data: {
|
|
1669
|
+
status: "failed" /* FAILED */,
|
|
1670
|
+
error: run.error ?? "Workflow run worker died or job expired before completion"
|
|
1671
|
+
}
|
|
1672
|
+
});
|
|
1673
|
+
await this.notifyParentOfChildTerminalRun(failedRun);
|
|
1674
|
+
this.logger.log("Marked stuck workflow run as failed", {
|
|
1675
|
+
runId,
|
|
1676
|
+
workflowId: run.workflowId
|
|
1677
|
+
});
|
|
1678
|
+
}
|
|
1514
1679
|
getCachedStepEntry(timeline, stepId) {
|
|
1515
1680
|
const stepEntry = timeline[stepId];
|
|
1516
1681
|
return stepEntry && typeof stepEntry === "object" && "output" in stepEntry ? stepEntry : null;
|
|
1517
1682
|
}
|
|
1518
1683
|
getWaitForStepEntry(timeline, stepId) {
|
|
1519
|
-
const entry = timeline[
|
|
1684
|
+
const entry = timeline[waitForTimelineKey(stepId)];
|
|
1520
1685
|
return entry && typeof entry === "object" && "waitFor" in entry ? entry : null;
|
|
1521
1686
|
}
|
|
1687
|
+
getInvokeChildWorkflowStepEntry(timeline, stepId) {
|
|
1688
|
+
const entry = timeline[invokeChildWorkflowTimelineKey(stepId)];
|
|
1689
|
+
return isInvokeChildWorkflowTimelineEntry(entry) ? entry : null;
|
|
1690
|
+
}
|
|
1691
|
+
getCompletedChildOutput(childRun) {
|
|
1692
|
+
return childRun.output === undefined ? {} : childRun.output;
|
|
1693
|
+
}
|
|
1694
|
+
throwForNonCompletedChild(childRun) {
|
|
1695
|
+
throw new WorkflowEngineError(`Child workflow ${childRun.workflowId} ${childRun.status}${childRun.error ? `: ${childRun.error}` : ""}`, childRun.workflowId, childRun.id);
|
|
1696
|
+
}
|
|
1697
|
+
assertInvokeChildWorkflowStepOwnership({
|
|
1698
|
+
childRun,
|
|
1699
|
+
parentRun,
|
|
1700
|
+
stepId,
|
|
1701
|
+
workflowId
|
|
1702
|
+
}) {
|
|
1703
|
+
const expectedParentResourceId = parentRun.resourceId ?? null;
|
|
1704
|
+
const matches = childRun.workflowId === workflowId && childRun.parentRunId === parentRun.id && childRun.parentStepId === stepId && childRun.parentResourceId === expectedParentResourceId;
|
|
1705
|
+
if (!matches) {
|
|
1706
|
+
throw new WorkflowEngineError(`Idempotency key resolved to workflow run ${childRun.id}, which does not belong to invokeChildWorkflow step '${stepId}'`, workflowId, parentRun.id);
|
|
1707
|
+
}
|
|
1708
|
+
}
|
|
1709
|
+
  // Executes an invokeChildWorkflow step for a running workflow: resolves a cached
  // result if one exists, otherwise creates (or adopts, via idempotency key) a child
  // workflow run and pauses the parent until the child completes.
  // Returns the child's output when it is already available; otherwise returns
  // undefined after pausing the parent run.
  // NOTE(review): the whole decision sequence runs inside one Postgres transaction
  // holding an exclusive lock on the parent run, so ordering here is load-bearing.
  async invokeChildWorkflowStep({
    run,
    stepId,
    workflowId,
    input,
    resourceId,
    idempotencyKey,
    options
  }) {
    let invokeOutput;
    // Tracks whether invokeOutput was actually produced — needed because the
    // child's output may legitimately be undefined-adjacent falsy values.
    let hasInvokeOutput = false;
    // Child inherits the parent's resourceId unless the caller overrides it.
    const childResourceId = resourceId ?? run.resourceId ?? undefined;
    const childIdempotencyKey = idempotencyKey;
    await withPostgresTransaction(this.db, async (db) => {
      // Re-read the parent run under an exclusive row lock so concurrent workers
      // can't race this step's bookkeeping.
      const lockedRun = await this.getRun({ runId: run.id, resourceId: run.resourceId ?? undefined }, { exclusiveLock: true, db });
      // If the parent is no longer runnable, do nothing (the engine will not
      // resume this step).
      if (lockedRun.status === "cancelled" /* CANCELLED */ || lockedRun.status === "paused" /* PAUSED */ || lockedRun.status === "failed" /* FAILED */) {
        return;
      }
      // Fast path: the step already completed on a previous execution — reuse
      // the cached output from the timeline.
      const lockedCached = this.getCachedStepEntry(lockedRun.timeline, stepId);
      if (lockedCached?.output !== undefined) {
        invokeOutput = lockedCached.output;
        hasInvokeOutput = true;
        return;
      }
      // A child run was already created for this step on a prior attempt?
      const lockedInvoke = this.getInvokeChildWorkflowStepEntry(lockedRun.timeline, stepId);
      if (lockedInvoke) {
        // Older timeline entries may predate childResourceId being recorded;
        // fall back to the freshly computed value in that case.
        const existingChildResourceId = "childResourceId" in lockedInvoke.invokeChildWorkflow ? lockedInvoke.invokeChildWorkflow.childResourceId ?? undefined : childResourceId;
        const existingChildRun = await this.getRun({
          runId: lockedInvoke.invokeChildWorkflow.childRunId,
          resourceId: existingChildResourceId
        });
        if (existingChildRun.status === "completed" /* COMPLETED */) {
          // Child finished while we were away: cache its output on the parent's
          // timeline under the step id so future replays hit the fast path.
          invokeOutput = this.getCompletedChildOutput(existingChildRun);
          hasInvokeOutput = true;
          await this.updateRun({
            runId: run.id,
            resourceId: run.resourceId ?? undefined,
            data: {
              timeline: import_es_toolkit2.merge(lockedRun.timeline, {
                [stepId]: {
                  output: invokeOutput,
                  timestamp: new Date
                }
              })
            }
          }, { db });
          return;
        }
        if (existingChildRun.status === "failed" /* FAILED */ || existingChildRun.status === "cancelled" /* CANCELLED */) {
          // Surface the child's terminal failure/cancellation to the parent step.
          this.throwForNonCompletedChild(existingChildRun);
        }
        // Child still in flight: pause the parent until the child's completion
        // event fires. skipOutput because the event payload is not the step output.
        await this.pauseRunForWait({
          run: lockedRun,
          stepId,
          eventName: getInvokeChildWorkflowEventName(existingChildRun.id),
          skipOutput: true,
          db
        });
        return;
      }
      // No prior child: create one (idempotency key may resolve to an existing
      // run instead of creating — see result.created below). enqueue: true puts
      // the child on the run queue inside the same transaction.
      const result = await this.createWorkflowRun({
        workflowId,
        input,
        resourceId: childResourceId,
        idempotencyKey: childIdempotencyKey,
        options,
        parentRunId: run.id,
        parentStepId: stepId,
        parentResourceId: run.resourceId ?? undefined,
        enqueue: true,
        db
      });
      const childRun = result.run;
      if (!result.created) {
        // Idempotency key matched a pre-existing run: verify it really belongs
        // to this parent/step before adopting it (throws otherwise).
        this.assertInvokeChildWorkflowStepOwnership({
          childRun,
          parentRun: lockedRun,
          stepId,
          workflowId
        });
        if (childRun.status === "completed" /* COMPLETED */) {
          // Adopted child already finished: record both the invoke bookkeeping
          // entry and the step output in one timeline merge.
          invokeOutput = this.getCompletedChildOutput(childRun);
          hasInvokeOutput = true;
          await this.updateRun({
            runId: run.id,
            resourceId: run.resourceId ?? undefined,
            data: {
              timeline: import_es_toolkit2.merge(lockedRun.timeline, {
                [invokeChildWorkflowTimelineKey(stepId)]: {
                  invokeChildWorkflow: {
                    childRunId: childRun.id,
                    childWorkflowId: childRun.workflowId,
                    childResourceId: childRun.resourceId
                  },
                  timestamp: new Date
                },
                [stepId]: {
                  output: invokeOutput,
                  timestamp: new Date
                }
              })
            }
          }, { db });
          return;
        }
        if (childRun.status === "failed" /* FAILED */ || childRun.status === "cancelled" /* CANCELLED */) {
          this.throwForNonCompletedChild(childRun);
        }
      }
      // Child is pending/active: persist the invoke bookkeeping entry and pause
      // the parent until the child's completion event arrives.
      await this.pauseRunForWait({
        run: lockedRun,
        stepId,
        eventName: getInvokeChildWorkflowEventName(childRun.id),
        skipOutput: true,
        db,
        timeline: import_es_toolkit2.merge(lockedRun.timeline, {
          [invokeChildWorkflowTimelineKey(stepId)]: {
            invokeChildWorkflow: {
              childRunId: childRun.id,
              childWorkflowId: childRun.workflowId,
              childResourceId: childRun.resourceId
            },
            timestamp: new Date
          }
        })
      });
    }, this.pool);
    if (hasInvokeOutput) {
      return invokeOutput;
    }
  }
|
|
1840
|
+
async pauseRunForWait({
|
|
1841
|
+
run,
|
|
1842
|
+
stepId,
|
|
1843
|
+
eventName,
|
|
1844
|
+
timeoutEvent,
|
|
1845
|
+
skipOutput,
|
|
1846
|
+
db,
|
|
1847
|
+
timeline
|
|
1848
|
+
}) {
|
|
1849
|
+
const baseTimeline = timeline ?? run.timeline;
|
|
1850
|
+
const waitFor = {};
|
|
1851
|
+
if (eventName)
|
|
1852
|
+
waitFor.eventName = eventName;
|
|
1853
|
+
if (timeoutEvent)
|
|
1854
|
+
waitFor.timeoutEvent = timeoutEvent;
|
|
1855
|
+
if (skipOutput)
|
|
1856
|
+
waitFor.skipOutput = true;
|
|
1857
|
+
await this.updateRun({
|
|
1858
|
+
runId: run.id,
|
|
1859
|
+
resourceId: run.resourceId ?? undefined,
|
|
1860
|
+
data: {
|
|
1861
|
+
status: "paused" /* PAUSED */,
|
|
1862
|
+
currentStepId: stepId,
|
|
1863
|
+
pausedAt: new Date,
|
|
1864
|
+
timeline: import_es_toolkit2.merge(baseTimeline, {
|
|
1865
|
+
[waitForTimelineKey(stepId)]: {
|
|
1866
|
+
waitFor,
|
|
1867
|
+
timestamp: new Date
|
|
1868
|
+
}
|
|
1869
|
+
})
|
|
1870
|
+
}
|
|
1871
|
+
}, { db });
|
|
1872
|
+
}
|
|
1522
1873
|
async runStep({
|
|
1523
1874
|
stepId,
|
|
1524
1875
|
run,
|
|
@@ -1607,21 +1958,7 @@ ${error.stack}` : String(error)
|
|
|
1607
1958
|
const timeoutEvent = timeoutDate ? `__timeout_${stepId}` : undefined;
|
|
1608
1959
|
await withPostgresTransaction(this.db, async (db) => {
|
|
1609
1960
|
const freshRun = await this.getRun({ runId: run.id, resourceId: run.resourceId ?? undefined }, { exclusiveLock: true, db });
|
|
1610
|
-
return this.
|
|
1611
|
-
runId: run.id,
|
|
1612
|
-
resourceId: run.resourceId ?? undefined,
|
|
1613
|
-
data: {
|
|
1614
|
-
status: "paused" /* PAUSED */,
|
|
1615
|
-
currentStepId: stepId,
|
|
1616
|
-
pausedAt: new Date,
|
|
1617
|
-
timeline: import_es_toolkit2.merge(freshRun.timeline, {
|
|
1618
|
-
[`${stepId}-wait-for`]: {
|
|
1619
|
-
waitFor: { eventName, timeoutEvent },
|
|
1620
|
-
timestamp: new Date
|
|
1621
|
-
}
|
|
1622
|
-
})
|
|
1623
|
-
}
|
|
1624
|
-
}, { db });
|
|
1961
|
+
return this.pauseRunForWait({ run: freshRun, stepId, eventName, timeoutEvent, db });
|
|
1625
1962
|
}, this.pool);
|
|
1626
1963
|
if (timeoutDate && timeoutEvent) {
|
|
1627
1964
|
try {
|
|
@@ -1634,7 +1971,8 @@ ${error.stack}` : String(error)
|
|
|
1634
1971
|
};
|
|
1635
1972
|
await this.boss.send(WORKFLOW_RUN_QUEUE_NAME, job, {
|
|
1636
1973
|
startAfter: timeoutDate.getTime() <= Date.now() ? new Date : timeoutDate,
|
|
1637
|
-
expireInSeconds: defaultExpireInSeconds2
|
|
1974
|
+
expireInSeconds: defaultExpireInSeconds2,
|
|
1975
|
+
...retrySendOptions(run.maxRetries)
|
|
1638
1976
|
});
|
|
1639
1977
|
} catch (error) {
|
|
1640
1978
|
await this.updateRun({
|
|
@@ -1734,7 +2072,7 @@ ${error.stack}` : String(error)
|
|
|
1734
2072
|
pausedAt: new Date,
|
|
1735
2073
|
timeline: import_es_toolkit2.merge(freshRun.timeline, {
|
|
1736
2074
|
[`${stepId}-poll`]: { startedAt: startedAt.toISOString() },
|
|
1737
|
-
[
|
|
2075
|
+
[waitForTimelineKey(stepId)]: {
|
|
1738
2076
|
waitFor: { timeoutEvent: pollEvent, skipOutput: true },
|
|
1739
2077
|
timestamp: new Date
|
|
1740
2078
|
}
|
|
@@ -1751,7 +2089,8 @@ ${error.stack}` : String(error)
|
|
|
1751
2089
|
event: { name: pollEvent, data: {} }
|
|
1752
2090
|
}, {
|
|
1753
2091
|
startAfter: new Date(Date.now() + intervalMs),
|
|
1754
|
-
expireInSeconds: defaultExpireInSeconds2
|
|
2092
|
+
expireInSeconds: defaultExpireInSeconds2,
|
|
2093
|
+
...retrySendOptions(run.maxRetries)
|
|
1755
2094
|
});
|
|
1756
2095
|
} catch (error) {
|
|
1757
2096
|
await this.updateRun({
|
|
@@ -1794,6 +2133,9 @@ ${error.stack}` : String(error)
|
|
|
1794
2133
|
statuses,
|
|
1795
2134
|
workflowId
|
|
1796
2135
|
}) {
|
|
2136
|
+
if (workflowId)
|
|
2137
|
+
validateWorkflowId(workflowId);
|
|
2138
|
+
validateResourceId(resourceId);
|
|
1797
2139
|
return getWorkflowRuns({
|
|
1798
2140
|
resourceId,
|
|
1799
2141
|
startingAfter,
|
|
@@ -1805,5 +2147,5 @@ ${error.stack}` : String(error)
|
|
|
1805
2147
|
}
|
|
1806
2148
|
}
|
|
1807
2149
|
|
|
1808
|
-
//# debugId=
|
|
2150
|
+
//# debugId=8E8C82F1948B934364756E2164756E21
|
|
1809
2151
|
//# sourceMappingURL=index.js.map
|