@usehelical/workflows 0.0.1-alpha.17 → 0.0.1-alpha.19
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +74 -6
- package/dist/api.d.ts +2 -2
- package/dist/api.js +50 -7
- package/dist/api.js.map +1 -1
- package/dist/chunk-7I6XZ2V3.js +1039 -0
- package/dist/chunk-7I6XZ2V3.js.map +1 -0
- package/dist/external-JXNhdgzR.d.ts +154 -0
- package/dist/index.d.ts +29 -110
- package/dist/index.js +21 -616
- package/dist/index.js.map +1 -1
- package/package.json +1 -1
- package/dist/chunk-6L2Y7WUY.js +0 -440
- package/dist/chunk-6L2Y7WUY.js.map +0 -1
- package/dist/state-B6QkOxSb.d.ts +0 -38
|
@@ -0,0 +1,1039 @@
|
|
|
1
|
+
import { serializeError as serializeError$1, deserializeError as deserializeError$1 } from 'serialize-error';
|
|
2
|
+
import { AsyncLocalStorage } from 'async_hooks';
|
|
3
|
+
import crypto from 'crypto';
|
|
4
|
+
import { sql } from 'kysely';
|
|
5
|
+
|
|
6
|
+
// src/internal/errors.ts
// Compiled-TS string enum: every member's value is identical to its name.
var ErrorType = /* @__PURE__ */ ((ErrorType2) => {
  const members = [
    "INVALID_WORKFLOW_TRANSITION",
    "RUN_CANCELLED",
    "RUN_NOT_FOUND",
    "RUN_OUTSIDE_OF_WORKFLOW",
    "FATAL_ERROR",
    "NO_MESSAGE_AVAILABLE",
    "MAX_RETRIES_EXCEEDED",
    "ERROR_THAT_SHOULD_NEVER_HAPPEN",
    "SERIALIZATION_ERROR",
    "MAX_RECOVERY_ATTEMPTS_EXCEEDED",
    "WORKFLOW_NOT_FOUND",
    "TIMEOUT",
    "CANCEL",
    "DEADLINE",
    "RUN_NOT_CANCELLABLE",
    "QUEUE_NOT_FOUND"
  ];
  for (const member of members) {
    ErrorType2[member] = member;
  }
  return ErrorType2;
})(ErrorType || {});
|
|
26
|
+
/**
 * Root of the workflow error hierarchy. Carries a machine-readable `reason`
 * string alongside the human-readable message.
 */
var BaseError = class extends Error {
  constructor(message, reason) {
    super(message);
    this.reason = reason;
  }
};

/** The whole run exceeded its timeout budget. Reason: "timeout". */
var RunTimedOutError = class extends BaseError {
  constructor() {
    super("This workflow run has timed out", "timeout");
  }
};

/** Fallback error used when a run fails without a stored error payload. */
var UnknownError = class extends BaseError {
  constructor(message) {
    super(message || "An unknown error occurred", "unknown");
  }
};

/** The run passed its absolute wall-clock deadline. Reason: "deadline". */
var RunDeadlineExceededError = class extends BaseError {
  constructor() {
    super("This workflow run has exceeded its deadline", "deadline");
  }
};

/** The run was cancelled (via cancelRun or an aborted signal). */
var RunCancelledError = class extends BaseError {
  constructor() {
    super("This workflow run has been cancelled", "cancel");
  }
};

/** Raised when a crashed run has been re-attempted more times than allowed. */
var MaxRecoveryAttemptsExceededError = class extends BaseError {
  constructor(runId, attempts) {
    // attempts counts retries, so total tries = attempts + 1.
    const message = `Max recovery attempts exceeded for run "${runId}" after ${attempts + 1} attempts`;
    super(message, "max_recovery_attempts_exceeded");
  }
};

/** A single recorded operation exceeded its own timeout. */
var OperationTimedOutError = class extends BaseError {
  constructor(operationName) {
    super(`This operation "${operationName}" has timed out`, "timeout");
  }
};

/** No runs row exists for the given id. */
var RunNotFoundError = class extends BaseError {
  constructor(runId) {
    super(`Workflow run "${runId}" not found`, "run_not_found");
  }
};

/** A context-dependent API was invoked outside AsyncLocalStorage scope. */
var RunOutsideOfWorkflowError = class extends BaseError {
  constructor() {
    super("This function must be called within a workflow", "outside_workflow_context");
  }
};

/** Non-retryable failure: the workflow should stop immediately. */
var FatalError = class extends BaseError {
  constructor(message) {
    super(message, "fatal_error");
  }
};

/**
 * A step exhausted its retry budget. Keeps every per-attempt error so callers
 * can inspect the full history (`attemptErrors`, `stepName`, `maxAttempts`).
 */
var MaxRetriesExceededError = class extends BaseError {
  constructor(stepName, maxAttempts, errors) {
    const formattedErrors = errors.map((error, index) => `Attempt ${index + 1}: ${error.message}`).join(". ");
    super(
      `Step "${stepName}" failed after ${maxAttempts + 1} attempts. ${formattedErrors}`,
      "max_retries_exceeded"
    );
    this.attemptErrors = errors;
    this.stepName = stepName;
    this.maxAttempts = maxAttempts;
  }
};

/** JSON (de)serialization of a value or error payload failed. */
var SerializationError = class extends BaseError {
  constructor(message) {
    super(message, "serialization_error");
  }
};

/** The named workflow is not present in the registered workflows map. */
var WorkflowNotFoundError = class extends BaseError {
  constructor(workflowName) {
    super(`Workflow "${workflowName}" not found`, "workflow_not_found");
  }
};
|
|
106
|
+
|
|
107
|
+
// src/api/workflow.ts
// Run statuses from which a run can never transition again.
var TERMINAL_STATES = [
  "success",
  "error",
  "cancelled",
  "max_recovery_attempts_exceeded"
];

/**
 * Builds a workflow definition record from a name, an implementation function,
 * and optional settings (currently only `maxRecoveryAttempts`).
 */
function defineWorkflow(name, fn, options = {}) {
  const { maxRecoveryAttempts } = options;
  return { name, fn, maxRecoveryAttempts };
}
|
|
121
|
+
|
|
122
|
+
// src/internal/utils/sleep.ts
/** Resolves after roughly `ms` milliseconds (setTimeout-based delay). */
function sleep(ms) {
  return new Promise((resolve) => {
    setTimeout(resolve, ms);
  });
}
|
|
126
|
+
|
|
127
|
+
// src/internal/db/retry.ts
// Postgres SQLSTATE class prefixes that indicate transient, retry-worthy failures.
var RETRY_SQLSTATE_PREFIXES = /* @__PURE__ */ new Set([
  "08", // Connection Exception
  "40", // Transaction Rollback (deadlock_detected, serialization_failure)
  "53", // Insufficient Resources
  "55", // Object Not In Prerequisite State (lock_not_available)
  "57" // Operator Intervention (admin_shutdown, cannot_connect_now)
]);

// Specific SQLSTATE codes that are retryable regardless of their class prefix.
var RETRY_SQLSTATE_CODES = /* @__PURE__ */ new Set([
  "40003", // statement_completion_unknown
  "40001", // serialization_failure
  "40P01", // deadlock_detected
  "55P03" // lock_not_available
]);

// Node.js errno codes for transient network-level failures.
var RETRY_NODE_ERRNOS = /* @__PURE__ */ new Set([
  "ECONNRESET",
  "ECONNREFUSED",
  "EHOSTUNREACH",
  "ENETUNREACH",
  "ETIMEDOUT",
  "ECONNABORTED",
  "EPIPE"
]);
|
|
159
|
+
/**
 * Duck-types a Postgres driver error: an object carrying a 5-character string
 * `code` (a SQLSTATE). Rejects null/undefined and non-objects.
 */
function isPgDatabaseError(e) {
  if (e === null || e === void 0 || typeof e !== "object") {
    return false;
  }
  const { code } = e;
  return typeof code === "string" && code.length === 5;
}
|
|
162
|
+
/**
 * True when a SQLSTATE is retryable: either one of the explicitly listed codes
 * or any code whose two-character class prefix is in the retryable set.
 */
function sqlStateLooksRetryable(sqlstate) {
  if (!sqlstate) {
    return false;
  }
  if (RETRY_SQLSTATE_CODES.has(sqlstate)) {
    return true;
  }
  const classPrefix = sqlstate.toString().slice(0, 2);
  return RETRY_SQLSTATE_PREFIXES.has(classPrefix);
}
|
|
168
|
+
/** True when the error carries a Node errno code from the retryable set. */
function nodeErrnoLooksRetryable(e) {
  const { code } = e;
  if (!code) {
    return false;
  }
  return RETRY_NODE_ERRNOS.has(code);
}
|
|
172
|
+
/**
 * Heuristic last resort: scans an error message (or stack) for well-known
 * connection-failure phrases. Errno names are matched case-sensitively; the
 * remaining phrases are matched on the lowercased message.
 */
function messageLooksRetryable(msg) {
  if (msg.includes("ECONNREFUSED") || msg.includes("ECONNRESET")) {
    return true;
  }
  const m = msg.toLowerCase();
  const transientPhrases = [
    "connection timeout",
    "server closed the connection",
    "connection terminated unexpectedly",
    "client has encountered a connection error",
    "timeout exceeded when trying to connect",
    "could not connect to server",
    "connection pool exhausted",
    "too many clients"
  ];
  return transientPhrases.some((phrase) => m.includes(phrase));
}
|
|
176
|
+
/**
 * Breadth-first traversal over an error and everything it wraps: `cause`,
 * AggregateError-style `errors` arrays, and ad-hoc `.error` properties.
 * Yields each node (including the root and non-object values); objects are
 * visited at most once via a seen-set to tolerate cycles.
 */
function* unwrapErrors(e) {
  const pending = [e];
  const visited = new Set();
  while (pending.length > 0) {
    const current = pending.shift();
    if (current !== null && typeof current === "object") {
      // Skip (and do not re-yield) objects we have already visited.
      if (visited.has(current)) {
        continue;
      }
      visited.add(current);
      if (Array.isArray(current.errors)) {
        pending.push(...current.errors);
      }
      if (current.cause) {
        pending.push(current.cause);
      }
      if (current.error) {
        pending.push(current.error);
      }
    }
    yield current;
  }
}
|
|
194
|
+
/**
 * Walks the full error chain (via unwrapErrors) and reports whether any entry
 * looks like a transient database/network failure: a retryable SQLSTATE, a
 * retryable Node errno, or a recognized connection-failure message.
 */
function isRetriableDBError(err) {
  const looksTransient = (candidate) => {
    if (isPgDatabaseError(candidate) && sqlStateLooksRetryable(candidate.code)) {
      return true;
    }
    if (nodeErrnoLooksRetryable(candidate)) {
      return true;
    }
    if (candidate instanceof Error) {
      // Check the stack first: wrapped messages often only survive there.
      if (candidate.stack && messageLooksRetryable(candidate.stack)) {
        return true;
      }
      if (candidate.message && messageLooksRetryable(candidate.message)) {
        return true;
      }
    }
    return messageLooksRetryable(String(candidate));
  };
  for (const candidate of unwrapErrors(err)) {
    if (looksTransient(candidate)) {
      return true;
    }
  }
  return false;
}
|
|
211
|
+
/**
 * Runs `fn`, retrying forever on transient DB errors with jittered exponential
 * backoff. Non-retriable errors propagate immediately.
 *
 * Options: `initialBackoffMs` (default 1000), `maxBackoffMs` (default 60000),
 * `onRetry(error, attempt, delayMs)` — replaces the default console warning.
 */
async function withDbRetry(fn, options = {}) {
  const { initialBackoffMs = 1e3, maxBackoffMs = 6e4, onRetry } = options;
  let attempt = 0;
  let backoffMs = initialBackoffMs;
  for (;;) {
    try {
      return await fn();
    } catch (error) {
      if (!isRetriableDBError(error)) {
        throw error;
      }
      attempt += 1;
      // Jitter in [0.5, 1.5) de-synchronizes competing retry loops.
      const delayMs = Math.min(backoffMs * (0.5 + Math.random()), maxBackoffMs);
      if (onRetry) {
        onRetry(error, attempt, delayMs);
      } else {
        const detail = error instanceof Error ? error.message : String(error);
        console.warn(
          `Database connection failed: ${detail}. Retrying in ${(delayMs / 1e3).toFixed(2)}s (attempt ${attempt})`
        );
      }
      await sleep(delayMs);
      backoffMs = Math.min(backoffMs * 2, maxBackoffMs);
    }
  }
}
|
|
237
|
+
// Wraps any (de)serialization failure in the domain SerializationError.
const rethrowAsSerializationError = (error) => {
  throw new SerializationError(error.message);
};

/** JSON-encodes a value; throws SerializationError on failure (e.g. cycles). */
function serialize(value) {
  try {
    return JSON.stringify(value);
  } catch (error) {
    rethrowAsSerializationError(error);
  }
}

/** JSON-decodes a string; throws SerializationError on malformed input. */
function deserialize(value) {
  try {
    return JSON.parse(value);
  } catch (error) {
    rethrowAsSerializationError(error);
  }
}

/** Encodes an Error (via the serialize-error package) to a JSON string. */
function serializeError(error) {
  try {
    return JSON.stringify(serializeError$1(error));
  } catch (error2) {
    rethrowAsSerializationError(error2);
  }
}

/** Decodes a JSON string produced by serializeError back into an Error. */
function deserializeError(serialized) {
  try {
    return deserializeError$1(JSON.parse(serialized));
  } catch (error) {
    rethrowAsSerializationError(error);
  }
}
|
|
265
|
+
// Per-run execution context carried across awaits via AsyncLocalStorage.
var asyncLocalStorage = new AsyncLocalStorage();

/**
 * Returns the execution context of the currently running workflow.
 * Throws RunOutsideOfWorkflowError when called outside runWithExecutionContext.
 */
function getExecutionContext() {
  const store = asyncLocalStorage.getStore();
  if (!store) {
    throw new RunOutsideOfWorkflowError();
  }
  return store;
}

/** Invokes `callback` with `store` installed as the ambient execution context. */
function runWithExecutionContext(store, callback) {
  return asyncLocalStorage.run(store, callback);
}
|
|
276
|
+
/**
 * Builds the per-run execution context stored in AsyncLocalStorage: copies the
 * shared services off the parent context, pins the run's id/path/abort signal,
 * and creates a fresh OperationManager seeded with any replayable operations.
 */
function createExecutionContext({ ctx, abortSignal, runId, runPath, operations }) {
  const {
    db,
    messageEventBus,
    stateEventBus,
    workflowsMap,
    runEventBus,
    runRegistry,
    queueRegistry,
    executorId
  } = ctx;
  return {
    type: "execution",
    runId,
    runPath,
    executorId,
    abortSignal,
    operationManager: new OperationManager(db, runId, operations || []),
    messageEventBus,
    stateEventBus,
    workflowsMap,
    runEventBus,
    runRegistry,
    queueRegistry,
    db
  };
}
|
|
299
|
+
|
|
300
|
+
// src/internal/db/queries/get-run.ts
/**
 * Loads a run row by id and maps snake_case columns to the camelCase shape
 * used internally. Returns undefined when no row exists. Nullable columns
 * (inputs/output/error) are normalized to undefined.
 */
async function getRun(db, runId) {
  const columns = [
    "id",
    "inputs",
    "output",
    "error",
    "status",
    "change_id",
    "recovery_attempts",
    "workflow_name"
  ];
  const row = await db.selectFrom("runs").select(columns).where("id", "=", runId).executeTakeFirst();
  if (!row) {
    return void 0;
  }
  return {
    id: row.id,
    input: row.inputs ?? void 0,
    name: row.workflow_name,
    output: row.output ?? void 0,
    error: row.error ?? void 0,
    status: row.status,
    changeId: row.change_id,
    // recovery_attempts may come back as a bigint/string from the driver.
    recoveryAttempts: Number(row.recovery_attempts ?? 0)
  };
}
|
|
326
|
+
|
|
327
|
+
// src/internal/db/commands/insert-operation.ts
/**
 * Persists one recorded operation (result XOR error) for a run at the given
 * replay sequence position. `tx` may be a transaction or a plain connection.
 */
async function insertOperation(tx, runId, operationName, sequenceId, result, error) {
  const row = {
    run_id: runId,
    name: operationName,
    sequence_id: sequenceId,
    output: result,
    error
  };
  await tx.insertInto("operations").values(row).execute();
}
|
|
337
|
+
|
|
338
|
+
// src/internal/context/operation-manager.ts
/**
 * Tracks the deterministic-replay position of a run and persists new operation
 * results/errors. `operations` holds previously recorded operations to replay;
 * getOperationResult() consumes them (from the end of the array, matching how
 * they are loaded) while advancing the sequence counter.
 */
var OperationManager = class {
  // Next sequence number to hand out / replay position.
  sequenceId = 0;
  // Sequence id most recently handed to reserveSequenceId(), or null.
  lastReservedSequenceId = null;
  constructor(db, runId, operations = []) {
    this.db = db;
    this.runId = runId;
    this.operations = operations;
  }
  /** Pops the next replayable operation (or null) and advances the counter. */
  getOperationResult() {
    const replayed = this.operations.pop();
    if (!replayed) {
      return null;
    }
    this.sequenceId++;
    return replayed;
  }
  /** Claims the next sequence id for a new operation and remembers it. */
  reserveSequenceId() {
    const reserved = this.sequenceId;
    this.sequenceId = reserved + 1;
    this.lastReservedSequenceId = reserved;
    return reserved;
  }
  /** Current (not-yet-reserved) sequence position. */
  getCurrentSequenceId() {
    return this.sequenceId;
  }
  /**
   * Gets the sequence ID that was most recently reserved for the current operation.
   * This is the ID that will be (or was) recorded in the database for this operation.
   * Returns null if no sequence ID has been reserved yet.
   */
  getActiveSequenceId() {
    return this.lastReservedSequenceId;
  }
  /** Persists a successful operation result, retrying transient DB errors
   *  unless an explicit transaction is supplied. */
  async recordResult(operationName, sequenceId, result, tx) {
    if (tx) {
      await insertOperation(tx, this.runId, operationName, sequenceId, result ?? void 0);
      return;
    }
    await withDbRetry(async () => {
      await insertOperation(this.db, this.runId, operationName, sequenceId, result ?? void 0);
    });
  }
  /** Persists a failed operation's serialized error (same retry behavior). */
  async recordError(operationName, sequenceId, error, tx) {
    if (tx) {
      await insertOperation(tx, this.runId, operationName, sequenceId, void 0, error ?? void 0);
      return;
    }
    await withDbRetry(async () => {
      await insertOperation(this.db, this.runId, operationName, sequenceId, void 0, error ?? void 0);
    });
  }
};
|
|
404
|
+
/**
 * Replays a recorded operation: rethrows its deserialized error if one was
 * stored, otherwise returns its deserialized result (undefined when no result
 * was recorded).
 */
function returnOrThrowOperationResult(op) {
  if (op.error) {
    throw deserializeError(op.error);
  }
  // Matches both null and undefined.
  if (op.result == null) {
    return void 0;
  }
  return deserialize(op.result);
}
|
|
413
|
+
/**
 * Runs `callback` as a recorded (replayable) operation: reserves a sequence
 * slot, executes, then persists the serialized result — or the serialized
 * error on failure — under that slot.
 */
async function executeAndRecordOperation(operationManager, operationName, callback) {
  // Reserve the slot up front so the recorded row keeps its replay position
  // even when the callback fails.
  const seqId = operationManager.reserveSequenceId();
  try {
    const result = await callback();
    // Serializing inside the try means a SerializationError is recorded as
    // this operation's error by the catch below.
    const serializedResult = serialize(result);
    // Bail out before persisting if the run was cancelled meanwhile.
    await checkCancellation();
    await operationManager.recordResult(operationName, seqId, serializedResult);
    return result;
  } catch (error) {
    // Cancellation is intentionally NOT recorded as an operation error —
    // it propagates so the run itself is marked cancelled.
    if (error instanceof RunCancelledError) {
      throw error;
    }
    // Normalize non-Error throwables so they can be serialized.
    const err = error instanceof Error ? error : new Error(String(error));
    await operationManager.recordError(operationName, seqId, serializeError(err));
    throw error;
  }
}
|
|
430
|
+
/**
 * Throws RunCancelledError if the current run was cancelled, checking the
 * in-process abort signal first and only then the database row (the DB read
 * is skipped when the signal is already aborted).
 */
async function checkCancellation() {
  const { abortSignal, runId, db } = getExecutionContext();
  const cancelled = abortSignal.aborted ||
    (await withDbRetry(() => getRun(db, runId)))?.status === "cancelled";
  if (cancelled) {
    throw new RunCancelledError();
  }
}
|
|
440
|
+
|
|
441
|
+
// src/internal/db/queries/get-run-status.ts
/** Reads a run's status column; throws RunNotFoundError when the row is absent. */
async function getRunStatus(db, runId) {
  const row = await db.selectFrom("runs").select("status").where("id", "=", runId).executeTakeFirst();
  if (!row) {
    throw new RunNotFoundError(runId);
  }
  return row.status;
}
|
|
449
|
+
|
|
450
|
+
// src/internal/get-run-status.ts
/**
 * Context-aware status lookup.
 * - runtime context: prefer the in-memory run registry, fall back to the DB.
 * - execution context: replay a previously recorded "getRunStatus" operation
 *   if one exists, otherwise execute and record a fresh lookup.
 * - otherwise: plain DB lookup.
 */
async function getRunStatus2(ctx, runId) {
  const { db } = ctx;
  if (ctx.type === "runtime") {
    const { runRegistry } = ctx;
    const run = runRegistry.getRun(runId);
    if (run) {
      return deriveRunStatus(run);
    }
    return getRunStatus(db, runId);
  }
  if (ctx.type === "execution") {
    const { operationManager, runRegistry } = ctx;
    const op = operationManager.getOperationResult();
    if (op) {
      // BUG FIX: the replayed value was previously computed and discarded,
      // causing the operation to re-execute and record a duplicate row.
      // Return it, matching the replay path in runWorkflow.
      return returnOrThrowOperationResult(op);
    }
    const status = await executeAndRecordOperation(operationManager, "getRunStatus", async () => {
      const run = runRegistry.getRun(runId);
      if (run) {
        return deriveRunStatus(run);
      }
      return await getRunStatus(db, runId);
    });
    return status;
  }
  return await getRunStatus(db, runId);
}
|
|
478
|
+
/**
 * Derives the status of an in-memory run entry: "cancelled" when its abort
 * signal fired, otherwise mapped from its promise state (pending → pending,
 * fulfilled → success, anything else → error).
 */
async function deriveRunStatus(runEntry) {
  if (runEntry.store.abortSignal.aborted) {
    return "cancelled";
  }
  switch (runEntry.getPromiseState()) {
    case "pending":
      return "pending";
    case "fulfilled":
      return "success";
    default:
      return "error";
  }
}
|
|
491
|
+
|
|
492
|
+
// src/internal/wait-for-run-result.ts
/**
 * Waits for a run's terminal result. In an execution context the wait is
 * itself a recorded operation: a previously recorded outcome is replayed,
 * otherwise the wait runs and its result is persisted for future replays.
 */
async function waitForRunResult(ctx, runId) {
  if (ctx.type === "execution") {
    const { operationManager } = ctx;
    const op = operationManager.getOperationResult();
    if (op) {
      // BUG FIX: the replayed value was previously discarded, so the wait
      // re-executed and recorded a duplicate operation. Return it instead,
      // matching the replay path in runWorkflow.
      return returnOrThrowOperationResult(op);
    }
    const result = await executeAndRecordOperation(
      operationManager,
      "waitForRunResult",
      async () => {
        return await getOrSubscribeToRunResult(ctx, runId);
      }
    );
    return result;
  }
  return await getOrSubscribeToRunResult(ctx, runId);
}
|
|
511
|
+
/**
 * Resolves a run's final result as `{ success, data? , error? }`.
 * If the run is already in a terminal state the stored row is translated
 * immediately; otherwise this subscribes to the run event bus and resolves
 * once a terminal transition is observed.
 */
async function getOrSubscribeToRunResult(ctx, runId) {
  const { db, runEventBus } = ctx;
  const run = await getRun(db, runId);
  if (!run) {
    return {
      error: new RunNotFoundError(runId),
      success: false
    };
  }
  // Fast path: the run already finished — map the stored row to a result.
  switch (run.status) {
    case "success":
      return {
        data: run.output ? deserialize(run.output) : void 0,
        success: true
      };
    case "error":
      return {
        error: run.error ? deserializeError(run.error) : new UnknownError(),
        success: false
      };
    case "cancelled":
      return {
        error: new RunCancelledError(),
        success: false
      };
    case "max_recovery_attempts_exceeded":
      return {
        error: run.error ? deserializeError(run.error) : new MaxRecoveryAttemptsExceededError(runId, run.recoveryAttempts),
        success: false
      };
  }
  // Slow path: wait for a terminal event, then re-read the row for the
  // authoritative outcome.
  // NOTE(review): a run reaching a terminal state between the getRun() above
  // and this subscription would leave the promise pending — confirm the event
  // bus replays recent events or callers guard with a timeout.
  return new Promise((resolve, reject) => {
    const unsubscribe = runEventBus.subscribe(runId, "*", async (e) => {
      if (TERMINAL_STATES.includes(e.status)) {
        // Unsubscribe first so later events cannot settle the promise twice.
        unsubscribe();
        try {
          const run2 = await getRun(db, runId);
          if (!run2) {
            reject(new RunNotFoundError(runId));
            return;
          }
          switch (run2.status) {
            case "success":
              resolve({
                data: run2.output ? deserialize(run2.output) : void 0,
                success: true
              });
              return;
            case "error":
              resolve({
                error: run2.error ? deserializeError(run2.error) : new UnknownError(),
                success: false
              });
              return;
            case "cancelled":
              resolve({
                error: new RunCancelledError(),
                success: false
              });
              return;
            case "max_recovery_attempts_exceeded":
              resolve({
                error: run2.error ? deserializeError(run2.error) : new MaxRecoveryAttemptsExceededError(runId, run2.recoveryAttempts),
                success: false
              });
              return;
          }
        } catch (error) {
          // A missing row is a hard failure; anything else (e.g. a
          // deserialization error) is surfaced as an unsuccessful result.
          if (error instanceof RunNotFoundError) {
            reject(error);
            return;
          }
          resolve({
            error,
            success: false
          });
        }
      }
    });
  });
}
|
|
592
|
+
|
|
593
|
+
// src/internal/run.ts
/**
 * Public handle for a started run: exposes its id plus lazy `getStatus` and
 * `waitForResult` accessors bound to the originating context.
 */
function createRunHandle(runtimeContext, id) {
  return {
    id,
    getStatus() {
      return getRunStatus2(runtimeContext, id);
    },
    waitForResult() {
      return waitForRunResult(runtimeContext, id);
    }
  };
}
|
|
601
|
+
/**
 * Writes a run's terminal outcome (output/error + derived status) and bumps
 * updated_at; returns the row's new change_id. Status precedence:
 * cancelled > error > success.
 */
async function recordRunResult(db, runId, result, cancelled) {
  const status = cancelled ? "cancelled" : result.error ? "error" : "success";
  const rows = await db.updateTable("runs").set({
    output: result.result,
    error: result.error,
    status,
    updated_at: sql`(extract(epoch from now()) * 1000)::bigint`
  }).where("id", "=", runId).returning(["change_id"]).execute();
  return rows[0].change_id;
}
|
|
610
|
+
|
|
611
|
+
// src/internal/execute-workflow.ts
/**
 * Starts the workflow function for a run: builds its execution context
 * (with a combined cancel/timeout abort signal), launches the body, records
 * the terminal result in the DB, and registers the live run in the registry.
 * The returned promise resolves once registration is done — not when the run
 * completes; completion is tracked via the registered promise.
 */
async function executeWorkflow(ctx, params) {
  const { db, runRegistry } = ctx;
  const { options, runId, runPath, fn, args, operations } = params;
  const abortController = new AbortController();
  const [deadline, deadlineReason] = getDeadlineAndReason({
    timeout: options?.timeout,
    deadline: options?.deadline
  });
  const runStore = createExecutionContext({
    ctx,
    // Combine explicit cancellation with an optional deadline timer.
    abortSignal: AbortSignal.any(
      [abortController.signal].concat(deadline ? [AbortSignal.timeout(deadline - Date.now())] : [])
    ),
    runId,
    runPath,
    operations
  });
  const executionPromise = (async () => {
    try {
      const result = await runWithExecutionContext(runStore, async () => {
        return await runWithTimeout(async () => {
          return await fn(...args);
        }, deadlineReason);
      });
      // NOTE(review): falsy results (0, "", false) are stored as undefined
      // output here — confirm that is intentional.
      await recordRunResult(db, runId, { result: result ? serialize(result) : void 0 });
      return result;
    } catch (error) {
      if (error instanceof RunCancelledError) {
        // Third argument marks the run row as "cancelled" rather than "error".
        await recordRunResult(db, runId, { error: serializeError(error) }, true);
      } else {
        await recordRunResult(db, runId, { error: serializeError(error) });
      }
      throw error;
    } finally {
      // Always drop the run from the in-memory registry when it settles.
      runRegistry.unregisterRun(runId);
    }
  })();
  // Register after creating the promise; the synchronous prefix of the
  // IIFE above has already run by this point.
  runRegistry.registerRun(runId, {
    store: runStore,
    promise: executionPromise,
    abortController
  });
}
|
|
655
|
+
/**
 * Resolves a relative `timeout` (ms from now) and/or absolute `deadline`
 * (epoch ms) into a single effective deadline plus the reason label
 * ("timeout" or "deadline") of whichever bound is tighter.
 * Returns [undefined, undefined] when neither bound is set.
 */
function getDeadlineAndReason({ timeout, deadline }) {
  const now = Date.now();
  const timeoutDeadline = timeout ? now + timeout : void 0;
  if (!timeoutDeadline && !deadline) {
    return [void 0, void 0];
  }
  if (!deadline) {
    return [timeoutDeadline, "timeout"];
  }
  if (!timeoutDeadline) {
    return [deadline, "deadline"];
  }
  return [
    Math.min(timeoutDeadline, deadline),
    timeoutDeadline < deadline ? "timeout" : "deadline"
  ];
}
|
|
673
|
+
/**
 * Races `fn()` against the current execution context's abort signal.
 * An abort caused by AbortSignal.timeout surfaces as RunTimedOutError or
 * RunDeadlineExceededError depending on `deadlineReason`; any other abort
 * surfaces as RunCancelledError.
 */
async function runWithTimeout(fn, deadlineReason) {
  const { abortSignal } = getExecutionContext();
  const abortPromise = new Promise((_, reject) => {
    // Fail fast (synchronously rejecting the race) if already aborted.
    abortSignal.throwIfAborted();
    abortSignal.addEventListener(
      "abort",
      () => {
        // AbortSignal.timeout() aborts with a DOMException named "TimeoutError".
        if (abortSignal.reason?.name === "TimeoutError") {
          reject(new RunTimedOutError());
          return;
        }
        reject(new RunCancelledError());
      },
      { once: true }
    );
  });
  const callPromise = fn();
  try {
    return await Promise.race([callPromise, abortPromise]);
  } catch (error) {
    if (error instanceof RunTimedOutError) {
      // Translate the generic timeout into the error matching whichever
      // bound (relative timeout vs absolute deadline) was effective.
      if (deadlineReason === "timeout") {
        throw new RunTimedOutError();
      } else if (deadlineReason === "deadline") {
        throw new RunDeadlineExceededError();
      }
      throw error;
    }
    // Swallow the (possibly later) rejection of the losing branch so it
    // does not become an unhandled promise rejection.
    await callPromise.catch(() => {
    });
    throw error;
  }
}
|
|
706
|
+
/**
 * Inserts a new run row in "pending" status with server-side epoch-ms
 * timestamps, returning its id, path and change_id.
 */
async function insertPendingRun(db, options) {
  const { runId, path, inputs, executorId, workflowName } = options;
  const result = await db.insertInto("runs").values({
    id: runId,
    path,
    inputs,
    executor_id: executorId,
    workflow_name: workflowName,
    status: "pending",
    started_at_epoch_ms: sql`(extract(epoch from now()) * 1000)::bigint`,
    created_at: sql`(extract(epoch from now()) * 1000)::bigint`,
    updated_at: sql`(extract(epoch from now()) * 1000)::bigint`
  }).returning(["id", "path", "change_id"]).executeTakeFirst();
  return {
    runId: result.id,
    path: result.path,
    changeId: result.change_id
  };
}
|
|
724
|
+
|
|
725
|
+
// src/internal/run-workflow.ts
/**
 * Starts a workflow run. In an execution context the start itself is a
 * recorded operation (replayed on recovery); in a runtime context the pending
 * row is inserted directly. Returns a run handle for the new run.
 * Throws WorkflowNotFoundError for unknown workflow names.
 */
async function runWorkflow(ctx, workflowName, args = [], options = {}) {
  const { db, executorId, workflowsMap, type } = ctx;
  const workflow = workflowsMap[workflowName];
  if (!workflow) {
    throw new WorkflowNotFoundError(workflowName);
  }
  const newRunId = options.id ?? crypto.randomUUID();
  let newRunPath = [];
  if (type === "execution") {
    const { operationManager, runPath } = ctx;
    const op = operationManager.getOperationResult();
    if (op) {
      // Replay: the child run was already started in a previous attempt.
      if (op.error) {
        throw deserializeError(op.error);
      }
      const replayed = deserialize(op.result);
      return createRunHandle(ctx, replayed.runId);
    }
    const newRun = await executeAndRecordOperation(operationManager, "runWorkflow", async () => {
      const pending = {
        runId: newRunId,
        runPath: [...runPath, newRunId],
        workflowName
      };
      // BUG FIX: this insert was previously fired without `await`, so the
      // child run could start before (or without) its DB row existing and
      // insert failures surfaced as unhandled rejections.
      await withDbRetry(async () => {
        return await insertPendingRun(db, {
          ...pending,
          path: pending.runPath,
          inputs: serialize(args),
          executorId
        });
      });
      return pending;
    });
    newRunPath = newRun.runPath;
  }
  if (type === "runtime") {
    const { path } = await insertPendingRun(db, {
      runId: newRunId,
      path: [newRunId],
      inputs: serialize(args),
      executorId,
      workflowName
    });
    newRunPath = path;
  }
  await executeWorkflow(ctx, {
    runId: newRunId,
    runPath: newRunPath,
    fn: workflow.fn,
    args,
    options
  });
  return createRunHandle(ctx, newRunId);
}
|
|
781
|
+
/**
 * Marks a run as cancelled in the database. With `cascade: true` all
 * non-terminal descendant runs (path containment) are cancelled in the same
 * transaction. Returns { path, changeId } of the cancelled run, undefined if
 * the run exists but is already terminal, and throws RunNotFoundError when no
 * row exists.
 */
async function cancelRun(runId, db, options = {}) {
  if (options.cascade) {
    return db.transaction().execute(async (tx) => {
      const result = await tx.updateTable("runs").set({
        status: "cancelled",
        updated_at: sql`(extract(epoch from now()) * 1000)::bigint`
      }).where((eb) => eb.and([eb("id", "=", runId), eb("status", "not in", TERMINAL_STATES)])).returning(["change_id", "path"]).executeTakeFirst();
      if (!result) {
        // No row updated: distinguish "already terminal" from "missing".
        const exists = await tx.selectFrom("runs").select([]).where("id", "=", runId).executeTakeFirst();
        if (exists) {
          return void 0;
        }
        throw new RunNotFoundError(runId);
      }
      // BUG FIX: the descendant filter previously omitted
      // "max_recovery_attempts_exceeded", so terminal children in that state
      // were flipped to "cancelled" — inconsistent with the parent update,
      // which excludes all TERMINAL_STATES.
      await sql`
        UPDATE runs
        SET
          status = ${"cancelled"},
          updated_at = (extract(epoch from now()) * 1000)::bigint
        WHERE path @> ARRAY[${runId}]::text[]
          AND id != ${runId}
          AND status NOT IN (${"cancelled"}, ${"success"}, ${"error"}, ${"max_recovery_attempts_exceeded"})
      `.execute(tx);
      return {
        path: result.path,
        changeId: result.change_id
      };
    });
  } else {
    const result = await db.updateTable("runs").set({
      status: "cancelled",
      updated_at: sql`(extract(epoch from now()) * 1000)::bigint`
    }).where((eb) => eb.and([eb("id", "=", runId), eb("status", "not in", TERMINAL_STATES)])).returning(["path", "change_id"]).executeTakeFirst();
    if (!result) {
      // NOTE(review): unlike the cascade branch, an already-terminal run
      // throws RunNotFoundError here instead of returning undefined —
      // confirm whether that asymmetry is intended.
      throw new RunNotFoundError(runId);
    }
    return {
      path: result.path,
      changeId: result.change_id
    };
  }
}
|
|
823
|
+
|
|
824
|
+
// src/internal/cancel-run.ts
/**
 * Cancels a run: updates the database first, then — when the context has a
 * run registry (execution/runtime) — aborts the matching in-process run(s).
 * With `cascade: true` every in-process run along the cancelled run's path is
 * aborted as well.
 */
async function cancelRun2(ctx, runId, options = {}) {
  const hasRegistry = ctx.type === "execution" || ctx.type === "runtime";
  const { db } = ctx;
  const run = await cancelRun(runId, db, options);
  if (!run) {
    // Already terminal — nothing to abort.
    return;
  }
  if (!hasRegistry) {
    return;
  }
  const { runRegistry } = ctx;
  if (options.cascade) {
    for (const pathPart of run.path) {
      runRegistry.getRun(pathPart)?.abortController.abort();
    }
    return;
  }
  runRegistry.getRun(runId)?.abortController.abort();
}
|
|
850
|
+
/**
 * Insert a new "queued" run row.
 *
 * Deduplication: conflicts on (queue_name, queue_deduplication_id) are
 * ignored (`doNothing`), in which case no row is returned and both fields of
 * the result are undefined.
 *
 * NOTE(review): callers (queueWorkflow) also pass `timeout` and `deadline`
 * in options, but this insert does not persist them — confirm whether
 * `timeout_ms` / `deadline_epoch_ms` should be written here.
 *
 * @returns {{ runId: string | undefined, changeId: unknown }}
 */
async function enqueueRun(db, options) {
  const {
    runId,
    path,
    inputs,
    queueName,
    queuePartitionKey,
    deduplicationId,
    workflowName,
    recoveryAttempts
  } = options;
  const row = {
    id: runId,
    path,
    inputs,
    queue_name: queueName,
    queue_partition_key: queuePartitionKey,
    queue_deduplication_id: deduplicationId,
    workflow_name: workflowName,
    status: "queued",
    recovery_attempts: recoveryAttempts,
    // Server-side clock, epoch milliseconds.
    created_at: sql`(extract(epoch from now()) * 1000)::bigint`,
    updated_at: sql`(extract(epoch from now()) * 1000)::bigint`
  };
  const inserted = await db
    .insertInto("runs")
    .values(row)
    .onConflict((oc) => oc.columns(["queue_name", "queue_deduplication_id"]).doNothing())
    .returning(["id", "change_id"])
    .executeTakeFirst();
  return { runId: inserted?.id, changeId: inserted?.change_id };
}
|
|
869
|
+
|
|
870
|
+
// src/internal/queue-workflow.ts
async function queueWorkflow(ctx, queueName, workflowName, args, options) {
  // Enqueue a new workflow run on `queueName` and return a handle to it.
  //
  // Inside a workflow ("execution" context) the enqueue is recorded as a
  // deterministic operation: on replay the previously recorded run id (or
  // error) is used instead of inserting again. Outside a workflow the insert
  // is performed directly, wrapped in DB retry.
  const { db } = ctx;
  // Caller may pin the run id (e.g. for idempotency); otherwise generate one.
  const newRunId = options?.id ?? crypto.randomUUID();
  if (ctx.type === "execution") {
    const { operationManager, runPath } = ctx;
    const op = operationManager.getOperationResult();
    if (op) {
      // Replay path: rethrow the recorded failure, or reuse the recorded id.
      if (op.error) {
        throw deserializeError(op.error);
      }
      const newRunId2 = deserialize(op.result);
      return createRunHandle(ctx, newRunId2);
    }
    await executeAndRecordOperation(operationManager, "queueWorkflow", async () => {
      const { runId } = await enqueueRun(db, {
        runId: newRunId,
        // Child runs extend the parent's ancestry path.
        path: [...runPath, newRunId],
        inputs: serialize(args),
        workflowName,
        queueName,
        timeout: options?.timeout,
        deadline: options?.deadline
      });
      // NOTE(review): on a deduplication conflict enqueueRun returns
      // runId === undefined, yet the handle below is still created for
      // `newRunId` rather than the pre-existing run — confirm intent.
      return runId;
    });
    return createRunHandle(ctx, newRunId);
  }
  await withDbRetry(
    async () => await enqueueRun(db, {
      runId: newRunId,
      // A top-level run is its own path root.
      path: [newRunId],
      inputs: serialize(args),
      workflowName,
      queueName,
      timeout: options?.timeout,
      deadline: options?.deadline
    })
  );
  return createRunHandle(ctx, newRunId);
}
|
|
911
|
+
// Queue that resumed runs are re-queued onto (see resumeRun), as opposed to
// the user-supplied queues passed to queueWorkflow.
var INTERNAL_QUEUE_NAME = "_helical_internal_queue";
|
|
912
|
+
/**
 * Re-queue a paused ("pending") run onto the internal queue, resetting its
 * deadline, timeout and recovery-attempt bookkeeping.
 *
 * @throws {RunNotFoundError} when no row was updated — i.e. the run does not
 *   exist or is not currently in the "pending" state.
 */
async function resumeRun(db, runId) {
  const result = await db.updateTable("runs").set({
    status: "queued",
    queue_name: INTERNAL_QUEUE_NAME,
    deadline_epoch_ms: null,
    timeout_ms: null,
    recovery_attempts: 0,
    started_at_epoch_ms: sql`(extract(epoch from now()) * 1000)::bigint`,
    updated_at: sql`(extract(epoch from now()) * 1000)::bigint`
  }).where("id", "=", runId).where("status", "=", "pending").executeTakeFirst();
  // Fix: the original used `.execute()`, which resolves to an array — always
  // truthy — so its `if (!result)` guard could never throw. executeTakeFirst()
  // yields a single UpdateResult whose numUpdatedRows (bigint) tells us
  // whether any row actually matched.
  if (!result || result.numUpdatedRows === 0n) {
    throw new RunNotFoundError(runId);
  }
}
|
|
926
|
+
|
|
927
|
+
// src/internal/resume-run.ts
/**
 * Context-aware wrapper around the DB-level resumeRun.
 *
 * In an "execution" context the call is recorded as a deterministic workflow
 * operation: on replay the recorded outcome is returned (or its recorded
 * error rethrown) instead of touching the database again. Outside a workflow
 * the DB call runs directly, wrapped in retry.
 */
async function resumeRun2(ctx, runId) {
  const { db } = ctx;
  if (ctx.type === "execution") {
    const { operationManager } = ctx;
    const op = operationManager.getOperationResult();
    if (op) {
      // Fix: the original called returnOrThrowOperationResult(op) without
      // `return`, so a successful replay fell through and re-executed (and
      // re-recorded) the operation. Every sibling helper (sendMessage,
      // getState2, queueWorkflow) returns early here.
      return returnOrThrowOperationResult(op);
    }
    await executeAndRecordOperation(operationManager, "resumeRun", async () => {
      await withDbRetry(async () => await resumeRun(db, runId));
    });
  } else {
    await withDbRetry(async () => await resumeRun(db, runId));
  }
}
|
|
943
|
+
|
|
944
|
+
// src/internal/db/commands/insert-message.ts

/**
 * Persist one message row addressed to a run.
 *
 * @param db - Kysely-style database handle.
 * @param options - { destinationWorkflowId, messageType, data } where `data`
 *   is the already-serialized payload.
 * @returns whatever the insert's execute() resolves to.
 */
async function insertMessage(db, options) {
  const { destinationWorkflowId, messageType, data } = options;
  const row = {
    destination_run_id: destinationWorkflowId,
    type: messageType,
    payload: data
  };
  return await db.insertInto("messages").values(row).execute();
}
|
|
952
|
+
|
|
953
|
+
// src/internal/send-message.ts

/**
 * Send a message to another run.
 *
 * `target` is either a run id string or an object exposing `.id`; `name` is
 * either a message-type string or an object exposing `.name`. Inside a
 * workflow ("execution" context) the send is recorded as a deterministic
 * operation and skipped on replay.
 */
async function sendMessage(ctx, target, name, data) {
  const { db } = ctx;
  const destinationWorkflowId = typeof target === "string" ? target : target.id;
  const messageType = typeof name === "string" ? name : name.name;
  const serializedData = serialize(data);
  // Shared delivery closure; deliberately resolves to undefined so the
  // recorded operation result stays undefined, as in the original.
  const deliver = async () => {
    await insertMessage(db, {
      destinationWorkflowId,
      messageType,
      data: serializedData
    });
  };
  if (ctx.type !== "execution") {
    await deliver();
    return;
  }
  const { operationManager } = ctx;
  const op = operationManager.getOperationResult();
  if (op) {
    // Replay path: reuse the recorded outcome.
    return returnOrThrowOperationResult(op);
  }
  await executeAndRecordOperation(operationManager, "sendMessage", deliver);
}
|
|
980
|
+
|
|
981
|
+
// src/internal/db/queries/get-state.ts

/**
 * Look up a single state value for a run.
 *
 * @returns the stored (serialized) value, or undefined when no row exists
 *   for this (run_id, key) pair.
 */
async function getState(db, runId, key) {
  const row = await db
    .selectFrom("state")
    .select(["key", "value", "change_id"])
    .where("run_id", "=", runId)
    .where("key", "=", key)
    .executeTakeFirst();
  return row ? row.value : void 0;
}
|
|
989
|
+
|
|
990
|
+
// src/internal/get-state.ts

/**
 * Internal control-flow error: thrown when the requested state row does not
 * exist yet, so the caller (getState2) can wait for a state notification and
 * retry.
 *
 * Fix: the original was a bare `class extends Error {}` — its `name` read
 * "Error" and it carried no message, making logs/stack traces ambiguous.
 * Setting both is backward compatible: `instanceof` checks and zero-arg
 * construction are unchanged.
 */
var StateNotAvailableError = class extends Error {
  constructor(message = "state not available yet") {
    super(message);
    this.name = "StateNotAvailableError";
  }
};
|
|
993
|
+
async function getState2(ctx, target, key) {
  // Read a piece of published state from another run, blocking until the
  // state key exists: on a miss, wait for an event-bus notification for this
  // (run, key) pair and retry the query.
  //
  // In an "execution" context the read is recorded as a deterministic
  // operation and replayed from the recorded result on subsequent passes.
  // NOTE(review): the replay path returns the recorded op result via
  // returnOrThrowOperationResult, while the fresh path returns
  // deserialize(state) — presumably returnOrThrowOperationResult
  // deserializes internally; confirm the two paths agree.
  const { db, stateEventBus } = ctx;
  const destinationWorkflowId = typeof target === "string" ? target : target.id;
  const stateKey = typeof key === "string" ? key : key.name;
  if (ctx.type === "execution") {
    const { operationManager } = ctx;
    const op = operationManager.getOperationResult();
    if (op) {
      return returnOrThrowOperationResult(op);
    }
  }
  while (true) {
    try {
      return await withDbRetry(async () => {
        const state = await getState(db, destinationWorkflowId, stateKey);
        // NOTE(review): a falsy serialized value would be treated as
        // "missing" here — assumes serialized state is always truthy;
        // confirm against the serialize() representation.
        if (!state) {
          throw new StateNotAvailableError();
        }
        if (ctx.type === "execution") {
          const { operationManager } = ctx;
          // Record the serialized state as the operation result for replay.
          // NOTE(review): this recording happens inside withDbRetry, so a
          // retried attempt would record again — confirm idempotence.
          await executeAndRecordOperation(operationManager, "getState", async () => {
            return state;
          });
        }
        return deserialize(state);
      });
    } catch (error) {
      if (error instanceof StateNotAvailableError) {
        // Key not published yet: block until notified, then retry the query.
        await waitForStateNotification(stateEventBus, destinationWorkflowId, stateKey);
        continue;
      }
      throw error;
    }
  }
}
|
|
1028
|
+
/**
 * Wait (once) for the next state notification for (runId, key) and resolve
 * with the published state. The subscription is removed after the first
 * delivery.
 *
 * Fix: the original invoked `unsubscribe()` from inside the callback via a
 * `const` binding assigned from subscribe()'s return value; a bus that
 * delivers synchronously during subscribe() would hit the temporal dead zone
 * and throw a ReferenceError. This version tolerates both synchronous and
 * asynchronous delivery and never unsubscribes twice.
 */
async function waitForStateNotification(stateEventBus, runId, key) {
  return new Promise((resolve) => {
    let delivered = false;
    let unsubscribe;
    const onState = (state) => {
      delivered = true;
      if (unsubscribe) {
        unsubscribe();
        unsubscribe = void 0;
      }
      resolve(state);
    };
    unsubscribe = stateEventBus.subscribe(runId, key, onState);
    // Synchronous delivery: the callback already fired before we had the
    // unsubscribe handle — clean up now.
    if (delivered && unsubscribe) {
      unsubscribe();
      unsubscribe = void 0;
    }
  });
}
|
|
1036
|
+
|
|
1037
|
+
// Public surface of this generated chunk. The context-aware wrappers are
// re-exported under their public names (cancelRun2 -> cancelRun,
// resumeRun2 -> resumeRun, getRunStatus2 -> getRunStatus).
export { BaseError, ErrorType, FatalError, MaxRecoveryAttemptsExceededError, MaxRetriesExceededError, OperationTimedOutError, RunCancelledError, RunDeadlineExceededError, RunNotFoundError, RunOutsideOfWorkflowError, RunTimedOutError, SerializationError, TERMINAL_STATES, UnknownError, WorkflowNotFoundError, cancelRun2 as cancelRun, createRunHandle, defineWorkflow, deserialize, executeAndRecordOperation, executeWorkflow, getExecutionContext, getRun, getRunStatus2 as getRunStatus, getState, getState2, queueWorkflow, resumeRun2 as resumeRun, returnOrThrowOperationResult, runWorkflow, sendMessage, serialize, serializeError, sleep, waitForRunResult, withDbRetry };
|
|
1038
|
+
//# sourceMappingURL=chunk-7I6XZ2V3.js.map
|