@aikirun/worker 0.16.0 → 0.18.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -0
- package/dist/index.d.ts +1 -4
- package/dist/index.js +1236 -25
- package/package.json +5 -4
package/README.md
CHANGED
|
@@ -15,6 +15,7 @@ import { worker } from "@aikirun/worker";
|
|
|
15
15
|
import { client } from "@aikirun/client";
|
|
16
16
|
import { orderWorkflowV1 } from "./workflows.ts";
|
|
17
17
|
|
|
18
|
+
// Set AIKI_API_KEY env variable or pass apiKey option
|
|
18
19
|
const aikiClient = client({
|
|
19
20
|
url: "http://localhost:9850",
|
|
20
21
|
redis: { host: "localhost", port: 6379 },
|
package/dist/index.d.ts
CHANGED
|
@@ -9,10 +9,7 @@ type IsSubtype<SubT, SuperT> = SubT extends SuperT ? true : false;
|
|
|
9
9
|
type And<T extends NonEmptyArray<boolean>> = T extends [infer First, ...infer Rest] ? false extends First ? false : Rest extends NonEmptyArray<boolean> ? And<Rest> : true : never;
|
|
10
10
|
type Or<T extends NonEmptyArray<boolean>> = T extends [infer First, ...infer Rest] ? true extends First ? true : Rest extends NonEmptyArray<boolean> ? Or<Rest> : false : never;
|
|
11
11
|
type PathFromObject<T, IncludeArrayKeys extends boolean = false> = T extends T ? PathFromObjectInternal<T, IncludeArrayKeys> : never;
|
|
12
|
-
type PathFromObjectInternal<T, IncludeArrayKeys extends boolean> = And<[
|
|
13
|
-
IsSubtype<T, object>,
|
|
14
|
-
Or<[IncludeArrayKeys, NonArrayObject<T> extends never ? false : true]>
|
|
15
|
-
]> extends true ? {
|
|
12
|
+
type PathFromObjectInternal<T, IncludeArrayKeys extends boolean> = And<[IsSubtype<T, object>, Or<[IncludeArrayKeys, NonArrayObject<T> extends never ? false : true]>]> extends true ? {
|
|
16
13
|
[K in Exclude<keyof T, symbol>]-?: And<[
|
|
17
14
|
IsSubtype<NonNullable<T[K]>, object>,
|
|
18
15
|
Or<[IncludeArrayKeys, NonArrayObject<NonNullable<T[K]>> extends never ? false : true]>
|
package/dist/index.js
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
// ../../lib/array/utils.ts
|
|
2
2
|
function isNonEmptyArray(value) {
|
|
3
|
-
return value.length > 0;
|
|
3
|
+
return value !== void 0 && value.length > 0;
|
|
4
4
|
}
|
|
5
5
|
|
|
6
6
|
// ../../lib/async/delay.ts
|
|
@@ -22,13 +22,6 @@ function delay(ms, options) {
|
|
|
22
22
|
});
|
|
23
23
|
}
|
|
24
24
|
|
|
25
|
-
// ../../lib/async/fire-and-forget.ts
|
|
26
|
-
function fireAndForget(promise, onError) {
|
|
27
|
-
promise.catch((error) => {
|
|
28
|
-
onError(error instanceof Error ? error : new Error(String(error)));
|
|
29
|
-
});
|
|
30
|
-
}
|
|
31
|
-
|
|
32
25
|
// ../../lib/object/overrider.ts
|
|
33
26
|
function set(obj, path, value) {
|
|
34
27
|
const keys = path.split(".");
|
|
@@ -60,18 +53,1230 @@ var objectOverrider = (defaultObj) => (obj) => {
|
|
|
60
53
|
};
|
|
61
54
|
|
|
62
55
|
// worker.ts
|
|
63
|
-
import { INTERNAL } from "@aikirun/types/symbols";
|
|
56
|
+
import { INTERNAL as INTERNAL7 } from "@aikirun/types/symbols";
|
|
64
57
|
import {
|
|
65
|
-
|
|
66
|
-
|
|
67
|
-
|
|
58
|
+
NonDeterminismError as NonDeterminismError3,
|
|
59
|
+
WorkflowRunFailedError as WorkflowRunFailedError4,
|
|
60
|
+
WorkflowRunNotExecutableError as WorkflowRunNotExecutableError2,
|
|
61
|
+
WorkflowRunRevisionConflictError as WorkflowRunRevisionConflictError6,
|
|
62
|
+
WorkflowRunSuspendedError as WorkflowRunSuspendedError5
|
|
68
63
|
} from "@aikirun/types/workflow-run";
|
|
69
64
|
import {
|
|
70
65
|
createEventWaiters,
|
|
66
|
+
createReplayManifest,
|
|
71
67
|
createSleeper,
|
|
72
68
|
workflowRegistry,
|
|
73
|
-
workflowRunHandle
|
|
69
|
+
workflowRunHandle as workflowRunHandle2
|
|
74
70
|
} from "@aikirun/workflow";
|
|
71
|
+
|
|
72
|
+
// ../workflow/system/cancel-child-runs.ts
|
|
73
|
+
import { NON_TERMINAL_WORKFLOW_RUN_STATUSES } from "@aikirun/types/workflow-run";
|
|
74
|
+
|
|
75
|
+
// ../../lib/address/index.ts
|
|
76
|
+
function getTaskAddress(name, inputHash) {
|
|
77
|
+
return `${name}:${inputHash}`;
|
|
78
|
+
}
|
|
79
|
+
function getWorkflowRunAddress(name, versionId, referenceId) {
|
|
80
|
+
return `${name}:${versionId}:${referenceId}`;
|
|
81
|
+
}
|
|
82
|
+
|
|
83
|
+
// ../../lib/crypto/hash.ts
|
|
84
|
+
import { createHash } from "crypto";
|
|
85
|
+
|
|
86
|
+
// ../../lib/json/stable-stringify.ts
|
|
87
|
+
function stableStringify(value) {
|
|
88
|
+
return stringifyValue(value);
|
|
89
|
+
}
|
|
90
|
+
function stringifyValue(value) {
|
|
91
|
+
if (value === null || value === void 0) {
|
|
92
|
+
return "null";
|
|
93
|
+
}
|
|
94
|
+
if (typeof value !== "object") {
|
|
95
|
+
return JSON.stringify(value);
|
|
96
|
+
}
|
|
97
|
+
if (Array.isArray(value)) {
|
|
98
|
+
return `[${value.map(stringifyValue).join(",")}]`;
|
|
99
|
+
}
|
|
100
|
+
const keys = Object.keys(value).sort();
|
|
101
|
+
const pairs = [];
|
|
102
|
+
for (const key of keys) {
|
|
103
|
+
const keyValue = value[key];
|
|
104
|
+
if (keyValue !== void 0) {
|
|
105
|
+
pairs.push(`${JSON.stringify(key)}:${stringifyValue(keyValue)}`);
|
|
106
|
+
}
|
|
107
|
+
}
|
|
108
|
+
return `{${pairs.join(",")}}`;
|
|
109
|
+
}
|
|
110
|
+
|
|
111
|
+
// ../../lib/crypto/hash.ts
|
|
112
|
+
async function sha256(input) {
|
|
113
|
+
const data = new TextEncoder().encode(input);
|
|
114
|
+
const hashBuffer = await crypto.subtle.digest("SHA-256", data);
|
|
115
|
+
const hashArray = Array.from(new Uint8Array(hashBuffer));
|
|
116
|
+
return hashArray.map((b) => b.toString(16).padStart(2, "0")).join("");
|
|
117
|
+
}
|
|
118
|
+
async function hashInput(input) {
|
|
119
|
+
return sha256(stableStringify({ input }));
|
|
120
|
+
}
|
|
121
|
+
|
|
122
|
+
// ../../lib/error/serializable.ts
|
|
123
|
+
function createSerializableError(error) {
|
|
124
|
+
return error instanceof Error ? {
|
|
125
|
+
message: error.message,
|
|
126
|
+
name: error.name,
|
|
127
|
+
stack: error.stack,
|
|
128
|
+
cause: error.cause ? createSerializableError(error.cause) : void 0
|
|
129
|
+
} : {
|
|
130
|
+
message: String(error),
|
|
131
|
+
name: "UnknownError"
|
|
132
|
+
};
|
|
133
|
+
}
|
|
134
|
+
|
|
135
|
+
// ../../lib/retry/strategy.ts
|
|
136
|
+
function withRetry(fn, strategy, options) {
|
|
137
|
+
return {
|
|
138
|
+
run: async (...args) => {
|
|
139
|
+
let attempts = 0;
|
|
140
|
+
while (true) {
|
|
141
|
+
if (options?.abortSignal?.aborted) {
|
|
142
|
+
return {
|
|
143
|
+
state: "aborted",
|
|
144
|
+
reason: options.abortSignal.reason
|
|
145
|
+
};
|
|
146
|
+
}
|
|
147
|
+
attempts++;
|
|
148
|
+
let result;
|
|
149
|
+
try {
|
|
150
|
+
result = await fn(...args);
|
|
151
|
+
if (options?.shouldRetryOnResult === void 0 || !await options.shouldRetryOnResult(result)) {
|
|
152
|
+
return {
|
|
153
|
+
state: "completed",
|
|
154
|
+
result,
|
|
155
|
+
attempts
|
|
156
|
+
};
|
|
157
|
+
}
|
|
158
|
+
} catch (err) {
|
|
159
|
+
if (options?.shouldNotRetryOnError !== void 0 && await options.shouldNotRetryOnError(err)) {
|
|
160
|
+
throw err;
|
|
161
|
+
}
|
|
162
|
+
}
|
|
163
|
+
const retryParams = getRetryParams(attempts, strategy);
|
|
164
|
+
if (!retryParams.retriesLeft) {
|
|
165
|
+
return {
|
|
166
|
+
state: "timeout"
|
|
167
|
+
};
|
|
168
|
+
}
|
|
169
|
+
await delay(retryParams.delayMs, { abortSignal: options?.abortSignal });
|
|
170
|
+
}
|
|
171
|
+
}
|
|
172
|
+
};
|
|
173
|
+
}
|
|
174
|
+
function getRetryParams(attempts, strategy) {
|
|
175
|
+
const strategyType = strategy.type;
|
|
176
|
+
switch (strategyType) {
|
|
177
|
+
case "never":
|
|
178
|
+
return {
|
|
179
|
+
retriesLeft: false
|
|
180
|
+
};
|
|
181
|
+
case "fixed":
|
|
182
|
+
if (attempts >= strategy.maxAttempts) {
|
|
183
|
+
return {
|
|
184
|
+
retriesLeft: false
|
|
185
|
+
};
|
|
186
|
+
}
|
|
187
|
+
return {
|
|
188
|
+
retriesLeft: true,
|
|
189
|
+
delayMs: strategy.delayMs
|
|
190
|
+
};
|
|
191
|
+
case "exponential": {
|
|
192
|
+
if (attempts >= strategy.maxAttempts) {
|
|
193
|
+
return {
|
|
194
|
+
retriesLeft: false
|
|
195
|
+
};
|
|
196
|
+
}
|
|
197
|
+
const delayMs = strategy.baseDelayMs * (strategy.factor ?? 2) ** (attempts - 1);
|
|
198
|
+
return {
|
|
199
|
+
retriesLeft: true,
|
|
200
|
+
delayMs: Math.min(delayMs, strategy.maxDelayMs ?? Number.POSITIVE_INFINITY)
|
|
201
|
+
};
|
|
202
|
+
}
|
|
203
|
+
case "jittered": {
|
|
204
|
+
if (attempts >= strategy.maxAttempts) {
|
|
205
|
+
return {
|
|
206
|
+
retriesLeft: false
|
|
207
|
+
};
|
|
208
|
+
}
|
|
209
|
+
const base = strategy.baseDelayMs * (strategy.jitterFactor ?? 2) ** (attempts - 1);
|
|
210
|
+
const delayMs = Math.random() * base;
|
|
211
|
+
return {
|
|
212
|
+
retriesLeft: true,
|
|
213
|
+
delayMs: Math.min(delayMs, strategy.maxDelayMs ?? Number.POSITIVE_INFINITY)
|
|
214
|
+
};
|
|
215
|
+
}
|
|
216
|
+
default:
|
|
217
|
+
return strategyType;
|
|
218
|
+
}
|
|
219
|
+
}
|
|
220
|
+
|
|
221
|
+
// ../task/task.ts
|
|
222
|
+
import { INTERNAL } from "@aikirun/types/symbols";
|
|
223
|
+
import { TaskFailedError } from "@aikirun/types/task";
|
|
224
|
+
import {
|
|
225
|
+
NonDeterminismError,
|
|
226
|
+
WorkflowRunFailedError,
|
|
227
|
+
WorkflowRunRevisionConflictError,
|
|
228
|
+
WorkflowRunSuspendedError
|
|
229
|
+
} from "@aikirun/types/workflow-run";
|
|
230
|
+
function task(params) {
|
|
231
|
+
return new TaskImpl(params);
|
|
232
|
+
}
|
|
233
|
+
var TaskImpl = class {
|
|
234
|
+
constructor(params) {
|
|
235
|
+
this.params = params;
|
|
236
|
+
this.name = params.name;
|
|
237
|
+
}
|
|
238
|
+
name;
|
|
239
|
+
with() {
|
|
240
|
+
const startOpts = this.params.opts ?? {};
|
|
241
|
+
const startOptsOverrider = objectOverrider(startOpts);
|
|
242
|
+
return new TaskBuilderImpl(this, startOptsOverrider());
|
|
243
|
+
}
|
|
244
|
+
async start(run, ...args) {
|
|
245
|
+
return this.startWithOpts(run, this.params.opts ?? {}, ...args);
|
|
246
|
+
}
|
|
247
|
+
async startWithOpts(run, startOpts, ...args) {
|
|
248
|
+
const handle = run[INTERNAL].handle;
|
|
249
|
+
handle[INTERNAL].assertExecutionAllowed();
|
|
250
|
+
const inputRaw = args[0];
|
|
251
|
+
const input = await this.parse(handle, this.params.schema?.input, inputRaw, run.logger);
|
|
252
|
+
const inputHash = await hashInput(input);
|
|
253
|
+
const address = getTaskAddress(this.name, inputHash);
|
|
254
|
+
const replayManifest = run[INTERNAL].replayManifest;
|
|
255
|
+
if (replayManifest.hasUnconsumedEntries()) {
|
|
256
|
+
const existingTaskInfo = replayManifest.consumeNextTask(address);
|
|
257
|
+
if (existingTaskInfo) {
|
|
258
|
+
return this.getExistingTaskResult(run, handle, startOpts, input, existingTaskInfo);
|
|
259
|
+
}
|
|
260
|
+
await this.throwNonDeterminismError(run, handle, inputHash, replayManifest);
|
|
261
|
+
}
|
|
262
|
+
const attempts = 1;
|
|
263
|
+
const retryStrategy = startOpts.retry ?? { type: "never" };
|
|
264
|
+
const taskInfo = await handle[INTERNAL].transitionTaskState({
|
|
265
|
+
type: "create",
|
|
266
|
+
taskName: this.name,
|
|
267
|
+
options: startOpts,
|
|
268
|
+
taskState: { status: "running", attempts, input }
|
|
269
|
+
});
|
|
270
|
+
const logger = run.logger.child({
|
|
271
|
+
"aiki.component": "task-execution",
|
|
272
|
+
"aiki.taskName": this.name,
|
|
273
|
+
"aiki.taskId": taskInfo.id
|
|
274
|
+
});
|
|
275
|
+
logger.info("Task started", { "aiki.attempts": attempts });
|
|
276
|
+
const { output, lastAttempt } = await this.tryExecuteTask(
|
|
277
|
+
handle,
|
|
278
|
+
input,
|
|
279
|
+
taskInfo.id,
|
|
280
|
+
retryStrategy,
|
|
281
|
+
attempts,
|
|
282
|
+
run[INTERNAL].options.spinThresholdMs,
|
|
283
|
+
logger
|
|
284
|
+
);
|
|
285
|
+
await handle[INTERNAL].transitionTaskState({
|
|
286
|
+
taskId: taskInfo.id,
|
|
287
|
+
taskState: { status: "completed", attempts: lastAttempt, output }
|
|
288
|
+
});
|
|
289
|
+
logger.info("Task complete", { "aiki.attempts": lastAttempt });
|
|
290
|
+
return output;
|
|
291
|
+
}
|
|
292
|
+
async getExistingTaskResult(run, handle, startOpts, input, existingTaskInfo) {
|
|
293
|
+
const existingTaskState = existingTaskInfo.state;
|
|
294
|
+
if (existingTaskState.status === "completed") {
|
|
295
|
+
return this.parse(handle, this.params.schema?.output, existingTaskState.output, run.logger);
|
|
296
|
+
}
|
|
297
|
+
if (existingTaskState.status === "failed") {
|
|
298
|
+
throw new TaskFailedError(
|
|
299
|
+
existingTaskInfo.id,
|
|
300
|
+
existingTaskState.attempts,
|
|
301
|
+
existingTaskState.error.message
|
|
302
|
+
);
|
|
303
|
+
}
|
|
304
|
+
existingTaskState.status;
|
|
305
|
+
const attempts = existingTaskState.attempts;
|
|
306
|
+
const retryStrategy = startOpts.retry ?? { type: "never" };
|
|
307
|
+
this.assertRetryAllowed(existingTaskInfo.id, attempts, retryStrategy, run.logger);
|
|
308
|
+
run.logger.debug("Retrying task", {
|
|
309
|
+
"aiki.taskName": this.name,
|
|
310
|
+
"aiki.taskId": existingTaskInfo.id,
|
|
311
|
+
"aiki.attempts": attempts,
|
|
312
|
+
"aiki.taskStatus": existingTaskState.status
|
|
313
|
+
});
|
|
314
|
+
return this.retryAndExecute(run, handle, input, existingTaskInfo.id, startOpts, retryStrategy, attempts);
|
|
315
|
+
}
|
|
316
|
+
async throwNonDeterminismError(run, handle, inputHash, manifest) {
|
|
317
|
+
const unconsumedManifestEntries = manifest.getUnconsumedEntries();
|
|
318
|
+
run.logger.error("Replay divergence", {
|
|
319
|
+
"aiki.taskName": this.name,
|
|
320
|
+
"aiki.inputHash": inputHash,
|
|
321
|
+
"aiki.unconsumedManifestEntries": unconsumedManifestEntries
|
|
322
|
+
});
|
|
323
|
+
const error = new NonDeterminismError(run.id, handle.run.attempts, unconsumedManifestEntries);
|
|
324
|
+
await handle[INTERNAL].transitionState({
|
|
325
|
+
status: "failed",
|
|
326
|
+
cause: "self",
|
|
327
|
+
error: createSerializableError(error)
|
|
328
|
+
});
|
|
329
|
+
throw error;
|
|
330
|
+
}
|
|
331
|
+
async retryAndExecute(run, handle, input, taskId, startOpts, retryStrategy, previousAttempts) {
|
|
332
|
+
const attempts = previousAttempts + 1;
|
|
333
|
+
const taskInfo = await handle[INTERNAL].transitionTaskState({
|
|
334
|
+
type: "retry",
|
|
335
|
+
taskId,
|
|
336
|
+
options: startOpts,
|
|
337
|
+
taskState: { status: "running", attempts, input }
|
|
338
|
+
});
|
|
339
|
+
const logger = run.logger.child({
|
|
340
|
+
"aiki.component": "task-execution",
|
|
341
|
+
"aiki.taskName": this.name,
|
|
342
|
+
"aiki.taskId": taskInfo.id
|
|
343
|
+
});
|
|
344
|
+
logger.info("Task started", { "aiki.attempts": attempts });
|
|
345
|
+
const { output, lastAttempt } = await this.tryExecuteTask(
|
|
346
|
+
handle,
|
|
347
|
+
input,
|
|
348
|
+
taskInfo.id,
|
|
349
|
+
retryStrategy,
|
|
350
|
+
attempts,
|
|
351
|
+
run[INTERNAL].options.spinThresholdMs,
|
|
352
|
+
logger
|
|
353
|
+
);
|
|
354
|
+
await handle[INTERNAL].transitionTaskState({
|
|
355
|
+
taskId: taskInfo.id,
|
|
356
|
+
taskState: { status: "completed", attempts: lastAttempt, output }
|
|
357
|
+
});
|
|
358
|
+
logger.info("Task complete", { "aiki.attempts": lastAttempt });
|
|
359
|
+
return output;
|
|
360
|
+
}
|
|
361
|
+
async tryExecuteTask(handle, input, taskId, retryStrategy, currentAttempt, spinThresholdMs, logger) {
|
|
362
|
+
let attempts = currentAttempt;
|
|
363
|
+
while (true) {
|
|
364
|
+
try {
|
|
365
|
+
const outputRaw = await this.params.handler(input);
|
|
366
|
+
const output = await this.parse(handle, this.params.schema?.output, outputRaw, logger);
|
|
367
|
+
return { output, lastAttempt: attempts };
|
|
368
|
+
} catch (error) {
|
|
369
|
+
if (error instanceof WorkflowRunSuspendedError || error instanceof WorkflowRunFailedError || error instanceof WorkflowRunRevisionConflictError) {
|
|
370
|
+
throw error;
|
|
371
|
+
}
|
|
372
|
+
const serializableError = createSerializableError(error);
|
|
373
|
+
const retryParams = getRetryParams(attempts, retryStrategy);
|
|
374
|
+
if (!retryParams.retriesLeft) {
|
|
375
|
+
logger.error("Task failed", {
|
|
376
|
+
"aiki.attempts": attempts,
|
|
377
|
+
"aiki.reason": serializableError.message
|
|
378
|
+
});
|
|
379
|
+
await handle[INTERNAL].transitionTaskState({
|
|
380
|
+
taskId,
|
|
381
|
+
taskState: { status: "failed", attempts, error: serializableError }
|
|
382
|
+
});
|
|
383
|
+
throw new TaskFailedError(taskId, attempts, serializableError.message);
|
|
384
|
+
}
|
|
385
|
+
logger.debug("Task failed. It will be retried", {
|
|
386
|
+
"aiki.attempts": attempts,
|
|
387
|
+
"aiki.nextAttemptInMs": retryParams.delayMs,
|
|
388
|
+
"aiki.reason": serializableError.message
|
|
389
|
+
});
|
|
390
|
+
if (retryParams.delayMs <= spinThresholdMs) {
|
|
391
|
+
await delay(retryParams.delayMs);
|
|
392
|
+
attempts++;
|
|
393
|
+
continue;
|
|
394
|
+
}
|
|
395
|
+
await handle[INTERNAL].transitionTaskState({
|
|
396
|
+
taskId,
|
|
397
|
+
taskState: {
|
|
398
|
+
status: "awaiting_retry",
|
|
399
|
+
attempts,
|
|
400
|
+
error: serializableError,
|
|
401
|
+
nextAttemptInMs: retryParams.delayMs
|
|
402
|
+
}
|
|
403
|
+
});
|
|
404
|
+
throw new WorkflowRunSuspendedError(handle.run.id);
|
|
405
|
+
}
|
|
406
|
+
}
|
|
407
|
+
}
|
|
408
|
+
assertRetryAllowed(taskId, attempts, retryStrategy, logger) {
|
|
409
|
+
const retryParams = getRetryParams(attempts, retryStrategy);
|
|
410
|
+
if (!retryParams.retriesLeft) {
|
|
411
|
+
logger.error("Task retry not allowed", {
|
|
412
|
+
"aiki.taskName": this.name,
|
|
413
|
+
"aiki.taskId": taskId,
|
|
414
|
+
"aiki.attempts": attempts
|
|
415
|
+
});
|
|
416
|
+
throw new TaskFailedError(taskId, attempts, "Task retry not allowed");
|
|
417
|
+
}
|
|
418
|
+
}
|
|
419
|
+
async parse(handle, schema, data, logger) {
|
|
420
|
+
if (!schema) {
|
|
421
|
+
return data;
|
|
422
|
+
}
|
|
423
|
+
const schemaValidation = schema["~standard"].validate(data);
|
|
424
|
+
const schemaValidationResult = schemaValidation instanceof Promise ? await schemaValidation : schemaValidation;
|
|
425
|
+
if (!schemaValidationResult.issues) {
|
|
426
|
+
return schemaValidationResult.value;
|
|
427
|
+
}
|
|
428
|
+
logger.error("Invalid task data", { "aiki.issues": schemaValidationResult.issues });
|
|
429
|
+
await handle[INTERNAL].transitionState({
|
|
430
|
+
status: "failed",
|
|
431
|
+
cause: "self",
|
|
432
|
+
error: {
|
|
433
|
+
name: "SchemaValidationError",
|
|
434
|
+
message: JSON.stringify(schemaValidationResult.issues)
|
|
435
|
+
}
|
|
436
|
+
});
|
|
437
|
+
throw new WorkflowRunFailedError(handle.run.id, handle.run.attempts);
|
|
438
|
+
}
|
|
439
|
+
};
|
|
440
|
+
var TaskBuilderImpl = class _TaskBuilderImpl {
|
|
441
|
+
constructor(task2, startOptsBuilder) {
|
|
442
|
+
this.task = task2;
|
|
443
|
+
this.startOptsBuilder = startOptsBuilder;
|
|
444
|
+
}
|
|
445
|
+
opt(path, value) {
|
|
446
|
+
return new _TaskBuilderImpl(this.task, this.startOptsBuilder.with(path, value));
|
|
447
|
+
}
|
|
448
|
+
start(run, ...args) {
|
|
449
|
+
return this.task.startWithOpts(run, this.startOptsBuilder.build(), ...args);
|
|
450
|
+
}
|
|
451
|
+
};
|
|
452
|
+
|
|
453
|
+
// ../workflow/workflow.ts
|
|
454
|
+
import { INTERNAL as INTERNAL6 } from "@aikirun/types/symbols";
|
|
455
|
+
|
|
456
|
+
// ../workflow/workflow-version.ts
|
|
457
|
+
import { INTERNAL as INTERNAL5 } from "@aikirun/types/symbols";
|
|
458
|
+
import { TaskFailedError as TaskFailedError2 } from "@aikirun/types/task";
|
|
459
|
+
import { SchemaValidationError as SchemaValidationError2 } from "@aikirun/types/validator";
|
|
460
|
+
import {
|
|
461
|
+
NonDeterminismError as NonDeterminismError2,
|
|
462
|
+
WorkflowRunFailedError as WorkflowRunFailedError3,
|
|
463
|
+
WorkflowRunRevisionConflictError as WorkflowRunRevisionConflictError5,
|
|
464
|
+
WorkflowRunSuspendedError as WorkflowRunSuspendedError4
|
|
465
|
+
} from "@aikirun/types/workflow-run";
|
|
466
|
+
|
|
467
|
+
// ../../lib/duration/convert.ts
|
|
468
|
+
var MS_PER_SECOND = 1e3;
|
|
469
|
+
var MS_PER_MINUTE = 60 * MS_PER_SECOND;
|
|
470
|
+
var MS_PER_HOUR = 60 * MS_PER_MINUTE;
|
|
471
|
+
var MS_PER_DAY = 24 * MS_PER_HOUR;
|
|
472
|
+
function toMilliseconds(duration) {
|
|
473
|
+
if (typeof duration === "number") {
|
|
474
|
+
assertIsPositiveNumber(duration);
|
|
475
|
+
return duration;
|
|
476
|
+
}
|
|
477
|
+
let totalMs = 0;
|
|
478
|
+
if (duration.days !== void 0) {
|
|
479
|
+
assertIsPositiveNumber(duration.days, "days");
|
|
480
|
+
totalMs += duration.days * MS_PER_DAY;
|
|
481
|
+
}
|
|
482
|
+
if (duration.hours !== void 0) {
|
|
483
|
+
assertIsPositiveNumber(duration.hours, "hours");
|
|
484
|
+
totalMs += duration.hours * MS_PER_HOUR;
|
|
485
|
+
}
|
|
486
|
+
if (duration.minutes !== void 0) {
|
|
487
|
+
assertIsPositiveNumber(duration.minutes, "minutes");
|
|
488
|
+
totalMs += duration.minutes * MS_PER_MINUTE;
|
|
489
|
+
}
|
|
490
|
+
if (duration.seconds !== void 0) {
|
|
491
|
+
assertIsPositiveNumber(duration.seconds, "seconds");
|
|
492
|
+
totalMs += duration.seconds * MS_PER_SECOND;
|
|
493
|
+
}
|
|
494
|
+
if (duration.milliseconds !== void 0) {
|
|
495
|
+
assertIsPositiveNumber(duration.milliseconds, "milliseconds");
|
|
496
|
+
totalMs += duration.milliseconds;
|
|
497
|
+
}
|
|
498
|
+
return totalMs;
|
|
499
|
+
}
|
|
500
|
+
function assertIsPositiveNumber(value, field) {
|
|
501
|
+
if (!Number.isFinite(value)) {
|
|
502
|
+
throw new Error(
|
|
503
|
+
field !== void 0 ? `'${field}' duration must be finite. Received: ${value}` : `Duration must be finite. Received: ${value}`
|
|
504
|
+
);
|
|
505
|
+
}
|
|
506
|
+
if (value < 0) {
|
|
507
|
+
throw new Error(
|
|
508
|
+
field !== void 0 ? `'${field}' duration must be non-negative. Received: ${value}` : `Duration must be non-negative. Received: ${value}`
|
|
509
|
+
);
|
|
510
|
+
}
|
|
511
|
+
}
|
|
512
|
+
|
|
513
|
+
// ../workflow/run/event.ts
|
|
514
|
+
import { INTERNAL as INTERNAL2 } from "@aikirun/types/symbols";
|
|
515
|
+
import { SchemaValidationError } from "@aikirun/types/validator";
|
|
516
|
+
import {
|
|
517
|
+
WorkflowRunFailedError as WorkflowRunFailedError2,
|
|
518
|
+
WorkflowRunRevisionConflictError as WorkflowRunRevisionConflictError2,
|
|
519
|
+
WorkflowRunSuspendedError as WorkflowRunSuspendedError2
|
|
520
|
+
} from "@aikirun/types/workflow-run";
|
|
521
|
+
function createEventSenders(api, workflowRunId, eventsDefinition, logger) {
|
|
522
|
+
const senders = {};
|
|
523
|
+
for (const [eventName, eventDefinition] of Object.entries(eventsDefinition)) {
|
|
524
|
+
const sender = createEventSender(
|
|
525
|
+
api,
|
|
526
|
+
workflowRunId,
|
|
527
|
+
eventName,
|
|
528
|
+
eventDefinition.schema,
|
|
529
|
+
logger.child({ "aiki.eventName": eventName })
|
|
530
|
+
);
|
|
531
|
+
senders[eventName] = sender;
|
|
532
|
+
}
|
|
533
|
+
return senders;
|
|
534
|
+
}
|
|
535
|
+
function createEventSender(api, workflowRunId, eventName, schema, logger, options) {
|
|
536
|
+
const optsOverrider = objectOverrider(options ?? {});
|
|
537
|
+
const createBuilder = (optsBuilder) => ({
|
|
538
|
+
opt: (path, value) => createBuilder(optsBuilder.with(path, value)),
|
|
539
|
+
send: (...args) => createEventSender(api, workflowRunId, eventName, schema, logger, optsBuilder.build()).send(...args)
|
|
540
|
+
});
|
|
541
|
+
async function send(...args) {
|
|
542
|
+
let data = args[0];
|
|
543
|
+
if (schema) {
|
|
544
|
+
const schemaValidation = schema["~standard"].validate(data);
|
|
545
|
+
const schemaValidationResult = schemaValidation instanceof Promise ? await schemaValidation : schemaValidation;
|
|
546
|
+
if (schemaValidationResult.issues) {
|
|
547
|
+
logger.error("Invalid event data", { "aiki.issues": schemaValidationResult.issues });
|
|
548
|
+
throw new SchemaValidationError("Invalid event data", schemaValidationResult.issues);
|
|
549
|
+
}
|
|
550
|
+
data = schemaValidationResult.value;
|
|
551
|
+
}
|
|
552
|
+
await api.workflowRun.sendEventV1({
|
|
553
|
+
id: workflowRunId,
|
|
554
|
+
eventName,
|
|
555
|
+
data,
|
|
556
|
+
options
|
|
557
|
+
});
|
|
558
|
+
logger.info("Sent event to workflow", {
|
|
559
|
+
...options?.reference ? { "aiki.referenceId": options.reference.id } : {}
|
|
560
|
+
});
|
|
561
|
+
}
|
|
562
|
+
return {
|
|
563
|
+
with: () => createBuilder(optsOverrider()),
|
|
564
|
+
send
|
|
565
|
+
};
|
|
566
|
+
}
|
|
567
|
+
function createEventMulticasters(workflowName, workflowVersionId, eventsDefinition) {
|
|
568
|
+
const senders = {};
|
|
569
|
+
for (const [eventName, eventDefinition] of Object.entries(eventsDefinition)) {
|
|
570
|
+
const sender = createEventMulticaster(
|
|
571
|
+
workflowName,
|
|
572
|
+
workflowVersionId,
|
|
573
|
+
eventName,
|
|
574
|
+
eventDefinition.schema
|
|
575
|
+
);
|
|
576
|
+
senders[eventName] = sender;
|
|
577
|
+
}
|
|
578
|
+
return senders;
|
|
579
|
+
}
|
|
580
|
+
function createEventMulticaster(workflowName, workflowVersionId, eventName, schema, options) {
|
|
581
|
+
const optsOverrider = objectOverrider(options ?? {});
|
|
582
|
+
const createBuilder = (optsBuilder) => ({
|
|
583
|
+
opt: (path, value) => createBuilder(optsBuilder.with(path, value)),
|
|
584
|
+
send: (client, runId, ...args) => createEventMulticaster(workflowName, workflowVersionId, eventName, schema, optsBuilder.build()).send(
|
|
585
|
+
client,
|
|
586
|
+
runId,
|
|
587
|
+
...args
|
|
588
|
+
),
|
|
589
|
+
sendByReferenceId: (client, referenceId, ...args) => createEventMulticaster(workflowName, workflowVersionId, eventName, schema, optsBuilder.build()).sendByReferenceId(
|
|
590
|
+
client,
|
|
591
|
+
referenceId,
|
|
592
|
+
...args
|
|
593
|
+
)
|
|
594
|
+
});
|
|
595
|
+
async function send(client, runId, ...args) {
|
|
596
|
+
let data = args[0];
|
|
597
|
+
if (schema) {
|
|
598
|
+
const schemaValidation = schema["~standard"].validate(data);
|
|
599
|
+
const schemaValidationResult = schemaValidation instanceof Promise ? await schemaValidation : schemaValidation;
|
|
600
|
+
if (schemaValidationResult.issues) {
|
|
601
|
+
client.logger.error("Invalid event data", {
|
|
602
|
+
"aiki.workflowName": workflowName,
|
|
603
|
+
"aiki.workflowVersionId": workflowVersionId,
|
|
604
|
+
"aiki.eventName": eventName,
|
|
605
|
+
"aiki.issues": schemaValidationResult.issues
|
|
606
|
+
});
|
|
607
|
+
throw new SchemaValidationError("Invalid event data", schemaValidationResult.issues);
|
|
608
|
+
}
|
|
609
|
+
data = schemaValidationResult.value;
|
|
610
|
+
}
|
|
611
|
+
const runIds = Array.isArray(runId) ? runId : [runId];
|
|
612
|
+
if (!isNonEmptyArray(runIds)) {
|
|
613
|
+
return;
|
|
614
|
+
}
|
|
615
|
+
await client.api.workflowRun.multicastEventV1({
|
|
616
|
+
ids: runIds,
|
|
617
|
+
eventName,
|
|
618
|
+
data,
|
|
619
|
+
options
|
|
620
|
+
});
|
|
621
|
+
client.logger.info("Multicasted event to workflows", {
|
|
622
|
+
"aiki.workflowName": workflowName,
|
|
623
|
+
"aiki.workflowVersionId": workflowVersionId,
|
|
624
|
+
"aiki.workflowRunIds": runIds,
|
|
625
|
+
"aiki.eventName": eventName,
|
|
626
|
+
...options?.reference ? { "aiki.eventReferenceId": options.reference.id } : {}
|
|
627
|
+
});
|
|
628
|
+
}
|
|
629
|
+
async function sendByReferenceId(client, referenceId, ...args) {
|
|
630
|
+
let data = args[0];
|
|
631
|
+
if (schema) {
|
|
632
|
+
const schemaValidation = schema["~standard"].validate(data);
|
|
633
|
+
const schemaValidationResult = schemaValidation instanceof Promise ? await schemaValidation : schemaValidation;
|
|
634
|
+
if (schemaValidationResult.issues) {
|
|
635
|
+
client.logger.error("Invalid event data", {
|
|
636
|
+
"aiki.workflowName": workflowName,
|
|
637
|
+
"aiki.workflowVersionId": workflowVersionId,
|
|
638
|
+
"aiki.eventName": eventName,
|
|
639
|
+
"aiki.issues": schemaValidationResult.issues
|
|
640
|
+
});
|
|
641
|
+
throw new SchemaValidationError("Invalid event data", schemaValidationResult.issues);
|
|
642
|
+
}
|
|
643
|
+
data = schemaValidationResult.value;
|
|
644
|
+
}
|
|
645
|
+
const referenceIds = Array.isArray(referenceId) ? referenceId : [referenceId];
|
|
646
|
+
if (!isNonEmptyArray(referenceIds)) {
|
|
647
|
+
return;
|
|
648
|
+
}
|
|
649
|
+
await client.api.workflowRun.multicastEventByReferenceV1({
|
|
650
|
+
references: referenceIds.map((referenceId2) => ({
|
|
651
|
+
name: workflowName,
|
|
652
|
+
versionId: workflowVersionId,
|
|
653
|
+
referenceId: referenceId2
|
|
654
|
+
})),
|
|
655
|
+
eventName,
|
|
656
|
+
data,
|
|
657
|
+
options
|
|
658
|
+
});
|
|
659
|
+
client.logger.info("Multicasted event by reference", {
|
|
660
|
+
"aiki.workflowName": workflowName,
|
|
661
|
+
"aiki.workflowVersionId": workflowVersionId,
|
|
662
|
+
"aiki.referenceIds": referenceIds,
|
|
663
|
+
"aiki.eventName": eventName,
|
|
664
|
+
...options?.reference ? { "aiki.eventReferenceId": options.reference.id } : {}
|
|
665
|
+
});
|
|
666
|
+
}
|
|
667
|
+
return {
|
|
668
|
+
with: () => createBuilder(optsOverrider()),
|
|
669
|
+
send,
|
|
670
|
+
sendByReferenceId
|
|
671
|
+
};
|
|
672
|
+
}
|
|
673
|
+
|
|
674
|
+
// ../workflow/run/handle.ts
|
|
675
|
+
import { INTERNAL as INTERNAL3 } from "@aikirun/types/symbols";
|
|
676
|
+
import {
|
|
677
|
+
isTerminalWorkflowRunStatus,
|
|
678
|
+
WorkflowRunNotExecutableError,
|
|
679
|
+
WorkflowRunRevisionConflictError as WorkflowRunRevisionConflictError3
|
|
680
|
+
} from "@aikirun/types/workflow-run";
|
|
681
|
+
async function workflowRunHandle(client, runOrId, eventsDefinition, logger) {
|
|
682
|
+
const run = typeof runOrId !== "string" ? runOrId : (await client.api.workflowRun.getByIdV1({ id: runOrId })).run;
|
|
683
|
+
return new WorkflowRunHandleImpl(
|
|
684
|
+
client,
|
|
685
|
+
run,
|
|
686
|
+
eventsDefinition ?? {},
|
|
687
|
+
logger ?? client.logger.child({
|
|
688
|
+
"aiki.workflowName": run.name,
|
|
689
|
+
"aiki.workflowVersionId": run.versionId,
|
|
690
|
+
"aiki.workflowRunId": run.id
|
|
691
|
+
})
|
|
692
|
+
);
|
|
693
|
+
}
|
|
694
|
+
// Handle over a single workflow run: wraps the run record, exposes event senders,
// status polling, and state transitions against the workflowRun API.
var WorkflowRunHandleImpl = class {
  constructor(client, _run, eventsDefinition, logger) {
    this._run = _run;
    this.logger = logger;
    this.api = client.api;
    this.events = createEventSenders(client.api, this._run.id, eventsDefinition, this.logger);
    // Internal surface (keyed by symbol) consumed by the worker/child-handle code;
    // methods are pre-bound so they can be passed around detached from the handle.
    this[INTERNAL3] = {
      client,
      transitionState: this.transitionState.bind(this),
      transitionTaskState: this.transitionTaskState.bind(this),
      assertExecutionAllowed: this.assertExecutionAllowed.bind(this)
    };
  }
  api;
  events;
  [INTERNAL3];
  // Latest locally-cached run record (refreshed by refresh()/transitionState()).
  get run() {
    return this._run;
  }
  // Re-fetch the run record from the API and replace the local cache.
  async refresh() {
    const { run: currentRun } = await this.api.workflowRun.getByIdV1({ id: this.run.id });
    this._run = currentRun;
  }
  // TODO: instead checking the current state, use the transition history
  // because it is possible for a workflow to flash though a state
  // and the handle will never know that the workflow hit that state
  async waitForStatus(status, options) {
    return this.waitForStatusByPolling(status, options);
  }
  // Poll the run until it reaches expectedStatus, a terminal status, a timeout,
  // or abort. Returns { success: true, state } or { success: false, cause }.
  async waitForStatusByPolling(expectedStatus, options) {
    // Already-aborted signal short-circuits without any API call.
    if (options?.abortSignal?.aborted) {
      return {
        success: false,
        cause: "aborted"
      };
    }
    // Default poll interval is 1s; timeout is converted into a bounded attempt count.
    const delayMs = options?.interval ? toMilliseconds(options.interval) : 1e3;
    const maxAttempts = options?.timeout ? Math.ceil(toMilliseconds(options.timeout) / delayMs) : Number.POSITIVE_INFINITY;
    const retryStrategy = { type: "fixed", maxAttempts, delayMs };
    const loadState = async () => {
      await this.refresh();
      return this.run.state;
    };
    // Keep polling while the run is neither in the wanted status nor terminal.
    const isNeitherExpectedNorTerminal = (state) => state.status !== expectedStatus && !isTerminalWorkflowRunStatus(state.status);
    // Unbounded wait with no abort signal: retry until expected/terminal status.
    if (!Number.isFinite(maxAttempts) && !options?.abortSignal) {
      const maybeResult2 = await withRetry(loadState, retryStrategy, {
        shouldRetryOnResult: async (state) => isNeitherExpectedNorTerminal(state)
      }).run();
      if (maybeResult2.state === "timeout") {
        // Unreachable by construction: maxAttempts is infinite here.
        throw new Error("Something's wrong, this should've never timed out");
      }
      if (maybeResult2.result.status !== expectedStatus) {
        // Run reached a terminal status other than the one awaited.
        return {
          success: false,
          cause: "run_terminated"
        };
      }
      return {
        success: true,
        state: maybeResult2.result
      };
    }
    // Bounded and/or abortable wait. The abortSignal variant only differs by the
    // extra option; duplicated because withRetry rejects an undefined signal.
    const maybeResult = options?.abortSignal ? await withRetry(loadState, retryStrategy, {
      abortSignal: options.abortSignal,
      shouldRetryOnResult: async (state) => isNeitherExpectedNorTerminal(state)
    }).run() : await withRetry(loadState, retryStrategy, {
      shouldRetryOnResult: async (state) => isNeitherExpectedNorTerminal(state)
    }).run();
    // NOTE(review): looks like leftover debug output at info level — confirm intent.
    this.logger.info("Maybe result", { maybeResult });
    if (maybeResult.state === "completed") {
      if (maybeResult.result.status !== expectedStatus) {
        return {
          success: false,
          cause: "run_terminated"
        };
      }
      return {
        success: true,
        state: maybeResult.result
      };
    }
    // Polling ended without completing: cause is the retry outcome ("timeout"/"aborted").
    return { success: false, cause: maybeResult.state };
  }
  // Request cancellation of the run with an optional human-readable reason.
  async cancel(reason) {
    await this.transitionState({ status: "cancelled", reason });
    this.logger.info("Workflow cancelled");
  }
  // Pause the run.
  async pause() {
    await this.transitionState({ status: "paused" });
    this.logger.info("Workflow paused");
  }
  // Resume a paused run by immediately rescheduling it.
  async resume() {
    await this.transitionState({ status: "scheduled", scheduledInMs: 0, reason: "resume" });
    this.logger.info("Workflow resumed");
  }
  // Wake a sleeping run before its scheduled wake-up time.
  async awake() {
    await this.transitionState({ status: "scheduled", scheduledInMs: 0, reason: "awake_early" });
    this.logger.info("Workflow awoken");
  }
  // Transition the run's state server-side and sync the local cache from the
  // response. Throws WorkflowRunRevisionConflictError3 on revision conflicts.
  async transitionState(targetState) {
    try {
      let response;
      // Externally-driven transitions (fresh schedule, resume, awake, pause, cancel)
      // are sent pessimistically (no revision check); everything else is optimistic
      // and guarded by the locally-known revision.
      if (targetState.status === "scheduled" && (targetState.reason === "new" || targetState.reason === "resume" || targetState.reason === "awake_early") || targetState.status === "paused" || targetState.status === "cancelled") {
        response = await this.api.workflowRun.transitionStateV1({
          type: "pessimistic",
          id: this.run.id,
          state: targetState
        });
      } else {
        response = await this.api.workflowRun.transitionStateV1({
          type: "optimistic",
          id: this.run.id,
          state: targetState,
          expectedRevision: this.run.revision
        });
      }
      // Keep the cached record consistent with the server's view.
      this._run.revision = response.revision;
      this._run.state = response.state;
      this._run.attempts = response.attempts;
    } catch (error) {
      if (isWorkflowRunRevisionConflictError(error)) {
        // Re-wrap the wire-level conflict into the typed SDK error.
        throw new WorkflowRunRevisionConflictError3(this.run.id);
      }
      throw error;
    }
  }
  // Transition the state of a task belonging to this run, guarded by the run's
  // revision. Returns the updated task info; conflicts are re-wrapped as above.
  async transitionTaskState(request) {
    try {
      const { taskInfo } = await this.api.workflowRun.transitionTaskStateV1({
        ...request,
        id: this.run.id,
        expectedWorkflowRunRevision: this.run.revision
      });
      return taskInfo;
    } catch (error) {
      if (isWorkflowRunRevisionConflictError(error)) {
        throw new WorkflowRunRevisionConflictError3(this.run.id);
      }
      throw error;
    }
  }
  // Guard: execution may proceed only while the run is queued or running.
  assertExecutionAllowed() {
    const status = this.run.state.status;
    if (status !== "queued" && status !== "running") {
      throw new WorkflowRunNotExecutableError(this.run.id, status);
    }
  }
};
|
|
842
|
+
// Type guard: detects the wire-level revision-conflict error by its `code` field.
function isWorkflowRunRevisionConflictError(error) {
  if (error == null || typeof error !== "object") {
    return false;
  }
  if (!("code" in error)) {
    return false;
  }
  return error.code === "WORKFLOW_RUN_REVISION_CONFLICT";
}
|
|
845
|
+
|
|
846
|
+
// ../workflow/run/handle-child.ts
|
|
847
|
+
import { INTERNAL as INTERNAL4 } from "@aikirun/types/symbols";
|
|
848
|
+
import {
|
|
849
|
+
isTerminalWorkflowRunStatus as isTerminalWorkflowRunStatus2,
|
|
850
|
+
WorkflowRunRevisionConflictError as WorkflowRunRevisionConflictError4,
|
|
851
|
+
WorkflowRunSuspendedError as WorkflowRunSuspendedError3
|
|
852
|
+
} from "@aikirun/types/workflow-run";
|
|
853
|
+
// Build a child-run handle: a plain workflow-run handle whose waitForStatus is
// replaced by the replay-aware child waiter, with the other operations bound
// through to the underlying handle.
async function childWorkflowRunHandle(client, run, parentRun, childWorkflowRunWaitQueues, logger, eventsDefinition) {
  const inner = await workflowRunHandle(client, run, eventsDefinition, logger);
  const bound = (method) => method.bind(inner);
  const childHandle = {
    run: inner.run,
    events: inner.events,
    refresh: bound(inner.refresh),
    // Child runs wait via the parent's wait queues instead of direct polling.
    waitForStatus: createStatusWaiter(inner, parentRun, childWorkflowRunWaitQueues, logger),
    cancel: bound(inner.cancel),
    pause: bound(inner.pause),
    resume: bound(inner.resume),
    awake: bound(inner.awake),
    [INTERNAL4]: inner[INTERNAL4]
  };
  return childHandle;
}
|
|
867
|
+
// Build the waitForStatus implementation for a child-run handle.
//
// During replay, previously recorded waits are consumed from the per-status
// queues (one cursor per status in nextIndexByStatus). When no recorded wait
// remains, the parent run is suspended into "awaiting_child_workflow" and
// resumed later by the server — so the non-replay path always throws
// WorkflowRunSuspendedError3.
//
// Fixes vs. previous revision:
// - corrected misspelled log message ("termnial" -> "terminal");
// - removed a dead no-op expression statement (`childWorkflowRunStatus;`,
//   residue of a compiled-away exhaustiveness check).
function createStatusWaiter(handle, parentRun, childWorkflowRunWaitQueues, logger) {
  // Replay cursors: next unconsumed recorded wait per awaited status.
  const nextIndexByStatus = {
    cancelled: 0,
    completed: 0,
    failed: 0
  };
  async function waitForStatus(expectedStatus, options) {
    const parentRunHandle = parentRun[INTERNAL4].handle;
    const nextIndex = nextIndexByStatus[expectedStatus];
    const { run } = handle;
    const childWorkflowRunWaits = childWorkflowRunWaitQueues[expectedStatus].childWorkflowRunWaits;
    const existingChildWorkflowRunWait = childWorkflowRunWaits[nextIndex];
    if (existingChildWorkflowRunWait) {
      // Replay path: consume the recorded outcome instead of waiting again.
      nextIndexByStatus[expectedStatus] = nextIndex + 1;
      if (existingChildWorkflowRunWait.status === "timeout") {
        logger.debug("Timed out waiting for child workflow status", {
          "aiki.childWorkflowExpectedStatus": expectedStatus
        });
        return {
          success: false,
          cause: "timeout"
        };
      }
      const childWorkflowRunStatus = existingChildWorkflowRunWait.childWorkflowRunState.status;
      if (childWorkflowRunStatus === expectedStatus) {
        return {
          success: true,
          state: existingChildWorkflowRunWait.childWorkflowRunState
        };
      }
      if (isTerminalWorkflowRunStatus2(childWorkflowRunStatus)) {
        // Child ended in a terminal status other than the awaited one.
        logger.debug("Child workflow run reached terminal state", {
          "aiki.childWorkflowTerminalStatus": childWorkflowRunStatus
        });
        return {
          success: false,
          cause: "run_terminated"
        };
      }
      // Recorded status is non-terminal and not the awaited one: fall through
      // to suspend and wait again.
    }
    const timeoutInMs = options?.timeout && toMilliseconds(options.timeout);
    try {
      // Suspend the parent until the child reaches the awaited status.
      await parentRunHandle[INTERNAL4].transitionState({
        status: "awaiting_child_workflow",
        childWorkflowRunId: run.id,
        childWorkflowRunStatus: expectedStatus,
        timeoutInMs
      });
      logger.info("Waiting for child Workflow", {
        "aiki.childWorkflowExpectedStatus": expectedStatus,
        ...timeoutInMs !== void 0 ? { "aiki.timeoutInMs": timeoutInMs } : {}
      });
    } catch (error) {
      // A concurrent transition means the parent was already suspended/moved.
      if (error instanceof WorkflowRunRevisionConflictError4) {
        throw new WorkflowRunSuspendedError3(parentRun.id);
      }
      throw error;
    }
    // Control never continues past here during this attempt: the worker
    // re-executes the workflow from the top once the wait resolves.
    throw new WorkflowRunSuspendedError3(parentRun.id);
  }
  return waitForStatus;
}
|
|
930
|
+
|
|
931
|
+
// ../workflow/workflow-version.ts
|
|
932
|
+
// One registered version of a workflow: owns schema validation, run creation,
// child-run replay logic, and the retry/failure lifecycle of the handler.
var WorkflowVersionImpl = class {
  constructor(name, versionId, params) {
    this.name = name;
    this.versionId = versionId;
    this.params = params;
    const eventsDefinition = this.params.events ?? {};
    this.events = createEventMulticasters(this.name, this.versionId, eventsDefinition);
    // Internal surface consumed by the worker: the events definition plus the
    // pre-bound execution entry point.
    this[INTERNAL5] = {
      eventsDefinition,
      handler: this.handler.bind(this)
    };
  }
  events;
  [INTERNAL5];
  // Begin a builder chain for overriding start options immutably.
  with() {
    const startOpts = this.params.opts ?? {};
    const startOptsOverrider = objectOverrider(startOpts);
    return new WorkflowBuilderImpl(this, startOptsOverrider());
  }
  // Start a run with the version's default options.
  async start(client, ...args) {
    return this.startWithOpts(client, this.params.opts ?? {}, ...args);
  }
  // Start a run with explicit options. Validates input against the standard-schema
  // input schema (if any), creates the run, and returns a handle to it.
  async startWithOpts(client, startOpts, ...args) {
    let input = args[0];
    const schema = this.params.schema?.input;
    if (schema) {
      // Standard Schema validation may be sync or async.
      const schemaValidation = schema["~standard"].validate(input);
      const schemaValidationResult = schemaValidation instanceof Promise ? await schemaValidation : schemaValidation;
      if (schemaValidationResult.issues) {
        client.logger.error("Invalid workflow data", { "aiki.issues": schemaValidationResult.issues });
        throw new SchemaValidationError2("Invalid workflow data", schemaValidationResult.issues);
      }
      // Use the (possibly transformed) validated value as the run input.
      input = schemaValidationResult.value;
    }
    const { id } = await client.api.workflowRun.createV1({
      name: this.name,
      versionId: this.versionId,
      input,
      options: startOpts
    });
    client.logger.info("Created workflow", {
      "aiki.workflowName": this.name,
      "aiki.workflowVersionId": this.versionId,
      "aiki.workflowRunId": id
    });
    return workflowRunHandle(client, id, this[INTERNAL5].eventsDefinition);
  }
  // Start a child run of parentRun with the version's default options.
  async startAsChild(parentRun, ...args) {
    return this.startAsChildWithOpts(parentRun, this.params.opts ?? {}, ...args);
  }
  // Start (or replay) a child run. On replay, the recorded child run matching the
  // deterministic address (name:version:referenceId-or-inputHash) is reused; a
  // mismatch is a non-determinism failure. Otherwise a fresh child run is created.
  async startAsChildWithOpts(parentRun, startOpts, ...args) {
    const parentRunHandle = parentRun[INTERNAL5].handle;
    parentRunHandle[INTERNAL5].assertExecutionAllowed();
    const { client } = parentRunHandle[INTERNAL5];
    const inputRaw = args[0];
    const input = await this.parse(parentRunHandle, this.params.schema?.input, inputRaw, parentRun.logger);
    const inputHash = await hashInput(input);
    const referenceId = startOpts.reference?.id;
    // Deterministic replay address: explicit reference id wins over input hash.
    const address = getWorkflowRunAddress(this.name, this.versionId, referenceId ?? inputHash);
    const replayManifest = parentRun[INTERNAL5].replayManifest;
    if (replayManifest.hasUnconsumedEntries()) {
      const existingRunInfo = replayManifest.consumeNextChildWorkflowRun(address);
      if (existingRunInfo) {
        const { run: existingRun } = await client.api.workflowRun.getByIdV1({ id: existingRunInfo.id });
        if (existingRun.state.status === "completed") {
          // Re-validate the recorded output so schema drift surfaces on replay.
          await this.parse(parentRunHandle, this.params.schema?.output, existingRun.state.output, parentRun.logger);
        }
        const logger2 = parentRun.logger.child({
          "aiki.childWorkflowName": existingRun.name,
          "aiki.childWorkflowVersionId": existingRun.versionId,
          "aiki.childWorkflowRunId": existingRun.id
        });
        return childWorkflowRunHandle(
          client,
          existingRun,
          parentRun,
          existingRunInfo.childWorkflowRunWaitQueues,
          logger2,
          this[INTERNAL5].eventsDefinition
        );
      }
      // Manifest still has entries but none matches this address: replay diverged.
      await this.throwNonDeterminismError(parentRun, parentRunHandle, inputHash, referenceId, replayManifest);
    }
    // Child inherits the parent's shard assignment, if present.
    const shard = parentRun.options.shard;
    const { id: newRunId } = await client.api.workflowRun.createV1({
      name: this.name,
      versionId: this.versionId,
      input,
      parentWorkflowRunId: parentRun.id,
      options: shard === void 0 ? startOpts : { ...startOpts, shard }
    });
    const { run: newRun } = await client.api.workflowRun.getByIdV1({ id: newRunId });
    const logger = parentRun.logger.child({
      "aiki.childWorkflowName": newRun.name,
      "aiki.childWorkflowVersionId": newRun.versionId,
      "aiki.childWorkflowRunId": newRun.id
    });
    logger.info("Created child workflow");
    return childWorkflowRunHandle(
      client,
      newRun,
      parentRun,
      {
        cancelled: { childWorkflowRunWaits: [] },
        completed: { childWorkflowRunWaits: [] },
        failed: { childWorkflowRunWaits: [] }
      },
      logger,
      this[INTERNAL5].eventsDefinition
    );
  }
  // Fail the parent run with a NonDeterminismError and rethrow it.
  // Despite the name, this both transitions state and throws.
  async throwNonDeterminismError(parentRun, parentRunHandle, inputHash, referenceId, manifest) {
    const unconsumedManifestEntries = manifest.getUnconsumedEntries();
    const logMeta = {
      "aiki.workflowName": this.name,
      "aiki.inputHash": inputHash,
      "aiki.unconsumedManifestEntries": unconsumedManifestEntries
    };
    if (referenceId !== void 0) {
      logMeta["aiki.referenceId"] = referenceId;
    }
    parentRun.logger.error("Replay divergence", logMeta);
    const error = new NonDeterminismError2(parentRun.id, parentRunHandle.run.attempts, unconsumedManifestEntries);
    await parentRunHandle[INTERNAL5].transitionState({
      status: "failed",
      cause: "self",
      error: createSerializableError(error)
    });
    throw error;
  }
  // Handle lookup by run id.
  async getHandleById(client, runId) {
    return workflowRunHandle(client, runId, this[INTERNAL5].eventsDefinition);
  }
  // Handle lookup by user-supplied reference id (scoped to this name/version).
  async getHandleByReferenceId(client, referenceId) {
    const { run } = await client.api.workflowRun.getByReferenceIdV1({
      name: this.name,
      versionId: this.versionId,
      referenceId
    });
    return workflowRunHandle(client, run, this[INTERNAL5].eventsDefinition);
  }
  // Worker-side execution entry point: guards executability, enforces the retry
  // budget on retried runs, then runs the user handler and records completion.
  async handler(run, input, context) {
    const { logger } = run;
    const { handle } = run[INTERNAL5];
    handle[INTERNAL5].assertExecutionAllowed();
    const retryStrategy = this.params.opts?.retry ?? { type: "never" };
    const state = handle.run.state;
    if (state.status === "queued" && state.reason === "retry") {
      await this.assertRetryAllowed(handle, retryStrategy, logger);
    }
    logger.info("Starting workflow");
    await handle[INTERNAL5].transitionState({ status: "running" });
    const output = await this.tryExecuteWorkflow(input, run, context, retryStrategy);
    await handle[INTERNAL5].transitionState({ status: "completed", output });
    logger.info("Workflow complete");
  }
  // Run the user handler once; on an ordinary failure either fail permanently
  // (retry budget exhausted) or park the run in awaiting_retry and suspend.
  // Control-flow errors (suspended/failed/conflict/non-determinism) pass through.
  async tryExecuteWorkflow(input, run, context, retryStrategy) {
    const { handle } = run[INTERNAL5];
    // NOTE: every catch path throws, so the loop body executes at most once
    // per worker invocation; retries happen via re-delivery.
    while (true) {
      try {
        const outputRaw = await this.params.handler(run, input, context);
        const output = await this.parse(handle, this.params.schema?.output, outputRaw, run.logger);
        return output;
      } catch (error) {
        if (error instanceof WorkflowRunSuspendedError4 || error instanceof WorkflowRunFailedError3 || error instanceof WorkflowRunRevisionConflictError5 || error instanceof NonDeterminismError2) {
          throw error;
        }
        const attempts = handle.run.attempts;
        const retryParams = getRetryParams(attempts, retryStrategy);
        if (!retryParams.retriesLeft) {
          // Terminal failure: persist failed state, log it with aiki.* keys, rethrow.
          const failedState = this.createFailedState(error);
          await handle[INTERNAL5].transitionState(failedState);
          const logMeta2 = {};
          for (const [key, value] of Object.entries(failedState)) {
            logMeta2[`aiki.${key}`] = value;
          }
          run.logger.error("Workflow failed", {
            "aiki.attempts": attempts,
            ...logMeta2
          });
          throw new WorkflowRunFailedError3(run.id, attempts);
        }
        // Retries remain: schedule the next attempt and suspend this execution.
        const awaitingRetryState = this.createAwaitingRetryState(error, retryParams.delayMs);
        await handle[INTERNAL5].transitionState(awaitingRetryState);
        const logMeta = {};
        for (const [key, value] of Object.entries(awaitingRetryState)) {
          logMeta[`aiki.${key}`] = value;
        }
        run.logger.info("Workflow awaiting retry", {
          "aiki.attempts": attempts,
          ...logMeta
        });
        throw new WorkflowRunSuspendedError4(run.id);
      }
    }
  }
  // Before re-running a retried workflow, verify the retry budget still allows
  // it; otherwise fail the run permanently and throw.
  async assertRetryAllowed(handle, retryStrategy, logger) {
    const { id, attempts } = handle.run;
    const retryParams = getRetryParams(attempts, retryStrategy);
    if (!retryParams.retriesLeft) {
      logger.error("Workflow retry not allowed", { "aiki.attempts": attempts });
      const error = new WorkflowRunFailedError3(id, attempts);
      await handle[INTERNAL5].transitionState({
        status: "failed",
        cause: "self",
        error: createSerializableError(error)
      });
      throw error;
    }
  }
  // Validate data against a Standard Schema. On issues: log, fail the run with a
  // serialized SchemaValidationError, and throw WorkflowRunFailedError3.
  // Returns the validated (possibly transformed) value, or data unchanged when
  // no schema is configured.
  async parse(handle, schema, data, logger) {
    if (!schema) {
      return data;
    }
    const schemaValidation = schema["~standard"].validate(data);
    const schemaValidationResult = schemaValidation instanceof Promise ? await schemaValidation : schemaValidation;
    if (!schemaValidationResult.issues) {
      return schemaValidationResult.value;
    }
    logger.error("Invalid workflow data", { "aiki.issues": schemaValidationResult.issues });
    await handle[INTERNAL5].transitionState({
      status: "failed",
      cause: "self",
      error: {
        name: "SchemaValidationError",
        message: JSON.stringify(schemaValidationResult.issues)
      }
    });
    throw new WorkflowRunFailedError3(handle.run.id, handle.run.attempts);
  }
  // Map an error to a terminal "failed" state, attributing task failures to the
  // task and anything else to the workflow itself.
  createFailedState(error) {
    if (error instanceof TaskFailedError2) {
      return {
        status: "failed",
        cause: "task",
        taskId: error.taskId
      };
    }
    return {
      status: "failed",
      cause: "self",
      error: createSerializableError(error)
    };
  }
  // Map an error to an "awaiting_retry" state with the computed backoff delay.
  createAwaitingRetryState(error, nextAttemptInMs) {
    if (error instanceof TaskFailedError2) {
      return {
        status: "awaiting_retry",
        cause: "task",
        nextAttemptInMs,
        taskId: error.taskId
      };
    }
    return {
      status: "awaiting_retry",
      cause: "self",
      nextAttemptInMs,
      error: createSerializableError(error)
    };
  }
};
|
|
1193
|
+
// Immutable builder over a workflow version's start options: each .opt() call
// yields a new builder; start()/startAsChild() materialize the options and
// delegate to the underlying workflow version.
var WorkflowBuilderImpl = class _WorkflowBuilderImpl {
  constructor(targetWorkflow, optsBuilder) {
    this.workflow = targetWorkflow;
    this.startOptsBuilder = optsBuilder;
  }
  // Override one option by path; returns a fresh builder, never mutates this one.
  opt(path, value) {
    const nextOptsBuilder = this.startOptsBuilder.with(path, value);
    return new _WorkflowBuilderImpl(this.workflow, nextOptsBuilder);
  }
  // Start a top-level run using the accumulated options.
  start(client, ...args) {
    const builtOpts = this.startOptsBuilder.build();
    return this.workflow.startWithOpts(client, builtOpts, ...args);
  }
  // Start a child run of parentRun using the accumulated options.
  startAsChild(parentRun, ...args) {
    const builtOpts = this.startOptsBuilder.build();
    return this.workflow.startAsChildWithOpts(parentRun, builtOpts, ...args);
  }
};
|
|
1208
|
+
|
|
1209
|
+
// ../workflow/workflow.ts
|
|
1210
|
+
// Factory for a named workflow whose versions are then registered via .v().
function workflow(params) {
  const instance = new WorkflowImpl(params);
  return instance;
}
|
|
1213
|
+
// A named workflow: a registry of its versions keyed by versionId, with an
// internal (symbol-keyed) lookup surface used elsewhere in the SDK.
var WorkflowImpl = class {
  name;
  [INTERNAL6];
  // versionId -> WorkflowVersionImpl
  workflowVersions = /* @__PURE__ */ new Map();
  constructor(params) {
    this.name = params.name;
    this[INTERNAL6] = {
      getAllVersions: this.getAllVersions.bind(this),
      getVersion: this.getVersion.bind(this)
    };
  }
  // Register a new version; duplicate versionIds are rejected.
  v(versionId, params) {
    const isDuplicate = this.workflowVersions.has(versionId);
    if (isDuplicate) {
      throw new Error(`Workflow "${this.name}:${versionId}" already exists`);
    }
    const workflowVersion = new WorkflowVersionImpl(this.name, versionId, params);
    this.workflowVersions.set(versionId, workflowVersion);
    return workflowVersion;
  }
  // All registered versions, in registration order.
  getAllVersions() {
    return [...this.workflowVersions.values()];
  }
  // A single version by id, or undefined if not registered.
  getVersion(versionId) {
    return this.workflowVersions.get(versionId);
  }
};
|
|
1242
|
+
|
|
1243
|
+
// ../workflow/system/cancel-child-runs.ts
|
|
1244
|
+
// Build the system workflow "aiki:cancel-child-runs" v1.0.0: given a parent run
// id, list its non-terminal child runs and cancel them, each step as a durable task.
var createCancelChildRunsV1 = (api) => {
  // Task: fetch ids of the parent's child runs that are still non-terminal.
  const listNonTerminalChildRuns = task({
    name: "aiki:list-non-terminal-child-runs",
    async handler(parentRunId) {
      const { runs } = await api.workflowRun.listChildRunsV1({
        parentRunId,
        status: NON_TERMINAL_WORKFLOW_RUN_STATUSES
      });
      return runs.map((r) => r.id);
    }
  });
  // Task: bulk-cancel the given run ids; returns the ids actually cancelled.
  const cancelRuns = task({
    name: "aiki:cancel-runs",
    async handler(runIds) {
      const { cancelledIds } = await api.workflowRun.cancelByIdsV1({ ids: runIds });
      return cancelledIds;
    }
  });
  return workflow({ name: "aiki:cancel-child-runs" }).v("1.0.0", {
    async handler(run, parentRunId) {
      const childRunIds = await listNonTerminalChildRuns.start(run, parentRunId);
      // Nothing to cancel: finish without scheduling the cancel task.
      if (!isNonEmptyArray(childRunIds)) {
        return;
      }
      await cancelRuns.start(run, childRunIds);
    }
  });
};
|
|
1272
|
+
|
|
1273
|
+
// ../workflow/system/index.ts
|
|
1274
|
+
// System workflows every worker registers alongside user workflows
// (currently only the child-run cancellation workflow).
function getSystemWorkflows(api) {
  const systemWorkflows = [createCancelChildRunsV1(api)];
  return systemWorkflows;
}
|
|
1277
|
+
|
|
1278
|
+
// worker.ts
|
|
1279
|
+
import { ulid } from "ulidx";
|
|
75
1280
|
function worker(params) {
|
|
76
1281
|
return new WorkerImpl(params);
|
|
77
1282
|
}
|
|
@@ -100,13 +1305,13 @@ var WorkerHandleImpl = class {
|
|
|
100
1305
|
this.client = client;
|
|
101
1306
|
this.params = params;
|
|
102
1307
|
this.spawnOpts = spawnOpts;
|
|
103
|
-
this.id =
|
|
1308
|
+
this.id = ulid();
|
|
104
1309
|
this.name = params.name;
|
|
105
1310
|
this.workflowRunOpts = {
|
|
106
1311
|
heartbeatIntervalMs: this.spawnOpts.workflowRun?.heartbeatIntervalMs ?? 3e4,
|
|
107
1312
|
spinThresholdMs: this.spawnOpts.workflowRun?.spinThresholdMs ?? 10
|
|
108
1313
|
};
|
|
109
|
-
this.registry = workflowRegistry().addMany(this.params.workflows);
|
|
1314
|
+
this.registry = workflowRegistry().addMany(getSystemWorkflows(client.api)).addMany(this.params.workflows);
|
|
110
1315
|
const reference = this.spawnOpts.reference;
|
|
111
1316
|
this.logger = client.logger.child({
|
|
112
1317
|
"aiki.component": "worker",
|
|
@@ -122,9 +1327,10 @@ var WorkerHandleImpl = class {
|
|
|
122
1327
|
logger;
|
|
123
1328
|
abortController;
|
|
124
1329
|
subscriberStrategy;
|
|
1330
|
+
pollPromise;
|
|
125
1331
|
activeWorkflowRunsById = /* @__PURE__ */ new Map();
|
|
126
1332
|
async _start() {
|
|
127
|
-
const subscriberStrategyBuilder = this.client[
|
|
1333
|
+
const subscriberStrategyBuilder = this.client[INTERNAL7].subscriber.create(
|
|
128
1334
|
this.params.subscriber ?? { type: "redis" },
|
|
129
1335
|
this.registry.getAll(),
|
|
130
1336
|
this.spawnOpts.shards
|
|
@@ -135,7 +1341,7 @@ var WorkerHandleImpl = class {
|
|
|
135
1341
|
});
|
|
136
1342
|
this.abortController = new AbortController();
|
|
137
1343
|
const abortSignal = this.abortController.signal;
|
|
138
|
-
|
|
1344
|
+
this.pollPromise = this.poll(abortSignal).catch((error) => {
|
|
139
1345
|
if (!abortSignal.aborted) {
|
|
140
1346
|
this.logger.error("Unexpected error", {
|
|
141
1347
|
"aiki.error": error.message
|
|
@@ -146,6 +1352,7 @@ var WorkerHandleImpl = class {
|
|
|
146
1352
|
async stop() {
|
|
147
1353
|
this.logger.info("Worker stopping");
|
|
148
1354
|
this.abortController?.abort();
|
|
1355
|
+
await this.pollPromise;
|
|
149
1356
|
const activeWorkflowRuns = Array.from(this.activeWorkflowRunsById.values());
|
|
150
1357
|
if (activeWorkflowRuns.length === 0) {
|
|
151
1358
|
return;
|
|
@@ -168,7 +1375,7 @@ var WorkerHandleImpl = class {
|
|
|
168
1375
|
throw new Error("Subscriber strategy not initialized");
|
|
169
1376
|
}
|
|
170
1377
|
this.logger.info("Worker started", {
|
|
171
|
-
"aiki.registeredWorkflows": this.params.workflows.map((w) => `${w.name}
|
|
1378
|
+
"aiki.registeredWorkflows": this.params.workflows.map((w) => `${w.name}:${w.versionId}`)
|
|
172
1379
|
});
|
|
173
1380
|
const maxConcurrentWorkflowRuns = this.spawnOpts.maxConcurrentWorkflowRuns ?? 1;
|
|
174
1381
|
let nextDelayMs = this.subscriberStrategy.getNextDelay({ type: "polled", foundWork: false });
|
|
@@ -287,26 +1494,30 @@ var WorkerHandleImpl = class {
|
|
|
287
1494
|
}
|
|
288
1495
|
}, this.workflowRunOpts.heartbeatIntervalMs);
|
|
289
1496
|
}
|
|
290
|
-
const eventsDefinition = workflowVersion[
|
|
291
|
-
const handle = await
|
|
292
|
-
const appContext = this.client[
|
|
293
|
-
await workflowVersion[
|
|
1497
|
+
const eventsDefinition = workflowVersion[INTERNAL7].eventsDefinition;
|
|
1498
|
+
const handle = await workflowRunHandle2(this.client, workflowRun, eventsDefinition, logger);
|
|
1499
|
+
const appContext = this.client[INTERNAL7].createContext ? await this.client[INTERNAL7].createContext(workflowRun) : null;
|
|
1500
|
+
await workflowVersion[INTERNAL7].handler(
|
|
294
1501
|
{
|
|
295
1502
|
id: workflowRun.id,
|
|
296
1503
|
name: workflowRun.name,
|
|
297
1504
|
versionId: workflowRun.versionId,
|
|
298
|
-
options: workflowRun.options,
|
|
1505
|
+
options: workflowRun.options ?? {},
|
|
299
1506
|
logger,
|
|
300
1507
|
sleep: createSleeper(handle, logger),
|
|
301
1508
|
events: createEventWaiters(handle, eventsDefinition, logger),
|
|
302
|
-
[
|
|
1509
|
+
[INTERNAL7]: {
|
|
1510
|
+
handle,
|
|
1511
|
+
replayManifest: createReplayManifest(workflowRun),
|
|
1512
|
+
options: { spinThresholdMs: this.workflowRunOpts.spinThresholdMs }
|
|
1513
|
+
}
|
|
303
1514
|
},
|
|
304
1515
|
workflowRun.input,
|
|
305
1516
|
appContext
|
|
306
1517
|
);
|
|
307
1518
|
shouldAcknowledge = true;
|
|
308
1519
|
} catch (error) {
|
|
309
|
-
if (error instanceof
|
|
1520
|
+
if (error instanceof WorkflowRunNotExecutableError2 || error instanceof WorkflowRunSuspendedError5 || error instanceof WorkflowRunFailedError4 || error instanceof WorkflowRunRevisionConflictError6 || error instanceof NonDeterminismError3) {
|
|
310
1521
|
shouldAcknowledge = true;
|
|
311
1522
|
} else {
|
|
312
1523
|
logger.error("Unexpected error during workflow execution", {
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@aikirun/worker",
|
|
3
|
-
"version": "0.
|
|
3
|
+
"version": "0.18.0",
|
|
4
4
|
"description": "Worker SDK for Aiki - execute workflows and tasks with durable state management and automatic recovery",
|
|
5
5
|
"type": "module",
|
|
6
6
|
"main": "./dist/index.js",
|
|
@@ -18,9 +18,10 @@
|
|
|
18
18
|
"build": "tsup"
|
|
19
19
|
},
|
|
20
20
|
"dependencies": {
|
|
21
|
-
"@aikirun/types": "0.
|
|
22
|
-
"@aikirun/client": "0.
|
|
23
|
-
"@aikirun/workflow": "0.
|
|
21
|
+
"@aikirun/types": "0.18.0",
|
|
22
|
+
"@aikirun/client": "0.18.0",
|
|
23
|
+
"@aikirun/workflow": "0.18.0",
|
|
24
|
+
"ulidx": "^2.4.1"
|
|
24
25
|
},
|
|
25
26
|
"publishConfig": {
|
|
26
27
|
"access": "public"
|