@aikirun/workflow 0.7.0 → 0.9.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +29 -24
- package/dist/index.d.ts +59 -46
- package/dist/index.js +391 -206
- package/package.json +2 -2
package/README.md
CHANGED

@@ -16,10 +16,10 @@ npm install @aikirun/workflow
 import { workflow } from "@aikirun/workflow";
 import { markUserVerified, sendVerificationEmail } from "./tasks.ts";
 
-export const onboardingWorkflow = workflow({
+export const onboardingWorkflow = workflow({ name: "user-onboarding" });
 
-export const onboardingWorkflowV1 = onboardingWorkflow.v("1.0", {
-async handler(input: { email: string }
+export const onboardingWorkflowV1 = onboardingWorkflow.v("1.0.0", {
+async handler(run, input: { email: string }) {
 run.logger.info("Starting onboarding", { email: input.email });
 
 // Execute a task to send verification email
@@ -30,7 +30,7 @@ export const onboardingWorkflowV1 = onboardingWorkflow.v("1.0", {
 await markUserVerified.start(run, { email: input.email });
 
 // Sleep for 24 hours before sending tips
-await run.sleep(
+await run.sleep("onboarding-delay", { days: 1 });
 
 // Send usage tips
 await sendUsageTips.start(run, { email: input.email });
@@ -64,9 +64,9 @@ const result = await createUserProfile.start(run, {
 
 ```typescript
 // Sleep requires a unique id for memoization
-await run.sleep(
-await run.sleep(
-await run.sleep(
+await run.sleep("daily-delay", { days: 1 });
+await run.sleep("processing-delay", { hours: 2, minutes: 30 });
+await run.sleep("short-pause", { seconds: 30 });
 ```
 
 ### Sleep Cancellation
@@ -81,7 +81,7 @@ await handle.wake(); // Wakes the workflow if sleeping
 The sleep returns a result indicating whether it was cancelled:
 
 ```typescript
-const { cancelled } = await run.sleep(
+const { cancelled } = await run.sleep("wait-period", { hours: 1 });
 if (cancelled) {
 // Handle early wake-up
 }
@@ -106,7 +106,7 @@ run.logger.debug("User created", { userId: result.userId });
 ### Delayed Trigger
 
 ```typescript
-export const morningWorkflowV1 = morningWorkflow.v("1.0", {
+export const morningWorkflowV1 = morningWorkflow.v("1.0.0", {
 // ... workflow definition
 opts: {
 trigger: {
@@ -120,7 +120,7 @@ export const morningWorkflowV1 = morningWorkflow.v("1.0", {
 ### Retry Strategy
 
 ```typescript
-export const paymentWorkflowV1 = paymentWorkflow.v("1.0", {
+export const paymentWorkflowV1 = paymentWorkflow.v("1.0.0", {
 // ... workflow definition
 opts: {
 retry: {
@@ -133,15 +133,18 @@ export const paymentWorkflowV1 = paymentWorkflow.v("1.0", {
 });
 ```
 
-###
+### Reference ID
 
 ```typescript
-
-
-
-
-
-
+// Assign a reference ID for tracking and lookup
+const handle = await orderWorkflowV1
+.with().opt("reference.id", `order-${orderId}`)
+.start(client, { orderId });
+
+// Configure conflict handling: "error" (default) or "return_existing"
+const handle = await orderWorkflowV1
+.with().opt("reference", { id: `order-${orderId}`, onConflict: "return_existing" })
+.start(client, { orderId });
 ```
 
 ## Running Workflows
@@ -153,7 +156,7 @@ import { client } from "@aikirun/client";
 import { onboardingWorkflowV1 } from "./workflows.ts";
 
 const aikiClient = await client({
-url: "http://localhost:
+url: "http://localhost:9876",
 redis: { host: "localhost", port: 6379 },
 });
 
@@ -180,7 +183,7 @@ With a worker:
 import { worker } from "@aikirun/worker";
 
 const aikiWorker = worker({
-
+name: "my-worker",
 workflows: [onboardingWorkflowV1],
 opts: {
 maxConcurrentWorkflowRuns: 10,
@@ -199,7 +202,7 @@ interface WorkflowRunContext<Input, Output> {
 id: WorkflowRunId; // Unique run ID
 name: WorkflowName; // Workflow name
 versionId: WorkflowVersionId; // Version ID
-options: WorkflowOptions; // Execution options (trigger, retry,
+options: WorkflowOptions; // Execution options (trigger, retry, reference)
 handle: WorkflowRunHandle<Input, Output>; // Advanced state management
 logger: Logger; // Logging (info, debug, warn, error, trace)
 sleep(params: SleepParams): Promise<SleepResult>; // Durable sleep
@@ -210,7 +213,7 @@ Sleep parameters:
 - `id` (required): Unique identifier for memoization
 - Duration fields: `days`, `hours`, `minutes`, `seconds`, `milliseconds`
 
-Example: `run.sleep(
+Example: `run.sleep("my-sleep", { days: 1, hours: 2 })`
 
 ## Error Handling
 
@@ -229,9 +232,11 @@ Failed workflows transition to `awaiting_retry` state and are automatically retr
 
 ### Expected Errors
 
-
-
-when
+These errors are thrown during normal workflow execution and should not be caught in workflow code:
+
+- `WorkflowRunSuspendedError` - Thrown when a workflow suspends (e.g., during sleep or awaiting events). The worker catches this error and the workflow resumes when the condition is met.
+
+- `WorkflowRunConflictError` - Thrown when another worker has already claimed the workflow execution. This prevents duplicate execution when workers race to process the same workflow.
 
 ## Best Practices
 
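Taken together, the README changes above move the SDK to full semver version ids, a `(run, input)` handler signature, a named two-argument `run.sleep(name, duration)`, and a `reference` option settable through the start builder. The sketch below assembles those pieces into one hedged example of the 0.9.0 API as shown in this diff; the identifiers (`orderWorkflow`, `orderId`) are adapted from the README snippets rather than taken from a verified project, and the client settings mirror the README's localhost example.

```typescript
import { client } from "@aikirun/client";
import { workflow } from "@aikirun/workflow";

// Workflow identity is now a `name`; versions use a full semver string.
export const orderWorkflow = workflow({ name: "order-processing" });

export const orderWorkflowV1 = orderWorkflow.v("1.0.0", {
  // The handler now receives the run context first, then the typed input.
  async handler(run, input: { orderId: string }) {
    run.logger.info("Processing order", { orderId: input.orderId });

    // Durable sleep takes a sleep name plus a duration object.
    const { cancelled } = await run.sleep("settlement-delay", { hours: 1 });
    if (cancelled) {
      run.logger.warn("Sleep was woken up early");
    }
    return { orderId: input.orderId };
  },
});

async function main() {
  const aikiClient = await client({
    url: "http://localhost:9876",
    redis: { host: "localhost", port: 6379 },
  });

  // Start a run, attaching a reference ID and a conflict policy via the builder.
  const orderId = "1234";
  const handle = await orderWorkflowV1
    .with()
    .opt("reference", { id: `order-${orderId}`, onConflict: "return_existing" })
    .start(aikiClient, { orderId });

  console.log("started workflow run", handle.run.id);
}

void main();
```

Per the README comment, `onConflict` accepts `"error"` (the default) or `"return_existing"`; the latter name suggests that starting the same reference id twice hands back the existing run instead of raising an error.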
package/dist/index.d.ts
CHANGED

@@ -1,13 +1,15 @@
-import {
+import { WorkflowName, WorkflowVersionId } from '@aikirun/types/workflow';
 import { Client, Logger, ApiClient } from '@aikirun/types/client';
 import { INTERNAL } from '@aikirun/types/symbols';
+import { Schema } from '@aikirun/types/validator';
 import { WorkflowRun, TerminalWorkflowRunStatus, WorkflowRunState, WorkflowRunId, WorkflowOptions } from '@aikirun/types/workflow-run';
-import {
+import { DurationObject, Duration } from '@aikirun/types/duration';
+import { SleepResult } from '@aikirun/types/sleep';
 import { EventSendOptions, EventWaitOptions, EventWaitState } from '@aikirun/types/event';
-import {
-import {
-import {
-import {
+import { Serializable } from '@aikirun/types/serializable';
+import { DistributiveOmit, RequireAtLeastOneProp } from '@aikirun/types/utils';
+import { TaskId } from '@aikirun/types/task';
+import { WorkflowRunStateRequest, WorkflowRunTransitionTaskStateRequestV1 } from '@aikirun/types/workflow-run-api';
 
 type NonEmptyArray<T> = [T, ...T[]];
 
@@ -90,7 +92,9 @@ interface WorkflowRunHandle<Input, Output, AppContext, TEventsDefinition extends
 [INTERNAL]: {
 client: Client<AppContext>;
 transitionState: (state: WorkflowRunStateRequest) => Promise<void>;
-transitionTaskState: (
+transitionTaskState: (request: DistributiveOmit<WorkflowRunTransitionTaskStateRequestV1, "id" | "expectedRevision">) => Promise<{
+taskId: TaskId;
+}>;
 assertExecutionAllowed: () => void;
 };
 }
@@ -131,14 +135,11 @@ type WorkflowRunWaitResult<Status extends TerminalWorkflowRunStatus, Output, Tim
 * });
 * ```
 */
-declare function event(): EventDefinition<
-declare function event<Data>(params?: EventParams<Data>): EventDefinition<Data>;
+declare function event(): EventDefinition<void>;
+declare function event<Data extends Serializable>(params?: EventParams<Data>): EventDefinition<Data>;
 interface EventParams<Data> {
 schema?: Schema<Data>;
 }
-interface Schema<Data> {
-parse: (data: unknown) => Data;
-}
 interface EventDefinition<Data> {
 _type: Data;
 schema?: Schema<Data>;
@@ -156,24 +157,34 @@ type EventSenders<TEventsDefinition extends EventsDefinition> = {
 [K in keyof TEventsDefinition]: EventSender<EventData<TEventsDefinition[K]>>;
 };
 interface EventSender<Data> {
-
+with(): EventSenderBuilder<Data>;
+send: (...args: Data extends void ? [] : [Data]) => Promise<void>;
+}
+interface EventSenderBuilder<Data> {
+opt<Path extends PathFromObject<EventSendOptions>>(path: Path, value: TypeOfValueAtPath<EventSendOptions, Path>): EventSenderBuilder<Data>;
+send: (...args: Data extends void ? [] : [Data]) => Promise<void>;
 }
 type EventMulticasters<TEventsDefinition extends EventsDefinition> = {
 [K in keyof TEventsDefinition]: EventMulticaster<EventData<TEventsDefinition[K]>>;
 };
 interface EventMulticaster<Data> {
-
+with(): EventMulticasterBuilder<Data>;
+send: <AppContext>(client: Client<AppContext>, runId: string | string[], ...args: Data extends void ? [] : [Data]) => Promise<void>;
+}
+interface EventMulticasterBuilder<Data> {
+opt<Path extends PathFromObject<EventSendOptions>>(path: Path, value: TypeOfValueAtPath<EventSendOptions, Path>): EventMulticasterBuilder<Data>;
+send: <AppContext>(client: Client<AppContext>, runId: string | string[], ...args: Data extends void ? [] : [Data]) => Promise<void>;
 }
 declare function createEventWaiters<TEventsDefinition extends EventsDefinition>(handle: WorkflowRunHandle<unknown, unknown, unknown, TEventsDefinition>, eventsDefinition: TEventsDefinition, logger: Logger): EventWaiters<TEventsDefinition>;
 declare function createEventSenders<TEventsDefinition extends EventsDefinition>(api: ApiClient, workflowRunId: string, eventsDefinition: TEventsDefinition, logger: Logger, onSend: (run: WorkflowRun<unknown, unknown>) => void): EventSenders<TEventsDefinition>;
 
 interface WorkflowRunContext<Input, AppContext, TEventDefinition extends EventsDefinition> {
 id: WorkflowRunId;
-
-
+name: WorkflowName;
+versionId: WorkflowVersionId;
 options: WorkflowOptions;
 logger: Logger;
-sleep: (
+sleep: (name: string, duration: Duration) => Promise<SleepResult>;
 events: EventWaiters<TEventDefinition>;
 [INTERNAL]: {
 handle: WorkflowRunHandle<Input, unknown, AppContext, TEventDefinition>;
@@ -229,45 +240,50 @@ interface ChildWorkflowRunWaitOptions<Timed extends boolean> {
 }
 
 interface WorkflowVersionParams<Input, Output, AppContext, TEventsDefinition extends EventsDefinition> {
-handler: (
+handler: (run: Readonly<WorkflowRunContext<Input, AppContext, TEventsDefinition>>, input: Input, context: AppContext) => Promise<Output>;
 events?: TEventsDefinition;
 opts?: WorkflowOptions;
-
-
-
-
-startAsChild: WorkflowVersion<Input, Output, AppContext, TEventsDefinition>["startAsChild"];
+schema?: RequireAtLeastOneProp<{
+input?: Schema<Input>;
+output?: Schema<Output>;
+}>;
 }
 interface WorkflowVersion<Input, Output, AppContext, TEventsDefinition extends EventsDefinition = EventsDefinition> {
-
+name: WorkflowName;
 versionId: WorkflowVersionId;
 events: EventMulticasters<TEventsDefinition>;
 with(): WorkflowBuilder<Input, Output, AppContext, TEventsDefinition>;
-start: (client: Client<AppContext>, ...args: Input extends
-startAsChild: <ParentInput, ParentEventsDefinition extends EventsDefinition>(parentRun: WorkflowRunContext<ParentInput, AppContext, ParentEventsDefinition>, ...args: Input extends
+start: (client: Client<AppContext>, ...args: Input extends void ? [] : [Input]) => Promise<WorkflowRunHandle<Input, Output, AppContext, TEventsDefinition>>;
+startAsChild: <ParentInput, ParentEventsDefinition extends EventsDefinition>(parentRun: WorkflowRunContext<ParentInput, AppContext, ParentEventsDefinition>, ...args: Input extends void ? [] : [Input]) => Promise<ChildWorkflowRunHandle<Input, Output, AppContext, TEventsDefinition>>;
 getHandle: (client: Client<AppContext>, runId: WorkflowRunId) => Promise<WorkflowRunHandle<Input, Output, AppContext, TEventsDefinition>>;
 [INTERNAL]: {
 eventsDefinition: TEventsDefinition;
-handler: (
+handler: (run: WorkflowRunContext<Input, AppContext, TEventsDefinition>, input: Input, context: AppContext) => Promise<void>;
 };
 }
+interface WorkflowBuilder<Input, Output, AppContext, TEventsDefinition extends EventsDefinition> {
+opt<Path extends PathFromObject<WorkflowOptions>>(path: Path, value: TypeOfValueAtPath<WorkflowOptions, Path>): WorkflowBuilder<Input, Output, AppContext, TEventsDefinition>;
+start: WorkflowVersion<Input, Output, AppContext, TEventsDefinition>["start"];
+startAsChild: WorkflowVersion<Input, Output, AppContext, TEventsDefinition>["startAsChild"];
+}
 declare class WorkflowVersionImpl<Input, Output, AppContext, TEventsDefinition extends EventsDefinition> implements WorkflowVersion<Input, Output, AppContext, TEventsDefinition> {
-readonly
+readonly name: WorkflowName;
 readonly versionId: WorkflowVersionId;
 private readonly params;
 readonly events: EventMulticasters<TEventsDefinition>;
 readonly [INTERNAL]: WorkflowVersion<Input, Output, AppContext, TEventsDefinition>[typeof INTERNAL];
-constructor(
+constructor(name: WorkflowName, versionId: WorkflowVersionId, params: WorkflowVersionParams<Input, Output, AppContext, TEventsDefinition>);
 with(): WorkflowBuilder<Input, Output, AppContext, TEventsDefinition>;
-start(client: Client<AppContext>, ...args: Input extends
-startAsChild
+start(client: Client<AppContext>, ...args: Input extends void ? [] : [Input]): Promise<WorkflowRunHandle<Input, Output, AppContext, TEventsDefinition>>;
+startAsChild(parentRun: WorkflowRunContext<unknown, AppContext, EventsDefinition>, ...args: Input extends void ? [] : [Input]): Promise<ChildWorkflowRunHandle<Input, Output, AppContext, TEventsDefinition>>;
+private assertUniqueChildRunReferenceId;
 getHandle(client: Client<AppContext>, runId: WorkflowRunId): Promise<WorkflowRunHandle<Input, Output, AppContext, TEventsDefinition>>;
 private handler;
 private tryExecuteWorkflow;
 private assertRetryAllowed;
+private parse;
 private createFailedState;
 private createAwaitingRetryState;
-private getPath;
 }
 
 declare function workflowRegistry(): WorkflowRegistry;
@@ -279,13 +295,10 @@ interface WorkflowRegistry {
 removeMany: (workflows: Workflow$1[]) => WorkflowRegistry;
 removeAll: () => WorkflowRegistry;
 getAll(): Workflow$1[];
-get: (
+get: (name: WorkflowName, versionId: WorkflowVersionId) => Workflow$1 | undefined;
 }
 
-
-spinThresholdMs: number;
-}
-declare function createSleeper(handle: WorkflowRunHandle<unknown, unknown, unknown>, logger: Logger, options: SleeperOptions): (params: SleepParams) => Promise<SleepResult>;
+declare function createSleeper(handle: WorkflowRunHandle<unknown, unknown, unknown>, logger: Logger): (name: string, duration: Duration) => Promise<SleepResult>;
 
 /**
 * Defines a durable workflow with versioning and multiple task execution.
@@ -295,17 +308,17 @@ declare function createSleeper(handle: WorkflowRunHandle<unknown, unknown, unkno
 * Multiple versions of a workflow can run simultaneously, allowing safe deployments.
 *
 * @param params - Workflow configuration
-* @param params.
+* @param params.name - Unique workflow name used for identification and routing
 * @returns Workflow instance with version management methods
 *
 * @example
 * ```typescript
 * // Define a workflow
-* export const userOnboarding = workflow({
+* export const userOnboarding = workflow({ name: "user-onboarding" });
 *
 * // Define version 1.0
-* export const userOnboardingV1 = userOnboarding.v("1.0", {
-* async handler(input: { email: string }
+* export const userOnboardingV1 = userOnboarding.v("1.0.0", {
+* async handler(run, input: { email: string }) {
 * run.logger.info("Starting onboarding", { email: input.email });
 *
 * // Execute tasks
@@ -313,7 +326,7 @@ declare function createSleeper(handle: WorkflowRunHandle<unknown, unknown, unkno
 * await createUserProfile.start(run, { email: input.email });
 *
 * // Durable sleep
-* await run.sleep(
+* await run.sleep("onboarding-delay", { days: 1 });
 *
 * // More tasks
 * await sendUsageTips.start(run, { email: input.email });
@@ -323,8 +336,8 @@ declare function createSleeper(handle: WorkflowRunHandle<unknown, unknown, unkno
 * });
 *
 * // Deploy version 2.0 alongside 1.0 (no downtime)
-* export const userOnboardingV2 = userOnboarding.v("2.0", {
-* async handler(input: { email: string; trial: boolean }
+* export const userOnboardingV2 = userOnboarding.v("2.0.0", {
+* async handler(run, input: { email: string; trial: boolean }) {
 * // Enhanced version with different logic
 * // Existing v1.0 workflows continue with their version
 * // New workflows use v2.0
@@ -336,10 +349,10 @@ declare function createSleeper(handle: WorkflowRunHandle<unknown, unknown, unkno
 */
 declare function workflow(params: WorkflowParams): Workflow;
 interface WorkflowParams {
-
+name: string;
 }
 interface Workflow {
-
+name: WorkflowName;
 v: <Input extends Serializable, Output extends Serializable, AppContext = null, TEventsDefinition extends EventsDefinition = Record<string, never>>(versionId: string, params: WorkflowVersionParams<Input, Output, AppContext, TEventsDefinition>) => WorkflowVersion<Input, Output, AppContext, TEventsDefinition>;
 [INTERNAL]: {
 getAllVersions: () => WorkflowVersion<unknown, unknown, unknown>[];
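The new typings add a `.with().opt(path, value)` builder to event senders and multicasters and an optional `schema` block (`input`/`output`) on workflow version params, with `Schema` now imported from `@aikirun/types/validator`. The sketch below is one possible way to use those declarations; it assumes `event` is exported from the package entry point, that any validator exposing a `parse(data)` method (a zod schema is used here purely as an example) satisfies the `Schema` shape, since the removed local `Schema` interface required only `parse`, and that `"reference.id"` is a valid option path on `EventSendOptions`.

```typescript
import type { Client } from "@aikirun/types/client";
import { event, workflow } from "@aikirun/workflow";
import { z } from "zod"; // hypothetical validator choice; anything exposing parse(data) fits

// Event payloads can be validated with any Schema-compatible object.
const approvalEvent = event({ schema: z.object({ approvedBy: z.string() }) });

export const reviewWorkflow = workflow({ name: "document-review" });

export const reviewWorkflowV1 = reviewWorkflow.v("1.0.0", {
  events: { approval: approvalEvent },
  // New in these typings: optional input/output schemas on a workflow version.
  schema: {
    input: z.object({ documentId: z.string() }),
    output: z.object({ approved: z.boolean() }),
  },
  async handler(run, input: { documentId: string }) {
    run.logger.info("Waiting for approval", { documentId: input.documentId });
    const result = await run.events.approval.wait({ timeout: { hours: 4 } });
    return { approved: !result.timeout };
  },
});

// Multicast an event to one or more runs, setting send options through the
// new builder; the "reference.id" path is assumed, not confirmed by the diff.
export async function approveRuns<AppContext>(aikiClient: Client<AppContext>, runIds: string[]) {
  await reviewWorkflowV1.events.approval
    .with()
    .opt("reference.id", "approval-batch-7")
    .send(aikiClient, runIds, { approvedBy: "alice" });
}
```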
package/dist/index.js
CHANGED

@@ -3,15 +3,15 @@ function workflowRegistry() {
 return new WorkflowRegistryImpl();
 }
 var WorkflowRegistryImpl = class {
-
+workflowsByName = /* @__PURE__ */ new Map();
 add(workflow2) {
-const workflows = this.
+const workflows = this.workflowsByName.get(workflow2.name);
 if (!workflows) {
-this.
+this.workflowsByName.set(workflow2.name, /* @__PURE__ */ new Map([[workflow2.versionId, workflow2]]));
 return this;
 }
 if (workflows.has(workflow2.versionId)) {
-throw new Error(`Workflow "${workflow2.
+throw new Error(`Workflow "${workflow2.name}/${workflow2.versionId}" is already registered`);
 }
 workflows.set(workflow2.versionId, workflow2);
 return this;
@@ -23,7 +23,7 @@ var WorkflowRegistryImpl = class {
 return this;
 }
 remove(workflow2) {
-const workflowVersinos = this.
+const workflowVersinos = this.workflowsByName.get(workflow2.name);
 if (workflowVersinos) {
 workflowVersinos.delete(workflow2.versionId);
 }
@@ -36,20 +36,20 @@ var WorkflowRegistryImpl = class {
 return this;
 }
 removeAll() {
-this.
+this.workflowsByName.clear();
 return this;
 }
 getAll() {
 const workflows = [];
-for (const workflowVersions of this.
+for (const workflowVersions of this.workflowsByName.values()) {
 for (const workflow2 of workflowVersions.values()) {
 workflows.push(workflow2);
 }
 }
 return workflows;
 }
-get(
-return this.
+get(name, versionId) {
+return this.workflowsByName.get(name)?.get(versionId);
 }
 };
 
@@ -77,6 +77,31 @@ function delay(ms, options) {
 });
 }
 
+// ../../lib/json/stable-stringify.ts
+function stableStringify(value) {
+return stringifyValue(value);
+}
+function stringifyValue(value) {
+if (value === null || value === void 0) {
+return "null";
+}
+if (typeof value !== "object") {
+return JSON.stringify(value);
+}
+if (Array.isArray(value)) {
+return `[${value.map(stringifyValue).join(",")}]`;
+}
+const keys = Object.keys(value).sort();
+const pairs = [];
+for (const key of keys) {
+const keyValue = value[key];
+if (keyValue !== void 0) {
+pairs.push(`${JSON.stringify(key)}:${stringifyValue(keyValue)}`);
+}
+}
+return `{${pairs.join(",")}}`;
+}
+
 // ../../lib/crypto/hash.ts
 async function sha256(input) {
 const data = new TextEncoder().encode(input);
@@ -84,6 +109,9 @@ async function sha256(input) {
 const hashArray = Array.from(new Uint8Array(hashBuffer));
 return hashArray.map((b) => b.toString(16).padStart(2, "0")).join("");
 }
+async function hashInput(input) {
+return sha256(stableStringify({ input }));
+}
 
 // ../../lib/duration/convert.ts
 var MS_PER_SECOND = 1e3;
@@ -144,25 +172,6 @@ function createSerializableError(error) {
 };
 }
 
-// ../../lib/json/stable-stringify.ts
-function stableStringify(value) {
-if (value === null || value === void 0) {
-return JSON.stringify(value);
-}
-if (typeof value !== "object") {
-return JSON.stringify(value);
-}
-if (Array.isArray(value)) {
-return `[${value.map((item) => stableStringify(item)).join(",")}]`;
-}
-const keys = Object.keys(value).sort();
-const pairs = keys.map((key) => {
-const val = value[key];
-return `${JSON.stringify(key)}:${stableStringify(val)}`;
-});
-return `{${pairs.join(",")}}`;
-}
-
 // ../../lib/object/overrider.ts
 function set(obj, path, value) {
 const keys = path.split(".");
@@ -281,6 +290,7 @@ function getRetryParams(attempts, strategy) {
 // run/event.ts
 import { INTERNAL } from "@aikirun/types/symbols";
 import {
+WorkflowRunConflictError,
 WorkflowRunFailedError,
 WorkflowRunSuspendedError
 } from "@aikirun/types/workflow-run";
@@ -292,22 +302,22 @@ function event(params) {
 }
 function createEventWaiters(handle, eventsDefinition, logger) {
 const waiters = {};
-for (const [
+for (const [eventName, eventDefinition] of Object.entries(eventsDefinition)) {
 const waiter = createEventWaiter(
 handle,
-
+eventName,
 eventDefinition.schema,
-logger.child({ "aiki.
+logger.child({ "aiki.eventName": eventName })
 );
-waiters[
+waiters[eventName] = waiter;
 }
 return waiters;
 }
-function createEventWaiter(handle,
+function createEventWaiter(handle, eventName, schema, logger) {
 let nextEventIndex = 0;
 async function wait(options) {
 await handle.refresh();
-const events = handle.run.eventsQueue[
+const events = handle.run.eventsQueue[eventName]?.events ?? [];
 const event2 = events[nextEventIndex];
 if (event2) {
 nextEventIndex++;
@@ -320,11 +330,10 @@ function createEventWaiter(handle, eventId, schema, logger) {
 data = schema ? schema.parse(event2.data) : event2.data;
 } catch (error) {
 logger.error("Invalid event data", { data: event2.data, error });
-const serializableError = createSerializableError(error);
 await handle[INTERNAL].transitionState({
 status: "failed",
 cause: "self",
-error:
+error: createSerializableError(error)
 });
 throw new WorkflowRunFailedError(handle.run.id, handle.run.attempts);
 }
@@ -332,75 +341,121 @@ function createEventWaiter(handle, eventId, schema, logger) {
 return { timeout: false, data };
 }
 const timeoutInMs = options?.timeout && toMilliseconds(options.timeout);
-
-
-
-
-
-
-
-
+try {
+await handle[INTERNAL].transitionState({
+status: "awaiting_event",
+eventName,
+timeoutInMs
+});
+logger.info("Waiting for event", {
+...timeoutInMs !== void 0 ? { "aiki.timeoutInMs": timeoutInMs } : {}
+});
+} catch (error) {
+if (error instanceof WorkflowRunConflictError) {
+throw new WorkflowRunSuspendedError(handle.run.id);
+}
+throw error;
+}
 throw new WorkflowRunSuspendedError(handle.run.id);
 }
 return { wait };
 }
 function createEventSenders(api, workflowRunId, eventsDefinition, logger, onSend) {
 const senders = {};
-for (const [
+for (const [eventName, eventDefinition] of Object.entries(eventsDefinition)) {
 const sender = createEventSender(
 api,
 workflowRunId,
-
+eventName,
 eventDefinition.schema,
-logger.child({ "aiki.
+logger.child({ "aiki.eventName": eventName }),
 onSend
 );
-senders[
+senders[eventName] = sender;
 }
 return senders;
 }
-function createEventSender(api, workflowRunId,
-
-
-
-
-
-
-
-
-
-const { run } = await api.workflowRun.sendEventV1({
-id: workflowRunId,
-eventId,
-data,
-options
-});
-onSend(run);
+function createEventSender(api, workflowRunId, eventName, schema, logger, onSend, options) {
+const optsOverrider = objectOverrider(options ?? {});
+const createBuilder = (optsBuilder) => ({
+opt: (path, value) => createBuilder(optsBuilder.with(path, value)),
+send: (...args) => createEventSender(api, workflowRunId, eventName, schema, logger, onSend, optsBuilder.build()).send(...args)
+});
+async function send(...args) {
+const data = isNonEmptyArray(args) ? args[0] : void 0;
+if (schema) {
+schema.parse(data);
 }
+const { run } = await api.workflowRun.sendEventV1({
+id: workflowRunId,
+eventName,
+data,
+options
+});
+onSend(run);
+logger.info("Sent event to workflow", {
+...options?.reference ? { "aiki.referenceId": options.reference.id } : {}
+});
+}
+return {
+with: () => createBuilder(optsOverrider()),
+send
 };
 }
-function createEventMulticasters(eventsDefinition) {
+function createEventMulticasters(workflowName, workflowVersionId, eventsDefinition) {
 const senders = {};
-for (const [
-const sender = createEventMulticaster(
-
+for (const [eventName, eventDefinition] of Object.entries(eventsDefinition)) {
+const sender = createEventMulticaster(
+workflowName,
+workflowVersionId,
+eventName,
+eventDefinition.schema
+);
+senders[eventName] = sender;
 }
 return senders;
 }
-function createEventMulticaster(
-
-
-
-
-
-
-
-
-
-
-
+function createEventMulticaster(workflowName, workflowVersionId, eventName, schema, options) {
+const optsOverrider = objectOverrider(options ?? {});
+const createBuilder = (optsBuilder) => ({
+opt: (path, value) => createBuilder(optsBuilder.with(path, value)),
+send: (client, runId, ...args) => createEventMulticaster(workflowName, workflowVersionId, eventName, schema, optsBuilder.build()).send(
+client,
+runId,
+...args
+)
+});
+async function send(client, runId, ...args) {
+const data = isNonEmptyArray(args) ? args[0] : void 0;
+if (schema) {
+schema.parse(data);
 }
+const runIds = Array.isArray(runId) ? runId : [runId];
+if (!isNonEmptyArray(runIds)) {
+return;
+}
+const logger = client.logger.child({
+"aiki.workflowName": workflowName,
+"aiki.workflowVersionId": workflowVersionId,
+"aiki.eventName": eventName
+});
+await client.api.workflowRun.multicastEventV1({
+ids: runIds,
+eventName,
+data,
+options
+});
+logger.info("Multicasted event to workflows", {
+"aiki.workflowName": workflowName,
+"aiki.workflowVersionId": workflowVersionId,
+"aiki.workflowRunIds": runIds,
+"aiki.eventName": eventName,
+...options?.reference ? { "aiki.referenceId": options.reference.id } : {}
+});
+}
+return {
+with: () => createBuilder(optsOverrider()),
+send
 };
 }
 
@@ -408,6 +463,7 @@ function createEventMulticaster(eventId, schema) {
 import { INTERNAL as INTERNAL2 } from "@aikirun/types/symbols";
 import {
 isTerminalWorkflowRunStatus,
+WorkflowRunConflictError as WorkflowRunConflictError2,
 WorkflowRunNotExecutableError
 } from "@aikirun/types/workflow-run";
 async function workflowRunHandle(client, runOrId, eventsDefinition, logger) {
@@ -417,8 +473,8 @@ async function workflowRunHandle(client, runOrId, eventsDefinition, logger) {
 run,
 eventsDefinition ?? {},
 logger ?? client.logger.child({
-"aiki.
-"aiki.workflowVersionId": run.
+"aiki.workflowName": run.name,
+"aiki.workflowVersionId": run.versionId,
 "aiki.workflowRunId": run.id
 })
 );
@@ -506,43 +562,61 @@ var WorkflowRunHandleImpl = class {
 return { success: false, cause: maybeResult.state };
 }
 async cancel(reason) {
-
+await this.transitionState({ status: "cancelled", reason });
+this.logger.info("Workflow cancelled");
 }
 async pause() {
-
+await this.transitionState({ status: "paused" });
+this.logger.info("Workflow paused");
 }
 async resume() {
-
+await this.transitionState({ status: "scheduled", scheduledInMs: 0, reason: "resume" });
+this.logger.info("Workflow resumed");
 }
 async awake() {
-
+await this.transitionState({ status: "scheduled", scheduledInMs: 0, reason: "awake_early" });
+this.logger.info("Workflow awoken");
 }
 async transitionState(targetState) {
-
-
-
+try {
+if (targetState.status === "scheduled" && (targetState.reason === "new" || targetState.reason === "resume" || targetState.reason === "awake_early") || targetState.status === "paused" || targetState.status === "cancelled") {
+const { run: run2 } = await this.api.workflowRun.transitionStateV1({
+type: "pessimistic",
+id: this.run.id,
+state: targetState
+});
+this._run = run2;
+return;
+}
+const { run } = await this.api.workflowRun.transitionStateV1({
+type: "optimistic",
 id: this.run.id,
-state: targetState
+state: targetState,
+expectedRevision: this.run.revision
 });
-this._run =
-
+this._run = run;
+} catch (error) {
+if (isConflictError(error)) {
+throw new WorkflowRunConflictError2(this.run.id);
+}
+throw error;
+}
+}
+async transitionTaskState(request) {
+try {
+const { run, taskId } = await this.api.workflowRun.transitionTaskStateV1({
+...request,
+id: this.run.id,
+expectedRevision: this.run.revision
+});
+this._run = run;
+return { taskId };
+} catch (error) {
+if (isConflictError(error)) {
+throw new WorkflowRunConflictError2(this.run.id);
+}
+throw error;
 }
-const { run } = await this.api.workflowRun.transitionStateV1({
-type: "optimistic",
-id: this.run.id,
-state: targetState,
-expectedRevision: this.run.revision
-});
-this._run = run;
-}
-async transitionTaskState(taskPath, taskState) {
-const { run } = await this.api.workflowRun.transitionTaskStateV1({
-id: this.run.id,
-taskPath,
-taskState,
-expectedRevision: this.run.revision
-});
-this._run = run;
 }
 assertExecutionAllowed() {
 const status = this.run.state.status;
@@ -551,56 +625,92 @@ var WorkflowRunHandleImpl = class {
 }
 }
 };
+function isConflictError(error) {
+return error != null && typeof error === "object" && "code" in error && error.code === "CONFLICT";
+}
 
 // run/sleeper.ts
 import { INTERNAL as INTERNAL3 } from "@aikirun/types/symbols";
-import { WorkflowRunSuspendedError as WorkflowRunSuspendedError2 } from "@aikirun/types/workflow-run";
+import { WorkflowRunConflictError as WorkflowRunConflictError3, WorkflowRunSuspendedError as WorkflowRunSuspendedError2 } from "@aikirun/types/workflow-run";
 var MAX_SLEEP_YEARS = 10;
 var MAX_SLEEP_MS = MAX_SLEEP_YEARS * 365 * 24 * 60 * 60 * 1e3;
-function createSleeper(handle, logger
-
-
-const
+function createSleeper(handle, logger) {
+const nextSleepIndexByName = {};
+return async (name, duration) => {
+const sleepName = name;
+let durationMs = toMilliseconds(duration);
 if (durationMs > MAX_SLEEP_MS) {
 throw new Error(`Sleep duration ${durationMs}ms exceeds maximum of ${MAX_SLEEP_YEARS} years`);
 }
-const
-const
-
-
-
-
+const nextSleepIndex = nextSleepIndexByName[sleepName] ?? 0;
+const sleepQueue = handle.run.sleepsQueue[sleepName] ?? { sleeps: [] };
+const sleepState = sleepQueue.sleeps[nextSleepIndex];
+if (!sleepState) {
+try {
+await handle[INTERNAL3].transitionState({ status: "sleeping", sleepName, durationMs });
+logger.info("Sleeping", {
+"aiki.sleepName": sleepName,
+"aiki.durationMs": durationMs
+});
+} catch (error) {
+if (error instanceof WorkflowRunConflictError3) {
+throw new WorkflowRunSuspendedError2(handle.run.id);
+}
+throw error;
+}
+throw new WorkflowRunSuspendedError2(handle.run.id);
+}
+if (sleepState.status === "sleeping") {
+logger.debug("Already sleeping", {
+"aiki.sleepName": sleepName,
+"aiki.awakeAt": sleepState.awakeAt
 });
-
+throw new WorkflowRunSuspendedError2(handle.run.id);
 }
+sleepState.status;
+nextSleepIndexByName[sleepName] = nextSleepIndex + 1;
 if (sleepState.status === "cancelled") {
 logger.debug("Sleep cancelled", {
-"aiki.
-"aiki.
+"aiki.sleepName": sleepName,
+"aiki.cancelledAt": sleepState.cancelledAt
 });
 return { cancelled: true };
 }
-if (
-logger.debug("
-"aiki.
-"aiki.durationMs": durationMs
+if (durationMs === sleepState.durationMs) {
+logger.debug("Sleep completed", {
+"aiki.sleepName": sleepName,
+"aiki.durationMs": durationMs,
+"aiki.completedAt": sleepState.completedAt
 });
-
+return { cancelled: false };
 }
-sleepState
-
-
-"aiki.
-"aiki.
+if (durationMs > sleepState.durationMs) {
+logger.warn("Higher sleep duration encountered during replay. Sleeping for remaining duration", {
+"aiki.sleepName": sleepName,
+"aiki.historicDurationMs": sleepState.durationMs,
+"aiki.latestDurationMs": durationMs
+});
+durationMs -= sleepState.durationMs;
+} else {
+logger.warn("Lower sleep duration encountered during replay. Already slept enough", {
+"aiki.sleepName": sleepName,
+"aiki.historicDurationMs": sleepState.durationMs,
+"aiki.latestDurationMs": durationMs
 });
-await delay(durationMs);
 return { cancelled: false };
 }
-
-
-
-
-
+try {
+await handle[INTERNAL3].transitionState({ status: "sleeping", sleepName, durationMs });
+logger.info("Sleeping", {
+"aiki.sleepName": sleepName,
+"aiki.durationMs": durationMs
+});
+} catch (error) {
+if (error instanceof WorkflowRunConflictError3) {
+throw new WorkflowRunSuspendedError2(handle.run.id);
+}
+throw error;
+}
 throw new WorkflowRunSuspendedError2(handle.run.id);
 };
 }
@@ -608,10 +718,16 @@ function createSleeper(handle, logger, options) {
 // workflow.ts
 import { INTERNAL as INTERNAL6 } from "@aikirun/types/symbols";
 
+// ../../lib/path/index.ts
+function getWorkflowRunPath(name, versionId, referenceId) {
+return `${name}/${versionId}/${referenceId}`;
+}
+
 // workflow-version.ts
 import { INTERNAL as INTERNAL5 } from "@aikirun/types/symbols";
 import { TaskFailedError } from "@aikirun/types/task";
 import {
+WorkflowRunConflictError as WorkflowRunConflictError5,
 WorkflowRunFailedError as WorkflowRunFailedError2,
 WorkflowRunSuspendedError as WorkflowRunSuspendedError4
 } from "@aikirun/types/workflow-run";
@@ -620,15 +736,16 @@ import {
 import { INTERNAL as INTERNAL4 } from "@aikirun/types/symbols";
 import {
 isTerminalWorkflowRunStatus as isTerminalWorkflowRunStatus2,
+WorkflowRunConflictError as WorkflowRunConflictError4,
 WorkflowRunSuspendedError as WorkflowRunSuspendedError3
 } from "@aikirun/types/workflow-run";
-async function childWorkflowRunHandle(client,
+async function childWorkflowRunHandle(client, run, parentRun, logger, eventsDefinition) {
 const handle = await workflowRunHandle(client, run, eventsDefinition, logger);
 return {
 run: handle.run,
 events: handle.events,
 refresh: handle.refresh.bind(handle),
-waitForStatus: createStatusWaiter(
+waitForStatus: createStatusWaiter(handle, parentRun, logger),
 cancel: handle.cancel.bind(handle),
 pause: handle.pause.bind(handle),
 resume: handle.resume.bind(handle),
@@ -636,16 +753,18 @@ async function childWorkflowRunHandle(client, path, run, parentRun, logger, even
 [INTERNAL4]: handle[INTERNAL4]
 };
 }
-function createStatusWaiter(
+function createStatusWaiter(handle, parentRun, logger) {
 let nextWaitIndex = 0;
 async function waitForStatus(expectedStatus, options) {
 const parentRunHandle = parentRun[INTERNAL4].handle;
-const waitResults = parentRunHandle.run.childWorkflowRuns[path]?.statusWaitResults ?? [];
+const waitResults = parentRunHandle.run.childWorkflowRuns[handle.run.path]?.statusWaitResults ?? [];
 const waitResult = waitResults[nextWaitIndex];
 if (waitResult) {
 nextWaitIndex++;
 if (waitResult.status === "timeout") {
-logger.debug("Timed out waiting for child workflow status", {
+logger.debug("Timed out waiting for child workflow status", {
+"aiki.childWorkflowExpectedStatus": expectedStatus
+});
 return {
 success: false,
 cause: "timeout"
@@ -658,7 +777,9 @@ function createStatusWaiter(path, handle, parentRun, logger) {
 };
 }
 if (isTerminalWorkflowRunStatus2(waitResult.childWorkflowRunState.status)) {
-logger.debug("Child workflow run reached termnial state"
+logger.debug("Child workflow run reached termnial state", {
+"aiki.childWorkflowTerminalStatus": waitResult.childWorkflowRunState.status
+});
 return {
 success: false,
 cause: "run_terminated"
@@ -673,19 +794,32 @@ function createStatusWaiter(path, handle, parentRun, logger) {
 };
 }
 if (isTerminalWorkflowRunStatus2(state.status)) {
-logger.debug("Child workflow run reached termnial state"
+logger.debug("Child workflow run reached termnial state", {
+"aiki.childWorkflowTerminalStatus": state.status
+});
 return {
 success: false,
 cause: "run_terminated"
 };
 }
 const timeoutInMs = options?.timeout && toMilliseconds(options.timeout);
-
-
-
-
-
-
+try {
+await parentRunHandle[INTERNAL4].transitionState({
+status: "awaiting_child_workflow",
+childWorkflowRunId: handle.run.id,
+childWorkflowRunStatus: expectedStatus,
+timeoutInMs
+});
+logger.info("Waiting for child Workflow", {
+"aiki.childWorkflowExpectedStatus": expectedStatus,
+...timeoutInMs !== void 0 ? { "aiki.timeoutInMs": timeoutInMs } : {}
+});
+} catch (error) {
+if (error instanceof WorkflowRunConflictError4) {
+throw new WorkflowRunSuspendedError3(parentRun.id);
+}
+throw error;
+}
 throw new WorkflowRunSuspendedError3(parentRun.id);
 }
 return waitForStatus;
@@ -693,12 +827,12 @@ function createStatusWaiter(path, handle, parentRun, logger) {
 
 // workflow-version.ts
 var WorkflowVersionImpl = class _WorkflowVersionImpl {
-constructor(
-this.
+constructor(name, versionId, params) {
+this.name = name;
 this.versionId = versionId;
 this.params = params;
 const eventsDefinition = this.params.events ?? {};
-this.events = createEventMulticasters(eventsDefinition);
+this.events = createEventMulticasters(this.name, this.versionId, eventsDefinition);
 this[INTERNAL5] = {
 eventsDefinition,
 handler: this.handler.bind(this)
@@ -711,11 +845,11 @@ var WorkflowVersionImpl = class _WorkflowVersionImpl {
 const createBuilder = (optsBuilder) => {
 return {
 opt: (path, value) => createBuilder(optsBuilder.with(path, value)),
-start: (client, ...args) => new _WorkflowVersionImpl(this.
+start: (client, ...args) => new _WorkflowVersionImpl(this.name, this.versionId, {
 ...this.params,
 opts: optsBuilder.build()
 }).start(client, ...args),
-startAsChild: (parentRun, ...args) => new _WorkflowVersionImpl(this.
+startAsChild: (parentRun, ...args) => new _WorkflowVersionImpl(this.name, this.versionId, {
 ...this.params,
 opts: optsBuilder.build()
 }).startAsChild(parentRun, ...args)
@@ -724,67 +858,109 @@ var WorkflowVersionImpl = class _WorkflowVersionImpl {
 return createBuilder(optsOverrider());
 }
 async start(client, ...args) {
+const inputRaw = isNonEmptyArray(args) ? args[0] : void 0;
+const input = this.params.schema?.input ? this.params.schema.input.parse(inputRaw) : inputRaw;
 const { run } = await client.api.workflowRun.createV1({
-
-
-input
+name: this.name,
+versionId: this.versionId,
+input,
 options: this.params.opts
 });
+client.logger.info("Created workflow", {
+"aiki.workflowName": this.name,
+"aiki.workflowVersionId": this.versionId,
+"aiki.workflowRunId": run.id
+});
 return workflowRunHandle(client, run, this[INTERNAL5].eventsDefinition);
 }
 async startAsChild(parentRun, ...args) {
 const parentRunHandle = parentRun[INTERNAL5].handle;
 parentRunHandle[INTERNAL5].assertExecutionAllowed();
 const { client } = parentRunHandle[INTERNAL5];
-const
-const
-const
-
-
+const inputRaw = isNonEmptyArray(args) ? args[0] : void 0;
+const input = await this.parse(parentRunHandle, this.params.schema?.input, inputRaw);
+const inputHash = await hashInput(input);
+const reference = this.params.opts?.reference;
+const path = getWorkflowRunPath(this.name, this.versionId, reference?.id ?? inputHash);
+const existingRunInfo = parentRunHandle.run.childWorkflowRuns[path];
+if (existingRunInfo) {
+await this.assertUniqueChildRunReferenceId(
+parentRunHandle,
+existingRunInfo,
+inputHash,
+reference,
+parentRun.logger
+);
+const { run: existingRun } = await client.api.workflowRun.getByIdV1({ id: existingRunInfo.id });
+if (existingRun.state.status === "completed") {
+await this.parse(parentRunHandle, this.params.schema?.output, existingRun.state.output);
+}
 const logger2 = parentRun.logger.child({
-"aiki.
-"aiki.childWorkflowVersionId":
-"aiki.childWorkflowRunId":
+"aiki.childWorkflowName": existingRun.name,
+"aiki.childWorkflowVersionId": existingRun.versionId,
+"aiki.childWorkflowRunId": existingRun.id
 });
 return childWorkflowRunHandle(
 client,
-
-existingChildRun,
+existingRun,
 parentRun,
 logger2,
 this[INTERNAL5].eventsDefinition
 );
 }
-const { run:
-
-
+const { run: newRun } = await client.api.workflowRun.createV1({
+name: this.name,
+versionId: this.versionId,
 input,
-path: childRunPath,
 parentWorkflowRunId: parentRun.id,
-options:
-...this.params.opts,
-idempotencyKey: childRunPath
-}
+options: this.params.opts
 });
-parentRunHandle.run.childWorkflowRuns[
+parentRunHandle.run.childWorkflowRuns[path] = {
+id: newRun.id,
+inputHash,
+statusWaitResults: []
+};
 const logger = parentRun.logger.child({
-"aiki.
-"aiki.childWorkflowVersionId":
-"aiki.childWorkflowRunId":
+"aiki.childWorkflowName": newRun.name,
+"aiki.childWorkflowVersionId": newRun.versionId,
+"aiki.childWorkflowRunId": newRun.id
 });
+logger.info("Created child workflow");
 return childWorkflowRunHandle(
 client,
-
-newChildRun,
+newRun,
 parentRun,
 logger,
 this[INTERNAL5].eventsDefinition
 );
 }
+async assertUniqueChildRunReferenceId(parentRunHandle, existingRunInfo, inputHash, reference, logger) {
+if (existingRunInfo.inputHash !== inputHash && reference) {
+const onConflict = reference.onConflict ?? "error";
+if (onConflict !== "error") {
+return;
+}
+logger.error("Reference ID already used by another child workflow", {
+"aiki.referenceId": reference.id,
+"aiki.existingChildWorkflowRunId": existingRunInfo.id
+});
+const error = new WorkflowRunFailedError2(
+parentRunHandle.run.id,
+parentRunHandle.run.attempts,
+`Reference ID "${reference.id}" already used by another child workflow run ${existingRunInfo.id}`
+);
+await parentRunHandle[INTERNAL5].transitionState({
+status: "failed",
+cause: "self",
+error: createSerializableError(error)
+});
+throw error;
+}
+}
 async getHandle(client, runId) {
 return workflowRunHandle(client, runId, this[INTERNAL5].eventsDefinition);
 }
-async handler(
+async handler(run, input, context) {
 const { logger } = run;
 const { handle } = run[INTERNAL5];
 handle[INTERNAL5].assertExecutionAllowed();
@@ -800,14 +976,16 @@ var WorkflowVersionImpl = class _WorkflowVersionImpl {
 logger.info("Workflow complete");
 }
 async tryExecuteWorkflow(input, run, context, retryStrategy) {
+const { handle } = run[INTERNAL5];
 while (true) {
 try {
-
+const outputRaw = await this.params.handler(run, input, context);
+const output = await this.parse(handle, this.params.schema?.output, outputRaw);
+return output;
 } catch (error) {
-if (error instanceof WorkflowRunSuspendedError4 || error instanceof WorkflowRunFailedError2) {
+if (error instanceof WorkflowRunSuspendedError4 || error instanceof WorkflowRunFailedError2 || error instanceof WorkflowRunConflictError5) {
 throw error;
 }
-const { handle } = run[INTERNAL5];
 const attempts = handle.run.attempts;
 const retryParams = getRetryParams(attempts, retryStrategy);
 if (!retryParams.retriesLeft) {
@@ -844,28 +1022,41 @@ var WorkflowVersionImpl = class _WorkflowVersionImpl {
 if (!retryParams.retriesLeft) {
 logger.error("Workflow retry not allowed", { "aiki.attempts": attempts });
 const error = new WorkflowRunFailedError2(id, attempts);
-const serializableError = createSerializableError(error);
 await handle[INTERNAL5].transitionState({
 status: "failed",
 cause: "self",
-error:
+error: createSerializableError(error)
 });
 throw error;
 }
 }
+async parse(handle, schema, data) {
+if (!schema) {
+return data;
+}
+try {
+return schema.parse(data);
+} catch (error) {
+await handle[INTERNAL5].transitionState({
+status: "failed",
+cause: "self",
+error: createSerializableError(error)
+});
+throw new WorkflowRunFailedError2(handle.run.id, handle.run.attempts);
+}
+}
 createFailedState(error) {
 if (error instanceof TaskFailedError) {
 return {
 status: "failed",
 cause: "task",
-
+taskId: error.taskId
 };
 }
-const serializableError = createSerializableError(error);
 return {
 status: "failed",
 cause: "self",
-error:
+error: createSerializableError(error)
 };
 }
 createAwaitingRetryState(error, nextAttemptInMs) {
@@ -874,22 +1065,16 @@ var WorkflowVersionImpl = class _WorkflowVersionImpl {
 status: "awaiting_retry",
 cause: "task",
 nextAttemptInMs,
-
+taskId: error.taskId
 };
 }
-const serializableError = createSerializableError(error);
 return {
 status: "awaiting_retry",
 cause: "self",
 nextAttemptInMs,
-error:
+error: createSerializableError(error)
 };
 }
-async getPath(input) {
-const inputHash = await sha256(stableStringify(input));
-const path = this.params.opts?.idempotencyKey ? `${this.id}/${this.versionId}/${inputHash}/${this.params.opts.idempotencyKey}` : `${this.id}/${this.versionId}/${inputHash}`;
-return path;
-}
 };
 
 // workflow.ts
@@ -897,11 +1082,11 @@ function workflow(params) {
 return new WorkflowImpl(params);
 }
 var WorkflowImpl = class {
-
+name;
 [INTERNAL6];
 workflowVersions = /* @__PURE__ */ new Map();
 constructor(params) {
-this.
+this.name = params.name;
 this[INTERNAL6] = {
 getAllVersions: this.getAllVersions.bind(this),
 getVersion: this.getVersion.bind(this)
@@ -909,9 +1094,9 @@ var WorkflowImpl = class {
 }
 v(versionId, params) {
 if (this.workflowVersions.has(versionId)) {
-throw new Error(`Workflow "${this.
+throw new Error(`Workflow "${this.name}/${versionId}" already exists`);
 }
-const workflowVersion = new WorkflowVersionImpl(this.
+const workflowVersion = new WorkflowVersionImpl(this.name, versionId, params);
 this.workflowVersions.set(
 versionId,
 workflowVersion
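In the bundled runtime above, child workflow runs are memoized under a path built from the workflow name, the version id, and either the configured reference id or a SHA-256 hash of a stable, key-sorted JSON encoding of the input (undefined values are dropped). The standalone sketch below re-creates that scheme from the `stableStringify`, `hashInput`, and `getWorkflowRunPath` helpers visible in the diff; it is for illustration only, is not exported by the package, and assumes a runtime where the Web Crypto API is available on the global `crypto`, which is what the bundle itself relies on.

```typescript
// Stand-in for the bundled sha256 helper: hex-encode a SHA-256 digest.
async function sha256Hex(input: string): Promise<string> {
  const data = new TextEncoder().encode(input);
  const hashBuffer = await crypto.subtle.digest("SHA-256", data);
  return Array.from(new Uint8Array(hashBuffer))
    .map((b) => b.toString(16).padStart(2, "0"))
    .join("");
}

// Keys are sorted and undefined values dropped, so the same logical input
// always serializes (and therefore hashes) identically across replays.
function stableStringify(value: unknown): string {
  if (value === null || value === undefined) return "null";
  if (typeof value !== "object") return JSON.stringify(value);
  if (Array.isArray(value)) return `[${value.map(stableStringify).join(",")}]`;
  const pairs: string[] = [];
  for (const key of Object.keys(value).sort()) {
    const keyValue = (value as Record<string, unknown>)[key];
    if (keyValue !== undefined) {
      pairs.push(`${JSON.stringify(key)}:${stableStringify(keyValue)}`);
    }
  }
  return `{${pairs.join(",")}}`;
}

// Child runs are looked up under `${name}/${versionId}/${referenceId ?? inputHash}`,
// so replaying the same parent step reuses the child instead of creating a duplicate.
async function childRunPath(name: string, versionId: string, input: unknown, referenceId?: string) {
  const inputHash = await sha256Hex(stableStringify({ input }));
  return `${name}/${versionId}/${referenceId ?? inputHash}`;
}
```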
package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
 "name": "@aikirun/workflow",
-"version": "0.
+"version": "0.9.0",
 "description": "Workflow SDK for Aiki - define durable workflows with tasks, sleeps, waits, and event handling",
 "type": "module",
 "main": "./dist/index.js",
@@ -18,7 +18,7 @@
 "build": "tsup"
 },
 "dependencies": {
-"@aikirun/types": "0.
+"@aikirun/types": "0.9.0"
 },
 "publishConfig": {
 "access": "public"
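The only runtime dependency, `@aikirun/types`, moves to 0.9.0 in lockstep with this release; the companion packages used in the README examples (`@aikirun/client`, `@aikirun/worker`) are presumably expected to be upgraded together, though the diff itself only confirms the `@aikirun/types` bump.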