@aikirun/workflow 0.5.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +258 -0
- package/dist/index.d.ts +189 -0
- package/dist/index.js +611 -0
- package/package.json +27 -0
package/README.md
ADDED
|
@@ -0,0 +1,258 @@
|
|
|
1
|
+
# @aikirun/workflow
|
|
2
|
+
|
|
3
|
+
Workflow SDK for Aiki durable execution platform - define durable workflows with tasks, sleeps, waits, and event handling.
|
|
4
|
+
|
|
5
|
+
## Installation
|
|
6
|
+
|
|
7
|
+
```bash
|
|
8
|
+
npm install @aikirun/workflow
|
|
9
|
+
```
|
|
10
|
+
|
|
11
|
+
## Quick Start
|
|
12
|
+
|
|
13
|
+
### Define a Workflow
|
|
14
|
+
|
|
15
|
+
```typescript
|
|
16
|
+
import { workflow } from "@aikirun/workflow";
|
|
17
|
+
import { markUserVerified, sendUsageTips, sendVerificationEmail } from "./tasks.ts";
|
|
18
|
+
|
|
19
|
+
export const onboardingWorkflow = workflow({ id: "user-onboarding" });
|
|
20
|
+
|
|
21
|
+
export const onboardingWorkflowV1 = onboardingWorkflow.v("1.0", {
|
|
22
|
+
async handler(input: { email: string }, run) {
|
|
23
|
+
run.logger.info("Starting onboarding", { email: input.email });
|
|
24
|
+
|
|
25
|
+
// Execute a task to send verification email
|
|
26
|
+
await sendVerificationEmail.start(run, { email: input.email });
|
|
27
|
+
|
|
28
|
+
// Execute task to mark user as verified
|
|
29
|
+
// (In a real scenario, this would be triggered by an external event)
|
|
30
|
+
await markUserVerified.start(run, { email: input.email });
|
|
31
|
+
|
|
32
|
+
// Sleep for 24 hours before sending tips
|
|
33
|
+
await run.sleep({ id: "onboarding-delay", days: 1 });
|
|
34
|
+
|
|
35
|
+
// Send usage tips
|
|
36
|
+
await sendUsageTips.start(run, { email: input.email });
|
|
37
|
+
|
|
38
|
+
return { success: true, userId: input.email };
|
|
39
|
+
},
|
|
40
|
+
});
|
|
41
|
+
```
|
|
42
|
+
|
|
43
|
+
## Features
|
|
44
|
+
|
|
45
|
+
- **Durable Execution** - Automatically survives crashes and restarts
|
|
46
|
+
- **Task Orchestration** - Coordinate multiple tasks in sequence
|
|
47
|
+
- **Durable Sleep** - Sleep without consuming resources or blocking workers
|
|
48
|
+
- **State Snapshots** - Automatically save state at each step
|
|
49
|
+
- **Error Handling** - Built-in retry and recovery mechanisms
|
|
50
|
+
- **Multiple Versions** - Run different workflow versions simultaneously
|
|
51
|
+
- **Logging** - Built-in structured logging for debugging
|
|
52
|
+
|
|
53
|
+
## Workflow Primitives
|
|
54
|
+
|
|
55
|
+
### Execute Tasks
|
|
56
|
+
|
|
57
|
+
```typescript
|
|
58
|
+
const result = await createUserProfile.start(run, {
|
|
59
|
+
email: input.email,
|
|
60
|
+
});
|
|
61
|
+
```
|
|
62
|
+
|
|
63
|
+
### Sleep for a Duration
|
|
64
|
+
|
|
65
|
+
```typescript
|
|
66
|
+
// Sleep requires a unique id for memoization
|
|
67
|
+
await run.sleep({ id: "daily-delay", days: 1 });
|
|
68
|
+
await run.sleep({ id: "processing-delay", hours: 2, minutes: 30 });
|
|
69
|
+
await run.sleep({ id: "short-pause", seconds: 30 });
|
|
70
|
+
```
|
|
71
|
+
|
|
72
|
+
### Sleep Cancellation
|
|
73
|
+
|
|
74
|
+
Sleeps can be cancelled externally via the `wake()` method:
|
|
75
|
+
|
|
76
|
+
```typescript
|
|
77
|
+
const handle = await myWorkflow.start(client, input);
|
|
78
|
+
await handle.wake(); // Wakes the workflow if sleeping
|
|
79
|
+
```
|
|
80
|
+
|
|
81
|
+
The sleep returns a result indicating whether it was cancelled:
|
|
82
|
+
|
|
83
|
+
```typescript
|
|
84
|
+
const { cancelled } = await run.sleep({ id: "wait-period", hours: 1 });
|
|
85
|
+
if (cancelled) {
|
|
86
|
+
// Handle early wake-up
|
|
87
|
+
}
|
|
88
|
+
```
|
|
89
|
+
|
|
90
|
+
### Get Workflow State
|
|
91
|
+
|
|
92
|
+
```typescript
|
|
93
|
+
const { state } = await run.handle.getState();
|
|
94
|
+
console.log("Workflow status:", state.status);
|
|
95
|
+
```
|
|
96
|
+
|
|
97
|
+
### Logging
|
|
98
|
+
|
|
99
|
+
```typescript
|
|
100
|
+
run.logger.info("Processing user", { email: input.email });
|
|
101
|
+
run.logger.debug("User created", { userId: result.userId });
|
|
102
|
+
```
|
|
103
|
+
|
|
104
|
+
## Workflow Options
|
|
105
|
+
|
|
106
|
+
### Delayed Trigger
|
|
107
|
+
|
|
108
|
+
```typescript
|
|
109
|
+
export const morningWorkflowV1 = morningWorkflow.v("1.0", {
|
|
110
|
+
// ... workflow definition
|
|
111
|
+
opts: {
|
|
112
|
+
trigger: {
|
|
113
|
+
type: "delayed",
|
|
114
|
+
delay: { seconds: 5 }, // or: delay: 5000
|
|
115
|
+
},
|
|
116
|
+
},
|
|
117
|
+
});
|
|
118
|
+
```
|
|
119
|
+
|
|
120
|
+
### Retry Strategy
|
|
121
|
+
|
|
122
|
+
```typescript
|
|
123
|
+
export const paymentWorkflowV1 = paymentWorkflow.v("1.0", {
|
|
124
|
+
// ... workflow definition
|
|
125
|
+
opts: {
|
|
126
|
+
retry: {
|
|
127
|
+
type: "exponential",
|
|
128
|
+
maxAttempts: 3,
|
|
129
|
+
baseDelayMs: 1000,
|
|
130
|
+
maxDelayMs: 10000,
|
|
131
|
+
},
|
|
132
|
+
},
|
|
133
|
+
});
|
|
134
|
+
```
|
|
135
|
+
|
|
136
|
+
### Idempotency Key
|
|
137
|
+
|
|
138
|
+
```typescript
|
|
139
|
+
export const orderWorkflowV1 = orderWorkflow.v("1.0", {
|
|
140
|
+
// ... workflow definition
|
|
141
|
+
opts: {
|
|
142
|
+
idempotencyKey: "order-${orderId}",
|
|
143
|
+
},
|
|
144
|
+
});
|
|
145
|
+
```
|
|
146
|
+
|
|
147
|
+
## Running Workflows
|
|
148
|
+
|
|
149
|
+
With the client:
|
|
150
|
+
|
|
151
|
+
```typescript
|
|
152
|
+
import { client } from "@aikirun/client";
|
|
153
|
+
import { onboardingWorkflowV1 } from "./workflows.ts";
|
|
154
|
+
|
|
155
|
+
const aikiClient = await client({
|
|
156
|
+
url: "http://localhost:9090",
|
|
157
|
+
redis: { host: "localhost", port: 6379 },
|
|
158
|
+
});
|
|
159
|
+
|
|
160
|
+
const handle = await onboardingWorkflowV1.start(aikiClient, {
|
|
161
|
+
email: "user@example.com",
|
|
162
|
+
});
|
|
163
|
+
|
|
164
|
+
// Wait for completion
|
|
165
|
+
const result = await handle.wait(
|
|
166
|
+
{ type: "status", status: "completed" },
|
|
167
|
+
{ maxDurationMs: 60 * 1000, pollIntervalMs: 5_000 },
|
|
168
|
+
);
|
|
169
|
+
|
|
170
|
+
if (result.success) {
|
|
171
|
+
console.log("Workflow completed!", result.state);
|
|
172
|
+
} else {
|
|
173
|
+
console.log("Workflow did not complete:", result.cause);
|
|
174
|
+
}
|
|
175
|
+
```
|
|
176
|
+
|
|
177
|
+
With a worker:
|
|
178
|
+
|
|
179
|
+
```typescript
|
|
180
|
+
import { worker } from "@aikirun/worker";
|
|
181
|
+
|
|
182
|
+
const aikiWorker = worker({
|
|
183
|
+
id: "my-worker",
|
|
184
|
+
workflows: [onboardingWorkflowV1],
|
|
185
|
+
opts: {
|
|
186
|
+
maxConcurrentWorkflowRuns: 10,
|
|
187
|
+
},
|
|
188
|
+
});
|
|
189
|
+
|
|
190
|
+
await aikiWorker.spawn(aikiClient);
|
|
191
|
+
```
|
|
192
|
+
|
|
193
|
+
## Execution Context
|
|
194
|
+
|
|
195
|
+
The `run` parameter provides access to:
|
|
196
|
+
|
|
197
|
+
```typescript
|
|
198
|
+
interface WorkflowRunContext<Input, Output> {
|
|
199
|
+
id: WorkflowRunId; // Unique run ID
|
|
200
|
+
  workflowId: WorkflowId; // Workflow id
|
|
201
|
+
  workflowVersionId: WorkflowVersionId; // Version ID
|
|
202
|
+
options: WorkflowOptions; // Execution options (trigger, retry, idempotencyKey)
|
|
203
|
+
handle: WorkflowRunHandle<Input, Output>; // Advanced state management
|
|
204
|
+
logger: Logger; // Logging (info, debug, warn, error, trace)
|
|
205
|
+
sleep(params: SleepParams): Promise<SleepResult>; // Durable sleep
|
|
206
|
+
}
|
|
207
|
+
```
|
|
208
|
+
|
|
209
|
+
Sleep parameters:
|
|
210
|
+
- `id` (required): Unique identifier for memoization
|
|
211
|
+
- Duration fields: `days`, `hours`, `minutes`, `seconds`, `milliseconds`
|
|
212
|
+
|
|
213
|
+
Example: `run.sleep({ id: "my-sleep", days: 1, hours: 2 })`
|
|
214
|
+
|
|
215
|
+
## Error Handling
|
|
216
|
+
|
|
217
|
+
Workflows handle errors gracefully:
|
|
218
|
+
|
|
219
|
+
```typescript
|
|
220
|
+
try {
|
|
221
|
+
await risky.start(run, input);
|
|
222
|
+
} catch (error) {
|
|
223
|
+
run.logger.error("Task failed", { error: error.message });
|
|
224
|
+
// Workflow can decide how to proceed
|
|
225
|
+
}
|
|
226
|
+
```
|
|
227
|
+
|
|
228
|
+
Failed workflows transition to `awaiting_retry` state and are automatically retried by the server.
|
|
229
|
+
|
|
230
|
+
### Expected Errors
|
|
231
|
+
|
|
232
|
+
`WorkflowRunSuspendedError` is thrown when a workflow suspends (e.g., during sleep).
|
|
233
|
+
This is expected behavior - the worker catches this error and the workflow resumes
|
|
234
|
+
when the sleep completes. Do not catch this error in workflow code.
|
|
235
|
+
|
|
236
|
+
## Best Practices
|
|
237
|
+
|
|
238
|
+
1. **Keep Workflows Deterministic** - Same input should always produce same output
|
|
239
|
+
2. **Expect Replays** - Code may execute multiple times during retries
|
|
240
|
+
3. **Use Descriptive Events** - Name events clearly for debugging
|
|
241
|
+
4. **Handle Timeouts** - Always check the `success` flag returned by `wait` and handle the `timeout`/`aborted` causes
|
|
242
|
+
5. **Log Strategically** - Use logger to track workflow progress
|
|
243
|
+
6. **Version Your Workflows** - Deploy new versions alongside old ones
|
|
244
|
+
|
|
245
|
+
## Related Packages
|
|
246
|
+
|
|
247
|
+
- [@aikirun/task](https://www.npmjs.com/package/@aikirun/task) - Define tasks
|
|
248
|
+
- [@aikirun/client](https://www.npmjs.com/package/@aikirun/client) - Start workflows
|
|
249
|
+
- [@aikirun/worker](https://www.npmjs.com/package/@aikirun/worker) - Execute workflows
|
|
250
|
+
- [@aikirun/types](https://www.npmjs.com/package/@aikirun/types) - Type definitions
|
|
251
|
+
|
|
252
|
+
## Changelog
|
|
253
|
+
|
|
254
|
+
See the [CHANGELOG](https://github.com/aikirun/aiki/blob/main/CHANGELOG.md) for version history.
|
|
255
|
+
|
|
256
|
+
## License
|
|
257
|
+
|
|
258
|
+
Apache-2.0
|
package/dist/index.d.ts
ADDED
|
@@ -0,0 +1,189 @@
|
|
|
1
|
+
import { WorkflowId, WorkflowVersionId } from '@aikirun/types/workflow';
|
|
2
|
+
import { Client, Logger } from '@aikirun/types/client';
|
|
3
|
+
import { INTERNAL } from '@aikirun/types/symbols';
|
|
4
|
+
import { WorkflowRun, WorkflowRunStatus, WorkflowRunStateCompleted, WorkflowRunStateInComplete, WorkflowRunState, WorkflowRunId, WorkflowOptions } from '@aikirun/types/workflow-run';
|
|
5
|
+
import { SleepParams, SleepResult } from '@aikirun/types/sleep';
|
|
6
|
+
import { TaskPath, TaskState } from '@aikirun/types/task';
|
|
7
|
+
import { SerializableInput } from '@aikirun/types/error';
|
|
8
|
+
|
|
9
|
+
type NonEmptyArray<T> = [T, ...T[]];
|
|
10
|
+
|
|
11
|
+
type NonArrayObject<T> = T extends object ? (T extends ReadonlyArray<unknown> ? never : T) : never;
|
|
12
|
+
type IsSubtype<SubT, SuperT> = SubT extends SuperT ? true : false;
|
|
13
|
+
type And<T extends NonEmptyArray<boolean>> = T extends [infer First, ...infer Rest] ? false extends First ? false : Rest extends NonEmptyArray<boolean> ? And<Rest> : true : never;
|
|
14
|
+
type Or<T extends NonEmptyArray<boolean>> = T extends [infer First, ...infer Rest] ? true extends First ? true : Rest extends NonEmptyArray<boolean> ? Or<Rest> : false : never;
|
|
15
|
+
type PathFromObject<T, IncludeArrayKeys extends boolean = false> = T extends T ? PathFromObjectInternal<T, IncludeArrayKeys> : never;
|
|
16
|
+
type PathFromObjectInternal<T, IncludeArrayKeys extends boolean> = And<[
|
|
17
|
+
IsSubtype<T, object>,
|
|
18
|
+
Or<[IncludeArrayKeys, NonArrayObject<T> extends never ? false : true]>
|
|
19
|
+
]> extends true ? {
|
|
20
|
+
[K in Exclude<keyof T, symbol>]-?: And<[
|
|
21
|
+
IsSubtype<NonNullable<T[K]>, object>,
|
|
22
|
+
Or<[IncludeArrayKeys, NonArrayObject<NonNullable<T[K]>> extends never ? false : true]>
|
|
23
|
+
]> extends true ? K | `${K}.${PathFromObjectInternal<NonNullable<T[K]>, IncludeArrayKeys>}` : K;
|
|
24
|
+
}[Exclude<keyof T, symbol>] : "";
|
|
25
|
+
type ExtractObjectType<T> = T extends object ? T : never;
|
|
26
|
+
type TypeOfValueAtPath<T extends object, Path extends PathFromObject<T>> = Path extends keyof T ? T[Path] : Path extends `${infer First}.${infer Rest}` ? First extends keyof T ? undefined extends T[First] ? Rest extends PathFromObject<ExtractObjectType<T[First]>> ? TypeOfValueAtPath<ExtractObjectType<T[First]>, Rest> | undefined : never : Rest extends PathFromObject<ExtractObjectType<T[First]>> ? TypeOfValueAtPath<ExtractObjectType<T[First]>, Rest> : never : never : never;
|
|
27
|
+
|
|
28
|
+
declare function workflowRunHandle<Input, Output>(client: Client<unknown>, id: WorkflowRunId): Promise<WorkflowRunHandle<Input, Output>>;
|
|
29
|
+
declare function workflowRunHandle<Input, Output>(client: Client<unknown>, run: WorkflowRun<Input, Output>, logger?: Logger): Promise<WorkflowRunHandle<Input, Output>>;
|
|
30
|
+
interface WorkflowRunHandle<Input, Output> {
|
|
31
|
+
run: Readonly<WorkflowRun<Input, Output>>;
|
|
32
|
+
refresh: () => Promise<void>;
|
|
33
|
+
wait<S extends WorkflowRunStatus>(condition: {
|
|
34
|
+
type: "status";
|
|
35
|
+
status: S;
|
|
36
|
+
}, options: WorkflowRunWaitOptions): Promise<{
|
|
37
|
+
success: false;
|
|
38
|
+
cause: "timeout" | "aborted";
|
|
39
|
+
} | {
|
|
40
|
+
success: true;
|
|
41
|
+
state: S extends "completed" ? WorkflowRunStateCompleted<Output> : WorkflowRunStateInComplete;
|
|
42
|
+
}>;
|
|
43
|
+
wait(condition: {
|
|
44
|
+
type: "event";
|
|
45
|
+
event: string;
|
|
46
|
+
}, options: WorkflowRunWaitOptions): Promise<{
|
|
47
|
+
success: false;
|
|
48
|
+
cause: "timeout" | "aborted";
|
|
49
|
+
} | {
|
|
50
|
+
success: true;
|
|
51
|
+
state: WorkflowRunState<Output>;
|
|
52
|
+
}>;
|
|
53
|
+
cancel: (reason?: string) => Promise<void>;
|
|
54
|
+
pause: () => Promise<void>;
|
|
55
|
+
resume: () => Promise<void>;
|
|
56
|
+
[INTERNAL]: {
|
|
57
|
+
transitionState: (state: WorkflowRunState<Output>) => Promise<void>;
|
|
58
|
+
transitionTaskState: (taskPath: TaskPath, taskState: TaskState<unknown>) => Promise<void>;
|
|
59
|
+
assertExecutionAllowed: () => void;
|
|
60
|
+
};
|
|
61
|
+
}
|
|
62
|
+
interface WorkflowRunWaitOptions {
|
|
63
|
+
maxDurationMs: number;
|
|
64
|
+
pollIntervalMs?: number;
|
|
65
|
+
abortSignal?: AbortSignal;
|
|
66
|
+
}
|
|
67
|
+
|
|
68
|
+
interface WorkflowRunContext<Input, Output> {
|
|
69
|
+
id: WorkflowRunId;
|
|
70
|
+
workflowId: WorkflowId;
|
|
71
|
+
workflowVersionId: WorkflowVersionId;
|
|
72
|
+
options: WorkflowOptions;
|
|
73
|
+
logger: Logger;
|
|
74
|
+
sleep: (params: SleepParams) => Promise<SleepResult>;
|
|
75
|
+
[INTERNAL]: {
|
|
76
|
+
handle: WorkflowRunHandle<Input, Output>;
|
|
77
|
+
};
|
|
78
|
+
}
|
|
79
|
+
|
|
80
|
+
interface WorkflowVersionParams<Input, Output, AppContext> {
|
|
81
|
+
handler: (input: Input, run: Readonly<WorkflowRunContext<Input, Output>>, context: AppContext) => Promise<Output>;
|
|
82
|
+
opts?: WorkflowOptions;
|
|
83
|
+
}
|
|
84
|
+
interface WorkflowBuilder<Input, Output, AppContext> {
|
|
85
|
+
opt<Path extends PathFromObject<WorkflowOptions>>(path: Path, value: TypeOfValueAtPath<WorkflowOptions, Path>): WorkflowBuilder<Input, Output, AppContext>;
|
|
86
|
+
start: WorkflowVersion<Input, Output, AppContext>["start"];
|
|
87
|
+
}
|
|
88
|
+
interface WorkflowVersion<Input, Output, AppContext> {
|
|
89
|
+
id: WorkflowId;
|
|
90
|
+
versionId: WorkflowVersionId;
|
|
91
|
+
with(): WorkflowBuilder<Input, Output, AppContext>;
|
|
92
|
+
start: (client: Client<AppContext>, ...args: Input extends null ? [] : [Input]) => Promise<WorkflowRunHandle<Input, Output>>;
|
|
93
|
+
[INTERNAL]: {
|
|
94
|
+
handler: (input: Input, run: WorkflowRunContext<Input, Output>, context: AppContext) => Promise<void>;
|
|
95
|
+
};
|
|
96
|
+
}
|
|
97
|
+
declare class WorkflowVersionImpl<Input, Output, AppContext> implements WorkflowVersion<Input, Output, AppContext> {
|
|
98
|
+
readonly id: WorkflowId;
|
|
99
|
+
readonly versionId: WorkflowVersionId;
|
|
100
|
+
private readonly params;
|
|
101
|
+
readonly [INTERNAL]: WorkflowVersion<Input, Output, AppContext>[typeof INTERNAL];
|
|
102
|
+
constructor(id: WorkflowId, versionId: WorkflowVersionId, params: WorkflowVersionParams<Input, Output, AppContext>);
|
|
103
|
+
with(): WorkflowBuilder<Input, Output, AppContext>;
|
|
104
|
+
start(client: Client<AppContext>, ...args: Input extends null ? [] : [Input]): Promise<WorkflowRunHandle<Input, Output>>;
|
|
105
|
+
private handler;
|
|
106
|
+
private tryExecuteWorkflow;
|
|
107
|
+
private assertRetryAllowed;
|
|
108
|
+
private createFailedState;
|
|
109
|
+
private createAwaitingRetryState;
|
|
110
|
+
}
|
|
111
|
+
|
|
112
|
+
declare function workflowRegistry(): WorkflowRegistry;
|
|
113
|
+
type Workflow$1 = WorkflowVersion<unknown, unknown, unknown>;
|
|
114
|
+
interface WorkflowRegistry {
|
|
115
|
+
add: (workflow: Workflow$1) => WorkflowRegistry;
|
|
116
|
+
addMany: (workflows: Workflow$1[]) => WorkflowRegistry;
|
|
117
|
+
remove: (workflow: Workflow$1) => WorkflowRegistry;
|
|
118
|
+
removeMany: (workflows: Workflow$1[]) => WorkflowRegistry;
|
|
119
|
+
removeAll: () => WorkflowRegistry;
|
|
120
|
+
getAll(): Workflow$1[];
|
|
121
|
+
get: (id: WorkflowId, versionId: WorkflowVersionId) => Workflow$1 | undefined;
|
|
122
|
+
}
|
|
123
|
+
|
|
124
|
+
interface SleeperOptions {
|
|
125
|
+
spinThresholdMs: number;
|
|
126
|
+
}
|
|
127
|
+
declare function createWorkflowRunSleeper(workflowRunHandle: WorkflowRunHandle<unknown, unknown>, logger: Logger, options: SleeperOptions): (params: SleepParams) => Promise<SleepResult>;
|
|
128
|
+
|
|
129
|
+
/**
|
|
130
|
+
* Defines a durable workflow with versioning and multiple task execution.
|
|
131
|
+
*
|
|
132
|
+
* Workflows are long-running business processes that can span hours, days, or longer.
|
|
133
|
+
* They automatically survive crashes, timeouts, and infrastructure failures.
|
|
134
|
+
* Multiple versions of a workflow can run simultaneously, allowing safe deployments.
|
|
135
|
+
*
|
|
136
|
+
* @param params - Workflow configuration
|
|
137
|
+
* @param params.id - Unique workflow id used for identification and routing
|
|
138
|
+
* @returns Workflow instance with version management methods
|
|
139
|
+
*
|
|
140
|
+
* @example
|
|
141
|
+
* ```typescript
|
|
142
|
+
* // Define a workflow
|
|
143
|
+
* export const userOnboarding = workflow({ id: "user-onboarding" });
|
|
144
|
+
*
|
|
145
|
+
* // Define version 1.0
|
|
146
|
+
* export const userOnboardingV1 = userOnboarding.v("1.0", {
|
|
147
|
+
* async handler(input: { email: string }, run) {
|
|
148
|
+
* run.logger.info("Starting onboarding", { email: input.email });
|
|
149
|
+
*
|
|
150
|
+
* // Execute tasks
|
|
151
|
+
* await sendWelcomeEmail.start(run, { email: input.email });
|
|
152
|
+
* await createUserProfile.start(run, { email: input.email });
|
|
153
|
+
*
|
|
154
|
+
* // Durable sleep
|
|
155
|
+
* await run.sleep({ id: "onboarding-delay", days: 1 });
|
|
156
|
+
*
|
|
157
|
+
* // More tasks
|
|
158
|
+
* await sendUsageTips.start(run, { email: input.email });
|
|
159
|
+
*
|
|
160
|
+
* return { success: true };
|
|
161
|
+
* },
|
|
162
|
+
* });
|
|
163
|
+
*
|
|
164
|
+
* // Deploy version 2.0 alongside 1.0 (no downtime)
|
|
165
|
+
* export const userOnboardingV2 = userOnboarding.v("2.0", {
|
|
166
|
+
* async handler(input: { email: string; trial: boolean }, run) {
|
|
167
|
+
* // Enhanced version with different logic
|
|
168
|
+
* // Existing v1.0 workflows continue with their version
|
|
169
|
+
* // New workflows use v2.0
|
|
170
|
+
* },
|
|
171
|
+
* });
|
|
172
|
+
* ```
|
|
173
|
+
*
|
|
174
|
+
* @see {@link https://github.com/aikirun/aiki} for complete documentation
|
|
175
|
+
*/
|
|
176
|
+
declare function workflow(params: WorkflowParams): Workflow;
|
|
177
|
+
interface WorkflowParams {
|
|
178
|
+
id: string;
|
|
179
|
+
}
|
|
180
|
+
interface Workflow {
|
|
181
|
+
id: WorkflowId;
|
|
182
|
+
v: <Input extends SerializableInput = null, Output = void, AppContext = null>(versionId: string, params: WorkflowVersionParams<Input, Output, AppContext>) => WorkflowVersion<Input, Output, AppContext>;
|
|
183
|
+
[INTERNAL]: {
|
|
184
|
+
getAllVersions: () => WorkflowVersion<unknown, unknown, unknown>[];
|
|
185
|
+
getVersion: (versionId: WorkflowVersionId) => WorkflowVersion<unknown, unknown, unknown> | undefined;
|
|
186
|
+
};
|
|
187
|
+
}
|
|
188
|
+
|
|
189
|
+
export { type Workflow, type WorkflowParams, type WorkflowRegistry, type WorkflowRunContext, type WorkflowRunHandle, type WorkflowRunWaitOptions, type WorkflowVersion, WorkflowVersionImpl, type WorkflowVersionParams, createWorkflowRunSleeper, workflow, workflowRegistry, workflowRunHandle };
|
package/dist/index.js
ADDED
|
@@ -0,0 +1,611 @@
|
|
|
1
|
+
// registry.ts
|
|
2
|
+
function workflowRegistry() {
|
|
3
|
+
return new WorkflowRegistryImpl();
|
|
4
|
+
}
|
|
5
|
+
var WorkflowRegistryImpl = class {
|
|
6
|
+
workflowsById = /* @__PURE__ */ new Map();
|
|
7
|
+
add(workflow2) {
|
|
8
|
+
const workflows = this.workflowsById.get(workflow2.id);
|
|
9
|
+
if (!workflows) {
|
|
10
|
+
this.workflowsById.set(workflow2.id, /* @__PURE__ */ new Map([[workflow2.versionId, workflow2]]));
|
|
11
|
+
return this;
|
|
12
|
+
}
|
|
13
|
+
if (workflows.has(workflow2.versionId)) {
|
|
14
|
+
throw new Error(`Workflow "${workflow2.id}/${workflow2.versionId}" is already registered`);
|
|
15
|
+
}
|
|
16
|
+
workflows.set(workflow2.versionId, workflow2);
|
|
17
|
+
return this;
|
|
18
|
+
}
|
|
19
|
+
addMany(workflows) {
|
|
20
|
+
for (const workflow2 of workflows) {
|
|
21
|
+
this.add(workflow2);
|
|
22
|
+
}
|
|
23
|
+
return this;
|
|
24
|
+
}
|
|
25
|
+
remove(workflow2) {
|
|
26
|
+
const workflowVersinos = this.workflowsById.get(workflow2.id);
|
|
27
|
+
if (workflowVersinos) {
|
|
28
|
+
workflowVersinos.delete(workflow2.versionId);
|
|
29
|
+
}
|
|
30
|
+
return this;
|
|
31
|
+
}
|
|
32
|
+
removeMany(workflows) {
|
|
33
|
+
for (const workflow2 of workflows) {
|
|
34
|
+
this.remove(workflow2);
|
|
35
|
+
}
|
|
36
|
+
return this;
|
|
37
|
+
}
|
|
38
|
+
removeAll() {
|
|
39
|
+
this.workflowsById.clear();
|
|
40
|
+
return this;
|
|
41
|
+
}
|
|
42
|
+
getAll() {
|
|
43
|
+
const workflows = [];
|
|
44
|
+
for (const workflowVersions of this.workflowsById.values()) {
|
|
45
|
+
for (const workflow2 of workflowVersions.values()) {
|
|
46
|
+
workflows.push(workflow2);
|
|
47
|
+
}
|
|
48
|
+
}
|
|
49
|
+
return workflows;
|
|
50
|
+
}
|
|
51
|
+
get(id, versionId) {
|
|
52
|
+
return this.workflowsById.get(id)?.get(versionId);
|
|
53
|
+
}
|
|
54
|
+
};
|
|
55
|
+
|
|
56
|
+
// ../../lib/array/utils.ts
|
|
57
|
+
function isNonEmptyArray(value) {
|
|
58
|
+
return value.length > 0;
|
|
59
|
+
}
|
|
60
|
+
|
|
61
|
+
// ../../lib/async/delay.ts
|
|
62
|
+
function delay(ms, options) {
|
|
63
|
+
const abortSignal = options?.abortSignal;
|
|
64
|
+
if (abortSignal?.aborted) {
|
|
65
|
+
return Promise.reject(abortSignal.reason);
|
|
66
|
+
}
|
|
67
|
+
return new Promise((resolve, reject) => {
|
|
68
|
+
const abort = () => {
|
|
69
|
+
clearTimeout(timeout);
|
|
70
|
+
reject(abortSignal?.reason);
|
|
71
|
+
};
|
|
72
|
+
const timeout = setTimeout(() => {
|
|
73
|
+
abortSignal?.removeEventListener("abort", abort);
|
|
74
|
+
resolve();
|
|
75
|
+
}, ms);
|
|
76
|
+
abortSignal?.addEventListener("abort", abort, { once: true });
|
|
77
|
+
});
|
|
78
|
+
}
|
|
79
|
+
|
|
80
|
+
// ../../lib/duration/convert.ts
|
|
81
|
+
var MS_PER_SECOND = 1e3;
|
|
82
|
+
var MS_PER_MINUTE = 60 * MS_PER_SECOND;
|
|
83
|
+
var MS_PER_HOUR = 60 * MS_PER_MINUTE;
|
|
84
|
+
var MS_PER_DAY = 24 * MS_PER_HOUR;
|
|
85
|
+
function toMilliseconds(duration) {
|
|
86
|
+
if (typeof duration === "number") {
|
|
87
|
+
assertIsPositiveNumber(duration);
|
|
88
|
+
return duration;
|
|
89
|
+
}
|
|
90
|
+
let totalMs = 0;
|
|
91
|
+
if (duration.days !== void 0) {
|
|
92
|
+
assertIsPositiveNumber(duration.days, "days");
|
|
93
|
+
totalMs += duration.days * MS_PER_DAY;
|
|
94
|
+
}
|
|
95
|
+
if (duration.hours !== void 0) {
|
|
96
|
+
assertIsPositiveNumber(duration.hours, "hours");
|
|
97
|
+
totalMs += duration.hours * MS_PER_HOUR;
|
|
98
|
+
}
|
|
99
|
+
if (duration.minutes !== void 0) {
|
|
100
|
+
assertIsPositiveNumber(duration.minutes, "minutes");
|
|
101
|
+
totalMs += duration.minutes * MS_PER_MINUTE;
|
|
102
|
+
}
|
|
103
|
+
if (duration.seconds !== void 0) {
|
|
104
|
+
assertIsPositiveNumber(duration.seconds, "seconds");
|
|
105
|
+
totalMs += duration.seconds * MS_PER_SECOND;
|
|
106
|
+
}
|
|
107
|
+
if (duration.milliseconds !== void 0) {
|
|
108
|
+
assertIsPositiveNumber(duration.milliseconds, "milliseconds");
|
|
109
|
+
totalMs += duration.milliseconds;
|
|
110
|
+
}
|
|
111
|
+
return totalMs;
|
|
112
|
+
}
|
|
113
|
+
function assertIsPositiveNumber(value, field) {
|
|
114
|
+
if (!Number.isFinite(value)) {
|
|
115
|
+
throw new Error(
|
|
116
|
+
field !== void 0 ? `'${field}' duration must be finite. Received: ${value}` : `Duration must be finite. Received: ${value}`
|
|
117
|
+
);
|
|
118
|
+
}
|
|
119
|
+
if (value < 0) {
|
|
120
|
+
throw new Error(
|
|
121
|
+
field !== void 0 ? `'${field}' duration must be non-negative. Received: ${value}` : `Duration must be non-negative. Received: ${value}`
|
|
122
|
+
);
|
|
123
|
+
}
|
|
124
|
+
}
|
|
125
|
+
|
|
126
|
+
// ../../lib/error/serializable.ts
|
|
127
|
+
function createSerializableError(error) {
|
|
128
|
+
return error instanceof Error ? {
|
|
129
|
+
message: error.message,
|
|
130
|
+
name: error.name,
|
|
131
|
+
stack: error.stack,
|
|
132
|
+
cause: error.cause ? createSerializableError(error.cause) : void 0
|
|
133
|
+
} : {
|
|
134
|
+
message: String(error),
|
|
135
|
+
name: "UnknownError"
|
|
136
|
+
};
|
|
137
|
+
}
|
|
138
|
+
|
|
139
|
+
// ../../lib/object/overrider.ts
|
|
140
|
+
function set(obj, path, value) {
|
|
141
|
+
const keys = path.split(".");
|
|
142
|
+
let currentValue = obj;
|
|
143
|
+
for (let i = 0; i < keys.length - 1; i++) {
|
|
144
|
+
const key = keys[i];
|
|
145
|
+
currentValue = currentValue[key];
|
|
146
|
+
if (currentValue === void 0 || currentValue === null) {
|
|
147
|
+
currentValue = {};
|
|
148
|
+
currentValue[key] = currentValue;
|
|
149
|
+
}
|
|
150
|
+
}
|
|
151
|
+
const lastKey = keys[keys.length - 1];
|
|
152
|
+
currentValue[lastKey] = value;
|
|
153
|
+
}
|
|
154
|
+
var objectOverrider = (defaultObj) => (obj) => {
|
|
155
|
+
const createBuilder = (overrides) => ({
|
|
156
|
+
with: (path, value) => createBuilder([...overrides, { path: `${path}`, value }]),
|
|
157
|
+
build: () => {
|
|
158
|
+
const clonedObject = structuredClone(obj ?? defaultObj);
|
|
159
|
+
for (const { path, value } of overrides) {
|
|
160
|
+
set(clonedObject, path, value);
|
|
161
|
+
}
|
|
162
|
+
return clonedObject;
|
|
163
|
+
}
|
|
164
|
+
});
|
|
165
|
+
return createBuilder([]);
|
|
166
|
+
};
|
|
167
|
+
|
|
168
|
+
// ../../lib/retry/strategy.ts
|
|
169
|
+
function withRetry(fn, strategy, options) {
|
|
170
|
+
return {
|
|
171
|
+
run: async (...args) => {
|
|
172
|
+
let attempts = 0;
|
|
173
|
+
while (true) {
|
|
174
|
+
if (options?.abortSignal?.aborted) {
|
|
175
|
+
return {
|
|
176
|
+
state: "aborted",
|
|
177
|
+
reason: options.abortSignal.reason
|
|
178
|
+
};
|
|
179
|
+
}
|
|
180
|
+
attempts++;
|
|
181
|
+
let result;
|
|
182
|
+
try {
|
|
183
|
+
result = await fn(...args);
|
|
184
|
+
if (options?.shouldRetryOnResult === void 0 || !await options.shouldRetryOnResult(result)) {
|
|
185
|
+
return {
|
|
186
|
+
state: "completed",
|
|
187
|
+
result,
|
|
188
|
+
attempts
|
|
189
|
+
};
|
|
190
|
+
}
|
|
191
|
+
} catch (err) {
|
|
192
|
+
if (options?.shouldNotRetryOnError !== void 0 && await options.shouldNotRetryOnError(err)) {
|
|
193
|
+
throw err;
|
|
194
|
+
}
|
|
195
|
+
}
|
|
196
|
+
const retryParams = getRetryParams(attempts, strategy);
|
|
197
|
+
if (!retryParams.retriesLeft) {
|
|
198
|
+
return {
|
|
199
|
+
state: "timeout"
|
|
200
|
+
};
|
|
201
|
+
}
|
|
202
|
+
await delay(retryParams.delayMs, { abortSignal: options?.abortSignal });
|
|
203
|
+
}
|
|
204
|
+
}
|
|
205
|
+
};
|
|
206
|
+
}
|
|
207
|
+
function getRetryParams(attempts, strategy) {
|
|
208
|
+
const strategyType = strategy.type;
|
|
209
|
+
switch (strategyType) {
|
|
210
|
+
case "never":
|
|
211
|
+
return {
|
|
212
|
+
retriesLeft: false
|
|
213
|
+
};
|
|
214
|
+
case "fixed":
|
|
215
|
+
if (attempts >= strategy.maxAttempts) {
|
|
216
|
+
return {
|
|
217
|
+
retriesLeft: false
|
|
218
|
+
};
|
|
219
|
+
}
|
|
220
|
+
return {
|
|
221
|
+
retriesLeft: true,
|
|
222
|
+
delayMs: strategy.delayMs
|
|
223
|
+
};
|
|
224
|
+
case "exponential": {
|
|
225
|
+
if (attempts >= strategy.maxAttempts) {
|
|
226
|
+
return {
|
|
227
|
+
retriesLeft: false
|
|
228
|
+
};
|
|
229
|
+
}
|
|
230
|
+
const delayMs = strategy.baseDelayMs * (strategy.factor ?? 2) ** (attempts - 1);
|
|
231
|
+
return {
|
|
232
|
+
retriesLeft: true,
|
|
233
|
+
delayMs: Math.min(delayMs, strategy.maxDelayMs ?? Number.POSITIVE_INFINITY)
|
|
234
|
+
};
|
|
235
|
+
}
|
|
236
|
+
case "jittered": {
|
|
237
|
+
if (attempts >= strategy.maxAttempts) {
|
|
238
|
+
return {
|
|
239
|
+
retriesLeft: false
|
|
240
|
+
};
|
|
241
|
+
}
|
|
242
|
+
const base = strategy.baseDelayMs * (strategy.jitterFactor ?? 2) ** (attempts - 1);
|
|
243
|
+
const delayMs = Math.random() * base;
|
|
244
|
+
return {
|
|
245
|
+
retriesLeft: true,
|
|
246
|
+
delayMs: Math.min(delayMs, strategy.maxDelayMs ?? Number.POSITIVE_INFINITY)
|
|
247
|
+
};
|
|
248
|
+
}
|
|
249
|
+
default:
|
|
250
|
+
return strategyType;
|
|
251
|
+
}
|
|
252
|
+
}
|
|
253
|
+
|
|
254
|
+
// run/run-handle.ts
|
|
255
|
+
import { INTERNAL } from "@aikirun/types/symbols";
|
|
256
|
+
import {
|
|
257
|
+
WorkflowRunNotExecutableError
|
|
258
|
+
} from "@aikirun/types/workflow-run";
|
|
259
|
+
async function workflowRunHandle(client, runOrId, logger) {
|
|
260
|
+
const run = typeof runOrId !== "string" ? runOrId : (await client.api.workflowRun.getByIdV1({ id: runOrId })).run;
|
|
261
|
+
return new WorkflowRunHandleImpl(client.api, run, logger ?? client.logger.child({ "aiki.workflowRunId": run.id }));
|
|
262
|
+
}
|
|
263
|
+
// Client-side handle on a single workflow run. Wraps the run record returned
// by the API and exposes lifecycle operations (wait/cancel/pause/resume) plus
// internal state-transition helpers used by the SDK machinery via [INTERNAL].
var WorkflowRunHandleImpl = class {
  constructor(api, _run, logger) {
    this.api = api;
    this._run = _run;
    this.logger = logger;
    // Internal surface consumed by the workflow runtime; bound so the
    // methods can be passed around without losing `this`.
    this[INTERNAL] = {
      transitionState: this.transitionState.bind(this),
      transitionTaskState: this.transitionTaskState.bind(this),
      assertExecutionAllowed: this.assertExecutionAllowed.bind(this)
    };
  }
  [INTERNAL];
  // Last-known snapshot of the run record; refreshed by refresh() and
  // replaced after every successful state transition.
  get run() {
    return this._run;
  }
  // Re-fetches the run from the API and replaces the local snapshot.
  async refresh() {
    const { run: currentRun } = await this.api.workflowRun.getByIdV1({ id: this.run.id });
    this._run = currentRun;
  }
  // Polls the run until `condition` is met, the configured max duration
  // elapses, or (when provided) options.abortSignal fires.
  // Returns { success: true, state } or { success: false, cause } where
  // cause is "timeout" or "aborted".
  async wait(condition, options) {
    // Fail fast when the caller hands us an already-aborted signal.
    if (options.abortSignal?.aborted) {
      throw new Error("Wait operation aborted");
    }
    const delayMs = options.pollIntervalMs ?? 1e3;
    // Fixed-interval polling: attempts * delay covers maxDurationMs.
    const maxAttempts = Math.ceil(options.maxDurationMs / delayMs);
    switch (condition.type) {
      case "status": {
        // Abortable variant: forwards the signal to withRetry so polling
        // can be cancelled mid-wait.
        if (options.abortSignal !== void 0) {
          const maybeResult2 = await withRetry(
            async () => {
              await this.refresh();
              return this.run.state;
            },
            { type: "fixed", maxAttempts, delayMs },
            {
              abortSignal: options.abortSignal,
              // Keep polling until the run reaches the requested status.
              shouldRetryOnResult: (state) => Promise.resolve(state.status !== condition.status)
            }
          ).run();
          if (maybeResult2.state === "timeout" || maybeResult2.state === "aborted") {
            return { success: false, cause: maybeResult2.state };
          }
          return {
            success: true,
            state: maybeResult2.result
          };
        }
        // Non-abortable variant: identical polling, only "timeout" can occur.
        const maybeResult = await withRetry(
          async () => {
            await this.refresh();
            return this.run.state;
          },
          { type: "fixed", maxAttempts, delayMs },
          { shouldRetryOnResult: (state) => Promise.resolve(state.status !== condition.status) }
        ).run();
        if (maybeResult.state === "timeout") {
          return { success: false, cause: maybeResult.state };
        }
        return {
          success: true,
          state: maybeResult.result
        };
      }
      case "event": {
        throw new Error("Event-based waiting is not yet implemented");
      }
      default:
        // NOTE(review): looks like compiled exhaustiveness-check residue from
        // TypeScript (condition is `never` here); unreachable for valid input.
        return condition;
    }
  }
  // Requests cancellation of the run (pessimistic transition, see below).
  async cancel(reason) {
    return this.transitionState({ status: "cancelled", reason });
  }
  // Pauses the run, stamping the pause time client-side.
  async pause() {
    return this.transitionState({ status: "paused", pausedAt: Date.now() });
  }
  // Re-schedules a paused run for execution.
  async resume() {
    return this.transitionState({ status: "scheduled", scheduledAt: Date.now(), reason: "resume" });
  }
  // Transitions the run's state on the server and stores the updated record.
  // User-initiated transitions (new/resume scheduling, pause, cancel) go
  // through the "pessimistic" path; everything else is "optimistic" and
  // guarded by the locally-known revision.
  async transitionState(targetState) {
    if (targetState.status === "scheduled" && (targetState.reason === "new" || targetState.reason === "resume") || targetState.status === "paused" || targetState.status === "cancelled") {
      const { run: run2 } = await this.api.workflowRun.transitionStateV1({
        type: "pessimistic",
        id: this.run.id,
        state: targetState
      });
      this._run = run2;
      return;
    }
    const { run } = await this.api.workflowRun.transitionStateV1({
      type: "optimistic",
      id: this.run.id,
      state: targetState,
      expectedRevision: this.run.revision
    });
    this._run = run;
  }
  // Transitions the state of a single task within the run, optimistically
  // guarded by the run's current revision.
  async transitionTaskState(taskPath, taskState) {
    const { run } = await this.api.workflowRun.transitionTaskStateV1({
      id: this.run.id,
      taskPath,
      taskState,
      expectedRevision: this.run.revision
    });
    this._run = run;
  }
  // Throws unless the run is in a state in which handler code may execute.
  assertExecutionAllowed() {
    const status = this.run.state.status;
    if (status !== "queued" && status !== "running") {
      throw new WorkflowRunNotExecutableError(this.run.id, status);
    }
  }
};
|
|
376
|
+
|
|
377
|
+
// run/sleeper.ts
|
|
378
|
+
import { INTERNAL as INTERNAL2 } from "@aikirun/types/symbols";
|
|
379
|
+
import { WorkflowRunSuspendedError } from "@aikirun/types/workflow-run";
|
|
380
|
+
// Upper bound on a single durable sleep (10 years, ignoring leap days).
var MAX_SLEEP_YEARS = 10;
var MAX_SLEEP_MS = MAX_SLEEP_YEARS * 365 * 24 * 60 * 60 * 1e3;
/**
 * Builds the `run.sleep(...)` function for a workflow run.
 *
 * The returned sleeper is durable and idempotent per (id, duration) pair:
 * on replay it consults the run's persisted sleep state and either returns
 * immediately (completed/cancelled), or suspends the workflow by throwing
 * WorkflowRunSuspendedError. Sleeps at or below options.spinThresholdMs are
 * served in-process with a plain delay instead of suspending.
 *
 * Resolves to { cancelled: boolean } when the sleep is over.
 */
function createWorkflowRunSleeper(workflowRunHandle2, logger, options) {
  return async (params) => {
    const { id: sleepId, ...durationFields } = params;
    const durationMs = toMilliseconds(durationFields);
    if (durationMs > MAX_SLEEP_MS) {
      throw new Error(`Sleep duration ${durationMs}ms exceeds maximum of ${MAX_SLEEP_YEARS} years`);
    }
    // The duration is part of the key, so the same id with a different
    // duration is treated as a distinct sleep.
    const sleepPath = `${sleepId}/${durationMs}`;
    const sleepState = workflowRunHandle2.run.sleepsState[sleepPath] ?? { status: "none" };
    // Replay: this sleep already finished on an earlier execution.
    if (sleepState.status === "completed") {
      logger.debug("Sleep completed", {
        "aiki.sleepId": sleepId,
        "aiki.durationMs": durationMs
      });
      return { cancelled: false };
    }
    // Replay: this sleep was cancelled externally.
    if (sleepState.status === "cancelled") {
      logger.debug("Sleep cancelled", {
        "aiki.sleepId": sleepId,
        "aiki.durationMs": durationMs
      });
      return { cancelled: true };
    }
    // Still sleeping server-side: suspend this execution again.
    if (sleepState.status === "sleeping") {
      logger.debug("Already sleeping", {
        "aiki.sleepId": sleepId,
        "aiki.durationMs": durationMs
      });
      throw new WorkflowRunSuspendedError(workflowRunHandle2.run.id);
    }
    // (Removed a leftover no-op `sleepState;` expression statement here —
    // compiled residue of a TypeScript exhaustiveness check.)
    // Short sleeps are cheaper to serve in-process than via suspension.
    if (durationMs <= options.spinThresholdMs) {
      logger.debug("Spinning for short sleep", {
        "aiki.sleepId": sleepId,
        "aiki.durationMs": durationMs
      });
      await delay(durationMs);
      return { cancelled: false };
    }
    // Durable path: persist the sleeping state, then suspend. The platform
    // re-schedules the run once the duration has elapsed.
    await workflowRunHandle2[INTERNAL2].transitionState({ status: "sleeping", sleepPath, durationMs });
    logger.info("Workflow going to sleep", {
      "aiki.sleepId": sleepId,
      "aiki.durationMs": durationMs
    });
    throw new WorkflowRunSuspendedError(workflowRunHandle2.run.id);
  };
}
|
|
429
|
+
|
|
430
|
+
// workflow.ts
|
|
431
|
+
import { INTERNAL as INTERNAL4 } from "@aikirun/types/symbols";
|
|
432
|
+
|
|
433
|
+
// workflow-version.ts
|
|
434
|
+
import { INTERNAL as INTERNAL3 } from "@aikirun/types/symbols";
|
|
435
|
+
import { TaskFailedError } from "@aikirun/types/task";
|
|
436
|
+
import {
|
|
437
|
+
WorkflowRunFailedError,
|
|
438
|
+
WorkflowRunSuspendedError as WorkflowRunSuspendedError2
|
|
439
|
+
} from "@aikirun/types/workflow-run";
|
|
440
|
+
// One registered version of a workflow. Owns the user handler plus the
// retry/suspension machinery that drives it; exposes the internal handler
// entry point to the worker via [INTERNAL3].
var WorkflowVersionImpl = class _WorkflowVersionImpl {
  constructor(id, versionId, params) {
    this.id = id;
    this.versionId = versionId;
    this.params = params;
    this[INTERNAL3] = {
      handler: this.handler.bind(this)
    };
  }
  [INTERNAL3];
  // Returns a fluent builder for starting this version with per-call option
  // overrides. Each .opt(path, value) produces a new immutable builder;
  // .start() clones this version with the overridden opts and starts it.
  with() {
    const optsOverrider = objectOverrider(this.params.opts ?? {});
    const createBuilder = (optsBuilder) => ({
      opt: (path, value) => createBuilder(optsBuilder.with(path, value)),
      start: (client, ...args) => new _WorkflowVersionImpl(this.id, this.versionId, { ...this.params, opts: optsBuilder.build() }).start(
        client,
        ...args
      )
    });
    return createBuilder(optsOverrider());
  }
  // Creates a new run of this workflow version on the server and returns a
  // handle to it. `args` is variadic so zero-input workflows can omit input;
  // only the first argument (if any) is sent as the run input.
  async start(client, ...args) {
    const { run } = await client.api.workflowRun.createV1({
      workflowId: this.id,
      workflowVersionId: this.versionId,
      input: isNonEmptyArray(args) ? args[0] : null,
      options: this.params.opts
    });
    return workflowRunHandle(client, run);
  }
  // Worker-side entry point for executing one attempt of this version.
  // Transitions queued -> running -> completed, delegating failures and
  // retries to tryExecuteWorkflow (which throws rather than returning).
  async handler(input, run, context) {
    const { logger } = run;
    const { handle } = run[INTERNAL3];
    // Refuse to execute runs that are not queued/running (e.g. cancelled).
    handle[INTERNAL3].assertExecutionAllowed();
    const retryStrategy = this.params.opts?.retry ?? { type: "never" };
    const state = handle.run.state;
    // A retry-queued run must still have retry budget left.
    if (state.status === "queued" && state.reason === "retry") {
      this.assertRetryAllowed(handle.run.id, handle.run.attempts, retryStrategy, logger);
    }
    logger.info("Starting workflow");
    await handle[INTERNAL3].transitionState({ status: "running" });
    const output = await this.tryExecuteWorkflow(input, run, context, retryStrategy);
    await handle[INTERNAL3].transitionState({ status: "completed", output });
    logger.info("Workflow complete");
  }
  // Runs the user handler once. On success returns its output. On failure:
  //  - WorkflowRunSuspendedError is re-thrown untouched (sleep/retry suspension);
  //  - with retries left, persists an awaiting_retry state and suspends;
  //  - otherwise persists a failed state and throws WorkflowRunFailedError.
  // NOTE(review): every catch path throws, so the `while (true)` never loops
  // a second time — presumably kept for shape/future use; confirm upstream.
  async tryExecuteWorkflow(input, run, context, retryStrategy) {
    while (true) {
      try {
        return await this.params.handler(input, run, context);
      } catch (error) {
        if (error instanceof WorkflowRunSuspendedError2) {
          throw error;
        }
        const attempts = run[INTERNAL3].handle.run.attempts;
        const retryParams = getRetryParams(attempts, retryStrategy);
        if (!retryParams.retriesLeft) {
          // Terminal failure: persist first, then log, then surface.
          const failedState = this.createFailedState(error);
          await run[INTERNAL3].handle[INTERNAL3].transitionState(failedState);
          // Prefix every state field with "aiki." for structured logging.
          const logMeta2 = {};
          for (const [key, value] of Object.entries(failedState)) {
            logMeta2[`aiki.${key}`] = value;
          }
          run.logger.error("Workflow failed", {
            "aiki.attempts": attempts,
            ...logMeta2
          });
          throw new WorkflowRunFailedError(run.id, attempts, failedState.reason, failedState.cause);
        }
        // Retriable failure: record the next attempt time and suspend; the
        // platform re-queues the run at nextAttemptAt.
        const nextAttemptAt = Date.now() + retryParams.delayMs;
        const awaitingRetryState = this.createAwaitingRetryState(error, nextAttemptAt);
        await run[INTERNAL3].handle[INTERNAL3].transitionState(awaitingRetryState);
        const logMeta = {};
        for (const [key, value] of Object.entries(awaitingRetryState)) {
          logMeta[`aiki.${key}`] = value;
        }
        run.logger.info("Workflow failed. Awaiting retry", {
          "aiki.attempts": attempts,
          "aiki.nextAttemptAt": nextAttemptAt,
          "aiki.delayMs": retryParams.delayMs,
          ...logMeta
        });
        throw new WorkflowRunSuspendedError2(run.id);
      }
    }
  }
  // Guard used when picking up a retry-queued run: fails the run outright
  // if the retry strategy has no attempts left.
  assertRetryAllowed(id, attempts, retryStrategy, logger) {
    const retryParams = getRetryParams(attempts, retryStrategy);
    if (!retryParams.retriesLeft) {
      logger.error("Workflow retry not allowed", {
        "aiki.attempts": attempts
      });
      throw new WorkflowRunFailedError(id, attempts, "Workflow retry not allowed");
    }
  }
  // Builds the terminal "failed" state, distinguishing task-originated
  // failures (cause: "task") from handler failures (cause: "self").
  createFailedState(error) {
    if (error instanceof TaskFailedError) {
      return {
        status: "failed",
        cause: "task",
        taskPath: error.taskPath,
        reason: error.reason
      };
    }
    const serializableError = createSerializableError(error);
    return {
      status: "failed",
      cause: "self",
      reason: serializableError.message,
      error: serializableError
    };
  }
  // Builds the "awaiting_retry" state; same task/self split as above, plus
  // the timestamp at which the next attempt becomes due.
  createAwaitingRetryState(error, nextAttemptAt) {
    if (error instanceof TaskFailedError) {
      return {
        status: "awaiting_retry",
        cause: "task",
        reason: error.reason,
        nextAttemptAt,
        taskPath: error.taskPath
      };
    }
    const serializableError = createSerializableError(error);
    return {
      status: "awaiting_retry",
      cause: "self",
      reason: serializableError.message,
      nextAttemptAt,
      error: serializableError
    };
  }
};
|
|
571
|
+
|
|
572
|
+
// workflow.ts
|
|
573
|
+
/**
 * Creates a new workflow definition.
 *
 * The returned object carries the workflow id and registers concrete
 * versions via `.v(versionId, { handler, ... })`.
 *
 * @param params configuration for the workflow, including its unique `id`
 * @returns the workflow definition
 */
function workflow(params) {
  const definition = new WorkflowImpl(params);
  return definition;
}
|
|
576
|
+
// A workflow definition: an id plus a registry of its versions.
var WorkflowImpl = class {
  // Unique workflow identifier shared by every registered version.
  id;
  // Internal surface (version lookup) consumed by the SDK machinery.
  [INTERNAL4];
  // versionId -> WorkflowVersionImpl, in registration order.
  workflowVersions = /* @__PURE__ */ new Map();
  constructor(params) {
    this.id = params.id;
    this[INTERNAL4] = {
      getAllVersions: this.getAllVersions.bind(this),
      getVersion: this.getVersion.bind(this)
    };
  }
  /**
   * Registers a new version of this workflow and returns it.
   * Version ids must be unique per workflow; re-registering one throws.
   */
  v(versionId, params) {
    if (this.workflowVersions.has(versionId)) {
      throw new Error(`Workflow "${this.id}/${versionId}" already exists`);
    }
    const workflowVersion = new WorkflowVersionImpl(this.id, versionId, params);
    this.workflowVersions.set(versionId, workflowVersion);
    return workflowVersion;
  }
  // Snapshot array of every registered version.
  getAllVersions() {
    return [...this.workflowVersions.values()];
  }
  // Single-version lookup; undefined when the id was never registered.
  getVersion(versionId) {
    return this.workflowVersions.get(versionId);
  }
};
|
|
605
|
+
export {
|
|
606
|
+
WorkflowVersionImpl,
|
|
607
|
+
createWorkflowRunSleeper,
|
|
608
|
+
workflow,
|
|
609
|
+
workflowRegistry,
|
|
610
|
+
workflowRunHandle
|
|
611
|
+
};
|
package/package.json
ADDED
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@aikirun/workflow",
|
|
3
|
+
"version": "0.5.3",
|
|
4
|
+
"description": "Workflow SDK for Aiki - define durable workflows with tasks, sleeps, waits, and event handling",
|
|
5
|
+
"type": "module",
|
|
6
|
+
"main": "./dist/index.js",
|
|
7
|
+
"types": "./dist/index.d.ts",
|
|
8
|
+
"exports": {
|
|
9
|
+
".": {
|
|
10
|
+
"types": "./dist/index.d.ts",
|
|
11
|
+
"import": "./dist/index.js"
|
|
12
|
+
}
|
|
13
|
+
},
|
|
14
|
+
"files": [
|
|
15
|
+
"dist"
|
|
16
|
+
],
|
|
17
|
+
"scripts": {
|
|
18
|
+
"build": "tsup"
|
|
19
|
+
},
|
|
20
|
+
"dependencies": {
|
|
21
|
+
"@aikirun/types": "0.5.3"
|
|
22
|
+
},
|
|
23
|
+
"publishConfig": {
|
|
24
|
+
"access": "public"
|
|
25
|
+
},
|
|
26
|
+
"license": "Apache-2.0"
|
|
27
|
+
}
|