@aikirun/worker 0.5.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +149 -0
- package/dist/index.d.ts +96 -0
- package/dist/index.js +351 -0
- package/package.json +29 -0
package/README.md
ADDED
|
@@ -0,0 +1,149 @@
|
|
|
1
|
+
# @aikirun/worker
|
|
2
|
+
|
|
3
|
+
Worker SDK for Aiki durable execution platform - execute workflows and tasks with durable state management and automatic
|
|
4
|
+
recovery.
|
|
5
|
+
|
|
6
|
+
## Installation
|
|
7
|
+
|
|
8
|
+
```bash
|
|
9
|
+
npm install @aikirun/worker
|
|
10
|
+
```
|
|
11
|
+
|
|
12
|
+
## Quick Start
|
|
13
|
+
|
|
14
|
+
### Create and Spawn a Worker
|
|
15
|
+
|
|
16
|
+
```typescript
|
|
17
|
+
import { worker } from "@aikirun/worker";
|
|
18
|
+
import { client } from "@aikirun/client";
|
|
19
|
+
import { onboardingWorkflowV1 } from "./workflows.ts";
|
|
20
|
+
|
|
21
|
+
// Define worker
|
|
22
|
+
const aikiWorker = worker({
|
|
23
|
+
id: "worker-1",
|
|
24
|
+
workflows: [onboardingWorkflowV1],
|
|
25
|
+
subscriber: { type: "redis_streams" },
|
|
26
|
+
opts: {
|
|
27
|
+
maxConcurrentWorkflowRuns: 10,
|
|
28
|
+
},
|
|
29
|
+
});
|
|
30
|
+
|
|
31
|
+
// Initialize client
|
|
32
|
+
const aikiClient = await client({
|
|
33
|
+
url: "http://localhost:9090",
|
|
34
|
+
redis: { host: "localhost", port: 6379 },
|
|
35
|
+
});
|
|
36
|
+
|
|
37
|
+
// Spawn worker
|
|
38
|
+
const handle = await aikiWorker.spawn(aikiClient);
|
|
39
|
+
```
|
|
40
|
+
|
|
41
|
+
### Graceful Shutdown
|
|
42
|
+
|
|
43
|
+
```typescript
|
|
44
|
+
import process from "node:process";
|
|
45
|
+
|
|
46
|
+
const shutdown = async () => {
|
|
47
|
+
await handle.stop();
|
|
48
|
+
await aikiClient.close();
|
|
49
|
+
process.exit(0);
|
|
50
|
+
};
|
|
51
|
+
|
|
52
|
+
process.on("SIGINT", shutdown);
|
|
53
|
+
process.on("SIGTERM", shutdown);
|
|
54
|
+
```
|
|
55
|
+
|
|
56
|
+
## Features
|
|
57
|
+
|
|
58
|
+
- **Durable Execution** - Automatically resume from failures without losing progress
|
|
59
|
+
- **Horizontal Scaling** - Multiple workers process workflows in parallel
|
|
60
|
+
- **State Management** - Persist execution state at each step
|
|
61
|
+
- **Automatic Recovery** - Detect stuck workflows and retry automatically
|
|
62
|
+
- **Polling Strategies** - Adaptive polling with configurable backoff
|
|
63
|
+
- **Graceful Shutdown** - Clean worker termination with in-flight workflow handling
|
|
64
|
+
|
|
65
|
+
## Horizontal Scaling
|
|
66
|
+
|
|
67
|
+
Scale workers by creating separate definitions to isolate workflows or shard by key:
|
|
68
|
+
|
|
69
|
+
```typescript
|
|
70
|
+
// Separate workers by workflow type
|
|
71
|
+
const orderWorker = worker({ id: "orders", workflows: [orderWorkflowV1] });
|
|
72
|
+
const emailWorker = worker({ id: "emails", workflows: [emailWorkflowV1] });
|
|
73
|
+
|
|
74
|
+
await orderWorker.spawn(client);
|
|
75
|
+
await emailWorker.spawn(client);
|
|
76
|
+
```
|
|
77
|
+
|
|
78
|
+
```typescript
|
|
79
|
+
// Shard workers by key (reuse base definition with different shards)
|
|
80
|
+
const orderWorker = worker({ id: "order-processor", workflows: [orderWorkflowV1] });
|
|
81
|
+
|
|
82
|
+
await orderWorker.with().opt("shardKeys", ["us-east", "us-west"]).spawn(client);
|
|
83
|
+
await orderWorker.with().opt("shardKeys", ["eu-west"]).spawn(client);
|
|
84
|
+
```
|
|
85
|
+
|
|
86
|
+
## Worker Configuration
|
|
87
|
+
|
|
88
|
+
### Params (required for worker identity)
|
|
89
|
+
|
|
90
|
+
```typescript
|
|
91
|
+
interface WorkerParams {
|
|
92
|
+
id: string; // Unique worker ID
|
|
93
|
+
workflows: WorkflowVersion[]; // Workflow versions to execute
|
|
94
|
+
subscriber?: SubscriberStrategy; // Message subscriber (default: redis_streams)
|
|
95
|
+
}
|
|
96
|
+
```
|
|
97
|
+
|
|
98
|
+
### Options (via `opts` param or `with()` builder)
|
|
99
|
+
|
|
100
|
+
```typescript
|
|
101
|
+
interface WorkerOptions {
|
|
102
|
+
maxConcurrentWorkflowRuns?: number; // Concurrency limit (default: 1)
|
|
103
|
+
workflowRun?: {
|
|
104
|
+
heartbeatIntervalMs?: number; // Heartbeat interval (default: 30s)
|
|
105
|
+
};
|
|
106
|
+
gracefulShutdownTimeoutMs?: number; // Shutdown timeout (default: 5s)
|
|
107
|
+
shardKeys?: string[]; // Optional shard keys for distributed work
|
|
108
|
+
}
|
|
109
|
+
```
|
|
110
|
+
|
|
111
|
+
## Workflow Registration
|
|
112
|
+
|
|
113
|
+
Workers receive workflow versions through the `workflows` param:
|
|
114
|
+
|
|
115
|
+
```typescript
|
|
116
|
+
const aikiWorker = worker({
|
|
117
|
+
id: "worker-1",
|
|
118
|
+
workflows: [workflowV1, workflowV2, anotherWorkflowV1],
|
|
119
|
+
subscriber: { type: "redis_streams" },
|
|
120
|
+
});
|
|
121
|
+
```
|
|
122
|
+
|
|
123
|
+
The worker automatically discovers and executes the registered workflow versions.
|
|
124
|
+
|
|
125
|
+
## State Persistence
|
|
126
|
+
|
|
127
|
+
Workers store execution state at each step:
|
|
128
|
+
|
|
129
|
+
- Task completion status
|
|
130
|
+
- Sleep/wait checkpoints
|
|
131
|
+
- Event acknowledgments
|
|
132
|
+
- Child workflow results
|
|
133
|
+
|
|
134
|
+
This allows workflows to resume from the exact point of failure.
|
|
135
|
+
|
|
136
|
+
## Related Packages
|
|
137
|
+
|
|
138
|
+
- [@aikirun/client](https://www.npmjs.com/package/@aikirun/client) - Start workflows
|
|
139
|
+
- [@aikirun/workflow](https://www.npmjs.com/package/@aikirun/workflow) - Define workflows
|
|
140
|
+
- [@aikirun/task](https://www.npmjs.com/package/@aikirun/task) - Define tasks
|
|
141
|
+
- [@aikirun/types](https://www.npmjs.com/package/@aikirun/types) - Type definitions
|
|
142
|
+
|
|
143
|
+
## Changelog
|
|
144
|
+
|
|
145
|
+
See the [CHANGELOG](https://github.com/aikirun/aiki/blob/main/CHANGELOG.md) for version history.
|
|
146
|
+
|
|
147
|
+
## License
|
|
148
|
+
|
|
149
|
+
Apache-2.0
|
package/dist/index.d.ts
ADDED
|
@@ -0,0 +1,96 @@
|
|
|
1
|
+
import { Client, SubscriberStrategy } from '@aikirun/client';
import { WorkflowVersion } from '@aikirun/workflow';

// Tuple guaranteed to hold at least one element; drives the And/Or helpers below.
type NonEmptyArray<T> = [T, ...T[]];

// Resolves to T only when T is a non-array object; arrays and primitives become never.
type NonArrayObject<T> = T extends object ? (T extends ReadonlyArray<unknown> ? never : T) : never;
// Boolean-level assignability test: true when SubT is assignable to SuperT.
type IsSubtype<SubT, SuperT> = SubT extends SuperT ? true : false;
// Type-level logical AND over a non-empty boolean tuple (short-circuits on the first false).
type And<T extends NonEmptyArray<boolean>> = T extends [infer First, ...infer Rest] ? false extends First ? false : Rest extends NonEmptyArray<boolean> ? And<Rest> : true : never;
// Type-level logical OR over a non-empty boolean tuple (short-circuits on the first true).
type Or<T extends NonEmptyArray<boolean>> = T extends [infer First, ...infer Rest] ? true extends First ? true : Rest extends NonEmptyArray<boolean> ? Or<Rest> : false : never;
// Union of dot-separated key paths into T (e.g. "workflowRun" | "workflowRun.heartbeatIntervalMs").
// The `T extends T` wrapper distributes the computation over union members of T.
type PathFromObject<T, IncludeArrayKeys extends boolean = false> = T extends T ? PathFromObjectInternal<T, IncludeArrayKeys> : never;
// Recursive worker for PathFromObject: descends into non-array object properties,
// emitting both the key itself and `key.subpath` template-literal extensions.
type PathFromObjectInternal<T, IncludeArrayKeys extends boolean> = And<[
    IsSubtype<T, object>,
    Or<[IncludeArrayKeys, NonArrayObject<T> extends never ? false : true]>
]> extends true ? {
    [K in Exclude<keyof T, symbol>]-?: And<[
        IsSubtype<NonNullable<T[K]>, object>,
        Or<[IncludeArrayKeys, NonArrayObject<NonNullable<T[K]>> extends never ? false : true]>
    ]> extends true ? K | `${K}.${PathFromObjectInternal<NonNullable<T[K]>, IncludeArrayKeys>}` : K;
}[Exclude<keyof T, symbol>] : "";
type ExtractObjectType<T> = T extends object ? T : never;
// Value type located at a dot path in T; widens with `| undefined` when any
// segment along the path is optional in T.
type TypeOfValueAtPath<T extends object, Path extends PathFromObject<T>> = Path extends keyof T ? T[Path] : Path extends `${infer First}.${infer Rest}` ? First extends keyof T ? undefined extends T[First] ? Rest extends PathFromObject<ExtractObjectType<T[First]>> ? TypeOfValueAtPath<ExtractObjectType<T[First]>, Rest> | undefined : never : Rest extends PathFromObject<ExtractObjectType<T[First]>> ? TypeOfValueAtPath<ExtractObjectType<T[First]>, Rest> : never : never : never;

/**
 * Creates an Aiki worker definition for executing workflows.
 *
 * Worker definitions are static and reusable. Call `spawn(client)` to begin
 * execution, which returns a handle for controlling the running worker.
 *
 * @param params - Worker configuration parameters
 * @param params.id - Unique worker ID for identification and monitoring
 * @param params.workflows - Array of workflow versions this worker can execute
 * @param params.subscriber - Message subscriber strategy (default: redis_streams)
 * @returns Worker definition, call spawn(client) to begin execution
 *
 * @example
 * ```typescript
 * export const myWorker = worker({
 *   id: "order-worker",
 *   workflows: [orderWorkflowV1, paymentWorkflowV1],
 *   opts: {
 *     maxConcurrentWorkflowRuns: 10,
 *   },
 * });
 *
 * const handle = await myWorker.spawn(client);
 *
 * process.on("SIGINT", async () => {
 *   await handle.stop();
 *   await client.close();
 * });
 * ```
 */
declare function worker(params: WorkerParams): Worker;
interface WorkerParams {
    id: string; // unique worker ID used for identification/monitoring
    workflows: WorkflowVersion<any, any, any>[]; // workflow versions this worker can execute
    subscriber?: SubscriberStrategy; // message subscriber (default: redis_streams)
    opts?: WorkerOptions; // runtime tuning; overridable per-spawn via with().opt()
}
interface WorkerOptions {
    maxConcurrentWorkflowRuns?: number; // concurrency cap (default: 1)
    workflowRun?: {
        heartbeatIntervalMs?: number; // heartbeat period while a run executes (default: 30s)
        /**
         * Threshold for spinning vs persisting delays (default: 10ms).
         *
         * Delays <= threshold: In-memory wait (fast, no history, not durable)
         * Delays > threshold: Server state transition (history recorded, durable)
         *
         * Set to 0 to record all delays in transition history.
         */
        spinThresholdMs?: number;
    };
    gracefulShutdownTimeoutMs?: number; // wait for in-flight runs on stop() (default: 5s)
    /**
     * Optional array of shardKeys this worker should process.
     * When provided, the worker will only subscribe to sharded streams.
     * When omitted, the worker subscribes to default streams.
     */
    shardKeys?: string[];
}
// Fluent builder returned by Worker.with(): each opt() call layers a typed
// dot-path override onto the definition's options; spawn() applies them.
interface WorkerBuilder {
    opt<Path extends PathFromObject<WorkerOptions>>(path: Path, value: TypeOfValueAtPath<WorkerOptions, Path>): WorkerBuilder;
    spawn: Worker["spawn"];
}
interface Worker {
    id: string;
    with(): WorkerBuilder;
    spawn: <AppContext>(client: Client<AppContext>) => Promise<WorkerHandle>;
}
// Handle for a running worker; stop() initiates graceful shutdown.
// NOTE(review): WorkerOptions/WorkerBuilder/WorkerHandle appear in the public
// surface but are not re-exported below — confirm whether that is intentional.
interface WorkerHandle {
    id: string;
    stop: () => Promise<void>;
}

export { type Worker, type WorkerParams, worker };
|
package/dist/index.js
ADDED
|
@@ -0,0 +1,351 @@
|
|
|
1
|
+
// ../../lib/array/utils.ts
|
|
2
|
+
/**
 * Runtime guard matching the NonEmptyArray<T> tuple type: reports whether
 * the given array holds at least one element.
 */
function isNonEmptyArray(value) {
  return value.length !== 0;
}
|
|
5
|
+
|
|
6
|
+
// ../../lib/async/delay.ts
|
|
7
|
+
function delay(ms, options) {
|
|
8
|
+
const abortSignal = options?.abortSignal;
|
|
9
|
+
if (abortSignal?.aborted) {
|
|
10
|
+
return Promise.reject(abortSignal.reason);
|
|
11
|
+
}
|
|
12
|
+
return new Promise((resolve, reject) => {
|
|
13
|
+
const abort = () => {
|
|
14
|
+
clearTimeout(timeout);
|
|
15
|
+
reject(abortSignal?.reason);
|
|
16
|
+
};
|
|
17
|
+
const timeout = setTimeout(() => {
|
|
18
|
+
abortSignal?.removeEventListener("abort", abort);
|
|
19
|
+
resolve();
|
|
20
|
+
}, ms);
|
|
21
|
+
abortSignal?.addEventListener("abort", abort, { once: true });
|
|
22
|
+
});
|
|
23
|
+
}
|
|
24
|
+
|
|
25
|
+
// ../../lib/async/fire-and-forget.ts
|
|
26
|
+
function fireAndForget(promise, onError) {
|
|
27
|
+
promise.catch((error) => {
|
|
28
|
+
onError(error instanceof Error ? error : new Error(String(error)));
|
|
29
|
+
});
|
|
30
|
+
}
|
|
31
|
+
|
|
32
|
+
// ../../lib/error/conflict.ts
|
|
33
|
+
/**
 * Detects a server-side conflict error in any of the shapes the transport
 * may surface:
 *   - `{ code: "CONFLICT" }`
 *   - `{ status: 409 }`
 *   - an Error instance whose `name` is "ConflictError"
 *   - `{ data: { status: 409 } }` (wrapped response payload)
 * Anything else — including null and non-objects — is not a conflict.
 */
function isServerConflictError(error) {
  if (error === null || typeof error !== "object") return false;
  const byCode = "code" in error && error.code === "CONFLICT";
  const byStatus = "status" in error && error.status === 409;
  const byName = error instanceof Error && error.name === "ConflictError";
  if (byCode || byStatus || byName) return true;
  // Last resort: a wrapped payload carrying the HTTP status.
  if (!("data" in error) || error.data === null || typeof error.data !== "object") return false;
  return "status" in error.data && error.data.status === 409;
}
|
|
51
|
+
|
|
52
|
+
// ../../lib/object/overrider.ts
|
|
53
|
+
function set(obj, path, value) {
|
|
54
|
+
const keys = path.split(".");
|
|
55
|
+
let currentValue = obj;
|
|
56
|
+
for (let i = 0; i < keys.length - 1; i++) {
|
|
57
|
+
const key = keys[i];
|
|
58
|
+
currentValue = currentValue[key];
|
|
59
|
+
if (currentValue === void 0 || currentValue === null) {
|
|
60
|
+
currentValue = {};
|
|
61
|
+
currentValue[key] = currentValue;
|
|
62
|
+
}
|
|
63
|
+
}
|
|
64
|
+
const lastKey = keys[keys.length - 1];
|
|
65
|
+
currentValue[lastKey] = value;
|
|
66
|
+
}
|
|
67
|
+
/**
 * Immutable override builder factory. `objectOverrider(defaults)(base)` yields
 * a builder whose `with(path, value)` records a dot-path override (returning a
 * new builder; earlier builders are untouched) and whose `build()` deep-clones
 * `base ?? defaults` via structuredClone and applies the recorded overrides in
 * order with `set`.
 */
var objectOverrider = (defaultObj) => (obj) => {
  function makeBuilder(pending) {
    return {
      with(path, value) {
        // Accumulate immutably so sibling builders never share override lists.
        return makeBuilder(pending.concat({ path: `${path}`, value }));
      },
      build() {
        const result = structuredClone(obj ?? defaultObj);
        for (const override of pending) {
          set(result, override.path, override.value);
        }
        return result;
      }
    };
  }
  return makeBuilder([]);
};
|
|
80
|
+
|
|
81
|
+
// worker.ts
|
|
82
|
+
import { INTERNAL } from "@aikirun/types/symbols";
|
|
83
|
+
import { TaskFailedError } from "@aikirun/types/task";
|
|
84
|
+
import {
|
|
85
|
+
WorkflowRunCancelledError,
|
|
86
|
+
WorkflowRunFailedError,
|
|
87
|
+
WorkflowRunNotExecutableError,
|
|
88
|
+
WorkflowRunSuspendedError
|
|
89
|
+
} from "@aikirun/types/workflow-run";
|
|
90
|
+
import {
|
|
91
|
+
createWorkflowRunSleeper,
|
|
92
|
+
workflowRegistry,
|
|
93
|
+
workflowRunHandle
|
|
94
|
+
} from "@aikirun/workflow";
|
|
95
|
+
// Public factory: wraps the raw params in an inert WorkerImpl definition.
// Nothing runs until `spawn(client)` is called on the returned definition.
function worker(params) {
  return new WorkerImpl(params);
}
|
|
98
|
+
// Static, reusable worker definition. Holds only params; all runtime state
// lives in the WorkerHandleImpl created by spawn(). The `_WorkerImpl`
// self-reference lets the builder clone the definition with overridden opts.
var WorkerImpl = class _WorkerImpl {
  constructor(params) {
    this.params = params;
    this.id = params.id;
  }
  id;
  // Returns a fluent builder: each opt(path, value) layers a dot-path override
  // on top of this definition's opts without mutating it; spawn() on the
  // builder clones the definition with the merged opts and starts it.
  with() {
    const optsOverrider = objectOverrider(this.params.opts ?? {});
    const createBuilder = (optsBuilder) => ({
      opt: (path, value) => createBuilder(optsBuilder.with(path, value)),
      spawn: (client) => new _WorkerImpl({ ...this.params, opts: optsBuilder.build() }).spawn(client)
    });
    return createBuilder(optsOverrider());
  }
  // Creates a handle bound to the client, starts its background poll loop,
  // and returns the handle for lifecycle control (stop()).
  async spawn(client) {
    const handle = new WorkerHandleImpl(client, this.params);
    await handle._start();
    return handle;
  }
};
|
|
118
|
+
// Live worker instance returned by spawn(). Owns the subscriber strategy, the
// background poll loop, per-run heartbeats and acknowledgements, and graceful
// shutdown of in-flight workflow runs.
var WorkerHandleImpl = class {
  constructor(client, params) {
    this.client = client;
    this.params = params;
    this.id = params.id;
    // Index the registered workflow versions for lookup by
    // (workflowId, workflowVersionId) during dispatch.
    this.registry = workflowRegistry().addMany(this.params.workflows);
    this.logger = client.logger.child({
      "aiki.component": "worker",
      "aiki.workerId": this.id
    });
  }
  id;
  registry;
  logger;
  abortController; // controls the poll loop; assigned in _start()
  subscriberStrategy; // message source; assigned in _start()
  // workflowRunId -> { run, executionPromise, meta } for all in-flight runs.
  activeWorkflowRunsById = /* @__PURE__ */ new Map();
  // Builds and initializes the subscriber strategy (default: redis_streams,
  // optionally sharded), then launches the poll loop fire-and-forget.
  // Poll errors occurring after abort are expected and not logged.
  async _start() {
    const subscriberStrategyBuilder = this.client[INTERNAL].subscriber.create(
      this.params.subscriber ?? { type: "redis_streams" },
      this.registry.getAll(),
      this.params.opts?.shardKeys
    );
    this.subscriberStrategy = await subscriberStrategyBuilder.init(this.id, {
      onError: (error) => this.handleNotificationError(error),
      onStop: () => this.stop()
    });
    this.abortController = new AbortController();
    const abortSignal = this.abortController.signal;
    fireAndForget(this.poll(abortSignal), (error) => {
      if (!abortSignal.aborted) {
        this.logger.error("Unexpected error", {
          "aiki.error": error.message
        });
      }
    });
  }
  // Graceful shutdown: abort polling, then wait up to
  // gracefulShutdownTimeoutMs (default 5s) for in-flight runs to settle.
  // Runs still active after the timeout are logged and dropped from tracking.
  async stop() {
    this.logger.info("Worker stopping");
    this.abortController?.abort();
    const activeWorkflowRuns = Array.from(this.activeWorkflowRunsById.values());
    if (activeWorkflowRuns.length === 0) return;
    const timeoutMs = this.params.opts?.gracefulShutdownTimeoutMs ?? 5e3;
    if (timeoutMs > 0) {
      await Promise.race([Promise.allSettled(activeWorkflowRuns.map((w) => w.executionPromise)), delay(timeoutMs)]);
    }
    const stillActive = Array.from(this.activeWorkflowRunsById.values());
    if (stillActive.length > 0) {
      const ids = stillActive.map((w) => w.run.id).join(", ");
      this.logger.warn("Worker shutdown with active workflows", {
        "aiki.activeWorkflowRunIds": ids
      });
    }
    this.activeWorkflowRunsById.clear();
  }
  // Main loop: sleep (abortably), check capacity, fetch a batch, enqueue it,
  // and let the subscriber strategy choose the next delay based on the outcome
  // (at_capacity / retry-with-attempt-count / polled with or without work).
  async poll(abortSignal) {
    this.logger.info("Worker started");
    if (!this.subscriberStrategy) {
      throw new Error("Subscriber strategy not initialized");
    }
    let nextDelayMs = this.subscriberStrategy.getNextDelay({ type: "polled", foundWork: false });
    let subscriberFailedAttempts = 0;
    while (!abortSignal.aborted) {
      await delay(nextDelayMs, { abortSignal });
      // Remaining concurrency budget (default limit: 1 concurrent run).
      const availableCapacity = (this.params.opts?.maxConcurrentWorkflowRuns ?? 1) - this.activeWorkflowRunsById.size;
      if (availableCapacity <= 0) {
        nextDelayMs = this.subscriberStrategy.getNextDelay({ type: "at_capacity" });
        continue;
      }
      const nextBatchResponse = await this.fetchNextWorkflowRunBatch(availableCapacity);
      if (!nextBatchResponse.success) {
        subscriberFailedAttempts++;
        nextDelayMs = this.subscriberStrategy.getNextDelay({
          type: "retry",
          attemptNumber: subscriberFailedAttempts
        });
        continue;
      }
      subscriberFailedAttempts = 0;
      if (!isNonEmptyArray(nextBatchResponse.batch)) {
        nextDelayMs = this.subscriberStrategy.getNextDelay({ type: "polled", foundWork: false });
        continue;
      }
      await this.enqueueWorkflowRunBatch(nextBatchResponse.batch, abortSignal);
      nextDelayMs = this.subscriberStrategy.getNextDelay({ type: "polled", foundWork: true });
    }
  }
  // Fetches up to `size` pending workflow-run messages. Never throws: failures
  // are logged and returned as { success: false } so poll() can back off.
  async fetchNextWorkflowRunBatch(size) {
    if (!this.subscriberStrategy) {
      return {
        success: false,
        error: new Error("Subscriber strategy not initialized")
      };
    }
    try {
      const batch = await this.subscriberStrategy.getNextBatch(size);
      return {
        success: true,
        batch
      };
    } catch (error) {
      this.logger.error("Error getting next workflow runs batch", {
        "aiki.error": error instanceof Error ? error.message : String(error)
      });
      return {
        success: false,
        error
      };
    }
  }
  // For each message: skip duplicates already in flight; fetch the run from
  // the API; acknowledge-and-drop messages whose run no longer exists or whose
  // workflow version is not registered on this worker (best-effort ack);
  // otherwise start execution WITHOUT awaiting it and track the promise.
  async enqueueWorkflowRunBatch(batch, abortSignal) {
    for (const { data, meta } of batch) {
      const { workflowRunId } = data;
      if (this.activeWorkflowRunsById.has(workflowRunId)) {
        this.logger.info("Workflow already running", {
          "aiki.workflowRunId": workflowRunId
        });
        continue;
      }
      const { run: workflowRun } = await this.client.api.workflowRun.getByIdV1({ id: workflowRunId });
      if (!workflowRun) {
        // Run not found server-side: ack so the message is not redelivered.
        if (meta && this.subscriberStrategy?.acknowledge) {
          await this.subscriberStrategy.acknowledge(workflowRunId, meta).catch(() => {
          });
        }
        continue;
      }
      const workflowVersion = this.registry.get(
        workflowRun.workflowId,
        workflowRun.workflowVersionId
      );
      if (!workflowVersion) {
        this.logger.warn("Workflow version not found", {
          "aiki.workflowId": workflowRun.workflowId,
          "aiki.workflowVersionId": workflowRun.workflowVersionId,
          "aiki.workflowRunId": workflowRun.id
        });
        if (meta && this.subscriberStrategy?.acknowledge) {
          await this.subscriberStrategy.acknowledge(workflowRunId, meta).catch(() => {
          });
        }
        continue;
      }
      if (abortSignal.aborted) break;
      // Intentionally not awaited: the run executes concurrently and removes
      // itself from the map in executeWorkflow's finally block.
      const workflowExecutionPromise = this.executeWorkflow(workflowRun, workflowVersion, meta);
      this.activeWorkflowRunsById.set(workflowRun.id, {
        run: workflowRun,
        executionPromise: workflowExecutionPromise,
        meta
      });
    }
  }
  // Runs one workflow: builds the run handle and app context, keeps the
  // subscriber message alive with periodic heartbeats (default 30s), invokes
  // the registered handler, then decides whether to acknowledge the message.
  // Expected terminal errors (cancelled/failed/suspended/not-executable/task
  // failure/server conflict) ARE acknowledged; unexpected errors leave the
  // message unacknowledged so the subscriber can redeliver it.
  async executeWorkflow(workflowRun, workflowVersion, meta) {
    const logger = this.logger.child({
      "aiki.component": "workflow-execution",
      "aiki.workflowId": workflowRun.workflowId,
      "aiki.workflowVersionId": workflowRun.workflowVersionId,
      "aiki.workflowRunId": workflowRun.id,
      ...meta && {
        "aiki.messageId": meta.messageId
      }
    });
    let heartbeatInterval;
    let shouldAcknowledge = false;
    try {
      const handle = await workflowRunHandle(this.client, workflowRun, logger);
      const appContext = this.client[INTERNAL].contextFactory ? await this.client[INTERNAL].contextFactory(workflowRun) : null;
      const heartbeat = this.subscriberStrategy?.heartbeat;
      if (meta && heartbeat) {
        heartbeatInterval = setInterval(() => {
          try {
            heartbeat(workflowRun.id, meta);
          } catch (error) {
            logger.warn("Failed to send heartbeat", {
              "aiki.error": error instanceof Error ? error.message : String(error)
            });
          }
        }, this.params.opts?.workflowRun?.heartbeatIntervalMs ?? 3e4);
      }
      await workflowVersion[INTERNAL].handler(
        workflowRun.input,
        {
          id: workflowRun.id,
          workflowId: workflowRun.workflowId,
          workflowVersionId: workflowRun.workflowVersionId,
          options: workflowRun.options,
          logger,
          sleep: createWorkflowRunSleeper(handle, logger, {
            spinThresholdMs: this.params.opts?.workflowRun?.spinThresholdMs ?? 10
          }),
          [INTERNAL]: { handle }
        },
        appContext
      );
      shouldAcknowledge = true;
    } catch (error) {
      if (error instanceof WorkflowRunNotExecutableError || error instanceof WorkflowRunCancelledError || error instanceof WorkflowRunFailedError || error instanceof WorkflowRunSuspendedError || error instanceof TaskFailedError || isServerConflictError(error)) {
        shouldAcknowledge = true;
      } else {
        logger.error("Unexpected error during workflow execution", {
          "aiki.error": error instanceof Error ? error.message : String(error),
          "aiki.stack": error instanceof Error ? error.stack : void 0
        });
        shouldAcknowledge = false;
      }
    } finally {
      if (heartbeatInterval) clearInterval(heartbeatInterval);
      if (meta && this.subscriberStrategy?.acknowledge) {
        if (shouldAcknowledge) {
          try {
            await this.subscriberStrategy.acknowledge(workflowRun.id, meta);
          } catch (error) {
            logger.error("Failed to acknowledge message, it may be reprocessed", {
              "aiki.errorType": "MESSAGE_ACK_FAILED",
              "aiki.error": error instanceof Error ? error.message : String(error)
            });
          }
        } else {
          logger.debug("Message left in PEL for retry");
        }
      }
      this.activeWorkflowRunsById.delete(workflowRun.id);
    }
  }
  // Subscriber push-notification failure callback: log and keep going —
  // the poll loop continues to drive work regardless.
  handleNotificationError(error) {
    this.logger.warn("Notification error, falling back to polling", {
      "aiki.error": error.message,
      "aiki.stack": error.stack
    });
  }
};
|
|
349
|
+
export {
|
|
350
|
+
worker
|
|
351
|
+
};
|
package/package.json
ADDED
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@aikirun/worker",
|
|
3
|
+
"version": "0.5.3",
|
|
4
|
+
"description": "Worker SDK for Aiki - execute workflows and tasks with durable state management and automatic recovery",
|
|
5
|
+
"type": "module",
|
|
6
|
+
"main": "./dist/index.js",
|
|
7
|
+
"types": "./dist/index.d.ts",
|
|
8
|
+
"exports": {
|
|
9
|
+
".": {
|
|
10
|
+
"types": "./dist/index.d.ts",
|
|
11
|
+
"import": "./dist/index.js"
|
|
12
|
+
}
|
|
13
|
+
},
|
|
14
|
+
"files": [
|
|
15
|
+
"dist"
|
|
16
|
+
],
|
|
17
|
+
"scripts": {
|
|
18
|
+
"build": "tsup"
|
|
19
|
+
},
|
|
20
|
+
"dependencies": {
|
|
21
|
+
"@aikirun/types": "0.5.3",
|
|
22
|
+
"@aikirun/client": "0.5.3",
|
|
23
|
+
"@aikirun/workflow": "0.5.3"
|
|
24
|
+
},
|
|
25
|
+
"publishConfig": {
|
|
26
|
+
"access": "public"
|
|
27
|
+
},
|
|
28
|
+
"license": "Apache-2.0"
|
|
29
|
+
}
|