@aikirun/task 0.7.0 → 0.8.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +12 -12
- package/dist/index.d.ts +17 -19
- package/dist/index.js +161 -65
- package/package.json +3 -3
package/README.md
CHANGED
@@ -17,7 +17,7 @@ npm install @aikirun/task
 import { task } from "@aikirun/task";
 
 export const sendVerificationEmail = task({
-…
+  name: "send-verification",
   async handler(input: { email: string }) {
     return emailService.sendVerification(input.email);
   },
@@ -28,7 +28,7 @@ export const sendVerificationEmail = task({
 
 ```typescript
 export const ringAlarm = task({
-…
+  name: "ring-alarm",
   handler(input: { song: string }) {
     return Promise.resolve(audioService.play(input.song));
   },
@@ -47,10 +47,10 @@ export const ringAlarm = task({
 ```typescript
 import { workflow } from "@aikirun/workflow";
 
-export const morningWorkflow = workflow({…
+export const morningWorkflow = workflow({ name: "morning-routine" });
 
-export const morningWorkflowV1 = morningWorkflow.v("1.0", {
-  async handler(…
+export const morningWorkflowV1 = morningWorkflow.v("1.0.0", {
+  async handler(run, input) {
     const result = await ringAlarm.start(run, { song: "alarm.mp3" });
     console.log("Task completed:", result);
   },
@@ -61,7 +61,7 @@ export const morningWorkflowV1 = morningWorkflow.v("1.0", {
 
 - **Idempotent Execution** - Tasks can be safely retried without unintended side effects
 - **Automatic Retries** - Multiple retry strategies (fixed, exponential, jittered)
-- **…
+- **Reference IDs** - Custom identifiers for tracking and deduplication
 - **Error Handling** - Structured error information with recovery strategies
 - **State Tracking** - Task execution state persists across failures
 - **Type Safety** - Full TypeScript support with input/output types
@@ -70,8 +70,8 @@ export const morningWorkflowV1 = morningWorkflow.v("1.0", {
 
 ```typescript
 interface TaskOptions {
-  retry?: RetryStrategy;
-…
+  retry?: RetryStrategy;
+  reference?: { id: string; onConflict?: "error" | "return_existing" };
 }
 ```
 
@@ -131,14 +131,14 @@ Tasks are executed within a workflow's execution context. Logging happens in the
 
 ```typescript
 export const processPayment = task({
-…
+  name: "process-payment",
   async handler(input: { amount: number }) {
     return { success: true, transactionId: "tx_123" };
   },
 });
 
-export const paymentWorkflowV1 = paymentWorkflow.v("1.0", {
-  async handler(…
+export const paymentWorkflowV1 = paymentWorkflow.v("1.0.0", {
+  async handler(run, input) {
     run.logger.info("Processing payment", { amount: input.amount });
     const result = await processPayment.start(run, { amount: input.amount });
     run.logger.info("Payment complete", result);
@@ -149,7 +149,7 @@ export const paymentWorkflowV1 = paymentWorkflow.v("1.0", {
 ## Best Practices
 
 1. **Make Tasks Idempotent** - Tasks may be retried, so re-running should not cause unintended side effects
-2. **Use…
+2. **Use Reference IDs** - Use custom reference IDs to prevent duplicate processing
 3. **Use Meaningful Errors** - Help diagnose failures
 4. **Log Information** - Use `run.logger` for debugging
 5. **Keep Tasks Focused** - One responsibility per task
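Taken together, the README changes replace the old task identifier with a required name and surface the new reference option. As a minimal sketch of how a reference ID might be set per call through the .with().opt(...) builder declared in index.d.ts below — chargeCustomer and the payment workflow are hypothetical, and the option paths are assumptions based on the TaskOptions shape shown above:

```typescript
import { task } from "@aikirun/task";
import { workflow } from "@aikirun/workflow";

// Hypothetical service and workflow, used only for illustration.
declare const chargeCustomer: (orderId: string, amount: number) => Promise<{ transactionId: string }>;
const paymentWorkflow = workflow({ name: "payment" });

export const processPayment = task({
  name: "process-payment",
  async handler(input: { orderId: string; amount: number }) {
    return chargeCustomer(input.orderId, input.amount);
  },
});

export const paymentWorkflowV1 = paymentWorkflow.v("1.0.0", {
  async handler(run, input: { orderId: string; amount: number }) {
    // Deduplicate on the order id: if a task with this reference ID already
    // completed, the stored result is returned instead of charging again.
    return processPayment
      .with()
      .opt("reference.id", input.orderId)
      .opt("reference.onConflict", "return_existing")
      .start(run, input);
  },
});
```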
package/dist/index.d.ts
CHANGED
@@ -1,7 +1,8 @@
-import { Serializable } from '@aikirun/types/…
-import {…
-import {…
+import { Serializable } from '@aikirun/types/serializable';
+import { TaskName, TaskOptions } from '@aikirun/types/task';
+import { Schema } from '@aikirun/types/validator';
 import { WorkflowRunContext } from '@aikirun/workflow';
+import { RequireAtLeastOneProp } from '@aikirun/types/utils';
 
 type NonEmptyArray<T> = [T, ...T[]];
 
@@ -22,9 +23,6 @@ type PathFromObjectInternal<T, IncludeArrayKeys extends boolean> = And<[
 type ExtractObjectType<T> = T extends object ? T : never;
 type TypeOfValueAtPath<T extends object, Path extends PathFromObject<T>> = Path extends keyof T ? T[Path] : Path extends `${infer First}.${infer Rest}` ? First extends keyof T ? undefined extends T[First] ? Rest extends PathFromObject<ExtractObjectType<T[First]>> ? TypeOfValueAtPath<ExtractObjectType<T[First]>, Rest> | undefined : never : Rest extends PathFromObject<ExtractObjectType<T[First]>> ? TypeOfValueAtPath<ExtractObjectType<T[First]>, Rest> : never : never : never;
 
-interface Schema<Data> {
-    parse: (data: unknown) => Data;
-}
 interface EventDefinition<Data> {
     _type: Data;
     schema?: Schema<Data>;
@@ -41,7 +39,7 @@ type EventsDefinition = Record<string, EventDefinition<unknown>>;
 * @template Input - Type of task input (must be JSON serializable)
 * @template Output - Type of task output (must be JSON serializable)
 * @param params - Task configuration
-* @param params.…
+* @param params.name - Unique task name used for execution tracking
 * @param params.handler - Async function that executes the task logic
 * @returns Task instance with retry and option configuration methods
 *
@@ -49,7 +47,7 @@ type EventsDefinition = Record<string, EventDefinition<unknown>>;
 * ```typescript
 * // Simple task without retry
 * export const sendEmail = task({
-*…
+*   name: "send-email",
 *   handler(input: { email: string; message: string }) {
 *     return emailService.send(input.email, input.message);
 *   },
@@ -57,7 +55,7 @@ type EventsDefinition = Record<string, EventDefinition<unknown>>;
 *
 * // Task with retry configuration
 * export const chargeCard = task({
-*…
+*   name: "charge-card",
 *   handler(input: { cardId: string; amount: number }) {
 *     return paymentService.charge(input.cardId, input.amount);
 *   },
@@ -74,24 +72,24 @@ type EventsDefinition = Record<string, EventDefinition<unknown>>;
 * const result = await chargeCard.start(run, { cardId: "123", amount: 9999 });
 * ```
 */
-declare function task<Input extends Serializable…
+declare function task<Input extends Serializable, Output extends Serializable>(params: TaskParams<Input, Output>): Task<Input, Output>;
 interface TaskParams<Input, Output> {
-…
+    name: string;
     handler: (input: Input) => Promise<Output>;
     opts?: TaskOptions;
+    schema?: RequireAtLeastOneProp<{
+        input?: Schema<Input>;
+        output?: Schema<Output>;
+    }>;
 }
-interface…
-…
-…
+interface Task<Input, Output> {
+    name: TaskName;
+    with(): TaskBuilder<Input, Output>;
+    start: (run: WorkflowRunContext<unknown, unknown, EventsDefinition>, ...args: Input extends void ? [] : [Input]) => Promise<Output>;
 }
 interface TaskBuilder<Input, Output> {
     opt<Path extends PathFromObject<TaskOptions>>(path: Path, value: TypeOfValueAtPath<TaskOptions, Path>): TaskBuilder<Input, Output>;
     start: Task<Input, Output>["start"];
 }
-interface Task<Input, Output> {
-    id: TaskId;
-    with(): TaskBuilder<Input, Output>;
-    start: (run: WorkflowRunContext<unknown, unknown, EventsDefinition>, ...args: Input extends null ? [] : [Input]) => Promise<Output>;
-}
 
 export { type Task, type TaskParams, task };
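The typings above add an optional schema to TaskParams, requiring at least one of input/output, each a Schema<T> from @aikirun/types/validator whose parse takes unknown and returns T. A minimal sketch, assuming a zod schema satisfies that parse contract; emailService is hypothetical:

```typescript
import { z } from "zod";
import { task } from "@aikirun/task";

// Hypothetical mail client, declared only so the sketch type-checks.
declare const emailService: { sendVerification(email: string): Promise<boolean> };

const inputSchema = z.object({ email: z.string().email() });
const outputSchema = z.object({ delivered: z.boolean() });

export const sendVerificationEmail = task({
  name: "send-verification",
  async handler(input: { email: string }) {
    const delivered = await emailService.sendVerification(input.email);
    return { delivered };
  },
  schema: {
    input: inputSchema,   // parsed before the handler runs; a parse error fails the workflow run
    output: outputSchema, // parsed before the result is recorded as completed
  },
});
```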
package/dist/index.js
CHANGED
@@ -22,6 +22,31 @@ function delay(ms, options) {
   });
 }
 
+// ../../lib/json/stable-stringify.ts
+function stableStringify(value) {
+  return stringifyValue(value);
+}
+function stringifyValue(value) {
+  if (value === null || value === void 0) {
+    return "null";
+  }
+  if (typeof value !== "object") {
+    return JSON.stringify(value);
+  }
+  if (Array.isArray(value)) {
+    return `[${value.map(stringifyValue).join(",")}]`;
+  }
+  const keys = Object.keys(value).sort();
+  const pairs = [];
+  for (const key of keys) {
+    const keyValue = value[key];
+    if (keyValue !== void 0) {
+      pairs.push(`${JSON.stringify(key)}:${stringifyValue(keyValue)}`);
+    }
+  }
+  return `{${pairs.join(",")}}`;
+}
+
 // ../../lib/crypto/hash.ts
 async function sha256(input) {
   const data = new TextEncoder().encode(input);
@@ -29,6 +54,9 @@ async function sha256(input) {
   const hashArray = Array.from(new Uint8Array(hashBuffer));
   return hashArray.map((b) => b.toString(16).padStart(2, "0")).join("");
 }
+async function hashInput(input) {
+  return sha256(stableStringify({ input }));
+}
 
 // ../../lib/error/serializable.ts
 function createSerializableError(error) {
@@ -43,25 +71,6 @@ function createSerializableError(error) {
   };
 }
 
-// ../../lib/json/stable-stringify.ts
-function stableStringify(value) {
-  if (value === null || value === void 0) {
-    return JSON.stringify(value);
-  }
-  if (typeof value !== "object") {
-    return JSON.stringify(value);
-  }
-  if (Array.isArray(value)) {
-    return `[${value.map((item) => stableStringify(item)).join(",")}]`;
-  }
-  const keys = Object.keys(value).sort();
-  const pairs = keys.map((key) => {
-    const val = value[key];
-    return `${JSON.stringify(key)}:${stableStringify(val)}`;
-  });
-  return `{${pairs.join(",")}}`;
-}
-
 // ../../lib/object/overrider.ts
 function set(obj, path, value) {
   const keys = path.split(".");
@@ -91,6 +100,11 @@ var objectOverrider = (defaultObj) => (obj) => {
   return createBuilder([]);
 };
 
+// ../../lib/path/index.ts
+function getTaskPath(name, referenceId) {
+  return `${name}/${referenceId}`;
+}
+
 // ../../lib/retry/strategy.ts
 function getRetryParams(attempts, strategy) {
   const strategyType = strategy.type;
@@ -142,16 +156,20 @@ function getRetryParams(attempts, strategy) {
 // task.ts
 import { INTERNAL } from "@aikirun/types/symbols";
 import { TaskFailedError } from "@aikirun/types/task";
-import {…
+import {
+  WorkflowRunConflictError,
+  WorkflowRunFailedError,
+  WorkflowRunSuspendedError
+} from "@aikirun/types/workflow-run";
 function task(params) {
   return new TaskImpl(params);
 }
 var TaskImpl = class _TaskImpl {
   constructor(params) {
     this.params = params;
-    this.…
+    this.name = params.name;
   }
-…
+  name;
   with() {
     const optsOverrider = objectOverrider(this.params.opts ?? {});
     const createBuilder = (optsBuilder) => ({
@@ -163,47 +181,101 @@ var TaskImpl = class _TaskImpl {
   async start(run, ...args) {
     const handle = run[INTERNAL].handle;
     handle[INTERNAL].assertExecutionAllowed();
-    const…
-…
-…
-…
-…
+    const inputRaw = isNonEmptyArray(args) ? args[0] : void 0;
+    let input = inputRaw;
+    if (this.params.schema?.input) {
+      try {
+        input = this.params.schema.input.parse(inputRaw);
+      } catch (error) {
+        await handle[INTERNAL].transitionState({
+          status: "failed",
+          cause: "self",
+          error: createSerializableError(error)
+        });
+        throw new WorkflowRunFailedError(run.id, handle.run.attempts);
+      }
     }
-…
-…
+    const inputHash = await hashInput(input);
+    const reference = this.params.opts?.reference;
+    const path = getTaskPath(this.name, reference?.id ?? inputHash);
+    const existingTaskInfo = handle.run.tasks[path];
+    if (existingTaskInfo) {
+      await this.assertUniqueTaskReferenceId(handle, existingTaskInfo, inputHash, reference, run.logger);
+    }
+    if (existingTaskInfo?.state.status === "completed") {
+      return existingTaskInfo.state.output;
+    }
+    if (existingTaskInfo?.state.status === "failed") {
+      const { state } = existingTaskInfo;
+      throw new TaskFailedError(existingTaskInfo.id, state.attempts, state.error.message);
     }
-    const logger = run.logger.child({
-      "aiki.component": "task-execution",
-      "aiki.taskPath": path
-    });
     let attempts = 0;
     const retryStrategy = this.params.opts?.retry ?? { type: "never" };
-    if (…
-…
-…
-…
-…
+    if (existingTaskInfo?.state) {
+      const taskId2 = existingTaskInfo.id;
+      const state = existingTaskInfo?.state;
+      this.assertRetryAllowed(taskId2, state, retryStrategy, run.logger);
+      run.logger.debug("Retrying task", {
+        "aiki.taskName": this.name,
+        "aiki.taskId": taskId2,
+        "aiki.attempts": state.attempts,
+        "aiki.taskStatus": state.status
      });
-      attempts =…
-…
-…
-…
+      attempts = state.attempts;
+      if (state.status === "awaiting_retry" && handle.run.state.status === "running") {
+        throw new WorkflowRunSuspendedError(run.id);
+      }
     }
     attempts++;
-…
-    await handle[INTERNAL].transitionTaskState(…
-…
-…
+    const options = { retry: retryStrategy, reference };
+    const { taskId } = existingTaskInfo ? await handle[INTERNAL].transitionTaskState({
+      type: "retry",
+      taskId: existingTaskInfo.id,
+      options,
+      taskState: { status: "running", attempts, input }
+    }) : await handle[INTERNAL].transitionTaskState({
+      type: "create",
+      taskName: this.name,
+      options,
+      taskState: { status: "running", attempts, input }
+    });
+    const logger = run.logger.child({
+      "aiki.component": "task-execution",
+      "aiki.taskName": this.name,
+      "aiki.taskId": taskId
+    });
+    logger.info("Task started", { "aiki.attempts": attempts });
+    const { output, lastAttempt } = await this.tryExecuteTask(run, input, taskId, retryStrategy, attempts, logger);
+    await handle[INTERNAL].transitionTaskState({
+      taskId,
+      taskState: { status: "completed", attempts: lastAttempt, output }
+    });
     logger.info("Task complete", { "aiki.attempts": lastAttempt });
     return output;
   }
-  async tryExecuteTask(run, input,…
+  async tryExecuteTask(run, input, taskId, retryStrategy, currentAttempt, logger) {
     let attempts = currentAttempt;
     while (true) {
       try {
-        const…
+        const outputRaw = await this.params.handler(input);
+        let output = outputRaw;
+        if (this.params.schema?.output) {
+          try {
+            output = this.params.schema.output.parse(outputRaw);
+          } catch (error) {
+            await run[INTERNAL].handle[INTERNAL].transitionState({
+              status: "failed",
+              cause: "self",
+              error: createSerializableError(error)
+            });
+            throw new WorkflowRunFailedError(run.id, run[INTERNAL].handle.run.attempts);
+          }
+        }
         return { output, lastAttempt: attempts };
       } catch (error) {
+        if (error instanceof WorkflowRunFailedError || error instanceof WorkflowRunSuspendedError || error instanceof WorkflowRunConflictError) {
+          throw error;
+        }
         const serializableError = createSerializableError(error);
         const retryParams = getRetryParams(attempts, retryStrategy);
         if (!retryParams.retriesLeft) {
@@ -211,12 +283,11 @@ var TaskImpl = class _TaskImpl {
             "aiki.attempts": attempts,
             "aiki.reason": serializableError.message
           });
-          await run[INTERNAL].handle[INTERNAL].transitionTaskState(…
-…
-            attempts,
-            error: serializableError
+          await run[INTERNAL].handle[INTERNAL].transitionTaskState({
+            taskId,
+            taskState: { status: "failed", attempts, error: serializableError }
           });
-          throw new TaskFailedError(…
+          throw new TaskFailedError(taskId, attempts, serializableError.message);
         }
         logger.debug("Task failed. It will be retried", {
           "aiki.attempts": attempts,
@@ -228,30 +299,55 @@ var TaskImpl = class _TaskImpl {
           attempts++;
           continue;
         }
-        await run[INTERNAL].handle[INTERNAL].transitionTaskState(…
-…
-…
-…
-…
+        await run[INTERNAL].handle[INTERNAL].transitionTaskState({
+          taskId,
+          taskState: {
+            status: "awaiting_retry",
+            attempts,
+            error: serializableError,
+            nextAttemptInMs: retryParams.delayMs
+          }
        });
        throw new WorkflowRunSuspendedError(run.id);
      }
    }
  }
-…
+  async assertUniqueTaskReferenceId(handle, existingTaskInfo, inputHash, reference, logger) {
+    if (existingTaskInfo.inputHash !== inputHash && reference) {
+      const onConflict = reference.onConflict ?? "error";
+      if (onConflict !== "error") {
+        return;
+      }
+      logger.error("Reference ID already used by another task", {
+        "aiki.taskName": this.name,
+        "aiki.referenceId": reference.id,
+        "aiki.existingTaskId": existingTaskInfo.id
+      });
+      const error = new WorkflowRunFailedError(
+        handle.run.id,
+        handle.run.attempts,
+        `Reference ID "${reference.id}" already used by another task ${existingTaskInfo.id}`
+      );
+      await handle[INTERNAL].transitionState({
+        status: "failed",
+        cause: "self",
+        error: createSerializableError(error)
+      });
+      throw error;
+    }
+  }
+  assertRetryAllowed(taskId, state, retryStrategy, logger) {
+    const { attempts } = state;
     const retryParams = getRetryParams(attempts, retryStrategy);
     if (!retryParams.retriesLeft) {
       logger.error("Task retry not allowed", {
+        "aiki.taskName": this.name,
+        "aiki.taskId": taskId,
         "aiki.attempts": attempts
       });
-      throw new TaskFailedError(…
+      throw new TaskFailedError(taskId, attempts, "Task retry not allowed");
     }
   }
-  async getPath(input) {
-    const inputHash = await sha256(stableStringify(input));
-    const path = this.params.opts?.idempotencyKey ? `${this.id}/${inputHash}/${this.params.opts.idempotencyKey}` : `${this.id}/${inputHash}`;
-    return path;
-  }
 };
 export {
   task
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@aikirun/task",
-  "version": "0.7.0",
+  "version": "0.8.0",
   "description": "Task SDK for Aiki - define reliable tasks with automatic retries, idempotency, and error handling",
   "type": "module",
   "main": "./dist/index.js",
@@ -18,8 +18,8 @@
     "build": "tsup"
   },
   "dependencies": {
-    "@aikirun/types": "0.…
-    "@aikirun/workflow": "0.…
+    "@aikirun/types": "0.8.0",
+    "@aikirun/workflow": "0.8.0"
   },
   "publishConfig": {
     "access": "public"