@aikirun/task 0.7.0 → 0.9.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +12 -12
- package/dist/index.d.ts +17 -19
- package/dist/index.js +161 -66
- package/package.json +3 -3
package/README.md
CHANGED
@@ -17,7 +17,7 @@ npm install @aikirun/task
 import { task } from "@aikirun/task";
 
 export const sendVerificationEmail = task({
-
+  name: "send-verification",
   async handler(input: { email: string }) {
     return emailService.sendVerification(input.email);
   },
@@ -28,7 +28,7 @@ export const sendVerificationEmail = task({
 
 ```typescript
 export const ringAlarm = task({
-
+  name: "ring-alarm",
   handler(input: { song: string }) {
     return Promise.resolve(audioService.play(input.song));
   },
@@ -47,10 +47,10 @@ export const ringAlarm = task({
 ```typescript
 import { workflow } from "@aikirun/workflow";
 
-export const morningWorkflow = workflow({
+export const morningWorkflow = workflow({ name: "morning-routine" });
 
-export const morningWorkflowV1 = morningWorkflow.v("1.0", {
-  async handler(
+export const morningWorkflowV1 = morningWorkflow.v("1.0.0", {
+  async handler(run, input) {
     const result = await ringAlarm.start(run, { song: "alarm.mp3" });
     console.log("Task completed:", result);
   },
@@ -61,7 +61,7 @@ export const morningWorkflowV1 = morningWorkflow.v("1.0", {
 
 - **Idempotent Execution** - Tasks can be safely retried without unintended side effects
 - **Automatic Retries** - Multiple retry strategies (fixed, exponential, jittered)
-- **
+- **Reference IDs** - Custom identifiers for tracking and deduplication
 - **Error Handling** - Structured error information with recovery strategies
 - **State Tracking** - Task execution state persists across failures
 - **Type Safety** - Full TypeScript support with input/output types
@@ -70,8 +70,8 @@ export const morningWorkflowV1 = morningWorkflow.v("1.0", {
 
 ```typescript
 interface TaskOptions {
-  retry?: RetryStrategy;
-
+  retry?: RetryStrategy;
+  reference?: { id: string; onConflict?: "error" | "return_existing" };
 }
 ```
 
@@ -131,14 +131,14 @@ Tasks are executed within a workflow's execution context. Logging happens in the
 
 ```typescript
 export const processPayment = task({
-
+  name: "process-payment",
   async handler(input: { amount: number }) {
     return { success: true, transactionId: "tx_123" };
   },
 });
 
-export const paymentWorkflowV1 = paymentWorkflow.v("1.0", {
-  async handler(
+export const paymentWorkflowV1 = paymentWorkflow.v("1.0.0", {
+  async handler(run, input) {
     run.logger.info("Processing payment", { amount: input.amount });
     const result = await processPayment.start(run, { amount: input.amount });
     run.logger.info("Payment complete", result);
@@ -149,7 +149,7 @@ export const paymentWorkflowV1 = paymentWorkflow.v("1.0", {
 ## Best Practices
 
 1. **Make Tasks Idempotent** - Tasks may be retried, so re-running should not cause unintended side effects
-2. **Use
+2. **Use Reference IDs** - Use custom reference IDs to prevent duplicate processing
 3. **Use Meaningful Errors** - Help diagnose failures
 4. **Log Information** - Use `run.logger` for debugging
 5. **Keep Tasks Focused** - One responsibility per task
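Taken together, the README changes rename the task identifier from `id` to `name` and surface the new reference option for deduplication. A minimal sketch of how the 0.9.0 API reads end to end, based only on the snippets and type declarations in this diff; the `paymentService` declaration, the workflow names, and the reference-id values are illustrative, not from the package:

```typescript
import { task } from "@aikirun/task";
import { workflow } from "@aikirun/workflow";

// Illustrative service, not part of the package.
declare const paymentService: {
  charge: (orderId: string, amount: number) => Promise<{ transactionId: string }>;
};

// 0.9.0 identifies tasks by `name` (0.7.0 used `id`).
export const chargeOrder = task({
  name: "charge-order",
  async handler(input: { orderId: string; amount: number }) {
    return paymentService.charge(input.orderId, input.amount);
  },
});

export const checkoutWorkflow = workflow({ name: "checkout" });

export const checkoutWorkflowV1 = checkoutWorkflow.v("1.0.0", {
  async handler(run, input: { orderId: string; amount: number }) {
    // The reference id deduplicates the charge: re-running with the same id
    // reuses the recorded task result instead of charging twice.
    const receipt = await chargeOrder
      .with()
      .opt("reference", { id: `charge-${input.orderId}`, onConflict: "return_existing" })
      .start(run, input);
    run.logger.info("Charged", receipt);
  },
});
```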
package/dist/index.d.ts
CHANGED
@@ -1,7 +1,8 @@
-import { Serializable } from '@aikirun/types/
-import {
-import {
+import { Serializable } from '@aikirun/types/serializable';
+import { TaskName, TaskOptions } from '@aikirun/types/task';
+import { Schema } from '@aikirun/types/validator';
 import { WorkflowRunContext } from '@aikirun/workflow';
+import { RequireAtLeastOneProp } from '@aikirun/types/utils';
 
 type NonEmptyArray<T> = [T, ...T[]];
 
@@ -22,9 +23,6 @@ type PathFromObjectInternal<T, IncludeArrayKeys extends boolean> = And<[
 type ExtractObjectType<T> = T extends object ? T : never;
 type TypeOfValueAtPath<T extends object, Path extends PathFromObject<T>> = Path extends keyof T ? T[Path] : Path extends `${infer First}.${infer Rest}` ? First extends keyof T ? undefined extends T[First] ? Rest extends PathFromObject<ExtractObjectType<T[First]>> ? TypeOfValueAtPath<ExtractObjectType<T[First]>, Rest> | undefined : never : Rest extends PathFromObject<ExtractObjectType<T[First]>> ? TypeOfValueAtPath<ExtractObjectType<T[First]>, Rest> : never : never : never;
 
-interface Schema<Data> {
-    parse: (data: unknown) => Data;
-}
 interface EventDefinition<Data> {
     _type: Data;
     schema?: Schema<Data>;
@@ -41,7 +39,7 @@ type EventsDefinition = Record<string, EventDefinition<unknown>>;
 * @template Input - Type of task input (must be JSON serializable)
 * @template Output - Type of task output (must be JSON serializable)
 * @param params - Task configuration
-* @param params.
+* @param params.name - Unique task name used for execution tracking
 * @param params.handler - Async function that executes the task logic
 * @returns Task instance with retry and option configuration methods
 *
@@ -49,7 +47,7 @@ type EventsDefinition = Record<string, EventDefinition<unknown>>;
 * ```typescript
 * // Simple task without retry
 * export const sendEmail = task({
-*
+*   name: "send-email",
 *   handler(input: { email: string; message: string }) {
 *     return emailService.send(input.email, input.message);
 *   },
@@ -57,7 +55,7 @@ type EventsDefinition = Record<string, EventDefinition<unknown>>;
 *
 * // Task with retry configuration
 * export const chargeCard = task({
-*
+*   name: "charge-card",
 *   handler(input: { cardId: string; amount: number }) {
 *     return paymentService.charge(input.cardId, input.amount);
 *   },
@@ -74,24 +72,24 @@ type EventsDefinition = Record<string, EventDefinition<unknown>>;
 * const result = await chargeCard.start(run, { cardId: "123", amount: 9999 });
 * ```
 */
-declare function task<Input extends Serializable
+declare function task<Input extends Serializable, Output extends Serializable>(params: TaskParams<Input, Output>): Task<Input, Output>;
 interface TaskParams<Input, Output> {
-
+    name: string;
     handler: (input: Input) => Promise<Output>;
     opts?: TaskOptions;
+    schema?: RequireAtLeastOneProp<{
+        input?: Schema<Input>;
+        output?: Schema<Output>;
+    }>;
 }
-interface
-
-
+interface Task<Input, Output> {
+    name: TaskName;
+    with(): TaskBuilder<Input, Output>;
+    start: (run: WorkflowRunContext<unknown, unknown, EventsDefinition>, ...args: Input extends void ? [] : [Input]) => Promise<Output>;
 }
 interface TaskBuilder<Input, Output> {
     opt<Path extends PathFromObject<TaskOptions>>(path: Path, value: TypeOfValueAtPath<TaskOptions, Path>): TaskBuilder<Input, Output>;
     start: Task<Input, Output>["start"];
 }
-interface Task<Input, Output> {
-    id: TaskId;
-    with(): TaskBuilder<Input, Output>;
-    start: (run: WorkflowRunContext<unknown, unknown, EventsDefinition>, ...args: Input extends null ? [] : [Input]) => Promise<Output>;
-}
 
 export { type Task, type TaskParams, task };
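The new `schema` field accepts any validator exposing a `parse(data: unknown) => Data` method (the `Schema` type now imported from `@aikirun/types/validator`), and `RequireAtLeastOneProp` means at least one of `input` or `output` must be supplied. A minimal sketch with a hand-rolled validator; the validator and the handler body are illustrative, not from the package:

```typescript
import { task } from "@aikirun/task";

interface ChargeInput {
  cardId: string;
  amount: number;
}

// Hand-rolled validator; any object with a `parse` method fits the Schema shape.
const chargeInputSchema = {
  parse(data: unknown): ChargeInput {
    const candidate = data as Partial<ChargeInput> | null;
    if (!candidate || typeof candidate.cardId !== "string" || typeof candidate.amount !== "number") {
      throw new Error("Invalid charge input");
    }
    return { cardId: candidate.cardId, amount: candidate.amount };
  },
};

export const chargeCard = task({
  name: "charge-card",
  // `schema` requires at least one of `input` / `output`; input-only is fine.
  schema: { input: chargeInputSchema },
  async handler(input: ChargeInput) {
    // Payment call is illustrative.
    return { transactionId: `tx_${input.cardId}`, amount: input.amount };
  },
});
```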
package/dist/index.js
CHANGED
@@ -22,6 +22,31 @@ function delay(ms, options) {
   });
 }
 
+// ../../lib/json/stable-stringify.ts
+function stableStringify(value) {
+  return stringifyValue(value);
+}
+function stringifyValue(value) {
+  if (value === null || value === void 0) {
+    return "null";
+  }
+  if (typeof value !== "object") {
+    return JSON.stringify(value);
+  }
+  if (Array.isArray(value)) {
+    return `[${value.map(stringifyValue).join(",")}]`;
+  }
+  const keys = Object.keys(value).sort();
+  const pairs = [];
+  for (const key of keys) {
+    const keyValue = value[key];
+    if (keyValue !== void 0) {
+      pairs.push(`${JSON.stringify(key)}:${stringifyValue(keyValue)}`);
+    }
+  }
+  return `{${pairs.join(",")}}`;
+}
+
 // ../../lib/crypto/hash.ts
 async function sha256(input) {
   const data = new TextEncoder().encode(input);
@@ -29,6 +54,9 @@ async function sha256(input) {
   const hashArray = Array.from(new Uint8Array(hashBuffer));
   return hashArray.map((b) => b.toString(16).padStart(2, "0")).join("");
 }
+async function hashInput(input) {
+  return sha256(stableStringify({ input }));
+}
 
 // ../../lib/error/serializable.ts
 function createSerializableError(error) {
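The relocated helper now emits a canonical string: keys are sorted and `undefined` members are dropped, so logically equal inputs produce the same SHA-256 hash. A small sketch of that behaviour; the function below only mirrors the bundled helper for illustration and is not exported by the package:

```typescript
// Mirror of the bundled stable-stringify helper (not exported by @aikirun/task),
// shown only to illustrate why key order no longer affects the input hash.
function stableStringify(value: unknown): string {
  if (value === null || value === undefined) return "null";
  if (typeof value !== "object") return JSON.stringify(value);
  if (Array.isArray(value)) return `[${value.map(stableStringify).join(",")}]`;
  const record = value as Record<string, unknown>;
  const pairs: string[] = [];
  for (const key of Object.keys(record).sort()) {
    if (record[key] !== undefined) {
      pairs.push(`${JSON.stringify(key)}:${stableStringify(record[key])}`);
    }
  }
  return `{${pairs.join(",")}}`;
}

// Both calls print {"amount":100,"orderId":"42"}, so the hash of the wrapped
// input is identical and the derived task path matches across replays.
console.log(stableStringify({ orderId: "42", amount: 100 }));
console.log(stableStringify({ amount: 100, orderId: "42" }));
```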
@@ -43,25 +71,6 @@ function createSerializableError(error) {
   };
 }
 
-// ../../lib/json/stable-stringify.ts
-function stableStringify(value) {
-  if (value === null || value === void 0) {
-    return JSON.stringify(value);
-  }
-  if (typeof value !== "object") {
-    return JSON.stringify(value);
-  }
-  if (Array.isArray(value)) {
-    return `[${value.map((item) => stableStringify(item)).join(",")}]`;
-  }
-  const keys = Object.keys(value).sort();
-  const pairs = keys.map((key) => {
-    const val = value[key];
-    return `${JSON.stringify(key)}:${stableStringify(val)}`;
-  });
-  return `{${pairs.join(",")}}`;
-}
-
 // ../../lib/object/overrider.ts
 function set(obj, path, value) {
   const keys = path.split(".");
@@ -91,6 +100,11 @@ var objectOverrider = (defaultObj) => (obj) => {
   return createBuilder([]);
 };
 
+// ../../lib/path/index.ts
+function getTaskPath(name, referenceId) {
+  return `${name}/${referenceId}`;
+}
+
 // ../../lib/retry/strategy.ts
 function getRetryParams(attempts, strategy) {
   const strategyType = strategy.type;
@@ -142,16 +156,20 @@ function getRetryParams(attempts, strategy) {
 // task.ts
 import { INTERNAL } from "@aikirun/types/symbols";
 import { TaskFailedError } from "@aikirun/types/task";
-import {
+import {
+  WorkflowRunConflictError,
+  WorkflowRunFailedError,
+  WorkflowRunSuspendedError
+} from "@aikirun/types/workflow-run";
 function task(params) {
   return new TaskImpl(params);
 }
 var TaskImpl = class _TaskImpl {
   constructor(params) {
     this.params = params;
-    this.
+    this.name = params.name;
   }
-
+  name;
   with() {
     const optsOverrider = objectOverrider(this.params.opts ?? {});
     const createBuilder = (optsBuilder) => ({
@@ -163,47 +181,85 @@ var TaskImpl = class _TaskImpl {
   async start(run, ...args) {
     const handle = run[INTERNAL].handle;
     handle[INTERNAL].assertExecutionAllowed();
-    const
-    const
-    const
-
-
+    const inputRaw = isNonEmptyArray(args) ? args[0] : void 0;
+    const input = await this.parse(handle, this.params.schema?.input, inputRaw);
+    const inputHash = await hashInput(input);
+    const reference = this.params.opts?.reference;
+    const path = getTaskPath(this.name, reference?.id ?? inputHash);
+    const existingTaskInfo = handle.run.tasks[path];
+    if (existingTaskInfo) {
+      await this.assertUniqueTaskReferenceId(handle, existingTaskInfo, inputHash, reference, run.logger);
     }
-    if (
-
+    if (existingTaskInfo?.state.status === "completed") {
+      return this.parse(handle, this.params.schema?.output, existingTaskInfo.state.output);
+    }
+    if (existingTaskInfo?.state.status === "failed") {
+      const { state } = existingTaskInfo;
+      throw new TaskFailedError(existingTaskInfo.id, state.attempts, state.error.message);
     }
-    const logger = run.logger.child({
-      "aiki.component": "task-execution",
-      "aiki.taskPath": path
-    });
     let attempts = 0;
     const retryStrategy = this.params.opts?.retry ?? { type: "never" };
-    if (
-
-
-
-
+    if (existingTaskInfo?.state) {
+      const taskId2 = existingTaskInfo.id;
+      const state = existingTaskInfo?.state;
+      this.assertRetryAllowed(taskId2, state, retryStrategy, run.logger);
+      run.logger.debug("Retrying task", {
+        "aiki.taskName": this.name,
+        "aiki.taskId": taskId2,
+        "aiki.attempts": state.attempts,
+        "aiki.taskStatus": state.status
       });
-      attempts =
-
-
-
+      attempts = state.attempts;
+      if (state.status === "awaiting_retry" && handle.run.state.status === "running") {
+        throw new WorkflowRunSuspendedError(run.id);
+      }
     }
     attempts++;
-
-    await handle[INTERNAL].transitionTaskState(
-
-
+    const options = { retry: retryStrategy, reference };
+    const { taskId } = existingTaskInfo ? await handle[INTERNAL].transitionTaskState({
+      type: "retry",
+      taskId: existingTaskInfo.id,
+      options,
+      taskState: { status: "running", attempts, input }
+    }) : await handle[INTERNAL].transitionTaskState({
+      type: "create",
+      taskName: this.name,
+      options,
+      taskState: { status: "running", attempts, input }
+    });
+    const logger = run.logger.child({
+      "aiki.component": "task-execution",
+      "aiki.taskName": this.name,
+      "aiki.taskId": taskId
+    });
+    logger.info("Task started", { "aiki.attempts": attempts });
+    const { output, lastAttempt } = await this.tryExecuteTask(
+      handle,
+      input,
+      taskId,
+      retryStrategy,
+      attempts,
+      run[INTERNAL].options.spinThresholdMs,
+      logger
+    );
+    await handle[INTERNAL].transitionTaskState({
+      taskId,
+      taskState: { status: "completed", attempts: lastAttempt, output }
+    });
     logger.info("Task complete", { "aiki.attempts": lastAttempt });
     return output;
   }
-  async tryExecuteTask(
+  async tryExecuteTask(handle, input, taskId, retryStrategy, currentAttempt, spinThresholdMs, logger) {
     let attempts = currentAttempt;
     while (true) {
       try {
-        const
+        const outputRaw = await this.params.handler(input);
+        const output = await this.parse(handle, this.params.schema?.output, outputRaw);
         return { output, lastAttempt: attempts };
       } catch (error) {
+        if (error instanceof WorkflowRunFailedError || error instanceof WorkflowRunSuspendedError || error instanceof WorkflowRunConflictError) {
+          throw error;
+        }
         const serializableError = createSerializableError(error);
         const retryParams = getRetryParams(attempts, retryStrategy);
         if (!retryParams.retriesLeft) {
@@ -211,46 +267,85 @@ var TaskImpl = class _TaskImpl {
             "aiki.attempts": attempts,
             "aiki.reason": serializableError.message
           });
-          await
-
-          attempts,
-          error: serializableError
+          await handle[INTERNAL].transitionTaskState({
+            taskId,
+            taskState: { status: "failed", attempts, error: serializableError }
           });
-          throw new TaskFailedError(
+          throw new TaskFailedError(taskId, attempts, serializableError.message);
         }
         logger.debug("Task failed. It will be retried", {
           "aiki.attempts": attempts,
           "aiki.nextAttemptInMs": retryParams.delayMs,
           "aiki.reason": serializableError.message
        });
-        if (retryParams.delayMs <=
+        if (retryParams.delayMs <= spinThresholdMs) {
          await delay(retryParams.delayMs);
          attempts++;
          continue;
        }
-        await
-
-
-
-
+        await handle[INTERNAL].transitionTaskState({
+          taskId,
+          taskState: {
+            status: "awaiting_retry",
+            attempts,
+            error: serializableError,
+            nextAttemptInMs: retryParams.delayMs
+          }
        });
-        throw new WorkflowRunSuspendedError(run.id);
+        throw new WorkflowRunSuspendedError(handle.run.id);
+      }
+    }
+  }
+  async assertUniqueTaskReferenceId(handle, existingTaskInfo, inputHash, reference, logger) {
+    if (existingTaskInfo.inputHash !== inputHash && reference) {
+      const onConflict = reference.onConflict ?? "error";
+      if (onConflict !== "error") {
+        return;
       }
+      logger.error("Reference ID already used by another task", {
+        "aiki.taskName": this.name,
+        "aiki.referenceId": reference.id,
+        "aiki.existingTaskId": existingTaskInfo.id
+      });
+      const error = new WorkflowRunFailedError(
+        handle.run.id,
+        handle.run.attempts,
+        `Reference ID "${reference.id}" already used by another task ${existingTaskInfo.id}`
+      );
+      await handle[INTERNAL].transitionState({
+        status: "failed",
+        cause: "self",
+        error: createSerializableError(error)
+      });
+      throw error;
     }
   }
-  assertRetryAllowed(
+  assertRetryAllowed(taskId, state, retryStrategy, logger) {
+    const { attempts } = state;
    const retryParams = getRetryParams(attempts, retryStrategy);
    if (!retryParams.retriesLeft) {
      logger.error("Task retry not allowed", {
+        "aiki.taskName": this.name,
+        "aiki.taskId": taskId,
        "aiki.attempts": attempts
      });
-      throw new TaskFailedError(
+      throw new TaskFailedError(taskId, attempts, "Task retry not allowed");
    }
  }
-  async
-
-
-
+  async parse(handle, schema, data) {
+    if (!schema) {
+      return data;
+    }
+    try {
+      return schema.parse(data);
+    } catch (error) {
+      await handle[INTERNAL].transitionState({
+        status: "failed",
+        cause: "self",
+        error: createSerializableError(error)
+      });
+      throw new WorkflowRunFailedError(handle.run.id, handle.run.attempts);
+    }
  }
 };
 export {
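The rewritten `start` keys each task by `${name}/${referenceId ?? inputHash}` and short-circuits when the run already holds a completed entry for that path. A sketch of what that means inside a workflow handler, based on the bundled code above; the mailer declaration and the workflow names are illustrative, not from the package:

```typescript
import { task } from "@aikirun/task";
import { workflow } from "@aikirun/workflow";

// Illustrative mailer, not part of the package.
declare const mailer: { send: (to: string) => Promise<{ messageId: string }> };

export const sendReceipt = task({
  name: "send-receipt",
  async handler(input: { to: string }) {
    return mailer.send(input.to);
  },
});

export const orderWorkflow = workflow({ name: "order" });

export const orderWorkflowV1 = orderWorkflow.v("1.0.0", {
  async handler(run, input: { to: string }) {
    // With no reference id, the task path is derived from the hashed input,
    // so the second call finds the completed entry and returns the stored
    // output without invoking the handler again.
    const first = await sendReceipt.start(run, { to: input.to });
    const second = await sendReceipt.start(run, { to: input.to });
    run.logger.info("Deduplicated send", { first, second });
  },
});
```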
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@aikirun/task",
-  "version": "0.
+  "version": "0.9.0",
   "description": "Task SDK for Aiki - define reliable tasks with automatic retries, idempotency, and error handling",
   "type": "module",
   "main": "./dist/index.js",
@@ -18,8 +18,8 @@
     "build": "tsup"
   },
   "dependencies": {
-    "@aikirun/types": "0.
-    "@aikirun/workflow": "0.
+    "@aikirun/types": "0.9.0",
+    "@aikirun/workflow": "0.9.0"
   },
   "publishConfig": {
     "access": "public"