@workglow/task-graph 0.0.52

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (83) hide show
  1. package/LICENSE +201 -0
  2. package/README.md +1280 -0
  3. package/dist/browser.d.ts +7 -0
  4. package/dist/browser.d.ts.map +1 -0
  5. package/dist/browser.js +2842 -0
  6. package/dist/browser.js.map +33 -0
  7. package/dist/bun.d.ts +7 -0
  8. package/dist/bun.d.ts.map +1 -0
  9. package/dist/bun.js +2843 -0
  10. package/dist/bun.js.map +33 -0
  11. package/dist/common.d.ts +33 -0
  12. package/dist/common.d.ts.map +1 -0
  13. package/dist/node.d.ts +7 -0
  14. package/dist/node.d.ts.map +1 -0
  15. package/dist/node.js +2842 -0
  16. package/dist/node.js.map +33 -0
  17. package/dist/storage/TaskGraphRepository.d.ts +92 -0
  18. package/dist/storage/TaskGraphRepository.d.ts.map +1 -0
  19. package/dist/storage/TaskGraphTabularRepository.d.ts +73 -0
  20. package/dist/storage/TaskGraphTabularRepository.d.ts.map +1 -0
  21. package/dist/storage/TaskOutputRepository.d.ts +93 -0
  22. package/dist/storage/TaskOutputRepository.d.ts.map +1 -0
  23. package/dist/storage/TaskOutputTabularRepository.d.ts +84 -0
  24. package/dist/storage/TaskOutputTabularRepository.d.ts.map +1 -0
  25. package/dist/task/ArrayTask.d.ts +72 -0
  26. package/dist/task/ArrayTask.d.ts.map +1 -0
  27. package/dist/task/ConditionalTask.d.ts +278 -0
  28. package/dist/task/ConditionalTask.d.ts.map +1 -0
  29. package/dist/task/GraphAsTask.d.ts +79 -0
  30. package/dist/task/GraphAsTask.d.ts.map +1 -0
  31. package/dist/task/GraphAsTaskRunner.d.ts +36 -0
  32. package/dist/task/GraphAsTaskRunner.d.ts.map +1 -0
  33. package/dist/task/ITask.d.ts +144 -0
  34. package/dist/task/ITask.d.ts.map +1 -0
  35. package/dist/task/ITaskRunner.d.ts +36 -0
  36. package/dist/task/ITaskRunner.d.ts.map +1 -0
  37. package/dist/task/JobQueueFactory.d.ts +23 -0
  38. package/dist/task/JobQueueFactory.d.ts.map +1 -0
  39. package/dist/task/JobQueueTask.d.ts +65 -0
  40. package/dist/task/JobQueueTask.d.ts.map +1 -0
  41. package/dist/task/Task.d.ts +334 -0
  42. package/dist/task/Task.d.ts.map +1 -0
  43. package/dist/task/TaskError.d.ts +66 -0
  44. package/dist/task/TaskError.d.ts.map +1 -0
  45. package/dist/task/TaskEvents.d.ts +40 -0
  46. package/dist/task/TaskEvents.d.ts.map +1 -0
  47. package/dist/task/TaskJSON.d.ts +82 -0
  48. package/dist/task/TaskJSON.d.ts.map +1 -0
  49. package/dist/task/TaskQueueRegistry.d.ts +69 -0
  50. package/dist/task/TaskQueueRegistry.d.ts.map +1 -0
  51. package/dist/task/TaskRegistry.d.ts +31 -0
  52. package/dist/task/TaskRegistry.d.ts.map +1 -0
  53. package/dist/task/TaskRunner.d.ts +99 -0
  54. package/dist/task/TaskRunner.d.ts.map +1 -0
  55. package/dist/task/TaskTypes.d.ts +68 -0
  56. package/dist/task/TaskTypes.d.ts.map +1 -0
  57. package/dist/task-graph/Conversions.d.ts +28 -0
  58. package/dist/task-graph/Conversions.d.ts.map +1 -0
  59. package/dist/task-graph/Dataflow.d.ts +73 -0
  60. package/dist/task-graph/Dataflow.d.ts.map +1 -0
  61. package/dist/task-graph/DataflowEvents.d.ts +34 -0
  62. package/dist/task-graph/DataflowEvents.d.ts.map +1 -0
  63. package/dist/task-graph/ITaskGraph.d.ts +38 -0
  64. package/dist/task-graph/ITaskGraph.d.ts.map +1 -0
  65. package/dist/task-graph/IWorkflow.d.ts +13 -0
  66. package/dist/task-graph/IWorkflow.d.ts.map +1 -0
  67. package/dist/task-graph/TaskGraph.d.ts +230 -0
  68. package/dist/task-graph/TaskGraph.d.ts.map +1 -0
  69. package/dist/task-graph/TaskGraphEvents.d.ts +54 -0
  70. package/dist/task-graph/TaskGraphEvents.d.ts.map +1 -0
  71. package/dist/task-graph/TaskGraphRunner.d.ts +202 -0
  72. package/dist/task-graph/TaskGraphRunner.d.ts.map +1 -0
  73. package/dist/task-graph/TaskGraphScheduler.d.ts +56 -0
  74. package/dist/task-graph/TaskGraphScheduler.d.ts.map +1 -0
  75. package/dist/task-graph/Workflow.d.ts +155 -0
  76. package/dist/task-graph/Workflow.d.ts.map +1 -0
  77. package/dist/types.d.ts +7 -0
  78. package/dist/types.d.ts.map +1 -0
  79. package/package.json +59 -0
  80. package/src/storage/README.md +61 -0
  81. package/src/task/ConditionalTask.README.md +268 -0
  82. package/src/task/README.md +251 -0
  83. package/src/task-graph/README.md +142 -0
package/README.md ADDED
@@ -0,0 +1,1280 @@
1
+ # @workglow/task-graph
2
+
3
+ A lightweight yet powerful TypeScript library for building and executing DAG (Directed Acyclic Graph) pipelines of tasks. Provides flexible task orchestration, persistent storage, workflow management, and error handling for complex task execution scenarios.
4
+
5
+ ## Table of Contents
6
+
7
+ - [Installation](#installation)
8
+ - [Quick Start](#quick-start)
9
+ - [Core Concepts](#core-concepts)
10
+ - [Tasks](#tasks)
11
+ - [Task Graphs](#task-graphs)
12
+ - [Workflows](#workflows)
13
+ - [Data Flow](#data-flow)
14
+ - [Creating Custom Tasks](#creating-custom-tasks)
15
+ - [Building Task Graphs](#building-task-graphs)
16
+ - [Using Workflows](#using-workflows)
17
+ - [Storage and Caching](#storage-and-caching)
18
+ - [Error Handling](#error-handling)
19
+ - [Advanced Patterns](#advanced-patterns)
20
+ - [API Reference](#api-reference)
21
+ - [Examples](#examples)
22
+ - [Testing](#testing)
23
+ - [License](#license)
24
+
25
+ ## Installation
26
+
27
+ ```bash
28
+ npm install @workglow/task-graph
29
+ # or
30
+ bun add @workglow/task-graph
31
+ # or
32
+ yarn add @workglow/task-graph
33
+ ```
34
+
35
+ ## Quick Start
36
+
37
+ Here's a simple example that demonstrates the core concepts:
38
+
39
+ ```typescript
40
+ import { Task, TaskGraph, Dataflow, Workflow } from "@workglow/task-graph";
41
+ import { DataPortSchema } from "@workglow/util";
42
+
43
+ // 1. Define a custom task
44
+ class MultiplyBy2Task extends Task<{ value: number }, { result: number }> {
45
+ static readonly type = "MultiplyBy2Task";
46
+ static readonly category = "Math";
47
+ static readonly title = "Multiply by 2";
48
+ static readonly description = "Multiplies a number by 2";
49
+
50
+ static inputSchema() {
51
+ return {
52
+ type: "object",
53
+ properties: {
54
+ value: {
55
+ type: "number",
56
+ description: "Input number",
57
+ },
58
+ },
59
+ required: ["value"],
60
+ additionalProperties: false,
61
+ } as const satisfies DataPortSchema;
62
+ }
63
+
64
+ static outputSchema() {
65
+ return {
66
+ type: "object",
67
+ properties: {
68
+ result: {
69
+ type: "number",
70
+ description: "Multiplied result",
71
+ },
72
+ },
73
+ required: ["result"],
74
+ additionalProperties: false,
75
+ } as const satisfies DataPortSchema;
76
+ }
77
+
78
+ async execute(input: { value: number }) {
79
+ return { result: input.value * 2 };
80
+ }
81
+ }
82
+
83
+ // 2. Use the Task
84
+
85
+ // 2.1 Use it directly
86
+ const task = new MultiplyBy2Task({ value: 15 });
87
+ const result = await task.run();
88
+ console.log(result); // { result: 30 }
89
+
90
+ // 2.2 Use it with TaskGraph
91
+ const graph = new TaskGraph();
92
+ graph.addTask(new MultiplyBy2Task({ value: 15 }, { id: "multiply1" }));
93
+ graph.addTask(new MultiplyBy2Task({}, { id: "multiply2" }));
94
+ graph.addDataflow(new Dataflow("multiply1", "result", "multiply2", "value"));
95
+
96
+ const results = await graph.run();
97
+ console.log(results); // [{ id: "multiply2", data: { result: 60 } }]
98
+
99
+ // 2.3 With Workflow
100
+ const wf = new Workflow();
101
+ wf.addTask(new MultiplyBy2Task({ value: 15 }));
102
+ wf.addTask(new MultiplyBy2Task()); // auto-connects to previous task
103
+ const result = await wf.run();
104
+ console.log(result); // { result: 60 }
105
+
106
+ // 2.3.1 Adding to Workflow
107
+ import { CreateWorkflow } from "@workglow/task-graph";
108
+ declare module "@workglow/task-graph" {
109
+ interface Workflow {
110
+ MultiplyBy2: CreateWorkflow<{ value: number }>;
111
+ }
112
+ }
113
+ Workflow.prototype.MultiplyBy2 = CreateWorkflow(MultiplyBy2Task);
114
+
115
+ const wf = new Workflow();
116
+ wf.MultiplyBy2({ value: 15 });
117
+ wf.MultiplyBy2(); // input is output from previous task
118
+ const result = await wf.run();
119
+ console.log(result); // { result: 60 }
120
+
121
+ // 2.4 Create a helper function
122
+ export const MultiplyBy2 = (input: { value: number }) => {
123
+ return new MultiplyBy2Task(input).run();
124
+ };
125
+ const first = await MultiplyBy2({ value: 15 });
126
+ const second = await MultiplyBy2({ value: first.result });
127
+ console.log(second); // { result: 60 }
128
+ ```
129
+
130
+ ## Core Concepts
131
+
132
+ ### Tasks
133
+
134
+ Tasks are the fundamental units of work. Each task:
135
+
136
+ - Defines input/output schemas using JSON Schema (from `@workglow/util`), TypeBox, or Zod
137
+ - Implements `execute()` for main logic or `executeReactive()` for UI updates
138
+ - Has a unique type identifier and category
139
+ - Can be cached based on inputs
140
+ - Emits lifecycle events
141
+
142
+ ### Task Graphs
143
+
144
+ TaskGraph is the low-level API for building directed acyclic graphs of tasks:
145
+
146
+ - Manages tasks and their dependencies
147
+ - Handles execution order and parallelization
148
+ - Provides detailed control over data flow
149
+ - Returns results as an array of task outputs
150
+
151
+ ### Data Flow
152
+
153
+ Data flows between tasks through `Dataflow` objects that specify:
154
+
155
+ - Source task and output port
156
+ - Target task and input port
157
+ - Data transformation and validation
158
+ - Error propagation
159
+ - The edges of the graph (each Dataflow is one edge)
160
+
161
+ ### Workflows
162
+
163
+ Workflow is the high-level API that provides:
164
+
165
+ - Builder pattern for easier task composition
166
+ - Automatic task connection based on compatible input/output types
167
+ - Pipeline operations (`pipe`, `parallel`)
168
+ - Simplified result handling
169
+ - Event management
170
+
171
+ ## Creating Custom Tasks
172
+
173
+ ### Basic Task Structure
174
+
175
+ You can define schemas using plain JSON Schema, TypeBox, or Zod. Here are examples of each approach:
176
+
177
+ #### Using Plain JSON Schema
178
+
179
+ ```typescript
180
+ import { Task, IExecuteContext, TaskAbortedError } from "@workglow/task-graph";
181
+ import { DataPortSchema, FromSchema } from "@workglow/util";
182
+
183
+ const MyInputSchema = {
184
+ type: "object",
185
+ properties: {
186
+ text: {
187
+ type: "string",
188
+ description: "Text to process",
189
+ },
190
+ multiplier: {
191
+ type: "number",
192
+ description: "Repeat multiplier",
193
+ default: 1,
194
+ },
195
+ },
196
+ required: ["text"],
197
+ additionalProperties: false,
198
+ } as const satisfies DataPortSchema;
199
+
200
+ type MyInput = FromSchema<typeof MyInputSchema>;
201
+ // Equivalent to:
202
+ // type MyInput = {
203
+ // text: string;
204
+ // multiplier?: number;
205
+ // };
206
+
207
+ const MyOutputSchema = {
208
+ type: "object",
209
+ properties: {
210
+ processed: {
211
+ type: "string",
212
+ description: "Processed text",
213
+ },
214
+ length: {
215
+ type: "number",
216
+ description: "Text length",
217
+ },
218
+ },
219
+ required: ["processed", "length"],
220
+ additionalProperties: false,
221
+ } as const satisfies DataPortSchema;
222
+
223
+ type MyOutput = FromSchema<typeof MyOutputSchema>;
224
+
225
+ class TextProcessorTask extends Task<MyInput, MyOutput> {
226
+ static readonly type = "TextProcessorTask";
227
+ static readonly title = "Text Processor";
228
+ static readonly description = "Processes text";
229
+ static readonly category = "Text Processing";
230
+ static readonly cacheable = true;
231
+
232
+ static inputSchema() {
233
+ return MyInputSchema;
234
+ }
235
+
236
+ static outputSchema() {
237
+ return MyOutputSchema;
238
+ }
239
+
240
+ async execute(input: MyInput, context: IExecuteContext): Promise<MyOutput> {
241
+ const { text, multiplier = 1 } = input;
242
+ const { signal, updateProgress } = context;
243
+
244
+ if (signal?.aborted) {
245
+ throw new TaskAbortedError("Task was cancelled");
246
+ }
247
+
248
+ await updateProgress(0.5, "Processing text...");
249
+ await new Promise((resolve) => setTimeout(resolve, 100));
250
+
251
+ const processed = text.repeat(multiplier);
252
+
253
+ return {
254
+ processed,
255
+ length: processed.length,
256
+ };
257
+ }
258
+ }
259
+ ```
260
+
261
+ #### Using TypeBox
262
+
263
+ TypeBox schemas are JSON Schema compatible and can be used directly:
264
+
265
+ ```typescript
266
+ import { Task, IExecuteContext, TaskAbortedError, TaskInvalidInputError } from "@workglow/task-graph";
267
+ import { Type } from "@sinclair/typebox";
+ import { Value } from "@sinclair/typebox/value";
268
+ import { DataPortSchema, FromSchema } from "@workglow/util";
269
+
270
+ const MyInputSchema = Type.Object({
271
+ text: Type.String({ description: "Text to process" }),
272
+ multiplier: Type.Optional(Type.Number({ description: "Repeat multiplier", default: 1 })),
273
+ }) satisfies DataPortSchema;
274
+
275
+ type MyInput = FromSchema<typeof MyInputSchema>;
276
+
277
+ const MyOutputSchema = Type.Object({
278
+ processed: Type.String({ description: "Processed text" }),
279
+ length: Type.Number({ description: "Text length" }),
280
+ }) satisfies DataPortSchema;
281
+
282
+ type MyOutput = FromSchema<typeof MyOutputSchema>;
283
+
284
+ class TextProcessorTask extends Task<MyInput, MyOutput> {
285
+ static readonly type = "TextProcessorTask";
286
+ static readonly title = "Text Processor";
287
+ static readonly description = "Processes text";
288
+ static readonly category = "Text Processing";
289
+ static readonly cacheable = true;
290
+
291
+ static inputSchema() {
292
+ return MyInputSchema;
293
+ }
294
+
295
+ static outputSchema() {
296
+ return MyOutputSchema;
297
+ }
298
+
299
+ async execute(input: MyInput, context: IExecuteContext): Promise<MyOutput> {
300
+ const { text, multiplier = 1 } = input;
301
+ const { signal, updateProgress } = context;
302
+
303
+ if (signal?.aborted) {
304
+ throw new TaskAbortedError("Task was cancelled");
305
+ }
306
+
307
+ await updateProgress(0.5, "Processing text...");
308
+ await new Promise((resolve) => setTimeout(resolve, 100));
309
+
310
+ const processed = text.repeat(multiplier);
311
+
312
+ return {
313
+ processed,
314
+ length: processed.length,
315
+ };
316
+ }
317
+
318
+ // Override validation to use TypeBox's native validation -- only if you need it, as the default will work in most cases.
319
+ async validateInput(input: Partial<MyInput>): Promise<boolean> {
320
+ // Use TypeBox's Value.Check for validation
321
+ if (!Value.Check(MyInputSchema, input)) {
322
+ const errors = [...Value.Errors(MyInputSchema, input)];
323
+ const errorMessages = errors.map((error) => {
324
+ const path = error.path || "";
325
+ return `${error.message}${path ? ` (${path})` : ""}`;
326
+ });
327
+ throw new TaskInvalidInputError(
328
+ `Input ${JSON.stringify(input)} does not match schema: ${errorMessages.join(", ")}`
329
+ );
330
+ }
331
+ return true;
332
+ }
333
+ }
334
+ ```
335
+
336
+ #### Using Zod
337
+
338
+ Zod 4 has built-in JSON Schema support via the top-level `z.toJSONSchema()` function:
339
+
340
+ ```typescript
341
+ import { Task, IExecuteContext, TaskAbortedError, TaskInvalidInputError } from "@workglow/task-graph";
342
+ import { z } from "zod";
343
+ import { DataPortSchema } from "@workglow/util";
344
+
345
+ const MyInputSchemaZod = z.object({
346
+ text: z.string().describe("Text to process"),
347
+ multiplier: z.number().default(1).optional().describe("Repeat multiplier"),
348
+ });
349
+
350
+ const MyInputSchema = z.toJSONSchema(MyInputSchemaZod) as DataPortSchema;
351
+
352
+ // Infer TypeScript types using Zod's built-in type inference
353
+ type MyInput = z.infer<typeof MyInputSchemaZod>;
354
+
355
+ const MyOutputSchemaZod = z.object({
356
+ processed: z.string().describe("Processed text"),
357
+ length: z.number().describe("Text length"),
358
+ });
359
+
360
+ const MyOutputSchema = z.toJSONSchema(MyOutputSchemaZod) as DataPortSchema;
361
+
362
+ type MyOutput = z.infer<typeof MyOutputSchemaZod>;
363
+
364
+ class TextProcessorTask extends Task<MyInput, MyOutput> {
365
+ static readonly type = "TextProcessorTask";
366
+ static readonly title = "Text Processor";
367
+ static readonly description = "Processes text";
368
+ static readonly category = "Text Processing";
369
+ static readonly cacheable = true;
370
+
371
+ static inputSchema() {
372
+ return MyInputSchema;
373
+ }
374
+
375
+ static outputSchema() {
376
+ return MyOutputSchema;
377
+ }
378
+
379
+ async execute(input: MyInput, context: IExecuteContext): Promise<MyOutput> {
380
+ const { text, multiplier = 1 } = input;
381
+ const { signal, updateProgress } = context;
382
+
383
+ if (signal?.aborted) {
384
+ throw new TaskAbortedError("Task was cancelled");
385
+ }
386
+
387
+ await updateProgress(0.5, "Processing text...");
388
+ await new Promise((resolve) => setTimeout(resolve, 100));
389
+
390
+ const processed = text.repeat(multiplier);
391
+
392
+ return {
393
+ processed,
394
+ length: processed.length,
395
+ };
396
+ }
397
+
398
+ // Override validation to use Zod's native validation -- only if you need it, as the default will work in most cases.
399
+ async validateInput(input: Partial<MyInput>): Promise<boolean> {
400
+ try {
401
+ // Use Zod's .parse() for validation (throws on error)
402
+ MyInputSchemaZod.parse(input);
403
+ return true;
404
+ } catch (error) {
405
+ if (error instanceof z.ZodError) {
406
+ const errorMessages = error.issues.map((err) => {
407
+ const path = err.path.join(".");
408
+ return `${err.message}${path ? ` (${path})` : ""}`;
409
+ });
410
+ throw new TaskInvalidInputError(
411
+ `Input ${JSON.stringify(input)} does not match schema: ${errorMessages.join(", ")}`
412
+ );
413
+ }
414
+ throw error;
415
+ }
416
+ }
417
+ }
418
+ ```
419
+
420
+ **Note:** When using native validation, you still need to return a JSON Schema from `inputSchema()` and `outputSchema()` for compatibility with the task graph system. The native validation only affects runtime validation, not schema compatibility checking.
421
+
422
+ ### Task with Progress and Error Handling
423
+
424
+ ```typescript
425
+ import { DataPortSchema } from "@workglow/util";
426
+
427
+ class FileProcessorTask extends Task<{ filePath: string }, { content: string }> {
428
+ static readonly type = "FileProcessorTask";
429
+
430
+ static inputSchema() {
431
+ return {
432
+ type: "object",
433
+ properties: {
434
+ filePath: {
435
+ type: "string",
436
+ description: "Path to file",
437
+ },
438
+ },
439
+ required: ["filePath"],
440
+ additionalProperties: false,
441
+ } as const satisfies DataPortSchema;
442
+ }
443
+
444
+ static outputSchema() {
445
+ return {
446
+ type: "object",
447
+ properties: {
448
+ content: {
449
+ type: "string",
450
+ description: "File content",
451
+ },
452
+ },
453
+ required: ["content"],
454
+ additionalProperties: false,
455
+ } as const satisfies DataPortSchema;
456
+ }
457
+
458
+ async execute(input: { filePath: string }, { signal, updateProgress }: IExecuteContext) {
459
+ try {
460
+ await updateProgress(0.1, "Starting file read...");
461
+
462
+ if (signal?.aborted) {
463
+ throw new TaskAbortedError("File read cancelled");
464
+ }
465
+
466
+ // Simulate file reading with progress
467
+ await updateProgress(0.5, "Reading file...");
468
+ const content = await this.readFile(input.filePath);
469
+
470
+ await updateProgress(1.0, "File read complete");
471
+
472
+ return { content };
473
+ } catch (error) {
474
+ if (error instanceof TaskAbortedError) {
475
+ throw error; // Re-throw abort errors
476
+ }
477
+ throw new TaskError(`Failed to read file: ${error.message}`);
478
+ }
479
+ }
480
+
481
+ private async readFile(path: string): Promise<string> {
482
+ // Implementation would go here
483
+ return "file content";
484
+ }
485
+ }
486
+ ```
487
+
488
+ ## Building Task Graphs
489
+
490
+ ### Simple Task Graph
491
+
492
+ ```typescript
493
+ import { TaskGraph, Dataflow } from "@workglow/task-graph";
494
+
495
+ // Create tasks
496
+ const task1 = new TextProcessorTask({ text: "Hello" }, { id: "processor1" });
497
+ const task2 = new TextProcessorTask({ text: "World" }, { id: "processor2" });
498
+ const task3 = new TextProcessorTask({ text: "" }, { id: "combiner" });
499
+
500
+ // Build graph
501
+ const graph = new TaskGraph();
502
+ graph.addTasks([task1, task2, task3]);
503
+
504
+ // Define data flows
505
+ graph.addDataflow(new Dataflow("processor1", "processed", "combiner", "text"));
506
+ graph.addDataflow(new Dataflow("processor2", "processed", "combiner", "text"));
507
+
508
+ // Execute
509
+ const results = await graph.run();
510
+ ```
511
+
512
+ ### Task Graph with Dependencies
513
+
514
+ ```typescript
515
+ import { DataPortSchema } from "@workglow/util";
516
+
517
+ class AddTask extends Task<{ a: number; b: number }, { sum: number }> {
518
+ static readonly type = "AddTask";
519
+
520
+ static inputSchema() {
521
+ return {
522
+ type: "object",
523
+ properties: {
524
+ a: { type: "number" },
525
+ b: { type: "number" },
526
+ },
527
+ required: ["a", "b"],
528
+ additionalProperties: false,
529
+ } as const satisfies DataPortSchema;
530
+ }
531
+
532
+ static outputSchema() {
533
+ return {
534
+ type: "object",
535
+ properties: {
536
+ sum: { type: "number" },
537
+ },
538
+ required: ["sum"],
539
+ additionalProperties: false,
540
+ } as const satisfies DataPortSchema;
541
+ }
542
+
543
+ async execute(input: { a: number; b: number }) {
544
+ return { sum: input.a + input.b };
545
+ }
546
+ }
547
+
548
+ // Create a computational pipeline
549
+ const doubleTask = new MultiplyBy2Task({ value: 5 }, { id: "double" });
550
+ const doubleTask2 = new MultiplyBy2Task({ value: 5 }, { id: "double2" });
551
+ const addTask = new AddTask({}, { id: "add" });
552
+
553
+ const graph = new TaskGraph();
554
+ graph.addTasks([doubleTask, doubleTask2, addTask]);
555
+
556
+ // Connect outputs to inputs
557
+ graph.addDataflow(new Dataflow("double", "result", "add", "a"));
558
+ graph.addDataflow(new Dataflow("double2", "result", "add", "b"));
559
+
560
+ const results = await graph.run();
561
+ // Results: double=10, double2=10, add=20
562
+ ```
563
+
564
+ ### Conditional Execution and Error Handling
565
+
566
+ ```typescript
567
+ // Task that might fail
568
+ class RiskyTask extends Task<{ shouldFail: boolean }, { success: boolean }> {
569
+ static readonly type = "RiskyTask";
570
+
571
+ async execute(input: { shouldFail: boolean }) {
572
+ if (input.shouldFail) {
573
+ throw new TaskError("Task failed as requested");
574
+ }
575
+ return { success: true };
576
+ }
577
+ }
578
+
579
+ // Task that handles errors
580
+ class ErrorHandlerTask extends Task<{ fallback: string }, { result: string }> {
581
+ static readonly type = "ErrorHandlerTask";
582
+
583
+ async execute(input: { fallback: string }) {
584
+ return { result: input.fallback };
585
+ }
586
+ }
587
+
588
+ const graph = new TaskGraph();
589
+ const riskyTask = new RiskyTask({ shouldFail: true }, { id: "risky" });
590
+ const handlerTask = new ErrorHandlerTask({ fallback: "default" }, { id: "handler" });
591
+
592
+ graph.addTasks([riskyTask, handlerTask]);
593
+
594
+ // Connect error output to handler
595
+ graph.addDataflow(new Dataflow("risky", "[error]", "handler", "error"));
596
+
597
+ try {
598
+ const results = await graph.run();
599
+ } catch (error) {
600
+ console.log("Graph execution failed:", error.message);
601
+ }
602
+ ```
603
+
604
+ ## Using Workflows
605
+
606
+ ### Basic Workflow
607
+
608
+ ```typescript
609
+ import { Workflow } from "@workglow/task-graph";
610
+
611
+ const workflow = new Workflow();
612
+
613
+ // Add tasks to workflow
614
+ workflow.addTask(new TextProcessorTask({ text: "Hello, World!" }));
615
+
616
+ // Run workflow
617
+ const result = await workflow.run();
618
+ console.log(result); // { processed: "Hello, World!", length: 13 }
619
+ ```
620
+
621
+ ### Pipeline Workflow
622
+
623
+ ```typescript
624
+ // Create a processing pipeline
625
+ const workflow = new Workflow();
626
+
627
+ // Method 1: Using workflow.pipe()
628
+ workflow.pipe(
629
+ new TextProcessorTask({ text: "Start" }),
630
+ new TextProcessorTask({ multiplier: 2 }),
631
+ new TextProcessorTask({ multiplier: 3 })
632
+ );
633
+
634
+ const result = await workflow.run();
635
+
636
+ // Method 2: Using the pipe helper
637
+ import { pipe } from "@workglow/task-graph";
638
+
639
+ const pipeline = pipe([
640
+ new TextProcessorTask({ text: "Start" }),
641
+ new TextProcessorTask({ multiplier: 2 }),
642
+ new TextProcessorTask({ multiplier: 3 }),
643
+ ]);
644
+
645
+ const result2 = await pipeline.run();
646
+ ```
647
+
648
+ ### Parallel Workflow
649
+
650
+ ```typescript
651
+ import { parallel } from "@workglow/task-graph";
652
+
653
+ // Method 1: Using workflow.parallel()
654
+ const workflow = new Workflow();
655
+ workflow.parallel([
656
+ new TextProcessorTask({ text: "Task 1" }),
657
+ new TextProcessorTask({ text: "Task 2" }),
658
+ new TextProcessorTask({ text: "Task 3" }),
659
+ ]);
660
+
661
+ const results = await workflow.run();
662
+ // Results will be an array of outputs
663
+
664
+ // Method 2: Using the parallel helper
665
+ const parallelWorkflow = parallel([
666
+ new TextProcessorTask({ text: "Task A" }),
667
+ new TextProcessorTask({ text: "Task B" }),
668
+ ]);
669
+
670
+ const results2 = await parallelWorkflow.run();
671
+ ```
672
+
673
+ ### Complex Workflow with Auto-connections
674
+
675
+ ```typescript
676
+ // Workflow automatically connects compatible input/output types
677
+ const workflow = new Workflow();
678
+
679
+ // These will auto-connect because output "result" matches input "value"
680
+ workflow.addTask(new MultiplyBy2Task({ value: 5 })); // Outputs: { result: number }
681
+ workflow.addTask(new MultiplyBy2Task({})); // Inputs: { value: number }
682
+ workflow.addTask(new MultiplyBy2Task({})); // Inputs: { value: number }
683
+
684
+ const result = await workflow.run();
685
+ // Result: 5 * 2 * 2 * 2 = 40
686
+ ```
687
+
688
+ ### Custom Task Creation for Workflows
689
+
690
+ ```typescript
691
+ // Register tasks with the workflow system
692
+ declare module "@workglow/task-graph" {
693
+ interface Workflow {
694
+ MyTextProcessor: CreateWorkflow<MyInput, MyOutput>;
695
+ }
696
+ }
697
+
698
+ Workflow.prototype.MyTextProcessor = CreateWorkflow(TextProcessorTask);
699
+
700
+ // Now you can use it fluently
701
+ const workflow = new Workflow();
702
+ workflow.MyTextProcessor({ text: "Hello" }).MyTextProcessor({ multiplier: 3 });
703
+
704
+ const result = await workflow.run();
705
+ ```
706
+
707
+ ## Storage and Caching
708
+
709
+ ### Task Output Caching
710
+
711
+ Output caching lets repeated executions with identical inputs return instantly without redoing work.
712
+
713
+ ```typescript
714
+ import { Task, TaskGraph, Workflow } from "@workglow/task-graph";
715
+ import { DataPortSchema } from "@workglow/util";
716
+ import { InMemoryTaskOutputRepository } from "@workglow/test";
717
+
718
+ // A cacheable task that simulates expensive work
719
+ class ExpensiveTask extends Task<{ n: number }, { result: number }> {
720
+ static readonly type = "ExpensiveTask";
721
+ static readonly cacheable = true;
722
+
723
+ static inputSchema() {
724
+ return {
725
+ type: "object",
726
+ properties: {
727
+ n: { type: "number" },
728
+ },
729
+ required: ["n"],
730
+ additionalProperties: false,
731
+ } as const satisfies DataPortSchema;
732
+ }
733
+
734
+ static outputSchema() {
735
+ return {
736
+ type: "object",
737
+ properties: {
738
+ result: { type: "number" },
739
+ },
740
+ required: ["result"],
741
+ additionalProperties: false,
742
+ } as const satisfies DataPortSchema;
743
+ }
744
+
745
+ async execute(input: { n: number }) {
746
+ // Simulate 500ms of CPU/IO work
747
+ await new Promise((r) => setTimeout(r, 500));
748
+ return { result: input.n * 2 };
749
+ }
750
+ }
751
+
752
+ // Create an output cache
753
+ const outputCache = new InMemoryTaskOutputRepository();
754
+
755
+ // Example 1: TaskGraph caching (second run is near-instant)
756
+ const graph = new TaskGraph({ outputCache });
757
+ graph.addTask(new ExpensiveTask({ n: 42 }, { id: "exp" }));
758
+
759
+ let t = Date.now();
760
+ await graph.run();
761
+ const firstRunMs = Date.now() - t;
762
+
763
+ t = Date.now();
764
+ await graph.run(); // identical inputs -> served from cache
765
+ const secondRunMs = Date.now() - t;
766
+
767
+ console.log({ firstRunMs, secondRunMs });
768
+ // e.g. { firstRunMs: ~500, secondRunMs: ~1-5 }
769
+
770
+ // Example 2: Direct Task caching across instances
771
+ const missTask = new ExpensiveTask({ n: 43 }, { outputCache });
772
+ t = Date.now();
773
+ await missTask.run(); // cache miss -> compute and store
774
+ const missMs = Date.now() - t;
775
+
776
+ const hitTask = new ExpensiveTask({ n: 43 }, { outputCache });
777
+ t = Date.now();
778
+ await hitTask.run(); // cache hit -> instant
779
+ const hitMs = Date.now() - t;
780
+
781
+ console.log({ missMs, hitMs });
782
+ // e.g. { missMs: ~500, hitMs: ~1-5 }
783
+
784
+ // Example 3: Workflow with the same cache
785
+ const workflow = new Workflow(outputCache);
786
+ workflow.addTask(new ExpensiveTask({ n: 10 }));
787
+
788
+ t = Date.now();
789
+ await workflow.run(); // compute
790
+ const wfFirstMs = Date.now() - t;
791
+
792
+ t = Date.now();
793
+ await workflow.run(); // cached
794
+ const wfSecondMs = Date.now() - t;
795
+
796
+ console.log({ wfFirstMs, wfSecondMs });
797
+ ```
798
+
799
+ ### Task Graph Persistence
800
+
801
+ ```typescript
802
+ import { FsFolderTaskGraphRepository } from "@workglow/test";
803
+
804
+ // Create repository
805
+ const repository = new FsFolderTaskGraphRepository("./task-graphs");
806
+
807
+ // Save task graph
808
+ const graph = new TaskGraph();
809
+ graph.addTask(new MultiplyBy2Task({ value: 10 }));
810
+ await repository.saveTaskGraph("my-graph", graph);
811
+
812
+ // Load task graph
813
+ const loadedGraph = await repository.getTaskGraph("my-graph");
814
+ const results = await loadedGraph.run();
815
+ ```
816
+
817
+ ### Different Storage Options
818
+
819
+ ```typescript
820
+ // In-memory (for testing)
821
+ import { InMemoryTaskOutputRepository, InMemoryTaskGraphRepository } from "@workglow/test";
822
+
823
+ // File system
824
+ import { FsFolderTaskOutputRepository, FsFolderTaskGraphRepository } from "@workglow/test";
825
+
826
+ // SQLite
827
+ import { SqliteTaskOutputRepository, SqliteTaskGraphRepository } from "@workglow/test";
828
+
829
+ // IndexedDB (browser)
830
+ import { IndexedDbTaskOutputRepository, IndexedDbTaskGraphRepository } from "@workglow/test";
831
+ ```
832
+
833
+ ## Error Handling
834
+
835
+ ### Task-Level Error Handling
836
+
837
+ ```typescript
838
+ class RobustTask extends Task<{ input: string }, { output: string }> {
839
+ async execute(input: { input: string }, { signal }: IExecuteContext) {
840
+ try {
841
+ // Check for cancellation
842
+ if (signal?.aborted) {
843
+ throw new TaskAbortedError("Task cancelled");
844
+ }
845
+
846
+ // Your logic here
847
+ const result = await this.processInput(input.input);
848
+
849
+ return { output: result };
850
+ } catch (error) {
851
+ if (error instanceof TaskAbortedError) {
852
+ throw error; // Re-throw cancellation
853
+ }
854
+
855
+ // Convert to TaskError with context
856
+ throw new TaskError(`Processing failed: ${error.message}`);
857
+ }
858
+ }
859
+ }
860
+ ```
861
+
862
+ ### Graph-Level Error Handling
863
+
864
+ ```typescript
865
+ try {
866
+ const results = await graph.run();
867
+ } catch (error) {
868
+ if (error instanceof TaskAbortedError) {
869
+ console.log("Execution was cancelled");
870
+ } else if (error instanceof TaskFailedError) {
871
+ console.log("A task failed:", error.message);
872
+ console.log("Failed task:", error.taskId);
873
+ } else if (error instanceof TaskError) {
874
+ console.log("Task error:", error.message);
875
+ }
876
+ }
877
+ ```
878
+
879
+ ### Workflow Error Handling with Events
880
+
881
+ ```typescript
882
+ const workflow = new Workflow();
883
+
884
+ workflow.events.on("error", (error) => {
885
+ console.error("Workflow error:", error);
886
+ });
887
+
888
+ workflow.events.on("start", () => {
889
+ console.log("Workflow started");
890
+ });
891
+
892
+ workflow.events.on("complete", () => {
893
+ console.log("Workflow completed");
894
+ });
895
+
896
+ workflow.addTask(new TextProcessorTask({ text: "Hello" }));
897
+ await workflow.run();
898
+ ```
899
+
900
+ ### Aborting Execution
901
+
902
+ ```typescript
903
+ const workflow = new Workflow();
904
+ workflow.addTask(new LongRunningTask());
905
+
906
+ // Start execution
907
+ const resultPromise = workflow.run();
908
+
909
+ // Abort after 1 second
910
+ setTimeout(() => {
911
+ workflow.abort();
912
+ }, 1000);
913
+
914
+ try {
915
+ await resultPromise;
916
+ } catch (error) {
917
+ if (error instanceof TaskAbortedError) {
918
+ console.log("Execution was aborted");
919
+ }
920
+ }
921
+ ```
922
+
923
+ ## Advanced Patterns
924
+
925
+ ### Array Tasks (Parallel Processing)
926
+
927
+ ```typescript
928
+ class ArrayProcessorTask extends ArrayTask<{ items: string[] }, { results: string[] }> {
929
+ static readonly type = "ArrayProcessorTask";
930
+
931
+ static inputSchema() {
932
+ return {
933
+ type: "object",
934
+ properties: {
935
+ items: {
936
+ type: "array",
937
+ items: {
938
+ type: "string",
939
+ },
940
+ },
941
+ },
942
+ required: ["items"],
943
+ additionalProperties: false,
944
+ } as const satisfies DataPortSchema;
945
+ }
946
+
947
+ static outputSchema() {
948
+ return {
949
+ type: "object",
950
+ properties: {
951
+ results: {
952
+ type: "array",
953
+ items: {
954
+ type: "string",
955
+ },
956
+ },
957
+ },
958
+ required: ["results"],
959
+ additionalProperties: false,
960
+ } as const satisfies DataPortSchema;
961
+ }
962
+
963
+ async execute(input: { items: string[] }) {
964
+ return { results: input.items.map((item) => item.toUpperCase()) };
965
+ }
966
+ }
967
+
968
+ // Process array items in parallel
969
+ const task = new ArrayProcessorTask({
970
+ items: ["hello", "world", "foo", "bar"],
971
+ });
972
+
973
+ const result = await task.run();
974
+ // { results: ["HELLO", "WORLD", "FOO", "BAR"] }
975
+ ```
976
+
977
+ ### Job Queue Tasks
978
+
979
+ ```typescript
980
+ class RemoteProcessingTask extends JobQueueTask<{ data: string }, { result: string }> {
981
+ static readonly type = "RemoteProcessingTask";
982
+
983
+ async createJob() {
984
+ return new Job({
985
+ input: this.runInputData,
986
+ execute: async (input) => {
987
+ // This runs in a job queue (can be distributed)
988
+ const processed = await this.callRemoteAPI(input.data);
989
+ return { result: processed };
990
+ },
991
+ });
992
+ }
993
+
994
+ private async callRemoteAPI(data: string): Promise<string> {
995
+ // Simulate API call
996
+ return `Processed: ${data}`;
997
+ }
998
+ }
999
+ ```
1000
+
1001
+ ### Composite Tasks (Tasks that contain other tasks)
1002
+
1003
+ ```typescript
1004
+ class CompositeTask extends GraphAsTask<{ input: string }, { output: string }> {
1005
+ static readonly type = "CompositeTask";
1006
+
1007
+ constructor(input: { input: string }, config: any = {}) {
1008
+ super(input, config);
1009
+
1010
+ // Build internal graph
1011
+ const subTask1 = new TextProcessorTask({ text: input.input });
1012
+ const subTask2 = new TextProcessorTask({ multiplier: 2 });
1013
+
1014
+ this.subGraph.addTasks([subTask1, subTask2]);
1015
+ this.subGraph.addDataflow(
1016
+ new Dataflow(subTask1.config.id, "processed", subTask2.config.id, "text")
1017
+ );
1018
+ }
1019
+ }
1020
+ ```
1021
+
1022
+ ### Dynamic Task Creation
1023
+
1024
+ ```typescript
1025
+ class TaskFactory extends Task<{ count: number }, { results: any[] }> {
1026
+ async execute(input: { count: number }, context: IExecuteContext) {
1027
+ const results = [];
1028
+
1029
+ for (let i = 0; i < input.count; i++) {
1030
+ // Create tasks dynamically
1031
+ const dynamicTask = new MultiplyBy2Task({ value: i });
1032
+
1033
+ // Register with execution context
1034
+ context.own(dynamicTask);
1035
+
1036
+ const result = await dynamicTask.run();
1037
+ results.push(result);
1038
+ }
1039
+
1040
+ return { results };
1041
+ }
1042
+ }
1043
+ ```
1044
+
1045
+ ### Semantic Format
1046
+
1047
+ ### Semantic Compatibility Utilities for Task Graph Dataflows
1048
+
1049
+ In this project, task graphs have connections between tasks called dataflows. These dataflows have different kinds of compatibility checks:
1050
+
1051
+ #### **Static Compatibility**
1052
+
1053
+ Static rules help decide if an edge should be connected at all. A connection is statically compatible if:
1054
+
1055
+ - The source and target are the same exact type
1056
+ - The source connects to the equivalent of "any" (target accepts anything)
1057
+ - The source type is acceptable to the target (e.g., a string to something that accepts `oneOf[string[], string]`)
1058
+
1059
+ #### **Runtime Compatibility**
1060
+
1061
+ Assuming the connection is allowed at design time (passes static check), runtime rules determine if they are compatible during execution.
1062
+
1063
+ Currently, there is one runtime compatibility check:
1064
+
1065
+ - If both input and output schemas have `format` annotations attached, the formats must be compatible:
1066
+ - The format annotation matches the pattern `/\w+(:\w+)?/`; the first part is the "name". If alone, it matches any other with the same "name". If there is a second part, then that narrows the type.
1067
+ - Format checks apply to all types (strings, arrays, etc.), not just strings.
1068
+ - A schema with format can connect to a schema with no format (source has format, target doesn't).
1069
+ - A schema with no format cannot connect to a schema with format (source doesn't have format, target does).
1070
+
1071
+ **Example:**
1072
+ In the AI package, `format: 'model'` and `format: 'model:EmbeddingTask'` are used on string types.
1073
+ An input with property `model` and `format: 'model'` connects to a target with property `model` and `format: 'model:EmbeddingTask'`—this compatibility is called "runtime".
1074
+ It first passes the static check as compatible and then notices a difference in format at runtime.
1075
+
1076
+ Format is also used on array types, e.g., `format: 'Float64Array'` on arrays containing Float64 numbers.
1077
+
1078
+ > **Note:** Only connections that pass the runtime check will pass data at runtime.
1079
+
1080
+ ## API Reference
1081
+
1082
+ ### Core Classes
1083
+
1084
+ - **`Task<Input, Output, Config>`**: Base class for all tasks
1085
+ - **`TaskGraph`**: Low-level graph execution engine
1086
+ - **`Workflow<Input, Output>`**: High-level workflow builder
1087
+ - **`Dataflow`**: Represents data flow between tasks
1088
+ - **`TaskRunner`**: Handles individual task execution
1089
+
1090
+ ### Key Methods
1091
+
1092
+ #### Task
1093
+
1094
+ - `run(overrides?)`: Execute the task with optional input overrides
1095
+ - `runReactive(overrides?)`: Execute in reactive mode
1096
+ - `abort()`: Cancel execution
1097
+ - `setInput(input)`: Set input values
1098
+ - `validateInput(input)`: Validate input against schema
1099
+
1100
+ #### TaskGraph
1101
+
1102
+ - `addTask(task)` / `addTasks(tasks)`: Add tasks to graph
1103
+ - `addDataflow(dataflow)` / `addDataflows(dataflows)`: Add data flows
1104
+ - `run(input?, config?)`: Execute the graph
1105
+ - `getTask(id)`: Get task by ID
1106
+ - `getDataflow(id)`: Get dataflow by ID
1107
+
1108
+ #### Workflow
1109
+
1110
+ - `addTask(task)`: Add task to workflow
1111
+ - `pipe(...tasks)`: Create pipeline
1112
+ - `parallel(tasks, strategy?)`: Create parallel execution
1113
+ - `run(input?)`: Execute workflow
1114
+ - `abort()`: Cancel execution
1115
+ - `reset()`: Reset workflow state
1116
+
1117
+ ### Storage Interfaces
1118
+
1119
+ - **`TaskOutputRepository`**: Interface for task output caching
1120
+ - **`TaskGraphRepository`**: Interface for task graph persistence
1121
+
1122
+ ### Error Types
1123
+
1124
+ - **`TaskError`**: Base error class
1125
+ - **`TaskAbortedError`**: Task was cancelled
1126
+ - **`TaskFailedError`**: Task execution failed
1127
+ - **`TaskInvalidInputError`**: Invalid input provided
1128
+
1129
+ ## Examples
1130
+
1131
+ ### Data Processing Pipeline
1132
+
1133
+ ```typescript
1134
+ // Define processing tasks
1135
+ class LoadDataTask extends Task<{ source: string }, { data: any[] }> {
1136
+ static readonly type = "LoadDataTask";
1137
+
1138
+ async execute(input: { source: string }) {
1139
+ const data = await this.loadFromSource(input.source);
1140
+ return { data };
1141
+ }
1142
+
1143
+ private async loadFromSource(source: string): Promise<any[]> {
1144
+ // Implementation
1145
+ return [];
1146
+ }
1147
+ }
1148
+
1149
+ class TransformDataTask extends Task<{ data: any[] }, { transformed: any[] }> {
1150
+ static readonly type = "TransformDataTask";
1151
+
1152
+ async execute(input: { data: any[] }) {
1153
+ const transformed = input.data.map((item) => ({
1154
+ ...item,
1155
+ processed: true,
1156
+ timestamp: new Date(),
1157
+ }));
1158
+ return { transformed };
1159
+ }
1160
+ }
1161
+
1162
+ class SaveDataTask extends Task<{ data: any[] }, { saved: boolean }> {
1163
+ static readonly type = "SaveDataTask";
1164
+
1165
+ async execute(input: { data: any[] }) {
1166
+ await this.saveToDestination(input.data);
1167
+ return { saved: true };
1168
+ }
1169
+
1170
+ private async saveToDestination(data: any[]): Promise<void> {
1171
+ // Implementation
1172
+ }
1173
+ }
1174
+
1175
+ // Build pipeline
1176
+ const pipeline = pipe([
1177
+ new LoadDataTask({ source: "database" }),
1178
+ new TransformDataTask(),
1179
+ new SaveDataTask(),
1180
+ ]);
1181
+
1182
+ const result = await pipeline.run();
1183
+ ```
1184
+
1185
+ ### Parallel Data Processing
1186
+
1187
+ ```typescript
1188
+ // Process multiple data sources in parallel
1189
+ const workflow = new Workflow();
1190
+
1191
+ workflow.parallel([
1192
+ new LoadDataTask({ source: "api-1" }),
1193
+ new LoadDataTask({ source: "api-2" }),
1194
+ new LoadDataTask({ source: "api-3" }),
1195
+ ]);
1196
+
1197
+ // Merge results
1198
+ workflow.addTask(new MergeDataTask());
1199
+
1200
+ const result = await workflow.run();
1201
+ ```
1202
+
1203
+ ### Error Recovery Pipeline
1204
+
1205
+ ```typescript
1206
+ class RetryableTask extends Task<{ url: string; retries: number }, { data: any }> {
1207
+ async execute(input: { url: string; retries: number }) {
1208
+ for (let i = 0; i < input.retries; i++) {
1209
+ try {
1210
+ const data = await fetch(input.url).then((r) => r.json());
1211
+ return { data };
1212
+ } catch (error) {
1213
+ if (i === input.retries - 1) {
1214
+ throw new TaskError(`Failed after ${input.retries} retries: ${error.message}`);
1215
+ }
1216
+ await new Promise((resolve) => setTimeout(resolve, 1000 * Math.pow(2, i)));
1217
+ }
1218
+ }
1219
+ throw new TaskError("Unexpected error");
1220
+ }
1221
+ }
1222
+
1223
+ const workflow = new Workflow();
1224
+ workflow.addTask(new RetryableTask({ url: "https://api.example.com", retries: 3 }));
1225
+
1226
+ try {
1227
+ const result = await workflow.run();
1228
+ } catch (error) {
1229
+ console.log("All retries failed:", error.message);
1230
+ }
1231
+ ```
1232
+
1233
+ ## Testing
1234
+
1235
+ The package includes comprehensive test utilities:
1236
+
1237
+ ```bash
1238
+ # Run all tests
1239
+ bun test
1240
+
1241
+ # Run specific test file
1242
+ bun test src/test/task-graph/TaskGraph.test.ts
1243
+
1244
+ # Run tests with coverage
1245
+ bun test --coverage
1246
+ ```
1247
+
1248
+ ### Testing Your Tasks
1249
+
1250
+ ```typescript
1251
+ import { describe, test, expect } from "vitest";
1252
+
1253
+ describe("MyCustomTask", () => {
1254
+ test("should process input correctly", async () => {
1255
+ const task = new MyCustomTask({ input: "test" });
1256
+ const result = await task.run();
1257
+
1258
+ expect(result.output).toBe("expected-result");
1259
+ });
1260
+
1261
+ test("should handle errors gracefully", async () => {
1262
+ const task = new MyCustomTask({ input: "invalid" });
1263
+
1264
+       await expect(task.run()).rejects.toThrow(TaskError);
1265
+ });
1266
+
1267
+ test("should respect cancellation", async () => {
1268
+ const task = new LongRunningTask();
1269
+
1270
+ const resultPromise = task.run();
1271
+ task.abort();
1272
+
1273
+ await expect(resultPromise).rejects.toThrow(TaskAbortedError);
1274
+ });
1275
+ });
1276
+ ```
1277
+
1278
+ ## License
1279
+
1280
+ Apache 2.0 - See [LICENSE](./LICENSE) for details.