@yigitahmetsahin/workflow-ts 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/LICENSE ADDED
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2026 yigitahmetsahin
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
package/README.md ADDED
@@ -0,0 +1,255 @@
1
+ # @yigitahmetsahin/workflow-ts
2
+
3
+ A simple, extensible TypeScript workflow engine supporting serial and parallel work execution with full type inference.
4
+
5
+ [![npm version](https://badge.fury.io/js/%40yigitahmetsahin%2Fworkflow-ts.svg)](https://www.npmjs.com/package/@yigitahmetsahin/workflow-ts)
6
+ [![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT)
7
+
8
+ ## Features
9
+
10
+ - 🔄 **Serial & Parallel Execution** - Chain work items sequentially or run them concurrently
11
+ - đŸŽ¯ **Full Type Inference** - Work names and result types are automatically inferred
12
+ - â­ī¸ **Conditional Execution** - Skip work items based on runtime conditions
13
+ - đŸ›Ąī¸ **Error Handling** - Built-in error callbacks and workflow failure states
14
+ - 📊 **Execution Tracking** - Duration tracking for each work and for the workflow as a whole
15
+ - đŸĒļ **Zero Dependencies** - Lightweight with no external runtime dependencies
16
+
17
+ ## Installation
18
+
19
+ ```bash
20
+ npm install @yigitahmetsahin/workflow-ts
21
+ ```
22
+
23
+ ```bash
24
+ yarn add @yigitahmetsahin/workflow-ts
25
+ ```
26
+
27
+ ```bash
28
+ pnpm add @yigitahmetsahin/workflow-ts
29
+ ```
30
+
31
+ ## Quick Start
32
+
33
+ ```typescript
34
+ import { Workflow, WorkflowStatus } from '@yigitahmetsahin/workflow-ts';
35
+
36
+ const workflow = new Workflow<{ userId: string }>()
37
+ .serial({
38
+ name: 'validate',
39
+ execute: async (ctx) => ctx.data.userId.length > 0,
40
+ })
41
+ .parallel([
42
+ {
43
+ name: 'fetchOrders',
44
+ execute: async (ctx) => [{ id: 1 }, { id: 2 }],
45
+ },
46
+ {
47
+ name: 'fetchProfile',
48
+ execute: async (ctx) => ({ name: 'John', email: 'john@example.com' }),
49
+ },
50
+ ])
51
+ .serial({
52
+ name: 'process',
53
+ execute: async (ctx) => {
54
+ // ✅ Types are automatically inferred!
55
+ const orders = ctx.workResults.get('fetchOrders'); // { id: number }[] | undefined
56
+ const profile = ctx.workResults.get('fetchProfile'); // { name: string; email: string } | undefined
57
+ return { orderCount: orders?.length ?? 0, userName: profile?.name };
58
+ },
59
+ });
60
+
61
+ const result = await workflow.run({ userId: 'user-123' });
62
+
63
+ if (result.status === WorkflowStatus.COMPLETED) {
64
+ console.log('Workflow completed in', result.totalDuration, 'ms');
65
+ console.log('Final result:', result.context.workResults.get('process'));
66
+ }
67
+ ```
68
+
69
+ ## API Reference
70
+
71
+ ### `Workflow<TData>`
72
+
73
+ Create a new workflow with optional initial data type.
74
+
75
+ ```typescript
76
+ const workflow = new Workflow<{ userId: string }>();
77
+ ```
78
+
79
+ ### `.serial(work)`
80
+
81
+ Add a serial (sequential) work to the workflow.
82
+
83
+ ```typescript
84
+ workflow.serial({
85
+ name: 'workName', // Unique name for this work
86
+ execute: async (ctx) => { // Async function that performs the work
87
+ return result; // Return value becomes available to subsequent works
88
+ },
89
+ shouldRun: (ctx) => true, // Optional: return false to skip this work
90
+ onError: (error, ctx) => { // Optional: error handler
91
+ console.error(error);
92
+ },
93
+ });
94
+ ```
95
+
96
+ ### `.parallel(works)`
97
+
98
+ Add parallel works that execute concurrently.
99
+
100
+ ```typescript
101
+ workflow.parallel([
102
+ { name: 'task1', execute: async (ctx) => result1 },
103
+ { name: 'task2', execute: async (ctx) => result2 },
104
+ { name: 'task3', execute: async (ctx) => result3 },
105
+ ]);
106
+ ```
107
+
108
+ ### `.run(initialData)`
109
+
110
+ Execute the workflow with initial data.
111
+
112
+ ```typescript
113
+ const result = await workflow.run({ userId: '123' });
114
+ ```
115
+
116
+ ### Result Object
117
+
118
+ ```typescript
119
+ interface IWorkflowResult {
120
+ status: WorkflowStatus; // 'completed' | 'failed'
121
+ context: {
122
+ data: TData; // Initial data passed to run()
123
+ workResults: IWorkResultsMap; // Type-safe map of work results
124
+ };
125
+ workResults: Map<string, IWorkResult>; // Detailed results per work
126
+ totalDuration: number; // Total execution time in ms
127
+ error?: Error; // Error if workflow failed
128
+ }
129
+ ```
130
+
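+ For example, the detailed `workResults` map on the result exposes each work's status, duration, and error. A minimal sketch (hypothetical usage, reusing the `workflow` from the Quick Start):
+
+ ```typescript
+ import { WorkStatus } from '@yigitahmetsahin/workflow-ts';
+
+ const result = await workflow.run({ userId: 'user-123' });
+
+ // Inspect every work's outcome and timing
+ for (const [name, workResult] of result.workResults) {
+   console.log(`${name}: ${workResult.status} (${workResult.duration}ms)`);
+   if (workResult.status === WorkStatus.FAILED) {
+     console.error('  cause:', workResult.error);
+   }
+ }
+ ```
+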
131
+ ## Conditional Execution
132
+
133
+ Skip works based on runtime conditions:
134
+
135
+ ```typescript
136
+ workflow.serial({
137
+ name: 'sendEmail',
138
+ shouldRun: (ctx) => ctx.data.sendNotifications,
139
+ execute: async (ctx) => {
140
+ await sendEmail(ctx.data.email);
141
+ return { sent: true };
142
+ },
143
+ });
144
+ ```
145
+
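+ A skipped work never writes to `context.workResults`, but it still gets an entry in the detailed result map with status `skipped`. A minimal sketch, assuming the workflow above was built with data of type `{ sendNotifications: boolean; email: string }`:
+
+ ```typescript
+ const result = await workflow.run({ sendNotifications: false, email: 'user@example.com' });
+
+ result.context.workResults.get('sendEmail'); // undefined (the work never ran)
+ result.workResults.get('sendEmail'); // { status: 'skipped', ... }
+ ```
+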
146
+ ## Error Handling
147
+
148
+ Handle errors at the work level:
149
+
150
+ ```typescript
151
+ workflow.serial({
152
+ name: 'riskyOperation',
153
+ execute: async (ctx) => {
154
+ if (Math.random() < 0.5) throw new Error('Random failure');
155
+ return 'success';
156
+ },
157
+ onError: async (error, ctx) => {
158
+ await logError(error, ctx.data);
159
+ // Error will still propagate and fail the workflow
160
+ },
161
+ });
162
+ ```
163
+
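+ Note that `run()` does not reject when a work fails: in the shipped implementation the error is caught, the workflow stops, and the failure is reported on the returned result. A minimal sketch of handling it at the workflow level (hypothetical, continuing the example above):
+
+ ```typescript
+ import { WorkflowStatus } from '@yigitahmetsahin/workflow-ts';
+
+ const result = await workflow.run({});
+
+ if (result.status === WorkflowStatus.FAILED) {
+   console.error('Workflow stopped:', result.error?.message);
+   console.log('Details:', result.workResults.get('riskyOperation'));
+ }
+ ```
+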
164
+ ## Behavior Diagram
165
+
166
+ ```
167
+ ┌─────────────────────────────────────────────────────────────────────────────┐
168
+ │ Workflow.run() │
169
+ └─────────────────────────────────────────────────────────────────────────────┘
170
+ │
171
+ â–ŧ
172
+ ┌─────────────────────────┐
173
+ │ Initialize Context │
174
+ │ { data, workResults } │
175
+ └─────────────────────────┘
176
+ │
177
+ â–ŧ
178
+ ┌─────────────────────────┐
179
+ │ For each work item │◄─────────────────┐
180
+ └─────────────────────────┘ │
181
+ │ │
182
+ ┌───────────┴───────────┐ │
183
+ â–ŧ â–ŧ │
184
+ ┌──────────┐ ┌────────────┐ │
185
+ │ Serial │ │ Parallel │ │
186
+ └──────────┘ └────────────┘ │
187
+ │ │ │
188
+ â–ŧ â–ŧ │
189
+ ┌──────────────────┐ ┌────────────────────┐ │
190
+ │ shouldRun()? │ │ For each work in │ │
191
+ └──────────────────┘ │ parallel │ │
192
+ │ │ └────────────────────┘ │
193
+ Yes No │ │
194
+ │ │ ┌────────â”ŧ────────┐ │
195
+ â–ŧ â–ŧ â–ŧ â–ŧ â–ŧ │
196
+ ┌───────────┐ ┌────────┐ ┌──┐ ┌──┐ ┌──┐ │
197
+ │ execute() │ │ SKIP │ │W1│ │W2│ │W3│ │
198
+ └───────────┘ └────────┘ └──┘ └──┘ └──┘ │
199
+ │ │ │ │ │
200
+ â–ŧ └───────┴───────┘ │
201
+ ┌───────────────┐ │ │
202
+ │ Store result │ â–ŧ │
203
+ │ in context │ ┌─────────────────────┐ │
204
+ └───────────────┘ │ Promise.all() │ │
205
+ │ │ (concurrent exec) │ │
206
+ │ └─────────────────────┘ │
207
+ â–ŧ │ │
208
+ ┌───────────────┐ ┌─────────────────────┐ │
209
+ │ Success │ │ Collect results │ │
210
+ └───────────────┘ │ Check for errors │ │
211
+ │ └─────────────────────┘ │
212
+ │ │ │
213
+ └──────────────â”Ŧ───────────┘ │
214
+ │ │
215
+ â–ŧ │
216
+ ┌─────────────────┐ │
217
+ │ More works? │─────────Yes─────────────┘
218
+ └─────────────────┘
219
+ │
220
+ No
221
+ │
222
+ â–ŧ
223
+ ┌─────────────────────────────┐
224
+ │ Return Result │
225
+ │ { status, workResults, │
226
+ │ context, totalDuration } │
227
+ └─────────────────────────────┘
228
+ ```
229
+
230
+ ## Execution Timeline
231
+
232
+ ```
233
+ Time ──────────────────────────────────────────────────────────────────â–ē
234
+
235
+ ┌──────────────┐
236
+ │ validate │
237
+ │ (serial) │
238
+ └──────────────┘
239
+ ┌──────────────┐
240
+ │ fetchOrders │
241
+ │ (parallel) ├──────────┐
242
+ └──────────────┘ │
243
+ ┌──────────────┐ │ concurrent
244
+ │ fetchProfile │ │
245
+ │ (parallel) ├──────────┘
246
+ └──────────────┘
247
+ ┌──────────────┐
248
+ │ process │
249
+ │ (serial) │
250
+ └──────────────┘
251
+ ```
252
+
253
+ ## License
254
+
255
+ MIT
@@ -0,0 +1,151 @@
1
+ /**
2
+ * Work Status
3
+ */
4
+ declare enum WorkStatus {
5
+ PENDING = "pending",
6
+ RUNNING = "running",
7
+ COMPLETED = "completed",
8
+ FAILED = "failed",
9
+ SKIPPED = "skipped"
10
+ }
11
+ /**
12
+ * Workflow Status
13
+ */
14
+ declare enum WorkflowStatus {
15
+ PENDING = "pending",
16
+ RUNNING = "running",
17
+ COMPLETED = "completed",
18
+ FAILED = "failed"
19
+ }
20
+ /**
21
+ * Context passed between workflow works
22
+ * TData is the type of shared data between works
23
+ * TWorkResults is a record mapping work names to their result types
24
+ */
25
+ interface IWorkflowContext<TData = Record<string, unknown>, TWorkResults extends Record<string, unknown> = Record<string, unknown>> {
26
+ /** Shared data between works */
27
+ data: TData;
28
+ /** Work-specific results keyed by work name with inferred types */
29
+ workResults: IWorkResultsMap<TWorkResults>;
30
+ }
31
+ /**
32
+ * Type-safe map for work results with automatic type inference
33
+ */
34
+ interface IWorkResultsMap<TWorkResults extends Record<string, unknown> = Record<string, unknown>> {
35
+ get<K extends keyof TWorkResults>(name: K): TWorkResults[K] | undefined;
36
+ set<K extends keyof TWorkResults>(name: K, value: TWorkResults[K]): void;
37
+ has(name: keyof TWorkResults): boolean;
38
+ }
39
+ /**
40
+ * Result of a single work execution
41
+ */
42
+ interface IWorkResult<TResult = unknown> {
43
+ status: WorkStatus;
44
+ result?: TResult;
45
+ error?: Error;
46
+ duration: number;
47
+ }
48
+ /**
49
+ * Definition of a work with inferred name and result type
50
+ */
51
+ interface IWorkDefinition<TName extends string, TData = Record<string, unknown>, TResult = unknown, TAvailableWorkResults extends Record<string, unknown> = Record<string, unknown>> {
52
+ /** Unique name for the work */
53
+ name: TName;
54
+ /** Execute function - receives context and returns result */
55
+ execute: (context: IWorkflowContext<TData, TAvailableWorkResults>) => Promise<TResult>;
56
+ /** Optional: condition to determine if work should run */
57
+ shouldRun?: (context: IWorkflowContext<TData, TAvailableWorkResults>) => boolean | Promise<boolean>;
58
+ /** Optional: called when work fails */
59
+ onError?: (error: Error, context: IWorkflowContext<TData, TAvailableWorkResults>) => void | Promise<void>;
60
+ }
61
+ /**
62
+ * Internal work representation
63
+ */
64
+ interface IWorkflowWork {
65
+ type: 'serial' | 'parallel';
66
+ works: IWorkDefinition<string, any, any, any>[];
67
+ }
68
+ /**
69
+ * Result of workflow execution
70
+ */
71
+ interface IWorkflowResult<TData = Record<string, unknown>, TWorkResults extends Record<string, unknown> = Record<string, unknown>> {
72
+ status: WorkflowStatus;
73
+ context: IWorkflowContext<TData, TWorkResults>;
74
+ workResults: Map<keyof TWorkResults, IWorkResult>;
75
+ totalDuration: number;
76
+ error?: Error;
77
+ }
78
+
79
+ /**
80
+ * A simple, extensible workflow engine that supports serial and parallel work execution.
81
+ * Work names and result types are automatically inferred from the workflow definition.
82
+ *
83
+ * @example
84
+ * ```typescript
85
+ * const workflow = new Workflow<{ userId: string }>()
86
+ * .serial({
87
+ * name: 'validate',
88
+ * execute: async (ctx) => true, // returns boolean
89
+ * })
90
+ * .parallel([
91
+ * {
92
+ * name: 'fetchOrders',
93
+ * execute: async (ctx) => [{ id: 1 }], // returns Order[]
94
+ * },
95
+ * {
96
+ * name: 'fetchProfile',
97
+ * execute: async (ctx) => ({ name: 'John' }), // returns Profile
98
+ * },
99
+ * ])
100
+ * .serial({
101
+ * name: 'process',
102
+ * execute: async (ctx) => {
103
+ * // ✅ Autocomplete for names AND types are inferred!
104
+ * const isValid = ctx.workResults.get('validate'); // boolean | undefined
105
+ * const orders = ctx.workResults.get('fetchOrders'); // Order[] | undefined
106
+ * const profile = ctx.workResults.get('fetchProfile'); // Profile | undefined
107
+ * return { orders, profile };
108
+ * },
109
+ * });
110
+ *
111
+ * const result = await workflow.run({ userId: '123' });
112
+ * ```
113
+ */
114
+ declare class Workflow<TData = Record<string, unknown>, TWorkResults extends Record<string, unknown> = NonNullable<unknown>> {
115
+ private works;
116
+ /**
117
+ * Add a serial work to the workflow.
118
+ * The work name and result type are automatically inferred.
119
+ */
120
+ serial<TName extends string, TResult>(work: IWorkDefinition<TName, TData, TResult, TWorkResults>): Workflow<TData, TWorkResults & {
121
+ [K in TName]: TResult;
122
+ }>;
123
+ /**
124
+ * Add parallel works to the workflow.
125
+ * All work names and result types are automatically inferred.
126
+ */
127
+ parallel<const TParallelWorks extends readonly IWorkDefinition<string, TData, unknown, TWorkResults>[]>(works: TParallelWorks): Workflow<TData, TWorkResults & ParallelWorksToRecord<TParallelWorks>>;
128
+ /**
129
+ * Execute the workflow with initial data
130
+ */
131
+ run(initialData: TData): Promise<IWorkflowResult<TData, TWorkResults>>;
132
+ /**
133
+ * Execute a single work
134
+ */
135
+ private executeWork;
136
+ /**
137
+ * Execute multiple works in parallel
138
+ */
139
+ private executeParallelWorks;
140
+ }
141
+ /**
142
+ * Helper type to extract work results from parallel works array
143
+ * Uses Extract to preserve the specific type for each work name
144
+ */
145
+ type ParallelWorksToRecord<T extends readonly IWorkDefinition<string, any, any, any>[]> = {
146
+ [K in T[number]['name']]: Extract<T[number], {
147
+ name: K;
148
+ }> extends IWorkDefinition<string, any, infer R, any> ? R : never;
149
+ };
150
+
151
+ export { type IWorkDefinition, type IWorkResult, type IWorkResultsMap, type IWorkflowContext, type IWorkflowResult, type IWorkflowWork, WorkStatus, Workflow, WorkflowStatus };
@@ -0,0 +1,151 @@
1
+ /**
2
+ * Work Status
3
+ */
4
+ declare enum WorkStatus {
5
+ PENDING = "pending",
6
+ RUNNING = "running",
7
+ COMPLETED = "completed",
8
+ FAILED = "failed",
9
+ SKIPPED = "skipped"
10
+ }
11
+ /**
12
+ * Workflow Status
13
+ */
14
+ declare enum WorkflowStatus {
15
+ PENDING = "pending",
16
+ RUNNING = "running",
17
+ COMPLETED = "completed",
18
+ FAILED = "failed"
19
+ }
20
+ /**
21
+ * Context passed between workflow works
22
+ * TData is the type of shared data between works
23
+ * TWorkResults is a record mapping work names to their result types
24
+ */
25
+ interface IWorkflowContext<TData = Record<string, unknown>, TWorkResults extends Record<string, unknown> = Record<string, unknown>> {
26
+ /** Shared data between works */
27
+ data: TData;
28
+ /** Work-specific results keyed by work name with inferred types */
29
+ workResults: IWorkResultsMap<TWorkResults>;
30
+ }
31
+ /**
32
+ * Type-safe map for work results with automatic type inference
33
+ */
34
+ interface IWorkResultsMap<TWorkResults extends Record<string, unknown> = Record<string, unknown>> {
35
+ get<K extends keyof TWorkResults>(name: K): TWorkResults[K] | undefined;
36
+ set<K extends keyof TWorkResults>(name: K, value: TWorkResults[K]): void;
37
+ has(name: keyof TWorkResults): boolean;
38
+ }
39
+ /**
40
+ * Result of a single work execution
41
+ */
42
+ interface IWorkResult<TResult = unknown> {
43
+ status: WorkStatus;
44
+ result?: TResult;
45
+ error?: Error;
46
+ duration: number;
47
+ }
48
+ /**
49
+ * Definition of a work with inferred name and result type
50
+ */
51
+ interface IWorkDefinition<TName extends string, TData = Record<string, unknown>, TResult = unknown, TAvailableWorkResults extends Record<string, unknown> = Record<string, unknown>> {
52
+ /** Unique name for the work */
53
+ name: TName;
54
+ /** Execute function - receives context and returns result */
55
+ execute: (context: IWorkflowContext<TData, TAvailableWorkResults>) => Promise<TResult>;
56
+ /** Optional: condition to determine if work should run */
57
+ shouldRun?: (context: IWorkflowContext<TData, TAvailableWorkResults>) => boolean | Promise<boolean>;
58
+ /** Optional: called when work fails */
59
+ onError?: (error: Error, context: IWorkflowContext<TData, TAvailableWorkResults>) => void | Promise<void>;
60
+ }
61
+ /**
62
+ * Internal work representation
63
+ */
64
+ interface IWorkflowWork {
65
+ type: 'serial' | 'parallel';
66
+ works: IWorkDefinition<string, any, any, any>[];
67
+ }
68
+ /**
69
+ * Result of workflow execution
70
+ */
71
+ interface IWorkflowResult<TData = Record<string, unknown>, TWorkResults extends Record<string, unknown> = Record<string, unknown>> {
72
+ status: WorkflowStatus;
73
+ context: IWorkflowContext<TData, TWorkResults>;
74
+ workResults: Map<keyof TWorkResults, IWorkResult>;
75
+ totalDuration: number;
76
+ error?: Error;
77
+ }
78
+
79
+ /**
80
+ * A simple, extensible workflow engine that supports serial and parallel work execution.
81
+ * Work names and result types are automatically inferred from the workflow definition.
82
+ *
83
+ * @example
84
+ * ```typescript
85
+ * const workflow = new Workflow<{ userId: string }>()
86
+ * .serial({
87
+ * name: 'validate',
88
+ * execute: async (ctx) => true, // returns boolean
89
+ * })
90
+ * .parallel([
91
+ * {
92
+ * name: 'fetchOrders',
93
+ * execute: async (ctx) => [{ id: 1 }], // returns Order[]
94
+ * },
95
+ * {
96
+ * name: 'fetchProfile',
97
+ * execute: async (ctx) => ({ name: 'John' }), // returns Profile
98
+ * },
99
+ * ])
100
+ * .serial({
101
+ * name: 'process',
102
+ * execute: async (ctx) => {
103
+ * // ✅ Autocomplete for names AND types are inferred!
104
+ * const isValid = ctx.workResults.get('validate'); // boolean | undefined
105
+ * const orders = ctx.workResults.get('fetchOrders'); // Order[] | undefined
106
+ * const profile = ctx.workResults.get('fetchProfile'); // Profile | undefined
107
+ * return { orders, profile };
108
+ * },
109
+ * });
110
+ *
111
+ * const result = await workflow.run({ userId: '123' });
112
+ * ```
113
+ */
114
+ declare class Workflow<TData = Record<string, unknown>, TWorkResults extends Record<string, unknown> = NonNullable<unknown>> {
115
+ private works;
116
+ /**
117
+ * Add a serial work to the workflow.
118
+ * The work name and result type are automatically inferred.
119
+ */
120
+ serial<TName extends string, TResult>(work: IWorkDefinition<TName, TData, TResult, TWorkResults>): Workflow<TData, TWorkResults & {
121
+ [K in TName]: TResult;
122
+ }>;
123
+ /**
124
+ * Add parallel works to the workflow.
125
+ * All work names and result types are automatically inferred.
126
+ */
127
+ parallel<const TParallelWorks extends readonly IWorkDefinition<string, TData, unknown, TWorkResults>[]>(works: TParallelWorks): Workflow<TData, TWorkResults & ParallelWorksToRecord<TParallelWorks>>;
128
+ /**
129
+ * Execute the workflow with initial data
130
+ */
131
+ run(initialData: TData): Promise<IWorkflowResult<TData, TWorkResults>>;
132
+ /**
133
+ * Execute a single work
134
+ */
135
+ private executeWork;
136
+ /**
137
+ * Execute multiple works in parallel
138
+ */
139
+ private executeParallelWorks;
140
+ }
141
+ /**
142
+ * Helper type to extract work results from parallel works array
143
+ * Uses Extract to preserve the specific type for each work name
144
+ */
145
+ type ParallelWorksToRecord<T extends readonly IWorkDefinition<string, any, any, any>[]> = {
146
+ [K in T[number]['name']]: Extract<T[number], {
147
+ name: K;
148
+ }> extends IWorkDefinition<string, any, infer R, any> ? R : never;
149
+ };
150
+
151
+ export { type IWorkDefinition, type IWorkResult, type IWorkResultsMap, type IWorkflowContext, type IWorkflowResult, type IWorkflowWork, WorkStatus, Workflow, WorkflowStatus };
package/dist/index.js ADDED
@@ -0,0 +1,223 @@
1
+ "use strict";
2
+ var __defProp = Object.defineProperty;
3
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
4
+ var __getOwnPropNames = Object.getOwnPropertyNames;
5
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
6
+ var __export = (target, all) => {
7
+ for (var name in all)
8
+ __defProp(target, name, { get: all[name], enumerable: true });
9
+ };
10
+ var __copyProps = (to, from, except, desc) => {
11
+ if (from && typeof from === "object" || typeof from === "function") {
12
+ for (let key of __getOwnPropNames(from))
13
+ if (!__hasOwnProp.call(to, key) && key !== except)
14
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
15
+ }
16
+ return to;
17
+ };
18
+ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
19
+
20
+ // src/index.ts
21
+ var index_exports = {};
22
+ __export(index_exports, {
23
+ WorkStatus: () => WorkStatus,
24
+ Workflow: () => Workflow,
25
+ WorkflowStatus: () => WorkflowStatus
26
+ });
27
+ module.exports = __toCommonJS(index_exports);
28
+
29
+ // src/workflow.types.ts
30
+ var WorkStatus = /* @__PURE__ */ ((WorkStatus2) => {
31
+ WorkStatus2["PENDING"] = "pending";
32
+ WorkStatus2["RUNNING"] = "running";
33
+ WorkStatus2["COMPLETED"] = "completed";
34
+ WorkStatus2["FAILED"] = "failed";
35
+ WorkStatus2["SKIPPED"] = "skipped";
36
+ return WorkStatus2;
37
+ })(WorkStatus || {});
38
+ var WorkflowStatus = /* @__PURE__ */ ((WorkflowStatus2) => {
39
+ WorkflowStatus2["PENDING"] = "pending";
40
+ WorkflowStatus2["RUNNING"] = "running";
41
+ WorkflowStatus2["COMPLETED"] = "completed";
42
+ WorkflowStatus2["FAILED"] = "failed";
43
+ return WorkflowStatus2;
44
+ })(WorkflowStatus || {});
45
+
46
+ // src/workflow.ts
47
+ var WorkResultsMap = class {
48
+ constructor() {
49
+ this.map = /* @__PURE__ */ new Map();
50
+ }
51
+ get(name) {
52
+ return this.map.get(name);
53
+ }
54
+ set(name, value) {
55
+ this.map.set(name, value);
56
+ }
57
+ has(name) {
58
+ return this.map.has(name);
59
+ }
60
+ };
61
+ var Workflow = class {
62
+ constructor() {
63
+ this.works = [];
64
+ }
65
+ /**
66
+ * Add a serial work to the workflow.
67
+ * The work name and result type are automatically inferred.
68
+ */
69
+ serial(work) {
70
+ this.works.push({
71
+ type: "serial",
72
+ works: [work]
73
+ });
74
+ return this;
75
+ }
76
+ /**
77
+ * Add parallel works to the workflow.
78
+ * All work names and result types are automatically inferred.
79
+ */
80
+ parallel(works) {
81
+ this.works.push({
82
+ type: "parallel",
83
+ works
84
+ });
85
+ return this;
86
+ }
87
+ /**
88
+ * Execute the workflow with initial data
89
+ */
90
+ async run(initialData) {
91
+ const startTime = Date.now();
92
+ const context = {
93
+ data: initialData,
94
+ workResults: new WorkResultsMap()
95
+ };
96
+ const workResults = /* @__PURE__ */ new Map();
97
+ try {
98
+ for (const workGroup of this.works) {
99
+ if (workGroup.type === "serial") {
100
+ await this.executeWork(workGroup.works[0], context, workResults);
101
+ } else {
102
+ await this.executeParallelWorks(workGroup.works, context, workResults);
103
+ }
104
+ }
105
+ return {
106
+ status: "completed" /* COMPLETED */,
107
+ context,
108
+ workResults,
109
+ totalDuration: Date.now() - startTime
110
+ };
111
+ } catch (error) {
112
+ return {
113
+ status: "failed" /* FAILED */,
114
+ context,
115
+ workResults,
116
+ totalDuration: Date.now() - startTime,
117
+ error: error instanceof Error ? error : new Error(String(error))
118
+ };
119
+ }
120
+ }
121
+ /**
122
+ * Execute a single work
123
+ */
124
+ async executeWork(work, context, workResults) {
125
+ const workStartTime = Date.now();
126
+ if (work.shouldRun) {
127
+ const shouldRun = await work.shouldRun(context);
128
+ if (!shouldRun) {
129
+ workResults.set(work.name, {
130
+ status: "skipped" /* SKIPPED */,
131
+ duration: Date.now() - workStartTime
132
+ });
133
+ return;
134
+ }
135
+ }
136
+ try {
137
+ const result = await work.execute(context);
138
+ context.workResults.set(work.name, result);
139
+ workResults.set(work.name, {
140
+ status: "completed" /* COMPLETED */,
141
+ result,
142
+ duration: Date.now() - workStartTime
143
+ });
144
+ } catch (error) {
145
+ const err = error instanceof Error ? error : new Error(String(error));
146
+ workResults.set(work.name, {
147
+ status: "failed" /* FAILED */,
148
+ error: err,
149
+ duration: Date.now() - workStartTime
150
+ });
151
+ if (work.onError) {
152
+ await work.onError(err, context);
153
+ }
154
+ throw err;
155
+ }
156
+ }
157
+ /**
158
+ * Execute multiple works in parallel
159
+ */
160
+ async executeParallelWorks(works, context, workResults) {
161
+ const promises = works.map(async (work) => {
162
+ const workStartTime = Date.now();
163
+ if (work.shouldRun) {
164
+ const shouldRun = await work.shouldRun(context);
165
+ if (!shouldRun) {
166
+ workResults.set(work.name, {
167
+ status: "skipped" /* SKIPPED */,
168
+ duration: Date.now() - workStartTime
169
+ });
170
+ return { work, skipped: true };
171
+ }
172
+ }
173
+ try {
174
+ const result = await work.execute(context);
175
+ return { work, result, startTime: workStartTime };
176
+ } catch (error) {
177
+ const err = error instanceof Error ? error : new Error(String(error));
178
+ return { work, error: err, startTime: workStartTime };
179
+ }
180
+ });
181
+ const results = await Promise.all(promises);
182
+ const errors = [];
183
+ for (const result of results) {
184
+ if ("skipped" in result && result.skipped) {
185
+ continue;
186
+ }
187
+ const duration = Date.now() - result.startTime;
188
+ if ("error" in result && result.error) {
189
+ workResults.set(result.work.name, {
190
+ status: "failed" /* FAILED */,
191
+ error: result.error,
192
+ duration
193
+ });
194
+ errors.push({ work: result.work, error: result.error });
195
+ } else {
196
+ context.workResults.set(
197
+ result.work.name,
198
+ result.result
199
+ );
200
+ workResults.set(result.work.name, {
201
+ status: "completed" /* COMPLETED */,
202
+ result: result.result,
203
+ duration
204
+ });
205
+ }
206
+ }
207
+ if (errors.length > 0) {
208
+ for (const { work, error } of errors) {
209
+ if (work.onError) {
210
+ await work.onError(error, context);
211
+ }
212
+ }
213
+ throw errors[0].error;
214
+ }
215
+ }
216
+ };
217
+ // Annotate the CommonJS export names for ESM import in node:
218
+ 0 && (module.exports = {
219
+ WorkStatus,
220
+ Workflow,
221
+ WorkflowStatus
222
+ });
223
+ //# sourceMappingURL=index.js.map
package/dist/index.js.map ADDED
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/index.ts","../src/workflow.types.ts","../src/workflow.ts"],"sourcesContent":["export * from './workflow';\nexport * from './workflow.types';\n","/**\n * Work Status\n */\nexport enum WorkStatus {\n PENDING = 'pending',\n RUNNING = 'running',\n COMPLETED = 'completed',\n FAILED = 'failed',\n SKIPPED = 'skipped',\n}\n\n/**\n * Workflow Status\n */\nexport enum WorkflowStatus {\n PENDING = 'pending',\n RUNNING = 'running',\n COMPLETED = 'completed',\n FAILED = 'failed',\n}\n\n/**\n * Context passed between workflow works\n * TData is the type of shared data between works\n * TWorkResults is a record mapping work names to their result types\n */\nexport interface IWorkflowContext<\n TData = Record<string, unknown>,\n TWorkResults extends Record<string, unknown> = Record<string, unknown>,\n> {\n /** Shared data between works */\n data: TData;\n /** Work-specific results keyed by work name with inferred types */\n workResults: IWorkResultsMap<TWorkResults>;\n}\n\n/**\n * Type-safe map for work results with automatic type inference\n */\nexport interface IWorkResultsMap<\n TWorkResults extends Record<string, unknown> = Record<string, unknown>,\n> {\n get<K extends keyof TWorkResults>(name: K): TWorkResults[K] | undefined;\n set<K extends keyof TWorkResults>(name: K, value: TWorkResults[K]): void;\n has(name: keyof TWorkResults): boolean;\n}\n\n/**\n * Result of a single work execution\n */\nexport interface IWorkResult<TResult = unknown> {\n status: WorkStatus;\n result?: TResult;\n error?: Error;\n duration: number;\n}\n\n/**\n * Definition of a work with inferred name and result type\n */\nexport interface IWorkDefinition<\n TName extends string,\n TData = Record<string, unknown>,\n TResult = unknown,\n TAvailableWorkResults extends Record<string, unknown> = Record<string, unknown>,\n> {\n /** Unique name for the work */\n name: TName;\n /** Execute function - receives context and returns result */\n execute: (\n context: IWorkflowContext<TData, TAvailableWorkResults>,\n ) => Promise<TResult>;\n /** Optional: condition to determine if work should run */\n shouldRun?: (\n context: IWorkflowContext<TData, TAvailableWorkResults>,\n ) => boolean | Promise<boolean>;\n /** Optional: called when work fails */\n onError?: (\n error: Error,\n context: IWorkflowContext<TData, TAvailableWorkResults>,\n ) => void | Promise<void>;\n}\n\n/**\n * Internal work representation\n */\nexport interface IWorkflowWork {\n type: 'serial' | 'parallel';\n \n works: IWorkDefinition<string, any, any, any>[];\n}\n\n/**\n * Result of workflow execution\n */\nexport interface IWorkflowResult<\n TData = Record<string, unknown>,\n TWorkResults extends Record<string, unknown> = Record<string, unknown>,\n> {\n status: WorkflowStatus;\n context: IWorkflowContext<TData, TWorkResults>;\n workResults: Map<keyof TWorkResults, IWorkResult>;\n totalDuration: number;\n error?: Error;\n}\n","import {\n IWorkflowContext,\n IWorkflowResult,\n IWorkResultsMap,\n WorkflowStatus,\n IWorkflowWork,\n IWorkDefinition,\n IWorkResult,\n WorkStatus,\n} from './workflow.types';\n\n/**\n * Internal implementation of IWorkResultsMap using a Map\n */\nclass WorkResultsMap<TWorkResults extends Record<string, unknown>>\n implements IWorkResultsMap<TWorkResults>\n{\n private map = new Map<keyof TWorkResults, unknown>();\n\n get<K extends keyof TWorkResults>(name: K): TWorkResults[K] | undefined {\n return this.map.get(name) as TWorkResults[K] | undefined;\n }\n\n set<K extends keyof TWorkResults>(name: K, value: 
TWorkResults[K]): void {\n this.map.set(name, value);\n }\n\n has(name: keyof TWorkResults): boolean {\n return this.map.has(name);\n }\n}\n\n/**\n * A simple, extensible workflow engine that supports serial and parallel work execution.\n * Work names and result types are automatically inferred from the workflow definition.\n *\n * @example\n * ```typescript\n * const workflow = new Workflow<{ userId: string }>()\n * .serial({\n * name: 'validate',\n * execute: async (ctx) => true, // returns boolean\n * })\n * .parallel([\n * {\n * name: 'fetchOrders',\n * execute: async (ctx) => [{ id: 1 }], // returns Order[]\n * },\n * {\n * name: 'fetchProfile',\n * execute: async (ctx) => ({ name: 'John' }), // returns Profile\n * },\n * ])\n * .serial({\n * name: 'process',\n * execute: async (ctx) => {\n * // ✅ Autocomplete for names AND types are inferred!\n * const isValid = ctx.workResults.get('validate'); // boolean | undefined\n * const orders = ctx.workResults.get('fetchOrders'); // Order[] | undefined\n * const profile = ctx.workResults.get('fetchProfile'); // Profile | undefined\n * return { orders, profile };\n * },\n * });\n *\n * const result = await workflow.run({ userId: '123' });\n * ```\n */\nexport class Workflow<\n TData = Record<string, unknown>,\n TWorkResults extends Record<string, unknown> = NonNullable<unknown>,\n> {\n private works: IWorkflowWork[] = [];\n\n /**\n * Add a serial work to the workflow.\n * The work name and result type are automatically inferred.\n */\n serial<TName extends string, TResult>(\n work: IWorkDefinition<TName, TData, TResult, TWorkResults>,\n ): Workflow<TData, TWorkResults & { [K in TName]: TResult }> {\n this.works.push({\n type: 'serial',\n works: [work],\n });\n return this as unknown as Workflow<\n TData,\n TWorkResults & { [K in TName]: TResult }\n >;\n }\n\n /**\n * Add parallel works to the workflow.\n * All work names and result types are automatically inferred.\n */\n parallel<\n const TParallelWorks extends readonly IWorkDefinition<\n string,\n TData,\n unknown,\n TWorkResults\n >[],\n >(\n works: TParallelWorks,\n ): Workflow<TData, TWorkResults & ParallelWorksToRecord<TParallelWorks>> {\n this.works.push({\n type: 'parallel',\n works: works as unknown as IWorkDefinition<string, TData, unknown, TWorkResults>[],\n });\n return this as unknown as Workflow<\n TData,\n TWorkResults & ParallelWorksToRecord<TParallelWorks>\n >;\n }\n\n /**\n * Execute the workflow with initial data\n */\n async run(initialData: TData): Promise<IWorkflowResult<TData, TWorkResults>> {\n const startTime = Date.now();\n const context: IWorkflowContext<TData, TWorkResults> = {\n data: initialData,\n workResults: new WorkResultsMap<TWorkResults>(),\n };\n const workResults = new Map<keyof TWorkResults, IWorkResult>();\n\n try {\n for (const workGroup of this.works) {\n if (workGroup.type === 'serial') {\n await this.executeWork(workGroup.works[0], context, workResults);\n } else {\n await this.executeParallelWorks(workGroup.works, context, workResults);\n }\n }\n\n return {\n status: WorkflowStatus.COMPLETED,\n context,\n workResults,\n totalDuration: Date.now() - startTime,\n };\n } catch (error) {\n return {\n status: WorkflowStatus.FAILED,\n context,\n workResults,\n totalDuration: Date.now() - startTime,\n error: error instanceof Error ? 
error : new Error(String(error)),\n };\n }\n }\n\n /**\n * Execute a single work\n */\n private async executeWork(\n \n work: IWorkDefinition<string, TData, any, any>,\n context: IWorkflowContext<TData, TWorkResults>,\n workResults: Map<keyof TWorkResults, IWorkResult>,\n ): Promise<void> {\n const workStartTime = Date.now();\n\n // Check if work should run\n if (work.shouldRun) {\n const shouldRun = await work.shouldRun(context);\n if (!shouldRun) {\n workResults.set(work.name as keyof TWorkResults, {\n status: WorkStatus.SKIPPED,\n duration: Date.now() - workStartTime,\n });\n return;\n }\n }\n\n try {\n const result = await work.execute(context);\n\n // Store result in context for subsequent works\n context.workResults.set(work.name as keyof TWorkResults, result);\n\n workResults.set(work.name as keyof TWorkResults, {\n status: WorkStatus.COMPLETED,\n result,\n duration: Date.now() - workStartTime,\n });\n } catch (error) {\n const err = error instanceof Error ? error : new Error(String(error));\n\n workResults.set(work.name as keyof TWorkResults, {\n status: WorkStatus.FAILED,\n error: err,\n duration: Date.now() - workStartTime,\n });\n\n // Call error handler if provided\n if (work.onError) {\n await work.onError(err, context);\n }\n\n // Re-throw to stop workflow execution\n throw err;\n }\n }\n\n /**\n * Execute multiple works in parallel\n */\n private async executeParallelWorks(\n \n works: IWorkDefinition<string, TData, any, any>[],\n context: IWorkflowContext<TData, TWorkResults>,\n workResults: Map<keyof TWorkResults, IWorkResult>,\n ): Promise<void> {\n const promises = works.map(async (work) => {\n const workStartTime = Date.now();\n\n // Check if work should run\n if (work.shouldRun) {\n const shouldRun = await work.shouldRun(context);\n if (!shouldRun) {\n workResults.set(work.name as keyof TWorkResults, {\n status: WorkStatus.SKIPPED,\n duration: Date.now() - workStartTime,\n });\n return { work, skipped: true };\n }\n }\n\n try {\n const result = await work.execute(context);\n return { work, result, startTime: workStartTime };\n } catch (error) {\n const err = error instanceof Error ? 
error : new Error(String(error));\n return { work, error: err, startTime: workStartTime };\n }\n });\n\n const results = await Promise.all(promises);\n\n // Process results and check for errors\n \n const errors: { work: IWorkDefinition<string, TData, any, any>; error: Error }[] = [];\n\n for (const result of results) {\n if ('skipped' in result && result.skipped) {\n continue;\n }\n\n const duration = Date.now() - result.startTime!;\n\n if ('error' in result && result.error) {\n workResults.set(result.work.name as keyof TWorkResults, {\n status: WorkStatus.FAILED,\n error: result.error,\n duration,\n });\n errors.push({ work: result.work, error: result.error });\n } else {\n context.workResults.set(\n result.work.name as keyof TWorkResults,\n result.result,\n );\n workResults.set(result.work.name as keyof TWorkResults, {\n status: WorkStatus.COMPLETED,\n result: result.result,\n duration,\n });\n }\n }\n\n // Handle errors after all parallel works complete\n if (errors.length > 0) {\n // Call error handlers\n for (const { work, error } of errors) {\n if (work.onError) {\n await work.onError(error, context);\n }\n }\n\n // Throw the first error to stop workflow\n throw errors[0].error;\n }\n }\n}\n\n/**\n * Helper type to extract work results from parallel works array\n * Uses Extract to preserve the specific type for each work name\n */\ntype ParallelWorksToRecord<\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n T extends readonly IWorkDefinition<string, any, any, any>[],\n> = {\n [K in T[number]['name']]: Extract<\n T[number],\n { name: K }\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n > extends IWorkDefinition<string, any, infer R, any>\n ? R\n : never;\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACGO,IAAK,aAAL,kBAAKA,gBAAL;AACL,EAAAA,YAAA,aAAU;AACV,EAAAA,YAAA,aAAU;AACV,EAAAA,YAAA,eAAY;AACZ,EAAAA,YAAA,YAAS;AACT,EAAAA,YAAA,aAAU;AALA,SAAAA;AAAA,GAAA;AAWL,IAAK,iBAAL,kBAAKC,oBAAL;AACL,EAAAA,gBAAA,aAAU;AACV,EAAAA,gBAAA,aAAU;AACV,EAAAA,gBAAA,eAAY;AACZ,EAAAA,gBAAA,YAAS;AAJC,SAAAA;AAAA,GAAA;;;ACAZ,IAAM,iBAAN,MAEA;AAAA,EAFA;AAGE,SAAQ,MAAM,oBAAI,IAAiC;AAAA;AAAA,EAEnD,IAAkC,MAAsC;AACtE,WAAO,KAAK,IAAI,IAAI,IAAI;AAAA,EAC1B;AAAA,EAEA,IAAkC,MAAS,OAA8B;AACvE,SAAK,IAAI,IAAI,MAAM,KAAK;AAAA,EAC1B;AAAA,EAEA,IAAI,MAAmC;AACrC,WAAO,KAAK,IAAI,IAAI,IAAI;AAAA,EAC1B;AACF;AAqCO,IAAM,WAAN,MAGL;AAAA,EAHK;AAIL,SAAQ,QAAyB,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAMlC,OACE,MAC2D;AAC3D,SAAK,MAAM,KAAK;AAAA,MACd,MAAM;AAAA,MACN,OAAO,CAAC,IAAI;AAAA,IACd,CAAC;AACD,WAAO;AAAA,EAIT;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,SAQE,OACuE;AACvE,SAAK,MAAM,KAAK;AAAA,MACd,MAAM;AAAA,MACN;AAAA,IACF,CAAC;AACD,WAAO;AAAA,EAIT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,IAAI,aAAmE;AAC3E,UAAM,YAAY,KAAK,IAAI;AAC3B,UAAM,UAAiD;AAAA,MACrD,MAAM;AAAA,MACN,aAAa,IAAI,eAA6B;AAAA,IAChD;AACA,UAAM,cAAc,oBAAI,IAAqC;AAE7D,QAAI;AACF,iBAAW,aAAa,KAAK,OAAO;AAClC,YAAI,UAAU,SAAS,UAAU;AAC/B,gBAAM,KAAK,YAAY,UAAU,MAAM,CAAC,GAAG,SAAS,WAAW;AAAA,QACjE,OAAO;AACL,gBAAM,KAAK,qBAAqB,UAAU,OAAO,SAAS,WAAW;AAAA,QACvE;AAAA,MACF;AAEA,aAAO;AAAA,QACL;AAAA,QACA;AAAA,QACA;AAAA,QACA,eAAe,KAAK,IAAI,IAAI;AAAA,MAC9B;AAAA,IACF,SAAS,OAAO;AACd,aAAO;AAAA,QACL;AAAA,QACA;AAAA,QACA;AAAA,QACA,eAAe,KAAK,IAAI,IAAI;AAAA,QAC5B,OAAO,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC;AAAA,MACjE;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,YAEZ,MACA,SACA,aACe;AACf,UAAM,gBAAgB,KAAK,IAAI;AAG/B,QAAI,KAAK,WAAW;AAClB,YAAM,YAAY,MAAM,KAAK,UAAU,OAAO;AAC9C,UAAI,CAAC,WAAW;AACd,oBAAY,IAAI,KAAK,MAA4B;AAAA,UAC/C;AAAA,UACA,UAAU,KAAK,IAAI,IAAI;AAAA,QACzB,CAAC;AACD;AAAA,MACF;AAAA
,IACF;AAEA,QAAI;AACF,YAAM,SAAS,MAAM,KAAK,QAAQ,OAAO;AAGzC,cAAQ,YAAY,IAAI,KAAK,MAA4B,MAAM;AAE/D,kBAAY,IAAI,KAAK,MAA4B;AAAA,QAC/C;AAAA,QACA;AAAA,QACA,UAAU,KAAK,IAAI,IAAI;AAAA,MACzB,CAAC;AAAA,IACH,SAAS,OAAO;AACd,YAAM,MAAM,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC;AAEpE,kBAAY,IAAI,KAAK,MAA4B;AAAA,QAC/C;AAAA,QACA,OAAO;AAAA,QACP,UAAU,KAAK,IAAI,IAAI;AAAA,MACzB,CAAC;AAGD,UAAI,KAAK,SAAS;AAChB,cAAM,KAAK,QAAQ,KAAK,OAAO;AAAA,MACjC;AAGA,YAAM;AAAA,IACR;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,qBAEZ,OACA,SACA,aACe;AACf,UAAM,WAAW,MAAM,IAAI,OAAO,SAAS;AACzC,YAAM,gBAAgB,KAAK,IAAI;AAG/B,UAAI,KAAK,WAAW;AAClB,cAAM,YAAY,MAAM,KAAK,UAAU,OAAO;AAC9C,YAAI,CAAC,WAAW;AACd,sBAAY,IAAI,KAAK,MAA4B;AAAA,YAC/C;AAAA,YACA,UAAU,KAAK,IAAI,IAAI;AAAA,UACzB,CAAC;AACD,iBAAO,EAAE,MAAM,SAAS,KAAK;AAAA,QAC/B;AAAA,MACF;AAEA,UAAI;AACF,cAAM,SAAS,MAAM,KAAK,QAAQ,OAAO;AACzC,eAAO,EAAE,MAAM,QAAQ,WAAW,cAAc;AAAA,MAClD,SAAS,OAAO;AACd,cAAM,MAAM,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC;AACpE,eAAO,EAAE,MAAM,OAAO,KAAK,WAAW,cAAc;AAAA,MACtD;AAAA,IACF,CAAC;AAED,UAAM,UAAU,MAAM,QAAQ,IAAI,QAAQ;AAI1C,UAAM,SAA6E,CAAC;AAEpF,eAAW,UAAU,SAAS;AAC5B,UAAI,aAAa,UAAU,OAAO,SAAS;AACzC;AAAA,MACF;AAEA,YAAM,WAAW,KAAK,IAAI,IAAI,OAAO;AAErC,UAAI,WAAW,UAAU,OAAO,OAAO;AACrC,oBAAY,IAAI,OAAO,KAAK,MAA4B;AAAA,UACtD;AAAA,UACA,OAAO,OAAO;AAAA,UACd;AAAA,QACF,CAAC;AACD,eAAO,KAAK,EAAE,MAAM,OAAO,MAAM,OAAO,OAAO,MAAM,CAAC;AAAA,MACxD,OAAO;AACL,gBAAQ,YAAY;AAAA,UAClB,OAAO,KAAK;AAAA,UACZ,OAAO;AAAA,QACT;AACA,oBAAY,IAAI,OAAO,KAAK,MAA4B;AAAA,UACtD;AAAA,UACA,QAAQ,OAAO;AAAA,UACf;AAAA,QACF,CAAC;AAAA,MACH;AAAA,IACF;AAGA,QAAI,OAAO,SAAS,GAAG;AAErB,iBAAW,EAAE,MAAM,MAAM,KAAK,QAAQ;AACpC,YAAI,KAAK,SAAS;AAChB,gBAAM,KAAK,QAAQ,OAAO,OAAO;AAAA,QACnC;AAAA,MACF;AAGA,YAAM,OAAO,CAAC,EAAE;AAAA,IAClB;AAAA,EACF;AACF;","names":["WorkStatus","WorkflowStatus"]}
package/dist/index.mjs ADDED
@@ -0,0 +1,194 @@
1
+ // src/workflow.types.ts
2
+ var WorkStatus = /* @__PURE__ */ ((WorkStatus2) => {
3
+ WorkStatus2["PENDING"] = "pending";
4
+ WorkStatus2["RUNNING"] = "running";
5
+ WorkStatus2["COMPLETED"] = "completed";
6
+ WorkStatus2["FAILED"] = "failed";
7
+ WorkStatus2["SKIPPED"] = "skipped";
8
+ return WorkStatus2;
9
+ })(WorkStatus || {});
10
+ var WorkflowStatus = /* @__PURE__ */ ((WorkflowStatus2) => {
11
+ WorkflowStatus2["PENDING"] = "pending";
12
+ WorkflowStatus2["RUNNING"] = "running";
13
+ WorkflowStatus2["COMPLETED"] = "completed";
14
+ WorkflowStatus2["FAILED"] = "failed";
15
+ return WorkflowStatus2;
16
+ })(WorkflowStatus || {});
17
+
18
+ // src/workflow.ts
19
+ var WorkResultsMap = class {
20
+ constructor() {
21
+ this.map = /* @__PURE__ */ new Map();
22
+ }
23
+ get(name) {
24
+ return this.map.get(name);
25
+ }
26
+ set(name, value) {
27
+ this.map.set(name, value);
28
+ }
29
+ has(name) {
30
+ return this.map.has(name);
31
+ }
32
+ };
33
+ var Workflow = class {
34
+ constructor() {
35
+ this.works = [];
36
+ }
37
+ /**
38
+ * Add a serial work to the workflow.
39
+ * The work name and result type are automatically inferred.
40
+ */
41
+ serial(work) {
42
+ this.works.push({
43
+ type: "serial",
44
+ works: [work]
45
+ });
46
+ return this;
47
+ }
48
+ /**
49
+ * Add parallel works to the workflow.
50
+ * All work names and result types are automatically inferred.
51
+ */
52
+ parallel(works) {
53
+ this.works.push({
54
+ type: "parallel",
55
+ works
56
+ });
57
+ return this;
58
+ }
59
+ /**
60
+ * Execute the workflow with initial data
61
+ */
62
+ async run(initialData) {
63
+ const startTime = Date.now();
64
+ const context = {
65
+ data: initialData,
66
+ workResults: new WorkResultsMap()
67
+ };
68
+ const workResults = /* @__PURE__ */ new Map();
69
+ try {
70
+ for (const workGroup of this.works) {
71
+ if (workGroup.type === "serial") {
72
+ await this.executeWork(workGroup.works[0], context, workResults);
73
+ } else {
74
+ await this.executeParallelWorks(workGroup.works, context, workResults);
75
+ }
76
+ }
77
+ return {
78
+ status: "completed" /* COMPLETED */,
79
+ context,
80
+ workResults,
81
+ totalDuration: Date.now() - startTime
82
+ };
83
+ } catch (error) {
84
+ return {
85
+ status: "failed" /* FAILED */,
86
+ context,
87
+ workResults,
88
+ totalDuration: Date.now() - startTime,
89
+ error: error instanceof Error ? error : new Error(String(error))
90
+ };
91
+ }
92
+ }
93
+ /**
94
+ * Execute a single work
95
+ */
96
+ async executeWork(work, context, workResults) {
97
+ const workStartTime = Date.now();
98
+ if (work.shouldRun) {
99
+ const shouldRun = await work.shouldRun(context);
100
+ if (!shouldRun) {
101
+ workResults.set(work.name, {
102
+ status: "skipped" /* SKIPPED */,
103
+ duration: Date.now() - workStartTime
104
+ });
105
+ return;
106
+ }
107
+ }
108
+ try {
109
+ const result = await work.execute(context);
110
+ context.workResults.set(work.name, result);
111
+ workResults.set(work.name, {
112
+ status: "completed" /* COMPLETED */,
113
+ result,
114
+ duration: Date.now() - workStartTime
115
+ });
116
+ } catch (error) {
117
+ const err = error instanceof Error ? error : new Error(String(error));
118
+ workResults.set(work.name, {
119
+ status: "failed" /* FAILED */,
120
+ error: err,
121
+ duration: Date.now() - workStartTime
122
+ });
123
+ if (work.onError) {
124
+ await work.onError(err, context);
125
+ }
126
+ throw err;
127
+ }
128
+ }
129
+ /**
130
+ * Execute multiple works in parallel
131
+ */
132
+ async executeParallelWorks(works, context, workResults) {
133
+ const promises = works.map(async (work) => {
134
+ const workStartTime = Date.now();
135
+ if (work.shouldRun) {
136
+ const shouldRun = await work.shouldRun(context);
137
+ if (!shouldRun) {
138
+ workResults.set(work.name, {
139
+ status: "skipped" /* SKIPPED */,
140
+ duration: Date.now() - workStartTime
141
+ });
142
+ return { work, skipped: true };
143
+ }
144
+ }
145
+ try {
146
+ const result = await work.execute(context);
147
+ return { work, result, startTime: workStartTime };
148
+ } catch (error) {
149
+ const err = error instanceof Error ? error : new Error(String(error));
150
+ return { work, error: err, startTime: workStartTime };
151
+ }
152
+ });
153
+ const results = await Promise.all(promises);
154
+ const errors = [];
155
+ for (const result of results) {
156
+ if ("skipped" in result && result.skipped) {
157
+ continue;
158
+ }
159
+ const duration = Date.now() - result.startTime;
160
+ if ("error" in result && result.error) {
161
+ workResults.set(result.work.name, {
162
+ status: "failed" /* FAILED */,
163
+ error: result.error,
164
+ duration
165
+ });
166
+ errors.push({ work: result.work, error: result.error });
167
+ } else {
168
+ context.workResults.set(
169
+ result.work.name,
170
+ result.result
171
+ );
172
+ workResults.set(result.work.name, {
173
+ status: "completed" /* COMPLETED */,
174
+ result: result.result,
175
+ duration
176
+ });
177
+ }
178
+ }
179
+ if (errors.length > 0) {
180
+ for (const { work, error } of errors) {
181
+ if (work.onError) {
182
+ await work.onError(error, context);
183
+ }
184
+ }
185
+ throw errors[0].error;
186
+ }
187
+ }
188
+ };
189
+ export {
190
+ WorkStatus,
191
+ Workflow,
192
+ WorkflowStatus
193
+ };
194
+ //# sourceMappingURL=index.mjs.map
package/dist/index.mjs.map ADDED
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/workflow.types.ts","../src/workflow.ts"],"sourcesContent":["/**\n * Work Status\n */\nexport enum WorkStatus {\n PENDING = 'pending',\n RUNNING = 'running',\n COMPLETED = 'completed',\n FAILED = 'failed',\n SKIPPED = 'skipped',\n}\n\n/**\n * Workflow Status\n */\nexport enum WorkflowStatus {\n PENDING = 'pending',\n RUNNING = 'running',\n COMPLETED = 'completed',\n FAILED = 'failed',\n}\n\n/**\n * Context passed between workflow works\n * TData is the type of shared data between works\n * TWorkResults is a record mapping work names to their result types\n */\nexport interface IWorkflowContext<\n TData = Record<string, unknown>,\n TWorkResults extends Record<string, unknown> = Record<string, unknown>,\n> {\n /** Shared data between works */\n data: TData;\n /** Work-specific results keyed by work name with inferred types */\n workResults: IWorkResultsMap<TWorkResults>;\n}\n\n/**\n * Type-safe map for work results with automatic type inference\n */\nexport interface IWorkResultsMap<\n TWorkResults extends Record<string, unknown> = Record<string, unknown>,\n> {\n get<K extends keyof TWorkResults>(name: K): TWorkResults[K] | undefined;\n set<K extends keyof TWorkResults>(name: K, value: TWorkResults[K]): void;\n has(name: keyof TWorkResults): boolean;\n}\n\n/**\n * Result of a single work execution\n */\nexport interface IWorkResult<TResult = unknown> {\n status: WorkStatus;\n result?: TResult;\n error?: Error;\n duration: number;\n}\n\n/**\n * Definition of a work with inferred name and result type\n */\nexport interface IWorkDefinition<\n TName extends string,\n TData = Record<string, unknown>,\n TResult = unknown,\n TAvailableWorkResults extends Record<string, unknown> = Record<string, unknown>,\n> {\n /** Unique name for the work */\n name: TName;\n /** Execute function - receives context and returns result */\n execute: (\n context: IWorkflowContext<TData, TAvailableWorkResults>,\n ) => Promise<TResult>;\n /** Optional: condition to determine if work should run */\n shouldRun?: (\n context: IWorkflowContext<TData, TAvailableWorkResults>,\n ) => boolean | Promise<boolean>;\n /** Optional: called when work fails */\n onError?: (\n error: Error,\n context: IWorkflowContext<TData, TAvailableWorkResults>,\n ) => void | Promise<void>;\n}\n\n/**\n * Internal work representation\n */\nexport interface IWorkflowWork {\n type: 'serial' | 'parallel';\n \n works: IWorkDefinition<string, any, any, any>[];\n}\n\n/**\n * Result of workflow execution\n */\nexport interface IWorkflowResult<\n TData = Record<string, unknown>,\n TWorkResults extends Record<string, unknown> = Record<string, unknown>,\n> {\n status: WorkflowStatus;\n context: IWorkflowContext<TData, TWorkResults>;\n workResults: Map<keyof TWorkResults, IWorkResult>;\n totalDuration: number;\n error?: Error;\n}\n","import {\n IWorkflowContext,\n IWorkflowResult,\n IWorkResultsMap,\n WorkflowStatus,\n IWorkflowWork,\n IWorkDefinition,\n IWorkResult,\n WorkStatus,\n} from './workflow.types';\n\n/**\n * Internal implementation of IWorkResultsMap using a Map\n */\nclass WorkResultsMap<TWorkResults extends Record<string, unknown>>\n implements IWorkResultsMap<TWorkResults>\n{\n private map = new Map<keyof TWorkResults, unknown>();\n\n get<K extends keyof TWorkResults>(name: K): TWorkResults[K] | undefined {\n return this.map.get(name) as TWorkResults[K] | undefined;\n }\n\n set<K extends keyof TWorkResults>(name: K, value: TWorkResults[K]): void {\n this.map.set(name, value);\n }\n\n has(name: keyof TWorkResults): 
boolean {\n return this.map.has(name);\n }\n}\n\n/**\n * A simple, extensible workflow engine that supports serial and parallel work execution.\n * Work names and result types are automatically inferred from the workflow definition.\n *\n * @example\n * ```typescript\n * const workflow = new Workflow<{ userId: string }>()\n * .serial({\n * name: 'validate',\n * execute: async (ctx) => true, // returns boolean\n * })\n * .parallel([\n * {\n * name: 'fetchOrders',\n * execute: async (ctx) => [{ id: 1 }], // returns Order[]\n * },\n * {\n * name: 'fetchProfile',\n * execute: async (ctx) => ({ name: 'John' }), // returns Profile\n * },\n * ])\n * .serial({\n * name: 'process',\n * execute: async (ctx) => {\n * // ✅ Autocomplete for names AND types are inferred!\n * const isValid = ctx.workResults.get('validate'); // boolean | undefined\n * const orders = ctx.workResults.get('fetchOrders'); // Order[] | undefined\n * const profile = ctx.workResults.get('fetchProfile'); // Profile | undefined\n * return { orders, profile };\n * },\n * });\n *\n * const result = await workflow.run({ userId: '123' });\n * ```\n */\nexport class Workflow<\n TData = Record<string, unknown>,\n TWorkResults extends Record<string, unknown> = NonNullable<unknown>,\n> {\n private works: IWorkflowWork[] = [];\n\n /**\n * Add a serial work to the workflow.\n * The work name and result type are automatically inferred.\n */\n serial<TName extends string, TResult>(\n work: IWorkDefinition<TName, TData, TResult, TWorkResults>,\n ): Workflow<TData, TWorkResults & { [K in TName]: TResult }> {\n this.works.push({\n type: 'serial',\n works: [work],\n });\n return this as unknown as Workflow<\n TData,\n TWorkResults & { [K in TName]: TResult }\n >;\n }\n\n /**\n * Add parallel works to the workflow.\n * All work names and result types are automatically inferred.\n */\n parallel<\n const TParallelWorks extends readonly IWorkDefinition<\n string,\n TData,\n unknown,\n TWorkResults\n >[],\n >(\n works: TParallelWorks,\n ): Workflow<TData, TWorkResults & ParallelWorksToRecord<TParallelWorks>> {\n this.works.push({\n type: 'parallel',\n works: works as unknown as IWorkDefinition<string, TData, unknown, TWorkResults>[],\n });\n return this as unknown as Workflow<\n TData,\n TWorkResults & ParallelWorksToRecord<TParallelWorks>\n >;\n }\n\n /**\n * Execute the workflow with initial data\n */\n async run(initialData: TData): Promise<IWorkflowResult<TData, TWorkResults>> {\n const startTime = Date.now();\n const context: IWorkflowContext<TData, TWorkResults> = {\n data: initialData,\n workResults: new WorkResultsMap<TWorkResults>(),\n };\n const workResults = new Map<keyof TWorkResults, IWorkResult>();\n\n try {\n for (const workGroup of this.works) {\n if (workGroup.type === 'serial') {\n await this.executeWork(workGroup.works[0], context, workResults);\n } else {\n await this.executeParallelWorks(workGroup.works, context, workResults);\n }\n }\n\n return {\n status: WorkflowStatus.COMPLETED,\n context,\n workResults,\n totalDuration: Date.now() - startTime,\n };\n } catch (error) {\n return {\n status: WorkflowStatus.FAILED,\n context,\n workResults,\n totalDuration: Date.now() - startTime,\n error: error instanceof Error ? 
error : new Error(String(error)),\n };\n }\n }\n\n /**\n * Execute a single work\n */\n private async executeWork(\n \n work: IWorkDefinition<string, TData, any, any>,\n context: IWorkflowContext<TData, TWorkResults>,\n workResults: Map<keyof TWorkResults, IWorkResult>,\n ): Promise<void> {\n const workStartTime = Date.now();\n\n // Check if work should run\n if (work.shouldRun) {\n const shouldRun = await work.shouldRun(context);\n if (!shouldRun) {\n workResults.set(work.name as keyof TWorkResults, {\n status: WorkStatus.SKIPPED,\n duration: Date.now() - workStartTime,\n });\n return;\n }\n }\n\n try {\n const result = await work.execute(context);\n\n // Store result in context for subsequent works\n context.workResults.set(work.name as keyof TWorkResults, result);\n\n workResults.set(work.name as keyof TWorkResults, {\n status: WorkStatus.COMPLETED,\n result,\n duration: Date.now() - workStartTime,\n });\n } catch (error) {\n const err = error instanceof Error ? error : new Error(String(error));\n\n workResults.set(work.name as keyof TWorkResults, {\n status: WorkStatus.FAILED,\n error: err,\n duration: Date.now() - workStartTime,\n });\n\n // Call error handler if provided\n if (work.onError) {\n await work.onError(err, context);\n }\n\n // Re-throw to stop workflow execution\n throw err;\n }\n }\n\n /**\n * Execute multiple works in parallel\n */\n private async executeParallelWorks(\n \n works: IWorkDefinition<string, TData, any, any>[],\n context: IWorkflowContext<TData, TWorkResults>,\n workResults: Map<keyof TWorkResults, IWorkResult>,\n ): Promise<void> {\n const promises = works.map(async (work) => {\n const workStartTime = Date.now();\n\n // Check if work should run\n if (work.shouldRun) {\n const shouldRun = await work.shouldRun(context);\n if (!shouldRun) {\n workResults.set(work.name as keyof TWorkResults, {\n status: WorkStatus.SKIPPED,\n duration: Date.now() - workStartTime,\n });\n return { work, skipped: true };\n }\n }\n\n try {\n const result = await work.execute(context);\n return { work, result, startTime: workStartTime };\n } catch (error) {\n const err = error instanceof Error ? 
error : new Error(String(error));\n return { work, error: err, startTime: workStartTime };\n }\n });\n\n const results = await Promise.all(promises);\n\n // Process results and check for errors\n \n const errors: { work: IWorkDefinition<string, TData, any, any>; error: Error }[] = [];\n\n for (const result of results) {\n if ('skipped' in result && result.skipped) {\n continue;\n }\n\n const duration = Date.now() - result.startTime!;\n\n if ('error' in result && result.error) {\n workResults.set(result.work.name as keyof TWorkResults, {\n status: WorkStatus.FAILED,\n error: result.error,\n duration,\n });\n errors.push({ work: result.work, error: result.error });\n } else {\n context.workResults.set(\n result.work.name as keyof TWorkResults,\n result.result,\n );\n workResults.set(result.work.name as keyof TWorkResults, {\n status: WorkStatus.COMPLETED,\n result: result.result,\n duration,\n });\n }\n }\n\n // Handle errors after all parallel works complete\n if (errors.length > 0) {\n // Call error handlers\n for (const { work, error } of errors) {\n if (work.onError) {\n await work.onError(error, context);\n }\n }\n\n // Throw the first error to stop workflow\n throw errors[0].error;\n }\n }\n}\n\n/**\n * Helper type to extract work results from parallel works array\n * Uses Extract to preserve the specific type for each work name\n */\ntype ParallelWorksToRecord<\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n T extends readonly IWorkDefinition<string, any, any, any>[],\n> = {\n [K in T[number]['name']]: Extract<\n T[number],\n { name: K }\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n > extends IWorkDefinition<string, any, infer R, any>\n ? R\n : never;\n};\n"],"mappings":";AAGO,IAAK,aAAL,kBAAKA,gBAAL;AACL,EAAAA,YAAA,aAAU;AACV,EAAAA,YAAA,aAAU;AACV,EAAAA,YAAA,eAAY;AACZ,EAAAA,YAAA,YAAS;AACT,EAAAA,YAAA,aAAU;AALA,SAAAA;AAAA,GAAA;AAWL,IAAK,iBAAL,kBAAKC,oBAAL;AACL,EAAAA,gBAAA,aAAU;AACV,EAAAA,gBAAA,aAAU;AACV,EAAAA,gBAAA,eAAY;AACZ,EAAAA,gBAAA,YAAS;AAJC,SAAAA;AAAA,GAAA;;;ACAZ,IAAM,iBAAN,MAEA;AAAA,EAFA;AAGE,SAAQ,MAAM,oBAAI,IAAiC;AAAA;AAAA,EAEnD,IAAkC,MAAsC;AACtE,WAAO,KAAK,IAAI,IAAI,IAAI;AAAA,EAC1B;AAAA,EAEA,IAAkC,MAAS,OAA8B;AACvE,SAAK,IAAI,IAAI,MAAM,KAAK;AAAA,EAC1B;AAAA,EAEA,IAAI,MAAmC;AACrC,WAAO,KAAK,IAAI,IAAI,IAAI;AAAA,EAC1B;AACF;AAqCO,IAAM,WAAN,MAGL;AAAA,EAHK;AAIL,SAAQ,QAAyB,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAMlC,OACE,MAC2D;AAC3D,SAAK,MAAM,KAAK;AAAA,MACd,MAAM;AAAA,MACN,OAAO,CAAC,IAAI;AAAA,IACd,CAAC;AACD,WAAO;AAAA,EAIT;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,SAQE,OACuE;AACvE,SAAK,MAAM,KAAK;AAAA,MACd,MAAM;AAAA,MACN;AAAA,IACF,CAAC;AACD,WAAO;AAAA,EAIT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,IAAI,aAAmE;AAC3E,UAAM,YAAY,KAAK,IAAI;AAC3B,UAAM,UAAiD;AAAA,MACrD,MAAM;AAAA,MACN,aAAa,IAAI,eAA6B;AAAA,IAChD;AACA,UAAM,cAAc,oBAAI,IAAqC;AAE7D,QAAI;AACF,iBAAW,aAAa,KAAK,OAAO;AAClC,YAAI,UAAU,SAAS,UAAU;AAC/B,gBAAM,KAAK,YAAY,UAAU,MAAM,CAAC,GAAG,SAAS,WAAW;AAAA,QACjE,OAAO;AACL,gBAAM,KAAK,qBAAqB,UAAU,OAAO,SAAS,WAAW;AAAA,QACvE;AAAA,MACF;AAEA,aAAO;AAAA,QACL;AAAA,QACA;AAAA,QACA;AAAA,QACA,eAAe,KAAK,IAAI,IAAI;AAAA,MAC9B;AAAA,IACF,SAAS,OAAO;AACd,aAAO;AAAA,QACL;AAAA,QACA;AAAA,QACA;AAAA,QACA,eAAe,KAAK,IAAI,IAAI;AAAA,QAC5B,OAAO,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC;AAAA,MACjE;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,YAEZ,MACA,SACA,aACe;AACf,UAAM,gBAAgB,KAAK,IAAI;AAG/B,QAAI,KAAK,WAAW;AAClB,YAAM,YAAY,MAAM,KAAK,UAAU,OAAO;AAC9C,UAAI,CAAC,WAAW;AACd,oBAAY,IAAI,KAAK,MAA4B;AAAA,UAC/C;AAAA,UACA,UAAU,KAAK,IAAI,IAAI;AAAA,QACzB,CAAC;AACD;AAAA,MACF;AAAA,IACF;AAEA,QAAI;AACF,YAAM,SAAS,MAAM,KAAK,QAAQ,OAAO;AAGzC
,cAAQ,YAAY,IAAI,KAAK,MAA4B,MAAM;AAE/D,kBAAY,IAAI,KAAK,MAA4B;AAAA,QAC/C;AAAA,QACA;AAAA,QACA,UAAU,KAAK,IAAI,IAAI;AAAA,MACzB,CAAC;AAAA,IACH,SAAS,OAAO;AACd,YAAM,MAAM,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC;AAEpE,kBAAY,IAAI,KAAK,MAA4B;AAAA,QAC/C;AAAA,QACA,OAAO;AAAA,QACP,UAAU,KAAK,IAAI,IAAI;AAAA,MACzB,CAAC;AAGD,UAAI,KAAK,SAAS;AAChB,cAAM,KAAK,QAAQ,KAAK,OAAO;AAAA,MACjC;AAGA,YAAM;AAAA,IACR;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,qBAEZ,OACA,SACA,aACe;AACf,UAAM,WAAW,MAAM,IAAI,OAAO,SAAS;AACzC,YAAM,gBAAgB,KAAK,IAAI;AAG/B,UAAI,KAAK,WAAW;AAClB,cAAM,YAAY,MAAM,KAAK,UAAU,OAAO;AAC9C,YAAI,CAAC,WAAW;AACd,sBAAY,IAAI,KAAK,MAA4B;AAAA,YAC/C;AAAA,YACA,UAAU,KAAK,IAAI,IAAI;AAAA,UACzB,CAAC;AACD,iBAAO,EAAE,MAAM,SAAS,KAAK;AAAA,QAC/B;AAAA,MACF;AAEA,UAAI;AACF,cAAM,SAAS,MAAM,KAAK,QAAQ,OAAO;AACzC,eAAO,EAAE,MAAM,QAAQ,WAAW,cAAc;AAAA,MAClD,SAAS,OAAO;AACd,cAAM,MAAM,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC;AACpE,eAAO,EAAE,MAAM,OAAO,KAAK,WAAW,cAAc;AAAA,MACtD;AAAA,IACF,CAAC;AAED,UAAM,UAAU,MAAM,QAAQ,IAAI,QAAQ;AAI1C,UAAM,SAA6E,CAAC;AAEpF,eAAW,UAAU,SAAS;AAC5B,UAAI,aAAa,UAAU,OAAO,SAAS;AACzC;AAAA,MACF;AAEA,YAAM,WAAW,KAAK,IAAI,IAAI,OAAO;AAErC,UAAI,WAAW,UAAU,OAAO,OAAO;AACrC,oBAAY,IAAI,OAAO,KAAK,MAA4B;AAAA,UACtD;AAAA,UACA,OAAO,OAAO;AAAA,UACd;AAAA,QACF,CAAC;AACD,eAAO,KAAK,EAAE,MAAM,OAAO,MAAM,OAAO,OAAO,MAAM,CAAC;AAAA,MACxD,OAAO;AACL,gBAAQ,YAAY;AAAA,UAClB,OAAO,KAAK;AAAA,UACZ,OAAO;AAAA,QACT;AACA,oBAAY,IAAI,OAAO,KAAK,MAA4B;AAAA,UACtD;AAAA,UACA,QAAQ,OAAO;AAAA,UACf;AAAA,QACF,CAAC;AAAA,MACH;AAAA,IACF;AAGA,QAAI,OAAO,SAAS,GAAG;AAErB,iBAAW,EAAE,MAAM,MAAM,KAAK,QAAQ;AACpC,YAAI,KAAK,SAAS;AAChB,gBAAM,KAAK,QAAQ,OAAO,OAAO;AAAA,QACnC;AAAA,MACF;AAGA,YAAM,OAAO,CAAC,EAAE;AAAA,IAClB;AAAA,EACF;AACF;","names":["WorkStatus","WorkflowStatus"]}
package/package.json ADDED
@@ -0,0 +1,57 @@
1
+ {
2
+ "name": "@yigitahmetsahin/workflow-ts",
3
+ "version": "1.0.0",
4
+ "description": "A simple, extensible TypeScript workflow engine supporting serial and parallel work execution with full type inference",
5
+ "main": "dist/index.js",
6
+ "module": "dist/index.mjs",
7
+ "types": "dist/index.d.ts",
8
+ "exports": {
9
+ ".": {
10
+ "types": "./dist/index.d.ts",
11
+ "import": "./dist/index.mjs",
12
+ "require": "./dist/index.js"
13
+ }
14
+ },
15
+ "files": [
16
+ "dist"
17
+ ],
18
+ "scripts": {
19
+ "build": "tsup",
20
+ "test": "vitest run",
21
+ "test:watch": "vitest",
22
+ "test:coverage": "vitest run --coverage",
23
+ "lint": "tsc --noEmit",
24
+ "prepublishOnly": "npm run build"
25
+ },
26
+ "keywords": [
27
+ "workflow",
28
+ "typescript",
29
+ "serial",
30
+ "parallel",
31
+ "pipeline",
32
+ "task",
33
+ "orchestration",
34
+ "async"
35
+ ],
36
+ "author": "yigitahmetsahin",
37
+ "license": "MIT",
38
+ "repository": {
39
+ "type": "git",
40
+ "url": "git+https://github.com/yigitahmetsahin/workflow-ts.git"
41
+ },
42
+ "bugs": {
43
+ "url": "https://github.com/yigitahmetsahin/workflow-ts/issues"
44
+ },
45
+ "homepage": "https://github.com/yigitahmetsahin/workflow-ts#readme",
46
+ "devDependencies": {
47
+ "tsup": "^8.0.0",
48
+ "typescript": "^5.3.0",
49
+ "vitest": "^2.0.0"
50
+ },
51
+ "engines": {
52
+ "node": ">=18"
53
+ },
54
+ "publishConfig": {
55
+ "access": "public"
56
+ }
57
+ }
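
The `exports` map above ships a dual CommonJS/ESM build. A minimal sketch of consuming each format (assumptions: Node >= 18 per `engines`, and TypeScript resolving `dist/index.d.ts` through the `types` condition):

```typescript
// In an ES module, Node resolves the "import" condition to dist/index.mjs:
import { Workflow } from '@yigitahmetsahin/workflow-ts';

// In a CommonJS module, the "require" condition resolves to dist/index.js:
// const { Workflow } = require('@yigitahmetsahin/workflow-ts');

const wf = new Workflow<{ id: string }>().serial({
  name: 'hello',
  execute: async (ctx) => `hello ${ctx.data.id}`,
});
```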