flowcraft 2.7.1 → 2.8.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (229) hide show
  1. package/README.md +1 -1
  2. package/dist/index-D3dyjW2G.d.mts +1269 -0
  3. package/dist/index.d.mts +2 -0
  4. package/dist/index.mjs +727 -0
  5. package/dist/index.mjs.map +1 -0
  6. package/dist/replay-BB11M6K1.mjs +107 -0
  7. package/dist/replay-BB11M6K1.mjs.map +1 -0
  8. package/dist/runtime-CmefIAu_.mjs +2216 -0
  9. package/dist/runtime-CmefIAu_.mjs.map +1 -0
  10. package/dist/{sdk.d.ts → sdk.d.mts} +14 -12
  11. package/dist/sdk.mjs +29 -0
  12. package/dist/sdk.mjs.map +1 -0
  13. package/dist/testing/index.d.mts +172 -0
  14. package/dist/testing/index.mjs +277 -0
  15. package/dist/testing/index.mjs.map +1 -0
  16. package/package.json +59 -59
  17. package/LICENSE +0 -21
  18. package/dist/adapters/index.d.ts +0 -4
  19. package/dist/adapters/index.js +0 -4
  20. package/dist/adapters/index.js.map +0 -1
  21. package/dist/adapters/persistent-event-bus.d.ts +0 -69
  22. package/dist/adapters/persistent-event-bus.js +0 -3
  23. package/dist/adapters/persistent-event-bus.js.map +0 -1
  24. package/dist/analysis.d.ts +0 -53
  25. package/dist/analysis.js +0 -3
  26. package/dist/analysis.js.map +0 -1
  27. package/dist/chunk-27STBUGG.js +0 -44
  28. package/dist/chunk-27STBUGG.js.map +0 -1
  29. package/dist/chunk-2TSADFQX.js +0 -46
  30. package/dist/chunk-2TSADFQX.js.map +0 -1
  31. package/dist/chunk-3Y5O5EGB.js +0 -3
  32. package/dist/chunk-3Y5O5EGB.js.map +0 -1
  33. package/dist/chunk-4PELJWF7.js +0 -29
  34. package/dist/chunk-4PELJWF7.js.map +0 -1
  35. package/dist/chunk-55J6XMHW.js +0 -3
  36. package/dist/chunk-55J6XMHW.js.map +0 -1
  37. package/dist/chunk-6RKHCJUU.js +0 -29
  38. package/dist/chunk-6RKHCJUU.js.map +0 -1
  39. package/dist/chunk-7EBKWATZ.js +0 -86
  40. package/dist/chunk-7EBKWATZ.js.map +0 -1
  41. package/dist/chunk-7EMUOH77.js +0 -90
  42. package/dist/chunk-7EMUOH77.js.map +0 -1
  43. package/dist/chunk-7M6FHFHP.js +0 -25
  44. package/dist/chunk-7M6FHFHP.js.map +0 -1
  45. package/dist/chunk-AKDL2ZX7.js +0 -287
  46. package/dist/chunk-AKDL2ZX7.js.map +0 -1
  47. package/dist/chunk-BC4G7OM6.js +0 -42
  48. package/dist/chunk-BC4G7OM6.js.map +0 -1
  49. package/dist/chunk-BCMR7Y4U.js +0 -76
  50. package/dist/chunk-BCMR7Y4U.js.map +0 -1
  51. package/dist/chunk-BCRWXTWX.js +0 -21
  52. package/dist/chunk-BCRWXTWX.js.map +0 -1
  53. package/dist/chunk-BEHVGFIM.js +0 -532
  54. package/dist/chunk-BEHVGFIM.js.map +0 -1
  55. package/dist/chunk-DL7KVYZF.js +0 -39
  56. package/dist/chunk-DL7KVYZF.js.map +0 -1
  57. package/dist/chunk-DV2CXHOY.js +0 -74
  58. package/dist/chunk-DV2CXHOY.js.map +0 -1
  59. package/dist/chunk-H4JTZYIT.js +0 -172
  60. package/dist/chunk-H4JTZYIT.js.map +0 -1
  61. package/dist/chunk-HFJXYY4E.js +0 -3
  62. package/dist/chunk-HFJXYY4E.js.map +0 -1
  63. package/dist/chunk-HNHM3FDK.js +0 -52
  64. package/dist/chunk-HNHM3FDK.js.map +0 -1
  65. package/dist/chunk-HXSK5P2X.js +0 -150
  66. package/dist/chunk-HXSK5P2X.js.map +0 -1
  67. package/dist/chunk-I53JB2KW.js +0 -26
  68. package/dist/chunk-I53JB2KW.js.map +0 -1
  69. package/dist/chunk-IDTYHLDQ.js +0 -16
  70. package/dist/chunk-IDTYHLDQ.js.map +0 -1
  71. package/dist/chunk-IKOTX22J.js +0 -85
  72. package/dist/chunk-IKOTX22J.js.map +0 -1
  73. package/dist/chunk-L3MX5MTA.js +0 -33
  74. package/dist/chunk-L3MX5MTA.js.map +0 -1
  75. package/dist/chunk-L46TQXCV.js +0 -144
  76. package/dist/chunk-L46TQXCV.js.map +0 -1
  77. package/dist/chunk-LM4ACVHL.js +0 -73
  78. package/dist/chunk-LM4ACVHL.js.map +0 -1
  79. package/dist/chunk-LNK7LZER.js +0 -51
  80. package/dist/chunk-LNK7LZER.js.map +0 -1
  81. package/dist/chunk-N63S5NEG.js +0 -107
  82. package/dist/chunk-N63S5NEG.js.map +0 -1
  83. package/dist/chunk-NVLZFLYM.js +0 -3
  84. package/dist/chunk-NVLZFLYM.js.map +0 -1
  85. package/dist/chunk-ONH7PIJZ.js +0 -300
  86. package/dist/chunk-ONH7PIJZ.js.map +0 -1
  87. package/dist/chunk-PH2IYZHV.js +0 -48
  88. package/dist/chunk-PH2IYZHV.js.map +0 -1
  89. package/dist/chunk-RAZWRNAJ.js +0 -54
  90. package/dist/chunk-RAZWRNAJ.js.map +0 -1
  91. package/dist/chunk-RM677CNU.js +0 -52
  92. package/dist/chunk-RM677CNU.js.map +0 -1
  93. package/dist/chunk-TKSSRS5U.js +0 -39
  94. package/dist/chunk-TKSSRS5U.js.map +0 -1
  95. package/dist/chunk-U7DKCIWT.js +0 -340
  96. package/dist/chunk-U7DKCIWT.js.map +0 -1
  97. package/dist/chunk-UNORA7EM.js +0 -103
  98. package/dist/chunk-UNORA7EM.js.map +0 -1
  99. package/dist/chunk-WWGFIYKW.js +0 -47
  100. package/dist/chunk-WWGFIYKW.js.map +0 -1
  101. package/dist/chunk-XZZWIJ4G.js +0 -25
  102. package/dist/chunk-XZZWIJ4G.js.map +0 -1
  103. package/dist/chunk-ZETQCNEF.js +0 -139
  104. package/dist/chunk-ZETQCNEF.js.map +0 -1
  105. package/dist/chunk-ZLW4QOTS.js +0 -192
  106. package/dist/chunk-ZLW4QOTS.js.map +0 -1
  107. package/dist/container-factory.d.ts +0 -17
  108. package/dist/container-factory.js +0 -13
  109. package/dist/container-factory.js.map +0 -1
  110. package/dist/container.d.ts +0 -23
  111. package/dist/container.js +0 -3
  112. package/dist/container.js.map +0 -1
  113. package/dist/context.d.ts +0 -65
  114. package/dist/context.js +0 -3
  115. package/dist/context.js.map +0 -1
  116. package/dist/error-mapper.d.ts +0 -15
  117. package/dist/error-mapper.js +0 -4
  118. package/dist/error-mapper.js.map +0 -1
  119. package/dist/errors.d.ts +0 -20
  120. package/dist/errors.js +0 -3
  121. package/dist/errors.js.map +0 -1
  122. package/dist/evaluator.d.ts +0 -32
  123. package/dist/evaluator.js +0 -3
  124. package/dist/evaluator.js.map +0 -1
  125. package/dist/flow.d.ts +0 -85
  126. package/dist/flow.js +0 -4
  127. package/dist/flow.js.map +0 -1
  128. package/dist/index.d.ts +0 -18
  129. package/dist/index.js +0 -38
  130. package/dist/index.js.map +0 -1
  131. package/dist/linter.d.ts +0 -26
  132. package/dist/linter.js +0 -4
  133. package/dist/linter.js.map +0 -1
  134. package/dist/logger.d.ts +0 -20
  135. package/dist/logger.js +0 -3
  136. package/dist/logger.js.map +0 -1
  137. package/dist/node.d.ts +0 -3
  138. package/dist/node.js +0 -3
  139. package/dist/node.js.map +0 -1
  140. package/dist/nodes/batch-gather.d.ts +0 -9
  141. package/dist/nodes/batch-gather.js +0 -4
  142. package/dist/nodes/batch-gather.js.map +0 -1
  143. package/dist/nodes/batch-scatter.d.ts +0 -9
  144. package/dist/nodes/batch-scatter.js +0 -4
  145. package/dist/nodes/batch-scatter.js.map +0 -1
  146. package/dist/nodes/sleep.d.ts +0 -9
  147. package/dist/nodes/sleep.js +0 -4
  148. package/dist/nodes/sleep.js.map +0 -1
  149. package/dist/nodes/subflow.d.ts +0 -9
  150. package/dist/nodes/subflow.js +0 -10
  151. package/dist/nodes/subflow.js.map +0 -1
  152. package/dist/nodes/wait.d.ts +0 -9
  153. package/dist/nodes/wait.js +0 -4
  154. package/dist/nodes/wait.js.map +0 -1
  155. package/dist/nodes/webhook.d.ts +0 -13
  156. package/dist/nodes/webhook.js +0 -4
  157. package/dist/nodes/webhook.js.map +0 -1
  158. package/dist/runtime/adapter.d.ts +0 -114
  159. package/dist/runtime/adapter.js +0 -28
  160. package/dist/runtime/adapter.js.map +0 -1
  161. package/dist/runtime/builtin-keys.d.ts +0 -38
  162. package/dist/runtime/builtin-keys.js +0 -10
  163. package/dist/runtime/builtin-keys.js.map +0 -1
  164. package/dist/runtime/execution-context.d.ts +0 -3
  165. package/dist/runtime/execution-context.js +0 -6
  166. package/dist/runtime/execution-context.js.map +0 -1
  167. package/dist/runtime/executors.d.ts +0 -3
  168. package/dist/runtime/executors.js +0 -4
  169. package/dist/runtime/executors.js.map +0 -1
  170. package/dist/runtime/index.d.ts +0 -7
  171. package/dist/runtime/index.js +0 -31
  172. package/dist/runtime/index.js.map +0 -1
  173. package/dist/runtime/node-executor-factory.d.ts +0 -12
  174. package/dist/runtime/node-executor-factory.js +0 -6
  175. package/dist/runtime/node-executor-factory.js.map +0 -1
  176. package/dist/runtime/orchestrator.d.ts +0 -9
  177. package/dist/runtime/orchestrator.js +0 -8
  178. package/dist/runtime/orchestrator.js.map +0 -1
  179. package/dist/runtime/orchestrators/replay.d.ts +0 -45
  180. package/dist/runtime/orchestrators/replay.js +0 -3
  181. package/dist/runtime/orchestrators/replay.js.map +0 -1
  182. package/dist/runtime/orchestrators/step-by-step.d.ts +0 -16
  183. package/dist/runtime/orchestrators/step-by-step.js +0 -5
  184. package/dist/runtime/orchestrators/step-by-step.js.map +0 -1
  185. package/dist/runtime/orchestrators/utils.d.ts +0 -35
  186. package/dist/runtime/orchestrators/utils.js +0 -4
  187. package/dist/runtime/orchestrators/utils.js.map +0 -1
  188. package/dist/runtime/runtime.d.ts +0 -3
  189. package/dist/runtime/runtime.js +0 -27
  190. package/dist/runtime/runtime.js.map +0 -1
  191. package/dist/runtime/scheduler.d.ts +0 -3
  192. package/dist/runtime/scheduler.js +0 -3
  193. package/dist/runtime/scheduler.js.map +0 -1
  194. package/dist/runtime/state.d.ts +0 -3
  195. package/dist/runtime/state.js +0 -5
  196. package/dist/runtime/state.js.map +0 -1
  197. package/dist/runtime/traverser.d.ts +0 -3
  198. package/dist/runtime/traverser.js +0 -4
  199. package/dist/runtime/traverser.js.map +0 -1
  200. package/dist/runtime/types.d.ts +0 -3
  201. package/dist/runtime/types.js +0 -3
  202. package/dist/runtime/types.js.map +0 -1
  203. package/dist/runtime/workflow-logic-handler.d.ts +0 -17
  204. package/dist/runtime/workflow-logic-handler.js +0 -5
  205. package/dist/runtime/workflow-logic-handler.js.map +0 -1
  206. package/dist/sanitizer.d.ts +0 -12
  207. package/dist/sanitizer.js +0 -3
  208. package/dist/sanitizer.js.map +0 -1
  209. package/dist/sdk.js +0 -20
  210. package/dist/sdk.js.map +0 -1
  211. package/dist/serializer.d.ts +0 -18
  212. package/dist/serializer.js +0 -3
  213. package/dist/serializer.js.map +0 -1
  214. package/dist/testing/event-logger.d.ts +0 -63
  215. package/dist/testing/event-logger.js +0 -3
  216. package/dist/testing/event-logger.js.map +0 -1
  217. package/dist/testing/index.d.ts +0 -7
  218. package/dist/testing/index.js +0 -37
  219. package/dist/testing/index.js.map +0 -1
  220. package/dist/testing/run-with-trace.d.ts +0 -38
  221. package/dist/testing/run-with-trace.js +0 -33
  222. package/dist/testing/run-with-trace.js.map +0 -1
  223. package/dist/testing/stepper.d.ts +0 -79
  224. package/dist/testing/stepper.js +0 -11
  225. package/dist/testing/stepper.js.map +0 -1
  226. package/dist/types-CKhffqyb.d.ts +0 -666
  227. package/dist/types.d.ts +0 -3
  228. package/dist/types.js +0 -3
  229. package/dist/types.js.map +0 -1
@@ -0,0 +1,1269 @@
1
+ //#region src/errors.d.ts
2
+ /**
3
+ * A single, comprehensive error class for the framework.
4
+ * Use this for all errors to ensure consistent structure and easy debugging.
5
+ */
6
+ declare class FlowcraftError extends Error {
7
+ readonly message: string;
8
+ readonly nodeId?: string;
9
+ readonly blueprintId?: string;
10
+ readonly executionId?: string;
11
+ readonly isFatal: boolean;
12
+ constructor(message: string, options?: {
13
+ cause?: Error;
14
+ nodeId?: string;
15
+ blueprintId?: string;
16
+ executionId?: string;
17
+ isFatal?: boolean;
18
+ });
19
+ }
20
+ //#endregion
21
+ //#region src/node.d.ts
22
+ /** A type guard to reliably distinguish a NodeClass from a NodeFunction. */
23
+ declare function isNodeClass(impl: any): impl is NodeClass;
24
+ /**
25
+ * A structured, class-based node for complex logic with a safe, granular lifecycle.
26
+ * This class is generic, allowing implementations to specify the exact context
27
+ * and dependency types they expect.
28
+ */
29
+ declare abstract class BaseNode<TContext extends Record<string, any> = Record<string, any>, TDependencies extends RuntimeDependencies = RuntimeDependencies, TInput = any, TOutput = any, TAction extends string = string> {
30
+ protected params?: Record<string, any> | undefined;
31
+ protected nodeId?: string | undefined;
32
+ /**
33
+ * @param params Static parameters for this node instance, passed from the blueprint.
34
+ * @param nodeId The ID of the node in the blueprint.
35
+ */
36
+ constructor(params?: Record<string, any> | undefined, nodeId?: string | undefined);
37
+ /**
38
+ * Phase 1: Gathers and prepares data for execution. This phase is NOT retried on failure.
39
+ * @param context The node's execution context.
40
+ * @returns The data needed for the `exec` phase.
41
+ */
42
+ prep(context: NodeContext<TContext, TDependencies, TInput>): Promise<any>;
43
+ /**
44
+ * Phase 2: Performs the core, isolated logic. This is the ONLY phase that is retried.
45
+ * @param prepResult The data returned from the `prep` phase.
46
+ * @param context The node's execution context.
47
+ */
48
+ abstract exec(prepResult: any, context: NodeContext<TContext, TDependencies, TInput>): Promise<Omit<NodeResult<TOutput, TAction>, 'error'>>;
49
+ /**
50
+ * Phase 3: Processes the result and saves state. This phase is NOT retried.
51
+ * @param execResult The successful result from the `exec` or `fallback` phase.
52
+ * @param _context The node's execution context.
53
+ */
54
+ post(execResult: Omit<NodeResult<TOutput, TAction>, 'error'>, _context: NodeContext<TContext, TDependencies, TInput>): Promise<NodeResult<TOutput, TAction>>;
55
+ /**
56
+ * An optional safety net that runs if all `exec` retries fail.
57
+ * @param error The final error from the last `exec` attempt.
58
+ * @param _context The node's execution context.
59
+ */
60
+ fallback(error: Error, _context: NodeContext<TContext, TDependencies, TInput>): Promise<Omit<NodeResult<TOutput, TAction>, 'error'>>;
61
+ /**
62
+ * An optional cleanup phase for non-retriable errors that occur outside the main `exec` method.
63
+ * This method is invoked in a `finally` block or equivalent construct if a fatal, unhandled exception occurs in the `prep`, `exec`, or `post` phases.
64
+ * Allows nodes to perform crucial cleanup, such as closing database connections or releasing locks.
65
+ * @param _error The error that caused the failure.
66
+ * @param _context The node's execution context.
67
+ */
68
+ recover(_error: Error, _context: NodeContext<TContext, TDependencies, TInput>): Promise<void>;
69
+ }
70
+ //#endregion
71
+ //#region src/container.d.ts
72
+ type ServiceToken<_T = any> = string | symbol;
73
+ declare class DIContainer {
74
+ private services;
75
+ private factories;
76
+ register<T>(token: ServiceToken<T>, implementation: T): void;
77
+ registerFactory<T>(token: ServiceToken<T>, factory: (container: DIContainer) => T): void;
78
+ resolve<T>(token: ServiceToken<T>): T;
79
+ has(token: ServiceToken): boolean;
80
+ createChild(): DIContainer;
81
+ }
82
+ declare const ServiceTokens: {
83
+ readonly Logger: symbol;
84
+ readonly Serializer: symbol;
85
+ readonly Evaluator: symbol;
86
+ readonly EventBus: symbol;
87
+ readonly Orchestrator: symbol;
88
+ readonly Middleware: symbol;
89
+ readonly NodeRegistry: symbol;
90
+ readonly BlueprintRegistry: symbol;
91
+ readonly Dependencies: symbol;
92
+ };
93
+ //#endregion
94
+ //#region src/runtime/scheduler.d.ts
95
+ interface AwaitingWorkflow {
96
+ executionId: string;
97
+ blueprintId: string;
98
+ serializedContext: string;
99
+ awaitingNodeId: string;
100
+ wakeUpAt: string;
101
+ }
102
+ declare class WorkflowScheduler {
103
+ private runtime;
104
+ private activeWorkflows;
105
+ private intervalId?;
106
+ private checkIntervalMs;
107
+ constructor(runtime: FlowRuntime<any, any>, checkIntervalMs?: number);
108
+ start(): void;
109
+ stop(): void;
110
+ registerAwaitingWorkflow(executionId: string, blueprintId: string, serializedContext: string, awaitingNodeId: string, wakeUpAt: string): void;
111
+ unregisterWorkflow(executionId: string): void;
112
+ private checkAndResumeWorkflows;
113
+ getActiveWorkflows(): AwaitingWorkflow[];
114
+ }
115
+ //#endregion
116
+ //#region src/runtime/state.d.ts
117
+ declare class WorkflowState<TContext extends Record<string, any>> {
118
+ private _completedNodes;
119
+ private errors;
120
+ private anyFallbackExecuted;
121
+ private context;
122
+ private _isAwaiting;
123
+ private _awaitingNodeIds;
124
+ private _awaitingDetails;
125
+ constructor(initialData: Partial<TContext>, context?: IAsyncContext<TContext>);
126
+ /**
127
+ * Configure the context to emit events when modified.
128
+ * This is called after the ExecutionContext is created.
129
+ */
130
+ setEventEmitter(eventBus: any, executionId: string, sourceNode?: string): void;
131
+ addCompletedNode(nodeId: string, output: any): Promise<void>;
132
+ addError(nodeId: string, error: Error): void;
133
+ clearError(nodeId: string): void;
134
+ markFallbackExecuted(): void;
135
+ getContext(): IAsyncContext<TContext>;
136
+ getCompletedNodes(): Set<string>;
137
+ getErrors(): WorkflowError[];
138
+ getAnyFallbackExecuted(): boolean;
139
+ markAsAwaiting(nodeId: string, details?: any): Promise<void>;
140
+ isAwaiting(): boolean;
141
+ getAwaitingNodeIds(): string[];
142
+ getAwaitingDetails(nodeId: string): any;
143
+ clearAwaiting(nodeId?: string): void;
144
+ getStatus(isTraversalComplete?: boolean): WorkflowResult['status'];
145
+ toResult(serializer: ISerializer, executionId?: string): Promise<WorkflowResult<TContext>>;
146
+ }
147
+ //#endregion
148
+ //#region src/runtime/executors.d.ts
149
+ interface ExecutionStrategy {
150
+ execute: (nodeDef: NodeDefinition, context: NodeContext<any, any, any>, executionId?: string, signal?: AbortSignal) => Promise<NodeResult<any, any>>;
151
+ }
152
+ declare class FunctionNodeExecutor implements ExecutionStrategy {
153
+ private implementation;
154
+ private maxRetries;
155
+ private eventBus;
156
+ constructor(implementation: NodeFunction, maxRetries: number, eventBus: IEventBus);
157
+ execute(nodeDef: NodeDefinition, context: NodeContext<any, any, any>, executionId?: string, signal?: AbortSignal): Promise<NodeResult<any, any>>;
158
+ }
159
+ declare class ClassNodeExecutor implements ExecutionStrategy {
160
+ private implementation;
161
+ private maxRetries;
162
+ private eventBus;
163
+ constructor(implementation: NodeClass, maxRetries: number, eventBus: IEventBus);
164
+ execute(nodeDef: NodeDefinition, context: NodeContext<any, any, any>, executionId?: string, signal?: AbortSignal): Promise<NodeResult<any, any>>;
165
+ }
166
+ type NodeExecutionResult = {
167
+ status: 'success';
168
+ result: NodeResult<any, any>;
169
+ } | {
170
+ status: 'failed_with_fallback';
171
+ fallbackNodeId: string;
172
+ error: FlowcraftError;
173
+ } | {
174
+ status: 'failed';
175
+ error: FlowcraftError;
176
+ };
177
+ interface NodeExecutorConfig<TContext extends Record<string, any>, TDependencies extends Record<string, any>> {
178
+ context: ExecutionContext<TContext, TDependencies>;
179
+ nodeDef: NodeDefinition;
180
+ strategy: ExecutionStrategy;
181
+ }
182
+ declare class NodeExecutor<TContext extends Record<string, any>, TDependencies extends Record<string, any>> {
183
+ private context;
184
+ private nodeDef;
185
+ private strategy;
186
+ constructor(config: NodeExecutorConfig<TContext, TDependencies>);
187
+ execute(input: any): Promise<NodeExecutionResult>;
188
+ }
189
+ //#endregion
190
+ //#region src/runtime/traverser.d.ts
191
+ interface ReadyNode {
192
+ nodeId: string;
193
+ nodeDef: NodeDefinition;
194
+ }
195
+ declare class GraphTraverser {
196
+ private frontier;
197
+ private allPredecessors;
198
+ private allSuccessors;
199
+ private dynamicBlueprint;
200
+ private completedNodes;
201
+ private nodesInLoops;
202
+ constructor(blueprint: WorkflowBlueprint, isStrictMode?: boolean);
203
+ /**
204
+ * Clears all nodes from the execution frontier.
205
+ */
206
+ clearFrontier(): void;
207
+ /**
208
+ * Creates and initializes a GraphTraverser from a saved workflow state.
209
+ * This is the correct way to prepare a traverser for a `resume` operation.
210
+ * @param blueprint The workflow blueprint.
211
+ * @param state The workflow state being resumed.
212
+ * @returns A configured GraphTraverser instance.
213
+ */
214
+ static fromState(blueprint: WorkflowBlueprint, state: WorkflowState<any>): GraphTraverser;
215
+ private isFallbackNode;
216
+ private getJoinStrategy;
217
+ private filterNodesInLoops;
218
+ private getAllLoopSuccessors;
219
+ getReadyNodes(): ReadyNode[];
220
+ hasMoreWork(): boolean;
221
+ markNodeCompleted(nodeId: string, result: NodeResult<any, any>, nextNodes: NodeDefinition[]): void;
222
+ getAllNodeIds(): Set<string>;
223
+ getFallbackNodeIds(): Set<string>;
224
+ getCompletedNodes(): Set<string>;
225
+ getDynamicBlueprint(): WorkflowBlueprint;
226
+ getAllPredecessors(): Map<string, Set<string>>;
227
+ getAllSuccessors(): Map<string, Set<string>>;
228
+ getPredecessors(nodeId: string): Set<string>;
229
+ getSuccessors(nodeId: string): Set<string>;
230
+ getNodesInLoop(id: string): Set<string>;
231
+ resetNodeCompletion(nodeId: string): void;
232
+ getNode(nodeId: string, blueprint: WorkflowBlueprint): NodeDefinition | undefined;
233
+ addDynamicNode(_nodeId: string, dynamicNode: NodeDefinition, predecessorId: string, gatherNodeId?: string): void;
234
+ /**
235
+ * Manually adds a node ID back to the execution frontier.
236
+ * Used by orchestrators that need fine-grained control over steps.
237
+ * @param nodeId The ID of the node to add to the frontier.
238
+ */
239
+ addToFrontier(nodeId: string): void;
240
+ }
241
+ //#endregion
242
+ //#region src/runtime/types.d.ts
243
+ type NodeExecutorFactory = (context: ExecutionContext<any, any>) => (nodeId: string) => NodeExecutor<any, any>;
244
+ interface ExecutionServices {
245
+ determineNextNodes: (blueprint: WorkflowBlueprint, nodeId: string, result: NodeResult<any, any>, context: ContextImplementation<any>, executionId?: string) => Promise<{
246
+ node: NodeDefinition;
247
+ edge: EdgeDefinition;
248
+ }[]>;
249
+ applyEdgeTransform: (edge: EdgeDefinition, sourceResult: NodeResult<any, any>, targetNode: NodeDefinition, context: ContextImplementation<any>, allPredecessors?: Map<string, Set<string>>, executionId?: string) => Promise<void>;
250
+ resolveNodeInput: (nodeId: string, blueprint: WorkflowBlueprint, context: any) => Promise<any>;
251
+ }
252
+ interface IOrchestrator {
253
+ run(context: ExecutionContext<any, any>, traverser: GraphTraverser): Promise<WorkflowResult<any>>;
254
+ }
255
+ interface IRuntime<TContext extends Record<string, any> = Record<string, any>, TDependencies extends RuntimeDependencies = RuntimeDependencies> {
256
+ options: RuntimeOptions<TDependencies>;
257
+ registry: Map<string, NodeFunction | NodeClass>;
258
+ executeNode: (blueprint: WorkflowBlueprint, nodeId: string, state: WorkflowState<TContext>, allPredecessors?: Map<string, Set<string>>, functionRegistry?: Map<string, any>, executionId?: string, signal?: AbortSignal) => Promise<NodeResult>;
259
+ determineNextNodes: (blueprint: WorkflowBlueprint, nodeId: string, result: NodeResult, context: ContextImplementation<TContext>, executionId?: string) => Promise<{
260
+ node: NodeDefinition;
261
+ edge: EdgeDefinition;
262
+ }[]>;
263
+ applyEdgeTransform: (edge: EdgeDefinition, sourceResult: NodeResult, targetNode: NodeDefinition, context: ContextImplementation<TContext>, allPredecessors?: Map<string, Set<string>>, executionId?: string) => Promise<void>;
264
+ createForSubflow: (subBlueprint: WorkflowBlueprint, initialSubState: Partial<TContext>, executionId: string, signal?: AbortSignal) => ExecutionContext<TContext, TDependencies>;
265
+ getExecutorForNode: (nodeId: string, context: ExecutionContext<TContext, TDependencies>) => NodeExecutor<TContext, TDependencies>;
266
+ }
267
+ //#endregion
268
+ //#region src/runtime/runtime.d.ts
269
+ declare class FlowRuntime<TContext extends Record<string, any>, TDependencies extends Record<string, any>> implements IRuntime<TContext, TDependencies> {
270
+ private container;
271
+ registry: Map<string, NodeFunction | NodeClass>;
272
+ private blueprints;
273
+ dependencies: TDependencies;
274
+ logger: ILogger;
275
+ eventBus: IEventBus;
276
+ serializer: ISerializer;
277
+ middleware: Middleware[];
278
+ evaluator: IEvaluator;
279
+ private analysisCache;
280
+ orchestrator: IOrchestrator;
281
+ options: RuntimeOptions<TDependencies>;
282
+ private readonly logicHandler;
283
+ private readonly executorFactory;
284
+ scheduler: WorkflowScheduler;
285
+ getBlueprint(id: string): WorkflowBlueprint | undefined;
286
+ constructor(container: DIContainer, options?: RuntimeOptions<TDependencies>);
287
+ constructor(options: RuntimeOptions<TDependencies>);
288
+ private _setupExecutionContext;
289
+ run(blueprint: WorkflowBlueprint, initialState?: Partial<TContext> | string, options?: {
290
+ functionRegistry?: Map<string, any>;
291
+ strict?: boolean;
292
+ signal?: AbortSignal;
293
+ concurrency?: number;
294
+ }): Promise<WorkflowResult<TContext>>;
295
+ startScheduler(checkIntervalMs?: number): void;
296
+ stopScheduler(): void;
297
+ private _setupResumedExecutionContext;
298
+ resume(blueprint: WorkflowBlueprint, serializedContext: string, resumeData: {
299
+ output?: any;
300
+ action?: string;
301
+ }, nodeId?: string, options?: {
302
+ functionRegistry?: Map<string, any>;
303
+ strict?: boolean;
304
+ signal?: AbortSignal;
305
+ concurrency?: number;
306
+ }): Promise<WorkflowResult<TContext>>;
307
+ _createExecutionRegistry(dynamicRegistry?: Map<string, any>): Map<string, NodeFunction | NodeClass>;
308
+ executeNode(blueprint: WorkflowBlueprint, nodeId: string, state: WorkflowState<TContext>, _allPredecessors?: Map<string, Set<string>>, functionRegistry?: Map<string, any>, executionId?: string, signal?: AbortSignal): Promise<NodeResult<any, any>>;
309
+ getExecutorForNode(nodeId: string, context: ExecutionContext<TContext, TDependencies>): any;
310
+ createForSubflow(subBlueprint: WorkflowBlueprint, initialSubState: Partial<TContext>, executionId: string, signal?: AbortSignal): ExecutionContext<TContext, TDependencies>;
311
+ determineNextNodes(blueprint: WorkflowBlueprint, nodeId: string, result: NodeResult<any, any>, context: ContextImplementation<TContext>, executionId?: string): Promise<{
312
+ node: NodeDefinition;
313
+ edge: EdgeDefinition;
314
+ }[]>;
315
+ applyEdgeTransform(edge: EdgeDefinition, sourceResult: NodeResult<any, any>, targetNode: NodeDefinition, context: ContextImplementation<TContext>, allPredecessors?: Map<string, Set<string>>, executionId?: string): Promise<void>;
316
+ resolveNodeInput(nodeId: string, blueprint: WorkflowBlueprint, context: ContextImplementation<TContext>): Promise<any>;
317
+ /**
318
+ * Replay a workflow execution from a pre-recorded event history.
319
+ * This reconstructs the final workflow state without executing any node logic,
320
+ * enabling time-travel debugging and post-mortem analysis.
321
+ *
322
+ * @param blueprint The workflow blueprint
323
+ * @param events The recorded event history for the execution
324
+ * @param executionId Optional execution ID to filter events (if events contain multiple executions)
325
+ * @returns The reconstructed workflow result
326
+ */
327
+ replay(blueprint: WorkflowBlueprint, events: FlowcraftEvent[], executionId?: string): Promise<WorkflowResult<TContext>>;
328
+ }
329
+ //#endregion
330
+ //#region src/runtime/execution-context.d.ts
331
+ /**
332
+ * A container for all state and dependencies of a single workflow execution.
333
+ * This object is created once per `run` and passed through the execution stack.
334
+ */
335
+ declare class ExecutionContext<TContext extends Record<string, any>, TDependencies extends RuntimeDependencies> {
336
+ readonly blueprint: WorkflowBlueprint;
337
+ readonly state: WorkflowState<TContext>;
338
+ readonly nodeRegistry: Map<string, NodeFunction | NodeClass>;
339
+ readonly executionId: string;
340
+ readonly runtime: FlowRuntime<TContext, TDependencies>;
341
+ readonly services: {
342
+ logger: ILogger;
343
+ eventBus: IEventBus;
344
+ serializer: ISerializer;
345
+ evaluator: IEvaluator;
346
+ middleware: Middleware[];
347
+ dependencies: TDependencies;
348
+ };
349
+ readonly signal?: AbortSignal | undefined;
350
+ readonly concurrency?: number | undefined;
351
+ constructor(blueprint: WorkflowBlueprint, state: WorkflowState<TContext>, nodeRegistry: Map<string, NodeFunction | NodeClass>, executionId: string, runtime: FlowRuntime<TContext, TDependencies>, // A reference back to the runtime for orchestrating subflows
352
+ services: {
353
+ logger: ILogger;
354
+ eventBus: IEventBus;
355
+ serializer: ISerializer;
356
+ evaluator: IEvaluator;
357
+ middleware: Middleware[];
358
+ dependencies: TDependencies;
359
+ }, signal?: AbortSignal | undefined, concurrency?: number | undefined);
360
+ createForSubflow(subBlueprint: WorkflowBlueprint, initialSubState: Partial<TContext>): ExecutionContext<TContext, TDependencies>;
361
+ }
362
+ //#endregion
363
+ //#region src/types.d.ts
364
+ /** Source location information for debugging and visualization. */
365
+ interface SourceLocation {
366
+ file: string;
367
+ line: number;
368
+ column: number;
369
+ }
370
+ /** Metadata associated with a workflow blueprint. */
371
+ interface WorkflowBlueprintMetadata {
372
+ /** Optional version identifier for the blueprint. Used in distributed systems to ensure version compatibility. */
373
+ version?: string;
374
+ /** Entry points for cycles in the workflow graph. */
375
+ cycleEntryPoints?: string[];
376
+ [key: string]: any;
377
+ }
378
+ /** The central, serializable representation of a workflow. */
379
+ interface WorkflowBlueprint {
380
+ id: string;
381
+ nodes: NodeDefinition[];
382
+ edges: EdgeDefinition[];
383
+ metadata?: WorkflowBlueprintMetadata;
384
+ }
385
+ /** Defines a single step in the workflow. */
386
+ interface NodeDefinition {
387
+ id: string;
388
+ /** A key that resolves to an implementation in a registry. */
389
+ uses: string;
390
+ /** Static parameters for the node. */
391
+ params?: Record<string, any>;
392
+ /** Maps context data to this node's `input`. */
393
+ inputs?: string | Record<string, string>;
394
+ /** Configuration for retries, timeouts, etc. */
395
+ config?: NodeConfig;
396
+ /** Source location for debugging and visualization. */
397
+ _sourceLocation?: SourceLocation;
398
+ }
399
+ /** Defines the connection and data flow between two nodes. */
400
+ interface EdgeDefinition {
401
+ source: string;
402
+ target: string;
403
+ /** An 'action' from the source node that triggers this edge. */
404
+ action?: string;
405
+ /** A condition that must be met for this edge to be taken. */
406
+ condition?: string;
407
+ /** A string expression to transform the data before passing it to the target node. */
408
+ transform?: string;
409
+ /** Source location for debugging and visualization. */
410
+ _sourceLocation?: SourceLocation;
411
+ }
412
/** Configuration for a node's resiliency and behavior. */
interface NodeConfig {
  /** Maximum number of retry attempts after a failure. */
  maxRetries?: number;
  /** Delay between retry attempts (presumably milliseconds — confirm against runtime). */
  retryDelay?: number;
  /** Execution timeout for the node (presumably milliseconds — confirm against runtime). */
  timeout?: number;
  /** The `uses` key of another node implementation for fallback. */
  fallback?: string;
  /** Determines how a node with multiple incoming edges should be triggered. */
  joinStrategy?: 'all' | 'any';
}
/** The required return type for any node implementation. */
interface NodeResult<TOutput = any, TAction extends string = string> {
  /** The data produced by this node. */
  output?: TOutput;
  /** An action label; edges may declare an `action` to match against this. */
  action?: TAction;
  /** Structured error details when the node fails; extra fields are allowed. */
  error?: {
    message: string;
    [key: string]: any;
  };
  /** Allows a node to dynamically schedule new nodes for the orchestrator to execute. */
  dynamicNodes?: NodeDefinition[];
  /** Internal flag: Indicates that this result came from a fallback execution. */
  _fallbackExecuted?: boolean;
}
435
/** The context object passed to every node's execution logic. */
interface NodeContext<TContext extends Record<string, any> = Record<string, any>, TDependencies extends RuntimeDependencies = RuntimeDependencies, TInput = any> {
  /** The async-only interface for interacting with the workflow's state. */
  context: IAsyncContext<TContext>;
  /** The primary input data for this node, typically from its predecessor. */
  input?: TInput;
  /** Static parameters defined in the blueprint. */
  params: Record<string, any>;
  /** Shared, runtime-level dependencies (e.g., database clients, loggers). */
  dependencies: TDependencies & {
    // The runtime and current workflow state are always injected alongside user dependencies.
    runtime: ExecutionContext<TContext, TDependencies>;
    workflowState: WorkflowState<TContext>;
  };
  /** A signal to gracefully cancel long-running node operations. */
  signal?: AbortSignal;
}
/** A simple function-based node implementation. */
type NodeFunction<TContext extends Record<string, any> = Record<string, any>, TDependencies extends RuntimeDependencies = RuntimeDependencies, TInput = any, TOutput = any, TAction extends string = string> = (context: NodeContext<TContext, TDependencies, TInput>) => Promise<NodeResult<TOutput, TAction>>;
/** Represents a constructor for any concrete class that extends the abstract BaseNode. */
type NodeClass<TContext extends Record<string, any> = Record<string, any>, TDependencies extends RuntimeDependencies = RuntimeDependencies, TInput = any, TOutput = any, TAction extends string = string> = new (params?: Record<string, any>, nodeId?: string) => BaseNode<TContext, TDependencies, TInput, TOutput, TAction>;
/** A union of all possible node implementation types. */
type NodeImplementation = NodeFunction | NodeClass;
/** A registry mapping node types to their implementations. */
type NodeRegistry = Record<string, NodeImplementation>;
/** A discriminated union for all possible context implementations (discriminated on `type`). */
type ContextImplementation<T extends Record<string, any>> = ISyncContext<T> | IAsyncContext<T>;
461
/** The synchronous context interface for high-performance, in-memory state. */
interface ISyncContext<TContext extends Record<string, any> = Record<string, any>> {
  /** Discriminant used by `ContextImplementation`. */
  readonly type: 'sync';
  // Each accessor is overloaded: a typed form keyed on TContext, and an
  // untyped string-keyed form for dynamic access.
  get<K extends keyof TContext>(key: K): TContext[K] | undefined;
  get(key: string): any | undefined;
  set<K extends keyof TContext>(key: K, value: TContext[K]): void;
  set(key: string, value: any): void;
  has<K extends keyof TContext>(key: K): boolean;
  has(key: string): boolean;
  delete<K extends keyof TContext>(key: K): boolean;
  delete(key: string): boolean;
  /** Snapshot of the full context as a plain object. */
  toJSON: () => Record<string, any>;
}
/** The asynchronous context interface for remote or distributed state. */
interface IAsyncContext<TContext extends Record<string, any> = Record<string, any>> {
  /** Discriminant used by `ContextImplementation`. */
  readonly type: 'async';
  // Same overload pattern as ISyncContext, but every operation is Promise-based.
  get<K extends keyof TContext>(key: K): Promise<TContext[K] | undefined>;
  get(key: string): Promise<any | undefined>;
  set<K extends keyof TContext>(key: K, value: TContext[K]): Promise<void>;
  set(key: string, value: any): Promise<void>;
  has<K extends keyof TContext>(key: K): Promise<boolean>;
  has(key: string): Promise<boolean>;
  delete<K extends keyof TContext>(key: K): Promise<boolean>;
  delete(key: string): Promise<boolean>;
  /** Snapshot of the full context as a plain object. */
  toJSON: () => Promise<Record<string, any>>;
  /**
   * Applies a batch of patch operations atomically.
   * More efficient than individual set/delete calls for bulk updates.
   */
  patch(operations: PatchOperation[]): Promise<void>;
}
/** Represents a single patch operation for delta-based state updates. */
type PatchOperation = {
  op: 'set';
  key: string;
  value: any;
} | {
  op: 'delete';
  key: string;
};
501
/** Generic for any set of dependencies. */
interface RuntimeDependencies {
  [key: string]: any;
}
/** Configuration options for the FlowRuntime. */
interface RuntimeOptions<TDependencies extends RuntimeDependencies = RuntimeDependencies> {
  /** A registry of globally available node implementations. */
  registry?: Record<string, NodeFunction | NodeClass | typeof BaseNode>;
  /** A registry of all available workflow blueprints for subflow execution. */
  blueprints?: Record<string, WorkflowBlueprint>;
  /** Shared dependencies to be injected into every node. */
  dependencies?: TDependencies;
  /** A pluggable logger for consistent output. */
  logger?: ILogger;
  /** A pluggable event bus for observability. */
  eventBus?: IEventBus;
  /**
   * A pluggable evaluator for edge conditions and transforms.
   * @default new PropertyEvaluator() - A safe evaluator for simple property access.
   * For complex logic, provide a custom implementation or use the `UnsafeEvaluator`
   * (not recommended for production).
   */
  evaluator?: IEvaluator;
  /** An array of middleware to wrap node execution. */
  middleware?: Middleware[];
  /** A pluggable serializer for handling complex data types in the context. */
  serializer?: ISerializer;
  /** A flag to enforce strictness in the workflow. */
  strict?: boolean;
}
/** Interface for a pluggable expression evaluator for conditions and transforms. */
interface IEvaluator {
  /** Evaluates `expression` against the given data and returns the result. */
  evaluate: (expression: string, context: Record<string, any>) => any;
}
/** Interface for a pluggable logger. Each level accepts a message plus optional structured metadata. */
interface ILogger {
  debug: (message: string, meta?: Record<string, any>) => void;
  info: (message: string, meta?: Record<string, any>) => void;
  warn: (message: string, meta?: Record<string, any>) => void;
  error: (message: string, meta?: Record<string, any>) => void;
}
542
/**
 * Structured event types for detailed execution tracing.
 * A discriminated union on `type`, covering workflow lifecycle, per-node
 * execution, edge evaluation, context mutation, distributed job handling,
 * and batch (scatter/gather) processing.
 */
type FlowcraftEvent = {
  /** A workflow execution has started. */
  type: 'workflow:start';
  payload: {
    blueprintId: string;
    executionId: string;
  };
} | {
  /** A previously paused/persisted workflow execution is resuming. */
  type: 'workflow:resume';
  payload: {
    blueprintId: string;
    executionId: string;
  };
} | {
  /** The workflow cannot make progress; `remainingNodes` were never executed. */
  type: 'workflow:stall';
  payload: {
    blueprintId: string;
    executionId: string;
    remainingNodes: number;
  };
} | {
  /** The workflow was paused (e.g., awaiting external input). */
  type: 'workflow:pause';
  payload: {
    blueprintId: string;
    executionId: string;
  };
} | {
  /** The workflow finished with the given status and any accumulated errors. */
  type: 'workflow:finish';
  payload: {
    blueprintId: string;
    executionId: string;
    status: string;
    errors?: WorkflowError[];
  };
} | {
  /** A node began executing with the given input. */
  type: 'node:start';
  payload: {
    nodeId: string;
    executionId: string;
    input: any;
    blueprintId: string;
  };
} | {
  /** A node finished; `result` is its NodeResult. */
  type: 'node:finish';
  payload: {
    nodeId: string;
    result: NodeResult;
    executionId: string;
    blueprintId: string;
  };
} | {
  /** A node failed with the given error. */
  type: 'node:error';
  payload: {
    nodeId: string;
    error: FlowcraftError;
    executionId: string;
    blueprintId: string;
  };
} | {
  /** A node's configured fallback implementation was invoked. */
  type: 'node:fallback';
  payload: {
    nodeId: string;
    executionId: string;
    fallback: string;
    blueprintId: string;
  };
} | {
  /** A node is being retried; `attempt` is the retry counter. */
  type: 'node:retry';
  payload: {
    nodeId: string;
    attempt: number;
    executionId: string;
    blueprintId: string;
  };
} | {
  /** A node was skipped because the given edge was not taken. */
  type: 'node:skipped';
  payload: {
    nodeId: string;
    edge: EdgeDefinition;
    executionId: string;
    blueprintId: string;
  };
} | {
  /** An edge condition was evaluated; `result` is whether the edge is taken. */
  type: 'edge:evaluate';
  payload: {
    source: string;
    target: string;
    condition?: string;
    result: boolean;
  };
} | {
  /** The workflow context was mutated (set or delete) by `sourceNode`. */
  type: 'context:change';
  payload: {
    sourceNode: string;
    key: string;
    op: 'set' | 'delete';
    value?: any;
    executionId: string;
  };
} | {
  /** Distributed mode: a job for a node was enqueued. */
  type: 'job:enqueued';
  payload: {
    runId: string;
    blueprintId: string;
    nodeId: string;
    queueName?: string;
  };
} | {
  /** Distributed mode: a job completed; `duration` is presumably milliseconds — confirm against adapter. */
  type: 'job:processed';
  payload: {
    runId: string;
    blueprintId: string;
    nodeId: string;
    duration: number;
    success: boolean;
  };
} | {
  /** Distributed mode: a job failed with the given error. */
  type: 'job:failed';
  payload: {
    runId: string;
    blueprintId: string;
    nodeId: string;
    error: FlowcraftError;
  };
} | {
  /** A batch (scatter) started, fanning out to the listed worker nodes. */
  type: 'batch:start';
  payload: {
    batchId: string;
    scatterNodeId: string;
    workerNodeIds: string[];
  };
} | {
  /** A batch (gather) finished with the collected worker results. */
  type: 'batch:finish';
  payload: {
    batchId: string;
    gatherNodeId: string;
    results: any[];
  };
};
681
/** Interface for a pluggable event bus. May emit synchronously or asynchronously. */
interface IEventBus {
  emit: (event: FlowcraftEvent) => void | Promise<void>;
}
/** Interface for a pluggable serializer. `deserialize` must invert `serialize`. */
interface ISerializer {
  serialize: (data: Record<string, any>) => string;
  deserialize: (text: string) => Record<string, any>;
}
/** Interface for middleware to handle cross-cutting concerns. */
interface Middleware<TContext extends Record<string, any> = Record<string, any>> {
  /** Invoked before a node executes. */
  beforeNode?: (ctx: ContextImplementation<TContext>, nodeId: string) => void | Promise<void>;
  /** Invoked after a node executes; exactly one of `result`/`error` is expected to be defined. */
  afterNode?: (ctx: ContextImplementation<TContext>, nodeId: string, result: NodeResult | undefined, error: Error | undefined) => void | Promise<void>;
  /** Wraps node execution; implementations should call `next()` to run the node. */
  aroundNode?: (ctx: ContextImplementation<TContext>, nodeId: string, next: () => Promise<NodeResult>) => Promise<NodeResult>;
}
/** A structured error object returned from a failed workflow execution. */
interface WorkflowError extends FlowcraftError {
  /** When the error occurred (string form; format not specified here — presumably ISO 8601). */
  timestamp: string;
  /** The underlying error value, if any. */
  originalError?: any;
}
/** The status of a workflow execution. */
type WorkflowStatus = 'completed' | 'failed' | 'stalled' | 'cancelled' | 'awaiting';
/** The final result of a workflow execution. */
interface WorkflowResult<TContext = Record<string, any>> {
  /** The final context state. */
  context: TContext;
  /** The final context, serialized via the configured ISerializer. */
  serializedContext: string;
  status: WorkflowStatus;
  /** Present when the run produced errors. */
  errors?: WorkflowError[];
}
/** A graph representation of a workflow blueprint. */
interface UIGraph {
  // Nodes/edges carry the blueprint fields plus free-form `data` for UI layers.
  nodes: Array<Partial<NodeDefinition> & {
    id: string;
    data?: Record<string, any>;
    type?: string;
  }>;
  edges: Array<Partial<EdgeDefinition> & {
    source: string;
    target: string;
    data?: Record<string, any>;
  }>;
}
//#endregion
724
//#region src/adapters/persistent-event-bus.d.ts
/**
 * Interface for a persistent storage mechanism for events.
 * Implementations can store events in databases, log streams, files, etc.
 */
interface IEventStore {
  /**
   * Store an event persistently.
   * @param event The event to store
   * @param executionId The execution ID for grouping events
   */
  store(event: FlowcraftEvent, executionId: string): Promise<void>;
  /**
   * Retrieve all events for a specific execution.
   * @param executionId The execution ID
   * @returns Array of events in chronological order
   */
  retrieve(executionId: string): Promise<FlowcraftEvent[]>;
  /**
   * Retrieve events for multiple executions.
   * @param executionIds Array of execution IDs
   * @returns Map of execution ID to array of events
   */
  retrieveMultiple(executionIds: string[]): Promise<Map<string, FlowcraftEvent[]>>;
}
/**
 * A pluggable event bus adapter that persists all workflow events
 * to a configurable storage backend, enabling time-travel debugging and replay.
 *
 * @example
 * ```typescript
 * // Using a database-backed store
 * const eventStore = new DatabaseEventStore(dbConnection)
 * const eventBus = new PersistentEventBusAdapter(eventStore)
 * const runtime = new FlowRuntime({ eventBus })
 *
 * // Later, replay the execution
 * const events = await eventStore.retrieve(executionId)
 * const finalState = await runtime.replay(blueprint, events)
 * ```
 */
declare class PersistentEventBusAdapter implements IEventBus {
  // Backing store that receives every emitted event.
  private store;
  constructor(store: IEventStore);
  /**
   * Emit an event by storing it persistently.
   * Also emits to console for debugging (can be made configurable).
   */
  emit(event: FlowcraftEvent): Promise<void>;
}
/**
 * Simple in-memory event store for testing and development.
 * Not suitable for production use.
 */
declare class InMemoryEventStore implements IEventStore {
  // In-memory event storage, keyed per execution.
  private events;
  store(event: FlowcraftEvent, executionId: string): Promise<void>;
  retrieve(executionId: string): Promise<FlowcraftEvent[]>;
  retrieveMultiple(executionIds: string[]): Promise<Map<string, FlowcraftEvent[]>>;
  /**
   * Clear all stored events (useful for testing).
   */
  clear(): void;
}
//#endregion
788
+ //#endregion
789
+ //#region src/analysis.d.ts
790
+ /**
791
+ * A list of cycles found in the graph. Each cycle is an array of node IDs.
792
+ */
793
+ type Cycles = string[][];
794
+ /**
795
+ * Analysis result for a workflow blueprint
796
+ */
797
+ interface BlueprintAnalysis {
798
+ /** Cycles found in the graph */
799
+ cycles: Cycles;
800
+ /** Node IDs that have no incoming edges (start nodes) */
801
+ startNodeIds: string[];
802
+ /** Node IDs that have no outgoing edges (terminal nodes) */
803
+ terminalNodeIds: string[];
804
+ /** Total number of nodes */
805
+ nodeCount: number;
806
+ /** Total number of edges */
807
+ edgeCount: number;
808
+ /** Whether the graph is a valid DAG (no cycles) */
809
+ isDag: boolean;
810
+ }
811
+ /**
812
+ * Analyzes a workflow blueprint to detect cycles using an iterative DFS algorithm.
813
+ * This avoids stack overflow issues for deep graphs compared to the recursive version.
814
+ * @param blueprint The WorkflowBlueprint object containing nodes and edges.
815
+ * @returns An array of cycles found. Each cycle is represented as an array of node IDs.
816
+ */
817
+ declare function checkForCycles(blueprint: WorkflowBlueprint): Cycles;
818
+ /**
819
+ * Generates Mermaid diagram syntax from a WorkflowBlueprint
820
+ * @param blueprint The WorkflowBlueprint object containing nodes and edges
821
+ * @returns Mermaid syntax string for the flowchart
822
+ */
823
+ declare function generateMermaid(blueprint: WorkflowBlueprint): string;
824
+ /**
825
+ * Generates Mermaid diagram syntax from a WorkflowBlueprint with execution history styling
826
+ * @param blueprint The WorkflowBlueprint object containing nodes and edges
827
+ * @param events Array of FlowcraftEvent objects from the workflow execution
828
+ * @returns Mermaid syntax string for the flowchart with execution path highlighting
829
+ */
830
+ declare function generateMermaidForRun(blueprint: WorkflowBlueprint, events: FlowcraftEvent[]): string;
831
+ /**
832
+ * Analyzes a workflow blueprint and returns comprehensive analysis
833
+ * @param blueprint The WorkflowBlueprint object containing nodes and edges
834
+ * @returns Analysis result with cycles, start nodes, terminal nodes, and other metrics
835
+ */
836
+ declare function analyzeBlueprint(blueprint: WorkflowBlueprint): BlueprintAnalysis;
837
+ //#endregion
838
+ //#region src/container-factory.d.ts
839
+ interface ContainerOptions<TDependencies extends RuntimeDependencies = RuntimeDependencies> {
840
+ logger?: ILogger;
841
+ serializer?: ISerializer;
842
+ evaluator?: IEvaluator;
843
+ eventBus?: IEventBus;
844
+ middleware?: Middleware[];
845
+ registry?: Record<string, NodeFunction | NodeClass>;
846
+ blueprints?: Record<string, WorkflowBlueprint>;
847
+ dependencies?: TDependencies;
848
+ }
849
+ declare function createDefaultContainer<TDependencies extends RuntimeDependencies = RuntimeDependencies>(options?: ContainerOptions<TDependencies>): DIContainer;
850
+ //#endregion
851
+ //#region src/context.d.ts
852
+ /**
853
+ * A default, high-performance, in-memory implementation of ISyncContext using a Map.
854
+ */
855
+ declare class Context<TContext extends Record<string, any>> implements ISyncContext<TContext> {
856
+ readonly type: "sync";
857
+ private data;
858
+ constructor(initialData?: Partial<TContext>);
859
+ get<K extends keyof TContext>(key: K): TContext[K] | undefined;
860
+ set<K extends keyof TContext>(key: K, value: TContext[K]): void;
861
+ has<K extends keyof TContext>(key: K): boolean;
862
+ delete<K extends keyof TContext>(key: K): boolean;
863
+ toJSON(): Record<string, any>;
864
+ }
865
+ /**
866
+ * An adapter that provides a consistent, Promise-based view of a synchronous context.
867
+ * This is created by the runtime and is transparent to the node author.
868
+ */
869
+ declare class AsyncContextView<TContext extends Record<string, any>> implements IAsyncContext<TContext> {
870
+ private syncContext;
871
+ readonly type: "async";
872
+ constructor(syncContext: ISyncContext<TContext>);
873
+ get<K extends keyof TContext>(key: K): Promise<TContext[K] | undefined>;
874
+ set<K extends keyof TContext>(key: K, value: TContext[K]): Promise<void>;
875
+ has<K extends keyof TContext>(key: K): Promise<boolean>;
876
+ delete<K extends keyof TContext>(key: K): Promise<boolean>;
877
+ toJSON(): Promise<Record<string, any>>;
878
+ patch(_operations: PatchOperation[]): Promise<void>;
879
+ }
880
+ /**
881
+ * A proxy wrapper that tracks changes to an async context for delta-based persistence.
882
+ * Records all mutations (set/delete operations) to enable efficient partial updates.
883
+ */
884
+ declare class TrackedAsyncContext<TContext extends Record<string, any>> implements IAsyncContext<TContext> {
885
+ readonly type: "async";
886
+ private deltas;
887
+ private innerContext;
888
+ private eventBus?;
889
+ private executionId?;
890
+ private sourceNode?;
891
+ constructor(innerContext: IAsyncContext<TContext>, eventBus?: any, executionId?: string, sourceNode?: string);
892
+ get<K extends keyof TContext>(key: K): Promise<TContext[K] | undefined>;
893
+ set<K extends keyof TContext>(key: K, value: TContext[K]): Promise<void>;
894
+ has<K extends keyof TContext>(key: K): Promise<boolean>;
895
+ delete<K extends keyof TContext>(key: K): Promise<boolean>;
896
+ toJSON(): Promise<Record<string, any>>;
897
+ patch(operations: PatchOperation[]): Promise<void>;
898
+ getDeltas(): PatchOperation[];
899
+ clearDeltas(): void;
900
+ /**
901
+ * Configures the event emitter for tracking context changes.
902
+ * This enables the context to emit events when set/delete operations occur,
903
+ * allowing for external monitoring and persistence of context mutations.
904
+ *
905
+ * @param eventBus - The event bus instance to emit context change events
906
+ * @param executionId - The unique identifier for the current workflow execution
907
+ * @param sourceNode - Optional identifier for the node that triggered the context change
908
+ */
909
+ configureEventEmitter(eventBus: any, executionId: string, sourceNode?: string): void;
910
+ }
911
+ //#endregion
912
+ //#region src/error-mapper.d.ts
913
+ /**
914
+ * Creates an error mapper function that enhances runtime errors with source location information.
915
+ * The mapper looks up node IDs in the provided manifest blueprints and returns enhanced errors
916
+ * that point to the original TypeScript source code.
917
+ *
918
+ * @param manifestBlueprints - The compiled blueprint manifest containing source location data
919
+ * @returns A function that maps errors to enhanced errors with source location information
920
+ */
921
+ declare function createErrorMapper(manifestBlueprints: Record<string, WorkflowBlueprint>): (error: Error) => Error;
922
+ //#endregion
923
+ //#region src/evaluator.d.ts
924
+ /**
925
+ * A safe evaluator that only allows simple property access.
926
+ * It cannot execute arbitrary code and is secure for untrusted inputs.
927
+ *
928
+ * Example expressions:
929
+ * - "result.output.status"
930
+ * - "context.user.isAdmin"
931
+ * - "input.value"
932
+ */
933
+ declare class PropertyEvaluator implements IEvaluator {
934
+ evaluate(expression: string, context: Record<string, any>): any;
935
+ }
936
+ /**
937
+ * @warning This evaluator uses `new Function()` and can execute arbitrary
938
+ * JavaScript code. It poses a significant security risk if the expressions
939
+ * are not from a trusted source (e.g., user input).
940
+ *
941
+ * It should only be used in controlled environments where all workflow
942
+ * definitions are static and authored by trusted developers.
943
+ *
944
+ * For safer evaluation, use the default `PropertyEvaluator` or install a
945
+ * sandboxed library like `jsep` to create a custom, secure evaluator.
946
+ */
947
+ declare class UnsafeEvaluator implements IEvaluator {
948
+ evaluate(expression: string, context: Record<string, any>): any;
949
+ }
950
+ //#endregion
951
+ //#region src/flow.d.ts
952
+ /**
953
+ * A fluent API for programmatically constructing a WorkflowBlueprint.
954
+ */
955
+ declare class FlowBuilder<TContext extends Record<string, any> = Record<string, any>, TDependencies extends Record<string, any> = Record<string, any>> {
956
+ private blueprint;
957
+ private functionRegistry;
958
+ private loopDefinitions;
959
+ private batchDefinitions;
960
+ private cycleEntryPoints;
961
+ constructor(id: string);
962
+ node<TInput = any, TOutput = any, TAction extends string = string>(id: string, implementation: NodeFunction<TContext, TDependencies, TInput, TOutput, TAction> | NodeClass<TContext, TDependencies, TInput, TOutput, TAction>, options?: Omit<NodeDefinition, 'id' | 'uses'>): this;
963
+ edge(source: string, target: string, options?: Omit<EdgeDefinition, 'source' | 'target'>): this;
964
+ /**
965
+ * Creates a batch processing pattern.
966
+ * It takes an input array, runs a worker node on each item in parallel, and gathers the results.
967
+ * This method augments the Flow's TContext with a new key for the output array.
968
+ *
969
+ * @param id The base ID for this batch operation.
970
+ * @param worker The node implementation to run on each item.
971
+ * @param options Configuration for the batch operation.
972
+ * @returns The Flow instance with an updated context type for chaining.
973
+ */
974
+ batch<TWorkerInput, TWorkerOutput, TWorkerAction extends string, TOutputKey extends string>(id: string, worker: NodeFunction<TContext, TDependencies, TWorkerInput, TWorkerOutput, TWorkerAction> | NodeClass<TContext, TDependencies, TWorkerInput, TWorkerOutput, TWorkerAction>, options: {
975
+ /** The key in the context that holds the input array for the batch. */inputKey: keyof TContext; /** The key in the context where the array of results will be stored. */
976
+ outputKey: TOutputKey; /** The number of items to process in each chunk to limit memory usage. */
977
+ chunkSize?: number;
978
+ }): FlowBuilder<TContext & { [K in TOutputKey]: TWorkerOutput[] }, TDependencies>;
979
+ /**
980
+ * Creates a sleep node that pauses workflow execution for a specified duration.
981
+ * @param id A unique identifier for the sleep node.
982
+ * @param options Configuration for the sleep duration.
983
+ */
984
+ sleep(id: string, options: {
985
+ /** The duration to sleep in milliseconds or a string like '5s', '1m', '2h', '1d'. */duration: number | string;
986
+ }): this;
987
+ /**
988
+ * Creates a wait node that pauses workflow execution for external input.
989
+ * @param id A unique identifier for the wait node.
990
+ * @param options Optional configuration for the wait node.
991
+ */
992
+ wait(id: string, options?: Omit<NodeDefinition, 'id' | 'uses'>): this;
993
+ /**
994
+ * Creates a loop pattern in the workflow graph.
995
+ * @param id A unique identifier for the loop construct.
996
+ * @param options Defines the start, end, and continuation condition of the loop.
997
+ * @param options.startNodeId The ID of the first node inside the loop body.
998
+ * @param options.endNodeId The ID of the last node inside the loop body.
999
+ * @param options.condition An expression that, if true, causes the loop to run again.
1000
+ */
1001
+ loop(id: string, options: {
1002
+ /** The ID of the first node inside the loop body. */startNodeId: string; /** The ID of the last node inside the loop body. */
1003
+ endNodeId: string; /** An expression that, if true, causes the loop to run again. */
1004
+ condition: string;
1005
+ }): this;
1006
+ /**
1007
+ * Sets the preferred entry point for a cycle in non-DAG workflows.
1008
+ * This helps remove ambiguity when the runtime needs to choose a starting node for cycles.
1009
+ * @param nodeId The ID of the node to use as the entry point for cycles containing this node.
1010
+ */
1011
+ setCycleEntryPoint(nodeId: string): this;
1012
+ toBlueprint(): WorkflowBlueprint;
1013
+ getFunctionRegistry(): Map<string, NodeFunction | NodeClass>;
1014
+ toGraphRepresentation(): UIGraph;
1015
+ }
1016
+ /**
1017
+ * Helper function to create a new Flow builder instance.
1018
+ */
1019
+ declare function createFlow<TContext extends Record<string, any> = Record<string, any>, TDependencies extends Record<string, any> = Record<string, any>>(id: string): FlowBuilder<TContext, TDependencies>;
1020
+ //#endregion
1021
+ //#region src/linter.d.ts
1022
+ type LinterIssueCode = 'INVALID_EDGE_SOURCE' | 'INVALID_EDGE_TARGET' | 'MISSING_NODE_IMPLEMENTATION' | 'ORPHAN_NODE' | 'INVALID_BATCH_WORKER_KEY' | 'INVALID_SUBFLOW_BLUEPRINT_ID';
1023
+ interface LinterIssue {
1024
+ code: LinterIssueCode;
1025
+ message: string;
1026
+ nodeId?: string;
1027
+ relatedId?: string;
1028
+ }
1029
+ interface LinterResult {
1030
+ isValid: boolean;
1031
+ issues: LinterIssue[];
1032
+ }
1033
+ /**
1034
+ * Statically analyzes a workflow blueprint against a registry of implementations
1035
+ * to find common errors before runtime.
1036
+ *
1037
+ * @param blueprint The WorkflowBlueprint to analyze.
1038
+ * @param registry A map of node implementations (functions or classes) to check against.
1039
+ * @returns A LinterResult object containing any issues found.
1040
+ */
1041
+ declare function lintBlueprint(blueprint: WorkflowBlueprint, registry: Map<string, NodeFunction | NodeClass> | Record<string, NodeFunction | NodeClass>, blueprints?: Record<string, WorkflowBlueprint>): LinterResult;
1042
+ //#endregion
1043
+ //#region src/logger.d.ts
1044
+ /** A logger implementation that outputs to the console. */
1045
+ declare class ConsoleLogger implements ILogger {
1046
+ debug(message: string, meta?: Record<string, any>): void;
1047
+ info(message: string, meta?: Record<string, any>): void;
1048
+ warn(message: string, meta?: Record<string, any>): void;
1049
+ error(message: string, meta?: Record<string, any>): void;
1050
+ }
1051
+ /** A logger implementation that does nothing (no-op). */
1052
+ declare class NullLogger implements ILogger {
1053
+ debug(_message: string, _meta?: Record<string, any>): void;
1054
+ info(_message: string, _meta?: Record<string, any>): void;
1055
+ warn(_message: string, _meta?: Record<string, any>): void;
1056
+ error(_message: string, _meta?: Record<string, any>): void;
1057
+ }
1058
+ //#endregion
1059
+ //#region src/runtime/adapter.d.ts
1060
+ /**
1061
+ * Defines the contract for an atomic, distributed key-value store required by
1062
+ * the adapter for coordination tasks like fan-in joins and locking.
1063
+ */
1064
+ interface ICoordinationStore {
1065
+ /** Atomically increments a key and returns the new value. Ideal for 'all' joins. */
1066
+ increment: (key: string, ttlSeconds: number) => Promise<number>;
1067
+ /** Sets a key only if it does not already exist. Ideal for 'any' joins (locking). */
1068
+ setIfNotExist: (key: string, value: string, ttlSeconds: number) => Promise<boolean>;
1069
+ /** Extends the TTL of an existing key. Used for heartbeat mechanism in long-running jobs. */
1070
+ extendTTL: (key: string, ttlSeconds: number) => Promise<boolean>;
1071
+ /** Deletes a key. Used for cleanup. */
1072
+ delete: (key: string) => Promise<void>;
1073
+ /** Gets the value of a key. */
1074
+ get: (key: string) => Promise<string | undefined>;
1075
+ }
1076
+ /** Configuration options for constructing a BaseDistributedAdapter. */
1077
+ interface AdapterOptions {
1078
+ runtimeOptions: RuntimeOptions<any>;
1079
+ coordinationStore: ICoordinationStore;
1080
+ eventBus?: IEventBus;
1081
+ }
1082
+ /** The data payload expected for a job in the queue. */
1083
+ interface JobPayload {
1084
+ runId: string;
1085
+ blueprintId: string;
1086
+ nodeId: string;
1087
+ }
1088
/**
 * The base class for all distributed adapters. It handles the technology-agnostic
 * orchestration logic and leaves queue-specific implementation to subclasses.
 */
declare abstract class BaseDistributedAdapter {
  // Shared services available to subclasses.
  protected readonly runtime: FlowRuntime<any, any>;
  protected readonly store: ICoordinationStore;
  protected readonly serializer: ISerializer;
  protected readonly logger: ILogger;
  protected readonly eventBus?: IEventBus;
  constructor(options: AdapterOptions);
  /**
   * Starts the worker, which begins listening for and processing jobs from the queue.
   */
  start(): void;
  /**
   * Creates a technology-specific distributed context for a given workflow run.
   * @param runId The unique ID for the workflow execution.
   */
  protected abstract createContext(runId: string): IAsyncContext<Record<string, any>>;
  /**
   * Sets up the listener for the message queue. The implementation should call the
   * provided `handler` function for each new job received.
   * @param handler The core logic to execute for each job.
   */
  protected abstract processJobs(handler: (job: JobPayload) => Promise<void>): void;
  /**
   * Enqueues a new job onto the message queue.
   * @param job The payload for the job to be enqueued.
   */
  protected abstract enqueueJob(job: JobPayload): Promise<void>;
  /**
   * Publishes the final result of a completed or failed workflow run.
   * @param runId The unique ID of the workflow run.
   * @param result The final status and payload of the workflow.
   */
  protected abstract publishFinalResult(runId: string, result: {
    status: 'completed' | 'failed';
    payload?: WorkflowResult;
    reason?: string;
  }): Promise<void>;
  /**
   * Registers a webhook endpoint for a specific node in a workflow run.
   * @param runId The unique ID of the workflow run.
   * @param nodeId The ID of the node that will wait for the webhook.
   * @returns The URL and event name for the webhook.
   */
  abstract registerWebhookEndpoint(runId: string, nodeId: string): Promise<{
    url: string;
    event: string;
  }>;
  /**
   * Hook called at the start of job processing. Subclasses can override this
   * to perform additional setup (e.g., timestamp tracking for reconciliation).
   */
  protected onJobStart(_runId: string, _blueprintId: string, _nodeId: string): Promise<void>;
  /**
   * The main handler for processing a single job from the queue.
   */
  protected handleJob(job: JobPayload): Promise<void>;
  /**
   * Encapsulates the fan-in join logic using the coordination store.
   */
  protected isReadyForFanIn(runId: string, blueprint: WorkflowBlueprint, targetNodeId: string): Promise<boolean>;
  /**
   * Reconciles the state of a workflow run. It inspects the persisted
   * context to find completed nodes, determines the next set of executable
   * nodes (the frontier), and enqueues jobs for them if they aren't
   * already running. This is the core of the resume functionality.
   *
   * @param runId The unique ID of the workflow execution to reconcile.
   * @returns The set of node IDs that were enqueued for execution.
   */
  reconcile(runId: string): Promise<Set<string>>;
  // Computes the frontier of executable nodes when resuming a run.
  private calculateResumedFrontier;
  /**
   * Writes a poison pill for 'all' join successors and a cancellation pill for 'any' join successors of a failed node to prevent stalling or ambiguous states.
   */
  private writePoisonPillForSuccessors;
}
1168
+ //#endregion
1169
+ //#region src/runtime/orchestrator.d.ts
1170
+ declare class DefaultOrchestrator implements IOrchestrator {
1171
+ run(context: ExecutionContext<any, any>, traverser: GraphTraverser): Promise<WorkflowResult<any>>;
1172
+ }
1173
+ //#endregion
1174
+ //#region src/runtime/orchestrators/replay.d.ts
1175
+ /**
1176
+ * An orchestrator that replays a pre-recorded sequence of workflow events
1177
+ * to reconstruct the workflow state without executing any node logic.
1178
+ *
1179
+ * This enables time-travel debugging by allowing developers to inspect
1180
+ * the exact state of a workflow at any point in its execution history.
1181
+ */
1182
+ declare class ReplayOrchestrator implements IOrchestrator {
1183
+ private events;
1184
+ /**
1185
+ * Creates a new ReplayOrchestrator with a sequence of recorded workflow events.
1186
+ *
1187
+ * @param events - Array of FlowcraftEvent objects representing the recorded workflow execution
1188
+ */
1189
+ constructor(events: FlowcraftEvent[]);
1190
+ /**
1191
+ * Replays the recorded workflow events to reconstruct the workflow state.
1192
+ *
1193
+ * This method filters events for the specific execution, applies each event in sequence
1194
+ * to rebuild the context state, and returns the final reconstructed workflow result.
1195
+ * Replayed executions always have a "completed" status since they reconstruct the final state.
1196
+ *
1197
+ * @param context - The execution context containing state and services
1198
+ * @param _traverser - Graph traverser (unused in replay mode)
1199
+ * @returns Promise resolving to the reconstructed workflow result
1200
+ */
1201
+ run(context: ExecutionContext<any, any>, _traverser: GraphTraverser): Promise<WorkflowResult<any>>;
1202
+ /**
1203
+ * Applies a single workflow event to reconstruct the execution state.
1204
+ *
1205
+ * This method handles different event types by updating the workflow state accordingly,
1206
+ * including node completions, context changes, errors, fallbacks, and workflow control events.
1207
+ *
1208
+ * @param event - The workflow event to apply
1209
+ * @param context - The execution context to update
1210
+ * @param fallbackMap - Map tracking fallback node relationships (fallbackNodeId -> originalNodeId)
1211
+ */
1212
+ private applyEvent;
1213
+ }
1214
+ //#endregion
1215
+ //#region src/runtime/orchestrators/utils.d.ts
1216
+ declare function executeBatch(readyNodes: Array<{
1217
+ nodeId: string;
1218
+ nodeDef: any;
1219
+ }>, blueprint: WorkflowBlueprint, state: WorkflowState<any>, executorFactory: (nodeId: string) => any, runtime: any, maxConcurrency?: number): Promise<Array<{
1220
+ status: 'fulfilled';
1221
+ value: {
1222
+ nodeId: string;
1223
+ executionResult: NodeExecutionResult;
1224
+ };
1225
+ } | {
1226
+ status: 'rejected';
1227
+ reason: {
1228
+ nodeId: string;
1229
+ error: unknown;
1230
+ };
1231
+ }>>;
1232
+ declare function processResults(settledResults: Array<{
1233
+ status: 'fulfilled';
1234
+ value: {
1235
+ nodeId: string;
1236
+ executionResult: NodeExecutionResult;
1237
+ };
1238
+ } | {
1239
+ status: 'rejected';
1240
+ reason: {
1241
+ nodeId: string;
1242
+ error: unknown;
1243
+ };
1244
+ }>, traverser: GraphTraverser, state: WorkflowState<any>, runtime: any, _blueprint: WorkflowBlueprint, executionId?: string): Promise<void>;
1245
+ //#endregion
1246
+ //#region src/sanitizer.d.ts
1247
+ /**
1248
+ * Sanitizes a raw workflow blueprint by removing extra properties
1249
+ * added by UI tools (e.g., position, style) and keeping only the
1250
+ * properties defined in NodeDefinition and EdgeDefinition.
1251
+ */
1252
+ declare function sanitizeBlueprint(raw: any): WorkflowBlueprint;
1253
+ //#endregion
1254
+ //#region src/serializer.d.ts
1255
+ /**
1256
+ * A default serializer using standard JSON.
1257
+ *
1258
+ * @warning This implementation is lossy and does not handle complex data types
1259
+ * like `Date`, `Map`, `Set`, `undefined`, etc. It is recommended to provide a robust
1260
+ * serializer like `superjson` if working with complex data types.
1261
+ */
1262
+ declare class JsonSerializer implements ISerializer {
1263
+ private hasWarned;
1264
+ serialize(data: Record<string, any>): string;
1265
+ deserialize(text: string): Record<string, any>;
1266
+ }
1267
+ //#endregion
1268
+ export { NodeResult as $, checkForCycles as A, IEvaluator as B, Context as C, WorkflowState as Ct, BlueprintAnalysis as D, BaseNode as Dt, createDefaultContainer as E, ServiceTokens as Et, PersistentEventBusAdapter as F, Middleware as G, ILogger as H, ContextImplementation as I, NodeContext as J, NodeClass as K, EdgeDefinition as L, generateMermaidForRun as M, IEventStore as N, Cycles as O, isNodeClass as Ot, InMemoryEventStore as P, NodeRegistry as Q, FlowcraftEvent as R, AsyncContextView as S, NodeExecutorConfig as St, ContainerOptions as T, ServiceToken as Tt, ISerializer as U, IEventBus as V, ISyncContext as W, NodeFunction as X, NodeDefinition as Y, NodeImplementation as Z, FlowBuilder as _, ClassNodeExecutor as _t, ReplayOrchestrator as a, WorkflowBlueprint as at, UnsafeEvaluator as b, NodeExecutionResult as bt, BaseDistributedAdapter as c, WorkflowResult as ct, ConsoleLogger as d, ExecutionServices as dt, PatchOperation as et, NullLogger as f, IOrchestrator as ft, lintBlueprint as g, ReadyNode as gt, LinterResult as h, GraphTraverser as ht, processResults as i, UIGraph as it, generateMermaid as j, analyzeBlueprint as k, FlowcraftError as kt, ICoordinationStore as l, WorkflowStatus as lt, LinterIssueCode as m, NodeExecutorFactory as mt, sanitizeBlueprint as n, RuntimeOptions as nt, DefaultOrchestrator as o, WorkflowBlueprintMetadata as ot, LinterIssue as p, IRuntime as pt, NodeConfig as q, executeBatch as r, SourceLocation as rt, AdapterOptions as s, WorkflowError as st, JsonSerializer as t, RuntimeDependencies as tt, JobPayload as u, FlowRuntime as ut, createFlow as v, ExecutionStrategy as vt, TrackedAsyncContext as w, DIContainer as wt, createErrorMapper as x, NodeExecutor as xt, PropertyEvaluator as y, FunctionNodeExecutor as yt, IAsyncContext as z };
1269
+ //# sourceMappingURL=index-D3dyjW2G.d.mts.map