flowcraft 1.0.0 → 2.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (201) hide show
  1. package/README.md +37 -134
  2. package/dist/analysis.d.ts +43 -0
  3. package/dist/analysis.js +3 -0
  4. package/dist/chunk-4PELJWF7.js +29 -0
  5. package/dist/chunk-4PELJWF7.js.map +1 -0
  6. package/dist/chunk-55J6XMHW.js +3 -0
  7. package/dist/{chunk-7XUN3OQT.js.map → chunk-55J6XMHW.js.map} +1 -1
  8. package/dist/chunk-5EHIPX23.js +202 -0
  9. package/dist/chunk-5EHIPX23.js.map +1 -0
  10. package/dist/chunk-5QMPFUKA.js +40 -0
  11. package/dist/chunk-5QMPFUKA.js.map +1 -0
  12. package/dist/chunk-5ZWYSKMH.js +147 -0
  13. package/dist/chunk-5ZWYSKMH.js.map +1 -0
  14. package/dist/chunk-5ZXV3R5D.js +28 -0
  15. package/dist/chunk-5ZXV3R5D.js.map +1 -0
  16. package/dist/chunk-CO5BTPKI.js +410 -0
  17. package/dist/chunk-CO5BTPKI.js.map +1 -0
  18. package/dist/chunk-CSZ6EOWG.js +61 -0
  19. package/dist/chunk-CSZ6EOWG.js.map +1 -0
  20. package/dist/chunk-CYHZ2YVH.js +24 -0
  21. package/dist/chunk-CYHZ2YVH.js.map +1 -0
  22. package/dist/chunk-DSYAC4WB.js +27 -0
  23. package/dist/chunk-DSYAC4WB.js.map +1 -0
  24. package/dist/chunk-HMR2GEGE.js +3 -0
  25. package/dist/{chunk-F2RSES6P.js.map → chunk-HMR2GEGE.js.map} +1 -1
  26. package/dist/chunk-HN72TZY5.js +110 -0
  27. package/dist/chunk-HN72TZY5.js.map +1 -0
  28. package/dist/chunk-KWQHFT7E.js +49 -0
  29. package/dist/chunk-KWQHFT7E.js.map +1 -0
  30. package/dist/chunk-PH2IYZHV.js +48 -0
  31. package/dist/chunk-PH2IYZHV.js.map +1 -0
  32. package/dist/chunk-QRMUKDSP.js +141 -0
  33. package/dist/chunk-QRMUKDSP.js.map +1 -0
  34. package/dist/chunk-UETC63DP.js +65 -0
  35. package/dist/chunk-UETC63DP.js.map +1 -0
  36. package/dist/chunk-UMXW3TCY.js +165 -0
  37. package/dist/chunk-UMXW3TCY.js.map +1 -0
  38. package/dist/context.d.ts +23 -105
  39. package/dist/context.js +1 -1
  40. package/dist/errors.d.ts +15 -31
  41. package/dist/errors.js +1 -1
  42. package/dist/evaluator.d.ts +30 -0
  43. package/dist/evaluator.js +3 -0
  44. package/dist/evaluator.js.map +1 -0
  45. package/dist/flow.d.ts +55 -0
  46. package/dist/flow.js +4 -0
  47. package/dist/flow.js.map +1 -0
  48. package/dist/index.d.ts +15 -16
  49. package/dist/index.js +17 -25
  50. package/dist/linter.d.ts +24 -0
  51. package/dist/linter.js +4 -0
  52. package/dist/linter.js.map +1 -0
  53. package/dist/logger.d.ts +15 -40
  54. package/dist/logger.js +1 -1
  55. package/dist/node.d.ts +1 -0
  56. package/dist/node.js +3 -0
  57. package/dist/node.js.map +1 -0
  58. package/dist/runtime/adapter.d.ts +94 -0
  59. package/dist/runtime/adapter.js +15 -0
  60. package/dist/runtime/adapter.js.map +1 -0
  61. package/dist/runtime/executors.d.ts +26 -0
  62. package/dist/runtime/executors.js +4 -0
  63. package/dist/runtime/executors.js.map +1 -0
  64. package/dist/runtime/index.d.ts +7 -0
  65. package/dist/runtime/index.js +16 -0
  66. package/dist/runtime/runtime.d.ts +34 -0
  67. package/dist/runtime/runtime.js +14 -0
  68. package/dist/runtime/runtime.js.map +1 -0
  69. package/dist/runtime/state.d.ts +21 -0
  70. package/dist/runtime/state.js +4 -0
  71. package/dist/runtime/state.js.map +1 -0
  72. package/dist/runtime/traverser.d.ts +25 -0
  73. package/dist/runtime/traverser.js +5 -0
  74. package/dist/runtime/traverser.js.map +1 -0
  75. package/dist/runtime/types.d.ts +15 -0
  76. package/dist/runtime/types.js +3 -0
  77. package/dist/sanitizer.d.ts +10 -0
  78. package/dist/sanitizer.js +3 -0
  79. package/dist/{utils/sanitize.js.map → sanitizer.js.map} +1 -1
  80. package/dist/serializer.d.ts +16 -0
  81. package/dist/serializer.js +3 -0
  82. package/dist/serializer.js.map +1 -0
  83. package/dist/types-lG3xCzp_.d.ts +206 -0
  84. package/dist/types.d.ts +1 -3
  85. package/dist/types.js +1 -1
  86. package/package.json +10 -21
  87. package/LICENSE +0 -21
  88. package/dist/builder/graph/graph.d.ts +0 -57
  89. package/dist/builder/graph/graph.js +0 -21
  90. package/dist/builder/graph/graph.js.map +0 -1
  91. package/dist/builder/graph/index.d.ts +0 -8
  92. package/dist/builder/graph/index.js +0 -23
  93. package/dist/builder/graph/internal-nodes.d.ts +0 -59
  94. package/dist/builder/graph/internal-nodes.js +0 -20
  95. package/dist/builder/graph/internal-nodes.js.map +0 -1
  96. package/dist/builder/graph/runner.d.ts +0 -51
  97. package/dist/builder/graph/runner.js +0 -21
  98. package/dist/builder/graph/runner.js.map +0 -1
  99. package/dist/builder/graph/types.d.ts +0 -3
  100. package/dist/builder/graph/types.js +0 -3
  101. package/dist/builder/index.d.ts +0 -8
  102. package/dist/builder/index.js +0 -24
  103. package/dist/builder/index.js.map +0 -1
  104. package/dist/builder/patterns.d.ts +0 -136
  105. package/dist/builder/patterns.js +0 -19
  106. package/dist/builder/patterns.js.map +0 -1
  107. package/dist/chunk-3YMBNZ77.js +0 -441
  108. package/dist/chunk-3YMBNZ77.js.map +0 -1
  109. package/dist/chunk-64DNBF5W.js +0 -36
  110. package/dist/chunk-64DNBF5W.js.map +0 -1
  111. package/dist/chunk-6QCXIRLA.js +0 -18
  112. package/dist/chunk-6QCXIRLA.js.map +0 -1
  113. package/dist/chunk-7XUN3OQT.js +0 -3
  114. package/dist/chunk-AOHBHYF6.js +0 -7
  115. package/dist/chunk-AOHBHYF6.js.map +0 -1
  116. package/dist/chunk-BRFMFLR6.js +0 -85
  117. package/dist/chunk-BRFMFLR6.js.map +0 -1
  118. package/dist/chunk-ELEHMJPM.js +0 -13
  119. package/dist/chunk-ELEHMJPM.js.map +0 -1
  120. package/dist/chunk-F2RSES6P.js +0 -3
  121. package/dist/chunk-F6C6J7HK.js +0 -3
  122. package/dist/chunk-F6C6J7HK.js.map +0 -1
  123. package/dist/chunk-GMKJ34T2.js +0 -3
  124. package/dist/chunk-GMKJ34T2.js.map +0 -1
  125. package/dist/chunk-HEO3XL4Z.js +0 -328
  126. package/dist/chunk-HEO3XL4Z.js.map +0 -1
  127. package/dist/chunk-IIKTTIW5.js +0 -56
  128. package/dist/chunk-IIKTTIW5.js.map +0 -1
  129. package/dist/chunk-KOBEU2EM.js +0 -3
  130. package/dist/chunk-KOBEU2EM.js.map +0 -1
  131. package/dist/chunk-L5PK5VL2.js +0 -178
  132. package/dist/chunk-L5PK5VL2.js.map +0 -1
  133. package/dist/chunk-P3RPDZHO.js +0 -36
  134. package/dist/chunk-P3RPDZHO.js.map +0 -1
  135. package/dist/chunk-PNWOW52F.js +0 -19
  136. package/dist/chunk-PNWOW52F.js.map +0 -1
  137. package/dist/chunk-R27FIYR5.js +0 -62
  138. package/dist/chunk-R27FIYR5.js.map +0 -1
  139. package/dist/chunk-S4WFNGQG.js +0 -17
  140. package/dist/chunk-S4WFNGQG.js.map +0 -1
  141. package/dist/chunk-TS3M7MWA.js +0 -3
  142. package/dist/chunk-TS3M7MWA.js.map +0 -1
  143. package/dist/chunk-UY4PNPBX.js +0 -156
  144. package/dist/chunk-UY4PNPBX.js.map +0 -1
  145. package/dist/chunk-VMH2LRM6.js +0 -114
  146. package/dist/chunk-VMH2LRM6.js.map +0 -1
  147. package/dist/chunk-VZDHIOCH.js +0 -76
  148. package/dist/chunk-VZDHIOCH.js.map +0 -1
  149. package/dist/chunk-WGVHM7DU.js +0 -66
  150. package/dist/chunk-WGVHM7DU.js.map +0 -1
  151. package/dist/chunk-WR5PDOPP.js +0 -91
  152. package/dist/chunk-WR5PDOPP.js.map +0 -1
  153. package/dist/chunk-YR433ZDA.js +0 -20
  154. package/dist/chunk-YR433ZDA.js.map +0 -1
  155. package/dist/executors/in-memory.d.ts +0 -39
  156. package/dist/executors/in-memory.js +0 -6
  157. package/dist/executors/in-memory.js.map +0 -1
  158. package/dist/executors/types.d.ts +0 -3
  159. package/dist/executors/types.js +0 -3
  160. package/dist/executors/types.js.map +0 -1
  161. package/dist/functions.d.ts +0 -88
  162. package/dist/functions.js +0 -21
  163. package/dist/functions.js.map +0 -1
  164. package/dist/types-U76Ukj96.d.ts +0 -609
  165. package/dist/utils/analysis.d.ts +0 -75
  166. package/dist/utils/analysis.js +0 -3
  167. package/dist/utils/index.d.ts +0 -8
  168. package/dist/utils/index.js +0 -10
  169. package/dist/utils/index.js.map +0 -1
  170. package/dist/utils/mermaid.d.ts +0 -46
  171. package/dist/utils/mermaid.js +0 -4
  172. package/dist/utils/mermaid.js.map +0 -1
  173. package/dist/utils/middleware.d.ts +0 -11
  174. package/dist/utils/middleware.js +0 -3
  175. package/dist/utils/middleware.js.map +0 -1
  176. package/dist/utils/sanitize.d.ts +0 -19
  177. package/dist/utils/sanitize.js +0 -3
  178. package/dist/utils/sleep.d.ts +0 -9
  179. package/dist/utils/sleep.js +0 -4
  180. package/dist/utils/sleep.js.map +0 -1
  181. package/dist/workflow/AbstractNode.d.ts +0 -3
  182. package/dist/workflow/AbstractNode.js +0 -4
  183. package/dist/workflow/AbstractNode.js.map +0 -1
  184. package/dist/workflow/Flow.d.ts +0 -3
  185. package/dist/workflow/Flow.js +0 -16
  186. package/dist/workflow/Flow.js.map +0 -1
  187. package/dist/workflow/Node.d.ts +0 -3
  188. package/dist/workflow/Node.js +0 -15
  189. package/dist/workflow/Node.js.map +0 -1
  190. package/dist/workflow/index.d.ts +0 -4
  191. package/dist/workflow/index.js +0 -18
  192. package/dist/workflow/index.js.map +0 -1
  193. package/dist/workflow/node-patterns.d.ts +0 -55
  194. package/dist/workflow/node-patterns.js +0 -16
  195. package/dist/workflow/node-patterns.js.map +0 -1
  196. package/dist/workflow/registry.d.ts +0 -17
  197. package/dist/workflow/registry.js +0 -3
  198. package/dist/workflow/registry.js.map +0 -1
  199. /package/dist/{utils/analysis.js.map → analysis.js.map} +0 -0
  200. /package/dist/{builder/graph → runtime}/index.js.map +0 -0
  201. /package/dist/{builder/graph → runtime}/types.js.map +0 -0
@@ -1,136 +0,0 @@
1
- import { Context } from '../context.js';
2
- import { NodeFunction } from '../functions.js';
3
- import { F as Flow, P as Params, A as AbstractNode, b as NodeArgs } from '../types-U76Ukj96.js';
4
- import '../logger.js';
5
-
6
- /**
7
- * A `Flow` that creates a linear workflow from a sequence of nodes,
8
- * automatically chaining them in order.
9
- */
10
- declare class SequenceFlow<PrepRes = any, ExecRes = any, TParams extends Params = Params, TContext extends Context = Context> extends Flow<PrepRes, ExecRes, TParams, TContext> {
11
- /**
12
- * @param nodes A sequence of `Node` or `Flow` instances to be executed in order.
13
- */
14
- constructor(...nodes: AbstractNode<any, any, TContext>[]);
15
- }
16
- /**
17
- * A `Flow` that executes a collection of different nodes concurrently.
18
- * This is the core of the "fan-out, fan-in" pattern for structural parallelism.
19
- * After all parallel branches complete, the flow can proceed to a single successor.
20
- */
21
- declare class ParallelFlow<TContext extends Context = Context> extends Flow<void, void, Params, TContext> {
22
- protected nodesToRun: AbstractNode<any, any, TContext>[];
23
- /** A tag to reliably identify this node type in the visualizer. */
24
- readonly isParallelContainer = true;
25
- /**
26
- * @param nodesToRun The array of nodes to execute concurrently.
27
- */
28
- constructor(nodesToRun?: AbstractNode<any, any, TContext>[]);
29
- /**
30
- * Orchestrates the parallel execution of all nodes.
31
- * @internal
32
- */
33
- exec({ ctx, params, signal, logger, executor, visitedInParallel }: NodeArgs<void, void, Params, TContext>): Promise<void>;
34
- }
35
- /**
36
- * An abstract `Flow` that processes a collection of items sequentially, one by one.
37
- * Subclasses must implement the `prep` method to provide the items and the
38
- * `nodeToRun` property to define the processing logic for each item.
39
- */
40
- declare abstract class BatchFlow<T = any, TContext extends Context = Context> extends Flow<Iterable<T>, null, Params, TContext> {
41
- /**
42
- * The `Node` instance that will be executed for each item in the batch.
43
- * This must be implemented by any subclass.
44
- */
45
- protected abstract nodeToRun: AbstractNode<any, any, TContext>;
46
- constructor();
47
- /**
48
- * (Abstract) Prepares the list of items to be processed.
49
- * This method is called once before the batch processing begins.
50
- * @param _args The arguments for the node, including `ctx` and `params`.
51
- * @returns An array or iterable of parameter objects, one for each item.
52
- * The `nodeToRun` will be executed once for each of these objects.
53
- */
54
- prep(_args: NodeArgs<void, void, Params, TContext>): Promise<Iterable<any>>;
55
- /**
56
- * Orchestrates the sequential execution of `nodeToRun` for each item.
57
- * @internal
58
- */
59
- exec(args: NodeArgs<void, void, Params, TContext>): Promise<null>;
60
- }
61
- /**
62
- * An abstract `Flow` that processes a collection of items concurrently.
63
- * Subclasses must implement the `prep` method to provide the items and the
64
- * `nodeToRun` property to define the processing logic for each item.
65
- * This provides a significant performance boost for I/O-bound tasks.
66
- */
67
- declare abstract class ParallelBatchFlow<T = any, TContext extends Context = Context> extends Flow<Iterable<T>, PromiseSettledResult<any>[], Params, TContext> {
68
- /**
69
- * The `Node` instance that will be executed concurrently for each item in the batch.
70
- * This must be implemented by any subclass.
71
- */
72
- protected abstract nodeToRun: AbstractNode<any, any, TContext>;
73
- constructor();
74
- /**
75
- * (Abstract) Prepares the list of items to be processed.
76
- * This method is called once before the batch processing begins.
77
- * @param _args The arguments for the node, including `ctx` and `params`.
78
- * @returns An array or iterable of parameter objects, one for each item.
79
- * The `nodeToRun` will be executed concurrently for each of these objects.
80
- */
81
- prep(_args: NodeArgs<void, void, Params, TContext>): Promise<Iterable<any>>;
82
- /**
83
- * Orchestrates the parallel execution of `nodeToRun` for each item.
84
- * @internal
85
- */
86
- exec(args: NodeArgs<any, void, Params, TContext>): Promise<PromiseSettledResult<any>[]>;
87
- }
88
- /**
89
- * Creates a flow that applies a mapping function to each item in a collection in parallel
90
- * and returns a new array containing the results.
91
- *
92
- * @example
93
- * const numbers = [1, 2, 3];
94
- * const double = (n: number) => n * 2;
95
- * const processingFlow = mapCollection(numbers, double);
96
- * // When run, processingFlow's result will be [2, 4, 6]
97
- *
98
- * @param items The initial array of items of type `T`.
99
- * @param fn An async or sync function that transforms an item from type `T` to type `U`.
100
- * @returns A `Flow` instance that, when run, will output an array of type `U[]`.
101
- */
102
- declare function mapCollection<T, U, TContext extends Context = Context>(items: T[], fn: NodeFunction<T, U>): Flow<void, U[], Params, TContext>;
103
- /**
104
- * Creates a flow that filters a collection based on a predicate function,
105
- * returning a new array containing only the items that pass the predicate.
106
- * The predicate is applied to all items concurrently.
107
- *
108
- * @example
109
- * const users = [{ id: 1, admin: true }, { id: 2, admin: false }];
110
- * const isAdmin = async (user: { admin: boolean }) => user.admin;
111
- * const adminFilterFlow = filterCollection(users, isAdmin);
112
- * // When run, the result will be [{ id: 1, admin: true }]
113
- *
114
- * @param items The initial array of items of type `T`.
115
- * @param predicate An async or sync function that returns `true` or `false` for an item.
116
- * @returns A `Flow` instance that, when run, will output a filtered array of type `T[]`.
117
- */
118
- declare function filterCollection<T, TContext extends Context = Context>(items: T[], predicate: (item: T) => boolean | Promise<boolean>): Flow<void, T[], Params, TContext>;
119
- /**
120
- * Creates a flow that reduces a collection to a single value by executing a
121
- * reducer function sequentially for each item, similar to `Array.prototype.reduce()`.
122
- *
123
- * @example
124
- * const numbers = [1, 2, 3, 4];
125
- * const sumReducer = (acc: number, val: number) => acc + val;
126
- * const sumFlow = reduceCollection(numbers, sumReducer, 0);
127
- * // When run, the result will be 10.
128
- *
129
- * @param items The array of items to be reduced.
130
- * @param reducer An async or sync function that processes the accumulator and the current item.
131
- * @param initialValue The initial value for the accumulator.
132
- * @returns A `Flow` instance that, when run, will output the final accumulated value of type `U`.
133
- */
134
- declare function reduceCollection<T, U, TContext extends Context = Context>(items: T[], reducer: (accumulator: U, item: T) => U | Promise<U>, initialValue: U): Flow<void, U, Params, TContext>;
135
-
136
- export { BatchFlow, ParallelBatchFlow, ParallelFlow, SequenceFlow, filterCollection, mapCollection, reduceCollection };
@@ -1,19 +0,0 @@
1
- export { BatchFlow, ParallelBatchFlow, ParallelFlow, SequenceFlow, filterCollection, mapCollection, reduceCollection } from '../chunk-L5PK5VL2.js';
2
- import '../chunk-F6C6J7HK.js';
3
- import '../chunk-UY4PNPBX.js';
4
- import '../chunk-ELEHMJPM.js';
5
- import '../chunk-HEO3XL4Z.js';
6
- import '../chunk-7XUN3OQT.js';
7
- import '../chunk-WR5PDOPP.js';
8
- import '../chunk-VMH2LRM6.js';
9
- import '../chunk-YR433ZDA.js';
10
- import '../chunk-6QCXIRLA.js';
11
- import '../chunk-BRFMFLR6.js';
12
- import '../chunk-PNWOW52F.js';
13
- import '../chunk-VZDHIOCH.js';
14
- import '../chunk-S4WFNGQG.js';
15
- import '../chunk-64DNBF5W.js';
16
- import '../chunk-IIKTTIW5.js';
17
- import '../chunk-AOHBHYF6.js';
18
- //# sourceMappingURL=patterns.js.map
19
- //# sourceMappingURL=patterns.js.map
@@ -1 +0,0 @@
1
- {"version":3,"sources":[],"names":[],"mappings":"","file":"patterns.js"}
@@ -1,441 +0,0 @@
1
- import { InputMappingNode, OutputMappingNode, SubWorkflowContainerNode, ConditionalJoinNode, ParallelBranchContainer } from './chunk-WGVHM7DU.js';
2
- import { Flow } from './chunk-UY4PNPBX.js';
3
- import { generateMermaidGraph } from './chunk-VMH2LRM6.js';
4
- import { InMemoryExecutor } from './chunk-BRFMFLR6.js';
5
- import { NullLogger } from './chunk-IIKTTIW5.js';
6
- import { DEFAULT_ACTION } from './chunk-AOHBHYF6.js';
7
-
8
- // src/builder/graph/graph.ts
9
- function createNodeRegistry(registry) {
10
- const finalRegistry = /* @__PURE__ */ new Map();
11
- for (const key in registry) {
12
- if (Object.prototype.hasOwnProperty.call(registry, key))
13
- finalRegistry.set(key, registry[key]);
14
- }
15
- if (!finalRegistry.has("__internal_input_mapper__"))
16
- finalRegistry.set("__internal_input_mapper__", InputMappingNode);
17
- if (!finalRegistry.has("__internal_output_mapper__"))
18
- finalRegistry.set("__internal_output_mapper__", OutputMappingNode);
19
- if (!finalRegistry.has("__internal_sub_workflow_container__"))
20
- finalRegistry.set("__internal_sub_workflow_container__", SubWorkflowContainerNode);
21
- if (!finalRegistry.has("__internal_conditional_join__"))
22
- finalRegistry.set("__internal_conditional_join__", ConditionalJoinNode);
23
- if (!finalRegistry.has("__internal_parallel_container__"))
24
- finalRegistry.set("__internal_parallel_container__", ParallelBranchContainer);
25
- return finalRegistry;
26
- }
27
- var GraphBuilder = class {
28
- // handle both cases
29
- constructor(registry, nodeOptionsContext = {}, options = {}, logger = new NullLogger()) {
30
- this.nodeOptionsContext = nodeOptionsContext;
31
- this.logger = logger;
32
- this.registry = registry instanceof Map ? registry : createNodeRegistry(registry);
33
- this.subWorkflowNodeTypes = options.subWorkflowNodeTypes ?? [];
34
- this.conditionalNodeTypes = options.conditionalNodeTypes ?? [];
35
- this.subWorkflowResolver = options.subWorkflowResolver;
36
- }
37
- registry;
38
- subWorkflowNodeTypes;
39
- conditionalNodeTypes;
40
- subWorkflowResolver;
41
- logger;
42
- _logMermaid(flow) {
43
- if (!(this.logger instanceof NullLogger)) {
44
- this.logger.info("[GraphBuilder] Flattened Graph");
45
- const mermaid = generateMermaidGraph(flow);
46
- mermaid.split("\n").forEach((line) => this.logger.info(line));
47
- }
48
- }
49
- _flattenGraph(graph, idPrefix = "") {
50
- const finalNodes = [];
51
- const finalEdges = [];
52
- const localNodeIds = new Set(graph.nodes.map((n) => n.id));
53
- for (const node of graph.nodes) {
54
- const prefixedNodeId = `${idPrefix}${node.id}`;
55
- const sanitizedId = prefixedNodeId.replace(/:/g, "_").replace(/\W/g, "");
56
- const isRegisteredSubWorkflow = this.subWorkflowNodeTypes.includes(node.type);
57
- const hasWorkflowId = node.data && "workflowId" in node.data;
58
- const newNodeData = JSON.parse(JSON.stringify(node.data || {}));
59
- if (newNodeData.inputs) {
60
- const inputs = newNodeData.inputs;
61
- for (const [templateKey, sourcePathOrPaths] of Object.entries(inputs)) {
62
- const sourcePaths = Array.isArray(sourcePathOrPaths) ? sourcePathOrPaths : [sourcePathOrPaths];
63
- const newSourcePaths = sourcePaths.map((sourcePath) => {
64
- if (localNodeIds.has(sourcePath))
65
- return `${idPrefix}${sourcePath}`;
66
- return sourcePath;
67
- });
68
- inputs[templateKey] = Array.isArray(sourcePathOrPaths) ? newSourcePaths : newSourcePaths[0];
69
- }
70
- }
71
- if (isRegisteredSubWorkflow) {
72
- if (!this.subWorkflowResolver)
73
- throw new Error("GraphBuilder: `subWorkflowResolver` must be provided in options to handle sub-workflows.");
74
- const subWorkflowData = node.data;
75
- const subWorkflowId = subWorkflowData.workflowId;
76
- const subGraph = this.subWorkflowResolver.getGraph(subWorkflowId);
77
- if (!subGraph)
78
- throw new Error(`Sub-workflow with ID ${subWorkflowId} not found in resolver.`);
79
- finalNodes.push({
80
- id: prefixedNodeId,
81
- type: "__internal_sub_workflow_container__",
82
- data: { ...newNodeData, originalId: node.id }
83
- });
84
- const inputMapperId = `${sanitizedId}_input_mapper`;
85
- const outputMapperId = `${sanitizedId}_output_mapper`;
86
- finalNodes.push({
87
- id: inputMapperId,
88
- type: "__internal_input_mapper__",
89
- data: { ...subWorkflowData.inputs || {}, originalId: node.id }
90
- });
91
- finalNodes.push({
92
- id: outputMapperId,
93
- type: "__internal_output_mapper__",
94
- data: { ...subWorkflowData.outputs || {}, originalId: node.id }
95
- });
96
- const inlinedSubGraph = this._flattenGraph(subGraph, `${prefixedNodeId}:`);
97
- const augmentedInlinedNodes = inlinedSubGraph.nodes.map((n) => ({
98
- ...n,
99
- data: { ...n.data || {}, isSubWorkflow: true }
100
- }));
101
- finalNodes.push(...augmentedInlinedNodes);
102
- finalEdges.push(...inlinedSubGraph.edges);
103
- finalEdges.push({ source: prefixedNodeId, target: inputMapperId });
104
- const subGraphStartIds = inlinedSubGraph.nodes.map((n) => n.id).filter((id) => !inlinedSubGraph.edges.some((e) => e.target === id));
105
- for (const startId of subGraphStartIds)
106
- finalEdges.push({ source: inputMapperId, target: startId });
107
- const subGraphTerminalIds = inlinedSubGraph.nodes.map((n) => n.id).filter((id) => !inlinedSubGraph.edges.some((e) => e.source === id));
108
- for (const terminalId of subGraphTerminalIds)
109
- finalEdges.push({ source: terminalId, target: outputMapperId });
110
- } else if (hasWorkflowId) {
111
- throw new Error(`Node with ID '${node.id}' has a 'workflowId' but its type '${node.type}' is not in 'subWorkflowNodeTypes'.`);
112
- } else {
113
- finalNodes.push({ ...node, id: prefixedNodeId, data: { ...newNodeData, originalId: node.id } });
114
- }
115
- }
116
- for (const edge of graph.edges) {
117
- const sourceNode = graph.nodes.find((n) => n.id === edge.source);
118
- const prefixedSourceId = `${idPrefix}${edge.source}`;
119
- const prefixedTargetId = `${idPrefix}${edge.target}`;
120
- const isSourceSub = this.subWorkflowNodeTypes.includes(sourceNode.type);
121
- const sanitizedSourceId = prefixedSourceId.replace(/:/g, "_").replace(/\W/g, "");
122
- if (isSourceSub)
123
- finalEdges.push({ ...edge, source: `${sanitizedSourceId}_output_mapper`, target: prefixedTargetId });
124
- else
125
- finalEdges.push({ ...edge, source: prefixedSourceId, target: prefixedTargetId });
126
- }
127
- return { nodes: finalNodes, edges: finalEdges };
128
- }
129
- build(graph, log) {
130
- const { blueprint } = this.buildBlueprint(graph);
131
- const executor = new BlueprintExecutor(blueprint, this.registry, this.nodeOptionsContext);
132
- if (log)
133
- this._logMermaid(executor.flow);
134
- return {
135
- flow: executor.flow,
136
- nodeMap: executor.nodeMap,
137
- predecessorCountMap: new Map(Object.entries(blueprint.predecessorCountMap)),
138
- predecessorIdMap: new Map(Object.entries(blueprint.originalPredecessorIdMap).map(([k, v]) => [k, v])),
139
- originalPredecessorIdMap: new Map(Object.entries(blueprint.originalPredecessorIdMap).map(([k, v]) => [k, v]))
140
- };
141
- }
142
- // single implementation that handles both cases
143
- buildBlueprint(graph) {
144
- const flatGraph = this._flattenGraph(graph);
145
- const conditionalNodes = flatGraph.nodes.filter((n) => this.conditionalNodeTypes.includes(n.type));
146
- for (const conditionalNode of conditionalNodes) {
147
- const branches = flatGraph.edges.filter((e) => e.source === conditionalNode.id).map((e) => e.target);
148
- if (branches.length > 1) {
149
- const convergenceTargetId = this._findConditionalConvergence(branches, flatGraph);
150
- if (convergenceTargetId) {
151
- const joinNodeId = `${conditionalNode.id}__conditional_join`;
152
- if (!flatGraph.nodes.some((n) => n.id === joinNodeId)) {
153
- this.logger.debug(`[GraphBuilder] Inserting conditional join node for '${conditionalNode.id}' converging at '${convergenceTargetId}'`);
154
- flatGraph.nodes.push({ id: joinNodeId, type: "__internal_conditional_join__", data: {} });
155
- const branchTerminalIds = this._findBranchTerminals(branches, convergenceTargetId, flatGraph);
156
- const terminalsToReroute = new Set(branchTerminalIds);
157
- if (flatGraph.edges.some((e) => e.source === conditionalNode.id && e.target === convergenceTargetId)) {
158
- terminalsToReroute.add(conditionalNode.id);
159
- }
160
- for (const terminalId of terminalsToReroute) {
161
- const edgesToUpdate = flatGraph.edges.filter((e) => e.source === terminalId && e.target === convergenceTargetId);
162
- for (const edge of edgesToUpdate) {
163
- edge.target = joinNodeId;
164
- }
165
- }
166
- flatGraph.edges.push({ source: joinNodeId, target: convergenceTargetId });
167
- }
168
- }
169
- }
170
- }
171
- const edgeGroups = flatGraph.edges.reduce((acc, edge) => {
172
- if (!acc.has(edge.source))
173
- acc.set(edge.source, /* @__PURE__ */ new Map());
174
- const sourceActions = acc.get(edge.source);
175
- const action = edge.action || DEFAULT_ACTION;
176
- if (!sourceActions.has(action))
177
- sourceActions.set(action, []);
178
- sourceActions.get(action).push(edge.target);
179
- return acc;
180
- }, /* @__PURE__ */ new Map());
181
- const nodesToProcess = [...flatGraph.nodes];
182
- for (const sourceNode of nodesToProcess) {
183
- const actions = edgeGroups.get(sourceNode.id);
184
- if (!actions)
185
- continue;
186
- for (const [action, successors] of actions.entries()) {
187
- if (successors.length > 1 && !this.conditionalNodeTypes.includes(sourceNode.type)) {
188
- const parallelNodeId = `${sourceNode.id}__parallel_container`;
189
- if (!flatGraph.nodes.some((n) => n.id === parallelNodeId)) {
190
- flatGraph.nodes.push({ id: parallelNodeId, type: "__internal_parallel_container__", data: {} });
191
- const edgesToReplace = flatGraph.edges.filter((e) => e.source === sourceNode.id && (e.action || DEFAULT_ACTION) === action);
192
- flatGraph.edges = flatGraph.edges.filter((e) => !edgesToReplace.includes(e));
193
- flatGraph.edges.push({ source: sourceNode.id, target: parallelNodeId, action: action === DEFAULT_ACTION ? void 0 : String(action) });
194
- successors.forEach((succId) => flatGraph.edges.push({ source: parallelNodeId, target: succId }));
195
- }
196
- }
197
- }
198
- }
199
- const { predecessorIdMap, originalPredecessorIdMap } = this._createPredecessorIdMaps(flatGraph);
200
- const predecessorCountMap = /* @__PURE__ */ new Map();
201
- for (const [key, val] of predecessorIdMap.entries()) {
202
- predecessorCountMap.set(key, val.length);
203
- }
204
- const allNodeIds = new Set(flatGraph.nodes.map((n) => n.id));
205
- for (const id of allNodeIds) {
206
- if (!predecessorCountMap.has(id))
207
- predecessorCountMap.set(id, 0);
208
- }
209
- const allTargetIds = new Set(flatGraph.edges.map((e) => e.target));
210
- const startNodeIds = [...allNodeIds].filter((id) => !allTargetIds.has(id));
211
- let startNodeId;
212
- if (startNodeIds.length === 0 && allNodeIds.size > 0)
213
- throw new Error("GraphBuilder: This graph has a cycle and no clear start node.");
214
- if (startNodeIds.length === 1) {
215
- startNodeId = startNodeIds[0];
216
- } else {
217
- startNodeId = "__root_parallel_start";
218
- if (!flatGraph.nodes.some((n) => n.id === startNodeId)) {
219
- flatGraph.nodes.push({ id: startNodeId, type: "__internal_parallel_container__", data: {} });
220
- for (const id of startNodeIds)
221
- flatGraph.edges.push({ source: startNodeId, target: id });
222
- }
223
- }
224
- const blueprint = {
225
- nodes: flatGraph.nodes,
226
- edges: flatGraph.edges,
227
- startNodeId,
228
- predecessorCountMap: Object.fromEntries(predecessorCountMap.entries()),
229
- originalPredecessorIdMap: Object.fromEntries(originalPredecessorIdMap.entries())
230
- };
231
- return { blueprint };
232
- }
233
- _createPredecessorIdMaps(graph) {
234
- const predecessorIdMap = /* @__PURE__ */ new Map();
235
- for (const edge of graph.edges) {
236
- if (!predecessorIdMap.has(edge.target))
237
- predecessorIdMap.set(edge.target, []);
238
- predecessorIdMap.get(edge.target).push(edge.source);
239
- }
240
- const originalPredecessorIdMap = /* @__PURE__ */ new Map();
241
- const nodeDataMap = new Map(graph.nodes.map((n) => [n.id, n]));
242
- const memo = /* @__PURE__ */ new Map();
243
- const findOriginalProducers = (nodeId) => {
244
- if (memo.has(nodeId))
245
- return memo.get(nodeId);
246
- const nodeData = nodeDataMap.get(nodeId);
247
- if (!nodeData)
248
- return [];
249
- const selfType = nodeData.type;
250
- const selfOriginalId = nodeData.data?.originalId ?? nodeId;
251
- if (!selfType.startsWith("__internal_")) {
252
- const result2 = [selfOriginalId];
253
- memo.set(nodeId, result2);
254
- return result2;
255
- }
256
- if (selfType === "__internal_output_mapper__") {
257
- const result2 = [selfOriginalId];
258
- memo.set(nodeId, result2);
259
- return result2;
260
- }
261
- const directPredecessors = predecessorIdMap.get(nodeId) || [];
262
- const producers = /* @__PURE__ */ new Set();
263
- for (const predId of directPredecessors) {
264
- findOriginalProducers(predId).forEach((p) => producers.add(p));
265
- }
266
- const result = Array.from(producers);
267
- memo.set(nodeId, result);
268
- return result;
269
- };
270
- for (const targetId of nodeDataMap.keys()) {
271
- const mapKey = targetId;
272
- const directPredecessors = predecessorIdMap.get(targetId) || [];
273
- const producers = /* @__PURE__ */ new Set();
274
- for (const predId of directPredecessors)
275
- findOriginalProducers(predId).forEach((p) => producers.add(p));
276
- if (producers.size > 0)
277
- originalPredecessorIdMap.set(mapKey, Array.from(producers));
278
- }
279
- return { predecessorIdMap, originalPredecessorIdMap };
280
- }
281
- _findBranchTerminals(branchStarts, targetId, graph) {
282
- const terminals = [];
283
- for (const start of branchStarts) {
284
- const queue = [start];
285
- const visitedInBranch = /* @__PURE__ */ new Set();
286
- while (queue.length > 0) {
287
- const currentId = queue.shift();
288
- if (visitedInBranch.has(currentId))
289
- continue;
290
- visitedInBranch.add(currentId);
291
- const successors = graph.edges.filter((e) => e.source === currentId).map((e) => e.target);
292
- if (successors.includes(targetId)) {
293
- if (!terminals.includes(currentId)) {
294
- terminals.push(currentId);
295
- }
296
- } else {
297
- for (const successorId of successors) {
298
- if (!visitedInBranch.has(successorId)) {
299
- queue.push(successorId);
300
- }
301
- }
302
- }
303
- }
304
- }
305
- return terminals;
306
- }
307
- _findConditionalConvergence(branchStarts, graph) {
308
- if (branchStarts.length <= 1)
309
- return void 0;
310
- const queue = [...branchStarts];
311
- const visitedBy = /* @__PURE__ */ new Map();
312
- branchStarts.forEach((startId) => visitedBy.set(startId, /* @__PURE__ */ new Set([startId])));
313
- let head = 0;
314
- while (head < queue.length) {
315
- const currentId = queue[head++];
316
- const successors = graph.edges.filter((e) => e.source === currentId).map((e) => e.target);
317
- for (const successorId of successors) {
318
- if (!visitedBy.has(successorId))
319
- visitedBy.set(successorId, /* @__PURE__ */ new Set());
320
- const visitorSet = visitedBy.get(successorId);
321
- const startingPointsVistingThisNode = visitedBy.get(currentId);
322
- for (const startNodeId of startingPointsVistingThisNode)
323
- visitorSet.add(startNodeId);
324
- if (visitorSet.size === branchStarts.length)
325
- return successorId;
326
- if (!queue.includes(successorId))
327
- queue.push(successorId);
328
- }
329
- }
330
- return void 0;
331
- }
332
- };
333
-
334
- // src/builder/graph/runner.ts
335
var BlueprintExecutor = class {
  /**
   * Hydrates a serialized blueprint into executable node instances and wires
   * them into a runnable Flow rooted at the blueprint's start node.
   * @param blueprint Serialized graph: `nodes`, `edges`, and a `startNodeId`.
   * @param registry Either a ready-made Map registry or a spec accepted by
   *                 `createNodeRegistry`.
   * @param nodeOptionsContext Extra options merged into every node's
   *                           constructor options (node data wins on conflict).
   * @throws Error when the start node is missing from the hydrated node map.
   */
  constructor(blueprint, registry, nodeOptionsContext = {}) {
    this.blueprint = blueprint;
    this.nodeOptionsContext = nodeOptionsContext;
    this.registry = registry instanceof Map ? registry : createNodeRegistry(registry);
    this.nodeMap = this._createNodeMap(blueprint.nodes);
    this._wireGraph();
    const startNode = this.nodeMap.get(blueprint.startNodeId);
    if (!startNode)
      throw new Error(`Blueprint start node with ID '${blueprint.startNodeId}' not found in hydrated node map.`);
    this.flow = new Flow(startNode);
    this._populateContainers();
  }
  flow;
  nodeMap;
  registry;
  /**
   * After wiring, gives each parallel container the flattened list of its
   * successor nodes to run.
   * @private
   */
  _populateContainers() {
    for (const node of this.nodeMap.values()) {
      if (node instanceof ParallelBranchContainer)
        node.nodesToRun = Array.from(node.successors.values()).flat();
    }
  }
  /**
   * Retrieves a hydrated node instance from the blueprint by its ID.
   * This is useful for workers that need to execute a specific node from the graph.
   * @param nodeId The ID of the node to retrieve.
   * @returns The `AbstractNode` instance if found, otherwise `undefined`.
   */
  getNode(nodeId) {
    return this.nodeMap.get(nodeId);
  }
  /**
   * Instantiates all node objects from the blueprint's definition.
   * @throws Error when a node's type has no entry in the registry.
   * @private
   */
  _createNodeMap(nodes) {
    const nodeMap = new Map();
    for (const graphNode of nodes) {
      const NodeClass = this.registry.get(graphNode.type);
      if (!NodeClass)
        throw new Error(`BlueprintExecutor: Node type '${graphNode.type}' not found in registry.`);
      // Node-specific data overrides the shared context; nodeId is always set.
      const nodeOptions = {
        ...this.nodeOptionsContext,
        data: { ...graphNode.data, nodeId: graphNode.id }
      };
      const executableNode = new NodeClass(nodeOptions).withId(graphNode.id).withGraphData(graphNode);
      nodeMap.set(graphNode.id, executableNode);
    }
    return nodeMap;
  }
  /**
   * Wires the hydrated node instances together based on the blueprint's edges.
   * Edges whose source or target node was not hydrated are skipped.
   * @private
   */
  _wireGraph() {
    // Group targets by source node and action so each successor list is
    // attached with a single pass per (source, action) pair.
    const edgeGroups = new Map();
    for (const edge of this.blueprint.edges) {
      const sourceId = edge.source;
      const action = edge.action || DEFAULT_ACTION;
      const targetNode = this.nodeMap.get(edge.target);
      if (!targetNode)
        continue;
      if (!edgeGroups.has(sourceId))
        edgeGroups.set(sourceId, new Map());
      const sourceActions = edgeGroups.get(sourceId);
      if (!sourceActions.has(action))
        sourceActions.set(action, []);
      sourceActions.get(action).push(targetNode);
    }
    for (const [sourceId, actions] of edgeGroups.entries()) {
      const sourceNode = this.nodeMap.get(sourceId);
      // Skip edges whose source node was never hydrated, mirroring the
      // missing-target handling above (previously this crashed with an
      // unhelpful TypeError on `sourceNode.next`).
      if (!sourceNode)
        continue;
      for (const [action, successors] of actions.entries()) {
        for (const successor of successors)
          sourceNode.next(successor, action);
      }
    }
  }
  /**
   * Executes the flow defined by the blueprint.
   * @param flow The flow to execute.
   * @param context The shared context for the workflow.
   * @param options Runtime options, including a logger, abort controller, or initial params.
   * @returns A promise that resolves with the final action of the workflow.
   * @throws Error when `flow` is not the flow this executor was built with.
   */
  async run(flow, context, options) {
    if (flow !== this.flow) {
      throw new Error(
        "BlueprintExecutor is specialized and can only run the flow instance it was constructed with. To run an arbitrary flow, use an InMemoryExecutor instance."
      );
    }
    // Delegate actual execution, passing ourselves so nodes can resolve peers.
    const inMemoryExecutor = new InMemoryExecutor();
    const finalOptions = { ...options, executor: this };
    return inMemoryExecutor.run(this.flow, context, finalOptions);
  }
  /**
   * Determines the next node to execute based on the action returned by the current node.
   * For distributed systems, this logic would live on the orchestrator.
   * @internal
   */
  getNextNode(curr, action) {
    return curr.successors.get(action)?.[0];
  }
};
438
-
439
- export { BlueprintExecutor, GraphBuilder, createNodeRegistry };
440
//# sourceMappingURL=chunk-3YMBNZ77.js.map