flowcraft 1.0.0 → 2.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (201)
  1. package/README.md +37 -134
  2. package/dist/analysis.d.ts +43 -0
  3. package/dist/analysis.js +3 -0
  4. package/dist/chunk-55J6XMHW.js +3 -0
  5. package/dist/{chunk-7XUN3OQT.js.map → chunk-55J6XMHW.js.map} +1 -1
  6. package/dist/chunk-6DNEDIIT.js +123 -0
  7. package/dist/chunk-6DNEDIIT.js.map +1 -0
  8. package/dist/chunk-734J4PTM.js +100 -0
  9. package/dist/chunk-734J4PTM.js.map +1 -0
  10. package/dist/chunk-DSZSR7UE.js +40 -0
  11. package/dist/chunk-DSZSR7UE.js.map +1 -0
  12. package/dist/chunk-GTZC6PQI.js +22 -0
  13. package/dist/chunk-GTZC6PQI.js.map +1 -0
  14. package/dist/chunk-HMR2GEGE.js +3 -0
  15. package/dist/{chunk-F2RSES6P.js.map → chunk-HMR2GEGE.js.map} +1 -1
  16. package/dist/chunk-J3RNCPED.js +27 -0
  17. package/dist/chunk-J3RNCPED.js.map +1 -0
  18. package/dist/chunk-M23P46ZL.js +105 -0
  19. package/dist/chunk-M23P46ZL.js.map +1 -0
  20. package/dist/chunk-MICPMOTW.js +49 -0
  21. package/dist/chunk-MICPMOTW.js.map +1 -0
  22. package/dist/chunk-NPAJNLXQ.js +106 -0
  23. package/dist/chunk-NPAJNLXQ.js.map +1 -0
  24. package/dist/chunk-RAZXOMZC.js +64 -0
  25. package/dist/chunk-RAZXOMZC.js.map +1 -0
  26. package/dist/chunk-REH55ZXV.js +13 -0
  27. package/dist/chunk-REH55ZXV.js.map +1 -0
  28. package/dist/chunk-RW4FH7IL.js +135 -0
  29. package/dist/chunk-RW4FH7IL.js.map +1 -0
  30. package/dist/chunk-RYTIQZIB.js +303 -0
  31. package/dist/chunk-RYTIQZIB.js.map +1 -0
  32. package/dist/chunk-UYPIWXZG.js +62 -0
  33. package/dist/chunk-UYPIWXZG.js.map +1 -0
  34. package/dist/chunk-VFC342WL.js +29 -0
  35. package/dist/chunk-VFC342WL.js.map +1 -0
  36. package/dist/chunk-WXT3YEWU.js +28 -0
  37. package/dist/chunk-WXT3YEWU.js.map +1 -0
  38. package/dist/context.d.ts +23 -105
  39. package/dist/context.js +1 -1
  40. package/dist/errors.d.ts +15 -31
  41. package/dist/errors.js +1 -1
  42. package/dist/evaluator.d.ts +22 -0
  43. package/dist/evaluator.js +3 -0
  44. package/dist/evaluator.js.map +1 -0
  45. package/dist/flow.d.ts +54 -0
  46. package/dist/flow.js +4 -0
  47. package/dist/flow.js.map +1 -0
  48. package/dist/index.d.ts +14 -16
  49. package/dist/index.js +17 -25
  50. package/dist/linter.d.ts +24 -0
  51. package/dist/linter.js +4 -0
  52. package/dist/linter.js.map +1 -0
  53. package/dist/logger.d.ts +11 -36
  54. package/dist/logger.js +1 -1
  55. package/dist/node.d.ts +1 -0
  56. package/dist/node.js +3 -0
  57. package/dist/node.js.map +1 -0
  58. package/dist/runtime/adapter.d.ts +78 -0
  59. package/dist/runtime/adapter.js +15 -0
  60. package/dist/runtime/adapter.js.map +1 -0
  61. package/dist/runtime/executors.d.ts +26 -0
  62. package/dist/runtime/executors.js +4 -0
  63. package/dist/runtime/executors.js.map +1 -0
  64. package/dist/runtime/index.d.ts +7 -0
  65. package/dist/runtime/index.js +16 -0
  66. package/dist/runtime/runtime.d.ts +33 -0
  67. package/dist/runtime/runtime.js +14 -0
  68. package/dist/runtime/runtime.js.map +1 -0
  69. package/dist/runtime/state.d.ts +21 -0
  70. package/dist/runtime/state.js +4 -0
  71. package/dist/runtime/state.js.map +1 -0
  72. package/dist/runtime/traverser.d.ts +24 -0
  73. package/dist/runtime/traverser.js +5 -0
  74. package/dist/runtime/traverser.js.map +1 -0
  75. package/dist/runtime/types.d.ts +15 -0
  76. package/dist/runtime/types.js +3 -0
  77. package/dist/sanitizer.d.ts +10 -0
  78. package/dist/sanitizer.js +3 -0
  79. package/dist/{utils/sanitize.js.map → sanitizer.js.map} +1 -1
  80. package/dist/serializer.d.ts +15 -0
  81. package/dist/serializer.js +3 -0
  82. package/dist/serializer.js.map +1 -0
  83. package/dist/types-CZN_FcB6.d.ts +201 -0
  84. package/dist/types.d.ts +1 -3
  85. package/dist/types.js +1 -1
  86. package/package.json +9 -20
  87. package/LICENSE +0 -21
  88. package/dist/builder/graph/graph.d.ts +0 -57
  89. package/dist/builder/graph/graph.js +0 -21
  90. package/dist/builder/graph/graph.js.map +0 -1
  91. package/dist/builder/graph/index.d.ts +0 -8
  92. package/dist/builder/graph/index.js +0 -23
  93. package/dist/builder/graph/internal-nodes.d.ts +0 -59
  94. package/dist/builder/graph/internal-nodes.js +0 -20
  95. package/dist/builder/graph/internal-nodes.js.map +0 -1
  96. package/dist/builder/graph/runner.d.ts +0 -51
  97. package/dist/builder/graph/runner.js +0 -21
  98. package/dist/builder/graph/runner.js.map +0 -1
  99. package/dist/builder/graph/types.d.ts +0 -3
  100. package/dist/builder/graph/types.js +0 -3
  101. package/dist/builder/index.d.ts +0 -8
  102. package/dist/builder/index.js +0 -24
  103. package/dist/builder/index.js.map +0 -1
  104. package/dist/builder/patterns.d.ts +0 -136
  105. package/dist/builder/patterns.js +0 -19
  106. package/dist/builder/patterns.js.map +0 -1
  107. package/dist/chunk-3YMBNZ77.js +0 -441
  108. package/dist/chunk-3YMBNZ77.js.map +0 -1
  109. package/dist/chunk-64DNBF5W.js +0 -36
  110. package/dist/chunk-64DNBF5W.js.map +0 -1
  111. package/dist/chunk-6QCXIRLA.js +0 -18
  112. package/dist/chunk-6QCXIRLA.js.map +0 -1
  113. package/dist/chunk-7XUN3OQT.js +0 -3
  114. package/dist/chunk-AOHBHYF6.js +0 -7
  115. package/dist/chunk-AOHBHYF6.js.map +0 -1
  116. package/dist/chunk-BRFMFLR6.js +0 -85
  117. package/dist/chunk-BRFMFLR6.js.map +0 -1
  118. package/dist/chunk-ELEHMJPM.js +0 -13
  119. package/dist/chunk-ELEHMJPM.js.map +0 -1
  120. package/dist/chunk-F2RSES6P.js +0 -3
  121. package/dist/chunk-F6C6J7HK.js +0 -3
  122. package/dist/chunk-F6C6J7HK.js.map +0 -1
  123. package/dist/chunk-GMKJ34T2.js +0 -3
  124. package/dist/chunk-GMKJ34T2.js.map +0 -1
  125. package/dist/chunk-HEO3XL4Z.js +0 -328
  126. package/dist/chunk-HEO3XL4Z.js.map +0 -1
  127. package/dist/chunk-IIKTTIW5.js +0 -56
  128. package/dist/chunk-IIKTTIW5.js.map +0 -1
  129. package/dist/chunk-KOBEU2EM.js +0 -3
  130. package/dist/chunk-KOBEU2EM.js.map +0 -1
  131. package/dist/chunk-L5PK5VL2.js +0 -178
  132. package/dist/chunk-L5PK5VL2.js.map +0 -1
  133. package/dist/chunk-P3RPDZHO.js +0 -36
  134. package/dist/chunk-P3RPDZHO.js.map +0 -1
  135. package/dist/chunk-PNWOW52F.js +0 -19
  136. package/dist/chunk-PNWOW52F.js.map +0 -1
  137. package/dist/chunk-R27FIYR5.js +0 -62
  138. package/dist/chunk-R27FIYR5.js.map +0 -1
  139. package/dist/chunk-S4WFNGQG.js +0 -17
  140. package/dist/chunk-S4WFNGQG.js.map +0 -1
  141. package/dist/chunk-TS3M7MWA.js +0 -3
  142. package/dist/chunk-TS3M7MWA.js.map +0 -1
  143. package/dist/chunk-UY4PNPBX.js +0 -156
  144. package/dist/chunk-UY4PNPBX.js.map +0 -1
  145. package/dist/chunk-VMH2LRM6.js +0 -114
  146. package/dist/chunk-VMH2LRM6.js.map +0 -1
  147. package/dist/chunk-VZDHIOCH.js +0 -76
  148. package/dist/chunk-VZDHIOCH.js.map +0 -1
  149. package/dist/chunk-WGVHM7DU.js +0 -66
  150. package/dist/chunk-WGVHM7DU.js.map +0 -1
  151. package/dist/chunk-WR5PDOPP.js +0 -91
  152. package/dist/chunk-WR5PDOPP.js.map +0 -1
  153. package/dist/chunk-YR433ZDA.js +0 -20
  154. package/dist/chunk-YR433ZDA.js.map +0 -1
  155. package/dist/executors/in-memory.d.ts +0 -39
  156. package/dist/executors/in-memory.js +0 -6
  157. package/dist/executors/in-memory.js.map +0 -1
  158. package/dist/executors/types.d.ts +0 -3
  159. package/dist/executors/types.js +0 -3
  160. package/dist/executors/types.js.map +0 -1
  161. package/dist/functions.d.ts +0 -88
  162. package/dist/functions.js +0 -21
  163. package/dist/functions.js.map +0 -1
  164. package/dist/types-U76Ukj96.d.ts +0 -609
  165. package/dist/utils/analysis.d.ts +0 -75
  166. package/dist/utils/analysis.js +0 -3
  167. package/dist/utils/index.d.ts +0 -8
  168. package/dist/utils/index.js +0 -10
  169. package/dist/utils/index.js.map +0 -1
  170. package/dist/utils/mermaid.d.ts +0 -46
  171. package/dist/utils/mermaid.js +0 -4
  172. package/dist/utils/mermaid.js.map +0 -1
  173. package/dist/utils/middleware.d.ts +0 -11
  174. package/dist/utils/middleware.js +0 -3
  175. package/dist/utils/middleware.js.map +0 -1
  176. package/dist/utils/sanitize.d.ts +0 -19
  177. package/dist/utils/sanitize.js +0 -3
  178. package/dist/utils/sleep.d.ts +0 -9
  179. package/dist/utils/sleep.js +0 -4
  180. package/dist/utils/sleep.js.map +0 -1
  181. package/dist/workflow/AbstractNode.d.ts +0 -3
  182. package/dist/workflow/AbstractNode.js +0 -4
  183. package/dist/workflow/AbstractNode.js.map +0 -1
  184. package/dist/workflow/Flow.d.ts +0 -3
  185. package/dist/workflow/Flow.js +0 -16
  186. package/dist/workflow/Flow.js.map +0 -1
  187. package/dist/workflow/Node.d.ts +0 -3
  188. package/dist/workflow/Node.js +0 -15
  189. package/dist/workflow/Node.js.map +0 -1
  190. package/dist/workflow/index.d.ts +0 -4
  191. package/dist/workflow/index.js +0 -18
  192. package/dist/workflow/index.js.map +0 -1
  193. package/dist/workflow/node-patterns.d.ts +0 -55
  194. package/dist/workflow/node-patterns.js +0 -16
  195. package/dist/workflow/node-patterns.js.map +0 -1
  196. package/dist/workflow/registry.d.ts +0 -17
  197. package/dist/workflow/registry.js +0 -3
  198. package/dist/workflow/registry.js.map +0 -1
  199. /package/dist/{utils/analysis.js.map → analysis.js.map} +0 -0
  200. /package/dist/{builder/graph → runtime}/index.js.map +0 -0
  201. /package/dist/{builder/graph → runtime}/types.js.map +0 -0
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/sanitizer.ts"],"names":[],"mappings":";AAOO,SAAS,kBAAkB,GAAA,EAA6B;AAC9D,EAAA,MAAM,KAAA,GAA0B,GAAA,CAAI,KAAA,EAAO,GAAA,CAAI,CAAC,IAAA,MAAe;AAAA,IAC9D,IAAI,IAAA,CAAK,EAAA;AAAA,IACT,MAAM,IAAA,CAAK,IAAA;AAAA,IACX,QAAQ,IAAA,CAAK,MAAA;AAAA,IACb,QAAQ,IAAA,CAAK,MAAA;AAAA,IACb,QAAQ,IAAA,CAAK;AAAA,GACd,CAAE,KAAK,EAAC;AAER,EAAA,MAAM,KAAA,GAA0B,GAAA,CAAI,KAAA,EAAO,GAAA,CAAI,CAAC,IAAA,MAAe;AAAA,IAC9D,QAAQ,IAAA,CAAK,MAAA;AAAA,IACb,QAAQ,IAAA,CAAK,MAAA;AAAA,IACb,QAAQ,IAAA,CAAK,MAAA;AAAA,IACb,WAAW,IAAA,CAAK,SAAA;AAAA,IAChB,WAAW,IAAA,CAAK;AAAA,GACjB,CAAE,KAAK,EAAC;AAER,EAAA,OAAO;AAAA,IACN,IAAI,GAAA,CAAI,EAAA;AAAA,IACR,KAAA;AAAA,IACA,KAAA;AAAA,IACA,UAAU,GAAA,CAAI;AAAA,GACf;AACD","file":"chunk-J3RNCPED.js","sourcesContent":["import type { EdgeDefinition, NodeDefinition, WorkflowBlueprint } from './types'\n\n/**\n * Sanitizes a raw workflow blueprint by removing extra properties\n * added by UI tools (e.g., position, style) and keeping only the\n * properties defined in NodeDefinition and EdgeDefinition.\n */\nexport function sanitizeBlueprint(raw: any): WorkflowBlueprint {\n\tconst nodes: NodeDefinition[] = raw.nodes?.map((node: any) => ({\n\t\tid: node.id,\n\t\tuses: node.uses,\n\t\tparams: node.params,\n\t\tinputs: node.inputs,\n\t\tconfig: node.config,\n\t})) || []\n\n\tconst edges: EdgeDefinition[] = raw.edges?.map((edge: any) => ({\n\t\tsource: edge.source,\n\t\ttarget: edge.target,\n\t\taction: edge.action,\n\t\tcondition: edge.condition,\n\t\ttransform: edge.transform,\n\t})) || []\n\n\treturn {\n\t\tid: raw.id,\n\t\tnodes,\n\t\tedges,\n\t\tmetadata: raw.metadata,\n\t}\n}\n"]}
@@ -0,0 +1,105 @@
1
+ // src/analysis.ts
2
+ function checkForCycles(blueprint) {
3
+ const cycles = [];
4
+ if (!blueprint || !blueprint.nodes || blueprint.nodes.length === 0) {
5
+ return cycles;
6
+ }
7
+ const allNodeIds = blueprint.nodes.map((node) => node.id);
8
+ const adj = /* @__PURE__ */ new Map();
9
+ allNodeIds.forEach((id) => adj.set(id, []));
10
+ blueprint.edges.forEach((edge) => adj.get(edge.source)?.push(edge.target));
11
+ const visited = /* @__PURE__ */ new Set();
12
+ const recursionStack = /* @__PURE__ */ new Set();
13
+ function detectCycleUtil(nodeId, path) {
14
+ visited.add(nodeId);
15
+ recursionStack.add(nodeId);
16
+ path.push(nodeId);
17
+ const neighbors = adj.get(nodeId) || [];
18
+ for (const neighbor of neighbors) {
19
+ if (recursionStack.has(neighbor)) {
20
+ const cycleStartIndex = path.indexOf(neighbor);
21
+ const cycle = path.slice(cycleStartIndex);
22
+ cycles.push([...cycle, neighbor]);
23
+ } else if (!visited.has(neighbor)) {
24
+ detectCycleUtil(neighbor, path);
25
+ }
26
+ }
27
+ recursionStack.delete(nodeId);
28
+ path.pop();
29
+ }
30
+ for (const nodeId of allNodeIds) {
31
+ if (!visited.has(nodeId)) {
32
+ detectCycleUtil(nodeId, []);
33
+ }
34
+ }
35
+ return cycles;
36
+ }
37
/**
 * Renders a workflow blueprint as a Mermaid `flowchart TD` diagram string.
 * Nodes become labeled boxes; edges are annotated with any combination of
 * action, condition, and transform, joined by " | ".
 *
 * @param {object} blueprint - Blueprint with `nodes` and optional `edges`.
 * @returns {string} Mermaid flowchart syntax.
 */
function generateMermaid(blueprint) {
  if (!blueprint || !blueprint.nodes || blueprint.nodes.length === 0) {
    return "flowchart TD\n empty[Empty Blueprint]";
  }
  // Accumulate lines and join once instead of repeated string concatenation.
  const lines = ["flowchart TD\n"];
  for (const node of blueprint.nodes) {
    lines.push(` ${node.id}["${node.id}"]\n`);
  }
  for (const edge of blueprint.edges || []) {
    // Keep only the truthy annotations, preserving action/condition/transform order.
    const annotations = [edge.action, edge.condition, edge.transform].filter(Boolean);
    if (annotations.length > 0) {
      lines.push(` ${edge.source} -- "${annotations.join(" | ")}" --> ${edge.target}\n`);
    } else {
      lines.push(` ${edge.source} --> ${edge.target}\n`);
    }
  }
  return lines.join("");
}
69
/**
 * Produces a summary of a workflow blueprint: detected cycles, entry nodes
 * (no incoming edges), terminal nodes (no outgoing edges), node/edge counts,
 * and whether the graph is a DAG.
 *
 * @param {object} blueprint - Blueprint with `nodes` and optional `edges`.
 * @returns {object} { cycles, startNodeIds, terminalNodeIds, nodeCount, edgeCount, isDag }
 */
function analyzeBlueprint(blueprint) {
  if (!blueprint || !blueprint.nodes || blueprint.nodes.length === 0) {
    return {
      cycles: [],
      startNodeIds: [],
      terminalNodeIds: [],
      nodeCount: 0,
      edgeCount: 0,
      isDag: true
    };
  }
  const cycles = checkForCycles(blueprint);
  const edges = blueprint.edges || [];
  // Single pass over the edges collects both incoming and outgoing endpoints.
  const hasIncoming = new Set();
  const hasOutgoing = new Set();
  for (const edge of edges) {
    hasIncoming.add(edge.target);
    hasOutgoing.add(edge.source);
  }
  const ids = blueprint.nodes.map((node) => node.id);
  return {
    cycles,
    startNodeIds: ids.filter((id) => !hasIncoming.has(id)),
    terminalNodeIds: ids.filter((id) => !hasOutgoing.has(id)),
    nodeCount: blueprint.nodes.length,
    edgeCount: edges.length,
    isDag: cycles.length === 0
  };
}
102
+
103
+ export { analyzeBlueprint, checkForCycles, generateMermaid };
104
+ //# sourceMappingURL=chunk-M23P46ZL.js.map
105
+ //# sourceMappingURL=chunk-M23P46ZL.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/analysis.ts"],"names":[],"mappings":";AA8BO,SAAS,eAAe,SAAA,EAAsC;AACpE,EAAA,MAAM,SAAiB,EAAC;AACxB,EAAA,IAAI,CAAC,aAAa,CAAC,SAAA,CAAU,SAAS,SAAA,CAAU,KAAA,CAAM,WAAW,CAAA,EAAG;AACnE,IAAA,OAAO,MAAA;AAAA,EACR;AAEA,EAAA,MAAM,aAAa,SAAA,CAAU,KAAA,CAAM,GAAA,CAAI,CAAA,IAAA,KAAQ,KAAK,EAAE,CAAA;AACtD,EAAA,MAAM,GAAA,uBAAU,GAAA,EAAsB;AACtC,EAAA,UAAA,CAAW,QAAQ,CAAA,EAAA,KAAM,GAAA,CAAI,IAAI,EAAA,EAAI,EAAE,CAAC,CAAA;AACxC,EAAA,SAAA,CAAU,KAAA,CAAM,OAAA,CAAQ,CAAA,IAAA,KAAQ,GAAA,CAAI,GAAA,CAAI,IAAA,CAAK,MAAM,CAAA,EAAG,IAAA,CAAK,IAAA,CAAK,MAAM,CAAC,CAAA;AAEvE,EAAA,MAAM,OAAA,uBAAc,GAAA,EAAY;AAChC,EAAA,MAAM,cAAA,uBAAqB,GAAA,EAAY;AAEvC,EAAA,SAAS,eAAA,CAAgB,QAAgB,IAAA,EAAgB;AACxD,IAAA,OAAA,CAAQ,IAAI,MAAM,CAAA;AAClB,IAAA,cAAA,CAAe,IAAI,MAAM,CAAA;AACzB,IAAA,IAAA,CAAK,KAAK,MAAM,CAAA;AAEhB,IAAA,MAAM,SAAA,GAAY,GAAA,CAAI,GAAA,CAAI,MAAM,KAAK,EAAC;AACtC,IAAA,KAAA,MAAW,YAAY,SAAA,EAAW;AACjC,MAAA,IAAI,cAAA,CAAe,GAAA,CAAI,QAAQ,CAAA,EAAG;AACjC,QAAA,MAAM,eAAA,GAAkB,IAAA,CAAK,OAAA,CAAQ,QAAQ,CAAA;AAC7C,QAAA,MAAM,KAAA,GAAQ,IAAA,CAAK,KAAA,CAAM,eAAe,CAAA;AACxC,QAAA,MAAA,CAAO,IAAA,CAAK,CAAC,GAAG,KAAA,EAAO,QAAQ,CAAC,CAAA;AAAA,MACjC,CAAA,MAAA,IACS,CAAC,OAAA,CAAQ,GAAA,CAAI,QAAQ,CAAA,EAAG;AAChC,QAAA,eAAA,CAAgB,UAAU,IAAI,CAAA;AAAA,MAC/B;AAAA,IACD;AAEA,IAAA,cAAA,CAAe,OAAO,MAAM,CAAA;AAC5B,IAAA,IAAA,CAAK,GAAA,EAAI;AAAA,EACV;AAEA,EAAA,KAAA,MAAW,UAAU,UAAA,EAAY;AAChC,IAAA,IAAI,CAAC,OAAA,CAAQ,GAAA,CAAI,MAAM,CAAA,EAAG;AACzB,MAAA,eAAA,CAAgB,MAAA,EAAQ,EAAE,CAAA;AAAA,IAC3B;AAAA,EACD;AAEA,EAAA,OAAO,MAAA;AACR;AAOO,SAAS,gBAAgB,SAAA,EAAsC;AACrE,EAAA,IAAI,CAAC,aAAa,CAAC,SAAA,CAAU,SAAS,SAAA,CAAU,KAAA,CAAM,WAAW,CAAA,EAAG;AACnE,IAAA,OAAO,0CAAA;AAAA,EACR;AAEA,EAAA,IAAI,OAAA,GAAU,gBAAA;AAGd,EAAA,KAAA,MAAW,IAAA,IAAQ,UAAU,KAAA,EAAO;AACnC,IAAA,MAAM,YAAY,IAAA,CAAK,EAAA;AACvB,IAAA,OAAA,IAAW,CAAA,IAAA,EAAO,IAAA,CAAK,EAAE,CAAA,EAAA,EAAK,SAAS,CAAA;AAAA,CAAA;AAAA,EACxC;AAGA,EAAA,KAAA,MAAW,IAAA,IAAQ,SAAA,CAAU,KAAA,IAAS,EAAC,EAAG;AACzC,IAAA,MAAM,aAAuB,EAAC;AAE9B,IAAA,IAAI,KAAK,MAAA,EAAQ;AAChB,MAAA,UAAA,CAAW,IAAA,
CAAK,KAAK,MAAM,CAAA;AAAA,IAC5B;AACA,IAAA,IAAI,KAAK,SAAA,EAAW;AACnB,MAAA,UAAA,CAAW,IAAA,CAAK,KAAK,SAAS,CAAA;AAAA,IAC/B;AACA,IAAA,IAAI,KAAK,SAAA,EAAW;AACnB,MAAA,UAAA,CAAW,IAAA,CAAK,KAAK,SAAS,CAAA;AAAA,IAC/B;AAEA,IAAA,IAAI,UAAA,CAAW,SAAS,CAAA,EAAG;AAC1B,MAAA,MAAM,SAAA,GAAY,UAAA,CAAW,IAAA,CAAK,KAAK,CAAA;AACvC,MAAA,OAAA,IAAW,OAAO,IAAA,CAAK,MAAM,QAAQ,SAAS,CAAA,MAAA,EAAS,KAAK,MAAM;AAAA,CAAA;AAAA,IACnE,CAAA,MACK;AACJ,MAAA,OAAA,IAAW,CAAA,IAAA,EAAO,IAAA,CAAK,MAAM,CAAA,KAAA,EAAQ,KAAK,MAAM;AAAA,CAAA;AAAA,IACjD;AAAA,EACD;AAEA,EAAA,OAAO,OAAA;AACR;AAOO,SAAS,iBAAiB,SAAA,EAAiD;AACjF,EAAA,IAAI,CAAC,aAAa,CAAC,SAAA,CAAU,SAAS,SAAA,CAAU,KAAA,CAAM,WAAW,CAAA,EAAG;AACnE,IAAA,OAAO;AAAA,MACN,QAAQ,EAAC;AAAA,MACT,cAAc,EAAC;AAAA,MACf,iBAAiB,EAAC;AAAA,MAClB,SAAA,EAAW,CAAA;AAAA,MACX,SAAA,EAAW,CAAA;AAAA,MACX,KAAA,EAAO;AAAA,KACR;AAAA,EACD;AAEA,EAAA,MAAM,MAAA,GAAS,eAAe,SAAS,CAAA;AACvC,EAAA,MAAM,SAAA,GAAY,UAAU,KAAA,CAAM,MAAA;AAClC,EAAA,MAAM,SAAA,GAAY,SAAA,CAAU,KAAA,EAAO,MAAA,IAAU,CAAA;AAG7C,EAAA,MAAM,iBAAA,uBAAwB,GAAA,EAAY;AAC1C,EAAA,KAAA,MAAW,IAAA,IAAQ,SAAA,CAAU,KAAA,IAAS,EAAC,EAAG;AACzC,IAAA,iBAAA,CAAkB,GAAA,CAAI,KAAK,MAAM,CAAA;AAAA,EAClC;AAEA,EAAA,MAAM,YAAA,GAAe,SAAA,CAAU,KAAA,CAC7B,GAAA,CAAI,UAAQ,IAAA,CAAK,EAAE,CAAA,CACnB,MAAA,CAAO,CAAA,MAAA,KAAU,CAAC,iBAAA,CAAkB,GAAA,CAAI,MAAM,CAAC,CAAA;AAGjD,EAAA,MAAM,iBAAA,uBAAwB,GAAA,EAAY;AAC1C,EAAA,KAAA,MAAW,IAAA,IAAQ,SAAA,CAAU,KAAA,IAAS,EAAC,EAAG;AACzC,IAAA,iBAAA,CAAkB,GAAA,CAAI,KAAK,MAAM,CAAA;AAAA,EAClC;AAEA,EAAA,MAAM,eAAA,GAAkB,SAAA,CAAU,KAAA,CAChC,GAAA,CAAI,UAAQ,IAAA,CAAK,EAAE,CAAA,CACnB,MAAA,CAAO,CAAA,MAAA,KAAU,CAAC,iBAAA,CAAkB,GAAA,CAAI,MAAM,CAAC,CAAA;AAEjD,EAAA,OAAO;AAAA,IACN,MAAA;AAAA,IACA,YAAA;AAAA,IACA,eAAA;AAAA,IACA,SAAA;AAAA,IACA,SAAA;AAAA,IACA,KAAA,EAAO,OAAO,MAAA,KAAW;AAAA,GAC1B;AACD","file":"chunk-M23P46ZL.js","sourcesContent":["import type { WorkflowBlueprint } from './types'\n\n/**\n * A list of cycles found in the graph. 
Each cycle is an array of node IDs.\n */\nexport type Cycles = string[][]\n\n/**\n * Analysis result for a workflow blueprint\n */\nexport interface BlueprintAnalysis {\n\t/** Cycles found in the graph */\n\tcycles: Cycles\n\t/** Node IDs that have no incoming edges (start nodes) */\n\tstartNodeIds: string[]\n\t/** Node IDs that have no outgoing edges (terminal nodes) */\n\tterminalNodeIds: string[]\n\t/** Total number of nodes */\n\tnodeCount: number\n\t/** Total number of edges */\n\tedgeCount: number\n\t/** Whether the graph is a valid DAG (no cycles) */\n\tisDag: boolean\n}\n\n/**\n * Analyzes a workflow blueprint to detect cycles.\n * @param blueprint The WorkflowBlueprint object containing nodes and edges.\n * @returns An array of cycles found. Each cycle is represented as an array of node IDs.\n */\nexport function checkForCycles(blueprint: WorkflowBlueprint): Cycles {\n\tconst cycles: Cycles = []\n\tif (!blueprint || !blueprint.nodes || blueprint.nodes.length === 0) {\n\t\treturn cycles\n\t}\n\n\tconst allNodeIds = blueprint.nodes.map(node => node.id)\n\tconst adj = new Map<string, string[]>()\n\tallNodeIds.forEach(id => adj.set(id, []))\n\tblueprint.edges.forEach(edge => adj.get(edge.source)?.push(edge.target))\n\n\tconst visited = new Set<string>()\n\tconst recursionStack = new Set<string>()\n\n\tfunction detectCycleUtil(nodeId: string, path: string[]) {\n\t\tvisited.add(nodeId)\n\t\trecursionStack.add(nodeId)\n\t\tpath.push(nodeId)\n\n\t\tconst neighbors = adj.get(nodeId) || []\n\t\tfor (const neighbor of neighbors) {\n\t\t\tif (recursionStack.has(neighbor)) {\n\t\t\t\tconst cycleStartIndex = path.indexOf(neighbor)\n\t\t\t\tconst cycle = path.slice(cycleStartIndex)\n\t\t\t\tcycles.push([...cycle, neighbor])\n\t\t\t}\n\t\t\telse if (!visited.has(neighbor)) {\n\t\t\t\tdetectCycleUtil(neighbor, path)\n\t\t\t}\n\t\t}\n\n\t\trecursionStack.delete(nodeId)\n\t\tpath.pop()\n\t}\n\n\tfor (const nodeId of allNodeIds) {\n\t\tif (!visited.has(nodeId)) 
{\n\t\t\tdetectCycleUtil(nodeId, [])\n\t\t}\n\t}\n\n\treturn cycles\n}\n\n/**\n * Generates Mermaid diagram syntax from a WorkflowBlueprint\n * @param blueprint The WorkflowBlueprint object containing nodes and edges\n * @returns Mermaid syntax string for the flowchart\n */\nexport function generateMermaid(blueprint: WorkflowBlueprint): string {\n\tif (!blueprint || !blueprint.nodes || blueprint.nodes.length === 0) {\n\t\treturn 'flowchart TD\\n empty[Empty Blueprint]'\n\t}\n\n\tlet mermaid = 'flowchart TD\\n'\n\n\t// Add nodes\n\tfor (const node of blueprint.nodes) {\n\t\tconst nodeLabel = node.id\n\t\tmermaid += ` ${node.id}[\"${nodeLabel}\"]\\n`\n\t}\n\n\t// Add edges\n\tfor (const edge of blueprint.edges || []) {\n\t\tconst labelParts: string[] = []\n\n\t\tif (edge.action) {\n\t\t\tlabelParts.push(edge.action)\n\t\t}\n\t\tif (edge.condition) {\n\t\t\tlabelParts.push(edge.condition)\n\t\t}\n\t\tif (edge.transform) {\n\t\t\tlabelParts.push(edge.transform)\n\t\t}\n\n\t\tif (labelParts.length > 0) {\n\t\t\tconst edgeLabel = labelParts.join(' | ')\n\t\t\tmermaid += ` ${edge.source} -- \"${edgeLabel}\" --> ${edge.target}\\n`\n\t\t}\n\t\telse {\n\t\t\tmermaid += ` ${edge.source} --> ${edge.target}\\n`\n\t\t}\n\t}\n\n\treturn mermaid\n}\n\n/**\n * Analyzes a workflow blueprint and returns comprehensive analysis\n * @param blueprint The WorkflowBlueprint object containing nodes and edges\n * @returns Analysis result with cycles, start nodes, terminal nodes, and other metrics\n */\nexport function analyzeBlueprint(blueprint: WorkflowBlueprint): BlueprintAnalysis {\n\tif (!blueprint || !blueprint.nodes || blueprint.nodes.length === 0) {\n\t\treturn {\n\t\t\tcycles: [],\n\t\t\tstartNodeIds: [],\n\t\t\tterminalNodeIds: [],\n\t\t\tnodeCount: 0,\n\t\t\tedgeCount: 0,\n\t\t\tisDag: true,\n\t\t}\n\t}\n\n\tconst cycles = checkForCycles(blueprint)\n\tconst nodeCount = blueprint.nodes.length\n\tconst edgeCount = blueprint.edges?.length || 0\n\n\t// Find nodes with no incoming edges 
(start nodes)\n\tconst nodesWithIncoming = new Set<string>()\n\tfor (const edge of blueprint.edges || []) {\n\t\tnodesWithIncoming.add(edge.target)\n\t}\n\n\tconst startNodeIds = blueprint.nodes\n\t\t.map(node => node.id)\n\t\t.filter(nodeId => !nodesWithIncoming.has(nodeId))\n\n\t// Find nodes with no outgoing edges (terminal nodes)\n\tconst nodesWithOutgoing = new Set<string>()\n\tfor (const edge of blueprint.edges || []) {\n\t\tnodesWithOutgoing.add(edge.source)\n\t}\n\n\tconst terminalNodeIds = blueprint.nodes\n\t\t.map(node => node.id)\n\t\t.filter(nodeId => !nodesWithOutgoing.has(nodeId))\n\n\treturn {\n\t\tcycles,\n\t\tstartNodeIds,\n\t\tterminalNodeIds,\n\t\tnodeCount,\n\t\tedgeCount,\n\t\tisDag: cycles.length === 0,\n\t}\n}\n"]}
@@ -0,0 +1,49 @@
1
// src/context.ts

/**
 * In-memory synchronous context backed by a Map. Property keys are coerced
 * to strings so any key type is handled uniformly across all operations.
 */
var Context = class {
  type = "sync";
  data;
  constructor(initialData = {}) {
    // Seed the backing Map entry-by-entry from the plain-object snapshot.
    this.data = new Map();
    for (const [key, value] of Object.entries(initialData)) {
      this.data.set(key, value);
    }
  }
  get(key) {
    return this.data.get(String(key));
  }
  set(key, value) {
    this.data.set(String(key), value);
  }
  has(key) {
    return this.data.has(String(key));
  }
  delete(key) {
    return this.data.delete(String(key));
  }
  toJSON() {
    // Snapshot the Map back into a plain object.
    return Object.fromEntries(this.data);
  }
};
24
/**
 * Promise-based adapter over a synchronous context, giving async consumers
 * one uniform awaitable interface. A pure pass-through: every call delegates
 * directly to the wrapped sync context and resolves with its result.
 */
var AsyncContextView = class {
  type = "async";
  syncContext;
  constructor(syncContext) {
    this.syncContext = syncContext;
  }
  async get(key) {
    return this.syncContext.get(key);
  }
  async set(key, value) {
    this.syncContext.set(key, value);
  }
  async has(key) {
    return this.syncContext.has(key);
  }
  async delete(key) {
    return this.syncContext.delete(key);
  }
  async toJSON() {
    return this.syncContext.toJSON();
  }
};
46
+
47
+ export { AsyncContextView, Context };
48
+ //# sourceMappingURL=chunk-MICPMOTW.js.map
49
+ //# sourceMappingURL=chunk-MICPMOTW.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/context.ts"],"names":[],"mappings":";AAKO,IAAM,UAAN,MAAsF;AAAA,EAC5E,IAAA,GAAO,MAAA;AAAA,EACf,IAAA;AAAA,EAER,WAAA,CAAY,WAAA,GAAiC,EAAC,EAAG;AAChD,IAAA,IAAA,CAAK,OAAO,IAAI,GAAA,CAAI,MAAA,CAAO,OAAA,CAAQ,WAAW,CAAC,CAAA;AAAA,EAChD;AAAA,EAEA,IAA8B,GAAA,EAAiC;AAC9D,IAAA,OAAO,IAAA,CAAK,IAAA,CAAK,GAAA,CAAI,MAAA,CAAO,GAAG,CAAC,CAAA;AAAA,EACjC;AAAA,EAEA,GAAA,CAA8B,KAAQ,KAAA,EAA0B;AAC/D,IAAA,IAAA,CAAK,IAAA,CAAK,GAAA,CAAI,MAAA,CAAO,GAAG,GAAG,KAAK,CAAA;AAAA,EACjC;AAAA,EAEA,IAAI,GAAA,EAA8B;AACjC,IAAA,OAAO,IAAA,CAAK,IAAA,CAAK,GAAA,CAAI,MAAA,CAAO,GAAG,CAAC,CAAA;AAAA,EACjC;AAAA,EAEA,OAAO,GAAA,EAA8B;AACpC,IAAA,OAAO,IAAA,CAAK,IAAA,CAAK,MAAA,CAAO,MAAA,CAAO,GAAG,CAAC,CAAA;AAAA,EACpC;AAAA,EAEA,MAAA,GAA8B;AAC7B,IAAA,OAAO,MAAA,CAAO,WAAA,CAAY,IAAA,CAAK,IAAI,CAAA;AAAA,EACpC;AACD;AAMO,IAAM,mBAAN,MAAgG;AAAA,EAGtG,YAAoB,WAAA,EAAqC;AAArC,IAAA,IAAA,CAAA,WAAA,GAAA,WAAA;AAAA,EAAuC;AAAA,EAF3C,IAAA,GAAO,OAAA;AAAA,EAIvB,IAA8B,GAAA,EAA0C;AACvE,IAAA,OAAO,QAAQ,OAAA,CAAQ,IAAA,CAAK,WAAA,CAAY,GAAA,CAAI,GAAG,CAAC,CAAA;AAAA,EACjD;AAAA,EAEA,GAAA,CAA8B,KAAQ,KAAA,EAAmC;AACxE,IAAA,IAAA,CAAK,WAAA,CAAY,GAAA,CAAI,GAAA,EAAK,KAAK,CAAA;AAC/B,IAAA,OAAO,QAAQ,OAAA,EAAQ;AAAA,EACxB;AAAA,EAEA,IAAI,GAAA,EAAuC;AAC1C,IAAA,OAAO,QAAQ,OAAA,CAAQ,IAAA,CAAK,WAAA,CAAY,GAAA,CAAI,GAAG,CAAC,CAAA;AAAA,EACjD;AAAA,EAEA,OAAO,GAAA,EAAuC;AAC7C,IAAA,OAAO,QAAQ,OAAA,CAAQ,IAAA,CAAK,WAAA,CAAY,MAAA,CAAO,GAAG,CAAC,CAAA;AAAA,EACpD;AAAA,EAEA,MAAA,GAAuC;AACtC,IAAA,OAAO,OAAA,CAAQ,OAAA,CAAQ,IAAA,CAAK,WAAA,CAAY,QAAQ,CAAA;AAAA,EACjD;AACD","file":"chunk-MICPMOTW.js","sourcesContent":["import type { IAsyncContext, ISyncContext } from './types'\n\n/**\n * A default, high-performance, in-memory implementation of ISyncContext using a Map.\n */\nexport class Context<TContext extends Record<string, any>> implements ISyncContext<TContext> {\n\tpublic readonly type = 'sync' as const\n\tprivate data: Map<string, any>\n\n\tconstructor(initialData: Partial<TContext> = {}) {\n\t\tthis.data = new 
Map(Object.entries(initialData))\n\t}\n\n\tget<K extends keyof TContext>(key: K): TContext[K] | undefined {\n\t\treturn this.data.get(String(key))\n\t}\n\n\tset<K extends keyof TContext>(key: K, value: TContext[K]): void {\n\t\tthis.data.set(String(key), value)\n\t}\n\n\thas(key: keyof TContext): boolean {\n\t\treturn this.data.has(String(key))\n\t}\n\n\tdelete(key: keyof TContext): boolean {\n\t\treturn this.data.delete(String(key))\n\t}\n\n\ttoJSON(): Record<string, any> {\n\t\treturn Object.fromEntries(this.data)\n\t}\n}\n\n/**\n * An adapter that provides a consistent, Promise-based view of a synchronous context.\n * This is created by the runtime and is transparent to the node author.\n */\nexport class AsyncContextView<TContext extends Record<string, any>> implements IAsyncContext<TContext> {\n\tpublic readonly type = 'async' as const\n\n\tconstructor(private syncContext: ISyncContext<TContext>) { }\n\n\tget<K extends keyof TContext>(key: K): Promise<TContext[K] | undefined> {\n\t\treturn Promise.resolve(this.syncContext.get(key))\n\t}\n\n\tset<K extends keyof TContext>(key: K, value: TContext[K]): Promise<void> {\n\t\tthis.syncContext.set(key, value)\n\t\treturn Promise.resolve()\n\t}\n\n\thas(key: keyof TContext): Promise<boolean> {\n\t\treturn Promise.resolve(this.syncContext.has(key))\n\t}\n\n\tdelete(key: keyof TContext): Promise<boolean> {\n\t\treturn Promise.resolve(this.syncContext.delete(key))\n\t}\n\n\ttoJSON(): Promise<Record<string, any>> {\n\t\treturn Promise.resolve(this.syncContext.toJSON())\n\t}\n}\n"]}
@@ -0,0 +1,106 @@
1
+ import { FlowRuntime } from './chunk-RYTIQZIB.js';
2
+ import { JsonSerializer } from './chunk-REH55ZXV.js';
3
+
4
// src/runtime/adapter.ts

// Base class for distributed workflow workers. Wraps a FlowRuntime and a
// coordination store, and drives one node execution per queue job.
// `processJobs`, `createContext`, `enqueueJob`, and `publishFinalResult` are
// called on `this` but not defined here — presumably supplied by a concrete
// transport-specific subclass (TODO: confirm against the subclass contracts).
var BaseDistributedAdapter = class {
  runtime;
  store;
  serializer;
  constructor(options) {
    this.runtime = new FlowRuntime(options.runtimeOptions);
    this.store = options.coordinationStore;
    // Fall back to JSON serialization when no serializer is configured.
    this.serializer = options.runtimeOptions.serializer || new JsonSerializer();
    console.log("[Adapter] BaseDistributedAdapter initialized.");
  }
  /**
   * Starts the worker, which begins listening for and processing jobs from the queue.
   */
  start() {
    console.log("[Adapter] Starting worker...");
    // Bind so the subclass-provided job loop invokes handleJob with this adapter as `this`.
    this.processJobs(this.handleJob.bind(this));
  }
  /**
   * The main handler for processing a single job from the queue.
   */
  async handleJob(job) {
    const { runId, blueprintId, nodeId } = job;
    // Blueprints are looked up from the runtime's registry; an unknown ID is fatal for the run.
    const blueprint = (this.runtime.options.blueprints || {})[blueprintId];
    if (!blueprint) {
      const reason = `Blueprint with ID '${blueprintId}' not found in the worker's runtime registry.`;
      console.error(`[Adapter] FATAL: ${reason}`);
      await this.publishFinalResult(runId, { status: "failed", reason });
      return;
    }
    const context = this.createContext(runId);
    // Minimal stand-in for the runtime's execution state: exposes the shared
    // context and logs node errors; fallback tracking is a no-op here.
    const mockState = {
      getContext: () => context,
      markFallbackExecuted: () => {
      },
      addError: (nodeId2, error) => {
        console.error(`[Adapter] Error in node ${nodeId2}:`, error);
      }
    };
    try {
      const result = await this.runtime.executeNode(blueprint, nodeId, mockState);
      // Persist this node's output under its own ID so downstream nodes can read it.
      await context.set(nodeId, result.output);
      const nodeDef = blueprint.nodes.find((n) => n.id === nodeId);
      // An 'output' node ends the whole workflow: publish the serialized final context.
      if (nodeDef?.uses === "output") {
        console.log(`[Adapter] \u2705 Output node '${nodeId}' finished. Declaring workflow complete for Run ID: ${runId}`);
        const finalContext = await context.toJSON();
        const finalResult = {
          context: finalContext,
          serializedContext: this.serializer.serialize(finalContext),
          status: "completed"
        };
        await this.publishFinalResult(runId, { status: "completed", payload: finalResult });
        return;
      }
      const nextNodes = await this.runtime.determineNextNodes(blueprint, nodeId, result, context);
      // Non-'output' dead ends just terminate their branch without completing the run.
      if (nextNodes.length === 0) {
        console.log(`[Adapter] Terminal node '${nodeId}' reached for Run ID '${runId}', but it was not an 'output' node. This branch will now terminate.`);
        return;
      }
      // Fan out: apply each edge's transform, then only enqueue successors
      // whose fan-in join condition is satisfied.
      for (const { node: nextNodeDef, edge } of nextNodes) {
        await this.runtime.applyEdgeTransform(edge, result, nextNodeDef, context);
        const isReady = await this.isReadyForFanIn(runId, blueprint, nextNodeDef.id);
        if (isReady) {
          console.log(`[Adapter] Node '${nextNodeDef.id}' is ready. Enqueuing job.`);
          await this.enqueueJob({ runId, blueprintId, nodeId: nextNodeDef.id });
        } else {
          console.log(`[Adapter] Node '${nextNodeDef.id}' is waiting for other predecessors to complete.`);
        }
      }
    } catch (error) {
      // Any node failure fails the whole run for this worker.
      const reason = error.message || "Unknown execution error";
      console.error(`[Adapter] FATAL: Job for node '${nodeId}' failed for Run ID '${runId}': ${reason}`);
      await this.publishFinalResult(runId, { status: "failed", reason });
    }
  }
  /**
   * Encapsulates the fan-in join logic using the coordination store.
   */
  async isReadyForFanIn(runId, blueprint, targetNodeId) {
    // NOTE(review): throws if targetNodeId is not in blueprint.nodes (find
    // returns undefined) — confirm callers only pass IDs from determineNextNodes.
    const targetNode = blueprint.nodes.find((n) => n.id === targetNodeId);
    const joinStrategy = targetNode.config?.joinStrategy || "all";
    const predecessors = blueprint.edges.filter((e) => e.target === targetNodeId);
    // A node with at most one incoming edge never needs coordination.
    if (predecessors.length <= 1) {
      return true;
    }
    if (joinStrategy === "any") {
      // 'any' join: first predecessor to claim the lock wins; others are dropped.
      // The third argument (3600) is presumably a TTL in seconds — confirm
      // against the coordination store's setIfNotExist contract.
      const lockKey = `flowcraft:joinlock:${runId}:${targetNodeId}`;
      return await this.store.setIfNotExist(lockKey, "locked", 3600);
    } else {
      // 'all' join (default): count arrivals; the last predecessor to arrive
      // clears the counter and proceeds.
      const fanInKey = `flowcraft:fanin:${runId}:${targetNodeId}`;
      const readyCount = await this.store.increment(fanInKey, 3600);
      if (readyCount >= predecessors.length) {
        await this.store.delete(fanInKey);
        return true;
      }
      return false;
    }
  }
};
103
+
104
+ export { BaseDistributedAdapter };
105
+ //# sourceMappingURL=chunk-NPAJNLXQ.js.map
106
+ //# sourceMappingURL=chunk-NPAJNLXQ.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/runtime/adapter.ts"],"names":["nodeId"],"mappings":";;;;AAkCO,IAAe,yBAAf,MAAsC;AAAA,EACzB,OAAA;AAAA,EACA,KAAA;AAAA,EACA,UAAA;AAAA,EAEnB,YAAY,OAAA,EAAyB;AACpC,IAAA,IAAA,CAAK,OAAA,GAAU,IAAI,WAAA,CAAY,OAAA,CAAQ,cAAc,CAAA;AACrD,IAAA,IAAA,CAAK,QAAQ,OAAA,CAAQ,iBAAA;AACrB,IAAA,IAAA,CAAK,UAAA,GAAa,OAAA,CAAQ,cAAA,CAAe,UAAA,IAAc,IAAI,cAAA,EAAe;AAC1E,IAAA,OAAA,CAAQ,IAAI,+CAA+C,CAAA;AAAA,EAC5D;AAAA;AAAA;AAAA;AAAA,EAKO,KAAA,GAAc;AACpB,IAAA,OAAA,CAAQ,IAAI,8BAA8B,CAAA;AAC1C,IAAA,IAAA,CAAK,WAAA,CAAY,IAAA,CAAK,SAAA,CAAU,IAAA,CAAK,IAAI,CAAC,CAAA;AAAA,EAC3C;AAAA;AAAA;AAAA;AAAA,EA8BA,MAAc,UAAU,GAAA,EAAgC;AACvD,IAAA,MAAM,EAAE,KAAA,EAAO,WAAA,EAAa,MAAA,EAAO,GAAI,GAAA;AAEvC,IAAA,MAAM,aAAa,IAAA,CAAK,OAAA,CAAQ,QAAQ,UAAA,IAAc,IAAI,WAAW,CAAA;AACrE,IAAA,IAAI,CAAC,SAAA,EAAW;AACf,MAAA,MAAM,MAAA,GAAS,sBAAsB,WAAW,CAAA,6CAAA,CAAA;AAChD,MAAA,OAAA,CAAQ,KAAA,CAAM,CAAA,iBAAA,EAAoB,MAAM,CAAA,CAAE,CAAA;AAC1C,MAAA,MAAM,KAAK,kBAAA,CAAmB,KAAA,EAAO,EAAE,MAAA,EAAQ,QAAA,EAAU,QAAQ,CAAA;AACjE,MAAA;AAAA,IACD;AAEA,IAAA,MAAM,OAAA,GAAU,IAAA,CAAK,aAAA,CAAc,KAAK,CAAA;AACxC,IAAA,MAAM,SAAA,GAAY;AAAA,MACjB,YAAY,MAAM,OAAA;AAAA,MAClB,sBAAsB,MAAM;AAAA,MAAE,CAAA;AAAA,MAC9B,QAAA,EAAU,CAACA,OAAAA,EAAgB,KAAA,KAAiB;AAC3C,QAAA,OAAA,CAAQ,KAAA,CAAM,CAAA,wBAAA,EAA2BA,OAAM,CAAA,CAAA,CAAA,EAAK,KAAK,CAAA;AAAA,MAC1D;AAAA,KACD;AAEA,IAAA,IAAI;AACH,MAAA,MAAM,SAAS,MAAM,IAAA,CAAK,QAAQ,WAAA,CAAY,SAAA,EAAW,QAAQ,SAAS,CAAA;AAC1E,MAAA,MAAM,OAAA,CAAQ,GAAA,CAAI,MAAA,EAAe,MAAA,CAAO,MAAM,CAAA;AAE9C,MAAA,MAAM,UAAU,SAAA,CAAU,KAAA,CAAM,KAAK,CAAA,CAAA,KAAK,CAAA,CAAE,OAAO,MAAM,CAAA;AAEzD,MAAA,IAAI,OAAA,EAAS,SAAS,QAAA,EAAU;AAC/B,QAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,8BAAA,EAA4B,MAAM,CAAA,oDAAA,EAAuD,KAAK,CAAA,CAAE,CAAA;AAC5G,QAAA,MAAM,YAAA,GAAe,MAAM,OAAA,CAAQ,MAAA,EAAO;AAC1C,QAAA,MAAM,WAAA,GAA8B;AAAA,UACnC,OAAA,EAAS,YAAA;AAAA,UACT,iBAAA,EAAmB,IAAA,CAAK,UAAA,CAAW,SAAA,CAAU,YAAY,CAAA;AAAA,UACzD,MAAA,EAAQ;AAAA,SACT;AACA,QAAA,MAAM,IAAA,CAAK,mBAAmB,KAAA,EAAO,EAAE,QAAQ,WAAA,EAAa,OAAA,EAAS,aAAa,CAAA;AAClF,QAAA;AAAA,MACD;AAEA,MAAA,MAAM,SAA
A,GAAY,MAAM,IAAA,CAAK,OAAA,CAAQ,mBAAmB,SAAA,EAAW,MAAA,EAAQ,QAAQ,OAAO,CAAA;AAG1F,MAAA,IAAI,SAAA,CAAU,WAAW,CAAA,EAAG;AAC3B,QAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,yBAAA,EAA4B,MAAM,CAAA,sBAAA,EAAyB,KAAK,CAAA,mEAAA,CAAqE,CAAA;AACjJ,QAAA;AAAA,MACD;AAEA,MAAA,KAAA,MAAW,EAAE,IAAA,EAAM,WAAA,EAAa,IAAA,MAAU,SAAA,EAAW;AACpD,QAAA,MAAM,KAAK,OAAA,CAAQ,kBAAA,CAAmB,IAAA,EAAM,MAAA,EAAQ,aAAa,OAAO,CAAA;AACxE,QAAA,MAAM,UAAU,MAAM,IAAA,CAAK,gBAAgB,KAAA,EAAO,SAAA,EAAW,YAAY,EAAE,CAAA;AAC3E,QAAA,IAAI,OAAA,EAAS;AACZ,UAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,gBAAA,EAAmB,WAAA,CAAY,EAAE,CAAA,0BAAA,CAA4B,CAAA;AACzE,UAAA,MAAM,IAAA,CAAK,WAAW,EAAE,KAAA,EAAO,aAAa,MAAA,EAAQ,WAAA,CAAY,IAAI,CAAA;AAAA,QACrE,CAAA,MACK;AACJ,UAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,gBAAA,EAAmB,WAAA,CAAY,EAAE,CAAA,gDAAA,CAAkD,CAAA;AAAA,QAChG;AAAA,MACD;AAAA,IACD,SACO,KAAA,EAAY;AAClB,MAAA,MAAM,MAAA,GAAS,MAAM,OAAA,IAAW,yBAAA;AAChC,MAAA,OAAA,CAAQ,MAAM,CAAA,+BAAA,EAAkC,MAAM,wBAAwB,KAAK,CAAA,GAAA,EAAM,MAAM,CAAA,CAAE,CAAA;AACjG,MAAA,MAAM,KAAK,kBAAA,CAAmB,KAAA,EAAO,EAAE,MAAA,EAAQ,QAAA,EAAU,QAAQ,CAAA;AAAA,IAClE;AAAA,EACD;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,eAAA,CAAgB,KAAA,EAAe,SAAA,EAA8B,YAAA,EAAwC;AAClH,IAAA,MAAM,aAAa,SAAA,CAAU,KAAA,CAAM,KAAK,CAAA,CAAA,KAAK,CAAA,CAAE,OAAO,YAAY,CAAA;AAClE,IAAA,MAAM,YAAA,GAAe,UAAA,CAAW,MAAA,EAAQ,YAAA,IAAgB,KAAA;AACxD,IAAA,MAAM,eAAe,SAAA,CAAU,KAAA,CAAM,OAAO,CAAA,CAAA,KAAK,CAAA,CAAE,WAAW,YAAY,CAAA;AAE1E,IAAA,IAAI,YAAA,CAAa,UAAU,CAAA,EAAG;AAC7B,MAAA,OAAO,IAAA;AAAA,IACR;AAEA,IAAA,IAAI,iBAAiB,KAAA,EAAO;AAC3B,MAAA,MAAM,OAAA,GAAU,CAAA,mBAAA,EAAsB,KAAK,CAAA,CAAA,EAAI,YAAY,CAAA,CAAA;AAC3D,MAAA,OAAO,MAAM,IAAA,CAAK,KAAA,CAAM,aAAA,CAAc,OAAA,EAAS,UAAU,IAAI,CAAA;AAAA,IAC9D,CAAA,MACK;AACJ,MAAA,MAAM,QAAA,GAAW,CAAA,gBAAA,EAAmB,KAAK,CAAA,CAAA,EAAI,YAAY,CAAA,CAAA;AACzD,MAAA,MAAM,aAAa,MAAM,IAAA,CAAK,KAAA,CAAM,SAAA,CAAU,UAAU,IAAI,CAAA;AAC5D,MAAA,IAAI,UAAA,IAAc,aAAa,MAAA,EAAQ;AACtC,QAAA,MAAM,IAAA,CAAK,KAAA,CAAM,MAAA,CAAO,QAAQ,CAAA;AAChC,QAAA,OAAO,IAAA;AAAA,MACR;AACA,MAAA,OAAO,KAAA;AAAA,IACR;AAAA,EACD;AACD","file":"chunk-NPAJNLXQ.js","sourcesContent":["import 
type { IAsyncContext, ISerializer, RuntimeOptions, WorkflowBlueprint, WorkflowResult } from '../types'\nimport { JsonSerializer } from '../serializer'\nimport { FlowRuntime } from './runtime'\n\n/**\n * Defines the contract for an atomic, distributed key-value store required by\n * the adapter for coordination tasks like fan-in joins and locking.\n */\nexport interface ICoordinationStore {\n\t/** Atomically increments a key and returns the new value. Ideal for 'all' joins. */\n\tincrement: (key: string, ttlSeconds: number) => Promise<number>\n\t/** Sets a key only if it does not already exist. Ideal for 'any' joins (locking). */\n\tsetIfNotExist: (key: string, value: string, ttlSeconds: number) => Promise<boolean>\n\t/** Deletes a key. Used for cleanup. */\n\tdelete: (key: string) => Promise<void>\n}\n\n/** Configuration options for constructing a BaseDistributedAdapter. */\nexport interface AdapterOptions {\n\truntimeOptions: RuntimeOptions<any>\n\tcoordinationStore: ICoordinationStore\n}\n\n/** The data payload expected for a job in the queue. */\nexport interface JobPayload {\n\trunId: string\n\tblueprintId: string\n\tnodeId: string\n}\n\n/**\n * The base class for all distributed adapters. 
It handles the technology-agnostic\n * orchestration logic and leaves queue-specific implementation to subclasses.\n */\nexport abstract class BaseDistributedAdapter {\n\tprotected readonly runtime: FlowRuntime<any, any>\n\tprotected readonly store: ICoordinationStore\n\tprotected readonly serializer: ISerializer\n\n\tconstructor(options: AdapterOptions) {\n\t\tthis.runtime = new FlowRuntime(options.runtimeOptions)\n\t\tthis.store = options.coordinationStore\n\t\tthis.serializer = options.runtimeOptions.serializer || new JsonSerializer()\n\t\tconsole.log('[Adapter] BaseDistributedAdapter initialized.')\n\t}\n\n\t/**\n\t * Starts the worker, which begins listening for and processing jobs from the queue.\n\t */\n\tpublic start(): void {\n\t\tconsole.log('[Adapter] Starting worker...')\n\t\tthis.processJobs(this.handleJob.bind(this))\n\t}\n\n\t/**\n\t * Creates a technology-specific distributed context for a given workflow run.\n\t * @param runId The unique ID for the workflow execution.\n\t */\n\tprotected abstract createContext(runId: string): IAsyncContext<Record<string, any>>\n\t/**\n\t * Sets up the listener for the message queue. 
The implementation should call the\n\t * provided `handler` function for each new job received.\n\t * @param handler The core logic to execute for each job.\n\t */\n\tprotected abstract processJobs(handler: (job: JobPayload) => Promise<void>): void\n\n\t/**\n\t * Enqueues a new job onto the message queue.\n\t * @param job The payload for the job to be enqueued.\n\t */\n\tprotected abstract enqueueJob(job: JobPayload): Promise<void>\n\n\t/**\n\t * Publishes the final result of a completed or failed workflow run.\n\t * @param runId The unique ID of the workflow run.\n\t * @param result The final status and payload of the workflow.\n\t */\n\tprotected abstract publishFinalResult(runId: string, result: { status: 'completed' | 'failed', payload?: WorkflowResult, reason?: string }): Promise<void>\n\n\t/**\n\t * The main handler for processing a single job from the queue.\n\t */\n\tprivate async handleJob(job: JobPayload): Promise<void> {\n\t\tconst { runId, blueprintId, nodeId } = job\n\n\t\tconst blueprint = (this.runtime.options.blueprints || {})[blueprintId]\n\t\tif (!blueprint) {\n\t\t\tconst reason = `Blueprint with ID '${blueprintId}' not found in the worker's runtime registry.`\n\t\t\tconsole.error(`[Adapter] FATAL: ${reason}`)\n\t\t\tawait this.publishFinalResult(runId, { status: 'failed', reason })\n\t\t\treturn\n\t\t}\n\n\t\tconst context = this.createContext(runId)\n\t\tconst mockState = {\n\t\t\tgetContext: () => context,\n\t\t\tmarkFallbackExecuted: () => { },\n\t\t\taddError: (nodeId: string, error: Error) => {\n\t\t\t\tconsole.error(`[Adapter] Error in node ${nodeId}:`, error)\n\t\t\t},\n\t\t} as any\n\n\t\ttry {\n\t\t\tconst result = await this.runtime.executeNode(blueprint, nodeId, mockState)\n\t\t\tawait context.set(nodeId as any, result.output)\n\n\t\t\tconst nodeDef = blueprint.nodes.find(n => n.id === nodeId)\n\t\t\t// Convention: The workflow is considered complete when the first 'output' node finishes.\n\t\t\tif (nodeDef?.uses === 'output') 
{\n\t\t\t\tconsole.log(`[Adapter] ✅ Output node '${nodeId}' finished. Declaring workflow complete for Run ID: ${runId}`)\n\t\t\t\tconst finalContext = await context.toJSON()\n\t\t\t\tconst finalResult: WorkflowResult = {\n\t\t\t\t\tcontext: finalContext,\n\t\t\t\t\tserializedContext: this.serializer.serialize(finalContext),\n\t\t\t\t\tstatus: 'completed',\n\t\t\t\t}\n\t\t\t\tawait this.publishFinalResult(runId, { status: 'completed', payload: finalResult })\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\tconst nextNodes = await this.runtime.determineNextNodes(blueprint, nodeId, result, context)\n\n\t\t\t// If a branch terminates but it wasn't an 'output' node, just stop.\n\t\t\tif (nextNodes.length === 0) {\n\t\t\t\tconsole.log(`[Adapter] Terminal node '${nodeId}' reached for Run ID '${runId}', but it was not an 'output' node. This branch will now terminate.`)\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\tfor (const { node: nextNodeDef, edge } of nextNodes) {\n\t\t\t\tawait this.runtime.applyEdgeTransform(edge, result, nextNodeDef, context)\n\t\t\t\tconst isReady = await this.isReadyForFanIn(runId, blueprint, nextNodeDef.id)\n\t\t\t\tif (isReady) {\n\t\t\t\t\tconsole.log(`[Adapter] Node '${nextNodeDef.id}' is ready. 
Enqueuing job.`)\n\t\t\t\t\tawait this.enqueueJob({ runId, blueprintId, nodeId: nextNodeDef.id })\n\t\t\t\t}\n\t\t\t\telse {\n\t\t\t\t\tconsole.log(`[Adapter] Node '${nextNodeDef.id}' is waiting for other predecessors to complete.`)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tcatch (error: any) {\n\t\t\tconst reason = error.message || 'Unknown execution error'\n\t\t\tconsole.error(`[Adapter] FATAL: Job for node '${nodeId}' failed for Run ID '${runId}': ${reason}`)\n\t\t\tawait this.publishFinalResult(runId, { status: 'failed', reason })\n\t\t}\n\t}\n\n\t/**\n\t * Encapsulates the fan-in join logic using the coordination store.\n\t */\n\tprivate async isReadyForFanIn(runId: string, blueprint: WorkflowBlueprint, targetNodeId: string): Promise<boolean> {\n\t\tconst targetNode = blueprint.nodes.find(n => n.id === targetNodeId)!\n\t\tconst joinStrategy = targetNode.config?.joinStrategy || 'all'\n\t\tconst predecessors = blueprint.edges.filter(e => e.target === targetNodeId)\n\n\t\tif (predecessors.length <= 1) {\n\t\t\treturn true\n\t\t}\n\n\t\tif (joinStrategy === 'any') {\n\t\t\tconst lockKey = `flowcraft:joinlock:${runId}:${targetNodeId}`\n\t\t\treturn await this.store.setIfNotExist(lockKey, 'locked', 3600)\n\t\t}\n\t\telse {\n\t\t\tconst fanInKey = `flowcraft:fanin:${runId}:${targetNodeId}`\n\t\t\tconst readyCount = await this.store.increment(fanInKey, 3600)\n\t\t\tif (readyCount >= predecessors.length) {\n\t\t\t\tawait this.store.delete(fanInKey)\n\t\t\t\treturn true\n\t\t\t}\n\t\t\treturn false\n\t\t}\n\t}\n}\n"]}
@@ -0,0 +1,64 @@
1
+ import { analyzeBlueprint } from './chunk-M23P46ZL.js';
2
+
3
+ // src/linter.ts
4
+ function lintBlueprint(blueprint, registry) {
5
+ const issues = [];
6
+ const nodeIds = new Set(blueprint.nodes.map((n) => n.id));
7
+ const registryKeys = registry instanceof Map ? new Set(registry.keys()) : new Set(Object.keys(registry));
8
+ for (const node of blueprint.nodes) {
9
+ if (!node.uses.startsWith("batch-") && !node.uses.startsWith("loop-") && !registryKeys.has(node.uses)) {
10
+ issues.push({
11
+ code: "MISSING_NODE_IMPLEMENTATION",
12
+ message: `Node implementation key '${node.uses}' is not found in the provided registry.`,
13
+ nodeId: node.id
14
+ });
15
+ }
16
+ }
17
+ for (const edge of blueprint.edges || []) {
18
+ if (!nodeIds.has(edge.source)) {
19
+ issues.push({
20
+ code: "INVALID_EDGE_SOURCE",
21
+ message: `Edge source '${edge.source}' does not correspond to a valid node ID.`,
22
+ relatedId: edge.target
23
+ });
24
+ }
25
+ if (!nodeIds.has(edge.target)) {
26
+ issues.push({
27
+ code: "INVALID_EDGE_TARGET",
28
+ message: `Edge target '${edge.target}' does not correspond to a valid node ID.`,
29
+ relatedId: edge.source
30
+ });
31
+ }
32
+ }
33
+ if (blueprint.nodes.length > 1) {
34
+ const analysis = analyzeBlueprint(blueprint);
35
+ const connectedNodes = /* @__PURE__ */ new Set();
36
+ const nodesToVisit = [...analysis.startNodeIds];
37
+ const visited = /* @__PURE__ */ new Set();
38
+ while (nodesToVisit.length > 0) {
39
+ const currentId = nodesToVisit.pop();
40
+ if (visited.has(currentId))
41
+ continue;
42
+ visited.add(currentId);
43
+ connectedNodes.add(currentId);
44
+ blueprint.edges.filter((e) => e.source === currentId).forEach((e) => nodesToVisit.push(e.target));
45
+ }
46
+ for (const nodeId of nodeIds) {
47
+ if (!connectedNodes.has(nodeId)) {
48
+ issues.push({
49
+ code: "ORPHAN_NODE",
50
+ message: `Node '${nodeId}' is not reachable from any start node.`,
51
+ nodeId
52
+ });
53
+ }
54
+ }
55
+ }
56
+ return {
57
+ isValid: issues.length === 0,
58
+ issues
59
+ };
60
+ }
61
+
62
+ export { lintBlueprint };
63
+ //# sourceMappingURL=chunk-RAZXOMZC.js.map
64
+ //# sourceMappingURL=chunk-RAZXOMZC.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/linter.ts"],"names":[],"mappings":";;;AA4BO,SAAS,aAAA,CACf,WACA,QAAA,EACe;AACf,EAAA,MAAM,SAAwB,EAAC;AAC/B,EAAA,MAAM,OAAA,GAAU,IAAI,GAAA,CAAI,SAAA,CAAU,MAAM,GAAA,CAAI,CAAA,CAAA,KAAK,CAAA,CAAE,EAAE,CAAC,CAAA;AACtD,EAAA,MAAM,YAAA,GAAe,QAAA,YAAoB,GAAA,GAAM,IAAI,IAAI,QAAA,CAAS,IAAA,EAAM,CAAA,GAAI,IAAI,GAAA,CAAI,MAAA,CAAO,IAAA,CAAK,QAAQ,CAAC,CAAA;AAGvG,EAAA,KAAA,MAAW,IAAA,IAAQ,UAAU,KAAA,EAAO;AACnC,IAAA,IAAI,CAAC,IAAA,CAAK,IAAA,CAAK,UAAA,CAAW,QAAQ,KAAK,CAAC,IAAA,CAAK,IAAA,CAAK,UAAA,CAAW,OAAO,CAAA,IAAK,CAAC,aAAa,GAAA,CAAI,IAAA,CAAK,IAAI,CAAA,EAAG;AACtG,MAAA,MAAA,CAAO,IAAA,CAAK;AAAA,QACX,IAAA,EAAM,6BAAA;AAAA,QACN,OAAA,EAAS,CAAA,yBAAA,EAA4B,IAAA,CAAK,IAAI,CAAA,wCAAA,CAAA;AAAA,QAC9C,QAAQ,IAAA,CAAK;AAAA,OACb,CAAA;AAAA,IACF;AAAA,EACD;AAGA,EAAA,KAAA,MAAW,IAAA,IAAQ,SAAA,CAAU,KAAA,IAAS,EAAC,EAAG;AACzC,IAAA,IAAI,CAAC,OAAA,CAAQ,GAAA,CAAI,IAAA,CAAK,MAAM,CAAA,EAAG;AAC9B,MAAA,MAAA,CAAO,IAAA,CAAK;AAAA,QACX,IAAA,EAAM,qBAAA;AAAA,QACN,OAAA,EAAS,CAAA,aAAA,EAAgB,IAAA,CAAK,MAAM,CAAA,yCAAA,CAAA;AAAA,QACpC,WAAW,IAAA,CAAK;AAAA,OAChB,CAAA;AAAA,IACF;AACA,IAAA,IAAI,CAAC,OAAA,CAAQ,GAAA,CAAI,IAAA,CAAK,MAAM,CAAA,EAAG;AAC9B,MAAA,MAAA,CAAO,IAAA,CAAK;AAAA,QACX,IAAA,EAAM,qBAAA;AAAA,QACN,OAAA,EAAS,CAAA,aAAA,EAAgB,IAAA,CAAK,MAAM,CAAA,yCAAA,CAAA;AAAA,QACpC,WAAW,IAAA,CAAK;AAAA,OAChB,CAAA;AAAA,IACF;AAAA,EACD;AAGA,EAAA,IAAI,SAAA,CAAU,KAAA,CAAM,MAAA,GAAS,CAAA,EAAG;AAC/B,IAAA,MAAM,QAAA,GAAW,iBAAiB,SAAS,CAAA;AAC3C,IAAA,MAAM,cAAA,uBAAqB,GAAA,EAAY;AACvC,IAAA,MAAM,YAAA,GAAe,CAAC,GAAG,QAAA,CAAS,YAAY,CAAA;AAC9C,IAAA,MAAM,OAAA,uBAAc,GAAA,EAAY;AAEhC,IAAA,OAAO,YAAA,CAAa,SAAS,CAAA,EAAG;AAC/B,MAAA,MAAM,SAAA,GAAY,aAAa,GAAA,EAAI;AACnC,MAAA,IAAI,OAAA,CAAQ,IAAI,SAAS,CAAA;AACxB,QAAA;AAED,MAAA,OAAA,CAAQ,IAAI,SAAS,CAAA;AACrB,MAAA,cAAA,CAAe,IAAI,SAAS,CAAA;AAE5B,MAAA,SAAA,CAAU,KAAA,CACR,MAAA,CAAO,CAAA,CAAA,KAAK,CAAA,CAAE,MAAA,KAAW,SAAS,CAAA,CAClC,OAAA,CAAQ,CAAA,CAAA,KAAK,YAAA,CAAa,IAAA,CAAK,CAAA,CAAE,MAAM,CAAC,CAAA;AAAA,IAC3C;AAEA,IAAA,KAAA,MAAW,UAAU,OAAA,EAAS;AAC7B,MAAA,IAAI,CAAC,cAAA,CAAe,GAAA,CAAI
,MAAM,CAAA,EAAG;AAChC,QAAA,MAAA,CAAO,IAAA,CAAK;AAAA,UACX,IAAA,EAAM,aAAA;AAAA,UACN,OAAA,EAAS,SAAS,MAAM,CAAA,uCAAA,CAAA;AAAA,UACxB;AAAA,SACA,CAAA;AAAA,MACF;AAAA,IACD;AAAA,EACD;AAEA,EAAA,OAAO;AAAA,IACN,OAAA,EAAS,OAAO,MAAA,KAAW,CAAA;AAAA,IAC3B;AAAA,GACD;AACD","file":"chunk-RAZXOMZC.js","sourcesContent":["import type { NodeClass, NodeFunction, WorkflowBlueprint } from './types'\nimport { analyzeBlueprint } from './analysis'\n\nexport type LinterIssueCode = | 'INVALID_EDGE_SOURCE'\n\t| 'INVALID_EDGE_TARGET'\n\t| 'MISSING_NODE_IMPLEMENTATION'\n\t| 'ORPHAN_NODE'\n\nexport interface LinterIssue {\n\tcode: LinterIssueCode\n\tmessage: string\n\tnodeId?: string\n\trelatedId?: string\n}\n\nexport interface LinterResult {\n\tisValid: boolean\n\tissues: LinterIssue[]\n}\n\n/**\n * Statically analyzes a workflow blueprint against a registry of implementations\n * to find common errors before runtime.\n *\n * @param blueprint The WorkflowBlueprint to analyze.\n * @param registry A map of node implementations (functions or classes) to check against.\n * @returns A LinterResult object containing any issues found.\n */\nexport function lintBlueprint(\n\tblueprint: WorkflowBlueprint,\n\tregistry: Map<string, NodeFunction | NodeClass> | Record<string, NodeFunction | NodeClass>,\n): LinterResult {\n\tconst issues: LinterIssue[] = []\n\tconst nodeIds = new Set(blueprint.nodes.map(n => n.id))\n\tconst registryKeys = registry instanceof Map ? new Set(registry.keys()) : new Set(Object.keys(registry))\n\n\t// 1. Check for missing node implementations\n\tfor (const node of blueprint.nodes) {\n\t\tif (!node.uses.startsWith('batch-') && !node.uses.startsWith('loop-') && !registryKeys.has(node.uses)) {\n\t\t\tissues.push({\n\t\t\t\tcode: 'MISSING_NODE_IMPLEMENTATION',\n\t\t\t\tmessage: `Node implementation key '${node.uses}' is not found in the provided registry.`,\n\t\t\t\tnodeId: node.id,\n\t\t\t})\n\t\t}\n\t}\n\n\t// 2. 
Check for graph integrity (edges must point to valid nodes)\n\tfor (const edge of blueprint.edges || []) {\n\t\tif (!nodeIds.has(edge.source)) {\n\t\t\tissues.push({\n\t\t\t\tcode: 'INVALID_EDGE_SOURCE',\n\t\t\t\tmessage: `Edge source '${edge.source}' does not correspond to a valid node ID.`,\n\t\t\t\trelatedId: edge.target,\n\t\t\t})\n\t\t}\n\t\tif (!nodeIds.has(edge.target)) {\n\t\t\tissues.push({\n\t\t\t\tcode: 'INVALID_EDGE_TARGET',\n\t\t\t\tmessage: `Edge target '${edge.target}' does not correspond to a valid node ID.`,\n\t\t\t\trelatedId: edge.source,\n\t\t\t})\n\t\t}\n\t}\n\n\t// 3. Check for orphan nodes (not connected to the main graph)\n\tif (blueprint.nodes.length > 1) {\n\t\tconst analysis = analyzeBlueprint(blueprint)\n\t\tconst connectedNodes = new Set<string>()\n\t\tconst nodesToVisit = [...analysis.startNodeIds]\n\t\tconst visited = new Set<string>()\n\n\t\twhile (nodesToVisit.length > 0) {\n\t\t\tconst currentId = nodesToVisit.pop()!\n\t\t\tif (visited.has(currentId))\n\t\t\t\tcontinue\n\n\t\t\tvisited.add(currentId)\n\t\t\tconnectedNodes.add(currentId)\n\n\t\t\tblueprint.edges\n\t\t\t\t.filter(e => e.source === currentId)\n\t\t\t\t.forEach(e => nodesToVisit.push(e.target))\n\t\t}\n\n\t\tfor (const nodeId of nodeIds) {\n\t\t\tif (!connectedNodes.has(nodeId)) {\n\t\t\t\tissues.push({\n\t\t\t\t\tcode: 'ORPHAN_NODE',\n\t\t\t\t\tmessage: `Node '${nodeId}' is not reachable from any start node.`,\n\t\t\t\t\tnodeId,\n\t\t\t\t})\n\t\t\t}\n\t\t}\n\t}\n\n\treturn {\n\t\tisValid: issues.length === 0,\n\t\tissues,\n\t}\n}\n"]}
@@ -0,0 +1,13 @@
1
+ // src/serializer.ts
2
/**
 * Default serializer built on standard JSON.
 *
 * @warning Lossy: values such as `Date`, `Map`, `Set`, and `undefined` do not
 * round-trip. Supply a richer serializer (e.g. superjson) for complex data.
 */
var JsonSerializer = class {
  /** Parses a JSON string (as produced by `serialize`) back into an object. */
  deserialize(text) {
    return JSON.parse(text);
  }
  /** Renders the given object as a JSON string. */
  serialize(data) {
    return JSON.stringify(data);
  }
};
10
+
11
+ export { JsonSerializer };
12
+ //# sourceMappingURL=chunk-REH55ZXV.js.map
13
+ //# sourceMappingURL=chunk-REH55ZXV.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/serializer.ts"],"names":[],"mappings":";AASO,IAAM,iBAAN,MAA4C;AAAA,EAClD,UAAU,IAAA,EAAmC;AAC5C,IAAA,OAAO,IAAA,CAAK,UAAU,IAAI,CAAA;AAAA,EAC3B;AAAA,EAEA,YAAY,IAAA,EAAmC;AAC9C,IAAA,OAAO,IAAA,CAAK,MAAM,IAAI,CAAA;AAAA,EACvB;AACD","file":"chunk-REH55ZXV.js","sourcesContent":["import type { ISerializer } from './types'\n\n/**\n * A default serializer using standard JSON.\n *\n * @warning This implementation is lossy and does not handle complex data types\n * like `Date`, `Map`, `Set`, `undefined`, etc. It is recommended to provide a robust\n * serializer like `superjson` if working with complex data types.\n */\nexport class JsonSerializer implements ISerializer {\n\tserialize(data: Record<string, any>): string {\n\t\treturn JSON.stringify(data)\n\t}\n\n\tdeserialize(text: string): Record<string, any> {\n\t\treturn JSON.parse(text)\n\t}\n}\n"]}
@@ -0,0 +1,135 @@
1
+ import { CancelledWorkflowError } from './chunk-WXT3YEWU.js';
2
+ import { analyzeBlueprint } from './chunk-M23P46ZL.js';
3
+
4
+ // src/runtime/traverser.ts
5
// Drives wave-based execution of a workflow blueprint: each iteration of
// `traverse` runs every node currently in the frontier in parallel via
// runtime.executeNode, then derives the next frontier from the results
// (fan-in join gating, loop-controller precedence, dynamically spawned nodes).
var GraphTraverser = class {
  /**
   * @param blueprint        The workflow definition (nodes + edges). Deep-copied below.
   * @param runtime          Runtime used for executeNode / determineNextNodes / applyEdgeTransform.
   * @param state            Mutable execution state (completed nodes, errors, context).
   * @param functionRegistry Registry forwarded to executeNode.
   * @param executionId      Identifier forwarded to executeNode.
   * @param signal           Optional AbortSignal; aborting surfaces as CancelledWorkflowError.
   */
  constructor(blueprint, runtime, state, functionRegistry, executionId, signal) {
    this.blueprint = blueprint;
    this.runtime = runtime;
    this.state = state;
    this.functionRegistry = functionRegistry;
    this.executionId = executionId;
    this.signal = signal;
    // JSON round-trip deep copy so handleDynamicNodes can push nodes without
    // mutating the caller's blueprint object.
    this.dynamicBlueprint = JSON.parse(JSON.stringify(blueprint));
    // nodeId -> Set of predecessor nodeIds, built from the edge list.
    this.allPredecessors = /* @__PURE__ */ new Map();
    this.dynamicBlueprint.nodes.forEach((node) => this.allPredecessors.set(node.id, /* @__PURE__ */ new Set()));
    this.dynamicBlueprint.edges.forEach((edge) => this.allPredecessors.get(edge.target)?.add(edge.source));
    const analysis = analyzeBlueprint(blueprint);
    // Seed the frontier with the analyzed start nodes, excluding nodes that
    // only exist as fallbacks for other nodes.
    this.frontier = new Set(analysis.startNodeIds.filter((id) => !this.isFallbackNode(id)));
    // No start nodes but cycles exist: in non-strict mode, seed from the first
    // node of each cycle so a purely cyclic graph can still begin executing.
    if (this.frontier.size === 0 && analysis.cycles.length > 0 && this.runtime.options.strict !== true) {
      const uniqueStartNodes = /* @__PURE__ */ new Set();
      for (const cycle of analysis.cycles) {
        if (cycle.length > 0)
          uniqueStartNodes.add(cycle[0]);
      }
      this.frontier = new Set(uniqueStartNodes);
    }
  }
  // Field initializers run before the constructor body, which then overwrites
  // `frontier` with the computed start set.
  frontier = /* @__PURE__ */ new Set();
  allPredecessors;
  dynamicBlueprint;
  // True if any node declares `nodeId` as its config.fallback target.
  isFallbackNode(nodeId) {
    return this.dynamicBlueprint.nodes.some((n) => n.config?.fallback === nodeId);
  }
  /**
   * Executes the graph wave by wave until the frontier is empty.
   * Abort requests (DOMException 'AbortError') are translated into
   * CancelledWorkflowError; all other errors propagate unchanged.
   */
  async traverse() {
    try {
      this.signal?.throwIfAborted();
    } catch (error) {
      if (error instanceof DOMException && error.name === "AbortError")
        throw new CancelledWorkflowError("Workflow cancelled");
      throw error;
    }
    let iterations = 0;
    // Safety cap: 10,000 waves before declaring a runaway loop.
    const maxIterations = 1e4;
    while (this.frontier.size > 0) {
      if (++iterations > maxIterations)
        throw new Error("Traversal exceeded maximum iterations, possible infinite loop");
      try {
        this.signal?.throwIfAborted();
        // Snapshot and clear the frontier; successors found below repopulate it.
        const currentJobs = Array.from(this.frontier);
        this.frontier.clear();
        // Run the whole wave in parallel, capturing each node's outcome in a
        // settled-style envelope so one failure doesn't reject the batch.
        const promises = currentJobs.map(
          (nodeId) => this.runtime.executeNode(this.dynamicBlueprint, nodeId, this.state, this.allPredecessors, this.functionRegistry, this.executionId, this.signal).then((result) => ({ status: "fulfilled", value: { nodeId, result } })).catch((error) => ({ status: "rejected", reason: { nodeId, error } }))
        );
        const settledResults = await Promise.all(promises);
        // Nodes completed in THIS wave; used by 'any' join readiness checks.
        const completedThisTurn = /* @__PURE__ */ new Set();
        for (const promiseResult of settledResults) {
          if (promiseResult.status === "rejected") {
            const { nodeId: nodeId2, error } = promiseResult.reason;
            // Cancellation aborts the whole traversal; other errors are
            // recorded on the state and the branch simply stops.
            if (error instanceof CancelledWorkflowError)
              throw error;
            this.state.addError(nodeId2, error);
            continue;
          }
          const { nodeId, result } = promiseResult.value;
          this.state.addCompletedNode(nodeId, result.output);
          completedThisTurn.add(nodeId);
          if (result._fallbackExecuted)
            this.state.markFallbackExecuted();
          // Splice in any nodes the result spawned before scheduling successors.
          await this.handleDynamicNodes(nodeId, result);
          // When a fallback ran for this node, its successors are not scheduled.
          if (!result._fallbackExecuted) {
            const matched = await this.runtime.determineNextNodes(this.dynamicBlueprint, nodeId, result, this.state.getContext());
            // A matched 'loop-controller' node takes precedence over all other matches.
            const loopControllerMatch = matched.find((m) => m.node.uses === "loop-controller");
            const finalMatched = loopControllerMatch ? [loopControllerMatch] : matched;
            for (const { node, edge } of finalMatched) {
              const joinStrategy = node.config?.joinStrategy || "all";
              // Non-'any' targets that already completed are not re-run.
              if (joinStrategy !== "any" && this.state.getCompletedNodes().has(node.id))
                continue;
              await this.runtime.applyEdgeTransform(edge, result, node, this.state.getContext(), this.allPredecessors);
              const requiredPredecessors = this.allPredecessors.get(node.id);
              // 'any': ready if at least one predecessor finished this wave;
              // 'all': ready only when every predecessor has ever completed.
              const isReady = joinStrategy === "any" ? [...requiredPredecessors].some((p) => completedThisTurn.has(p)) : [...requiredPredecessors].every((p) => this.state.getCompletedNodes().has(p));
              if (isReady)
                this.frontier.add(node.id);
            }
            // No edge matched: scan the predecessor map for not-yet-completed
            // nodes that list this node as a predecessor and schedule any that
            // are ready (covers targets reached only via the predecessor map,
            // e.g. gather nodes wired up in handleDynamicNodes).
            if (matched.length === 0) {
              for (const [potentialNextId, predecessors] of this.allPredecessors) {
                if (predecessors.has(nodeId) && !this.state.getCompletedNodes().has(potentialNextId)) {
                  const joinStrategy = this.dynamicBlueprint.nodes.find((n) => n.id === potentialNextId)?.config?.joinStrategy || "all";
                  const isReady = joinStrategy === "any" ? [...predecessors].some((p) => completedThisTurn.has(p)) : [...predecessors].every((p) => this.state.getCompletedNodes().has(p));
                  if (isReady)
                    this.frontier.add(potentialNextId);
                }
              }
            }
          }
        }
      } catch (error) {
        if (error instanceof DOMException && error.name === "AbortError") {
          throw new CancelledWorkflowError("Workflow cancelled");
        }
        throw error;
      }
    }
  }
  /**
   * Appends nodes dynamically produced by a result to the working blueprint.
   * Each new node gets the spawning node as its sole predecessor and is
   * enqueued immediately; if result.output.gatherNodeId is present, every new
   * node is also registered as a predecessor of that gather node.
   */
  async handleDynamicNodes(nodeId, result) {
    if (result.dynamicNodes && result.dynamicNodes.length > 0) {
      const gatherNodeId = result.output?.gatherNodeId;
      for (const dynamicNode of result.dynamicNodes) {
        this.dynamicBlueprint.nodes.push(dynamicNode);
        this.allPredecessors.set(dynamicNode.id, /* @__PURE__ */ new Set([nodeId]));
        if (gatherNodeId) {
          this.allPredecessors.get(gatherNodeId)?.add(dynamicNode.id);
        }
        this.frontier.add(dynamicNode.id);
      }
    }
  }
  // All node IDs currently in the working blueprint, including dynamic ones.
  getAllNodeIds() {
    return new Set(this.dynamicBlueprint.nodes.map((n) => n.id));
  }
  // IDs referenced as config.fallback by any node in the working blueprint.
  getFallbackNodeIds() {
    const fallbackNodeIds = /* @__PURE__ */ new Set();
    for (const node of this.dynamicBlueprint.nodes) {
      if (node.config?.fallback)
        fallbackNodeIds.add(node.config.fallback);
    }
    return fallbackNodeIds;
  }
  // The working copy of the blueprint, including dynamically added nodes.
  getDynamicBlueprint() {
    return this.dynamicBlueprint;
  }
};
132
+
133
+ export { GraphTraverser };
134
+ //# sourceMappingURL=chunk-RW4FH7IL.js.map
135
+ //# sourceMappingURL=chunk-RW4FH7IL.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/runtime/traverser.ts"],"names":["nodeId"],"mappings":";;;;AAMO,IAAM,iBAAN,MAAsG;AAAA,EAK5G,YACS,SAAA,EACA,OAAA,EACA,KAAA,EACA,gBAAA,EACA,aACA,MAAA,EACP;AANO,IAAA,IAAA,CAAA,SAAA,GAAA,SAAA;AACA,IAAA,IAAA,CAAA,OAAA,GAAA,OAAA;AACA,IAAA,IAAA,CAAA,KAAA,GAAA,KAAA;AACA,IAAA,IAAA,CAAA,gBAAA,GAAA,gBAAA;AACA,IAAA,IAAA,CAAA,WAAA,GAAA,WAAA;AACA,IAAA,IAAA,CAAA,MAAA,GAAA,MAAA;AAER,IAAA,IAAA,CAAK,mBAAmB,IAAA,CAAK,KAAA,CAAM,IAAA,CAAK,SAAA,CAAU,SAAS,CAAC,CAAA;AAC5D,IAAA,IAAA,CAAK,eAAA,uBAAsB,GAAA,EAAyB;AACpD,IAAA,IAAA,CAAK,gBAAA,CAAiB,KAAA,CAAM,OAAA,CAAQ,CAAA,IAAA,KAAQ,IAAA,CAAK,eAAA,CAAgB,GAAA,CAAI,IAAA,CAAK,EAAA,kBAAI,IAAI,GAAA,EAAK,CAAC,CAAA;AACxF,IAAA,IAAA,CAAK,gBAAA,CAAiB,KAAA,CAAM,OAAA,CAAQ,CAAA,IAAA,KAAQ,IAAA,CAAK,eAAA,CAAgB,GAAA,CAAI,IAAA,CAAK,MAAM,CAAA,EAAG,GAAA,CAAI,IAAA,CAAK,MAAM,CAAC,CAAA;AACnG,IAAA,MAAM,QAAA,GAAW,iBAAiB,SAAS,CAAA;AAC3C,IAAA,IAAA,CAAK,QAAA,GAAW,IAAI,GAAA,CAAI,QAAA,CAAS,YAAA,CAAa,MAAA,CAAO,CAAA,EAAA,KAAM,CAAC,IAAA,CAAK,cAAA,CAAe,EAAE,CAAC,CAAC,CAAA;AACpF,IAAA,IAAI,IAAA,CAAK,QAAA,CAAS,IAAA,KAAS,CAAA,IAAK,QAAA,CAAS,MAAA,CAAO,MAAA,GAAS,CAAA,IAAK,IAAA,CAAK,OAAA,CAAQ,OAAA,CAAQ,MAAA,KAAW,IAAA,EAAM;AACnG,MAAA,MAAM,gBAAA,uBAAuB,GAAA,EAAY;AACzC,MAAA,KAAA,MAAW,KAAA,IAAS,SAAS,MAAA,EAAQ;AACpC,QAAA,IAAI,MAAM,MAAA,GAAS,CAAA;AAClB,UAAA,gBAAA,CAAiB,GAAA,CAAI,KAAA,CAAM,CAAC,CAAC,CAAA;AAAA,MAC/B;AACA,MAAA,IAAA,CAAK,QAAA,GAAW,IAAI,GAAA,CAAI,gBAAgB,CAAA;AAAA,IACzC;AAAA,EACD;AAAA,EA1BQ,QAAA,uBAAe,GAAA,EAAY;AAAA,EAC3B,eAAA;AAAA,EACA,gBAAA;AAAA,EA0BA,eAAe,MAAA,EAAyB;AAC/C,IAAA,OAAO,IAAA,CAAK,iBAAiB,KAAA,CAAM,IAAA,CAAK,OAAK,CAAA,CAAE,MAAA,EAAQ,aAAa,MAAM,CAAA;AAAA,EAC3E;AAAA,EAEA,MAAM,QAAA,GAA0B;AAC/B,IAAA,IAAI;AACH,MAAA,IAAA,CAAK,QAAQ,cAAA,EAAe;AAAA,IAC7B,SACO,KAAA,EAAO;AACb,MAAA,IAAI,KAAA,YAAiB,YAAA,IAAgB,KAAA,CAAM,IAAA,KAAS,YAAA;AACnD,QAAA,MAAM,IAAI,uBAAuB,oBAAoB,CAAA;AACtD,MAAA,MAAM,KAAA;AAAA,IACP;AACA,IAAA,IAAI,UAAA,GAAa,CAAA;AACjB,IAAA,MAAM,aAAA,GAAgB,GAAA;AACtB,IAAA,OAAO,IAAA,CAAK,QAAA,CAAS,IAAA,GAAO,CAAA,EAAG;AAC9B,MAAA,IAAI,EAAE,UAAA,GAAa
,aAAA;AAClB,QAAA,MAAM,IAAI,MAAM,+DAA+D,CAAA;AAEhF,MAAA,IAAI;AACH,QAAA,IAAA,CAAK,QAAQ,cAAA,EAAe;AAC5B,QAAA,MAAM,WAAA,GAAc,KAAA,CAAM,IAAA,CAAK,IAAA,CAAK,QAAQ,CAAA;AAC5C,QAAA,IAAA,CAAK,SAAS,KAAA,EAAM;AACpB,QAAA,MAAM,WAAW,WAAA,CAAY,GAAA;AAAA,UAAI,YAChC,IAAA,CAAK,OAAA,CACH,YAAY,IAAA,CAAK,gBAAA,EAAkB,QAAQ,IAAA,CAAK,KAAA,EAAO,KAAK,eAAA,EAAiB,IAAA,CAAK,kBAAkB,IAAA,CAAK,WAAA,EAAa,KAAK,MAAM,CAAA,CACjI,KAAK,CAAC,MAAA,MAAwB,EAAE,MAAA,EAAQ,aAAsB,KAAA,EAAO,EAAE,QAAQ,MAAA,EAAO,GAAI,CAAA,CAC1F,KAAA,CAAM,CAAC,KAAA,MAAoB,EAAE,QAAQ,UAAA,EAAqB,MAAA,EAAQ,EAAE,MAAA,EAAQ,KAAA,IAAQ,CAAE;AAAA,SACzF;AACA,QAAA,MAAM,cAAA,GAAiB,MAAM,OAAA,CAAQ,GAAA,CAAI,QAAQ,CAAA;AACjD,QAAA,MAAM,iBAAA,uBAAwB,GAAA,EAAY;AAC1C,QAAA,KAAA,MAAW,iBAAiB,cAAA,EAAgB;AAC3C,UAAA,IAAI,aAAA,CAAc,WAAW,UAAA,EAAY;AACxC,YAAA,MAAM,EAAE,MAAA,EAAAA,OAAAA,EAAQ,KAAA,KAAU,aAAA,CAAc,MAAA;AACxC,YAAA,IAAI,KAAA,YAAiB,sBAAA;AACpB,cAAA,MAAM,KAAA;AACP,YAAA,IAAA,CAAK,KAAA,CAAM,QAAA,CAASA,OAAAA,EAAQ,KAAc,CAAA;AAC1C,YAAA;AAAA,UACD;AACA,UAAA,MAAM,EAAE,MAAA,EAAQ,MAAA,EAAO,GAAI,aAAA,CAAc,KAAA;AACzC,UAAA,IAAA,CAAK,KAAA,CAAM,gBAAA,CAAiB,MAAA,EAAQ,MAAA,CAAO,MAAM,CAAA;AACjD,UAAA,iBAAA,CAAkB,IAAI,MAAM,CAAA;AAC5B,UAAA,IAAI,MAAA,CAAO,iBAAA;AACV,YAAA,IAAA,CAAK,MAAM,oBAAA,EAAqB;AACjC,UAAA,MAAM,IAAA,CAAK,kBAAA,CAAmB,MAAA,EAAQ,MAAM,CAAA;AAC5C,UAAA,IAAI,CAAC,OAAO,iBAAA,EAAmB;AAC9B,YAAA,MAAM,OAAA,GAAU,MAAM,IAAA,CAAK,OAAA,CAAQ,kBAAA,CAAmB,IAAA,CAAK,gBAAA,EAAkB,MAAA,EAAQ,MAAA,EAAQ,IAAA,CAAK,KAAA,CAAM,UAAA,EAAY,CAAA;AAGpH,YAAA,MAAM,sBAAsB,OAAA,CAAQ,IAAA,CAAK,OAAK,CAAA,CAAE,IAAA,CAAK,SAAS,iBAAiB,CAAA;AAC/E,YAAA,MAAM,YAAA,GAAe,mBAAA,GAAsB,CAAC,mBAAmB,CAAA,GAAI,OAAA;AAEnE,YAAA,KAAA,MAAW,EAAE,IAAA,EAAM,IAAA,EAAK,IAAK,YAAA,EAAc;AAC1C,cAAA,MAAM,YAAA,GAAe,IAAA,CAAK,MAAA,EAAQ,YAAA,IAAgB,KAAA;AAClD,cAAA,IAAI,YAAA,KAAiB,SAAS,IAAA,CAAK,KAAA,CAAM,mBAAkB,CAAE,GAAA,CAAI,KAAK,EAAE,CAAA;AACvE,gBAAA;AACD,cAAA,MAAM,IAAA,CAAK,OAAA,CAAQ,kBAAA,CAAmB,IAAA,EAAM,MAAA,EAAQ,IAAA,EAAM,IAAA,CAAK,KAAA,CAAM,UAAA,EAAW,EAAG,IAAA,CAAK,eAAe,CAAA;AACvG,cAAA,MAAM,oBAAA,GAAuB,IAAA,CAAK,eAAA,CAAgB,GAA
A,CAAI,KAAK,EAAE,CAAA;AAC7D,cAAA,MAAM,OAAA,GAAU,YAAA,KAAiB,KAAA,GAC9B,CAAC,GAAG,oBAAoB,CAAA,CAAE,IAAA,CAAK,CAAA,CAAA,KAAK,iBAAA,CAAkB,GAAA,CAAI,CAAC,CAAC,IAC5D,CAAC,GAAG,oBAAoB,CAAA,CAAE,KAAA,CAAM,CAAA,CAAA,KAAK,IAAA,CAAK,KAAA,CAAM,iBAAA,EAAkB,CAAE,GAAA,CAAI,CAAC,CAAC,CAAA;AAC7E,cAAA,IAAI,OAAA;AACH,gBAAA,IAAA,CAAK,QAAA,CAAS,GAAA,CAAI,IAAA,CAAK,EAAE,CAAA;AAAA,YAC3B;AACA,YAAA,IAAI,OAAA,CAAQ,WAAW,CAAA,EAAG;AACzB,cAAA,KAAA,MAAW,CAAC,eAAA,EAAiB,YAAY,CAAA,IAAK,KAAK,eAAA,EAAiB;AACnE,gBAAA,IAAI,YAAA,CAAa,GAAA,CAAI,MAAM,CAAA,IAAK,CAAC,IAAA,CAAK,KAAA,CAAM,iBAAA,EAAkB,CAAE,GAAA,CAAI,eAAe,CAAA,EAAG;AACrF,kBAAA,MAAM,YAAA,GAAe,IAAA,CAAK,gBAAA,CAAiB,KAAA,CAAM,IAAA,CAAK,CAAA,CAAA,KAAK,CAAA,CAAE,EAAA,KAAO,eAAe,CAAA,EAAG,MAAA,EAAQ,YAAA,IAAgB,KAAA;AAC9G,kBAAA,MAAM,OAAA,GAAU,YAAA,KAAiB,KAAA,GAC9B,CAAC,GAAG,YAAY,CAAA,CAAE,IAAA,CAAK,CAAA,CAAA,KAAK,iBAAA,CAAkB,GAAA,CAAI,CAAC,CAAC,IACpD,CAAC,GAAG,YAAY,CAAA,CAAE,KAAA,CAAM,CAAA,CAAA,KAAK,IAAA,CAAK,KAAA,CAAM,iBAAA,EAAkB,CAAE,GAAA,CAAI,CAAC,CAAC,CAAA;AACrE,kBAAA,IAAI,OAAA;AACH,oBAAA,IAAA,CAAK,QAAA,CAAS,IAAI,eAAe,CAAA;AAAA,gBACnC;AAAA,cACD;AAAA,YACD;AAAA,UACD;AAAA,QACD;AAAA,MACD,SACO,KAAA,EAAO;AACb,QAAA,IAAI,KAAA,YAAiB,YAAA,IAAgB,KAAA,CAAM,IAAA,KAAS,YAAA,EAAc;AACjE,UAAA,MAAM,IAAI,uBAAuB,oBAAoB,CAAA;AAAA,QACtD;AACA,QAAA,MAAM,KAAA;AAAA,MACP;AAAA,IACD;AAAA,EACD;AAAA,EAEA,MAAc,kBAAA,CAAmB,MAAA,EAAgB,MAAA,EAAoB;AACpE,IAAA,IAAI,MAAA,CAAO,YAAA,IAAgB,MAAA,CAAO,YAAA,CAAa,SAAS,CAAA,EAAG;AAC1D,MAAA,MAAM,YAAA,GAAe,OAAO,MAAA,EAAQ,YAAA;AACpC,MAAA,KAAA,MAAW,WAAA,IAAe,OAAO,YAAA,EAAc;AAC9C,QAAA,IAAA,CAAK,gBAAA,CAAiB,KAAA,CAAM,IAAA,CAAK,WAAW,CAAA;AAC5C,QAAA,IAAA,CAAK,eAAA,CAAgB,IAAI,WAAA,CAAY,EAAA,sBAAQ,GAAA,CAAI,CAAC,MAAM,CAAC,CAAC,CAAA;AAC1D,QAAA,IAAI,YAAA,EAAc;AACjB,UAAA,IAAA,CAAK,gBAAgB,GAAA,CAAI,YAAY,CAAA,EAAG,GAAA,CAAI,YAAY,EAAE,CAAA;AAAA,QAC3D;AACA,QAAA,IAAA,CAAK,QAAA,CAAS,GAAA,CAAI,WAAA,CAAY,EAAE,CAAA;AAAA,MACjC;AAAA,IACD;AAAA,EACD;AAAA,EAEA,aAAA,GAA6B;AAC5B,IAAA,OAAO,IAAI,IAAI,IAAA,CAAK,gBAAA,CAAiB,MAAM,GAAA,CAAI,CAAA,CAAA,KAAK,CAAA,CAAE,EAAE,CAAC,CAAA;
AAAA,EAC1D;AAAA,EAEA,kBAAA,GAAkC;AACjC,IAAA,MAAM,eAAA,uBAAsB,GAAA,EAAY;AACxC,IAAA,KAAA,MAAW,IAAA,IAAQ,IAAA,CAAK,gBAAA,CAAiB,KAAA,EAAO;AAC/C,MAAA,IAAI,KAAK,MAAA,EAAQ,QAAA;AAChB,QAAA,eAAA,CAAgB,GAAA,CAAI,IAAA,CAAK,MAAA,CAAO,QAAQ,CAAA;AAAA,IAC1C;AACA,IAAA,OAAO,eAAA;AAAA,EACR;AAAA,EAEA,mBAAA,GAAyC;AACxC,IAAA,OAAO,IAAA,CAAK,gBAAA;AAAA,EACb;AACD","file":"chunk-RW4FH7IL.js","sourcesContent":["import type { NodeResult, WorkflowBlueprint } from '../types'\nimport type { WorkflowState } from './state'\nimport type { IRuntime } from './types'\nimport { analyzeBlueprint } from '../analysis'\nimport { CancelledWorkflowError } from '../errors'\n\nexport class GraphTraverser<TContext extends Record<string, any>, TDependencies extends Record<string, any>> {\n\tprivate frontier = new Set<string>()\n\tprivate allPredecessors: Map<string, Set<string>>\n\tprivate dynamicBlueprint: WorkflowBlueprint\n\n\tconstructor(\n\t\tprivate blueprint: WorkflowBlueprint,\n\t\tprivate runtime: IRuntime<TContext, TDependencies>,\n\t\tprivate state: WorkflowState<TContext>,\n\t\tprivate functionRegistry: Map<string, any> | undefined,\n\t\tprivate executionId: string,\n\t\tprivate signal?: AbortSignal,\n\t) {\n\t\tthis.dynamicBlueprint = JSON.parse(JSON.stringify(blueprint)) as WorkflowBlueprint\n\t\tthis.allPredecessors = new Map<string, Set<string>>()\n\t\tthis.dynamicBlueprint.nodes.forEach(node => this.allPredecessors.set(node.id, new Set()))\n\t\tthis.dynamicBlueprint.edges.forEach(edge => this.allPredecessors.get(edge.target)?.add(edge.source))\n\t\tconst analysis = analyzeBlueprint(blueprint)\n\t\tthis.frontier = new Set(analysis.startNodeIds.filter(id => !this.isFallbackNode(id)))\n\t\tif (this.frontier.size === 0 && analysis.cycles.length > 0 && this.runtime.options.strict !== true) {\n\t\t\tconst uniqueStartNodes = new Set<string>()\n\t\t\tfor (const cycle of analysis.cycles) {\n\t\t\t\tif (cycle.length > 0)\n\t\t\t\t\tuniqueStartNodes.add(cycle[0])\n\t\t\t}\n\t\t\tthis.frontier = new 
Set(uniqueStartNodes)\n\t\t}\n\t}\n\n\tprivate isFallbackNode(nodeId: string): boolean {\n\t\treturn this.dynamicBlueprint.nodes.some(n => n.config?.fallback === nodeId)\n\t}\n\n\tasync traverse(): Promise<void> {\n\t\ttry {\n\t\t\tthis.signal?.throwIfAborted()\n\t\t}\n\t\tcatch (error) {\n\t\t\tif (error instanceof DOMException && error.name === 'AbortError')\n\t\t\t\tthrow new CancelledWorkflowError('Workflow cancelled')\n\t\t\tthrow error\n\t\t}\n\t\tlet iterations = 0\n\t\tconst maxIterations = 10000\n\t\twhile (this.frontier.size > 0) {\n\t\t\tif (++iterations > maxIterations)\n\t\t\t\tthrow new Error('Traversal exceeded maximum iterations, possible infinite loop')\n\n\t\t\ttry {\n\t\t\t\tthis.signal?.throwIfAborted()\n\t\t\t\tconst currentJobs = Array.from(this.frontier)\n\t\t\t\tthis.frontier.clear()\n\t\t\t\tconst promises = currentJobs.map(nodeId =>\n\t\t\t\t\tthis.runtime\n\t\t\t\t\t\t.executeNode(this.dynamicBlueprint, nodeId, this.state, this.allPredecessors, this.functionRegistry, this.executionId, this.signal)\n\t\t\t\t\t\t.then((result: NodeResult) => ({ status: 'fulfilled' as const, value: { nodeId, result } }))\n\t\t\t\t\t\t.catch((error: unknown) => ({ status: 'rejected' as const, reason: { nodeId, error } })),\n\t\t\t\t)\n\t\t\t\tconst settledResults = await Promise.all(promises)\n\t\t\t\tconst completedThisTurn = new Set<string>()\n\t\t\t\tfor (const promiseResult of settledResults) {\n\t\t\t\t\tif (promiseResult.status === 'rejected') {\n\t\t\t\t\t\tconst { nodeId, error } = promiseResult.reason\n\t\t\t\t\t\tif (error instanceof CancelledWorkflowError)\n\t\t\t\t\t\t\tthrow error\n\t\t\t\t\t\tthis.state.addError(nodeId, error as Error)\n\t\t\t\t\t\tcontinue\n\t\t\t\t\t}\n\t\t\t\t\tconst { nodeId, result } = promiseResult.value\n\t\t\t\t\tthis.state.addCompletedNode(nodeId, result.output)\n\t\t\t\t\tcompletedThisTurn.add(nodeId)\n\t\t\t\t\tif (result._fallbackExecuted)\n\t\t\t\t\t\tthis.state.markFallbackExecuted()\n\t\t\t\t\tawait 
this.handleDynamicNodes(nodeId, result)\n\t\t\t\t\tif (!result._fallbackExecuted) {\n\t\t\t\t\t\tconst matched = await this.runtime.determineNextNodes(this.dynamicBlueprint, nodeId, result, this.state.getContext())\n\n\t\t\t\t\t\t// If one of the next nodes is a loop controller, prioritize it to avoid ambiguity from manual cycle edges.\n\t\t\t\t\t\tconst loopControllerMatch = matched.find(m => m.node.uses === 'loop-controller')\n\t\t\t\t\t\tconst finalMatched = loopControllerMatch ? [loopControllerMatch] : matched\n\n\t\t\t\t\t\tfor (const { node, edge } of finalMatched) {\n\t\t\t\t\t\t\tconst joinStrategy = node.config?.joinStrategy || 'all'\n\t\t\t\t\t\t\tif (joinStrategy !== 'any' && this.state.getCompletedNodes().has(node.id))\n\t\t\t\t\t\t\t\tcontinue\n\t\t\t\t\t\t\tawait this.runtime.applyEdgeTransform(edge, result, node, this.state.getContext(), this.allPredecessors)\n\t\t\t\t\t\t\tconst requiredPredecessors = this.allPredecessors.get(node.id)!\n\t\t\t\t\t\t\tconst isReady = joinStrategy === 'any'\n\t\t\t\t\t\t\t\t? [...requiredPredecessors].some(p => completedThisTurn.has(p))\n\t\t\t\t\t\t\t\t: [...requiredPredecessors].every(p => this.state.getCompletedNodes().has(p))\n\t\t\t\t\t\t\tif (isReady)\n\t\t\t\t\t\t\t\tthis.frontier.add(node.id)\n\t\t\t\t\t\t}\n\t\t\t\t\t\tif (matched.length === 0) {\n\t\t\t\t\t\t\tfor (const [potentialNextId, predecessors] of this.allPredecessors) {\n\t\t\t\t\t\t\t\tif (predecessors.has(nodeId) && !this.state.getCompletedNodes().has(potentialNextId)) {\n\t\t\t\t\t\t\t\t\tconst joinStrategy = this.dynamicBlueprint.nodes.find(n => n.id === potentialNextId)?.config?.joinStrategy || 'all'\n\t\t\t\t\t\t\t\t\tconst isReady = joinStrategy === 'any'\n\t\t\t\t\t\t\t\t\t\t? 
[...predecessors].some(p => completedThisTurn.has(p))\n\t\t\t\t\t\t\t\t\t\t: [...predecessors].every(p => this.state.getCompletedNodes().has(p))\n\t\t\t\t\t\t\t\t\tif (isReady)\n\t\t\t\t\t\t\t\t\t\tthis.frontier.add(potentialNextId)\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t\tcatch (error) {\n\t\t\t\tif (error instanceof DOMException && error.name === 'AbortError') {\n\t\t\t\t\tthrow new CancelledWorkflowError('Workflow cancelled')\n\t\t\t\t}\n\t\t\t\tthrow error\n\t\t\t}\n\t\t}\n\t}\n\n\tprivate async handleDynamicNodes(nodeId: string, result: NodeResult) {\n\t\tif (result.dynamicNodes && result.dynamicNodes.length > 0) {\n\t\t\tconst gatherNodeId = result.output?.gatherNodeId\n\t\t\tfor (const dynamicNode of result.dynamicNodes) {\n\t\t\t\tthis.dynamicBlueprint.nodes.push(dynamicNode)\n\t\t\t\tthis.allPredecessors.set(dynamicNode.id, new Set([nodeId]))\n\t\t\t\tif (gatherNodeId) {\n\t\t\t\t\tthis.allPredecessors.get(gatherNodeId)?.add(dynamicNode.id)\n\t\t\t\t}\n\t\t\t\tthis.frontier.add(dynamicNode.id)\n\t\t\t}\n\t\t}\n\t}\n\n\tgetAllNodeIds(): Set<string> {\n\t\treturn new Set(this.dynamicBlueprint.nodes.map(n => n.id))\n\t}\n\n\tgetFallbackNodeIds(): Set<string> {\n\t\tconst fallbackNodeIds = new Set<string>()\n\t\tfor (const node of this.dynamicBlueprint.nodes) {\n\t\t\tif (node.config?.fallback)\n\t\t\t\tfallbackNodeIds.add(node.config.fallback)\n\t\t}\n\t\treturn fallbackNodeIds\n\t}\n\n\tgetDynamicBlueprint(): WorkflowBlueprint {\n\t\treturn this.dynamicBlueprint\n\t}\n}\n"]}