flowcraft 1.0.0 → 2.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (201) hide show
  1. package/README.md +37 -134
  2. package/dist/analysis.d.ts +43 -0
  3. package/dist/analysis.js +3 -0
  4. package/dist/chunk-4PELJWF7.js +29 -0
  5. package/dist/chunk-4PELJWF7.js.map +1 -0
  6. package/dist/chunk-55J6XMHW.js +3 -0
  7. package/dist/{chunk-7XUN3OQT.js.map → chunk-55J6XMHW.js.map} +1 -1
  8. package/dist/chunk-5EHIPX23.js +202 -0
  9. package/dist/chunk-5EHIPX23.js.map +1 -0
  10. package/dist/chunk-5QMPFUKA.js +40 -0
  11. package/dist/chunk-5QMPFUKA.js.map +1 -0
  12. package/dist/chunk-5ZWYSKMH.js +147 -0
  13. package/dist/chunk-5ZWYSKMH.js.map +1 -0
  14. package/dist/chunk-5ZXV3R5D.js +28 -0
  15. package/dist/chunk-5ZXV3R5D.js.map +1 -0
  16. package/dist/chunk-CO5BTPKI.js +410 -0
  17. package/dist/chunk-CO5BTPKI.js.map +1 -0
  18. package/dist/chunk-CSZ6EOWG.js +61 -0
  19. package/dist/chunk-CSZ6EOWG.js.map +1 -0
  20. package/dist/chunk-CYHZ2YVH.js +24 -0
  21. package/dist/chunk-CYHZ2YVH.js.map +1 -0
  22. package/dist/chunk-DSYAC4WB.js +27 -0
  23. package/dist/chunk-DSYAC4WB.js.map +1 -0
  24. package/dist/chunk-HMR2GEGE.js +3 -0
  25. package/dist/{chunk-F2RSES6P.js.map → chunk-HMR2GEGE.js.map} +1 -1
  26. package/dist/chunk-HN72TZY5.js +110 -0
  27. package/dist/chunk-HN72TZY5.js.map +1 -0
  28. package/dist/chunk-KWQHFT7E.js +49 -0
  29. package/dist/chunk-KWQHFT7E.js.map +1 -0
  30. package/dist/chunk-PH2IYZHV.js +48 -0
  31. package/dist/chunk-PH2IYZHV.js.map +1 -0
  32. package/dist/chunk-QRMUKDSP.js +141 -0
  33. package/dist/chunk-QRMUKDSP.js.map +1 -0
  34. package/dist/chunk-UETC63DP.js +65 -0
  35. package/dist/chunk-UETC63DP.js.map +1 -0
  36. package/dist/chunk-UMXW3TCY.js +165 -0
  37. package/dist/chunk-UMXW3TCY.js.map +1 -0
  38. package/dist/context.d.ts +23 -105
  39. package/dist/context.js +1 -1
  40. package/dist/errors.d.ts +15 -31
  41. package/dist/errors.js +1 -1
  42. package/dist/evaluator.d.ts +30 -0
  43. package/dist/evaluator.js +3 -0
  44. package/dist/evaluator.js.map +1 -0
  45. package/dist/flow.d.ts +55 -0
  46. package/dist/flow.js +4 -0
  47. package/dist/flow.js.map +1 -0
  48. package/dist/index.d.ts +15 -16
  49. package/dist/index.js +17 -25
  50. package/dist/linter.d.ts +24 -0
  51. package/dist/linter.js +4 -0
  52. package/dist/linter.js.map +1 -0
  53. package/dist/logger.d.ts +15 -40
  54. package/dist/logger.js +1 -1
  55. package/dist/node.d.ts +1 -0
  56. package/dist/node.js +3 -0
  57. package/dist/node.js.map +1 -0
  58. package/dist/runtime/adapter.d.ts +94 -0
  59. package/dist/runtime/adapter.js +15 -0
  60. package/dist/runtime/adapter.js.map +1 -0
  61. package/dist/runtime/executors.d.ts +26 -0
  62. package/dist/runtime/executors.js +4 -0
  63. package/dist/runtime/executors.js.map +1 -0
  64. package/dist/runtime/index.d.ts +7 -0
  65. package/dist/runtime/index.js +16 -0
  66. package/dist/runtime/runtime.d.ts +34 -0
  67. package/dist/runtime/runtime.js +14 -0
  68. package/dist/runtime/runtime.js.map +1 -0
  69. package/dist/runtime/state.d.ts +21 -0
  70. package/dist/runtime/state.js +4 -0
  71. package/dist/runtime/state.js.map +1 -0
  72. package/dist/runtime/traverser.d.ts +25 -0
  73. package/dist/runtime/traverser.js +5 -0
  74. package/dist/runtime/traverser.js.map +1 -0
  75. package/dist/runtime/types.d.ts +15 -0
  76. package/dist/runtime/types.js +3 -0
  77. package/dist/sanitizer.d.ts +10 -0
  78. package/dist/sanitizer.js +3 -0
  79. package/dist/{utils/sanitize.js.map → sanitizer.js.map} +1 -1
  80. package/dist/serializer.d.ts +16 -0
  81. package/dist/serializer.js +3 -0
  82. package/dist/serializer.js.map +1 -0
  83. package/dist/types-lG3xCzp_.d.ts +206 -0
  84. package/dist/types.d.ts +1 -3
  85. package/dist/types.js +1 -1
  86. package/package.json +10 -21
  87. package/LICENSE +0 -21
  88. package/dist/builder/graph/graph.d.ts +0 -57
  89. package/dist/builder/graph/graph.js +0 -21
  90. package/dist/builder/graph/graph.js.map +0 -1
  91. package/dist/builder/graph/index.d.ts +0 -8
  92. package/dist/builder/graph/index.js +0 -23
  93. package/dist/builder/graph/internal-nodes.d.ts +0 -59
  94. package/dist/builder/graph/internal-nodes.js +0 -20
  95. package/dist/builder/graph/internal-nodes.js.map +0 -1
  96. package/dist/builder/graph/runner.d.ts +0 -51
  97. package/dist/builder/graph/runner.js +0 -21
  98. package/dist/builder/graph/runner.js.map +0 -1
  99. package/dist/builder/graph/types.d.ts +0 -3
  100. package/dist/builder/graph/types.js +0 -3
  101. package/dist/builder/index.d.ts +0 -8
  102. package/dist/builder/index.js +0 -24
  103. package/dist/builder/index.js.map +0 -1
  104. package/dist/builder/patterns.d.ts +0 -136
  105. package/dist/builder/patterns.js +0 -19
  106. package/dist/builder/patterns.js.map +0 -1
  107. package/dist/chunk-3YMBNZ77.js +0 -441
  108. package/dist/chunk-3YMBNZ77.js.map +0 -1
  109. package/dist/chunk-64DNBF5W.js +0 -36
  110. package/dist/chunk-64DNBF5W.js.map +0 -1
  111. package/dist/chunk-6QCXIRLA.js +0 -18
  112. package/dist/chunk-6QCXIRLA.js.map +0 -1
  113. package/dist/chunk-7XUN3OQT.js +0 -3
  114. package/dist/chunk-AOHBHYF6.js +0 -7
  115. package/dist/chunk-AOHBHYF6.js.map +0 -1
  116. package/dist/chunk-BRFMFLR6.js +0 -85
  117. package/dist/chunk-BRFMFLR6.js.map +0 -1
  118. package/dist/chunk-ELEHMJPM.js +0 -13
  119. package/dist/chunk-ELEHMJPM.js.map +0 -1
  120. package/dist/chunk-F2RSES6P.js +0 -3
  121. package/dist/chunk-F6C6J7HK.js +0 -3
  122. package/dist/chunk-F6C6J7HK.js.map +0 -1
  123. package/dist/chunk-GMKJ34T2.js +0 -3
  124. package/dist/chunk-GMKJ34T2.js.map +0 -1
  125. package/dist/chunk-HEO3XL4Z.js +0 -328
  126. package/dist/chunk-HEO3XL4Z.js.map +0 -1
  127. package/dist/chunk-IIKTTIW5.js +0 -56
  128. package/dist/chunk-IIKTTIW5.js.map +0 -1
  129. package/dist/chunk-KOBEU2EM.js +0 -3
  130. package/dist/chunk-KOBEU2EM.js.map +0 -1
  131. package/dist/chunk-L5PK5VL2.js +0 -178
  132. package/dist/chunk-L5PK5VL2.js.map +0 -1
  133. package/dist/chunk-P3RPDZHO.js +0 -36
  134. package/dist/chunk-P3RPDZHO.js.map +0 -1
  135. package/dist/chunk-PNWOW52F.js +0 -19
  136. package/dist/chunk-PNWOW52F.js.map +0 -1
  137. package/dist/chunk-R27FIYR5.js +0 -62
  138. package/dist/chunk-R27FIYR5.js.map +0 -1
  139. package/dist/chunk-S4WFNGQG.js +0 -17
  140. package/dist/chunk-S4WFNGQG.js.map +0 -1
  141. package/dist/chunk-TS3M7MWA.js +0 -3
  142. package/dist/chunk-TS3M7MWA.js.map +0 -1
  143. package/dist/chunk-UY4PNPBX.js +0 -156
  144. package/dist/chunk-UY4PNPBX.js.map +0 -1
  145. package/dist/chunk-VMH2LRM6.js +0 -114
  146. package/dist/chunk-VMH2LRM6.js.map +0 -1
  147. package/dist/chunk-VZDHIOCH.js +0 -76
  148. package/dist/chunk-VZDHIOCH.js.map +0 -1
  149. package/dist/chunk-WGVHM7DU.js +0 -66
  150. package/dist/chunk-WGVHM7DU.js.map +0 -1
  151. package/dist/chunk-WR5PDOPP.js +0 -91
  152. package/dist/chunk-WR5PDOPP.js.map +0 -1
  153. package/dist/chunk-YR433ZDA.js +0 -20
  154. package/dist/chunk-YR433ZDA.js.map +0 -1
  155. package/dist/executors/in-memory.d.ts +0 -39
  156. package/dist/executors/in-memory.js +0 -6
  157. package/dist/executors/in-memory.js.map +0 -1
  158. package/dist/executors/types.d.ts +0 -3
  159. package/dist/executors/types.js +0 -3
  160. package/dist/executors/types.js.map +0 -1
  161. package/dist/functions.d.ts +0 -88
  162. package/dist/functions.js +0 -21
  163. package/dist/functions.js.map +0 -1
  164. package/dist/types-U76Ukj96.d.ts +0 -609
  165. package/dist/utils/analysis.d.ts +0 -75
  166. package/dist/utils/analysis.js +0 -3
  167. package/dist/utils/index.d.ts +0 -8
  168. package/dist/utils/index.js +0 -10
  169. package/dist/utils/index.js.map +0 -1
  170. package/dist/utils/mermaid.d.ts +0 -46
  171. package/dist/utils/mermaid.js +0 -4
  172. package/dist/utils/mermaid.js.map +0 -1
  173. package/dist/utils/middleware.d.ts +0 -11
  174. package/dist/utils/middleware.js +0 -3
  175. package/dist/utils/middleware.js.map +0 -1
  176. package/dist/utils/sanitize.d.ts +0 -19
  177. package/dist/utils/sanitize.js +0 -3
  178. package/dist/utils/sleep.d.ts +0 -9
  179. package/dist/utils/sleep.js +0 -4
  180. package/dist/utils/sleep.js.map +0 -1
  181. package/dist/workflow/AbstractNode.d.ts +0 -3
  182. package/dist/workflow/AbstractNode.js +0 -4
  183. package/dist/workflow/AbstractNode.js.map +0 -1
  184. package/dist/workflow/Flow.d.ts +0 -3
  185. package/dist/workflow/Flow.js +0 -16
  186. package/dist/workflow/Flow.js.map +0 -1
  187. package/dist/workflow/Node.d.ts +0 -3
  188. package/dist/workflow/Node.js +0 -15
  189. package/dist/workflow/Node.js.map +0 -1
  190. package/dist/workflow/index.d.ts +0 -4
  191. package/dist/workflow/index.js +0 -18
  192. package/dist/workflow/index.js.map +0 -1
  193. package/dist/workflow/node-patterns.d.ts +0 -55
  194. package/dist/workflow/node-patterns.js +0 -16
  195. package/dist/workflow/node-patterns.js.map +0 -1
  196. package/dist/workflow/registry.d.ts +0 -17
  197. package/dist/workflow/registry.js +0 -3
  198. package/dist/workflow/registry.js.map +0 -1
  199. /package/dist/{utils/analysis.js.map → analysis.js.map} +0 -0
  200. /package/dist/{builder/graph → runtime}/index.js.map +0 -0
  201. /package/dist/{builder/graph → runtime}/types.js.map +0 -0
package/README.md CHANGED
@@ -1,153 +1,56 @@
1
- # Flowcraft: A Workflow Framework
1
+ # Flowcraft
2
2
 
3
- [![npm version](https://img.shields.io/npm/v/flowcraft.svg)](https://www.npmjs.com/package/flowcraft)
3
+ [![NPM Version](https://img.shields.io/npm/v/flowcraft.svg)](https://www.npmjs.com/package/flowcraft)
4
4
  [![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT)
5
+ [![Codecov](https://img.shields.io/codecov/c/github/gorango/flowcraft/master)](https://codecov.io/github/gorango/flowcraft)
5
6
 
6
- Build complex, multi-step processes with a lightweight, composable, and type-safe TypeScript framework. Model everything from simple sequences to dynamic AI agents, running in-memory or across distributed systems.
7
+ Build complex, multi-step processes with a lightweight, composable, and type-safe approach. Model complex business processes, data pipelines, ETL workflows, or AI agents and scale from in-memory scripts to distributed systems without changing the core business logic.
7
8
 
8
- **[Read the Friendly Manual »](https://gorango.github.io/flowcraft/guide/)**
9
-
10
- ## Features
9
+ ## Key Features
11
10
 
12
11
  - **Zero Dependencies**: Lightweight and dependency-free, ensuring a small footprint and easy integration.
13
- - **Composable & Reusable**: Define workflows by chaining nodes or declaratively embedding other flows as nodes.
14
- - **Type-Safe by Default**: Strong typing for workflow definitions, shared state, and node parameters.
15
- - **Async First**: Built on an asynchronous foundation to handle I/O-bound tasks gracefully.
16
- - **Resilient & Reliable**: Built-in support for retries with configurable delays and fallback logic.
17
- - **Dynamic Graph Engine**: Construct executable workflows from declarative JSON, ideal for AI agents.
18
- - **Extensible Execution**: A pluggable Executor pattern enables in-memory or distributed flows.
19
- - **Advanced Control Flow**: Full support for conditional branching, loops, and parallel execution.
20
- - **Modern Tooling**: A fluent functional API, static graph validation, and automatic visualizations.
21
-
22
- ---
23
-
24
- Flowcraft is a lightweight, zero-dependency TypeScript framework for building complex, multi-step processes. It empowers you to model everything from simple sequential tasks to dynamic, graph-driven AI agents with a clear and composable API.
25
-
26
- At its core, Flowcraft is guided by a few key principles:
27
-
28
- 1. **Structure for Complexity**: It provides a clear way to model asynchronous processes. By breaking logic into discrete `Node`s with a defined lifecycle, you can turn tangled promise chains and `async/await` blocks into maintainable, testable graphs.
29
- 2. **Start Simple, Scale Gracefully**: You can start with an in-memory workflow in a single file. As your needs grow, the architecture allows you to scale up to a robust, distributed system using message queues—**without changing your core business logic**.
30
- 3. **Composability is Key**: A `Flow` is just a specialized `Node`. This simple but powerful concept means entire workflows can be treated as building blocks, allowing you to create highly modular and reusable systems.
31
-
32
- ## The Two Paths of Flowcraft
33
-
34
- Flowcraft is designed to cater to two primary use cases, and the documentation is structured to guide you down the path that best fits your needs:
35
-
36
- ### 1. Programmatic Workflows
37
-
38
- This is the path for developers who want to build and manage workflows directly within their application's code. Using a fluent, chainable API and functional helpers, you can quickly define, test, and run complex processes in-memory.
39
-
40
- **Choose this path if you are:**
41
-
42
- - Building background jobs for a web application.
43
- - Creating complex, multi-step data processing pipelines.
44
- - Looking for a structured way to manage complex `async/await` logic.
45
-
46
- **[Learn how to build Programmatic Workflows »](https://gorango.github.io/flowcraft/guide/programmatic/basics.html)**
47
-
48
- ### 2. Declarative Workflows (for Scale)
49
-
50
- This is the path for architects and developers building dynamic, data-driven, or distributed systems. You define your workflow's structure as a declarative data format (like JSON), and the `GraphBuilder` "compiles" it into an executable, serializable `Blueprint`.
51
-
52
- **Choose this path if you are:**
53
-
54
- - Building a system where workflows are defined by users or stored in a database.
55
- - Creating a runtime for dynamic AI agents.
56
- - Architecting a distributed system where tasks are executed by a pool of workers.
57
-
58
- **[Learn how to build Declarative Workflows »](https://gorango.github.io/flowcraft/guide/declarative/basics.html)**
59
-
60
- ---
61
-
62
- ## Learn by Example
63
-
64
- > [!TIP]
65
- > The best way to learn is by exploring the included sandbox examples. They are ordered by complexity, each demonstrating a new feature of the core engine.
66
-
67
- ### 1. Basic Sequential Flow: Article Writer
68
-
69
- A simple, linear workflow that demonstrates the core concepts of creating a sequence of nodes to perform a multi-step task.
70
-
71
- ```mermaid
72
- graph LR
73
- A[Generate Outline] --> B[Write Content]
74
- B --> C[Apply Style]
12
+ - **Declarative Workflows**: Define workflows as serializable objects with nodes and edges.
13
+ - **Unopinionated Logic**: Nodes can be simple functions or structured classes, supporting any logic.
14
+ - **Progressive Scalability**: Run in-memory or scale to distributed systems using the same blueprint.
15
+ - **Resilient Execution**: Built-in support for retries, fallbacks, timeouts, and graceful cancellation.
16
+ - **Advanced Patterns**: Includes batch processing and loop constructs for complex workflows.
17
+ - **Extensibility**: Pluggable loggers, evaluators, serializers, and middleware for custom behavior.
18
+ - **Static Analysis**: Tools to detect cycles, validate blueprints, and generate visual diagrams.
19
+ - **Type-Safe API**: Fully typed with TypeScript for a robust developer experience.
20
+
21
+ ## Installation
22
+
23
+ ```bash
24
+ npm install flowcraft
75
25
  ```
76
26
 
77
- - **Demonstrates**: `Node` chaining, passing data via `Context`.
78
- - **[Explore the Basic example »](https://github.com/gorango/flowcraft/tree/main/sandbox/1.basic/)**
79
-
80
- ### 2. Conditional Branching: Research Agent
27
+ ## Usage
81
28
 
82
- A simple agent that uses a loop and conditional branching to decide whether to search the web for information or answer a question based on the current context.
29
+ ```typescript
30
+ import { createFlow, FlowRuntime } from './index'
83
31
 
84
- ```mermaid
85
- graph TD
86
- A{Decide Action} -->|"search"| B[Search Web]
87
- A -->|"answer"| C[Answer Question]
88
- B --> A
89
- ```
90
-
91
- - **Demonstrates**: Conditional branching, creating loops, and building simple state machines.
92
- - **[Explore the Research Agent example »](https://github.com/gorango/flowcraft/tree/main/sandbox/2.research/)**
32
+ const flow = createFlow('simple-workflow')
33
+ .node('start', async () => ({ output: 42 }))
34
+ .node('double', async ({ input }) => ({ output: input * 2 }), { inputs: 'start' })
35
+ .edge('start', 'double')
36
+ .toBlueprint()
93
37
 
94
- ### 3. Parallel Batch Processing: Document Translator
38
+ const runtime = new FlowRuntime({
39
+ registry: flow.getFunctionRegistry(),
40
+ })
95
41
 
96
- A practical example that translates a document into multiple languages concurrently using `ParallelBatchFlow` for a massive performance boost on I/O-bound tasks.
42
+ async function run() {
43
+ const result = await runtime.run(flow, {})
44
+ console.log(result) // { context: { start: 42, double: 84 }, status: 'completed' }
45
+ }
97
46
 
98
- ```mermaid
99
- graph TD
100
- A[Load README.md] --> B["ParallelBatchFlow"]
101
- B -- "Chinese" --> T1[TranslateNode]
102
- B -- "Spanish" --> T2[TranslateNode]
103
- B -- "Japanese" --> T3[TranslateNode]
104
- T1 & T2 & T3 --> S[Save Files]
47
+ run()
105
48
  ```
106
49
 
107
- - **Demonstrates**: High-throughput concurrent processing for data-parallel tasks.
108
- - **[Explore the Parallel Translation example »](https://github.com/gorango/flowcraft/tree/main/sandbox/3.parallel/)**
109
-
110
- ### 4. Dynamic Graph Engine: AI Agent Runtime
111
-
112
- A powerful runtime that executes complex, graph-based AI workflows defined in simple JSON-like objects. This shows how to build highly dynamic and modular AI agent systems.
113
-
114
- ```mermaid
115
- graph TD
116
- A(User Post) --> B[check_pii] & C[check_hate_speech] & D[check_spam]
117
- B & C & D --> E{triage_post}
118
- E -- action_ban --> F["Sub-Workflow: Ban User"]
119
- E -- action_approve --> G[approve_post_branch]
120
- ```
121
-
122
- - **Demonstrates**:
123
- - Type-safe graph construction from declarative definitions using `GraphBuilder`.
124
- - Parallel fan-in and fan-out.
125
- - Reusable, data-driven nodes and complex sub-workflow composition.
126
- - **[Explore the Dynamic AI Agent example »](https://github.com/gorango/flowcraft/tree/main/sandbox/4.dag/)**
127
-
128
- ### 5. Distributed Execution: AI Agent with BullMQ
129
-
130
- This example takes the same type-safe graph definition from the previous example and runs it in a distributed environment using a custom `BullMQExecutor`, demonstrating a client-worker architecture for scalable background jobs.
131
-
132
- - **Demonstrates**:
133
- - A pluggable `IExecutor` for distributed workflows.
134
- - How business logic (the graph) remains unchanged when the execution environment changes.
135
- - **[Explore the Distributed AI Agent example »](https://github.com/gorango/flowcraft/tree/main/sandbox/5.distributed/)**
136
-
137
- ### 6. Advanced RAG Agent: Complex Data & Serialization
138
-
139
- A complete Retrieval-Augmented Generation (RAG) agent that ingests a document, creates embeddings, performs a vector search, and synthesizes an answer, showcasing a sophisticated, data-driven AI workflow.
140
-
141
- - **Demonstrates**:
142
- - A full, practical RAG pipeline with custom nodes.
143
- - Handling complex data types (`Map`, `Date`, etc.) in the `Context`.
144
- - Robust serialization (using `superjson`) for reliable state management.
145
- - **[Explore the RAG Agent example »](https://github.com/gorango/flowcraft/tree/main/sandbox/6.rag/)**
146
-
147
50
  ## Documentation
148
51
 
149
- For a deep dive into all features, patterns, and APIs, please see the **[complete Flowcraft documentation](https://gorango.github.io/flowcraft/guide/)**.
52
+ For a complete overview of all features, patterns, examples, and APIs, see the **[Flowcraft documentation](https://flowcraft.js.org/)**.
150
53
 
151
- ---
54
+ ## License
152
55
 
153
- Licensed under the [MIT License](https://github.com/gorango/flowcraft/tree/main/LICENSE).
56
+ Flowcraft is licensed under the [MIT License](LICENSE).
@@ -0,0 +1,43 @@
1
+ import { W as WorkflowBlueprint } from './types-lG3xCzp_.js';
2
+
3
+ /**
4
+ * A list of cycles found in the graph. Each cycle is an array of node IDs.
5
+ */
6
+ type Cycles = string[][];
7
+ /**
8
+ * Analysis result for a workflow blueprint
9
+ */
10
+ interface BlueprintAnalysis {
11
+ /** Cycles found in the graph */
12
+ cycles: Cycles;
13
+ /** Node IDs that have no incoming edges (start nodes) */
14
+ startNodeIds: string[];
15
+ /** Node IDs that have no outgoing edges (terminal nodes) */
16
+ terminalNodeIds: string[];
17
+ /** Total number of nodes */
18
+ nodeCount: number;
19
+ /** Total number of edges */
20
+ edgeCount: number;
21
+ /** Whether the graph is a valid DAG (no cycles) */
22
+ isDag: boolean;
23
+ }
24
+ /**
25
+ * Analyzes a workflow blueprint to detect cycles.
26
+ * @param blueprint The WorkflowBlueprint object containing nodes and edges.
27
+ * @returns An array of cycles found. Each cycle is represented as an array of node IDs.
28
+ */
29
+ declare function checkForCycles(blueprint: WorkflowBlueprint): Cycles;
30
+ /**
31
+ * Generates Mermaid diagram syntax from a WorkflowBlueprint
32
+ * @param blueprint The WorkflowBlueprint object containing nodes and edges
33
+ * @returns Mermaid syntax string for the flowchart
34
+ */
35
+ declare function generateMermaid(blueprint: WorkflowBlueprint): string;
36
+ /**
37
+ * Analyzes a workflow blueprint and returns comprehensive analysis
38
+ * @param blueprint The WorkflowBlueprint object containing nodes and edges
39
+ * @returns Analysis result with cycles, start nodes, terminal nodes, and other metrics
40
+ */
41
+ declare function analyzeBlueprint(blueprint: WorkflowBlueprint): BlueprintAnalysis;
42
+
43
+ export { type BlueprintAnalysis, type Cycles, analyzeBlueprint, checkForCycles, generateMermaid };
@@ -0,0 +1,3 @@
1
+ export { analyzeBlueprint, checkForCycles, generateMermaid } from './chunk-HN72TZY5.js';
2
+ //# sourceMappingURL=analysis.js.map
3
+ //# sourceMappingURL=analysis.js.map
@@ -0,0 +1,29 @@
1
+ // src/logger.ts
2
+ var ConsoleLogger = class {
3
+ debug(message, meta) {
4
+ console.debug(`[DEBUG] ${message}`, meta || "");
5
+ }
6
+ info(message, meta) {
7
+ console.info(`[INFO] ${message}`, meta || "");
8
+ }
9
+ warn(message, meta) {
10
+ console.warn(`[WARN] ${message}`, meta || "");
11
+ }
12
+ error(message, meta) {
13
+ console.error(`[ERROR] ${message}`, meta || "");
14
+ }
15
+ };
16
+ var NullLogger = class {
17
+ debug(_message, _meta) {
18
+ }
19
+ info(_message, _meta) {
20
+ }
21
+ warn(_message, _meta) {
22
+ }
23
+ error(_message, _meta) {
24
+ }
25
+ };
26
+
27
+ export { ConsoleLogger, NullLogger };
28
+ //# sourceMappingURL=chunk-4PELJWF7.js.map
29
+ //# sourceMappingURL=chunk-4PELJWF7.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/logger.ts"],"names":[],"mappings":";AAGO,IAAM,gBAAN,MAAuC;AAAA,EAC7C,KAAA,CAAM,SAAiB,IAAA,EAAkC;AACxD,IAAA,OAAA,CAAQ,KAAA,CAAM,CAAA,QAAA,EAAW,OAAO,CAAA,CAAA,EAAI,QAAQ,EAAE,CAAA;AAAA,EAC/C;AAAA,EAEA,IAAA,CAAK,SAAiB,IAAA,EAAkC;AACvD,IAAA,OAAA,CAAQ,IAAA,CAAK,CAAA,OAAA,EAAU,OAAO,CAAA,CAAA,EAAI,QAAQ,EAAE,CAAA;AAAA,EAC7C;AAAA,EAEA,IAAA,CAAK,SAAiB,IAAA,EAAkC;AACvD,IAAA,OAAA,CAAQ,IAAA,CAAK,CAAA,OAAA,EAAU,OAAO,CAAA,CAAA,EAAI,QAAQ,EAAE,CAAA;AAAA,EAC7C;AAAA,EAEA,KAAA,CAAM,SAAiB,IAAA,EAAkC;AACxD,IAAA,OAAA,CAAQ,KAAA,CAAM,CAAA,QAAA,EAAW,OAAO,CAAA,CAAA,EAAI,QAAQ,EAAE,CAAA;AAAA,EAC/C;AACD;AAGO,IAAM,aAAN,MAAoC;AAAA,EAC1C,KAAA,CAAM,UAAkB,KAAA,EAAmC;AAAA,EAAC;AAAA,EAC5D,IAAA,CAAK,UAAkB,KAAA,EAAmC;AAAA,EAAC;AAAA,EAC3D,IAAA,CAAK,UAAkB,KAAA,EAAmC;AAAA,EAAC;AAAA,EAC3D,KAAA,CAAM,UAAkB,KAAA,EAAmC;AAAA,EAAC;AAC7D","file":"chunk-4PELJWF7.js","sourcesContent":["import type { ILogger } from './types'\n\n/** A logger implementation that outputs to the console. */\nexport class ConsoleLogger implements ILogger {\n\tdebug(message: string, meta?: Record<string, any>): void {\n\t\tconsole.debug(`[DEBUG] ${message}`, meta || '')\n\t}\n\n\tinfo(message: string, meta?: Record<string, any>): void {\n\t\tconsole.info(`[INFO] ${message}`, meta || '')\n\t}\n\n\twarn(message: string, meta?: Record<string, any>): void {\n\t\tconsole.warn(`[WARN] ${message}`, meta || '')\n\t}\n\n\terror(message: string, meta?: Record<string, any>): void {\n\t\tconsole.error(`[ERROR] ${message}`, meta || '')\n\t}\n}\n\n/** A logger implementation that does nothing (no-op). */\nexport class NullLogger implements ILogger {\n\tdebug(_message: string, _meta?: Record<string, any>): void {}\n\tinfo(_message: string, _meta?: Record<string, any>): void {}\n\twarn(_message: string, _meta?: Record<string, any>): void {}\n\terror(_message: string, _meta?: Record<string, any>): void {}\n}\n"]}
@@ -0,0 +1,3 @@
1
+
2
+ //# sourceMappingURL=chunk-55J6XMHW.js.map
3
+ //# sourceMappingURL=chunk-55J6XMHW.js.map
@@ -1 +1 @@
1
- {"version":3,"sources":[],"names":[],"mappings":"","file":"chunk-7XUN3OQT.js"}
1
+ {"version":3,"sources":[],"names":[],"mappings":"","file":"chunk-55J6XMHW.js"}
@@ -0,0 +1,202 @@
1
+ import { FlowRuntime } from './chunk-CO5BTPKI.js';
2
+ import { JsonSerializer } from './chunk-CYHZ2YVH.js';
3
+
4
+ // src/runtime/adapter.ts
5
+ var BaseDistributedAdapter = class {
6
+ runtime;
7
+ store;
8
+ serializer;
9
+ constructor(options) {
10
+ this.runtime = new FlowRuntime(options.runtimeOptions);
11
+ this.store = options.coordinationStore;
12
+ this.serializer = options.runtimeOptions.serializer || new JsonSerializer();
13
+ console.log("[Adapter] BaseDistributedAdapter initialized.");
14
+ }
15
+ /**
16
+ * Starts the worker, which begins listening for and processing jobs from the queue.
17
+ */
18
+ start() {
19
+ console.log("[Adapter] Starting worker...");
20
+ this.processJobs(this.handleJob.bind(this));
21
+ }
22
+ /**
23
+ * Hook called at the start of job processing. Subclasses can override this
24
+ * to perform additional setup (e.g., timestamp tracking for reconciliation).
25
+ */
26
+ async onJobStart(_runId, _blueprintId, _nodeId) {
27
+ }
28
+ /**
29
+ * The main handler for processing a single job from the queue.
30
+ */
31
+ async handleJob(job) {
32
+ const { runId, blueprintId, nodeId } = job;
33
+ await this.onJobStart(runId, blueprintId, nodeId);
34
+ const blueprint = this.runtime.options.blueprints?.[blueprintId];
35
+ if (!blueprint) {
36
+ const reason = `Blueprint with ID '${blueprintId}' not found in the worker's runtime registry.`;
37
+ console.error(`[Adapter] FATAL: ${reason}`);
38
+ await this.publishFinalResult(runId, { status: "failed", reason });
39
+ return;
40
+ }
41
+ const context = this.createContext(runId);
42
+ const hasBlueprintId = await context.has("blueprintId");
43
+ if (!hasBlueprintId) {
44
+ await context.set("blueprintId", blueprintId);
45
+ }
46
+ const workerState = {
47
+ getContext: () => context,
48
+ markFallbackExecuted: () => {
49
+ },
50
+ addError: (nodeId2, error) => {
51
+ console.error(`[Adapter] Error in node ${nodeId2}:`, error);
52
+ }
53
+ };
54
+ try {
55
+ const result = await this.runtime.executeNode(blueprint, nodeId, workerState);
56
+ await context.set(nodeId, result.output);
57
+ const nodeDef = blueprint.nodes.find((n) => n.id === nodeId);
58
+ if (nodeDef?.uses === "output") {
59
+ console.log(`[Adapter] \u2705 Output node '${nodeId}' finished. Declaring workflow complete for Run ID: ${runId}`);
60
+ const finalContext = await context.toJSON();
61
+ const finalResult = {
62
+ context: finalContext,
63
+ serializedContext: this.serializer.serialize(finalContext),
64
+ status: "completed"
65
+ };
66
+ await this.publishFinalResult(runId, {
67
+ status: "completed",
68
+ payload: finalResult
69
+ });
70
+ return;
71
+ }
72
+ const nextNodes = await this.runtime.determineNextNodes(blueprint, nodeId, result, context);
73
+ if (nextNodes.length === 0) {
74
+ console.log(
75
+ `[Adapter] Terminal node '${nodeId}' reached for Run ID '${runId}', but it was not an 'output' node. This branch will now terminate.`
76
+ );
77
+ return;
78
+ }
79
+ for (const { node: nextNodeDef, edge } of nextNodes) {
80
+ await this.runtime.applyEdgeTransform(edge, result, nextNodeDef, context);
81
+ const isReady = await this.isReadyForFanIn(runId, blueprint, nextNodeDef.id);
82
+ if (isReady) {
83
+ console.log(`[Adapter] Node '${nextNodeDef.id}' is ready. Enqueuing job.`);
84
+ await this.enqueueJob({ runId, blueprintId, nodeId: nextNodeDef.id });
85
+ } else {
86
+ console.log(`[Adapter] Node '${nextNodeDef.id}' is waiting for other predecessors to complete.`);
87
+ }
88
+ }
89
+ } catch (error) {
90
+ const reason = error.message || "Unknown execution error";
91
+ console.error(`[Adapter] FATAL: Job for node '${nodeId}' failed for Run ID '${runId}': ${reason}`);
92
+ await this.publishFinalResult(runId, { status: "failed", reason });
93
+ }
94
+ }
95
+ /**
96
+ * Encapsulates the fan-in join logic using the coordination store.
97
+ */
98
+ async isReadyForFanIn(runId, blueprint, targetNodeId) {
99
+ const targetNode = blueprint.nodes.find((n) => n.id === targetNodeId);
100
+ if (!targetNode) {
101
+ throw new Error(`Node '${targetNodeId}' not found in blueprint`);
102
+ }
103
+ const joinStrategy = targetNode.config?.joinStrategy || "all";
104
+ const predecessors = blueprint.edges.filter((e) => e.target === targetNodeId);
105
+ if (predecessors.length <= 1) {
106
+ return true;
107
+ }
108
+ if (joinStrategy === "any") {
109
+ const lockKey = `flowcraft:joinlock:${runId}:${targetNodeId}`;
110
+ return await this.store.setIfNotExist(lockKey, "locked", 3600);
111
+ } else {
112
+ const fanInKey = `flowcraft:fanin:${runId}:${targetNodeId}`;
113
+ const readyCount = await this.store.increment(fanInKey, 3600);
114
+ if (readyCount >= predecessors.length) {
115
+ await this.store.delete(fanInKey);
116
+ return true;
117
+ }
118
+ return false;
119
+ }
120
+ }
121
+ /**
122
+ * Reconciles the state of a workflow run. It inspects the persisted
123
+ * context to find completed nodes, determines the next set of executable
124
+ * nodes (the frontier), and enqueues jobs for them if they aren't
125
+ * already running. This is the core of the resume functionality.
126
+ *
127
+ * @param runId The unique ID of the workflow execution to reconcile.
128
+ * @returns The set of node IDs that were enqueued for execution.
129
+ */
130
+ async reconcile(runId) {
131
+ const context = this.createContext(runId);
132
+ const blueprintId = await context.get("blueprintId");
133
+ if (!blueprintId) {
134
+ throw new Error(`Cannot reconcile runId '${runId}': blueprintId not found in context.`);
135
+ }
136
+ const blueprint = this.runtime.options.blueprints?.[blueprintId];
137
+ if (!blueprint) {
138
+ throw new Error(`Cannot reconcile runId '${runId}': Blueprint with ID '${blueprintId}' not found.`);
139
+ }
140
+ const state = await context.toJSON();
141
+ const completedNodes = new Set(Object.keys(state).filter((k) => blueprint.nodes.some((n) => n.id === k)));
142
+ const frontier = this.calculateResumedFrontier(blueprint, completedNodes);
143
+ const enqueuedNodes = /* @__PURE__ */ new Set();
144
+ for (const nodeId of frontier) {
145
+ const nodeDef = blueprint.nodes.find((n) => n.id === nodeId);
146
+ const joinStrategy = nodeDef?.config?.joinStrategy || "all";
147
+ let shouldEnqueue = false;
148
+ if (joinStrategy === "any") {
149
+ const lockKey = `flowcraft:joinlock:${runId}:${nodeId}`;
150
+ if (await this.store.setIfNotExist(lockKey, "locked-by-reconcile", 3600)) {
151
+ shouldEnqueue = true;
152
+ } else {
153
+ console.log(`[Adapter] Reconciling: Node '${nodeId}' is an 'any' join and is already locked.`, { runId });
154
+ }
155
+ } else {
156
+ const lockKey = `flowcraft:nodelock:${runId}:${nodeId}`;
157
+ if (await this.store.setIfNotExist(lockKey, "locked", 120)) {
158
+ shouldEnqueue = true;
159
+ } else {
160
+ console.log(`[Adapter] Reconciling: Node '${nodeId}' is already locked.`, { runId });
161
+ }
162
+ }
163
+ if (shouldEnqueue) {
164
+ console.log(`[Adapter] Reconciling: Enqueuing ready job for node '${nodeId}'`, { runId });
165
+ await this.enqueueJob({ runId, blueprintId: blueprint.id, nodeId });
166
+ enqueuedNodes.add(nodeId);
167
+ }
168
+ }
169
+ return enqueuedNodes;
170
+ }
171
+ calculateResumedFrontier(blueprint, completedNodes) {
172
+ const newFrontier = /* @__PURE__ */ new Set();
173
+ const allPredecessors = /* @__PURE__ */ new Map();
174
+ for (const node of blueprint.nodes) {
175
+ allPredecessors.set(node.id, /* @__PURE__ */ new Set());
176
+ }
177
+ for (const edge of blueprint.edges) {
178
+ allPredecessors.get(edge.target)?.add(edge.source);
179
+ }
180
+ for (const node of blueprint.nodes) {
181
+ if (completedNodes.has(node.id)) {
182
+ continue;
183
+ }
184
+ const predecessors = allPredecessors.get(node.id) ?? /* @__PURE__ */ new Set();
185
+ if (predecessors.size === 0 && !completedNodes.has(node.id)) {
186
+ newFrontier.add(node.id);
187
+ continue;
188
+ }
189
+ const joinStrategy = node.config?.joinStrategy || "all";
190
+ const completedPredecessors = [...predecessors].filter((p) => completedNodes.has(p));
191
+ const isReady = joinStrategy === "any" ? completedPredecessors.length > 0 : completedPredecessors.length === predecessors.size;
192
+ if (isReady) {
193
+ newFrontier.add(node.id);
194
+ }
195
+ }
196
+ return newFrontier;
197
+ }
198
+ };
199
+
200
+ export { BaseDistributedAdapter };
201
+ //# sourceMappingURL=chunk-5EHIPX23.js.map
202
+ //# sourceMappingURL=chunk-5EHIPX23.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/runtime/adapter.ts"],"names":["nodeId"],"mappings":";;;;AAyCO,IAAe,yBAAf,MAAsC;AAAA,EACzB,OAAA;AAAA,EACA,KAAA;AAAA,EACA,UAAA;AAAA,EAEnB,YAAY,OAAA,EAAyB;AACpC,IAAA,IAAA,CAAK,OAAA,GAAU,IAAI,WAAA,CAAY,OAAA,CAAQ,cAAc,CAAA;AACrD,IAAA,IAAA,CAAK,QAAQ,OAAA,CAAQ,iBAAA;AACrB,IAAA,IAAA,CAAK,UAAA,GAAa,OAAA,CAAQ,cAAA,CAAe,UAAA,IAAc,IAAI,cAAA,EAAe;AAC1E,IAAA,OAAA,CAAQ,IAAI,+CAA+C,CAAA;AAAA,EAC5D;AAAA;AAAA;AAAA;AAAA,EAKO,KAAA,GAAc;AACpB,IAAA,OAAA,CAAQ,IAAI,8BAA8B,CAAA;AAC1C,IAAA,IAAA,CAAK,WAAA,CAAY,IAAA,CAAK,SAAA,CAAU,IAAA,CAAK,IAAI,CAAC,CAAA;AAAA,EAC3C;AAAA;AAAA;AAAA;AAAA;AAAA,EAsCA,MAAgB,UAAA,CAAW,MAAA,EAAgB,YAAA,EAAsB,OAAA,EAAgC;AAAA,EAEjG;AAAA;AAAA;AAAA;AAAA,EAKA,MAAgB,UAAU,GAAA,EAAgC;AACzD,IAAA,MAAM,EAAE,KAAA,EAAO,WAAA,EAAa,MAAA,EAAO,GAAI,GAAA;AAEvC,IAAA,MAAM,IAAA,CAAK,UAAA,CAAW,KAAA,EAAO,WAAA,EAAa,MAAM,CAAA;AAEhD,IAAA,MAAM,SAAA,GAAY,IAAA,CAAK,OAAA,CAAQ,OAAA,CAAQ,aAAa,WAAW,CAAA;AAC/D,IAAA,IAAI,CAAC,SAAA,EAAW;AACf,MAAA,MAAM,MAAA,GAAS,sBAAsB,WAAW,CAAA,6CAAA,CAAA;AAChD,MAAA,OAAA,CAAQ,KAAA,CAAM,CAAA,iBAAA,EAAoB,MAAM,CAAA,CAAE,CAAA;AAC1C,MAAA,MAAM,KAAK,kBAAA,CAAmB,KAAA,EAAO,EAAE,MAAA,EAAQ,QAAA,EAAU,QAAQ,CAAA;AACjE,MAAA;AAAA,IACD;AAEA,IAAA,MAAM,OAAA,GAAU,IAAA,CAAK,aAAA,CAAc,KAAK,CAAA;AAGxC,IAAA,MAAM,cAAA,GAAiB,MAAM,OAAA,CAAQ,GAAA,CAAI,aAAoB,CAAA;AAC7D,IAAA,IAAI,CAAC,cAAA,EAAgB;AACpB,MAAA,MAAM,OAAA,CAAQ,GAAA,CAAI,aAAA,EAAsB,WAAW,CAAA;AAAA,IACpD;AACA,IAAA,MAAM,WAAA,GAAc;AAAA,MACnB,YAAY,MAAM,OAAA;AAAA,MAClB,sBAAsB,MAAM;AAAA,MAAC,CAAA;AAAA,MAC7B,QAAA,EAAU,CAACA,OAAAA,EAAgB,KAAA,KAAiB;AAC3C,QAAA,OAAA,CAAQ,KAAA,CAAM,CAAA,wBAAA,EAA2BA,OAAM,CAAA,CAAA,CAAA,EAAK,KAAK,CAAA;AAAA,MAC1D;AAAA,KACD;AAEA,IAAA,IAAI;AACH,MAAA,MAAM,SAA+B,MAAM,IAAA,CAAK,QAAQ,WAAA,CAAY,SAAA,EAAW,QAAQ,WAAW,CAAA;AAClG,MAAA,MAAM,OAAA,CAAQ,GAAA,CAAI,MAAA,EAAe,MAAA,CAAO,MAAM,CAAA;AAE9C,MAAA,MAAM,OAAA,GAAU,UAAU,KAAA,CAAM,IAAA,CAAK,CAAC,CAAA,KAAM,CAAA,CAAE,OAAO,MAAM,CAAA;AAE3D,MAAA,IAAI,OAAA,EAAS,SAAS,QAAA,EAAU;AAC/B,QAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,8BAAA,EAA4B,MAAM,CAAA,oDAAA,EAAuD,KAAK,CAAA,CAA
E,CAAA;AAC5G,QAAA,MAAM,YAAA,GAAe,MAAM,OAAA,CAAQ,MAAA,EAAO;AAC1C,QAAA,MAAM,WAAA,GAA8B;AAAA,UACnC,OAAA,EAAS,YAAA;AAAA,UACT,iBAAA,EAAmB,IAAA,CAAK,UAAA,CAAW,SAAA,CAAU,YAAY,CAAA;AAAA,UACzD,MAAA,EAAQ;AAAA,SACT;AACA,QAAA,MAAM,IAAA,CAAK,mBAAmB,KAAA,EAAO;AAAA,UACpC,MAAA,EAAQ,WAAA;AAAA,UACR,OAAA,EAAS;AAAA,SACT,CAAA;AACD,QAAA;AAAA,MACD;AAEA,MAAA,MAAM,SAAA,GAAY,MAAM,IAAA,CAAK,OAAA,CAAQ,mBAAmB,SAAA,EAAW,MAAA,EAAQ,QAAQ,OAAO,CAAA;AAG1F,MAAA,IAAI,SAAA,CAAU,WAAW,CAAA,EAAG;AAC3B,QAAA,OAAA,CAAQ,GAAA;AAAA,UACP,CAAA,yBAAA,EAA4B,MAAM,CAAA,sBAAA,EAAyB,KAAK,CAAA,mEAAA;AAAA,SACjE;AACA,QAAA;AAAA,MACD;AAEA,MAAA,KAAA,MAAW,EAAE,IAAA,EAAM,WAAA,EAAa,IAAA,MAAU,SAAA,EAAW;AACpD,QAAA,MAAM,KAAK,OAAA,CAAQ,kBAAA,CAAmB,IAAA,EAAM,MAAA,EAAQ,aAAa,OAAO,CAAA;AACxE,QAAA,MAAM,UAAU,MAAM,IAAA,CAAK,gBAAgB,KAAA,EAAO,SAAA,EAAW,YAAY,EAAE,CAAA;AAC3E,QAAA,IAAI,OAAA,EAAS;AACZ,UAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,gBAAA,EAAmB,WAAA,CAAY,EAAE,CAAA,0BAAA,CAA4B,CAAA;AACzE,UAAA,MAAM,IAAA,CAAK,WAAW,EAAE,KAAA,EAAO,aAAa,MAAA,EAAQ,WAAA,CAAY,IAAI,CAAA;AAAA,QACrE,CAAA,MAAO;AACN,UAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,gBAAA,EAAmB,WAAA,CAAY,EAAE,CAAA,gDAAA,CAAkD,CAAA;AAAA,QAChG;AAAA,MACD;AAAA,IACD,SAAS,KAAA,EAAY;AACpB,MAAA,MAAM,MAAA,GAAS,MAAM,OAAA,IAAW,yBAAA;AAChC,MAAA,OAAA,CAAQ,MAAM,CAAA,+BAAA,EAAkC,MAAM,wBAAwB,KAAK,CAAA,GAAA,EAAM,MAAM,CAAA,CAAE,CAAA;AACjG,MAAA,MAAM,KAAK,kBAAA,CAAmB,KAAA,EAAO,EAAE,MAAA,EAAQ,QAAA,EAAU,QAAQ,CAAA;AAAA,IAClE;AAAA,EACD;AAAA;AAAA;AAAA;AAAA,EAKA,MAAgB,eAAA,CAAgB,KAAA,EAAe,SAAA,EAA8B,YAAA,EAAwC;AACpH,IAAA,MAAM,UAAA,GAAa,UAAU,KAAA,CAAM,IAAA,CAAK,CAAC,CAAA,KAAM,CAAA,CAAE,OAAO,YAAY,CAAA;AACpE,IAAA,IAAI,CAAC,UAAA,EAAY;AAChB,MAAA,MAAM,IAAI,KAAA,CAAM,CAAA,MAAA,EAAS,YAAY,CAAA,wBAAA,CAA0B,CAAA;AAAA,IAChE;AACA,IAAA,MAAM,YAAA,GAAe,UAAA,CAAW,MAAA,EAAQ,YAAA,IAAgB,KAAA;AACxD,IAAA,MAAM,YAAA,GAAe,UAAU,KAAA,CAAM,MAAA,CAAO,CAAC,CAAA,KAAM,CAAA,CAAE,WAAW,YAAY,CAAA;AAE5E,IAAA,IAAI,YAAA,CAAa,UAAU,CAAA,EAAG;AAC7B,MAAA,OAAO,IAAA;AAAA,IACR;AAEA,IAAA,IAAI,iBAAiB,KAAA,EAAO;AAC3B,MAAA,MAAM,OAAA,GAAU,CAAA,mBAAA,EAAsB,KAAK,CAAA,CAAA,EAAI,YAAY,CAAA,CAAA;
AAC3D,MAAA,OAAO,MAAM,IAAA,CAAK,KAAA,CAAM,aAAA,CAAc,OAAA,EAAS,UAAU,IAAI,CAAA;AAAA,IAC9D,CAAA,MAAO;AACN,MAAA,MAAM,QAAA,GAAW,CAAA,gBAAA,EAAmB,KAAK,CAAA,CAAA,EAAI,YAAY,CAAA,CAAA;AACzD,MAAA,MAAM,aAAa,MAAM,IAAA,CAAK,KAAA,CAAM,SAAA,CAAU,UAAU,IAAI,CAAA;AAC5D,MAAA,IAAI,UAAA,IAAc,aAAa,MAAA,EAAQ;AACtC,QAAA,MAAM,IAAA,CAAK,KAAA,CAAM,MAAA,CAAO,QAAQ,CAAA;AAChC,QAAA,OAAO,IAAA;AAAA,MACR;AACA,MAAA,OAAO,KAAA;AAAA,IACR;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWA,MAAa,UAAU,KAAA,EAAqC;AAC3D,IAAA,MAAM,OAAA,GAAU,IAAA,CAAK,aAAA,CAAc,KAAK,CAAA;AACxC,IAAA,MAAM,WAAA,GAAe,MAAM,OAAA,CAAQ,GAAA,CAAI,aAAoB,CAAA;AAE3D,IAAA,IAAI,CAAC,WAAA,EAAa;AACjB,MAAA,MAAM,IAAI,KAAA,CAAM,CAAA,wBAAA,EAA2B,KAAK,CAAA,oCAAA,CAAsC,CAAA;AAAA,IACvF;AACA,IAAA,MAAM,SAAA,GAAY,IAAA,CAAK,OAAA,CAAQ,OAAA,CAAQ,aAAa,WAAW,CAAA;AAC/D,IAAA,IAAI,CAAC,SAAA,EAAW;AACf,MAAA,MAAM,IAAI,KAAA,CAAM,CAAA,wBAAA,EAA2B,KAAK,CAAA,sBAAA,EAAyB,WAAW,CAAA,YAAA,CAAc,CAAA;AAAA,IACnG;AAEA,IAAA,MAAM,KAAA,GAAQ,MAAM,OAAA,CAAQ,MAAA,EAAO;AAEnC,IAAA,MAAM,iBAAiB,IAAI,GAAA,CAAI,OAAO,IAAA,CAAK,KAAK,EAAE,MAAA,CAAO,CAAC,MAAM,SAAA,CAAU,KAAA,CAAM,KAAK,CAAC,CAAA,KAAM,EAAE,EAAA,KAAO,CAAC,CAAC,CAAC,CAAA;AAExG,IAAA,MAAM,QAAA,GAAW,IAAA,CAAK,wBAAA,CAAyB,SAAA,EAAW,cAAc,CAAA;AAExE,IAAA,MAAM,aAAA,uBAAoB,GAAA,EAAY;AACtC,IAAA,KAAA,MAAW,UAAU,QAAA,EAAU;AAC9B,MAAA,MAAM,OAAA,GAAU,UAAU,KAAA,CAAM,IAAA,CAAK,CAAC,CAAA,KAAM,CAAA,CAAE,OAAO,MAAM,CAAA;AAC3D,MAAA,MAAM,YAAA,GAAe,OAAA,EAAS,MAAA,EAAQ,YAAA,IAAgB,KAAA;AAEtD,MAAA,IAAI,aAAA,GAAgB,KAAA;AAEpB,MAAA,IAAI,iBAAiB,KAAA,EAAO;AAE3B,QAAA,MAAM,OAAA,GAAU,CAAA,mBAAA,EAAsB,KAAK,CAAA,CAAA,EAAI,MAAM,CAAA,CAAA;AACrD,QAAA,IAAI,MAAM,IAAA,CAAK,KAAA,CAAM,cAAc,OAAA,EAAS,qBAAA,EAAuB,IAAI,CAAA,EAAG;AACzE,UAAA,aAAA,GAAgB,IAAA;AAAA,QACjB,CAAA,MAAO;AACN,UAAA,OAAA,CAAQ,IAAI,CAAA,6BAAA,EAAgC,MAAM,CAAA,yCAAA,CAAA,EAA6C,EAAE,OAAO,CAAA;AAAA,QACzG;AAAA,MACD,CAAA,MAAO;AAEN,QAAA,MAAM,OAAA,GAAU,CAAA,mBAAA,EAAsB,KAAK,CAAA,CAAA,EAAI,MAAM,CAAA,CAAA;AACrD,QAAA,IAAI,MAAM,IAAA,CAAK,KAAA,CAAM,cAAc,OAAA,EAAS,QAAA,EAAU,GAAG,CAAA,EAAG;AAC3D,UAAA,aAAA,GAAg
B,IAAA;AAAA,QACjB,CAAA,MAAO;AACN,UAAA,OAAA,CAAQ,IAAI,CAAA,6BAAA,EAAgC,MAAM,CAAA,oBAAA,CAAA,EAAwB,EAAE,OAAO,CAAA;AAAA,QACpF;AAAA,MACD;AAEA,MAAA,IAAI,aAAA,EAAe;AAClB,QAAA,OAAA,CAAQ,IAAI,CAAA,qDAAA,EAAwD,MAAM,CAAA,CAAA,CAAA,EAAK,EAAE,OAAO,CAAA;AACxF,QAAA,MAAM,IAAA,CAAK,WAAW,EAAE,KAAA,EAAO,aAAa,SAAA,CAAU,EAAA,EAAI,QAAQ,CAAA;AAClE,QAAA,aAAA,CAAc,IAAI,MAAM,CAAA;AAAA,MACzB;AAAA,IACD;AAEA,IAAA,OAAO,aAAA;AAAA,EACR;AAAA,EAEQ,wBAAA,CAAyB,WAA8B,cAAA,EAA0C;AACxG,IAAA,MAAM,WAAA,uBAAkB,GAAA,EAAY;AACpC,IAAA,MAAM,eAAA,uBAAsB,GAAA,EAAyB;AAErD,IAAA,KAAA,MAAW,IAAA,IAAQ,UAAU,KAAA,EAAO;AACnC,MAAA,eAAA,CAAgB,GAAA,CAAI,IAAA,CAAK,EAAA,kBAAI,IAAI,KAAK,CAAA;AAAA,IACvC;AACA,IAAA,KAAA,MAAW,IAAA,IAAQ,UAAU,KAAA,EAAO;AACnC,MAAA,eAAA,CAAgB,IAAI,IAAA,CAAK,MAAM,CAAA,EAAG,GAAA,CAAI,KAAK,MAAM,CAAA;AAAA,IAClD;AAEA,IAAA,KAAA,MAAW,IAAA,IAAQ,UAAU,KAAA,EAAO;AACnC,MAAA,IAAI,cAAA,CAAe,GAAA,CAAI,IAAA,CAAK,EAAE,CAAA,EAAG;AAChC,QAAA;AAAA,MACD;AAEA,MAAA,MAAM,eAAe,eAAA,CAAgB,GAAA,CAAI,KAAK,EAAE,CAAA,wBAAS,GAAA,EAAI;AAC7D,MAAA,IAAI,YAAA,CAAa,SAAS,CAAA,IAAK,CAAC,eAAe,GAAA,CAAI,IAAA,CAAK,EAAE,CAAA,EAAG;AAC5D,QAAA,WAAA,CAAY,GAAA,CAAI,KAAK,EAAE,CAAA;AACvB,QAAA;AAAA,MACD;AAEA,MAAA,MAAM,YAAA,GAAe,IAAA,CAAK,MAAA,EAAQ,YAAA,IAAgB,KAAA;AAClD,MAAA,MAAM,qBAAA,GAAwB,CAAC,GAAG,YAAY,CAAA,CAAE,MAAA,CAAO,CAAC,CAAA,KAAM,cAAA,CAAe,GAAA,CAAI,CAAC,CAAC,CAAA;AAEnF,MAAA,MAAM,OAAA,GACL,iBAAiB,KAAA,GAAQ,qBAAA,CAAsB,SAAS,CAAA,GAAI,qBAAA,CAAsB,WAAW,YAAA,CAAa,IAAA;AAE3G,MAAA,IAAI,OAAA,EAAS;AACZ,QAAA,WAAA,CAAY,GAAA,CAAI,KAAK,EAAE,CAAA;AAAA,MACxB;AAAA,IACD;AACA,IAAA,OAAO,WAAA;AAAA,EACR;AACD","file":"chunk-5EHIPX23.js","sourcesContent":["import { JsonSerializer } from '../serializer'\nimport type {\n\tIAsyncContext,\n\tISerializer,\n\tNodeResult,\n\tRuntimeOptions,\n\tWorkflowBlueprint,\n\tWorkflowResult,\n} from '../types'\nimport { FlowRuntime } from './runtime'\n\n/**\n * Defines the contract for an atomic, distributed key-value store required by\n * the adapter for coordination tasks like fan-in joins and locking.\n */\nexport interface 
ICoordinationStore {\n\t/** Atomically increments a key and returns the new value. Ideal for 'all' joins. */\n\tincrement: (key: string, ttlSeconds: number) => Promise<number>\n\t/** Sets a key only if it does not already exist. Ideal for 'any' joins (locking). */\n\tsetIfNotExist: (key: string, value: string, ttlSeconds: number) => Promise<boolean>\n\t/** Deletes a key. Used for cleanup. */\n\tdelete: (key: string) => Promise<void>\n}\n\n/** Configuration options for constructing a BaseDistributedAdapter. */\nexport interface AdapterOptions {\n\truntimeOptions: RuntimeOptions<any>\n\tcoordinationStore: ICoordinationStore\n}\n\n/** The data payload expected for a job in the queue. */\nexport interface JobPayload {\n\trunId: string\n\tblueprintId: string\n\tnodeId: string\n}\n\n/**\n * The base class for all distributed adapters. It handles the technology-agnostic\n * orchestration logic and leaves queue-specific implementation to subclasses.\n */\nexport abstract class BaseDistributedAdapter {\n\tprotected readonly runtime: FlowRuntime<any, any>\n\tprotected readonly store: ICoordinationStore\n\tprotected readonly serializer: ISerializer\n\n\tconstructor(options: AdapterOptions) {\n\t\tthis.runtime = new FlowRuntime(options.runtimeOptions)\n\t\tthis.store = options.coordinationStore\n\t\tthis.serializer = options.runtimeOptions.serializer || new JsonSerializer()\n\t\tconsole.log('[Adapter] BaseDistributedAdapter initialized.')\n\t}\n\n\t/**\n\t * Starts the worker, which begins listening for and processing jobs from the queue.\n\t */\n\tpublic start(): void {\n\t\tconsole.log('[Adapter] Starting worker...')\n\t\tthis.processJobs(this.handleJob.bind(this))\n\t}\n\n\t/**\n\t * Creates a technology-specific distributed context for a given workflow run.\n\t * @param runId The unique ID for the workflow execution.\n\t */\n\tprotected abstract createContext(runId: string): IAsyncContext<Record<string, any>>\n\t/**\n\t * Sets up the listener for the message queue. 
The implementation should call the\n\t * provided `handler` function for each new job received.\n\t * @param handler The core logic to execute for each job.\n\t */\n\tprotected abstract processJobs(handler: (job: JobPayload) => Promise<void>): void\n\n\t/**\n\t * Enqueues a new job onto the message queue.\n\t * @param job The payload for the job to be enqueued.\n\t */\n\tprotected abstract enqueueJob(job: JobPayload): Promise<void>\n\n\t/**\n\t * Publishes the final result of a completed or failed workflow run.\n\t * @param runId The unique ID of the workflow run.\n\t * @param result The final status and payload of the workflow.\n\t */\n\tprotected abstract publishFinalResult(\n\t\trunId: string,\n\t\tresult: {\n\t\t\tstatus: 'completed' | 'failed'\n\t\t\tpayload?: WorkflowResult\n\t\t\treason?: string\n\t\t},\n\t): Promise<void>\n\n\t/**\n\t * Hook called at the start of job processing. Subclasses can override this\n\t * to perform additional setup (e.g., timestamp tracking for reconciliation).\n\t */\n\tprotected async onJobStart(_runId: string, _blueprintId: string, _nodeId: string): Promise<void> {\n\t\t// default implementation does nothing\n\t}\n\n\t/**\n\t * The main handler for processing a single job from the queue.\n\t */\n\tprotected async handleJob(job: JobPayload): Promise<void> {\n\t\tconst { runId, blueprintId, nodeId } = job\n\n\t\tawait this.onJobStart(runId, blueprintId, nodeId)\n\n\t\tconst blueprint = this.runtime.options.blueprints?.[blueprintId]\n\t\tif (!blueprint) {\n\t\t\tconst reason = `Blueprint with ID '${blueprintId}' not found in the worker's runtime registry.`\n\t\t\tconsole.error(`[Adapter] FATAL: ${reason}`)\n\t\t\tawait this.publishFinalResult(runId, { status: 'failed', reason })\n\t\t\treturn\n\t\t}\n\n\t\tconst context = this.createContext(runId)\n\n\t\t// persist the blueprintId for the reconcile method to find later\n\t\tconst hasBlueprintId = await context.has('blueprintId' as any)\n\t\tif (!hasBlueprintId) {\n\t\t\tawait 
context.set('blueprintId' as any, blueprintId)\n\t\t}\n\t\tconst workerState = {\n\t\t\tgetContext: () => context,\n\t\t\tmarkFallbackExecuted: () => {},\n\t\t\taddError: (nodeId: string, error: Error) => {\n\t\t\t\tconsole.error(`[Adapter] Error in node ${nodeId}:`, error)\n\t\t\t},\n\t\t} as any\n\n\t\ttry {\n\t\t\tconst result: NodeResult<any, any> = await this.runtime.executeNode(blueprint, nodeId, workerState)\n\t\t\tawait context.set(nodeId as any, result.output)\n\n\t\t\tconst nodeDef = blueprint.nodes.find((n) => n.id === nodeId)\n\t\t\t// workflow is considered complete when the first 'output' node finishes.\n\t\t\tif (nodeDef?.uses === 'output') {\n\t\t\t\tconsole.log(`[Adapter] ✅ Output node '${nodeId}' finished. Declaring workflow complete for Run ID: ${runId}`)\n\t\t\t\tconst finalContext = await context.toJSON()\n\t\t\t\tconst finalResult: WorkflowResult = {\n\t\t\t\t\tcontext: finalContext,\n\t\t\t\t\tserializedContext: this.serializer.serialize(finalContext),\n\t\t\t\t\tstatus: 'completed',\n\t\t\t\t}\n\t\t\t\tawait this.publishFinalResult(runId, {\n\t\t\t\t\tstatus: 'completed',\n\t\t\t\t\tpayload: finalResult,\n\t\t\t\t})\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\tconst nextNodes = await this.runtime.determineNextNodes(blueprint, nodeId, result, context)\n\n\t\t\t// stop if a branch terminates but it wasn't an 'output' node\n\t\t\tif (nextNodes.length === 0) {\n\t\t\t\tconsole.log(\n\t\t\t\t\t`[Adapter] Terminal node '${nodeId}' reached for Run ID '${runId}', but it was not an 'output' node. This branch will now terminate.`,\n\t\t\t\t)\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\tfor (const { node: nextNodeDef, edge } of nextNodes) {\n\t\t\t\tawait this.runtime.applyEdgeTransform(edge, result, nextNodeDef, context)\n\t\t\t\tconst isReady = await this.isReadyForFanIn(runId, blueprint, nextNodeDef.id)\n\t\t\t\tif (isReady) {\n\t\t\t\t\tconsole.log(`[Adapter] Node '${nextNodeDef.id}' is ready. 
Enqueuing job.`)\n\t\t\t\t\tawait this.enqueueJob({ runId, blueprintId, nodeId: nextNodeDef.id })\n\t\t\t\t} else {\n\t\t\t\t\tconsole.log(`[Adapter] Node '${nextNodeDef.id}' is waiting for other predecessors to complete.`)\n\t\t\t\t}\n\t\t\t}\n\t\t} catch (error: any) {\n\t\t\tconst reason = error.message || 'Unknown execution error'\n\t\t\tconsole.error(`[Adapter] FATAL: Job for node '${nodeId}' failed for Run ID '${runId}': ${reason}`)\n\t\t\tawait this.publishFinalResult(runId, { status: 'failed', reason })\n\t\t}\n\t}\n\n\t/**\n\t * Encapsulates the fan-in join logic using the coordination store.\n\t */\n\tprotected async isReadyForFanIn(runId: string, blueprint: WorkflowBlueprint, targetNodeId: string): Promise<boolean> {\n\t\tconst targetNode = blueprint.nodes.find((n) => n.id === targetNodeId)\n\t\tif (!targetNode) {\n\t\t\tthrow new Error(`Node '${targetNodeId}' not found in blueprint`)\n\t\t}\n\t\tconst joinStrategy = targetNode.config?.joinStrategy || 'all'\n\t\tconst predecessors = blueprint.edges.filter((e) => e.target === targetNodeId)\n\n\t\tif (predecessors.length <= 1) {\n\t\t\treturn true\n\t\t}\n\n\t\tif (joinStrategy === 'any') {\n\t\t\tconst lockKey = `flowcraft:joinlock:${runId}:${targetNodeId}`\n\t\t\treturn await this.store.setIfNotExist(lockKey, 'locked', 3600)\n\t\t} else {\n\t\t\tconst fanInKey = `flowcraft:fanin:${runId}:${targetNodeId}`\n\t\t\tconst readyCount = await this.store.increment(fanInKey, 3600)\n\t\t\tif (readyCount >= predecessors.length) {\n\t\t\t\tawait this.store.delete(fanInKey)\n\t\t\t\treturn true\n\t\t\t}\n\t\t\treturn false\n\t\t}\n\t}\n\n\t/**\n\t * Reconciles the state of a workflow run. It inspects the persisted\n\t * context to find completed nodes, determines the next set of executable\n\t * nodes (the frontier), and enqueues jobs for them if they aren't\n\t * already running. 
This is the core of the resume functionality.\n\t *\n\t * @param runId The unique ID of the workflow execution to reconcile.\n\t * @returns The set of node IDs that were enqueued for execution.\n\t */\n\tpublic async reconcile(runId: string): Promise<Set<string>> {\n\t\tconst context = this.createContext(runId)\n\t\tconst blueprintId = (await context.get('blueprintId' as any)) as string | undefined\n\n\t\tif (!blueprintId) {\n\t\t\tthrow new Error(`Cannot reconcile runId '${runId}': blueprintId not found in context.`)\n\t\t}\n\t\tconst blueprint = this.runtime.options.blueprints?.[blueprintId]\n\t\tif (!blueprint) {\n\t\t\tthrow new Error(`Cannot reconcile runId '${runId}': Blueprint with ID '${blueprintId}' not found.`)\n\t\t}\n\n\t\tconst state = await context.toJSON()\n\t\t// filter out internal keys\n\t\tconst completedNodes = new Set(Object.keys(state).filter((k) => blueprint.nodes.some((n) => n.id === k)))\n\n\t\tconst frontier = this.calculateResumedFrontier(blueprint, completedNodes)\n\n\t\tconst enqueuedNodes = new Set<string>()\n\t\tfor (const nodeId of frontier) {\n\t\t\tconst nodeDef = blueprint.nodes.find((n) => n.id === nodeId)\n\t\t\tconst joinStrategy = nodeDef?.config?.joinStrategy || 'all'\n\n\t\t\tlet shouldEnqueue = false\n\n\t\t\tif (joinStrategy === 'any') {\n\t\t\t\t// acquire the permanent join lock\n\t\t\t\tconst lockKey = `flowcraft:joinlock:${runId}:${nodeId}`\n\t\t\t\tif (await this.store.setIfNotExist(lockKey, 'locked-by-reconcile', 3600)) {\n\t\t\t\t\tshouldEnqueue = true\n\t\t\t\t} else {\n\t\t\t\t\tconsole.log(`[Adapter] Reconciling: Node '${nodeId}' is an 'any' join and is already locked.`, { runId })\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\t// 'all' joins and single-predecessor nodes use a temporary lock\n\t\t\t\tconst lockKey = `flowcraft:nodelock:${runId}:${nodeId}`\n\t\t\t\tif (await this.store.setIfNotExist(lockKey, 'locked', 120)) {\n\t\t\t\t\tshouldEnqueue = true\n\t\t\t\t} else {\n\t\t\t\t\tconsole.log(`[Adapter] Reconciling: 
Node '${nodeId}' is already locked.`, { runId })\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif (shouldEnqueue) {\n\t\t\t\tconsole.log(`[Adapter] Reconciling: Enqueuing ready job for node '${nodeId}'`, { runId })\n\t\t\t\tawait this.enqueueJob({ runId, blueprintId: blueprint.id, nodeId })\n\t\t\t\tenqueuedNodes.add(nodeId)\n\t\t\t}\n\t\t}\n\n\t\treturn enqueuedNodes\n\t}\n\n\tprivate calculateResumedFrontier(blueprint: WorkflowBlueprint, completedNodes: Set<string>): Set<string> {\n\t\tconst newFrontier = new Set<string>()\n\t\tconst allPredecessors = new Map<string, Set<string>>()\n\t\t// (logic extracted from the GraphTraverser)\n\t\tfor (const node of blueprint.nodes) {\n\t\t\tallPredecessors.set(node.id, new Set())\n\t\t}\n\t\tfor (const edge of blueprint.edges) {\n\t\t\tallPredecessors.get(edge.target)?.add(edge.source)\n\t\t}\n\n\t\tfor (const node of blueprint.nodes) {\n\t\t\tif (completedNodes.has(node.id)) {\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\tconst predecessors = allPredecessors.get(node.id) ?? new Set()\n\t\t\tif (predecessors.size === 0 && !completedNodes.has(node.id)) {\n\t\t\t\tnewFrontier.add(node.id)\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\tconst joinStrategy = node.config?.joinStrategy || 'all'\n\t\t\tconst completedPredecessors = [...predecessors].filter((p) => completedNodes.has(p))\n\n\t\t\tconst isReady =\n\t\t\t\tjoinStrategy === 'any' ? completedPredecessors.length > 0 : completedPredecessors.length === predecessors.size\n\n\t\t\tif (isReady) {\n\t\t\t\tnewFrontier.add(node.id)\n\t\t\t}\n\t\t}\n\t\treturn newFrontier\n\t}\n}\n"]}
@@ -0,0 +1,40 @@
1
// src/node.ts
/**
 * Type guard distinguishing a class-based node (NodeClass) from a plain
 * node function: only classes expose an `exec` method on their prototype.
 */
function isNodeClass(impl) {
  if (typeof impl !== "function") {
    return false;
  }
  return !!impl.prototype?.exec;
}
5
var BaseNode = class {
  /**
   * @param params Static, blueprint-provided parameters for this node instance.
   */
  constructor(params) {
    this.params = params;
  }
  /**
   * Lifecycle phase 1 (never retried): gather the data the `exec` phase
   * will operate on. The default implementation simply forwards the node's
   * input from the context.
   * @param context The node's execution context.
   * @returns The value to hand to `exec`.
   */
  async prep(context) {
    const { input } = context;
    return input;
  }
  /**
   * Lifecycle phase 3 (never retried): post-process the result produced by
   * `exec` (or `fallback`) and persist state. The default implementation
   * passes the result through unchanged.
   * @param execResult The successful result from `exec` or `fallback`.
   * @param _context The node's execution context (unused by default).
   */
  async post(execResult, _context) {
    return execResult;
  }
  /**
   * Safety net invoked once every `exec` retry has failed. The default
   * behavior is to rethrow, which marks the node as failed.
   * @param error The final error from the last `exec` attempt.
   * @param _context The node's execution context (unused by default).
   */
  async fallback(error, _context) {
    throw error;
  }
};
37
+
38
+ export { BaseNode, isNodeClass };
39
+ //# sourceMappingURL=chunk-5QMPFUKA.js.map
40
+ //# sourceMappingURL=chunk-5QMPFUKA.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/node.ts"],"names":[],"mappings":";AAGO,SAAS,YAAY,IAAA,EAA8B;AACzD,EAAA,OAAO,OAAO,IAAA,KAAS,UAAA,IAAc,CAAC,CAAC,KAAK,SAAA,EAAW,IAAA;AACxD;AAOO,IAAe,WAAf,MAML;AAAA;AAAA;AAAA;AAAA,EAID,YAAsB,MAAA,EAA8B;AAA9B,IAAA,IAAA,CAAA,MAAA,GAAA,MAAA;AAAA,EAA+B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOrD,MAAM,KAAK,OAAA,EAAqE;AAC/E,IAAA,OAAO,OAAA,CAAQ,KAAA;AAAA,EAChB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAiBA,MAAM,IAAA,CACL,UAAA,EACA,QAAA,EACwC;AACxC,IAAA,OAAO,UAAA;AAAA,EACR;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,MAAM,QAAA,CACL,KAAA,EACA,QAAA,EACuD;AAEvD,IAAA,MAAM,KAAA;AAAA,EACP;AACD","file":"chunk-5QMPFUKA.js","sourcesContent":["import type { NodeClass, NodeContext, NodeResult, RuntimeDependencies } from './types'\n\n/** A type guard to reliably distinguish a NodeClass from a NodeFunction. */\nexport function isNodeClass(impl: any): impl is NodeClass {\n\treturn typeof impl === 'function' && !!impl.prototype?.exec\n}\n\n/**\n * A structured, class-based node for complex logic with a safe, granular lifecycle.\n * This class is generic, allowing implementations to specify the exact context\n * and dependency types they expect.\n */\nexport abstract class BaseNode<\n\tTContext extends Record<string, any> = Record<string, any>,\n\tTDependencies extends RuntimeDependencies = RuntimeDependencies,\n\tTInput = any,\n\tTOutput = any,\n\tTAction extends string = string,\n> {\n\t/**\n\t * @param params Static parameters for this node instance, passed from the blueprint.\n\t */\n\tconstructor(protected params?: Record<string, any>) {}\n\n\t/**\n\t * Phase 1: Gathers and prepares data for execution. This phase is NOT retried on failure.\n\t * @param context The node's execution context.\n\t * @returns The data needed for the `exec` phase.\n\t */\n\tasync prep(context: NodeContext<TContext, TDependencies, TInput>): Promise<any> {\n\t\treturn context.input\n\t}\n\n\t/**\n\t * Phase 2: Performs the core, isolated logic. 
This is the ONLY phase that is retried.\n\t * @param prepResult The data returned from the `prep` phase.\n\t * @param context The node's execution context.\n\t */\n\tabstract exec(\n\t\tprepResult: any,\n\t\tcontext: NodeContext<TContext, TDependencies, TInput>,\n\t): Promise<Omit<NodeResult<TOutput, TAction>, 'error'>>\n\n\t/**\n\t * Phase 3: Processes the result and saves state. This phase is NOT retried.\n\t * @param execResult The successful result from the `exec` or `fallback` phase.\n\t * @param _context The node's execution context.\n\t */\n\tasync post(\n\t\texecResult: Omit<NodeResult<TOutput, TAction>, 'error'>,\n\t\t_context: NodeContext<TContext, TDependencies, TInput>,\n\t): Promise<NodeResult<TOutput, TAction>> {\n\t\treturn execResult\n\t}\n\n\t/**\n\t * An optional safety net that runs if all `exec` retries fail.\n\t * @param error The final error from the last `exec` attempt.\n\t * @param _context The node's execution context.\n\t */\n\tasync fallback(\n\t\terror: Error,\n\t\t_context: NodeContext<TContext, TDependencies, TInput>,\n\t): Promise<Omit<NodeResult<TOutput, TAction>, 'error'>> {\n\t\t// By default, re-throw the error, failing the node.\n\t\tthrow error\n\t}\n}\n"]}