flowcraft 2.10.0 → 2.10.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +3 -3
- package/dist/adapter-DzeZVjSE.d.mts +133 -0
- package/dist/adapters/index.d.mts +2 -0
- package/dist/adapters/index.mjs +3 -0
- package/dist/adapters/persistent-event-bus.d.mts +2 -0
- package/dist/adapters/persistent-event-bus.mjs +59 -0
- package/dist/analysis-B5Twr7sD.d.mts +52 -0
- package/dist/analysis.d.mts +2 -0
- package/dist/analysis.mjs +164 -0
- package/dist/batch-gather-BhF-IzQR.d.mts +8 -0
- package/dist/batch-scatter-DD8TU0Wm.d.mts +8 -0
- package/dist/container-BKdd-9wf.d.mts +24 -0
- package/dist/container-factory-fDY2kkxt.d.mts +17 -0
- package/dist/container-factory.d.mts +2 -0
- package/dist/container-factory.mjs +23 -0
- package/dist/container.d.mts +2 -0
- package/dist/container.mjs +43 -0
- package/dist/context-ZVtzXuZu.d.mts +64 -0
- package/dist/context.d.mts +2 -0
- package/dist/context.mjs +145 -0
- package/dist/error-mapper-BAv_YQMQ.d.mts +14 -0
- package/dist/error-mapper.d.mts +2 -0
- package/dist/error-mapper.mjs +37 -0
- package/dist/errors-CyyIj3OO.d.mts +21 -0
- package/dist/errors.d.mts +2 -0
- package/dist/errors.mjs +24 -0
- package/dist/evaluator-Dnj5qJ92.d.mts +31 -0
- package/dist/evaluator.d.mts +2 -0
- package/dist/evaluator.mjs +80 -0
- package/dist/flow-CZGpYpl-.d.mts +94 -0
- package/dist/flow.d.mts +2 -0
- package/dist/flow.mjs +328 -0
- package/dist/index-9iG2qHLe.d.mts +1 -0
- package/dist/index-Bk0eNZmQ.d.mts +1 -0
- package/dist/index-CNgSR_kt.d.mts +1 -0
- package/dist/index-CW2WHUXP.d.mts +1 -0
- package/dist/index.d.mts +24 -1
- package/dist/index.mjs +31 -791
- package/dist/linter-B8KALEae.d.mts +25 -0
- package/dist/linter.d.mts +2 -0
- package/dist/linter.mjs +74 -0
- package/dist/logger-BvDgvNHQ.d.mts +19 -0
- package/dist/logger.d.mts +2 -0
- package/dist/logger.mjs +26 -0
- package/dist/node.d.mts +2 -0
- package/dist/node.mjs +55 -0
- package/dist/nodes/batch-gather.d.mts +2 -0
- package/dist/nodes/batch-gather.mjs +47 -0
- package/dist/nodes/batch-scatter.d.mts +2 -0
- package/dist/nodes/batch-scatter.mjs +52 -0
- package/dist/nodes/index.d.mts +7 -0
- package/dist/nodes/index.mjs +8 -0
- package/dist/nodes/sleep.d.mts +2 -0
- package/dist/nodes/sleep.mjs +41 -0
- package/dist/nodes/subflow.d.mts +2 -0
- package/dist/nodes/subflow.mjs +64 -0
- package/dist/nodes/wait.d.mts +2 -0
- package/dist/nodes/wait.mjs +12 -0
- package/dist/nodes/webhook.d.mts +2 -0
- package/dist/nodes/webhook.mjs +24 -0
- package/dist/orchestrator-DwMIJRFI.d.mts +8 -0
- package/dist/persistent-event-bus-COiQOpWh.d.mts +68 -0
- package/dist/replay-CVOy6d_L.d.mts +44 -0
- package/dist/runtime/adapter.d.mts +2 -0
- package/dist/runtime/adapter.mjs +349 -0
- package/dist/runtime/builtin-keys.d.mts +37 -0
- package/dist/runtime/builtin-keys.mjs +12 -0
- package/dist/runtime/execution-context.d.mts +2 -0
- package/dist/runtime/execution-context.mjs +26 -0
- package/dist/runtime/executors.d.mts +2 -0
- package/dist/runtime/executors.mjs +259 -0
- package/dist/runtime/index.d.mts +6 -0
- package/dist/runtime/index.mjs +10 -0
- package/dist/runtime/node-executor-factory.d.mts +11 -0
- package/dist/runtime/node-executor-factory.mjs +41 -0
- package/dist/runtime/orchestrator.d.mts +2 -0
- package/dist/runtime/orchestrator.mjs +41 -0
- package/dist/runtime/orchestrators/replay.d.mts +2 -0
- package/dist/{replay-BB11M6K1.mjs → runtime/orchestrators/replay.mjs} +1 -20
- package/dist/runtime/orchestrators/step-by-step.d.mts +15 -0
- package/dist/runtime/orchestrators/step-by-step.mjs +41 -0
- package/dist/runtime/orchestrators/utils.d.mts +2 -0
- package/dist/runtime/orchestrators/utils.mjs +79 -0
- package/dist/runtime/runtime.d.mts +2 -0
- package/dist/runtime/runtime.mjs +425 -0
- package/dist/runtime/scheduler.d.mts +2 -0
- package/dist/runtime/scheduler.mjs +64 -0
- package/dist/runtime/state.d.mts +2 -0
- package/dist/runtime/state.mjs +127 -0
- package/dist/runtime/traverser.d.mts +2 -0
- package/dist/runtime/traverser.mjs +213 -0
- package/dist/runtime/types.d.mts +2 -0
- package/dist/runtime/types.mjs +1 -0
- package/dist/runtime/workflow-logic-handler.d.mts +16 -0
- package/dist/runtime/workflow-logic-handler.mjs +159 -0
- package/dist/sanitizer-Bi00YjvO.d.mts +11 -0
- package/dist/sanitizer.d.mts +2 -0
- package/dist/sanitizer.mjs +37 -0
- package/dist/sdk.d.mts +1 -2
- package/dist/sdk.mjs +1 -2
- package/dist/serializer-BnmJr13R.d.mts +17 -0
- package/dist/serializer.d.mts +2 -0
- package/dist/serializer.mjs +34 -0
- package/dist/sleep-DpwYaY5b.d.mts +8 -0
- package/dist/subflow-n2IMsRe2.d.mts +8 -0
- package/dist/testing/event-logger.d.mts +62 -0
- package/dist/testing/event-logger.mjs +98 -0
- package/dist/testing/index.d.mts +5 -172
- package/dist/testing/index.mjs +6 -276
- package/dist/testing/run-with-trace.d.mts +37 -0
- package/dist/testing/run-with-trace.mjs +49 -0
- package/dist/testing/stepper.d.mts +78 -0
- package/dist/testing/stepper.mjs +100 -0
- package/dist/types-BcrXJEPI.d.mts +687 -0
- package/dist/types.d.mts +2 -0
- package/dist/types.mjs +1 -0
- package/dist/utils-BUEgr9V2.d.mts +34 -0
- package/dist/wait-2Q-LA7V7.d.mts +8 -0
- package/dist/webhook-BiCm-HLx.d.mts +12 -0
- package/package.json +4 -4
- package/dist/index-BXRN44Qf.d.mts +0 -1347
- package/dist/index.mjs.map +0 -1
- package/dist/replay-BB11M6K1.mjs.map +0 -1
- package/dist/runtime-ChsWirQN.mjs +0 -2256
- package/dist/runtime-ChsWirQN.mjs.map +0 -1
- package/dist/sdk.mjs.map +0 -1
- package/dist/testing/index.mjs.map +0 -1
package/dist/index.mjs
CHANGED
|
@@ -1,792 +1,32 @@
|
|
|
1
|
-
import {
|
|
2
|
-
import {
|
|
1
|
+
import { analyzeBlueprint, checkForCycles, generateMermaid, generateMermaidForRun } from "./analysis.mjs";
|
|
2
|
+
import { DIContainer, ServiceTokens } from "./container.mjs";
|
|
3
|
+
import { PropertyEvaluator, UnsafeEvaluator } from "./evaluator.mjs";
|
|
4
|
+
import { ConsoleLogger, NullLogger } from "./logger.mjs";
|
|
5
|
+
import { FlowcraftError } from "./errors.mjs";
|
|
6
|
+
import { AsyncContextView, Context, TrackedAsyncContext } from "./context.mjs";
|
|
7
|
+
import { WorkflowState } from "./runtime/state.mjs";
|
|
8
|
+
import { executeBatch, processResults } from "./runtime/orchestrators/utils.mjs";
|
|
9
|
+
import { DefaultOrchestrator } from "./runtime/orchestrator.mjs";
|
|
10
|
+
import { JsonSerializer } from "./serializer.mjs";
|
|
11
|
+
import { createDefaultContainer } from "./container-factory.mjs";
|
|
12
|
+
import { createErrorMapper } from "./error-mapper.mjs";
|
|
13
|
+
import { BaseNode, isNodeClass } from "./node.mjs";
|
|
14
|
+
import { FlowBuilder, createFlow } from "./flow.mjs";
|
|
15
|
+
import { InMemoryEventStore, PersistentEventBusAdapter } from "./adapters/persistent-event-bus.mjs";
|
|
16
|
+
import "./adapters/index.mjs";
|
|
17
|
+
import { lintBlueprint } from "./linter.mjs";
|
|
18
|
+
import { BatchGatherNode } from "./nodes/batch-gather.mjs";
|
|
19
|
+
import { BatchScatterNode } from "./nodes/batch-scatter.mjs";
|
|
20
|
+
import { SleepNode } from "./nodes/sleep.mjs";
|
|
21
|
+
import { GraphTraverser } from "./runtime/traverser.mjs";
|
|
22
|
+
import { SubflowNode } from "./nodes/subflow.mjs";
|
|
23
|
+
import { WaitNode } from "./nodes/wait.mjs";
|
|
24
|
+
import { WebhookNode } from "./nodes/webhook.mjs";
|
|
25
|
+
import { sanitizeBlueprint } from "./sanitizer.mjs";
|
|
26
|
+
import { ClassNodeExecutor, FunctionNodeExecutor, NodeExecutor } from "./runtime/executors.mjs";
|
|
27
|
+
import { FlowRuntime } from "./runtime/runtime.mjs";
|
|
28
|
+
import { BaseDistributedAdapter } from "./runtime/adapter.mjs";
|
|
29
|
+
import { ReplayOrchestrator } from "./runtime/orchestrators/replay.mjs";
|
|
30
|
+
import "./types.mjs";
|
|
3
31
|
|
|
4
|
-
|
|
5
|
-
function createDefaultContainer(options = {}) {
|
|
6
|
-
const container = new DIContainer();
|
|
7
|
-
container.register(ServiceTokens.Logger, options.logger || new NullLogger());
|
|
8
|
-
container.register(ServiceTokens.Serializer, options.serializer || new JsonSerializer());
|
|
9
|
-
container.register(ServiceTokens.Evaluator, options.evaluator || new PropertyEvaluator());
|
|
10
|
-
container.register(ServiceTokens.EventBus, options.eventBus || { emit: async () => {} });
|
|
11
|
-
container.register(ServiceTokens.Middleware, options.middleware || []);
|
|
12
|
-
container.register(ServiceTokens.NodeRegistry, options.registry || {});
|
|
13
|
-
container.register(ServiceTokens.BlueprintRegistry, options.blueprints || {});
|
|
14
|
-
container.register(ServiceTokens.Dependencies, options.dependencies || {});
|
|
15
|
-
container.registerFactory(ServiceTokens.Orchestrator, () => new DefaultOrchestrator());
|
|
16
|
-
return container;
|
|
17
|
-
}
|
|
18
|
-
|
|
19
|
-
//#endregion
|
|
20
|
-
//#region src/error-mapper.ts
|
|
21
|
-
/**
|
|
22
|
-
* Creates an error mapper function that enhances runtime errors with source location information.
|
|
23
|
-
* The mapper looks up node IDs in the provided manifest blueprints and returns enhanced errors
|
|
24
|
-
* that point to the original TypeScript source code.
|
|
25
|
-
*
|
|
26
|
-
* @param manifestBlueprints - The compiled blueprint manifest containing source location data
|
|
27
|
-
* @returns A function that maps errors to enhanced errors with source location information
|
|
28
|
-
*/
|
|
29
|
-
function createErrorMapper(manifestBlueprints) {
|
|
30
|
-
const locationMap = /* @__PURE__ */ new Map();
|
|
31
|
-
for (const blueprint of Object.values(manifestBlueprints)) {
|
|
32
|
-
for (const node of blueprint.nodes) if (node._sourceLocation) locationMap.set(node.id, node._sourceLocation);
|
|
33
|
-
for (const edge of blueprint.edges) if (edge._sourceLocation) {
|
|
34
|
-
const edgeKey = `${edge.source}-${edge.target}`;
|
|
35
|
-
locationMap.set(edgeKey, edge._sourceLocation);
|
|
36
|
-
}
|
|
37
|
-
}
|
|
38
|
-
return function mapError(error) {
|
|
39
|
-
if (error instanceof FlowcraftError && error.nodeId) {
|
|
40
|
-
const location = locationMap.get(error.nodeId);
|
|
41
|
-
if (location) return /* @__PURE__ */ new Error(`Workflow error at ${location.file}:${location.line}:${location.column}. Original error: ${error.message}`);
|
|
42
|
-
}
|
|
43
|
-
const nodeIdMatch = error.message.match(/nodeId[:\s]+([^\s,]+)/i);
|
|
44
|
-
if (nodeIdMatch) {
|
|
45
|
-
const nodeId = nodeIdMatch[1];
|
|
46
|
-
const location = locationMap.get(nodeId);
|
|
47
|
-
if (location) return /* @__PURE__ */ new Error(`Workflow error at ${location.file}:${location.line}:${location.column}. Original error: ${error.message}`);
|
|
48
|
-
}
|
|
49
|
-
return error;
|
|
50
|
-
};
|
|
51
|
-
}
|
|
52
|
-
|
|
53
|
-
//#endregion
|
|
54
|
-
//#region src/flow.ts
|
|
55
|
-
/**
|
|
56
|
-
* Generates a deterministic hash for a function based on its source code and a unique counter.
|
|
57
|
-
*/
|
|
58
|
-
let hashCounter = 0;
|
|
59
|
-
function _hashFunction(fn) {
|
|
60
|
-
const source = fn.toString();
|
|
61
|
-
let hash = 0;
|
|
62
|
-
for (let i = 0; i < source.length; i++) {
|
|
63
|
-
const char = source.charCodeAt(i);
|
|
64
|
-
hash = (hash << 5) - hash + char;
|
|
65
|
-
hash = hash & hash;
|
|
66
|
-
}
|
|
67
|
-
return (Math.abs(hash) + hashCounter++).toString(16);
|
|
68
|
-
}
|
|
69
|
-
/**
|
|
70
|
-
* A fluent API for programmatically constructing a WorkflowBlueprint.
|
|
71
|
-
*/
|
|
72
|
-
var FlowBuilder = class {
|
|
73
|
-
blueprint;
|
|
74
|
-
functionRegistry;
|
|
75
|
-
loopDefinitions;
|
|
76
|
-
batchDefinitions;
|
|
77
|
-
cycleEntryPoints;
|
|
78
|
-
constructor(id) {
|
|
79
|
-
this.blueprint = {
|
|
80
|
-
id,
|
|
81
|
-
nodes: [],
|
|
82
|
-
edges: []
|
|
83
|
-
};
|
|
84
|
-
this.functionRegistry = /* @__PURE__ */ new Map();
|
|
85
|
-
this.loopDefinitions = [];
|
|
86
|
-
this.batchDefinitions = [];
|
|
87
|
-
this.cycleEntryPoints = /* @__PURE__ */ new Map();
|
|
88
|
-
}
|
|
89
|
-
node(id, implementation, options) {
|
|
90
|
-
let usesKey;
|
|
91
|
-
if (isNodeClass(implementation)) {
|
|
92
|
-
usesKey = implementation.name && implementation.name !== "BaseNode" ? implementation.name : `class_${_hashFunction(implementation)}`;
|
|
93
|
-
this.functionRegistry.set(usesKey, implementation);
|
|
94
|
-
} else {
|
|
95
|
-
usesKey = `fn_${_hashFunction(implementation)}`;
|
|
96
|
-
this.functionRegistry.set(usesKey, implementation);
|
|
97
|
-
}
|
|
98
|
-
const nodeDef = {
|
|
99
|
-
id,
|
|
100
|
-
uses: usesKey,
|
|
101
|
-
...options
|
|
102
|
-
};
|
|
103
|
-
this.blueprint.nodes?.push(nodeDef);
|
|
104
|
-
return this;
|
|
105
|
-
}
|
|
106
|
-
edge(source, target, options) {
|
|
107
|
-
const edgeDef = {
|
|
108
|
-
source,
|
|
109
|
-
target,
|
|
110
|
-
...options
|
|
111
|
-
};
|
|
112
|
-
this.blueprint.edges?.push(edgeDef);
|
|
113
|
-
return this;
|
|
114
|
-
}
|
|
115
|
-
/**
|
|
116
|
-
* Creates a batch processing pattern.
|
|
117
|
-
* It takes an input array, runs a worker node on each item in parallel, and gathers the results.
|
|
118
|
-
* This method augments the Flow's TContext with a new key for the output array.
|
|
119
|
-
*
|
|
120
|
-
* @param id The base ID for this batch operation.
|
|
121
|
-
* @param worker The node implementation to run on each item.
|
|
122
|
-
* @param options Configuration for the batch operation.
|
|
123
|
-
* @returns The Flow instance with an updated context type for chaining.
|
|
124
|
-
*/
|
|
125
|
-
batch(id, worker, options) {
|
|
126
|
-
const { inputKey, outputKey } = options;
|
|
127
|
-
const scatterId = `${id}_scatter`;
|
|
128
|
-
const gatherId = `${id}_gather`;
|
|
129
|
-
this.batchDefinitions.push({
|
|
130
|
-
id,
|
|
131
|
-
scatterId,
|
|
132
|
-
gatherId
|
|
133
|
-
});
|
|
134
|
-
let workerUsesKey;
|
|
135
|
-
if (isNodeClass(worker)) {
|
|
136
|
-
workerUsesKey = worker.name && worker.name !== "BaseNode" ? worker.name : `class_batch_worker_${_hashFunction(worker)}`;
|
|
137
|
-
this.functionRegistry.set(workerUsesKey, worker);
|
|
138
|
-
} else {
|
|
139
|
-
workerUsesKey = `fn_batch_worker_${_hashFunction(worker)}`;
|
|
140
|
-
this.functionRegistry.set(workerUsesKey, worker);
|
|
141
|
-
}
|
|
142
|
-
this.blueprint.nodes?.push({
|
|
143
|
-
id: scatterId,
|
|
144
|
-
uses: "batch-scatter",
|
|
145
|
-
inputs: inputKey,
|
|
146
|
-
params: {
|
|
147
|
-
workerUsesKey,
|
|
148
|
-
outputKey,
|
|
149
|
-
gatherNodeId: gatherId,
|
|
150
|
-
chunkSize: options.chunkSize
|
|
151
|
-
}
|
|
152
|
-
});
|
|
153
|
-
this.blueprint.nodes?.push({
|
|
154
|
-
id: gatherId,
|
|
155
|
-
uses: "batch-gather",
|
|
156
|
-
params: {
|
|
157
|
-
outputKey,
|
|
158
|
-
gatherNodeId: gatherId
|
|
159
|
-
},
|
|
160
|
-
config: { joinStrategy: "all" }
|
|
161
|
-
});
|
|
162
|
-
this.edge(scatterId, gatherId);
|
|
163
|
-
return this;
|
|
164
|
-
}
|
|
165
|
-
/**
|
|
166
|
-
* Creates a sleep node that pauses workflow execution for a specified duration.
|
|
167
|
-
* @param id A unique identifier for the sleep node.
|
|
168
|
-
* @param options Configuration for the sleep duration.
|
|
169
|
-
*/
|
|
170
|
-
sleep(id, options) {
|
|
171
|
-
const nodeDef = {
|
|
172
|
-
id,
|
|
173
|
-
uses: "sleep",
|
|
174
|
-
params: { duration: options.duration }
|
|
175
|
-
};
|
|
176
|
-
this.blueprint.nodes?.push(nodeDef);
|
|
177
|
-
return this;
|
|
178
|
-
}
|
|
179
|
-
/**
|
|
180
|
-
* Creates a wait node that pauses workflow execution for external input.
|
|
181
|
-
* @param id A unique identifier for the wait node.
|
|
182
|
-
* @param options Optional configuration for the wait node.
|
|
183
|
-
*/
|
|
184
|
-
wait(id, options) {
|
|
185
|
-
const nodeDef = {
|
|
186
|
-
id,
|
|
187
|
-
uses: "wait",
|
|
188
|
-
...options
|
|
189
|
-
};
|
|
190
|
-
this.blueprint.nodes?.push(nodeDef);
|
|
191
|
-
return this;
|
|
192
|
-
}
|
|
193
|
-
/**
|
|
194
|
-
* Creates a loop pattern in the workflow graph.
|
|
195
|
-
* @param id A unique identifier for the loop construct.
|
|
196
|
-
* @param options Defines the start, end, and continuation condition of the loop.
|
|
197
|
-
* @param options.startNodeId The ID of the first node inside the loop body.
|
|
198
|
-
* @param options.endNodeId The ID of the last node inside the loop body.
|
|
199
|
-
* @param options.condition An expression that, if true, causes the loop to run again.
|
|
200
|
-
*/
|
|
201
|
-
loop(id, options) {
|
|
202
|
-
const { startNodeId, endNodeId, condition } = options;
|
|
203
|
-
this.loopDefinitions.push({
|
|
204
|
-
id,
|
|
205
|
-
startNodeId,
|
|
206
|
-
endNodeId,
|
|
207
|
-
condition
|
|
208
|
-
});
|
|
209
|
-
this.blueprint.nodes?.push({
|
|
210
|
-
id,
|
|
211
|
-
uses: "loop-controller",
|
|
212
|
-
params: { condition },
|
|
213
|
-
config: { joinStrategy: "any" }
|
|
214
|
-
});
|
|
215
|
-
this.edge(endNodeId, id);
|
|
216
|
-
this.edge(id, startNodeId, {
|
|
217
|
-
action: "continue",
|
|
218
|
-
transform: `context["${endNodeId}"]`
|
|
219
|
-
});
|
|
220
|
-
return this;
|
|
221
|
-
}
|
|
222
|
-
/**
|
|
223
|
-
* Sets the preferred entry point for a cycle in non-DAG workflows.
|
|
224
|
-
* This helps remove ambiguity when the runtime needs to choose a starting node for cycles.
|
|
225
|
-
* @param nodeId The ID of the node to use as the entry point for cycles containing this node.
|
|
226
|
-
*/
|
|
227
|
-
setCycleEntryPoint(nodeId) {
|
|
228
|
-
this.cycleEntryPoints.set(nodeId, nodeId);
|
|
229
|
-
return this;
|
|
230
|
-
}
|
|
231
|
-
toBlueprint() {
|
|
232
|
-
if (!this.blueprint.nodes || this.blueprint.nodes.length === 0) throw new Error("Cannot build a blueprint with no nodes.");
|
|
233
|
-
const finalEdges = [];
|
|
234
|
-
const processedOriginalEdges = /* @__PURE__ */ new Set();
|
|
235
|
-
const allOriginalEdges = this.blueprint.edges || [];
|
|
236
|
-
for (const loopDef of this.loopDefinitions) {
|
|
237
|
-
const edgesToRewire = allOriginalEdges.filter((e) => e.source === loopDef.id && e.target !== loopDef.startNodeId);
|
|
238
|
-
for (const edge of edgesToRewire) {
|
|
239
|
-
finalEdges.push({
|
|
240
|
-
...edge,
|
|
241
|
-
action: edge.action || "break",
|
|
242
|
-
transform: `context["${loopDef.endNodeId}"]`
|
|
243
|
-
});
|
|
244
|
-
processedOriginalEdges.add(edge);
|
|
245
|
-
}
|
|
246
|
-
}
|
|
247
|
-
for (const batchDef of this.batchDefinitions) {
|
|
248
|
-
const incomingEdges = allOriginalEdges.filter((e) => e.target === batchDef.id);
|
|
249
|
-
for (const edge of incomingEdges) {
|
|
250
|
-
finalEdges.push({
|
|
251
|
-
...edge,
|
|
252
|
-
target: batchDef.scatterId
|
|
253
|
-
});
|
|
254
|
-
processedOriginalEdges.add(edge);
|
|
255
|
-
}
|
|
256
|
-
const outgoingEdges = allOriginalEdges.filter((e) => e.source === batchDef.id);
|
|
257
|
-
for (const edge of outgoingEdges) {
|
|
258
|
-
finalEdges.push({
|
|
259
|
-
...edge,
|
|
260
|
-
source: batchDef.gatherId
|
|
261
|
-
});
|
|
262
|
-
processedOriginalEdges.add(edge);
|
|
263
|
-
}
|
|
264
|
-
}
|
|
265
|
-
for (const edge of allOriginalEdges) if (!processedOriginalEdges.has(edge)) finalEdges.push(edge);
|
|
266
|
-
this.blueprint.edges = finalEdges;
|
|
267
|
-
for (const loopDef of this.loopDefinitions) {
|
|
268
|
-
const startNode = this.blueprint.nodes?.find((n) => n.id === loopDef.startNodeId);
|
|
269
|
-
const endNode = this.blueprint.nodes?.find((n) => n.id === loopDef.endNodeId);
|
|
270
|
-
if (!startNode) throw new Error(`Loop '${loopDef.id}' references non-existent start node '${loopDef.startNodeId}'.`);
|
|
271
|
-
if (!endNode) throw new Error(`Loop '${loopDef.id}' references non-existent end node '${loopDef.endNodeId}'.`);
|
|
272
|
-
}
|
|
273
|
-
if (this.cycleEntryPoints.size > 0) this.blueprint.metadata = {
|
|
274
|
-
...this.blueprint.metadata,
|
|
275
|
-
cycleEntryPoints: Array.from(this.cycleEntryPoints.keys())
|
|
276
|
-
};
|
|
277
|
-
return this.blueprint;
|
|
278
|
-
}
|
|
279
|
-
getFunctionRegistry() {
|
|
280
|
-
return this.functionRegistry;
|
|
281
|
-
}
|
|
282
|
-
/**
|
|
283
|
-
* Runs this flow on the given runtime, automatically passing the function registry.
|
|
284
|
-
* Convenience wrapper around `runtime.run(blueprint, initialState, { functionRegistry })`.
|
|
285
|
-
*/
|
|
286
|
-
async run(runtime, initialState = {}, options) {
|
|
287
|
-
return runtime.run(this.toBlueprint(), initialState, {
|
|
288
|
-
...options,
|
|
289
|
-
functionRegistry: this.functionRegistry
|
|
290
|
-
});
|
|
291
|
-
}
|
|
292
|
-
/**
|
|
293
|
-
* Resumes this flow on the given runtime, automatically passing the function registry.
|
|
294
|
-
* Convenience wrapper around `runtime.resume(blueprint, ...)`.
|
|
295
|
-
*/
|
|
296
|
-
async resume(runtime, serializedContext, resumeData, nodeId, options) {
|
|
297
|
-
return runtime.resume(this.toBlueprint(), serializedContext, resumeData, nodeId, {
|
|
298
|
-
...options,
|
|
299
|
-
functionRegistry: this.functionRegistry
|
|
300
|
-
});
|
|
301
|
-
}
|
|
302
|
-
toGraphRepresentation() {
|
|
303
|
-
const blueprint = this.toBlueprint();
|
|
304
|
-
const uiNodes = [];
|
|
305
|
-
const uiEdges = [];
|
|
306
|
-
const ignoredNodeIds = /* @__PURE__ */ new Set();
|
|
307
|
-
for (const loopDef of this.loopDefinitions) {
|
|
308
|
-
const id = loopDef.id;
|
|
309
|
-
ignoredNodeIds.add(id);
|
|
310
|
-
uiEdges.push({
|
|
311
|
-
source: loopDef.endNodeId,
|
|
312
|
-
target: loopDef.startNodeId,
|
|
313
|
-
data: {
|
|
314
|
-
isLoopback: true,
|
|
315
|
-
condition: loopDef.condition,
|
|
316
|
-
label: `continue if: ${loopDef.condition}`
|
|
317
|
-
}
|
|
318
|
-
});
|
|
319
|
-
const breakEdges = blueprint.edges.filter((edge) => edge.source === id && edge.action === "break");
|
|
320
|
-
for (const breakEdge of breakEdges) uiEdges.push({
|
|
321
|
-
...breakEdge,
|
|
322
|
-
source: loopDef.endNodeId
|
|
323
|
-
});
|
|
324
|
-
const incomingEdges = blueprint.edges.filter((edge) => edge.target === id && edge.source !== loopDef.endNodeId);
|
|
325
|
-
for (const incomingEdge of incomingEdges) uiEdges.push({
|
|
326
|
-
...incomingEdge,
|
|
327
|
-
target: loopDef.startNodeId
|
|
328
|
-
});
|
|
329
|
-
}
|
|
330
|
-
const scatterNodes = blueprint.nodes.filter((n) => n.uses === "batch-scatter");
|
|
331
|
-
for (const scatterNode of scatterNodes) {
|
|
332
|
-
const gatherNodeId = scatterNode.params?.gatherNodeId;
|
|
333
|
-
if (!gatherNodeId) continue;
|
|
334
|
-
ignoredNodeIds.add(scatterNode.id);
|
|
335
|
-
ignoredNodeIds.add(gatherNodeId);
|
|
336
|
-
const batchId = scatterNode.id.replace("_scatter", "");
|
|
337
|
-
const gatherNode = blueprint.nodes.find((n) => n.id === gatherNodeId);
|
|
338
|
-
uiNodes.push({
|
|
339
|
-
id: batchId,
|
|
340
|
-
uses: scatterNode.params?.workerUsesKey,
|
|
341
|
-
type: "batch-worker",
|
|
342
|
-
data: {
|
|
343
|
-
label: `Batch: ${batchId}`,
|
|
344
|
-
isBatchPlaceholder: true,
|
|
345
|
-
workerUsesKey: scatterNode.params?.workerUsesKey,
|
|
346
|
-
inputKey: scatterNode.inputs,
|
|
347
|
-
outputKey: gatherNode?.params?.outputKey
|
|
348
|
-
}
|
|
349
|
-
});
|
|
350
|
-
const incomingEdges = blueprint.edges.filter((e) => e.target === scatterNode.id);
|
|
351
|
-
for (const edge of incomingEdges) uiEdges.push({
|
|
352
|
-
...edge,
|
|
353
|
-
target: batchId
|
|
354
|
-
});
|
|
355
|
-
const outgoingEdges = blueprint.edges.filter((e) => e.source === gatherNodeId);
|
|
356
|
-
for (const edge of outgoingEdges) uiEdges.push({
|
|
357
|
-
...edge,
|
|
358
|
-
source: batchId
|
|
359
|
-
});
|
|
360
|
-
}
|
|
361
|
-
for (const node of blueprint.nodes) if (!ignoredNodeIds.has(node.id)) uiNodes.push(node);
|
|
362
|
-
for (const edge of blueprint.edges) if (!ignoredNodeIds.has(edge.source) && !ignoredNodeIds.has(edge.target)) {
|
|
363
|
-
if (!uiEdges.some((e) => e.source === edge.source && e.target === edge.target && e.action === edge.action)) uiEdges.push(edge);
|
|
364
|
-
}
|
|
365
|
-
return {
|
|
366
|
-
nodes: uiNodes,
|
|
367
|
-
edges: uiEdges
|
|
368
|
-
};
|
|
369
|
-
}
|
|
370
|
-
};
|
|
371
|
-
/**
|
|
372
|
-
* Helper function to create a new Flow builder instance.
|
|
373
|
-
*/
|
|
374
|
-
function createFlow(id) {
|
|
375
|
-
return new FlowBuilder(id);
|
|
376
|
-
}
|
|
377
|
-
|
|
378
|
-
//#endregion
|
|
379
|
-
//#region src/linter.ts
|
|
380
|
-
/**
|
|
381
|
-
* Statically analyzes a workflow blueprint against a registry of implementations
|
|
382
|
-
* to find common errors before runtime.
|
|
383
|
-
*
|
|
384
|
-
* @param blueprint The WorkflowBlueprint to analyze.
|
|
385
|
-
* @param registry A map of node implementations (functions or classes) to check against.
|
|
386
|
-
* @returns A LinterResult object containing any issues found.
|
|
387
|
-
*/
|
|
388
|
-
function lintBlueprint(blueprint, registry, blueprints) {
|
|
389
|
-
const issues = [];
|
|
390
|
-
const nodeIds = new Set(blueprint.nodes.map((n) => n.id));
|
|
391
|
-
const registryKeys = registry instanceof Map ? new Set(registry.keys()) : new Set(Object.keys(registry));
|
|
392
|
-
for (const node of blueprint.nodes) if (!node.uses.startsWith("batch-") && !node.uses.startsWith("loop-") && !registryKeys.has(node.uses)) issues.push({
|
|
393
|
-
code: "MISSING_NODE_IMPLEMENTATION",
|
|
394
|
-
message: `Node implementation key '${node.uses}' is not found in the provided registry.`,
|
|
395
|
-
nodeId: node.id
|
|
396
|
-
});
|
|
397
|
-
for (const node of blueprint.nodes) {
|
|
398
|
-
if (node.uses.startsWith("batch-") && node.params?.workerUsesKey) {
|
|
399
|
-
if (!registryKeys.has(node.params.workerUsesKey)) issues.push({
|
|
400
|
-
code: "INVALID_BATCH_WORKER_KEY",
|
|
401
|
-
message: `Batch node '${node.id}' references workerUsesKey '${node.params.workerUsesKey}' which is not found in the registry.`,
|
|
402
|
-
nodeId: node.id
|
|
403
|
-
});
|
|
404
|
-
}
|
|
405
|
-
if (node.uses === "subflow" && node.params?.blueprintId) {
|
|
406
|
-
if (!blueprints?.[node.params.blueprintId]) issues.push({
|
|
407
|
-
code: "INVALID_SUBFLOW_BLUEPRINT_ID",
|
|
408
|
-
message: `Subflow node '${node.id}' references blueprintId '${node.params.blueprintId}' which is not found in the blueprints registry.`,
|
|
409
|
-
nodeId: node.id
|
|
410
|
-
});
|
|
411
|
-
}
|
|
412
|
-
}
|
|
413
|
-
for (const edge of blueprint.edges || []) {
|
|
414
|
-
if (!nodeIds.has(edge.source)) issues.push({
|
|
415
|
-
code: "INVALID_EDGE_SOURCE",
|
|
416
|
-
message: `Edge source '${edge.source}' does not correspond to a valid node ID.`,
|
|
417
|
-
relatedId: edge.target
|
|
418
|
-
});
|
|
419
|
-
if (!nodeIds.has(edge.target)) issues.push({
|
|
420
|
-
code: "INVALID_EDGE_TARGET",
|
|
421
|
-
message: `Edge target '${edge.target}' does not correspond to a valid node ID.`,
|
|
422
|
-
relatedId: edge.source
|
|
423
|
-
});
|
|
424
|
-
}
|
|
425
|
-
if (blueprint.nodes.length > 1) {
|
|
426
|
-
const analysis = analyzeBlueprint(blueprint);
|
|
427
|
-
const connectedNodes = /* @__PURE__ */ new Set();
|
|
428
|
-
const nodesToVisit = [...analysis.startNodeIds];
|
|
429
|
-
const visited = /* @__PURE__ */ new Set();
|
|
430
|
-
while (nodesToVisit.length > 0) {
|
|
431
|
-
const currentId = nodesToVisit.pop();
|
|
432
|
-
if (!currentId || visited.has(currentId)) continue;
|
|
433
|
-
visited.add(currentId);
|
|
434
|
-
connectedNodes.add(currentId);
|
|
435
|
-
for (const targetEdge of blueprint.edges.filter((e) => e.source === currentId)) nodesToVisit.push(targetEdge.target);
|
|
436
|
-
}
|
|
437
|
-
for (const nodeId of nodeIds) if (!connectedNodes.has(nodeId)) issues.push({
|
|
438
|
-
code: "ORPHAN_NODE",
|
|
439
|
-
message: `Node '${nodeId}' is not reachable from any start node.`,
|
|
440
|
-
nodeId
|
|
441
|
-
});
|
|
442
|
-
}
|
|
443
|
-
return {
|
|
444
|
-
isValid: issues.length === 0,
|
|
445
|
-
issues
|
|
446
|
-
};
|
|
447
|
-
}
|
|
448
|
-
|
|
449
|
-
//#endregion
|
|
450
|
-
//#region src/runtime/adapter.ts
|
|
451
|
-
/**
|
|
452
|
-
* The base class for all distributed adapters. It handles the technology-agnostic
|
|
453
|
-
* orchestration logic and leaves queue-specific implementation to subclasses.
|
|
454
|
-
*/
|
|
455
|
-
var BaseDistributedAdapter = class {
|
|
456
|
-
runtime;
|
|
457
|
-
store;
|
|
458
|
-
serializer;
|
|
459
|
-
logger;
|
|
460
|
-
eventBus;
|
|
461
|
-
constructor(options) {
|
|
462
|
-
this.runtime = new FlowRuntime({
|
|
463
|
-
...options.runtimeOptions,
|
|
464
|
-
dependencies: {
|
|
465
|
-
...options.runtimeOptions.dependencies,
|
|
466
|
-
adapter: this
|
|
467
|
-
}
|
|
468
|
-
});
|
|
469
|
-
this.store = options.coordinationStore;
|
|
470
|
-
this.serializer = options.runtimeOptions.serializer || new JsonSerializer();
|
|
471
|
-
this.logger = options.runtimeOptions.logger || new ConsoleLogger();
|
|
472
|
-
this.eventBus = options.eventBus;
|
|
473
|
-
this.logger.info("[Adapter] BaseDistributedAdapter initialized.");
|
|
474
|
-
}
|
|
475
|
-
/**
|
|
476
|
-
* Starts the worker, which begins listening for and processing jobs from the queue.
|
|
477
|
-
*/
|
|
478
|
-
start() {
|
|
479
|
-
this.logger.info("[Adapter] Starting worker...");
|
|
480
|
-
this.processJobs(this.handleJob.bind(this));
|
|
481
|
-
}
|
|
482
|
-
/**
|
|
483
|
-
* Hook called by the execution factory to determine if a node's automatic
|
|
484
|
-
* retries should be executed synchronously in-process (true) or delegated
|
|
485
|
-
* to the Queue backoff behavior configured by the adapter (false).
|
|
486
|
-
*/
|
|
487
|
-
shouldRetryInProcess(_nodeDef) {
|
|
488
|
-
return true;
|
|
489
|
-
}
|
|
490
|
-
/**
|
|
491
|
-
* Returns queue-level retry options for enqueuing successor jobs.
|
|
492
|
-
* Only used when shouldRetryInProcess() returns false.
|
|
493
|
-
*/
|
|
494
|
-
getQueueRetryOptions(_nodeDef) {}
|
|
495
|
-
/**
|
|
496
|
-
* Hook called at the start of job processing. Subclasses can override this
|
|
497
|
-
* to perform additional setup (e.g., timestamp tracking for reconciliation).
|
|
498
|
-
*/
|
|
499
|
-
async onJobStart(_runId, _blueprintId, _nodeId) {}
|
|
500
|
-
/**
 * The main handler for processing a single job from the queue.
 *
 * High-level flow:
 *   1. Resolve the blueprint; publish a terminal failure if it is unknown.
 *   2. Reject the job on a blueprint-version mismatch with the persisted run state.
 *   3. On the run's first job, persist run metadata (blueprintId / version).
 *   4. Keep coordination keys alive via a periodic heartbeat while executing.
 *   5. Execute the node (reusing a previously stored output for idempotency),
 *      then delegate successor handling to handleNodeCompletion.
 *
 * @param job Queue payload: { runId, blueprintId, nodeId, isLastAttempt?, attempt? }.
 */
async handleJob(job) {
  const { runId, blueprintId, nodeId } = job;
  const startTime = Date.now();
  // Subclass hook (e.g. timestamp tracking for reconciliation).
  await this.onJobStart(runId, blueprintId, nodeId);
  const blueprint = this.runtime.options.blueprints?.[blueprintId];
  if (!blueprint) {
    // Unknown blueprint is unrecoverable for this worker: fail the whole run.
    const reason = `Blueprint with ID '${blueprintId}' not found in the worker's runtime registry.`;
    this.logger.error(`[Adapter] FATAL: ${reason}`);
    await this.publishFinalResult(runId, {
      status: "failed",
      reason
    });
    return;
  }
  const context = this.createContext(runId);
  // Refuse to run a job under a different blueprint version than the one the
  // run's state was written with.
  // NOTE(review): this check runs BEFORE the first-job branch below persists
  // "blueprintVersion"; on a brand-new run it relies on the missing stored
  // value comparing equal to `currentVersion` (or on the enqueue path having
  // already seeded the context) — confirm against the run-start code path.
  const storedVersion = await context.get("blueprintVersion");
  const currentVersion = blueprint.metadata?.version || null;
  if (storedVersion !== currentVersion) {
    const reason = `Blueprint version mismatch: stored version '${storedVersion}', current version '${currentVersion}'. Rejecting job to prevent state corruption.`;
    this.logger.warn(`[Adapter] Version mismatch for run ${runId}, node ${nodeId}: ${reason}`);
    return;
  }
  // First job of the run: persist run metadata in the context and mirror the
  // blueprint id into the coordination store so reconcile() can recover it.
  if (!await context.has("blueprintId")) {
    await context.set("blueprintId", blueprintId);
    await context.set("blueprintVersion", blueprint.metadata?.version || null);
    const blueprintKey = `flowcraft:blueprint:${runId}`;
    await this.store.setIfNotExist(blueprintKey, blueprintId, 3600);
  }
  const joinLockKey = `flowcraft:joinlock:${runId}:${nodeId}`;
  const fanInKey = `flowcraft:fanin:${runId}:${nodeId}`;
  const blueprintKey = `flowcraft:blueprint:${runId}`;
  // Heartbeat: every 30 minutes (18e5 ms) re-extend the 1-hour TTLs on the
  // coordination keys so long-running nodes don't lose their locks/counters.
  const heartbeatInterval = setInterval(async () => {
    await this.store.extendTTL(joinLockKey, 3600);
    await this.store.extendTTL(fanInKey, 3600);
    await this.store.extendTTL(blueprintKey, 3600);
    this.logger.debug(`[Adapter] Extended TTLs for run ${runId}, node ${nodeId}`);
  }, 18e5);
  try {
    const state = new WorkflowState(await context.toJSON(), context);
    // Only an explicit `false` means "the queue will retry this job again".
    state.isLastAttempt = job.isLastAttempt !== false;
    let result;
    if (await context.has(`_outputs.${nodeId}`)) {
      // Idempotency guard: the node already ran (e.g. a redelivered job);
      // reuse its stored output instead of executing again.
      this.logger.info(`[Adapter] Node '${nodeId}' already completed, skipping re-execution.`);
      result = { output: await context.get(`_outputs.${nodeId}`) };
    } else {
      const nodeDef = blueprint.nodes.find((n) => n.id === nodeId);
      if (nodeDef && !this.shouldRetryInProcess(nodeDef)) {
        // Retries are delegated to the queue, so cap in-process retries at 1.
        // NOTE(review): this mutates the shared blueprint node definition
        // in place — confirm that is acceptable for concurrently running jobs.
        if (nodeDef.config?.maxRetries && nodeDef.config.maxRetries > 1) nodeDef.config.maxRetries = 1;
      }
      result = await this.runtime.executeNode(blueprint, nodeId, state);
      // Record the output both as the idempotency marker and for successors.
      await context.set(`_outputs.${nodeId}`, result.output);
      const stateContext = state.getContext();
      if (stateContext instanceof TrackedAsyncContext) {
        // Flush any buffered context deltas accumulated during execution.
        const deltas = stateContext.getDeltas();
        if (deltas.length > 0) {
          await stateContext.patch(deltas);
          stateContext.clearDeltas();
        }
      }
    }
    await this.handleNodeCompletion({
      runId,
      blueprintId,
      nodeId,
      blueprint,
      result,
      context,
      startTime,
      heartbeatInterval
    });
  } catch (error) {
    const reason = error.message || "Unknown execution error";
    this.logger.error(`[Adapter] FATAL: Job for node '${nodeId}' failed for Run ID '${runId}': ${reason}`);
    if (this.eventBus) await this.eventBus.emit({
      type: "job:failed",
      payload: {
        runId,
        blueprintId,
        nodeId,
        error
      }
    });
    // If the queue still has attempts left, let it redeliver instead of
    // failing the run.
    if (job.isLastAttempt === false) {
      this.logger.warn(`[Adapter] Job for node '${nodeId}' failed (queue attempt ${job.attempt || 1}), delegating retry to queue.`);
      return;
    }
    // Final attempt: fail the run and poison/cancel downstream joins so they
    // don't stall waiting for this node.
    await this.publishFinalResult(runId, {
      status: "failed",
      reason
    });
    await this.writePoisonPillForSuccessors(runId, blueprint, nodeId);
  } finally {
    // Always stop the TTL heartbeat, whatever the outcome.
    clearInterval(heartbeatInterval);
  }
}
|
|
598
|
-
/**
 * Handles post-execution logic: terminal node checks, successor determination,
 * and enqueueing of ready nodes. Extracted to support the idempotency guard.
 *
 * Completion is detected purely from the persisted context: every key with the
 * "_outputs." prefix marks a completed node. The workflow is declared complete
 * only when ALL terminal nodes have an output.
 *
 * @param params { runId, blueprintId, nodeId, blueprint, result, context,
 *                 startTime, heartbeatInterval }
 */
async handleNodeCompletion(params) {
  const { runId, blueprintId, nodeId, blueprint, result, context, startTime, heartbeatInterval } = params;
  const analysis = analyzeBlueprint(blueprint);
  const isTerminalNode = analysis.terminalNodeIds.includes(nodeId);
  if (isTerminalNode) {
    // Derive the set of completed nodes from the "_outputs.<id>" context keys.
    const allContextKeys = Object.keys(await context.toJSON());
    const completedNodes = /* @__PURE__ */ new Set();
    for (const key of allContextKeys) if (key.startsWith("_outputs.")) completedNodes.add(key.substring(9));
    if (analysis.terminalNodeIds.every((terminalId) => completedNodes.has(terminalId))) {
      // Every terminal node has produced output: publish the final result.
      this.logger.info(`[Adapter] All terminal nodes completed for Run ID: ${runId}. Declaring workflow complete.`);
      const finalContext = await context.toJSON();
      const finalResult = {
        context: finalContext,
        serializedContext: this.serializer.serialize(finalContext),
        status: "completed"
      };
      await this.publishFinalResult(runId, {
        status: "completed",
        payload: finalResult
      });
      clearInterval(heartbeatInterval);
      return;
    } else this.logger.info(`[Adapter] Terminal node '${nodeId}' completed for Run ID '${runId}', but other terminal nodes are still running.`);
  }
  const nextNodes = await this.runtime.determineNextNodes(blueprint, nodeId, result, context, runId);
  if (nextNodes.length === 0 && !isTerminalNode) {
    // Dead-end branch (e.g. a condition routed nowhere): terminate quietly.
    this.logger.info(`[Adapter] Non-terminal node '${nodeId}' reached end of branch for Run ID '${runId}'. This branch will now terminate.`);
    clearInterval(heartbeatInterval);
    return;
  }
  for (const { node: nextNodeDef, edge } of nextNodes) {
    await this.runtime.applyEdgeTransform(edge, result, nextNodeDef, context, void 0, runId);
    // Fan-in gate: only the predecessor that satisfies the join strategy
    // actually enqueues the successor.
    if (await this.isReadyForFanIn(runId, blueprint, nextNodeDef.id)) {
      this.logger.info(`[Adapter] Node '${nextNodeDef.id}' is ready. Enqueuing job.`);
      await this.enqueueJob({
        runId,
        blueprintId,
        nodeId: nextNodeDef.id
      });
      if (this.eventBus) await this.eventBus.emit({
        type: "job:enqueued",
        payload: {
          runId,
          blueprintId,
          nodeId: nextNodeDef.id
        }
      });
    } else this.logger.info(`[Adapter] Node '${nextNodeDef.id}' is waiting for other predecessors to complete.`);
  }
  // Observability: report how long this job took end-to-end.
  const duration = Date.now() - startTime;
  if (this.eventBus) await this.eventBus.emit({
    type: "job:processed",
    payload: {
      runId,
      blueprintId,
      nodeId,
      duration,
      success: true
    }
  });
}
|
|
663
|
-
/**
 * Encapsulates the fan-in join logic using the coordination store.
 *
 * Returns true when the calling predecessor is the one that should enqueue
 * `targetNodeId`; returns false when another predecessor will (or already
 * did); throws when the target was poisoned/cancelled by a failed predecessor.
 *
 * @param runId        The workflow run being coordinated.
 * @param blueprint    The blueprint containing the target node and its edges.
 * @param targetNodeId The successor node whose readiness is being decided.
 * @throws If the target node is missing, poisoned, or cancelled.
 */
async isReadyForFanIn(runId, blueprint, targetNodeId) {
  const targetNode = blueprint.nodes.find((n) => n.id === targetNodeId);
  if (!targetNode) throw new Error(`Node '${targetNodeId}' not found in blueprint`);
  const joinStrategy = targetNode.config?.joinStrategy || "all";
  const predecessors = blueprint.edges.filter((e) => e.target === targetNodeId);
  // Zero or one incoming edge: no coordination needed, always ready.
  if (predecessors.length <= 1) return true;
  // A failed 'all'-join predecessor leaves a poison pill; fail fast here.
  const poisonKey = `flowcraft:fanin:poison:${runId}:${targetNodeId}`;
  if (await this.store.get(poisonKey)) {
    this.logger.info(`[Adapter] Node '${targetNodeId}' is poisoned due to failed predecessor. Failing immediately.`);
    throw new Error(`Node '${targetNodeId}' failed due to poisoned predecessor in run '${runId}'`);
  }
  if (joinStrategy === "any") {
    // 'any' join: first predecessor to grab the join lock wins and runs the node.
    const lockKey = `flowcraft:joinlock:${runId}:${targetNodeId}`;
    if (!await this.store.setIfNotExist(lockKey, "locked", 3600)) {
      const cancelKey = `flowcraft:fanin:cancel:${runId}:${targetNodeId}`;
      // NOTE(review): when the lock is already held, this branch WRITES the
      // cancel marker itself and only throws if the marker pre-existed (set by
      // writePoisonPillForSuccessors on a predecessor failure) — confirm that
      // a losing healthy predecessor creating the cancel key is intentional.
      if (!await this.store.setIfNotExist(cancelKey, "cancelled", 3600)) {
        this.logger.info(`[Adapter] Node '${targetNodeId}' is cancelled due to failed predecessor. Failing immediately.`);
        throw new Error(`Node '${targetNodeId}' failed due to cancelled predecessor in run '${runId}'`);
      }
      return false;
    }
    return true;
  } else {
    // 'all' join: atomically count arrivals; only the last predecessor to
    // arrive proceeds, and the counter is deleted for potential reuse.
    const fanInKey = `flowcraft:fanin:${runId}:${targetNodeId}`;
    if (await this.store.increment(fanInKey, 3600) >= predecessors.length) {
      await this.store.delete(fanInKey);
      return true;
    }
    return false;
  }
}
|
|
697
|
-
/**
 * Reconciles the state of a workflow run. It inspects the persisted
 * context to find completed nodes, determines the next set of executable
 * nodes (the frontier), and enqueues jobs for them if they aren't
 * already running. This is the core of the resume functionality.
 *
 * @param runId The unique ID of the workflow execution to reconcile.
 * @returns The set of node IDs that were enqueued for execution.
 * @throws If neither the context nor the coordination store knows the
 *         run's blueprintId, or the blueprint is not registered.
 */
async reconcile(runId) {
  const context = this.createContext(runId);
  let blueprintId = await context.get("blueprintId");
  if (!blueprintId) {
    // The context lost the metadata; fall back to the mirror key written to
    // the coordination store by handleJob, and re-persist it.
    const blueprintKey = `flowcraft:blueprint:${runId}`;
    blueprintId = await this.store.get(blueprintKey);
    if (blueprintId) await context.set("blueprintId", blueprintId);
    else throw new Error(`Cannot reconcile runId '${runId}': blueprintId not found in context or coordination store.`);
  }
  const blueprint = this.runtime.options.blueprints?.[blueprintId];
  // Backfill the version marker for runs started before it was recorded.
  if (blueprint && !await context.has("blueprintVersion")) await context.set("blueprintVersion", blueprint.metadata?.version || null);
  if (!blueprint) throw new Error(`Cannot reconcile runId '${runId}': Blueprint with ID '${blueprintId}' not found.`);
  // Completed nodes are those with a persisted "_outputs.<id>" key.
  const state = await context.toJSON();
  const completedNodes = /* @__PURE__ */ new Set();
  for (const key of Object.keys(state)) if (key.startsWith("_outputs.")) completedNodes.add(key.substring(9));
  const frontier = this.calculateResumedFrontier(blueprint, completedNodes);
  const enqueuedNodes = /* @__PURE__ */ new Set();
  for (const nodeId of frontier) {
    const joinStrategy = blueprint.nodes.find((n) => n.id === nodeId)?.config?.joinStrategy || "all";
    // Skip nodes whose predecessors failed (poison pill present).
    const poisonKey = `flowcraft:fanin:poison:${runId}:${nodeId}`;
    if (await this.store.get(poisonKey)) {
      this.logger.info(`[Adapter] Reconciling: Node '${nodeId}' is poisoned, skipping.`, { runId });
      continue;
    }
    let shouldEnqueue = false;
    if (joinStrategy === "any") {
      // 'any' joins reuse the long-lived join lock: if it's held, the node
      // already ran (or is running) for this run.
      const lockKey = `flowcraft:joinlock:${runId}:${nodeId}`;
      if (await this.store.setIfNotExist(lockKey, "locked-by-reconcile", 3600)) shouldEnqueue = true;
      else this.logger.info(`[Adapter] Reconciling: Node '${nodeId}' is an 'any' join and is already locked.`, { runId });
    } else {
      // Other nodes use a short (120s) reconcile lock so repeated reconcile
      // passes don't double-enqueue, while a crashed worker's lock expires.
      const lockKey = `flowcraft:nodelock:${runId}:${nodeId}`;
      if (await this.store.setIfNotExist(lockKey, "locked", 120)) shouldEnqueue = true;
      else this.logger.info(`[Adapter] Reconciling: Node '${nodeId}' is already locked.`, { runId });
    }
    if (shouldEnqueue) {
      this.logger.info(`[Adapter] Reconciling: Enqueuing ready job for node '${nodeId}'`, { runId });
      await this.enqueueJob({
        runId,
        blueprintId: blueprint.id,
        nodeId
      });
      enqueuedNodes.add(nodeId);
    }
  }
  return enqueuedNodes;
}
|
|
752
|
-
calculateResumedFrontier(blueprint, completedNodes) {
|
|
753
|
-
const newFrontier = /* @__PURE__ */ new Set();
|
|
754
|
-
const allPredecessors = /* @__PURE__ */ new Map();
|
|
755
|
-
for (const node of blueprint.nodes) allPredecessors.set(node.id, /* @__PURE__ */ new Set());
|
|
756
|
-
for (const edge of blueprint.edges) allPredecessors.get(edge.target)?.add(edge.source);
|
|
757
|
-
for (const node of blueprint.nodes) {
|
|
758
|
-
if (completedNodes.has(node.id)) continue;
|
|
759
|
-
const predecessors = allPredecessors.get(node.id) ?? /* @__PURE__ */ new Set();
|
|
760
|
-
if (predecessors.size === 0 && !completedNodes.has(node.id)) {
|
|
761
|
-
newFrontier.add(node.id);
|
|
762
|
-
continue;
|
|
763
|
-
}
|
|
764
|
-
const joinStrategy = node.config?.joinStrategy || "all";
|
|
765
|
-
const completedPredecessors = [...predecessors].filter((p) => completedNodes.has(p));
|
|
766
|
-
if (joinStrategy === "any" ? completedPredecessors.length > 0 : completedPredecessors.length === predecessors.size) newFrontier.add(node.id);
|
|
767
|
-
}
|
|
768
|
-
return newFrontier;
|
|
769
|
-
}
|
|
770
|
-
/**
|
|
771
|
-
* Writes a poison pill for 'all' join successors and a cancellation pill for 'any' join successors of a failed node to prevent stalling or ambiguous states.
|
|
772
|
-
*/
|
|
773
|
-
async writePoisonPillForSuccessors(runId, blueprint, failedNodeId) {
|
|
774
|
-
const successors = blueprint.edges.filter((edge) => edge.source === failedNodeId).map((edge) => edge.target).map((targetId) => blueprint.nodes.find((node) => node.id === targetId)).filter((node) => node);
|
|
775
|
-
for (const successor of successors) if (successor) {
|
|
776
|
-
const joinStrategy = successor.config?.joinStrategy || "all";
|
|
777
|
-
if (joinStrategy === "all") {
|
|
778
|
-
const poisonKey = `flowcraft:fanin:poison:${runId}:${successor.id}`;
|
|
779
|
-
await this.store.setIfNotExist(poisonKey, "poisoned", 3600);
|
|
780
|
-
this.logger.info(`[Adapter] Wrote poison pill for 'all' join node '${successor.id}' due to failed predecessor '${failedNodeId}'`);
|
|
781
|
-
} else if (joinStrategy === "any") {
|
|
782
|
-
const cancelKey = `flowcraft:fanin:cancel:${runId}:${successor.id}`;
|
|
783
|
-
await this.store.setIfNotExist(cancelKey, "cancelled", 3600);
|
|
784
|
-
this.logger.info(`[Adapter] Wrote cancellation pill for 'any' join node '${successor.id}' due to failed predecessor '${failedNodeId}'`);
|
|
785
|
-
}
|
|
786
|
-
}
|
|
787
|
-
}
|
|
788
|
-
};
|
|
789
|
-
|
|
790
|
-
//#endregion
|
|
791
|
-
export { AsyncContextView, BaseDistributedAdapter, BaseNode, BatchGatherNode, BatchScatterNode, ClassNodeExecutor, ConsoleLogger, Context, DIContainer, DefaultOrchestrator, FlowBuilder, FlowRuntime, FlowcraftError, FunctionNodeExecutor, GraphTraverser, InMemoryEventStore, JsonSerializer, NodeExecutor, NullLogger, PersistentEventBusAdapter, PropertyEvaluator, ReplayOrchestrator, ServiceTokens, SleepNode, SubflowNode, TrackedAsyncContext, UnsafeEvaluator, WaitNode, WebhookNode, WorkflowState, analyzeBlueprint, checkForCycles, createDefaultContainer, createErrorMapper, createFlow, executeBatch, generateMermaid, generateMermaidForRun, isNodeClass, lintBlueprint, processResults, sanitizeBlueprint };
|
|
792
|
-
//# sourceMappingURL=index.mjs.map
|
|
32
|
+
export { AsyncContextView, BaseDistributedAdapter, BaseNode, BatchGatherNode, BatchScatterNode, ClassNodeExecutor, ConsoleLogger, Context, DIContainer, DefaultOrchestrator, FlowBuilder, FlowRuntime, FlowcraftError, FunctionNodeExecutor, GraphTraverser, InMemoryEventStore, JsonSerializer, NodeExecutor, NullLogger, PersistentEventBusAdapter, PropertyEvaluator, ReplayOrchestrator, ServiceTokens, SleepNode, SubflowNode, TrackedAsyncContext, UnsafeEvaluator, WaitNode, WebhookNode, WorkflowState, analyzeBlueprint, checkForCycles, createDefaultContainer, createErrorMapper, createFlow, executeBatch, generateMermaid, generateMermaidForRun, isNodeClass, lintBlueprint, processResults, sanitizeBlueprint };
|