flowcraft 2.10.0 → 2.10.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +3 -3
- package/dist/adapter-DzeZVjSE.d.mts +133 -0
- package/dist/adapters/index.d.mts +2 -0
- package/dist/adapters/index.mjs +3 -0
- package/dist/adapters/persistent-event-bus.d.mts +2 -0
- package/dist/adapters/persistent-event-bus.mjs +59 -0
- package/dist/analysis-B5Twr7sD.d.mts +52 -0
- package/dist/analysis.d.mts +2 -0
- package/dist/analysis.mjs +164 -0
- package/dist/batch-gather-BhF-IzQR.d.mts +8 -0
- package/dist/batch-scatter-DD8TU0Wm.d.mts +8 -0
- package/dist/container-BKdd-9wf.d.mts +24 -0
- package/dist/container-factory-fDY2kkxt.d.mts +17 -0
- package/dist/container-factory.d.mts +2 -0
- package/dist/container-factory.mjs +23 -0
- package/dist/container.d.mts +2 -0
- package/dist/container.mjs +43 -0
- package/dist/context-ZVtzXuZu.d.mts +64 -0
- package/dist/context.d.mts +2 -0
- package/dist/context.mjs +145 -0
- package/dist/error-mapper-BAv_YQMQ.d.mts +14 -0
- package/dist/error-mapper.d.mts +2 -0
- package/dist/error-mapper.mjs +37 -0
- package/dist/errors-CyyIj3OO.d.mts +21 -0
- package/dist/errors.d.mts +2 -0
- package/dist/errors.mjs +24 -0
- package/dist/evaluator-Dnj5qJ92.d.mts +31 -0
- package/dist/evaluator.d.mts +2 -0
- package/dist/evaluator.mjs +80 -0
- package/dist/flow-CZGpYpl-.d.mts +94 -0
- package/dist/flow.d.mts +2 -0
- package/dist/flow.mjs +328 -0
- package/dist/index-9iG2qHLe.d.mts +1 -0
- package/dist/index-Bk0eNZmQ.d.mts +1 -0
- package/dist/index-CNgSR_kt.d.mts +1 -0
- package/dist/index-CW2WHUXP.d.mts +1 -0
- package/dist/index.d.mts +24 -1
- package/dist/index.mjs +31 -791
- package/dist/linter-B8KALEae.d.mts +25 -0
- package/dist/linter.d.mts +2 -0
- package/dist/linter.mjs +74 -0
- package/dist/logger-BvDgvNHQ.d.mts +19 -0
- package/dist/logger.d.mts +2 -0
- package/dist/logger.mjs +26 -0
- package/dist/node.d.mts +2 -0
- package/dist/node.mjs +55 -0
- package/dist/nodes/batch-gather.d.mts +2 -0
- package/dist/nodes/batch-gather.mjs +47 -0
- package/dist/nodes/batch-scatter.d.mts +2 -0
- package/dist/nodes/batch-scatter.mjs +52 -0
- package/dist/nodes/index.d.mts +7 -0
- package/dist/nodes/index.mjs +8 -0
- package/dist/nodes/sleep.d.mts +2 -0
- package/dist/nodes/sleep.mjs +41 -0
- package/dist/nodes/subflow.d.mts +2 -0
- package/dist/nodes/subflow.mjs +64 -0
- package/dist/nodes/wait.d.mts +2 -0
- package/dist/nodes/wait.mjs +12 -0
- package/dist/nodes/webhook.d.mts +2 -0
- package/dist/nodes/webhook.mjs +24 -0
- package/dist/orchestrator-DwMIJRFI.d.mts +8 -0
- package/dist/persistent-event-bus-COiQOpWh.d.mts +68 -0
- package/dist/replay-CVOy6d_L.d.mts +44 -0
- package/dist/runtime/adapter.d.mts +2 -0
- package/dist/runtime/adapter.mjs +349 -0
- package/dist/runtime/builtin-keys.d.mts +37 -0
- package/dist/runtime/builtin-keys.mjs +12 -0
- package/dist/runtime/execution-context.d.mts +2 -0
- package/dist/runtime/execution-context.mjs +26 -0
- package/dist/runtime/executors.d.mts +2 -0
- package/dist/runtime/executors.mjs +259 -0
- package/dist/runtime/index.d.mts +6 -0
- package/dist/runtime/index.mjs +10 -0
- package/dist/runtime/node-executor-factory.d.mts +11 -0
- package/dist/runtime/node-executor-factory.mjs +41 -0
- package/dist/runtime/orchestrator.d.mts +2 -0
- package/dist/runtime/orchestrator.mjs +41 -0
- package/dist/runtime/orchestrators/replay.d.mts +2 -0
- package/dist/{replay-BB11M6K1.mjs → runtime/orchestrators/replay.mjs} +1 -20
- package/dist/runtime/orchestrators/step-by-step.d.mts +15 -0
- package/dist/runtime/orchestrators/step-by-step.mjs +41 -0
- package/dist/runtime/orchestrators/utils.d.mts +2 -0
- package/dist/runtime/orchestrators/utils.mjs +79 -0
- package/dist/runtime/runtime.d.mts +2 -0
- package/dist/runtime/runtime.mjs +425 -0
- package/dist/runtime/scheduler.d.mts +2 -0
- package/dist/runtime/scheduler.mjs +64 -0
- package/dist/runtime/state.d.mts +2 -0
- package/dist/runtime/state.mjs +127 -0
- package/dist/runtime/traverser.d.mts +2 -0
- package/dist/runtime/traverser.mjs +213 -0
- package/dist/runtime/types.d.mts +2 -0
- package/dist/runtime/types.mjs +1 -0
- package/dist/runtime/workflow-logic-handler.d.mts +16 -0
- package/dist/runtime/workflow-logic-handler.mjs +159 -0
- package/dist/sanitizer-Bi00YjvO.d.mts +11 -0
- package/dist/sanitizer.d.mts +2 -0
- package/dist/sanitizer.mjs +37 -0
- package/dist/sdk.d.mts +1 -2
- package/dist/sdk.mjs +1 -2
- package/dist/serializer-BnmJr13R.d.mts +17 -0
- package/dist/serializer.d.mts +2 -0
- package/dist/serializer.mjs +34 -0
- package/dist/sleep-DpwYaY5b.d.mts +8 -0
- package/dist/subflow-n2IMsRe2.d.mts +8 -0
- package/dist/testing/event-logger.d.mts +62 -0
- package/dist/testing/event-logger.mjs +98 -0
- package/dist/testing/index.d.mts +5 -172
- package/dist/testing/index.mjs +6 -276
- package/dist/testing/run-with-trace.d.mts +37 -0
- package/dist/testing/run-with-trace.mjs +49 -0
- package/dist/testing/stepper.d.mts +78 -0
- package/dist/testing/stepper.mjs +100 -0
- package/dist/types-BcrXJEPI.d.mts +687 -0
- package/dist/types.d.mts +2 -0
- package/dist/types.mjs +1 -0
- package/dist/utils-BUEgr9V2.d.mts +34 -0
- package/dist/wait-2Q-LA7V7.d.mts +8 -0
- package/dist/webhook-BiCm-HLx.d.mts +12 -0
- package/package.json +4 -4
- package/dist/index-BXRN44Qf.d.mts +0 -1347
- package/dist/index.mjs.map +0 -1
- package/dist/replay-BB11M6K1.mjs.map +0 -1
- package/dist/runtime-ChsWirQN.mjs +0 -2256
- package/dist/runtime-ChsWirQN.mjs.map +0 -1
- package/dist/sdk.mjs.map +0 -1
- package/dist/testing/index.mjs.map +0 -1
|
@@ -0,0 +1,425 @@
|
|
|
1
|
+
import { analyzeBlueprint } from "../analysis.mjs";
|
|
2
|
+
import { DIContainer, ServiceTokens } from "../container.mjs";
|
|
3
|
+
import { PropertyEvaluator } from "../evaluator.mjs";
|
|
4
|
+
import { NullLogger } from "../logger.mjs";
|
|
5
|
+
import { FlowcraftError } from "../errors.mjs";
|
|
6
|
+
import { WorkflowState } from "./state.mjs";
|
|
7
|
+
import { ExecutionContext } from "./execution-context.mjs";
|
|
8
|
+
import { DefaultOrchestrator } from "./orchestrator.mjs";
|
|
9
|
+
import { JsonSerializer } from "../serializer.mjs";
|
|
10
|
+
import { BatchGatherNode } from "../nodes/batch-gather.mjs";
|
|
11
|
+
import { BatchScatterNode } from "../nodes/batch-scatter.mjs";
|
|
12
|
+
import { SleepNode } from "../nodes/sleep.mjs";
|
|
13
|
+
import { GraphTraverser } from "./traverser.mjs";
|
|
14
|
+
import { SubflowNode } from "../nodes/subflow.mjs";
|
|
15
|
+
import { WaitNode } from "../nodes/wait.mjs";
|
|
16
|
+
import { WebhookNode } from "../nodes/webhook.mjs";
|
|
17
|
+
import { sanitizeBlueprint } from "../sanitizer.mjs";
|
|
18
|
+
import { NodeExecutorFactory } from "./node-executor-factory.mjs";
|
|
19
|
+
import { WorkflowScheduler } from "./scheduler.mjs";
|
|
20
|
+
import { WorkflowLogicHandler } from "./workflow-logic-handler.mjs";
|
|
21
|
+
|
|
22
|
+
//#region src/runtime/runtime.ts
/**
 * FlowRuntime is the top-level workflow engine. It assembles the service set
 * (logger, serializer, evaluator, event bus, middleware, dependencies), holds
 * the node and blueprint registries, and drives blueprint execution through an
 * orchestrator. It also supports resuming awaiting workflows (timers, webhooks,
 * subflows) and replaying executions from a recorded event history.
 */
var FlowRuntime = class {
  container;
  registry;
  blueprints;
  dependencies;
  logger;
  eventBus;
  serializer;
  middleware;
  evaluator;
  analysisCache;
  orchestrator;
  options;
  logicHandler;
  executorFactory;
  scheduler;
  // Look up a registered blueprint by id; returns undefined when unknown.
  getBlueprint(id) {
    return this.blueprints[id];
  }
  /**
   * Two construction forms:
   *  - `new FlowRuntime(diContainer, legacyOptions)` — services are resolved
   *    from container tokens; `legacyOptions` becomes `this.options`.
   *  - `new FlowRuntime(options)` — services are taken from the options object,
   *    each falling back to a null/default implementation.
   * In both forms a set of built-in node types is merged with the user registry
   * (user entries win, since they are spread last).
   */
  constructor(containerOrOptions, legacyOptions) {
    let userRegistry;
    if (containerOrOptions instanceof DIContainer) {
      this.container = containerOrOptions;
      this.logger = this.container.resolve(ServiceTokens.Logger);
      this.serializer = this.container.resolve(ServiceTokens.Serializer);
      this.evaluator = this.container.resolve(ServiceTokens.Evaluator);
      // Fall back to a no-op event bus / empty middleware list when the
      // container has no binding for them.
      this.eventBus = this.container.resolve(ServiceTokens.EventBus) || { emit: async () => {} };
      this.middleware = this.container.resolve(ServiceTokens.Middleware) || [];
      userRegistry = this.container.resolve(ServiceTokens.NodeRegistry);
      this.blueprints = this.container.resolve(ServiceTokens.BlueprintRegistry);
      this.dependencies = this.container.resolve(ServiceTokens.Dependencies);
      this.options = legacyOptions || {};
      // NOTE(review): this assignment is overwritten unconditionally by the
      // `this.container?.has(...)` ternary below — it appears redundant; confirm
      // whether resolve() here has required side effects before removing.
      this.orchestrator = this.container.resolve(ServiceTokens.Orchestrator);
      this.scheduler = new WorkflowScheduler(this);
    } else {
      const options = containerOrOptions || {};
      this.logger = options.logger || new NullLogger();
      this.serializer = options.serializer || new JsonSerializer();
      this.evaluator = options.evaluator || new PropertyEvaluator();
      this.eventBus = options.eventBus || { emit: async () => {} };
      this.middleware = options.middleware || [];
      userRegistry = options.registry || {};
      this.blueprints = options.blueprints || {};
      this.scheduler = new WorkflowScheduler(this);
      this.dependencies = options.dependencies || {};
      this.options = options;
      this.container = null;
    }
    // Built-in loop controller: evaluates its `condition` param against the
    // current context snapshot and signals whether the loop should continue.
    const loopControllerFunction = async (context) => {
      const condition = context.params.condition;
      const contextData = await context.context.toJSON();
      if (this.evaluator.evaluate(condition, contextData)) return { action: "continue" };
      else return {
        action: "break",
        output: null
      };
    };
    // Built-in node types; user-supplied entries override them on key collision.
    this.registry = new Map(Object.entries({
      wait: WaitNode,
      sleep: SleepNode,
      webhook: WebhookNode,
      subflow: SubflowNode,
      "batch-scatter": BatchScatterNode,
      "batch-gather": BatchGatherNode,
      "loop-controller": loopControllerFunction,
      ...userRegistry
    }));
    this.orchestrator = this.container?.has(ServiceTokens.Orchestrator) ? this.container.resolve(ServiceTokens.Orchestrator) : new DefaultOrchestrator();
    // WeakMap keyed by blueprint object identity, so cached analyses are
    // garbage-collected along with their blueprints.
    this.analysisCache = /* @__PURE__ */ new WeakMap();
    this.logicHandler = new WorkflowLogicHandler(this.evaluator, this.eventBus);
    this.executorFactory = new NodeExecutorFactory(this.eventBus);
  }
  // Build a fresh ExecutionContext for a brand-new run: sanitizes the
  // blueprint, deserializes string initial state, and wires in all services.
  _setupExecutionContext(blueprint, initialState, options) {
    const executionId = globalThis.crypto?.randomUUID();
    const contextData = typeof initialState === "string" ? this.serializer.deserialize(initialState) : initialState;
    blueprint = sanitizeBlueprint(blueprint);
    const state = new WorkflowState(contextData);
    const nodeRegistry = this._createExecutionRegistry(options?.functionRegistry);
    return new ExecutionContext(blueprint, state, nodeRegistry, executionId, this, {
      logger: this.logger,
      eventBus: this.eventBus,
      serializer: this.serializer,
      evaluator: this.evaluator,
      middleware: this.middleware,
      dependencies: this.dependencies
    }, options?.signal, options?.concurrency);
  }
  /**
   * Execute a blueprint from the start.
   * Emits lifecycle events (workflow:start/resume, stall/pause on stalls,
   * workflow:finish on completion), caches blueprint analysis, enforces the
   * DAG requirement in strict mode, and registers timer-awaiting nodes with
   * the scheduler. Returns the orchestrator's result object.
   */
  async run(blueprint, initialState = {}, options) {
    const startTime = Date.now();
    const executionContext = this._setupExecutionContext(blueprint, initialState, options);
    this.logger.info(`Starting workflow execution`, {
      blueprintId: executionContext.blueprint.id,
      executionId: executionContext.executionId
    });
    try {
      await this.eventBus.emit({
        type: "workflow:start",
        payload: {
          blueprintId: executionContext.blueprint.id,
          executionId: executionContext.executionId
        }
      });
      await this.eventBus.emit({
        type: "workflow:resume",
        payload: {
          blueprintId: executionContext.blueprint.id,
          executionId: executionContext.executionId
        }
      });
      // Analysis is memoized per blueprint object (WeakMap keyed by identity).
      const analysis = this.analysisCache.get(executionContext.blueprint) ?? (() => {
        const computed = analyzeBlueprint(executionContext.blueprint);
        this.analysisCache.set(executionContext.blueprint, computed);
        return computed;
      })();
      // Strict mode rejects cyclic graphs outright; otherwise cycles only warn.
      if (options?.strict && !analysis.isDag) throw new Error(`Workflow '${executionContext.blueprint.id}' failed strictness check: Cycles are not allowed.`);
      if (!analysis.isDag) this.logger.warn(`Workflow contains cycles`, { blueprintId: executionContext.blueprint.id });
      const traverser = new GraphTraverser(executionContext.blueprint, options?.strict === true);
      const result = await this.orchestrator.run(executionContext, traverser);
      const duration = Date.now() - startTime;
      if (result.status === "stalled") {
        await this.eventBus.emit({
          type: "workflow:stall",
          payload: {
            blueprintId: executionContext.blueprint.id,
            executionId: executionContext.executionId,
            remainingNodes: traverser.getAllNodeIds().size - executionContext.state.getCompletedNodes().size
          }
        });
        await this.eventBus.emit({
          type: "workflow:pause",
          payload: {
            blueprintId: executionContext.blueprint.id,
            executionId: executionContext.executionId
          }
        });
      }
      this.logger.info(`Workflow execution completed`, {
        blueprintId: executionContext.blueprint.id,
        executionId: executionContext.executionId,
        status: result.status,
        duration,
        errors: result.errors?.length || 0
      });
      await this.eventBus.emit({
        type: "workflow:finish",
        payload: {
          blueprintId: executionContext.blueprint.id,
          executionId: executionContext.executionId,
          status: result.status,
          errors: result.errors
        }
      });
      // Timer-based awaiting nodes are handed to the scheduler so they can be
      // auto-resumed once their wake-up time passes.
      if (result.status === "awaiting") {
        const awaitingNodeIds = executionContext.state.getAwaitingNodeIds();
        for (const nodeId of awaitingNodeIds) {
          const details = executionContext.state.getAwaitingDetails(nodeId);
          if (details?.reason === "timer") this.scheduler.registerAwaitingWorkflow(executionContext.executionId, executionContext.blueprint.id, result.serializedContext, nodeId, details.wakeUpAt, options?.functionRegistry);
        }
      }
      return result;
    } catch (error) {
      const duration = Date.now() - startTime;
      const workflowError = {
        message: error instanceof Error ? error.message : String(error),
        timestamp: (/* @__PURE__ */ new Date()).toISOString(),
        isFatal: false,
        name: "WorkflowError"
      };
      // NOTE(review): this finish event fires for EVERY caught error with
      // status "cancelled", even for genuine failures that are rethrown below;
      // and for actual cancellations a second identical workflow:finish is
      // emitted in the branch underneath. Confirm both behaviors are intended.
      await this.eventBus.emit({
        type: "workflow:finish",
        payload: {
          blueprintId: executionContext.blueprint.id,
          executionId: executionContext.executionId,
          status: "cancelled",
          errors: [workflowError]
        }
      });
      // Cancellation is detected either via AbortError (DOMException) or a
      // FlowcraftError whose message mentions "cancelled".
      if (error instanceof DOMException ? error.name === "AbortError" : error instanceof FlowcraftError && error.message.includes("cancelled")) {
        this.logger.info(`Workflow execution cancelled`, {
          blueprintId: executionContext.blueprint.id,
          executionId: executionContext.executionId,
          duration
        });
        await this.eventBus.emit({
          type: "workflow:pause",
          payload: {
            blueprintId: executionContext.blueprint.id,
            executionId: executionContext.executionId
          }
        });
        await this.eventBus.emit({
          type: "workflow:finish",
          payload: {
            blueprintId: executionContext.blueprint.id,
            executionId: executionContext.executionId,
            status: "cancelled",
            errors: [workflowError]
          }
        });
        // Cancelled runs resolve (not reject) with an empty context.
        return {
          context: {},
          serializedContext: "{}",
          status: "cancelled"
        };
      }
      this.logger.error(`Workflow execution failed`, {
        blueprintId: executionContext.blueprint.id,
        executionId: executionContext.executionId,
        duration,
        error: error instanceof Error ? error.message : String(error)
      });
      throw error;
    }
  }
  // Start the background scheduler; an explicit interval replaces the
  // default scheduler instance with a newly-configured one.
  startScheduler(checkIntervalMs) {
    if (checkIntervalMs !== void 0) this.scheduler = new WorkflowScheduler(this, checkIntervalMs);
    this.scheduler.start();
  }
  stopScheduler() {
    this.scheduler.stop();
  }
  // Like _setupExecutionContext, but reuses an already-reconstructed
  // WorkflowState (no sanitization/deserialization, no concurrency option).
  _setupResumedExecutionContext(blueprint, workflowState, options) {
    const executionId = globalThis.crypto?.randomUUID();
    return new ExecutionContext(blueprint, workflowState, this._createExecutionRegistry(options?.functionRegistry), executionId, this, {
      logger: this.logger,
      eventBus: this.eventBus,
      serializer: this.serializer,
      evaluator: this.evaluator,
      middleware: this.middleware,
      dependencies: this.dependencies
    }, options?.signal);
  }
  /**
   * Resume a previously-awaiting workflow from its serialized context.
   * For SubflowNode waits this recursively resumes the sub-blueprint first and
   * uses its final output as the resume data. The awaiting node is then marked
   * completed, successor edges are applied, and the orchestrator continues
   * from the reconstructed frontier. Throws FlowcraftError when the context is
   * not awaiting or the target node cannot be resolved.
   */
  async resume(blueprint, serializedContext, resumeData, nodeId, options) {
    const executionId = globalThis.crypto?.randomUUID();
    const workflowState = new WorkflowState(this.serializer.deserialize(serializedContext));
    const awaitingNodeIds = workflowState.getAwaitingNodeIds();
    if (awaitingNodeIds.length === 0) throw new FlowcraftError("Cannot resume: The provided context is not in an awaiting state.", { isFatal: true });
    // Default to the first awaiting node when the caller does not name one.
    const awaitingNodeId = nodeId || awaitingNodeIds[0];
    if (!awaitingNodeIds.includes(awaitingNodeId)) throw new FlowcraftError(`Cannot resume: Node '${awaitingNodeId}' is not in an awaiting state.`, { isFatal: true });
    const awaitingNodeDef = blueprint.nodes.find((n) => n.id === awaitingNodeId);
    if (!awaitingNodeDef) throw new FlowcraftError(`Awaiting node '${awaitingNodeId}' not found in blueprint.`, {
      nodeId: awaitingNodeId,
      blueprintId: blueprint.id,
      isFatal: true
    });
    const contextImpl = workflowState.getContext();
    if (awaitingNodeDef.uses === "SubflowNode") {
      // The parent stores the child's serialized context under a well-known key.
      const subflowStateKey = `_subflowState.${awaitingNodeId}`;
      const subflowContext = await contextImpl.get(subflowStateKey);
      if (!subflowContext) throw new FlowcraftError(`Cannot resume: Subflow state for node '${awaitingNodeId}' not found.`, {
        nodeId: awaitingNodeId,
        blueprintId: blueprint.id,
        isFatal: true
      });
      const blueprintId = awaitingNodeDef.params?.blueprintId;
      if (!blueprintId) throw new FlowcraftError(`Subflow node '${awaitingNodeId}' is missing the 'blueprintId' parameter.`, {
        nodeId: awaitingNodeId,
        blueprintId: blueprint.id,
        isFatal: true
      });
      const subBlueprint = this.blueprints[blueprintId];
      if (!subBlueprint) throw new FlowcraftError(`Sub-blueprint with ID '${blueprintId}' not found in runtime registry.`, {
        nodeId: awaitingNodeId,
        blueprintId: blueprint.id,
        isFatal: true
      });
      // Recursive resume of the sub-workflow; it must fully complete for the
      // parent to proceed.
      const subflowResumeResult = await this.resume(subBlueprint, subflowContext, resumeData, void 0, options);
      if (subflowResumeResult.status !== "completed") throw new FlowcraftError(`Resumed subflow '${subBlueprint.id}' did not complete. Status: ${subflowResumeResult.status}`, {
        nodeId: awaitingNodeId,
        blueprintId: blueprint.id,
        isFatal: false
      });
      const subflowFinalContext = subflowResumeResult.context;
      let finalSubflowOutput;
      const subAnalysis = analyzeBlueprint(subBlueprint);
      // Output shape depends on the subflow: explicit `outputs` param -> whole
      // context; single terminal node -> that node's output; otherwise a map of
      // terminal-node outputs keyed by node id.
      if (awaitingNodeDef.params?.outputs) finalSubflowOutput = subflowFinalContext;
      else if (subAnalysis.terminalNodeIds.length === 1) finalSubflowOutput = subflowFinalContext[`_outputs.${subAnalysis.terminalNodeIds[0]}`];
      else {
        const terminalOutputs = {};
        for (const terminalId of subAnalysis.terminalNodeIds) terminalOutputs[terminalId] = subflowFinalContext[`_outputs.${terminalId}`];
        finalSubflowOutput = terminalOutputs;
      }
      resumeData = { output: finalSubflowOutput };
      await contextImpl.delete(subflowStateKey);
    }
    // Prefer the caller-supplied output; fall back to any previously stored one.
    const existingOutput = await workflowState.getContext().get(`_outputs.${awaitingNodeId}`);
    const nodeOutput = resumeData.output !== void 0 ? resumeData.output : existingOutput;
    // NOTE(review): addCompletedNode is declared async elsewhere in this bundle
    // but is not awaited here — its context writes may race with
    // determineNextNodes below. Confirm intended.
    workflowState.addCompletedNode(awaitingNodeId, nodeOutput);
    const nodeResult = { output: nodeOutput };
    const nextSteps = await this.determineNextNodes(blueprint, awaitingNodeId, nodeResult, contextImpl, executionId);
    if (nextSteps.length === 0) {
      // No successors: the workflow is done.
      workflowState.clearAwaiting(awaitingNodeId);
      const result = await workflowState.toResult(this.serializer, executionId);
      result.status = "completed";
      return result;
    }
    const allPredecessors = new GraphTraverser(blueprint).getAllPredecessors();
    for (const { node, edge } of nextSteps) await this.applyEdgeTransform(edge, nodeResult, node, contextImpl, allPredecessors, executionId);
    const traverser = GraphTraverser.fromState(blueprint, workflowState);
    const nextNodeDefs = nextSteps.map((s) => s.node);
    for (const nodeDef of nextNodeDefs) traverser.addToFrontier(nodeDef.id);
    workflowState.clearAwaiting(awaitingNodeId);
    const executionContext = this._setupResumedExecutionContext(blueprint, workflowState, options);
    return await this.orchestrator.run(executionContext, traverser);
  }
  // Merge the base registry with a per-execution dynamic registry; dynamic
  // entries override base entries on key collision.
  _createExecutionRegistry(dynamicRegistry) {
    const executionRegistry = new Map(this.registry);
    if (dynamicRegistry) for (const [key, func] of dynamicRegistry.entries()) executionRegistry.set(key, func);
    return executionRegistry;
  }
  /**
   * Execute a single node outside the normal orchestration loop.
   * Resolves the node's input from context, runs it via the executor factory,
   * and on a "failed_with_fallback" result runs the designated fallback node,
   * tagging its result with `_fallbackExecuted: true`. Throws the underlying
   * executor error when no fallback succeeds.
   */
  async executeNode(blueprint, nodeId, state, _allPredecessors, functionRegistry, executionId, signal) {
    const nodeDef = blueprint.nodes.find((n) => n.id === nodeId);
    if (!nodeDef) throw new FlowcraftError(`Node '${nodeId}' not found in blueprint.`, {
      nodeId,
      blueprintId: blueprint.id,
      executionId,
      isFatal: false
    });
    const asyncContext = state.getContext();
    const input = await this.resolveNodeInput(nodeDef.id, blueprint, asyncContext);
    const nodeRegistry = new Map([...this.registry, ...functionRegistry || /* @__PURE__ */ new Map()]);
    const services = {
      logger: this.logger,
      eventBus: this.eventBus,
      serializer: this.serializer,
      evaluator: this.evaluator,
      middleware: this.middleware,
      dependencies: this.dependencies
    };
    const context = new ExecutionContext(blueprint, state, nodeRegistry, executionId || "", this, services, signal);
    const executionResult = await this.executorFactory.createExecutorForNode(nodeId, context).execute(input);
    if (executionResult.status === "success") return executionResult.result;
    if (executionResult.status === "failed_with_fallback") {
      const fallbackNode = blueprint.nodes.find((n) => n.id === executionResult.fallbackNodeId);
      if (!fallbackNode) throw new FlowcraftError(`Fallback node '${executionResult.fallbackNodeId}' not found in blueprint.`, {
        nodeId: nodeDef.id,
        blueprintId: blueprint.id,
        executionId,
        isFatal: false
      });
      const fallbackInput = await this.resolveNodeInput(fallbackNode.id, blueprint, asyncContext);
      const fallbackResult = await this.executorFactory.createExecutorForNode(fallbackNode.id, context).execute(fallbackInput);
      if (fallbackResult.status === "success") {
        state.markFallbackExecuted();
        // NOTE(review): addCompletedNode may be async and is not awaited here;
        // confirm the write ordering is acceptable.
        state.addCompletedNode(executionResult.fallbackNodeId, fallbackResult.result.output);
        this.logger.info(`Fallback execution completed`, {
          nodeId: nodeDef.id,
          fallbackNodeId: executionResult.fallbackNodeId,
          executionId
        });
        return {
          ...fallbackResult.result,
          _fallbackExecuted: true
        };
      }
      throw fallbackResult.error;
    }
    throw executionResult.error;
  }
  // Thin convenience wrapper over the executor factory.
  getExecutorForNode(nodeId, context) {
    return this.executorFactory.createExecutorForNode(nodeId, context);
  }
  // The following three methods delegate graph/edge/input logic to the
  // WorkflowLogicHandler so orchestrators can call them via the runtime.
  async determineNextNodes(blueprint, nodeId, result, context, executionId) {
    return this.logicHandler.determineNextNodes(blueprint, nodeId, result, context, executionId);
  }
  async applyEdgeTransform(edge, sourceResult, targetNode, context, allPredecessors, executionId) {
    return this.logicHandler.applyEdgeTransform(edge, sourceResult, targetNode, context, allPredecessors, executionId);
  }
  async resolveNodeInput(nodeId, blueprint, context) {
    return this.logicHandler.resolveNodeInput(nodeId, blueprint, context);
  }
  /**
   * Replay a workflow execution from a pre-recorded event history.
   * This reconstructs the final workflow state without executing any node logic,
   * enabling time-travel debugging and post-mortem analysis.
   *
   * @param blueprint The workflow blueprint
   * @param events The recorded event history for the execution
   * @param executionId Optional execution ID to filter events (if events contain multiple executions)
   * @returns The reconstructed workflow result
   */
  async replay(blueprint, events, executionId) {
    let filteredEvents = events;
    if (executionId) filteredEvents = events.filter((event) => {
      if ("executionId" in event.payload) return event.payload.executionId === executionId;
      return false;
    });
    // No id supplied: derive it from the first workflow:start event.
    if (!executionId) {
      const workflowStartEvent = filteredEvents.find((e) => e.type === "workflow:start");
      if (workflowStartEvent && "executionId" in workflowStartEvent.payload) executionId = workflowStartEvent.payload.executionId;
      else throw new FlowcraftError("Cannot determine execution ID from events", { isFatal: true });
    }
    // Build a fresh context but re-key it with the replayed execution id.
    const tempContext = this._setupExecutionContext(blueprint, {}, { strict: false });
    const executionContext = new ExecutionContext(blueprint, tempContext.state, tempContext.nodeRegistry, executionId, this, tempContext.services, tempContext.signal, tempContext.concurrency);
    // Lazily loaded so replay support does not weigh on the main bundle.
    const { ReplayOrchestrator } = await import("./orchestrators/replay.mjs");
    const replayOrchestrator = new ReplayOrchestrator(filteredEvents);
    const traverser = new GraphTraverser(blueprint);
    return await replayOrchestrator.run(executionContext, traverser);
  }
};

//#endregion
export { FlowRuntime };
|
|
@@ -0,0 +1,64 @@
|
|
|
1
|
+
//#region src/runtime/scheduler.ts
/**
 * Polls registered "awaiting" workflows (timer-based waits) and resumes them
 * through the owning FlowRuntime once their wake-up time has passed.
 *
 * Fix: `checkAndResumeWorkflows` is async but is fired from `setInterval`
 * without awaiting. If a resume takes longer than the poll interval, the next
 * tick could start a second overlapping sweep and resume the same workflow
 * twice (unregistration only happens after the `await`). A re-entrancy guard
 * now makes overlapping sweeps no-ops.
 */
var WorkflowScheduler = class {
  runtime;
  // executionId -> awaiting-workflow descriptor
  activeWorkflows = /* @__PURE__ */ new Map();
  // executionId -> last resume() result, retrievable via getResumeResult()
  resumeResults = /* @__PURE__ */ new Map();
  intervalId;
  checkIntervalMs;
  // True while a sweep is in flight; prevents overlapping sweeps.
  isChecking = false;
  /**
   * @param runtime Owning FlowRuntime (provides getBlueprint/resume).
   * @param checkIntervalMs Poll interval in milliseconds (default 1000).
   */
  constructor(runtime, checkIntervalMs = 1e3) {
    this.runtime = runtime;
    this.checkIntervalMs = checkIntervalMs;
  }
  // Start the polling loop. Idempotent: calling start() while running is a no-op.
  start() {
    if (this.intervalId) return;
    this.intervalId = setInterval(() => {
      // Fire-and-forget; the re-entrancy guard inside handles overlap, and
      // checkAndResumeWorkflows catches its own per-workflow errors.
      this.checkAndResumeWorkflows();
    }, this.checkIntervalMs);
  }
  // Stop the polling loop (safe to call when not running).
  stop() {
    if (this.intervalId) {
      clearInterval(this.intervalId);
      this.intervalId = void 0;
    }
  }
  // Track a workflow that is awaiting a timer so it can be auto-resumed.
  registerAwaitingWorkflow(executionId, blueprintId, serializedContext, awaitingNodeId, wakeUpAt, functionRegistry) {
    this.activeWorkflows.set(executionId, {
      executionId,
      blueprintId,
      serializedContext,
      awaitingNodeId,
      wakeUpAt,
      functionRegistry
    });
  }
  unregisterWorkflow(executionId) {
    this.activeWorkflows.delete(executionId);
  }
  /**
   * Resume every registered workflow whose wake-up time has passed.
   * Per-workflow failures are logged and the workflow is unregistered so a
   * broken entry cannot wedge the loop. Overlapping invocations are no-ops.
   */
  async checkAndResumeWorkflows() {
    if (this.isChecking) return;
    this.isChecking = true;
    try {
      const now = /* @__PURE__ */ new Date();
      const toResume = [];
      for (const [_executionId, workflow] of this.activeWorkflows) if (new Date(workflow.wakeUpAt) <= now) toResume.push(workflow);
      for (const workflow of toResume) try {
        const blueprint = this.runtime.getBlueprint(workflow.blueprintId);
        if (!blueprint) {
          console.warn(`Blueprint ${workflow.blueprintId} not found, skipping resumption`);
          continue;
        }
        const result = await this.runtime.resume(blueprint, workflow.serializedContext, { output: void 0 }, workflow.awaitingNodeId, { functionRegistry: workflow.functionRegistry });
        this.resumeResults.set(workflow.executionId, result);
        // Terminal statuses leave the tracking map; an "awaiting" result keeps
        // the workflow registered for a later wake-up.
        if (result.status === "completed" || result.status === "failed") this.unregisterWorkflow(workflow.executionId);
      } catch (error) {
        console.error(`Failed to resume workflow ${workflow.executionId}:`, error);
        this.unregisterWorkflow(workflow.executionId);
      }
    } finally {
      this.isChecking = false;
    }
  }
  getActiveWorkflows() {
    return Array.from(this.activeWorkflows.values());
  }
  getResumeResult(executionId) {
    return this.resumeResults.get(executionId);
  }
};

//#endregion
export { WorkflowScheduler };
|
|
@@ -0,0 +1,127 @@
|
|
|
1
|
+
import { FlowcraftError } from "../errors.mjs";
|
|
2
|
+
import { AsyncContextView, Context, TrackedAsyncContext } from "../context.mjs";
|
|
3
|
+
|
|
4
|
+
//#region src/runtime/state.ts
|
|
5
|
+
/**
 * Mutable per-run state for a workflow execution: which nodes have
 * completed (and their outputs), accumulated node errors, fallback
 * bookkeeping, and the set of nodes awaiting external input.
 * State is mirrored into the async context under reserved keys
 * ("_outputs.<id>", "_awaitingNodeIds", "_awaitingDetails") so it
 * survives serialization and resumption.
 */
var WorkflowState = class {
	_completedNodes = /* @__PURE__ */ new Set();
	errors = [];
	anyFallbackExecuted = false;
	context;
	_isAwaiting = false;
	_awaitingNodeIds = /* @__PURE__ */ new Set();
	_awaitingDetails = /* @__PURE__ */ new Map();
	isLastAttempt;
	/**
	 * @param initialData - Raw context data; may be a rehydrated snapshot
	 *   containing reserved "_awaiting*" / "_outputs.*" keys.
	 * @param context - Optional pre-built context; wrapped in a
	 *   TrackedAsyncContext when it is not one already.
	 */
	constructor(initialData, context) {
		if (context) this.context = context instanceof TrackedAsyncContext ? context : new TrackedAsyncContext(context);
		else this.context = new TrackedAsyncContext(new AsyncContextView(new Context(initialData)));
		// Rehydrate awaiting bookkeeping from a serialized snapshot.
		if (initialData._awaitingNodeIds) {
			this._isAwaiting = true;
			const awaitingIds = initialData._awaitingNodeIds;
			if (Array.isArray(awaitingIds)) for (const id of awaitingIds) this._awaitingNodeIds.add(id);
		}
		if (initialData._awaitingDetails) this._awaitingDetails = new Map(Object.entries(initialData._awaitingDetails));
		// Keys shaped "_outputs.<nodeId>" mark nodes completed in a prior run.
		for (const key of Object.keys(initialData)) if (key.startsWith("_outputs.")) {
			const nodeId = key.substring(9);
			this._completedNodes.add(nodeId);
		}
	}
	/**
	 * Configure the context to emit events when modified.
	 * This is called after the ExecutionContext is created.
	 */
	setEventEmitter(eventBus, executionId, sourceNode) {
		if (this.context instanceof TrackedAsyncContext) this.context.configureEventEmitter(eventBus, executionId, sourceNode);
	}
	/**
	 * Record a node's completion and publish its output under both the
	 * durable "_outputs.<nodeId>" key and the bare "<nodeId>" alias.
	 */
	async addCompletedNode(nodeId, output) {
		this._completedNodes.add(nodeId);
		await this.context.set(`_outputs.${nodeId}`, output);
		await this.context.set(nodeId, output);
	}
	/** Wrap a node failure in a non-fatal FlowcraftError and record it. */
	addError(nodeId, error) {
		const flowcraftError = new FlowcraftError(error.message, {
			cause: error,
			nodeId,
			isFatal: false
		});
		// FIX: spreading an Error copies only its own ENUMERABLE properties;
		// `message` (set by the Error constructor) is non-enumerable and was
		// silently dropped from the stored record. Copy it explicitly.
		this.errors.push({
			...flowcraftError,
			message: flowcraftError.message,
			timestamp: (/* @__PURE__ */ new Date()).toISOString(),
			originalError: error
		});
	}
	/** Drop recorded errors for a node (e.g. after a successful retry). */
	clearError(nodeId) {
		this.errors = this.errors.filter((err) => err.nodeId !== nodeId);
	}
	markFallbackExecuted() {
		this.anyFallbackExecuted = true;
	}
	getContext() {
		return this.context;
	}
	/** @returns a defensive copy of the completed-node id set. */
	getCompletedNodes() {
		return new Set(this._completedNodes);
	}
	getErrors() {
		return this.errors;
	}
	getAnyFallbackExecuted() {
		return this.anyFallbackExecuted;
	}
	/** Pause a node awaiting external input and persist the awaiting state. */
	async markAsAwaiting(nodeId, details) {
		this._isAwaiting = true;
		this._awaitingNodeIds.add(nodeId);
		if (details) this._awaitingDetails.set(nodeId, details);
		await this.context.set("_awaitingNodeIds", Array.from(this._awaitingNodeIds));
		await this.context.set("_awaitingDetails", Object.fromEntries(this._awaitingDetails));
	}
	isAwaiting() {
		return this._isAwaiting && this._awaitingNodeIds.size > 0;
	}
	getAwaitingNodeIds() {
		return Array.from(this._awaitingNodeIds);
	}
	getAwaitingDetails(nodeId) {
		return this._awaitingDetails.get(nodeId);
	}
	/**
	 * Clear awaiting state for one node (or for all nodes when no id is
	 * given) and sync the "_awaiting*" keys in the context.
	 *
	 * FIX: the context set/delete calls were previously fire-and-forget
	 * floating promises — they could race with a subsequent toResult()
	 * snapshot and any rejection was unobservable. The method is now async
	 * and awaits the writes; callers may await the returned promise, and
	 * existing fire-and-forget call sites keep working unchanged.
	 */
	async clearAwaiting(nodeId) {
		if (nodeId) {
			this._awaitingNodeIds.delete(nodeId);
			this._awaitingDetails.delete(nodeId);
		} else {
			this._awaitingNodeIds.clear();
			this._awaitingDetails.clear();
		}
		this._isAwaiting = this._awaitingNodeIds.size > 0;
		if (this._awaitingNodeIds.size > 0) {
			await this.context.set("_awaitingNodeIds", Array.from(this._awaitingNodeIds));
			await this.context.set("_awaitingDetails", Object.fromEntries(this._awaitingDetails));
		} else {
			await this.context.delete("_awaitingNodeIds");
			await this.context.delete("_awaitingDetails");
		}
	}
	/**
	 * Derive the workflow status. Precedence: awaiting > completed-via-fallback
	 * > failed > completed (traversal done) > stalled.
	 */
	getStatus(isTraversalComplete = false) {
		if (this._isAwaiting) return "awaiting";
		if (this.anyFallbackExecuted) return "completed";
		if (this.errors.length > 0) return "failed";
		if (isTraversalComplete) return "completed";
		return "stalled";
	}
	/**
	 * Snapshot the run into a result object (context JSON, serialized form,
	 * status, errors). Stale "_awaiting*" keys are stripped when the run is
	 * no longer awaiting.
	 */
	async toResult(serializer, executionId) {
		const contextJSON = await this.context.toJSON();
		if (!this._isAwaiting && contextJSON._awaitingNodeIds) {
			delete contextJSON._awaitingNodeIds;
			delete contextJSON._awaitingDetails;
		}
		if (executionId) contextJSON._executionId = executionId;
		return {
			context: contextJSON,
			serializedContext: serializer.serialize(contextJSON),
			// NOTE(review): getStatus() is called without the traversal flag, so a
			// clean error-free run reports "stalled" here — confirm this is intended.
			status: this.getStatus(),
			errors: this.errors.length > 0 ? this.errors : void 0
		};
	}
};
|
|
125
|
+
|
|
126
|
+
//#endregion
|
|
127
|
+
export { WorkflowState };
|