flowcraft 1.0.0 → 2.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +37 -134
- package/dist/analysis.d.ts +43 -0
- package/dist/analysis.js +3 -0
- package/dist/chunk-4PELJWF7.js +29 -0
- package/dist/chunk-4PELJWF7.js.map +1 -0
- package/dist/chunk-55J6XMHW.js +3 -0
- package/dist/{chunk-7XUN3OQT.js.map → chunk-55J6XMHW.js.map} +1 -1
- package/dist/chunk-5EHIPX23.js +202 -0
- package/dist/chunk-5EHIPX23.js.map +1 -0
- package/dist/chunk-5QMPFUKA.js +40 -0
- package/dist/chunk-5QMPFUKA.js.map +1 -0
- package/dist/chunk-5ZWYSKMH.js +147 -0
- package/dist/chunk-5ZWYSKMH.js.map +1 -0
- package/dist/chunk-5ZXV3R5D.js +28 -0
- package/dist/chunk-5ZXV3R5D.js.map +1 -0
- package/dist/chunk-CO5BTPKI.js +410 -0
- package/dist/chunk-CO5BTPKI.js.map +1 -0
- package/dist/chunk-CSZ6EOWG.js +61 -0
- package/dist/chunk-CSZ6EOWG.js.map +1 -0
- package/dist/chunk-CYHZ2YVH.js +24 -0
- package/dist/chunk-CYHZ2YVH.js.map +1 -0
- package/dist/chunk-DSYAC4WB.js +27 -0
- package/dist/chunk-DSYAC4WB.js.map +1 -0
- package/dist/chunk-HMR2GEGE.js +3 -0
- package/dist/{chunk-F2RSES6P.js.map → chunk-HMR2GEGE.js.map} +1 -1
- package/dist/chunk-HN72TZY5.js +110 -0
- package/dist/chunk-HN72TZY5.js.map +1 -0
- package/dist/chunk-KWQHFT7E.js +49 -0
- package/dist/chunk-KWQHFT7E.js.map +1 -0
- package/dist/chunk-PH2IYZHV.js +48 -0
- package/dist/chunk-PH2IYZHV.js.map +1 -0
- package/dist/chunk-QRMUKDSP.js +141 -0
- package/dist/chunk-QRMUKDSP.js.map +1 -0
- package/dist/chunk-UETC63DP.js +65 -0
- package/dist/chunk-UETC63DP.js.map +1 -0
- package/dist/chunk-UMXW3TCY.js +165 -0
- package/dist/chunk-UMXW3TCY.js.map +1 -0
- package/dist/context.d.ts +23 -105
- package/dist/context.js +1 -1
- package/dist/errors.d.ts +15 -31
- package/dist/errors.js +1 -1
- package/dist/evaluator.d.ts +30 -0
- package/dist/evaluator.js +3 -0
- package/dist/evaluator.js.map +1 -0
- package/dist/flow.d.ts +55 -0
- package/dist/flow.js +4 -0
- package/dist/flow.js.map +1 -0
- package/dist/index.d.ts +15 -16
- package/dist/index.js +17 -25
- package/dist/linter.d.ts +24 -0
- package/dist/linter.js +4 -0
- package/dist/linter.js.map +1 -0
- package/dist/logger.d.ts +15 -40
- package/dist/logger.js +1 -1
- package/dist/node.d.ts +1 -0
- package/dist/node.js +3 -0
- package/dist/node.js.map +1 -0
- package/dist/runtime/adapter.d.ts +94 -0
- package/dist/runtime/adapter.js +15 -0
- package/dist/runtime/adapter.js.map +1 -0
- package/dist/runtime/executors.d.ts +26 -0
- package/dist/runtime/executors.js +4 -0
- package/dist/runtime/executors.js.map +1 -0
- package/dist/runtime/index.d.ts +7 -0
- package/dist/runtime/index.js +16 -0
- package/dist/runtime/runtime.d.ts +34 -0
- package/dist/runtime/runtime.js +14 -0
- package/dist/runtime/runtime.js.map +1 -0
- package/dist/runtime/state.d.ts +21 -0
- package/dist/runtime/state.js +4 -0
- package/dist/runtime/state.js.map +1 -0
- package/dist/runtime/traverser.d.ts +25 -0
- package/dist/runtime/traverser.js +5 -0
- package/dist/runtime/traverser.js.map +1 -0
- package/dist/runtime/types.d.ts +15 -0
- package/dist/runtime/types.js +3 -0
- package/dist/sanitizer.d.ts +10 -0
- package/dist/sanitizer.js +3 -0
- package/dist/{utils/sanitize.js.map → sanitizer.js.map} +1 -1
- package/dist/serializer.d.ts +16 -0
- package/dist/serializer.js +3 -0
- package/dist/serializer.js.map +1 -0
- package/dist/types-lG3xCzp_.d.ts +206 -0
- package/dist/types.d.ts +1 -3
- package/dist/types.js +1 -1
- package/package.json +10 -21
- package/LICENSE +0 -21
- package/dist/builder/graph/graph.d.ts +0 -57
- package/dist/builder/graph/graph.js +0 -21
- package/dist/builder/graph/graph.js.map +0 -1
- package/dist/builder/graph/index.d.ts +0 -8
- package/dist/builder/graph/index.js +0 -23
- package/dist/builder/graph/internal-nodes.d.ts +0 -59
- package/dist/builder/graph/internal-nodes.js +0 -20
- package/dist/builder/graph/internal-nodes.js.map +0 -1
- package/dist/builder/graph/runner.d.ts +0 -51
- package/dist/builder/graph/runner.js +0 -21
- package/dist/builder/graph/runner.js.map +0 -1
- package/dist/builder/graph/types.d.ts +0 -3
- package/dist/builder/graph/types.js +0 -3
- package/dist/builder/index.d.ts +0 -8
- package/dist/builder/index.js +0 -24
- package/dist/builder/index.js.map +0 -1
- package/dist/builder/patterns.d.ts +0 -136
- package/dist/builder/patterns.js +0 -19
- package/dist/builder/patterns.js.map +0 -1
- package/dist/chunk-3YMBNZ77.js +0 -441
- package/dist/chunk-3YMBNZ77.js.map +0 -1
- package/dist/chunk-64DNBF5W.js +0 -36
- package/dist/chunk-64DNBF5W.js.map +0 -1
- package/dist/chunk-6QCXIRLA.js +0 -18
- package/dist/chunk-6QCXIRLA.js.map +0 -1
- package/dist/chunk-7XUN3OQT.js +0 -3
- package/dist/chunk-AOHBHYF6.js +0 -7
- package/dist/chunk-AOHBHYF6.js.map +0 -1
- package/dist/chunk-BRFMFLR6.js +0 -85
- package/dist/chunk-BRFMFLR6.js.map +0 -1
- package/dist/chunk-ELEHMJPM.js +0 -13
- package/dist/chunk-ELEHMJPM.js.map +0 -1
- package/dist/chunk-F2RSES6P.js +0 -3
- package/dist/chunk-F6C6J7HK.js +0 -3
- package/dist/chunk-F6C6J7HK.js.map +0 -1
- package/dist/chunk-GMKJ34T2.js +0 -3
- package/dist/chunk-GMKJ34T2.js.map +0 -1
- package/dist/chunk-HEO3XL4Z.js +0 -328
- package/dist/chunk-HEO3XL4Z.js.map +0 -1
- package/dist/chunk-IIKTTIW5.js +0 -56
- package/dist/chunk-IIKTTIW5.js.map +0 -1
- package/dist/chunk-KOBEU2EM.js +0 -3
- package/dist/chunk-KOBEU2EM.js.map +0 -1
- package/dist/chunk-L5PK5VL2.js +0 -178
- package/dist/chunk-L5PK5VL2.js.map +0 -1
- package/dist/chunk-P3RPDZHO.js +0 -36
- package/dist/chunk-P3RPDZHO.js.map +0 -1
- package/dist/chunk-PNWOW52F.js +0 -19
- package/dist/chunk-PNWOW52F.js.map +0 -1
- package/dist/chunk-R27FIYR5.js +0 -62
- package/dist/chunk-R27FIYR5.js.map +0 -1
- package/dist/chunk-S4WFNGQG.js +0 -17
- package/dist/chunk-S4WFNGQG.js.map +0 -1
- package/dist/chunk-TS3M7MWA.js +0 -3
- package/dist/chunk-TS3M7MWA.js.map +0 -1
- package/dist/chunk-UY4PNPBX.js +0 -156
- package/dist/chunk-UY4PNPBX.js.map +0 -1
- package/dist/chunk-VMH2LRM6.js +0 -114
- package/dist/chunk-VMH2LRM6.js.map +0 -1
- package/dist/chunk-VZDHIOCH.js +0 -76
- package/dist/chunk-VZDHIOCH.js.map +0 -1
- package/dist/chunk-WGVHM7DU.js +0 -66
- package/dist/chunk-WGVHM7DU.js.map +0 -1
- package/dist/chunk-WR5PDOPP.js +0 -91
- package/dist/chunk-WR5PDOPP.js.map +0 -1
- package/dist/chunk-YR433ZDA.js +0 -20
- package/dist/chunk-YR433ZDA.js.map +0 -1
- package/dist/executors/in-memory.d.ts +0 -39
- package/dist/executors/in-memory.js +0 -6
- package/dist/executors/in-memory.js.map +0 -1
- package/dist/executors/types.d.ts +0 -3
- package/dist/executors/types.js +0 -3
- package/dist/executors/types.js.map +0 -1
- package/dist/functions.d.ts +0 -88
- package/dist/functions.js +0 -21
- package/dist/functions.js.map +0 -1
- package/dist/types-U76Ukj96.d.ts +0 -609
- package/dist/utils/analysis.d.ts +0 -75
- package/dist/utils/analysis.js +0 -3
- package/dist/utils/index.d.ts +0 -8
- package/dist/utils/index.js +0 -10
- package/dist/utils/index.js.map +0 -1
- package/dist/utils/mermaid.d.ts +0 -46
- package/dist/utils/mermaid.js +0 -4
- package/dist/utils/mermaid.js.map +0 -1
- package/dist/utils/middleware.d.ts +0 -11
- package/dist/utils/middleware.js +0 -3
- package/dist/utils/middleware.js.map +0 -1
- package/dist/utils/sanitize.d.ts +0 -19
- package/dist/utils/sanitize.js +0 -3
- package/dist/utils/sleep.d.ts +0 -9
- package/dist/utils/sleep.js +0 -4
- package/dist/utils/sleep.js.map +0 -1
- package/dist/workflow/AbstractNode.d.ts +0 -3
- package/dist/workflow/AbstractNode.js +0 -4
- package/dist/workflow/AbstractNode.js.map +0 -1
- package/dist/workflow/Flow.d.ts +0 -3
- package/dist/workflow/Flow.js +0 -16
- package/dist/workflow/Flow.js.map +0 -1
- package/dist/workflow/Node.d.ts +0 -3
- package/dist/workflow/Node.js +0 -15
- package/dist/workflow/Node.js.map +0 -1
- package/dist/workflow/index.d.ts +0 -4
- package/dist/workflow/index.js +0 -18
- package/dist/workflow/index.js.map +0 -1
- package/dist/workflow/node-patterns.d.ts +0 -55
- package/dist/workflow/node-patterns.js +0 -16
- package/dist/workflow/node-patterns.js.map +0 -1
- package/dist/workflow/registry.d.ts +0 -17
- package/dist/workflow/registry.js +0 -3
- package/dist/workflow/registry.js.map +0 -1
- /package/dist/{utils/analysis.js.map → analysis.js.map} +0 -0
- /package/dist/{builder/graph → runtime}/index.js.map +0 -0
- /package/dist/{builder/graph → runtime}/types.js.map +0 -0
|
@@ -0,0 +1,147 @@
|
|
|
1
|
+
import { isNodeClass } from './chunk-5QMPFUKA.js';
|
|
2
|
+
|
|
3
|
+
// src/flow.ts
|
|
4
|
+
function _hashFunction(fn) {
|
|
5
|
+
const source = fn.toString();
|
|
6
|
+
let hash = 0;
|
|
7
|
+
for (let i = 0; i < source.length; i++) {
|
|
8
|
+
const char = source.charCodeAt(i);
|
|
9
|
+
hash = (hash << 5) - hash + char;
|
|
10
|
+
hash = hash & hash;
|
|
11
|
+
}
|
|
12
|
+
return Math.abs(hash).toString(16);
|
|
13
|
+
}
|
|
14
|
+
var Flow = class {
|
|
15
|
+
blueprint;
|
|
16
|
+
functionRegistry;
|
|
17
|
+
loopControllerIds;
|
|
18
|
+
loopDefinitions;
|
|
19
|
+
constructor(id) {
|
|
20
|
+
this.blueprint = { id, nodes: [], edges: [] };
|
|
21
|
+
this.functionRegistry = /* @__PURE__ */ new Map();
|
|
22
|
+
this.loopControllerIds = /* @__PURE__ */ new Map();
|
|
23
|
+
this.loopDefinitions = [];
|
|
24
|
+
}
|
|
25
|
+
node(id, implementation, options) {
|
|
26
|
+
let usesKey;
|
|
27
|
+
if (isNodeClass(implementation)) {
|
|
28
|
+
usesKey = implementation.name && implementation.name !== "BaseNode" ? implementation.name : `class_${_hashFunction(implementation)}`;
|
|
29
|
+
this.functionRegistry.set(usesKey, implementation);
|
|
30
|
+
} else {
|
|
31
|
+
usesKey = `fn_${_hashFunction(implementation)}`;
|
|
32
|
+
this.functionRegistry.set(usesKey, implementation);
|
|
33
|
+
}
|
|
34
|
+
const nodeDef = { id, uses: usesKey, ...options };
|
|
35
|
+
this.blueprint.nodes?.push(nodeDef);
|
|
36
|
+
return this;
|
|
37
|
+
}
|
|
38
|
+
edge(source, target, options) {
|
|
39
|
+
const edgeDef = { source, target, ...options };
|
|
40
|
+
this.blueprint.edges?.push(edgeDef);
|
|
41
|
+
return this;
|
|
42
|
+
}
|
|
43
|
+
/**
|
|
44
|
+
* Creates a batch processing pattern.
|
|
45
|
+
* It takes an input array, runs a worker node on each item in parallel, and gathers the results.
|
|
46
|
+
* @param id The base ID for this batch operation.
|
|
47
|
+
* @param worker The node implementation to run on each item.
|
|
48
|
+
* @param options Configuration for the batch operation.
|
|
49
|
+
* @param options.inputKey The key in the context that holds the input array for the batch.
|
|
50
|
+
* @param options.outputKey The key in the context where the array of results will be stored.
|
|
51
|
+
* @returns The Flow instance for chaining.
|
|
52
|
+
*/
|
|
53
|
+
batch(id, worker, options) {
|
|
54
|
+
const { inputKey, outputKey } = options;
|
|
55
|
+
const scatterId = `${id}_scatter`;
|
|
56
|
+
const gatherId = `${id}_gather`;
|
|
57
|
+
let workerUsesKey;
|
|
58
|
+
if (isNodeClass(worker)) {
|
|
59
|
+
workerUsesKey = worker.name && worker.name !== "BaseNode" ? worker.name : `class_batch_worker_${_hashFunction(worker)}`;
|
|
60
|
+
this.functionRegistry.set(workerUsesKey, worker);
|
|
61
|
+
} else {
|
|
62
|
+
workerUsesKey = `fn_batch_worker_${_hashFunction(worker)}`;
|
|
63
|
+
this.functionRegistry.set(workerUsesKey, worker);
|
|
64
|
+
}
|
|
65
|
+
this.blueprint.nodes?.push({
|
|
66
|
+
id: scatterId,
|
|
67
|
+
uses: "batch-scatter",
|
|
68
|
+
// built-in
|
|
69
|
+
inputs: inputKey,
|
|
70
|
+
params: { workerUsesKey, outputKey, gatherNodeId: gatherId }
|
|
71
|
+
});
|
|
72
|
+
this.blueprint.nodes?.push({
|
|
73
|
+
id: gatherId,
|
|
74
|
+
uses: "batch-gather",
|
|
75
|
+
// built-in
|
|
76
|
+
params: { outputKey },
|
|
77
|
+
config: { joinStrategy: "all" }
|
|
78
|
+
// important: must wait for all scattered jobs
|
|
79
|
+
});
|
|
80
|
+
this.edge(scatterId, gatherId);
|
|
81
|
+
return this;
|
|
82
|
+
}
|
|
83
|
+
/**
|
|
84
|
+
* Creates a loop pattern in the workflow graph.
|
|
85
|
+
* @param id A unique identifier for the loop construct.
|
|
86
|
+
* @param options Defines the start, end, and continuation condition of the loop.
|
|
87
|
+
* @param options.startNodeId The ID of the first node inside the loop body.
|
|
88
|
+
* @param options.endNodeId The ID of the last node inside the loop body.
|
|
89
|
+
* @param options.condition An expression that, if true, causes the loop to run again.
|
|
90
|
+
*/
|
|
91
|
+
loop(id, options) {
|
|
92
|
+
const { startNodeId, endNodeId, condition } = options;
|
|
93
|
+
const controllerId = `${id}-loop`;
|
|
94
|
+
this.loopControllerIds.set(id, controllerId);
|
|
95
|
+
this.loopDefinitions.push({ id, startNodeId, endNodeId });
|
|
96
|
+
this.blueprint.nodes?.push({
|
|
97
|
+
id: controllerId,
|
|
98
|
+
uses: "loop-controller",
|
|
99
|
+
// built-in
|
|
100
|
+
params: { condition },
|
|
101
|
+
config: { joinStrategy: "any" }
|
|
102
|
+
// to allow re-execution on each loop iteration
|
|
103
|
+
});
|
|
104
|
+
this.edge(endNodeId, controllerId);
|
|
105
|
+
this.edge(controllerId, startNodeId, {
|
|
106
|
+
action: "continue",
|
|
107
|
+
transform: `context.${endNodeId}`
|
|
108
|
+
// pass the end node's value to the start node
|
|
109
|
+
});
|
|
110
|
+
return this;
|
|
111
|
+
}
|
|
112
|
+
getLoopControllerId(id) {
|
|
113
|
+
const controllerId = this.loopControllerIds.get(id);
|
|
114
|
+
if (!controllerId) {
|
|
115
|
+
throw new Error(`Loop with id '${id}' not found. Ensure you have defined it using the .loop() method.`);
|
|
116
|
+
}
|
|
117
|
+
return controllerId;
|
|
118
|
+
}
|
|
119
|
+
toBlueprint() {
|
|
120
|
+
if (!this.blueprint.nodes || this.blueprint.nodes.length === 0) {
|
|
121
|
+
throw new Error("Cannot build a blueprint with no nodes.");
|
|
122
|
+
}
|
|
123
|
+
for (const loopDef of this.loopDefinitions) {
|
|
124
|
+
const startNode = this.blueprint.nodes?.find((n) => n.id === loopDef.startNodeId);
|
|
125
|
+
const endNode = this.blueprint.nodes?.find((n) => n.id === loopDef.endNodeId);
|
|
126
|
+
if (!startNode) {
|
|
127
|
+
throw new Error(`Loop '${loopDef.id}' references non-existent start node '${loopDef.startNodeId}'.`);
|
|
128
|
+
}
|
|
129
|
+
if (!endNode) {
|
|
130
|
+
throw new Error(`Loop '${loopDef.id}' references non-existent end node '${loopDef.endNodeId}'.`);
|
|
131
|
+
}
|
|
132
|
+
startNode.config = { ...startNode.config, joinStrategy: "any" };
|
|
133
|
+
endNode.config = { ...endNode.config, joinStrategy: "any" };
|
|
134
|
+
}
|
|
135
|
+
return this.blueprint;
|
|
136
|
+
}
|
|
137
|
+
getFunctionRegistry() {
|
|
138
|
+
return this.functionRegistry;
|
|
139
|
+
}
|
|
140
|
+
};
|
|
141
|
+
function createFlow(id) {
|
|
142
|
+
return new Flow(id);
|
|
143
|
+
}
|
|
144
|
+
|
|
145
|
+
export { Flow, createFlow };
|
|
146
|
+
//# sourceMappingURL=chunk-5ZWYSKMH.js.map
|
|
147
|
+
//# sourceMappingURL=chunk-5ZWYSKMH.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/flow.ts"],"names":[],"mappings":";;;AAMA,SAAS,cAAc,EAAA,EAAwF;AAC9G,EAAA,MAAM,MAAA,GAAS,GAAG,QAAA,EAAS;AAC3B,EAAA,IAAI,IAAA,GAAO,CAAA;AACX,EAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,MAAA,CAAO,QAAQ,CAAA,EAAA,EAAK;AACvC,IAAA,MAAM,IAAA,GAAO,MAAA,CAAO,UAAA,CAAW,CAAC,CAAA;AAChC,IAAA,IAAA,GAAA,CAAQ,IAAA,IAAQ,KAAK,IAAA,GAAO,IAAA;AAC5B,IAAA,IAAA,GAAO,IAAA,GAAO,IAAA;AAAA,EACf;AACA,EAAA,OAAO,IAAA,CAAK,GAAA,CAAI,IAAI,CAAA,CAAE,SAAS,EAAE,CAAA;AAClC;AAKO,IAAM,OAAN,MAGL;AAAA,EACO,SAAA;AAAA,EACA,gBAAA;AAAA,EACA,iBAAA;AAAA,EACA,eAAA;AAAA,EAMR,YAAY,EAAA,EAAY;AACvB,IAAA,IAAA,CAAK,SAAA,GAAY,EAAE,EAAA,EAAI,KAAA,EAAO,EAAC,EAAG,KAAA,EAAO,EAAC,EAAE;AAC5C,IAAA,IAAA,CAAK,gBAAA,uBAAuB,GAAA,EAAI;AAChC,IAAA,IAAA,CAAK,iBAAA,uBAAwB,GAAA,EAAI;AACjC,IAAA,IAAA,CAAK,kBAAkB,EAAC;AAAA,EACzB;AAAA,EAEA,IAAA,CACC,EAAA,EACA,cAAA,EAGA,OAAA,EACO;AACP,IAAA,IAAI,OAAA;AAEJ,IAAA,IAAI,WAAA,CAAY,cAAc,CAAA,EAAG;AAChC,MAAA,OAAA,GACC,cAAA,CAAe,IAAA,IAAQ,cAAA,CAAe,IAAA,KAAS,UAAA,GAC5C,eAAe,IAAA,GACf,CAAA,MAAA,EAAS,aAAA,CAAc,cAAc,CAAC,CAAA,CAAA;AAC1C,MAAA,IAAA,CAAK,gBAAA,CAAiB,GAAA,CAAI,OAAA,EAAS,cAAc,CAAA;AAAA,IAClD,CAAA,MAAO;AACN,MAAA,OAAA,GAAU,CAAA,GAAA,EAAM,aAAA,CAAc,cAAc,CAAC,CAAA,CAAA;AAC7C,MAAA,IAAA,CAAK,gBAAA,CAAiB,GAAA,CAAI,OAAA,EAAS,cAAyC,CAAA;AAAA,IAC7E;AAEA,IAAA,MAAM,UAA0B,EAAE,EAAA,EAAI,IAAA,EAAM,OAAA,EAAS,GAAG,OAAA,EAAQ;AAChE,IAAA,IAAA,CAAK,SAAA,CAAU,KAAA,EAAO,IAAA,CAAK,OAAO,CAAA;AAClC,IAAA,OAAO,IAAA;AAAA,EACR;AAAA,EAEA,IAAA,CAAK,MAAA,EAAgB,MAAA,EAAgB,OAAA,EAA2D;AAC/F,IAAA,MAAM,OAAA,GAA0B,EAAE,MAAA,EAAQ,MAAA,EAAQ,GAAG,OAAA,EAAQ;AAC7D,IAAA,IAAA,CAAK,SAAA,CAAU,KAAA,EAAO,IAAA,CAAK,OAAO,CAAA;AAClC,IAAA,OAAO,IAAA;AAAA,EACR;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYA,KAAA,CACC,EAAA,EACA,MAAA,EAGA,OAAA,EAMO;AACP,IAAA,MAAM,EAAE,QAAA,EAAU,SAAA,EAAU,GAAI,OAAA;AAChC,IAAA,MAAM,SAAA,GAAY,GAAG,EAAE,CAAA,QAAA,CAAA;AACvB,IAAA,MAAM,QAAA,GAAW,GAAG,EAAE,CAAA,OAAA,CAAA;AAGtB,IAAA,IAAI,aAAA;AACJ,IAAA,IAAI,WAAA,CAAY,MAAM,CAAA,EAAG;AACxB,MAAA,aAAA,GACC,MAAA,CAAO,IAAA,IAAQ,MAAA,
CAAO,IAAA,KAAS,UAAA,GAAa,OAAO,IAAA,GAAO,CAAA,mBAAA,EAAsB,aAAA,CAAc,MAAM,CAAC,CAAA,CAAA;AACtG,MAAA,IAAA,CAAK,gBAAA,CAAiB,GAAA,CAAI,aAAA,EAAe,MAAM,CAAA;AAAA,IAChD,CAAA,MAAO;AACN,MAAA,aAAA,GAAgB,CAAA,gBAAA,EAAmB,aAAA,CAAc,MAAM,CAAC,CAAA,CAAA;AACxD,MAAA,IAAA,CAAK,gBAAA,CAAiB,GAAA,CAAI,aAAA,EAAe,MAAiC,CAAA;AAAA,IAC3E;AAGA,IAAA,IAAA,CAAK,SAAA,CAAU,OAAO,IAAA,CAAK;AAAA,MAC1B,EAAA,EAAI,SAAA;AAAA,MACJ,IAAA,EAAM,eAAA;AAAA;AAAA,MACN,MAAA,EAAQ,QAAA;AAAA,MACR,MAAA,EAAQ,EAAE,aAAA,EAAe,SAAA,EAAW,cAAc,QAAA;AAAS,KAC3D,CAAA;AAGD,IAAA,IAAA,CAAK,SAAA,CAAU,OAAO,IAAA,CAAK;AAAA,MAC1B,EAAA,EAAI,QAAA;AAAA,MACJ,IAAA,EAAM,cAAA;AAAA;AAAA,MACN,MAAA,EAAQ,EAAE,SAAA,EAAU;AAAA,MACpB,MAAA,EAAQ,EAAE,YAAA,EAAc,KAAA;AAAM;AAAA,KAC9B,CAAA;AAGD,IAAA,IAAA,CAAK,IAAA,CAAK,WAAW,QAAQ,CAAA;AAE7B,IAAA,OAAO,IAAA;AAAA,EACR;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,IAAA,CACC,IACA,OAAA,EAQO;AACP,IAAA,MAAM,EAAE,WAAA,EAAa,SAAA,EAAW,SAAA,EAAU,GAAI,OAAA;AAC9C,IAAA,MAAM,YAAA,GAAe,GAAG,EAAE,CAAA,KAAA,CAAA;AAE1B,IAAA,IAAA,CAAK,iBAAA,CAAkB,GAAA,CAAI,EAAA,EAAI,YAAY,CAAA;AAE3C,IAAA,IAAA,CAAK,gBAAgB,IAAA,CAAK,EAAE,EAAA,EAAI,WAAA,EAAa,WAAW,CAAA;AAGxD,IAAA,IAAA,CAAK,SAAA,CAAU,OAAO,IAAA,CAAK;AAAA,MAC1B,EAAA,EAAI,YAAA;AAAA,MACJ,IAAA,EAAM,iBAAA;AAAA;AAAA,MACN,MAAA,EAAQ,EAAE,SAAA,EAAU;AAAA,MACpB,MAAA,EAAQ,EAAE,YAAA,EAAc,KAAA;AAAM;AAAA,KAC9B,CAAA;AAED,IAAA,IAAA,CAAK,IAAA,CAAK,WAAW,YAAY,CAAA;AAEjC,IAAA,IAAA,CAAK,IAAA,CAAK,cAAc,WAAA,EAAa;AAAA,MACpC,MAAA,EAAQ,UAAA;AAAA,MACR,SAAA,EAAW,WAAW,SAAS,CAAA;AAAA;AAAA,KAC/B,CAAA;AAED,IAAA,OAAO,IAAA;AAAA,EACR;AAAA,EAEA,oBAAoB,EAAA,EAAoB;AACvC,IAAA,MAAM,YAAA,GAAe,IAAA,CAAK,iBAAA,CAAkB,GAAA,CAAI,EAAE,CAAA;AAClD,IAAA,IAAI,CAAC,YAAA,EAAc;AAClB,MAAA,MAAM,IAAI,KAAA,CAAM,CAAA,cAAA,EAAiB,EAAE,CAAA,iEAAA,CAAmE,CAAA;AAAA,IACvG;AACA,IAAA,OAAO,YAAA;AAAA,EACR;AAAA,EAEA,WAAA,GAAiC;AAChC,IAAA,IAAI,CAAC,KAAK,SAAA,CAAU,KAAA,IAAS,KAAK,SAAA,CAAU,KAAA,CAAM,WAAW,CAAA,EAAG;AAC/D,MAAA,MAAM,IAAI,MAAM,yCAAyC,CAAA;AAAA,IAC1D;AAEA,IAAA,KAAA,MAAW,OAAA,IAAW,KAAK,eAAA,EAAiB;AAC3C,MAAA,MAAM,SAAA,GAAY,IAAA,CAAK,SAAA,CAAU,
KAAA,EAAO,IAAA,CAAK,CAAC,CAAA,KAAM,CAAA,CAAE,EAAA,KAAO,OAAA,CAAQ,WAAW,CAAA;AAChF,MAAA,MAAM,OAAA,GAAU,IAAA,CAAK,SAAA,CAAU,KAAA,EAAO,IAAA,CAAK,CAAC,CAAA,KAAM,CAAA,CAAE,EAAA,KAAO,OAAA,CAAQ,SAAS,CAAA;AAE5E,MAAA,IAAI,CAAC,SAAA,EAAW;AACf,QAAA,MAAM,IAAI,MAAM,CAAA,MAAA,EAAS,OAAA,CAAQ,EAAE,CAAA,sCAAA,EAAyC,OAAA,CAAQ,WAAW,CAAA,EAAA,CAAI,CAAA;AAAA,MACpG;AACA,MAAA,IAAI,CAAC,OAAA,EAAS;AACb,QAAA,MAAM,IAAI,MAAM,CAAA,MAAA,EAAS,OAAA,CAAQ,EAAE,CAAA,oCAAA,EAAuC,OAAA,CAAQ,SAAS,CAAA,EAAA,CAAI,CAAA;AAAA,MAChG;AAEA,MAAA,SAAA,CAAU,SAAS,EAAE,GAAG,SAAA,CAAU,MAAA,EAAQ,cAAc,KAAA,EAAM;AAC9D,MAAA,OAAA,CAAQ,SAAS,EAAE,GAAG,OAAA,CAAQ,MAAA,EAAQ,cAAc,KAAA,EAAM;AAAA,IAC3D;AAEA,IAAA,OAAO,IAAA,CAAK,SAAA;AAAA,EACb;AAAA,EAEA,mBAAA,GAAsB;AACrB,IAAA,OAAO,IAAA,CAAK,gBAAA;AAAA,EACb;AACD;AAKO,SAAS,WAGd,EAAA,EAA2C;AAC5C,EAAA,OAAO,IAAI,KAAK,EAAE,CAAA;AACnB","file":"chunk-5ZWYSKMH.js","sourcesContent":["import { isNodeClass } from './node'\nimport type { EdgeDefinition, NodeClass, NodeDefinition, NodeFunction, WorkflowBlueprint } from './types'\n\n/**\n * Generates a deterministic hash for a function based on its source code.\n */\nfunction _hashFunction(fn: NodeFunction<any, any, any, any, any> | NodeClass<any, any, any, any, any>): string {\n\tconst source = fn.toString()\n\tlet hash = 0\n\tfor (let i = 0; i < source.length; i++) {\n\t\tconst char = source.charCodeAt(i)\n\t\thash = (hash << 5) - hash + char\n\t\thash = hash & hash // Convert to 32-bit integer\n\t}\n\treturn Math.abs(hash).toString(16)\n}\n\n/**\n * A fluent API for programmatically constructing a WorkflowBlueprint.\n */\nexport class Flow<\n\tTContext extends Record<string, any> = Record<string, any>,\n\tTDependencies extends Record<string, any> = Record<string, any>,\n> {\n\tprivate blueprint: Partial<WorkflowBlueprint>\n\tprivate functionRegistry: Map<string, NodeFunction | NodeClass>\n\tprivate loopControllerIds: Map<string, string>\n\tprivate loopDefinitions: Array<{\n\t\tid: string\n\t\tstartNodeId: string\n\t\tendNodeId: 
string\n\t}>\n\n\tconstructor(id: string) {\n\t\tthis.blueprint = { id, nodes: [], edges: [] }\n\t\tthis.functionRegistry = new Map()\n\t\tthis.loopControllerIds = new Map()\n\t\tthis.loopDefinitions = []\n\t}\n\n\tnode<TInput = any, TOutput = any, TAction extends string = string>(\n\t\tid: string,\n\t\timplementation:\n\t\t\t| NodeFunction<TContext, TDependencies, TInput, TOutput, TAction>\n\t\t\t| NodeClass<TContext, TDependencies, TInput, TOutput, TAction>,\n\t\toptions?: Omit<NodeDefinition, 'id' | 'uses'>,\n\t): this {\n\t\tlet usesKey: string\n\n\t\tif (isNodeClass(implementation)) {\n\t\t\tusesKey =\n\t\t\t\timplementation.name && implementation.name !== 'BaseNode'\n\t\t\t\t\t? implementation.name\n\t\t\t\t\t: `class_${_hashFunction(implementation)}`\n\t\t\tthis.functionRegistry.set(usesKey, implementation)\n\t\t} else {\n\t\t\tusesKey = `fn_${_hashFunction(implementation)}`\n\t\t\tthis.functionRegistry.set(usesKey, implementation as unknown as NodeFunction)\n\t\t}\n\n\t\tconst nodeDef: NodeDefinition = { id, uses: usesKey, ...options }\n\t\tthis.blueprint.nodes?.push(nodeDef)\n\t\treturn this\n\t}\n\n\tedge(source: string, target: string, options?: Omit<EdgeDefinition, 'source' | 'target'>): this {\n\t\tconst edgeDef: EdgeDefinition = { source, target, ...options }\n\t\tthis.blueprint.edges?.push(edgeDef)\n\t\treturn this\n\t}\n\n\t/**\n\t * Creates a batch processing pattern.\n\t * It takes an input array, runs a worker node on each item in parallel, and gathers the results.\n\t * @param id The base ID for this batch operation.\n\t * @param worker The node implementation to run on each item.\n\t * @param options Configuration for the batch operation.\n\t * @param options.inputKey The key in the context that holds the input array for the batch.\n\t * @param options.outputKey The key in the context where the array of results will be stored.\n\t * @returns The Flow instance for chaining.\n\t */\n\tbatch<TInput = any, TOutput = any, TAction extends string = 
string>(\n\t\tid: string,\n\t\tworker:\n\t\t\t| NodeFunction<TContext, TDependencies, TInput, TOutput, TAction>\n\t\t\t| NodeClass<TContext, TDependencies, TInput, TOutput, TAction>,\n\t\toptions: {\n\t\t\t/** The key in the context that holds the input array for the batch. */\n\t\t\tinputKey: string\n\t\t\t/** The key in the context where the array of results will be stored. */\n\t\t\toutputKey: string\n\t\t},\n\t): this {\n\t\tconst { inputKey, outputKey } = options\n\t\tconst scatterId = `${id}_scatter`\n\t\tconst gatherId = `${id}_gather`\n\n\t\t// register worker implementation under a unique key.\n\t\tlet workerUsesKey: string\n\t\tif (isNodeClass(worker)) {\n\t\t\tworkerUsesKey =\n\t\t\t\tworker.name && worker.name !== 'BaseNode' ? worker.name : `class_batch_worker_${_hashFunction(worker)}`\n\t\t\tthis.functionRegistry.set(workerUsesKey, worker)\n\t\t} else {\n\t\t\tworkerUsesKey = `fn_batch_worker_${_hashFunction(worker)}`\n\t\t\tthis.functionRegistry.set(workerUsesKey, worker as unknown as NodeFunction)\n\t\t}\n\n\t\t// scatter node: takes an array and dynamically schedules worker nodes\n\t\tthis.blueprint.nodes?.push({\n\t\t\tid: scatterId,\n\t\t\tuses: 'batch-scatter', // built-in\n\t\t\tinputs: inputKey,\n\t\t\tparams: { workerUsesKey, outputKey, gatherNodeId: gatherId },\n\t\t})\n\n\t\t// gather node: waits for all workers to finish and collects the results\n\t\tthis.blueprint.nodes?.push({\n\t\t\tid: gatherId,\n\t\t\tuses: 'batch-gather', // built-in\n\t\t\tparams: { outputKey },\n\t\t\tconfig: { joinStrategy: 'all' }, // important: must wait for all scattered jobs\n\t\t})\n\n\t\t// edge to connect scatter and gather nodes. 
orchestrator will manage dynamic workers\n\t\tthis.edge(scatterId, gatherId)\n\n\t\treturn this\n\t}\n\n\t/**\n\t * Creates a loop pattern in the workflow graph.\n\t * @param id A unique identifier for the loop construct.\n\t * @param options Defines the start, end, and continuation condition of the loop.\n\t * @param options.startNodeId The ID of the first node inside the loop body.\n\t * @param options.endNodeId The ID of the last node inside the loop body.\n\t * @param options.condition An expression that, if true, causes the loop to run again.\n\t */\n\tloop(\n\t\tid: string,\n\t\toptions: {\n\t\t\t/** The ID of the first node inside the loop body. */\n\t\t\tstartNodeId: string\n\t\t\t/** The ID of the last node inside the loop body. */\n\t\t\tendNodeId: string\n\t\t\t/** An expression that, if true, causes the loop to run again. */\n\t\t\tcondition: string\n\t\t},\n\t): this {\n\t\tconst { startNodeId, endNodeId, condition } = options\n\t\tconst controllerId = `${id}-loop`\n\n\t\tthis.loopControllerIds.set(id, controllerId)\n\n\t\tthis.loopDefinitions.push({ id, startNodeId, endNodeId })\n\n\t\t// controller node: evaluates the loop condition\n\t\tthis.blueprint.nodes?.push({\n\t\t\tid: controllerId,\n\t\t\tuses: 'loop-controller', // built-in\n\t\t\tparams: { condition },\n\t\t\tconfig: { joinStrategy: 'any' }, // to allow re-execution on each loop iteration\n\t\t})\n\n\t\tthis.edge(endNodeId, controllerId)\n\n\t\tthis.edge(controllerId, startNodeId, {\n\t\t\taction: 'continue',\n\t\t\ttransform: `context.${endNodeId}`, // pass the end node's value to the start node\n\t\t})\n\n\t\treturn this\n\t}\n\n\tgetLoopControllerId(id: string): string {\n\t\tconst controllerId = this.loopControllerIds.get(id)\n\t\tif (!controllerId) {\n\t\t\tthrow new Error(`Loop with id '${id}' not found. 
Ensure you have defined it using the .loop() method.`)\n\t\t}\n\t\treturn controllerId\n\t}\n\n\ttoBlueprint(): WorkflowBlueprint {\n\t\tif (!this.blueprint.nodes || this.blueprint.nodes.length === 0) {\n\t\t\tthrow new Error('Cannot build a blueprint with no nodes.')\n\t\t}\n\n\t\tfor (const loopDef of this.loopDefinitions) {\n\t\t\tconst startNode = this.blueprint.nodes?.find((n) => n.id === loopDef.startNodeId)\n\t\t\tconst endNode = this.blueprint.nodes?.find((n) => n.id === loopDef.endNodeId)\n\n\t\t\tif (!startNode) {\n\t\t\t\tthrow new Error(`Loop '${loopDef.id}' references non-existent start node '${loopDef.startNodeId}'.`)\n\t\t\t}\n\t\t\tif (!endNode) {\n\t\t\t\tthrow new Error(`Loop '${loopDef.id}' references non-existent end node '${loopDef.endNodeId}'.`)\n\t\t\t}\n\n\t\t\tstartNode.config = { ...startNode.config, joinStrategy: 'any' }\n\t\t\tendNode.config = { ...endNode.config, joinStrategy: 'any' }\n\t\t}\n\n\t\treturn this.blueprint as WorkflowBlueprint\n\t}\n\n\tgetFunctionRegistry() {\n\t\treturn this.functionRegistry\n\t}\n}\n\n/**\n * Helper function to create a new Flow builder instance.\n */\nexport function createFlow<\n\tTContext extends Record<string, any> = Record<string, any>,\n\tTDependencies extends Record<string, any> = Record<string, any>,\n>(id: string): Flow<TContext, TDependencies> {\n\treturn new Flow(id)\n}\n"]}
|
|
@@ -0,0 +1,28 @@
|
|
|
1
|
+
// src/errors.ts
|
|
2
|
+
var NodeExecutionError = class extends Error {
|
|
3
|
+
constructor(message, nodeId, blueprintId, originalError, executionId) {
|
|
4
|
+
super(message);
|
|
5
|
+
this.nodeId = nodeId;
|
|
6
|
+
this.blueprintId = blueprintId;
|
|
7
|
+
this.originalError = originalError;
|
|
8
|
+
this.executionId = executionId;
|
|
9
|
+
this.name = "NodeExecutionError";
|
|
10
|
+
}
|
|
11
|
+
};
|
|
12
|
+
var CancelledWorkflowError = class extends Error {
|
|
13
|
+
constructor(message = "Workflow execution was cancelled.", executionId) {
|
|
14
|
+
super(message);
|
|
15
|
+
this.executionId = executionId;
|
|
16
|
+
this.name = "CancelledWorkflowError";
|
|
17
|
+
}
|
|
18
|
+
};
|
|
19
|
+
var FatalNodeExecutionError = class extends NodeExecutionError {
|
|
20
|
+
constructor(message, nodeId, blueprintId, originalError, executionId) {
|
|
21
|
+
super(message, nodeId, blueprintId, originalError, executionId);
|
|
22
|
+
this.name = "FatalNodeExecutionError";
|
|
23
|
+
}
|
|
24
|
+
};
|
|
25
|
+
|
|
26
|
+
export { CancelledWorkflowError, FatalNodeExecutionError, NodeExecutionError };
|
|
27
|
+
//# sourceMappingURL=chunk-5ZXV3R5D.js.map
|
|
28
|
+
//# sourceMappingURL=chunk-5ZXV3R5D.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/errors.ts"],"names":[],"mappings":";AACO,IAAM,kBAAA,GAAN,cAAiC,KAAA,CAAM;AAAA,EAC7C,WAAA,CACC,OAAA,EACgB,MAAA,EACA,WAAA,EACA,eACA,WAAA,EACf;AACD,IAAA,KAAA,CAAM,OAAO,CAAA;AALG,IAAA,IAAA,CAAA,MAAA,GAAA,MAAA;AACA,IAAA,IAAA,CAAA,WAAA,GAAA,WAAA;AACA,IAAA,IAAA,CAAA,aAAA,GAAA,aAAA;AACA,IAAA,IAAA,CAAA,WAAA,GAAA,WAAA;AAGhB,IAAA,IAAA,CAAK,IAAA,GAAO,oBAAA;AAAA,EACb;AACD;AAGO,IAAM,sBAAA,GAAN,cAAqC,KAAA,CAAM;AAAA,EACjD,WAAA,CACC,OAAA,GAAU,mCAAA,EACM,WAAA,EACf;AACD,IAAA,KAAA,CAAM,OAAO,CAAA;AAFG,IAAA,IAAA,CAAA,WAAA,GAAA,WAAA;AAGhB,IAAA,IAAA,CAAK,IAAA,GAAO,wBAAA;AAAA,EACb;AACD;AAGO,IAAM,uBAAA,GAAN,cAAsC,kBAAA,CAAmB;AAAA,EAC/D,WAAA,CAAY,OAAA,EAAiB,MAAA,EAAgB,WAAA,EAAqB,eAAuB,WAAA,EAAsB;AAC9G,IAAA,KAAA,CAAM,OAAA,EAAS,MAAA,EAAQ,WAAA,EAAa,aAAA,EAAe,WAAW,CAAA;AAC9D,IAAA,IAAA,CAAK,IAAA,GAAO,yBAAA;AAAA,EACb;AACD","file":"chunk-5ZXV3R5D.js","sourcesContent":["/** Error thrown when a node fails during execution. */\nexport class NodeExecutionError extends Error {\n\tconstructor(\n\t\tmessage: string,\n\t\tpublic readonly nodeId: string,\n\t\tpublic readonly blueprintId: string,\n\t\tpublic readonly originalError?: Error,\n\t\tpublic readonly executionId?: string,\n\t) {\n\t\tsuper(message)\n\t\tthis.name = 'NodeExecutionError'\n\t}\n}\n\n/** Error thrown when a workflow is gracefully aborted. */\nexport class CancelledWorkflowError extends Error {\n\tconstructor(\n\t\tmessage = 'Workflow execution was cancelled.',\n\t\tpublic readonly executionId?: string,\n\t) {\n\t\tsuper(message)\n\t\tthis.name = 'CancelledWorkflowError'\n\t}\n}\n\n/** Error thrown for a non-recoverable failure that should halt the workflow immediately. */\nexport class FatalNodeExecutionError extends NodeExecutionError {\n\tconstructor(message: string, nodeId: string, blueprintId: string, originalError?: Error, executionId?: string) {\n\t\tsuper(message, nodeId, blueprintId, originalError, executionId)\n\t\tthis.name = 'FatalNodeExecutionError'\n\t}\n}\n"]}
|
|
@@ -0,0 +1,410 @@
|
|
|
1
|
+
import { WorkflowState } from './chunk-CSZ6EOWG.js';
|
|
2
|
+
import { GraphTraverser } from './chunk-UMXW3TCY.js';
|
|
3
|
+
import { sanitizeBlueprint } from './chunk-DSYAC4WB.js';
|
|
4
|
+
import { JsonSerializer } from './chunk-CYHZ2YVH.js';
|
|
5
|
+
import { BuiltInNodeExecutor, ClassNodeExecutor, FunctionNodeExecutor } from './chunk-QRMUKDSP.js';
|
|
6
|
+
import { AsyncContextView } from './chunk-KWQHFT7E.js';
|
|
7
|
+
import { CancelledWorkflowError, NodeExecutionError, FatalNodeExecutionError } from './chunk-5ZXV3R5D.js';
|
|
8
|
+
import { PropertyEvaluator } from './chunk-PH2IYZHV.js';
|
|
9
|
+
import { isNodeClass } from './chunk-5QMPFUKA.js';
|
|
10
|
+
import { analyzeBlueprint } from './chunk-HN72TZY5.js';
|
|
11
|
+
import { NullLogger } from './chunk-4PELJWF7.js';
|
|
12
|
+
|
|
13
|
+
// src/runtime/runtime.ts
|
|
14
|
+
// Bundler-generated output for src/runtime/runtime.ts.
// Core workflow engine: runs a blueprint by traversing its node graph,
// dispatching each node to an executor, weaving in middleware hooks,
// handling fallbacks/cancellation, and emitting lifecycle events.
var FlowRuntime = class {
  // Node-type key -> implementation (class or plain function); consulted by getExecutor().
  registry;
  // Blueprint id -> blueprint; used to resolve "subflow" built-in nodes.
  blueprints;
  // Injected services surfaced to nodes via nodeContext.dependencies.
  dependencies;
  logger;
  // Receives lifecycle events (workflow:start/finish/stall, node:start/finish/error/fallback).
  eventBus;
  // (De)serializes workflow context; JSON by default.
  serializer;
  // Middleware objects that may provide beforeNode/afterNode/aroundNode hooks.
  middleware;
  // Expression evaluator used for edge conditions, edge transforms, and loop conditions.
  evaluator;
  options;
  /**
   * Builds a runtime from partial options. Every collaborator falls back to a
   * no-op or default implementation, so `new FlowRuntime({})` is usable as-is.
   */
  constructor(options) {
    this.registry = options.registry || {};
    this.blueprints = options.blueprints || {};
    this.dependencies = options.dependencies || {};
    this.logger = options.logger || new NullLogger();
    // Default event bus silently discards all events.
    this.eventBus = options.eventBus || { emit: () => {
    } };
    this.serializer = options.serializer || new JsonSerializer();
    this.middleware = options.middleware || [];
    this.evaluator = options.evaluator || new PropertyEvaluator();
    this.options = options;
  }
  /**
   * Executes a blueprint to completion (or stall/cancellation).
   *
   * @param blueprint    Workflow definition; sanitized before use.
   * @param initialState Initial context — either a plain object or a string,
   *                     which is deserialized via this.serializer.
   * @param options      Optional: strict (reject cyclic graphs), functionRegistry,
   *                     signal (AbortSignal), concurrency.
   * @returns The workflow result with a `status` field attached
   *          (e.g. "completed" / "stalled" / "cancelled" — see status handling below).
   * @throws  Error when options.strict is set and the graph has cycles;
   *          rethrows any non-cancellation execution error.
   */
  async run(blueprint, initialState = {}, options) {
    // NOTE(review): optional chaining means executionId is undefined when
    // globalThis.crypto is unavailable — downstream events/errors then carry
    // no id. Contrast with the unguarded crypto.randomUUID() in batch-scatter.
    const executionId = globalThis.crypto?.randomUUID();
    const startTime = Date.now();
    // A string initial state is treated as a serialized context snapshot.
    const contextData = typeof initialState === "string" ? this.serializer.deserialize(initialState) : initialState;
    blueprint = sanitizeBlueprint(blueprint);
    const state = new WorkflowState(contextData);
    this.logger.info(`Starting workflow execution`, {
      blueprintId: blueprint.id,
      executionId
    });
    try {
      await this.eventBus.emit("workflow:start", {
        blueprintId: blueprint.id,
        executionId
      });
      const analysis = analyzeBlueprint(blueprint);
      // Cycles are fatal only in strict mode; otherwise just warned about.
      if (options?.strict && !analysis.isDag) {
        throw new Error(`Workflow '${blueprint.id}' failed strictness check: Cycles are not allowed.`);
      }
      if (!analysis.isDag) {
        this.logger.warn(`Workflow contains cycles`, {
          blueprintId: blueprint.id
        });
      }
      // The traverser drives execution, calling back into this runtime
      // (executeNode / determineNextNodes / applyEdgeTransform).
      const traverser = new GraphTraverser(
        blueprint,
        this,
        state,
        options?.functionRegistry,
        executionId,
        options?.signal,
        options?.concurrency
      );
      await traverser.traverse();
      const status = state.getStatus(traverser.getAllNodeIds(), traverser.getFallbackNodeIds());
      const result = state.toResult(this.serializer);
      result.status = status;
      const duration = Date.now() - startTime;
      // A stall means traversal ended with nodes still unexecuted.
      if (status === "stalled") {
        await this.eventBus.emit("workflow:stall", {
          blueprintId: blueprint.id,
          executionId,
          remainingNodes: traverser.getAllNodeIds().size - state.getCompletedNodes().size
        });
      }
      this.logger.info(`Workflow execution completed`, {
        blueprintId: blueprint.id,
        executionId,
        status,
        duration,
        errors: result.errors?.length || 0
      });
      await this.eventBus.emit("workflow:finish", {
        blueprintId: blueprint.id,
        executionId,
        status,
        errors: result.errors
      });
      return result;
    } catch (error) {
      const duration = Date.now() - startTime;
      // Cancellation (AbortError or CancelledWorkflowError) is a graceful exit,
      // not a failure: log, emit finish, and return a "cancelled" result.
      if (error instanceof DOMException ? error.name === "AbortError" : error instanceof CancelledWorkflowError) {
        this.logger.info(`Workflow execution cancelled`, {
          blueprintId: blueprint.id,
          executionId,
          duration
        });
        await this.eventBus.emit("workflow:finish", {
          blueprintId: blueprint.id,
          executionId,
          status: "cancelled",
          error
        });
        // NOTE(review): partial context accumulated before cancellation is
        // discarded here — callers get an empty context. Confirm intended.
        return {
          context: {},
          serializedContext: "{}",
          status: "cancelled"
        };
      }
      this.logger.error(`Workflow execution failed`, {
        blueprintId: blueprint.id,
        executionId,
        duration,
        error: error instanceof Error ? error.message : String(error)
      });
      throw error;
    }
  }
  /**
   * Executes a single node: resolves its input, builds the node context,
   * wraps execution in the middleware chain (before/around/after hooks),
   * and emits node:start / node:finish / node:error events.
   *
   * @throws CancelledWorkflowError when aborted mid-node (AbortError);
   *         otherwise a NodeExecutionError (wrapping unknown errors).
   */
  async executeNode(blueprint, nodeId, state, allPredecessors, functionRegistry, executionId, signal) {
    const nodeDef = blueprint.nodes.find((n) => n.id === nodeId);
    if (!nodeDef) {
      throw new NodeExecutionError(
        `Node '${nodeId}' not found in blueprint.`,
        nodeId,
        blueprint.id,
        void 0,
        executionId
      );
    }
    // Nodes always see an async context; sync contexts get an async adapter.
    const contextImpl = state.getContext();
    const asyncContext = contextImpl.type === "sync" ? new AsyncContextView(contextImpl) : contextImpl;
    const nodeContext = {
      context: asyncContext,
      input: await this._resolveNodeInput(nodeDef, asyncContext, allPredecessors),
      params: nodeDef.params || {},
      // Runtime logger is always injected alongside user dependencies.
      dependencies: { ...this.dependencies, logger: this.logger },
      signal
    };
    const beforeHooks = this.middleware.map((m) => m.beforeNode).filter((hook) => !!hook);
    const afterHooks = this.middleware.map((m) => m.afterNode).filter((hook) => !!hook);
    const aroundHooks = this.middleware.map((m) => m.aroundNode).filter((hook) => !!hook);
    const executor = this.getExecutor(nodeDef, functionRegistry);
    // Innermost layer: before-hooks, actual execution (with fallback), after-hooks.
    const coreExecution = async () => {
      let result;
      let error;
      try {
        for (const hook of beforeHooks) await hook(nodeContext.context, nodeId);
        result = await this.executeWithFallback(
          blueprint,
          nodeDef,
          nodeContext,
          executor,
          executionId,
          signal,
          state,
          functionRegistry
        );
        return result;
      } catch (e) {
        // Captured only so after-hooks can observe it; rethrown unchanged.
        error = e;
        throw e;
      } finally {
        // After-hooks always run, success or failure.
        // NOTE(review): a throwing after-hook here would mask the original error.
        for (const hook of afterHooks) await hook(nodeContext.context, nodeId, result, error);
      }
    };
    // Compose around-hooks inside-out so middleware[0] is the outermost wrapper.
    let executionChain = coreExecution;
    for (let i = aroundHooks.length - 1; i >= 0; i--) {
      const hook = aroundHooks[i];
      const next = executionChain;
      executionChain = () => hook(nodeContext.context, nodeId, next);
    }
    try {
      await this.eventBus.emit("node:start", {
        blueprintId: blueprint.id,
        nodeId,
        executionId
      });
      const result = await executionChain();
      await this.eventBus.emit("node:finish", {
        blueprintId: blueprint.id,
        nodeId,
        result,
        executionId
      });
      return result;
    } catch (error) {
      await this.eventBus.emit("node:error", {
        blueprintId: blueprint.id,
        nodeId,
        error,
        executionId
      });
      // Translate an abort into the engine's cancellation error type.
      if (error instanceof DOMException && error.name === "AbortError") {
        throw new CancelledWorkflowError("Workflow cancelled");
      }
      // Wrap unknown errors so callers always get a NodeExecutionError.
      throw error instanceof NodeExecutionError ? error : new NodeExecutionError(`Node '${nodeId}' failed execution.`, nodeId, blueprint.id, error, executionId);
    }
  }
  /**
   * Picks the executor for a node definition.
   * "batch-*", "loop-*" and "subflow" are built-ins handled by the runtime
   * itself; everything else is looked up in the per-run functionRegistry
   * first, then this.registry.
   *
   * @throws FatalNodeExecutionError when no implementation is registered.
   *         (blueprintId is passed as "" — the blueprint isn't known here.)
   */
  getExecutor(nodeDef, functionRegistry) {
    if (nodeDef.uses.startsWith("batch-") || nodeDef.uses.startsWith("loop-") || nodeDef.uses === "subflow") {
      return new BuiltInNodeExecutor((nodeDef2, context) => this._executeBuiltInNode(nodeDef2, context));
    }
    const implementation = functionRegistry?.get(nodeDef.uses) || this.registry[nodeDef.uses];
    if (!implementation) {
      throw new FatalNodeExecutionError(
        `Implementation for '${nodeDef.uses}' not found for node '${nodeDef.id}'.`,
        nodeDef.id,
        ""
      );
    }
    // ?? so an explicit maxRetries of 0 is respected (|| would override it).
    const maxRetries = nodeDef.config?.maxRetries ?? 1;
    return isNodeClass(implementation) ? new ClassNodeExecutor(implementation, maxRetries, this.eventBus) : new FunctionNodeExecutor(implementation, maxRetries, this.eventBus);
  }
  /**
   * Runs the executor; on non-fatal failure, runs the node's configured
   * fallback node (config.fallback) instead, marking it completed in state.
   * Fatal errors (FatalNodeExecutionError, directly or as originalError)
   * always propagate without fallback.
   *
   * @returns The primary result, or the fallback result tagged with
   *          `_fallbackExecuted: true`.
   */
  async executeWithFallback(blueprint, nodeDef, context, executor, executionId, signal, state, functionRegistry) {
    try {
      return await executor.execute(nodeDef, context, executionId, signal);
    } catch (error) {
      const isFatal = error instanceof FatalNodeExecutionError || error instanceof NodeExecutionError && error.originalError instanceof FatalNodeExecutionError;
      if (isFatal) throw error;
      const fallbackNodeId = nodeDef.config?.fallback;
      if (fallbackNodeId && state) {
        context.dependencies.logger.warn(`Executing fallback for node`, {
          nodeId: nodeDef.id,
          fallbackNodeId,
          error: error instanceof Error ? error.message : String(error),
          executionId
        });
        await this.eventBus.emit("node:fallback", {
          blueprintId: blueprint.id,
          nodeId: nodeDef.id,
          executionId,
          fallback: fallbackNodeId
        });
        const fallbackNode = blueprint.nodes.find((n) => n.id === fallbackNodeId);
        if (!fallbackNode) {
          throw new NodeExecutionError(
            `Fallback node '${fallbackNodeId}' not found in blueprint.`,
            nodeDef.id,
            blueprint.id,
            void 0,
            executionId
          );
        }
        // The fallback node runs with the FAILED node's context (same input/params view).
        const fallbackExecutor = this.getExecutor(fallbackNode, functionRegistry);
        const fallbackResult = await fallbackExecutor.execute(fallbackNode, context, executionId, signal);
        state.markFallbackExecuted();
        state.addCompletedNode(fallbackNodeId, fallbackResult.output);
        context.dependencies.logger.info(`Fallback execution completed`, {
          nodeId: nodeDef.id,
          fallbackNodeId,
          executionId
        });
        return { ...fallbackResult, _fallbackExecuted: true };
      }
      throw error;
    }
  }
  /**
   * Selects which edges (and thus target nodes) to follow after a node
   * completes. Action-labelled edges matching result.action take priority;
   * only if none match are unlabelled (default) edges considered. Edge
   * conditions are evaluated against the context snapshot plus `result`.
   *
   * @returns Array of { node, edge } pairs for matched targets.
   */
  async determineNextNodes(blueprint, nodeId, result, context) {
    const outgoingEdges = blueprint.edges.filter((edge) => edge.source === nodeId);
    const matched = [];
    // An edge without a condition always matches.
    const evaluateEdge = async (edge) => {
      if (!edge.condition) return true;
      const contextData = context.type === "sync" ? context.toJSON() : await context.toJSON();
      return !!this.evaluator.evaluate(edge.condition, {
        ...contextData,
        result
      });
    };
    if (result.action) {
      const actionEdges = outgoingEdges.filter((edge) => edge.action === result.action);
      for (const edge of actionEdges) {
        if (await evaluateEdge(edge)) {
          const targetNode = blueprint.nodes.find((n) => n.id === edge.target);
          if (targetNode) matched.push({ node: targetNode, edge });
        }
      }
    }
    // Default edges are a fallback: used only when no action edge matched.
    if (matched.length === 0) {
      const defaultEdges = outgoingEdges.filter((edge) => !edge.action);
      for (const edge of defaultEdges) {
        if (await evaluateEdge(edge)) {
          const targetNode = blueprint.nodes.find((n) => n.id === edge.target);
          if (targetNode) matched.push({ node: targetNode, edge });
        }
      }
    }
    this.logger.debug(`Determined next nodes for ${nodeId}`, {
      matchedNodes: matched.map((m) => m.node.id),
      action: result.action
    });
    return matched;
  }
  /**
   * Propagates a source node's output across an edge: applies the optional
   * edge transform, stores the value under "<targetId>_input" in context,
   * and wires targetNode.inputs to that key when appropriate
   * (joinStrategy "any" always rewires; otherwise only when the target has
   * no explicit inputs and at most one predecessor).
   *
   * NOTE(review): mutates targetNode (a blueprint node object) in place.
   */
  async applyEdgeTransform(edge, sourceResult, targetNode, context, allPredecessors) {
    const asyncContext = context.type === "sync" ? new AsyncContextView(context) : context;
    // Transform expression sees { input, context }; without one, output passes through.
    const finalInput = edge.transform ? this.evaluator.evaluate(edge.transform, {
      input: sourceResult.output,
      context: await asyncContext.toJSON()
    }) : sourceResult.output;
    const inputKey = `${targetNode.id}_input`;
    await asyncContext.set(inputKey, finalInput);
    if (targetNode.config?.joinStrategy === "any") {
      // "any" join: last-arriving edge wins the input slot.
      targetNode.inputs = inputKey;
    } else if (!targetNode.inputs) {
      const predecessors = allPredecessors?.get(targetNode.id);
      if (!predecessors || predecessors.size === 1) {
        targetNode.inputs = inputKey;
      }
    }
  }
  /**
   * Resolves a node's input value from context.
   * Priority: explicit nodeDef.inputs (string key, or object mapping of
   * inputName -> contextKey) > implicit single-predecessor output (read
   * from context under the predecessor's id) > undefined.
   */
  async _resolveNodeInput(nodeDef, context, allPredecessors) {
    if (nodeDef.inputs) {
      if (typeof nodeDef.inputs === "string") return await context.get(nodeDef.inputs);
      if (typeof nodeDef.inputs === "object") {
        const input = {};
        for (const key in nodeDef.inputs) {
          const contextKey = nodeDef.inputs[key];
          input[key] = await context.get(contextKey);
        }
        return input;
      }
    }
    if (allPredecessors) {
      const predecessors = allPredecessors.get(nodeDef.id);
      if (predecessors && predecessors.size === 1) {
        const singlePredecessorId = predecessors.values().next().value;
        return await context.get(singlePredecessorId);
      }
    }
    return void 0;
  }
  /**
   * Implements the built-in node types:
   * - "batch-scatter": fans an input array out into one dynamic worker node
   *   per item (worker type from params.workerUsesKey), each reading its item
   *   from a unique context key; output carries params.gatherNodeId.
   * - "batch-gather": no-op placeholder; returns an empty output.
   * - "loop-controller": evaluates params.condition against the context and
   *   returns action "continue" or "break".
   * - "subflow": runs this.blueprints[params.blueprintId] as a nested
   *   workflow via this.run, mapping inputs in and outputs back out.
   *
   * @throws FatalNodeExecutionError for unknown types or bad subflow params;
   *         NodeExecutionError when a subflow does not complete.
   */
  async _executeBuiltInNode(nodeDef, contextImpl) {
    const context = contextImpl.type === "sync" ? new AsyncContextView(contextImpl) : contextImpl;
    const { params = {}, id, inputs } = nodeDef;
    switch (nodeDef.uses) {
      case "batch-scatter": {
        const inputArray = await context.get(inputs) || [];
        if (!Array.isArray(inputArray)) throw new Error(`Input for batch-scatter node '${id}' must be an array.`);
        // NOTE(review): unguarded, unlike run()'s globalThis.crypto?.randomUUID().
        const batchId = globalThis.crypto.randomUUID();
        const dynamicNodes = [];
        for (let i = 0; i < inputArray.length; i++) {
          const item = inputArray[i];
          // Each worker reads its item from a batch-and-index-unique context key.
          const itemInputKey = `${id}_${batchId}_item_${i}`;
          await context.set(itemInputKey, item);
          dynamicNodes.push({
            id: `${params.workerUsesKey}_${batchId}_${i}`,
            uses: params.workerUsesKey,
            inputs: itemInputKey
          });
        }
        const gatherNodeId = params.gatherNodeId;
        return { dynamicNodes, output: { gatherNodeId } };
      }
      case "batch-gather": {
        // Aggregation is handled elsewhere (presumably by the traverser) —
        // this node itself produces nothing. TODO confirm against GraphTraverser.
        return { output: {} };
      }
      case "loop-controller": {
        const contextData = await context.toJSON();
        const shouldContinue = !!this.evaluator.evaluate(params.condition, contextData);
        return { action: shouldContinue ? "continue" : "break" };
      }
      case "subflow": {
        const { blueprintId, inputs: inputMapping, outputs: outputMapping } = params;
        if (!blueprintId)
          throw new FatalNodeExecutionError(`Subflow node '${id}' is missing the 'blueprintId' parameter.`, id, "");
        const subBlueprint = this.blueprints[blueprintId];
        if (!subBlueprint)
          throw new FatalNodeExecutionError(
            `Sub-blueprint with ID '${blueprintId}' not found in runtime registry.`,
            id,
            ""
          );
        // Copy only the mapped-and-present keys into the child context.
        const subflowInitialContext = {};
        if (inputMapping) {
          for (const [targetKey, sourceKey] of Object.entries(inputMapping)) {
            if (await context.has(sourceKey)) {
              subflowInitialContext[targetKey] = await context.get(sourceKey);
            }
          }
        }
        // Recursive execution; the subflow inherits no parent options (signal etc.).
        const subflowResult = await this.run(subBlueprint, subflowInitialContext);
        if (subflowResult.status !== "completed")
          throw new NodeExecutionError(
            `Sub-workflow '${blueprintId}' did not complete successfully. Status: ${subflowResult.status}`,
            id,
            subBlueprint.id
          );
        // Map selected child-context keys back into the parent context.
        if (outputMapping) {
          for (const [parentKey, subKey] of Object.entries(outputMapping)) {
            const subflowFinalContext = subflowResult.context;
            if (Object.hasOwn(subflowFinalContext, subKey)) {
              await context.set(parentKey, subflowFinalContext[subKey]);
            }
          }
        }
        return { output: subflowResult.context };
      }
      default:
        throw new FatalNodeExecutionError(`Unknown built-in node type: '${nodeDef.uses}'`, id, "");
    }
  }
};
|
|
407
|
+
|
|
408
|
+
export { FlowRuntime };
|
|
409
|
+
//# sourceMappingURL=chunk-CO5BTPKI.js.map
|
|
410
|
+
//# sourceMappingURL=chunk-CO5BTPKI.js.map
|