flowcraft 2.2.0 → 2.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +40 -34
- package/dist/analysis.d.ts +3 -1
- package/dist/chunk-33NO4PUJ.js +74 -0
- package/dist/chunk-33NO4PUJ.js.map +1 -0
- package/dist/chunk-5KKSQWSC.js +90 -0
- package/dist/chunk-5KKSQWSC.js.map +1 -0
- package/dist/{chunk-QLGJUDQF.js → chunk-6INWPSZT.js} +6 -6
- package/dist/chunk-6INWPSZT.js.map +1 -0
- package/dist/chunk-BC4G7OM6.js +42 -0
- package/dist/chunk-BC4G7OM6.js.map +1 -0
- package/dist/chunk-BCRWXTWX.js +21 -0
- package/dist/chunk-BCRWXTWX.js.map +1 -0
- package/dist/{chunk-ZCHFZBGL.js → chunk-C4HYIJI3.js} +120 -5
- package/dist/chunk-C4HYIJI3.js.map +1 -0
- package/dist/chunk-CD3Q4N6V.js +13 -0
- package/dist/chunk-CD3Q4N6V.js.map +1 -0
- package/dist/chunk-CD4FUZOJ.js +114 -0
- package/dist/chunk-CD4FUZOJ.js.map +1 -0
- package/dist/chunk-CY755I7I.js +25 -0
- package/dist/chunk-CY755I7I.js.map +1 -0
- package/dist/chunk-DL7KVYZF.js +39 -0
- package/dist/chunk-DL7KVYZF.js.map +1 -0
- package/dist/chunk-FRKO3WX4.js +32 -0
- package/dist/chunk-FRKO3WX4.js.map +1 -0
- package/dist/chunk-G53CSLBF.js +54 -0
- package/dist/chunk-G53CSLBF.js.map +1 -0
- package/dist/chunk-G5BGBPFP.js +172 -0
- package/dist/chunk-G5BGBPFP.js.map +1 -0
- package/dist/chunk-HAZ26F3P.js +98 -0
- package/dist/chunk-HAZ26F3P.js.map +1 -0
- package/dist/chunk-IB2BISIC.js +446 -0
- package/dist/chunk-IB2BISIC.js.map +1 -0
- package/dist/{chunk-U5V5O5MN.js → chunk-LNK7LZER.js} +5 -3
- package/dist/chunk-LNK7LZER.js.map +1 -0
- package/dist/chunk-MCGK3FXQ.js +143 -0
- package/dist/chunk-MCGK3FXQ.js.map +1 -0
- package/dist/chunk-MUYLRTSR.js +82 -0
- package/dist/chunk-MUYLRTSR.js.map +1 -0
- package/dist/chunk-NVJ3ZO3P.js +3 -0
- package/dist/{chunk-HMR2GEGE.js.map → chunk-NVJ3ZO3P.js.map} +1 -1
- package/dist/chunk-NVLZFLYM.js +3 -0
- package/dist/chunk-NVLZFLYM.js.map +1 -0
- package/dist/chunk-ONH7PIJZ.js +300 -0
- package/dist/chunk-ONH7PIJZ.js.map +1 -0
- package/dist/chunk-QNYXQKFW.js +25 -0
- package/dist/chunk-QNYXQKFW.js.map +1 -0
- package/dist/chunk-RM677CNU.js +52 -0
- package/dist/chunk-RM677CNU.js.map +1 -0
- package/dist/chunk-WWGFIYKW.js +47 -0
- package/dist/chunk-WWGFIYKW.js.map +1 -0
- package/dist/chunk-XNRIM27H.js +76 -0
- package/dist/chunk-XNRIM27H.js.map +1 -0
- package/dist/container-factory.d.ts +17 -0
- package/dist/container-factory.js +13 -0
- package/dist/container-factory.js.map +1 -0
- package/dist/container.d.ts +23 -0
- package/dist/container.js +3 -0
- package/dist/container.js.map +1 -0
- package/dist/context.d.ts +3 -1
- package/dist/errors.d.ts +18 -17
- package/dist/errors.js +1 -1
- package/dist/evaluator.d.ts +3 -1
- package/dist/flow.d.ts +12 -2
- package/dist/flow.js +2 -2
- package/dist/index.d.ts +7 -8
- package/dist/index.js +26 -14
- package/dist/linter.d.ts +3 -1
- package/dist/logger.d.ts +3 -1
- package/dist/node.d.ts +3 -1
- package/dist/node.js +1 -1
- package/dist/nodes/batch-gather.d.ts +9 -0
- package/dist/nodes/batch-gather.js +4 -0
- package/dist/nodes/batch-gather.js.map +1 -0
- package/dist/nodes/batch-scatter.d.ts +9 -0
- package/dist/nodes/batch-scatter.js +4 -0
- package/dist/nodes/batch-scatter.js.map +1 -0
- package/dist/nodes/subflow.d.ts +9 -0
- package/dist/nodes/subflow.js +10 -0
- package/dist/nodes/subflow.js.map +1 -0
- package/dist/nodes/wait.d.ts +9 -0
- package/dist/nodes/wait.js +4 -0
- package/dist/nodes/wait.js.map +1 -0
- package/dist/runtime/adapter.d.ts +3 -5
- package/dist/runtime/adapter.js +19 -9
- package/dist/runtime/execution-context.d.ts +3 -0
- package/dist/runtime/execution-context.js +6 -0
- package/dist/runtime/execution-context.js.map +1 -0
- package/dist/runtime/executors.d.ts +3 -26
- package/dist/runtime/executors.js +2 -2
- package/dist/runtime/index.d.ts +5 -7
- package/dist/runtime/index.js +21 -10
- package/dist/runtime/node-executor-factory.d.ts +12 -0
- package/dist/runtime/node-executor-factory.js +6 -0
- package/dist/runtime/node-executor-factory.js.map +1 -0
- package/dist/runtime/orchestrator.d.ts +9 -0
- package/dist/runtime/orchestrator.js +8 -0
- package/dist/runtime/orchestrator.js.map +1 -0
- package/dist/runtime/orchestrators/step-by-step.d.ts +16 -0
- package/dist/runtime/orchestrators/step-by-step.js +5 -0
- package/dist/runtime/orchestrators/step-by-step.js.map +1 -0
- package/dist/runtime/orchestrators/utils.d.ts +35 -0
- package/dist/runtime/orchestrators/utils.js +4 -0
- package/dist/runtime/orchestrators/utils.js.map +1 -0
- package/dist/runtime/runtime.d.ts +3 -41
- package/dist/runtime/runtime.js +18 -8
- package/dist/runtime/state.d.ts +3 -21
- package/dist/runtime/state.js +2 -1
- package/dist/runtime/traverser.d.ts +3 -26
- package/dist/runtime/traverser.js +1 -2
- package/dist/runtime/types.d.ts +3 -16
- package/dist/runtime/types.js +1 -1
- package/dist/runtime/workflow-logic-handler.d.ts +17 -0
- package/dist/runtime/workflow-logic-handler.js +5 -0
- package/dist/runtime/workflow-logic-handler.js.map +1 -0
- package/dist/sanitizer.d.ts +3 -1
- package/dist/serializer.d.ts +3 -1
- package/dist/testing/event-logger.d.ts +63 -0
- package/dist/testing/event-logger.js +3 -0
- package/dist/testing/event-logger.js.map +1 -0
- package/dist/testing/index.d.ts +6 -0
- package/dist/testing/index.js +31 -0
- package/dist/testing/index.js.map +1 -0
- package/dist/testing/run-with-trace.d.ts +38 -0
- package/dist/testing/run-with-trace.js +29 -0
- package/dist/testing/run-with-trace.js.map +1 -0
- package/dist/testing/stepper.d.ts +79 -0
- package/dist/testing/stepper.js +11 -0
- package/dist/testing/stepper.js.map +1 -0
- package/dist/types-ezHUBdpL.d.ts +564 -0
- package/dist/types.d.ts +3 -1
- package/package.json +55 -51
- package/LICENSE +0 -21
- package/dist/chunk-5ZXV3R5D.js +0 -28
- package/dist/chunk-5ZXV3R5D.js.map +0 -1
- package/dist/chunk-GEKDR2SS.js +0 -201
- package/dist/chunk-GEKDR2SS.js.map +0 -1
- package/dist/chunk-HMR2GEGE.js +0 -3
- package/dist/chunk-M2FRTT2K.js +0 -144
- package/dist/chunk-M2FRTT2K.js.map +0 -1
- package/dist/chunk-OTS5YJ3S.js +0 -494
- package/dist/chunk-OTS5YJ3S.js.map +0 -1
- package/dist/chunk-QLGJUDQF.js.map +0 -1
- package/dist/chunk-U5V5O5MN.js.map +0 -1
- package/dist/chunk-VSGQDLBF.js +0 -61
- package/dist/chunk-VSGQDLBF.js.map +0 -1
- package/dist/chunk-ZCHFZBGL.js.map +0 -1
- package/dist/types-CsTeXTiA.d.ts +0 -222
package/dist/chunk-VSGQDLBF.js
DELETED
|
@@ -1,61 +0,0 @@
|
|
|
1
|
-
import { Context } from './chunk-R3HQXIEL.js';
|
|
2
|
-
|
|
3
|
-
// src/runtime/state.ts
|
|
4
|
-
var WorkflowState = class {
|
|
5
|
-
_completedNodes = /* @__PURE__ */ new Set();
|
|
6
|
-
errors = [];
|
|
7
|
-
anyFallbackExecuted = false;
|
|
8
|
-
context;
|
|
9
|
-
constructor(initialData) {
|
|
10
|
-
this.context = new Context(initialData);
|
|
11
|
-
}
|
|
12
|
-
addCompletedNode(nodeId, output) {
|
|
13
|
-
this._completedNodes.add(nodeId);
|
|
14
|
-
this.context.set(nodeId, output);
|
|
15
|
-
}
|
|
16
|
-
addError(nodeId, error) {
|
|
17
|
-
this.errors.push({
|
|
18
|
-
nodeId,
|
|
19
|
-
message: error.message,
|
|
20
|
-
originalError: error,
|
|
21
|
-
timestamp: (/* @__PURE__ */ new Date()).toISOString(),
|
|
22
|
-
stack: error.stack
|
|
23
|
-
});
|
|
24
|
-
}
|
|
25
|
-
clearError(nodeId) {
|
|
26
|
-
this.errors = this.errors.filter((err) => err.nodeId !== nodeId);
|
|
27
|
-
}
|
|
28
|
-
markFallbackExecuted() {
|
|
29
|
-
this.anyFallbackExecuted = true;
|
|
30
|
-
}
|
|
31
|
-
getContext() {
|
|
32
|
-
return this.context;
|
|
33
|
-
}
|
|
34
|
-
getCompletedNodes() {
|
|
35
|
-
return new Set(this._completedNodes);
|
|
36
|
-
}
|
|
37
|
-
getErrors() {
|
|
38
|
-
return this.errors;
|
|
39
|
-
}
|
|
40
|
-
getAnyFallbackExecuted() {
|
|
41
|
-
return this.anyFallbackExecuted;
|
|
42
|
-
}
|
|
43
|
-
getStatus(allNodeIds, _fallbackNodeIds) {
|
|
44
|
-
if (this.anyFallbackExecuted) return "completed";
|
|
45
|
-
if (this.errors.length > 0) return "failed";
|
|
46
|
-
return this._completedNodes.size < allNodeIds.size ? "stalled" : "completed";
|
|
47
|
-
}
|
|
48
|
-
toResult(serializer) {
|
|
49
|
-
const contextJSON = this.context.toJSON();
|
|
50
|
-
return {
|
|
51
|
-
context: contextJSON,
|
|
52
|
-
serializedContext: serializer.serialize(contextJSON),
|
|
53
|
-
status: this.getStatus(/* @__PURE__ */ new Set(), /* @__PURE__ */ new Set()),
|
|
54
|
-
errors: this.errors.length > 0 ? this.errors : void 0
|
|
55
|
-
};
|
|
56
|
-
}
|
|
57
|
-
};
|
|
58
|
-
|
|
59
|
-
export { WorkflowState };
|
|
60
|
-
//# sourceMappingURL=chunk-VSGQDLBF.js.map
|
|
61
|
-
//# sourceMappingURL=chunk-VSGQDLBF.js.map
|
|
@@ -1 +0,0 @@
|
|
|
1
|
-
{"version":3,"sources":["../src/runtime/state.ts"],"names":[],"mappings":";;;AAGO,IAAM,gBAAN,MAA0D;AAAA,EACxD,eAAA,uBAAsB,GAAA,EAAY;AAAA,EAClC,SAA0B,EAAC;AAAA,EAC3B,mBAAA,GAAsB,KAAA;AAAA,EACtB,OAAA;AAAA,EAER,YAAY,WAAA,EAAgC;AAC3C,IAAA,IAAA,CAAK,OAAA,GAAU,IAAI,OAAA,CAAkB,WAAW,CAAA;AAAA,EACjD;AAAA,EAEA,gBAAA,CAAiB,QAAgB,MAAA,EAAa;AAC7C,IAAA,IAAA,CAAK,eAAA,CAAgB,IAAI,MAAM,CAAA;AAC/B,IAAA,IAAA,CAAK,OAAA,CAAQ,GAAA,CAAI,MAAA,EAAQ,MAAM,CAAA;AAAA,EAChC;AAAA,EAEA,QAAA,CAAS,QAAgB,KAAA,EAAc;AACtC,IAAA,IAAA,CAAK,OAAO,IAAA,CAAK;AAAA,MAChB,MAAA;AAAA,MACA,SAAS,KAAA,CAAM,OAAA;AAAA,MACf,aAAA,EAAe,KAAA;AAAA,MACf,SAAA,EAAA,iBAAW,IAAI,IAAA,EAAK,EAAE,WAAA,EAAY;AAAA,MAClC,OAAO,KAAA,CAAM;AAAA,KACb,CAAA;AAAA,EACF;AAAA,EAEA,WAAW,MAAA,EAAgB;AAC1B,IAAA,IAAA,CAAK,MAAA,GAAS,KAAK,MAAA,CAAO,MAAA,CAAO,CAAC,GAAA,KAAQ,GAAA,CAAI,WAAW,MAAM,CAAA;AAAA,EAChE;AAAA,EAEA,oBAAA,GAAuB;AACtB,IAAA,IAAA,CAAK,mBAAA,GAAsB,IAAA;AAAA,EAC5B;AAAA,EAEA,UAAA,GAA8C;AAC7C,IAAA,OAAO,IAAA,CAAK,OAAA;AAAA,EACb;AAAA,EAEA,iBAAA,GAAiC;AAChC,IAAA,OAAO,IAAI,GAAA,CAAI,IAAA,CAAK,eAAe,CAAA;AAAA,EACpC;AAAA,EAEA,SAAA,GAA6B;AAC5B,IAAA,OAAO,IAAA,CAAK,MAAA;AAAA,EACb;AAAA,EAEA,sBAAA,GAAkC;AACjC,IAAA,OAAO,IAAA,CAAK,mBAAA;AAAA,EACb;AAAA,EAEA,SAAA,CAAU,YAAyB,gBAAA,EAAyD;AAC3F,IAAA,IAAI,IAAA,CAAK,qBAAqB,OAAO,WAAA;AACrC,IAAA,IAAI,IAAA,CAAK,MAAA,CAAO,MAAA,GAAS,CAAA,EAAG,OAAO,QAAA;AAEnC,IAAA,OAAO,IAAA,CAAK,eAAA,CAAgB,IAAA,GAAO,UAAA,CAAW,OAAO,SAAA,GAAY,WAAA;AAAA,EAClE;AAAA,EAEA,SAAS,UAAA,EAAmD;AAC3D,IAAA,MAAM,WAAA,GAAc,IAAA,CAAK,OAAA,CAAQ,MAAA,EAAO;AACxC,IAAA,OAAO;AAAA,MACN,OAAA,EAAS,WAAA;AAAA,MACT,iBAAA,EAAmB,UAAA,CAAW,SAAA,CAAU,WAAW,CAAA;AAAA,MACnD,MAAA,EAAQ,KAAK,SAAA,iBAAU,IAAI,KAAI,kBAAG,IAAI,KAAK,CAAA;AAAA,MAC3C,QAAQ,IAAA,CAAK,MAAA,CAAO,MAAA,GAAS,CAAA,GAAI,KAAK,MAAA,GAAS;AAAA,KAChD;AAAA,EACD;AACD","file":"chunk-VSGQDLBF.js","sourcesContent":["import { Context } from '../context'\nimport type { ContextImplementation, ISerializer, WorkflowError, WorkflowResult } from '../types'\n\nexport class WorkflowState<TContext extends Record<string, 
any>> {\n\tprivate _completedNodes = new Set<string>()\n\tprivate errors: WorkflowError[] = []\n\tprivate anyFallbackExecuted = false\n\tprivate context: ContextImplementation<TContext>\n\n\tconstructor(initialData: Partial<TContext>) {\n\t\tthis.context = new Context<TContext>(initialData)\n\t}\n\n\taddCompletedNode(nodeId: string, output: any) {\n\t\tthis._completedNodes.add(nodeId)\n\t\tthis.context.set(nodeId, output)\n\t}\n\n\taddError(nodeId: string, error: Error) {\n\t\tthis.errors.push({\n\t\t\tnodeId,\n\t\t\tmessage: error.message,\n\t\t\toriginalError: error,\n\t\t\ttimestamp: new Date().toISOString(),\n\t\t\tstack: error.stack,\n\t\t})\n\t}\n\n\tclearError(nodeId: string) {\n\t\tthis.errors = this.errors.filter((err) => err.nodeId !== nodeId)\n\t}\n\n\tmarkFallbackExecuted() {\n\t\tthis.anyFallbackExecuted = true\n\t}\n\n\tgetContext(): ContextImplementation<TContext> {\n\t\treturn this.context\n\t}\n\n\tgetCompletedNodes(): Set<string> {\n\t\treturn new Set(this._completedNodes)\n\t}\n\n\tgetErrors(): WorkflowError[] {\n\t\treturn this.errors\n\t}\n\n\tgetAnyFallbackExecuted(): boolean {\n\t\treturn this.anyFallbackExecuted\n\t}\n\n\tgetStatus(allNodeIds: Set<string>, _fallbackNodeIds: Set<string>): WorkflowResult['status'] {\n\t\tif (this.anyFallbackExecuted) return 'completed'\n\t\tif (this.errors.length > 0) return 'failed'\n\t\t// const _remainingNodes = [...allNodeIds].filter((id) => !this._completedNodes.has(id) && !fallbackNodeIds.has(id))\n\t\treturn this._completedNodes.size < allNodeIds.size ? 'stalled' : 'completed'\n\t}\n\n\ttoResult(serializer: ISerializer): WorkflowResult<TContext> {\n\t\tconst contextJSON = this.context.toJSON() as TContext\n\t\treturn {\n\t\t\tcontext: contextJSON,\n\t\t\tserializedContext: serializer.serialize(contextJSON),\n\t\t\tstatus: this.getStatus(new Set(), new Set()),\n\t\t\terrors: this.errors.length > 0 ? this.errors : undefined,\n\t\t}\n\t}\n}\n"]}
|
|
@@ -1 +0,0 @@
|
|
|
1
|
-
{"version":3,"sources":["../src/flow.ts"],"names":[],"mappings":";;;AAMA,SAAS,cAAc,EAAA,EAAwF;AAC9G,EAAA,MAAM,MAAA,GAAS,GAAG,QAAA,EAAS;AAC3B,EAAA,IAAI,IAAA,GAAO,CAAA;AACX,EAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,MAAA,CAAO,QAAQ,CAAA,EAAA,EAAK;AACvC,IAAA,MAAM,IAAA,GAAO,MAAA,CAAO,UAAA,CAAW,CAAC,CAAA;AAChC,IAAA,IAAA,GAAA,CAAQ,IAAA,IAAQ,KAAK,IAAA,GAAO,IAAA;AAC5B,IAAA,IAAA,GAAO,IAAA,GAAO,IAAA;AAAA,EACf;AACA,EAAA,OAAO,IAAA,CAAK,GAAA,CAAI,IAAI,CAAA,CAAE,SAAS,EAAE,CAAA;AAClC;AAKO,IAAM,OAAN,MAGL;AAAA,EACO,SAAA;AAAA,EACA,gBAAA;AAAA,EACA,iBAAA;AAAA,EACA,eAAA;AAAA,EAKA,gBAAA;AAAA,EAER,YAAY,EAAA,EAAY;AACvB,IAAA,IAAA,CAAK,SAAA,GAAY,EAAE,EAAA,EAAI,KAAA,EAAO,EAAC,EAAG,KAAA,EAAO,EAAC,EAAE;AAC5C,IAAA,IAAA,CAAK,gBAAA,uBAAuB,GAAA,EAAI;AAChC,IAAA,IAAA,CAAK,iBAAA,uBAAwB,GAAA,EAAI;AACjC,IAAA,IAAA,CAAK,kBAAkB,EAAC;AACxB,IAAA,IAAA,CAAK,gBAAA,uBAAuB,GAAA,EAAI;AAAA,EACjC;AAAA,EAEA,IAAA,CACC,EAAA,EACA,cAAA,EAGA,OAAA,EACO;AACP,IAAA,IAAI,OAAA;AAEJ,IAAA,IAAI,WAAA,CAAY,cAAc,CAAA,EAAG;AAChC,MAAA,OAAA,GACC,cAAA,CAAe,IAAA,IAAQ,cAAA,CAAe,IAAA,KAAS,UAAA,GAC5C,eAAe,IAAA,GACf,CAAA,MAAA,EAAS,aAAA,CAAc,cAAc,CAAC,CAAA,CAAA;AAC1C,MAAA,IAAA,CAAK,gBAAA,CAAiB,GAAA,CAAI,OAAA,EAAS,cAAc,CAAA;AAAA,IAClD,CAAA,MAAO;AACN,MAAA,OAAA,GAAU,CAAA,GAAA,EAAM,aAAA,CAAc,cAAc,CAAC,CAAA,CAAA;AAC7C,MAAA,IAAA,CAAK,gBAAA,CAAiB,GAAA,CAAI,OAAA,EAAS,cAAyC,CAAA;AAAA,IAC7E;AAEA,IAAA,MAAM,UAA0B,EAAE,EAAA,EAAI,IAAA,EAAM,OAAA,EAAS,GAAG,OAAA,EAAQ;AAChE,IAAA,IAAA,CAAK,SAAA,CAAU,KAAA,EAAO,IAAA,CAAK,OAAO,CAAA;AAClC,IAAA,OAAO,IAAA;AAAA,EACR;AAAA,EAEA,IAAA,CAAK,MAAA,EAAgB,MAAA,EAAgB,OAAA,EAA2D;AAC/F,IAAA,MAAM,OAAA,GAA0B,EAAE,MAAA,EAAQ,MAAA,EAAQ,GAAG,OAAA,EAAQ;AAC7D,IAAA,IAAA,CAAK,SAAA,CAAU,KAAA,EAAO,IAAA,CAAK,OAAO,CAAA;AAClC,IAAA,OAAO,IAAA;AAAA,EACR;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYA,KAAA,CACC,EAAA,EACA,MAAA,EAGA,OAAA,EAQyE;AACzE,IAAA,MAAM,EAAE,QAAA,EAAU,SAAA,EAAU,GAAI,OAAA;AAChC,IAAA,MAAM,SAAA,GAAY,GAAG,EAAE,CAAA,QAAA,CAAA;AACvB,IAAA,MAAM,QAAA,GAAW,GAAG,EAAE,CAAA,OAAA,CAAA;AAGtB,IAAA,IAAI,aAAA;AACJ,IAAA,IAAI,WAAA,CAAY
,MAAM,CAAA,EAAG;AACxB,MAAA,aAAA,GACC,MAAA,CAAO,IAAA,IAAQ,MAAA,CAAO,IAAA,KAAS,UAAA,GAAa,OAAO,IAAA,GAAO,CAAA,mBAAA,EAAsB,aAAA,CAAc,MAAM,CAAC,CAAA,CAAA;AACtG,MAAA,IAAA,CAAK,gBAAA,CAAiB,GAAA,CAAI,aAAA,EAAe,MAAM,CAAA;AAAA,IAChD,CAAA,MAAO;AACN,MAAA,aAAA,GAAgB,CAAA,gBAAA,EAAmB,aAAA,CAAc,MAAM,CAAC,CAAA,CAAA;AACxD,MAAA,IAAA,CAAK,gBAAA,CAAiB,GAAA,CAAI,aAAA,EAAe,MAAiC,CAAA;AAAA,IAC3E;AAGA,IAAA,IAAA,CAAK,SAAA,CAAU,OAAO,IAAA,CAAK;AAAA,MAC1B,EAAA,EAAI,SAAA;AAAA,MACJ,IAAA,EAAM,eAAA;AAAA;AAAA,MACN,MAAA,EAAQ,QAAA;AAAA,MACR,MAAA,EAAQ,EAAE,aAAA,EAAe,SAAA,EAAgC,cAAc,QAAA,EAAU,SAAA,EAAW,QAAQ,SAAA;AAAU,KAC9G,CAAA;AAGD,IAAA,IAAA,CAAK,SAAA,CAAU,OAAO,IAAA,CAAK;AAAA,MAC1B,EAAA,EAAI,QAAA;AAAA,MACJ,IAAA,EAAM,cAAA;AAAA;AAAA,MACN,MAAA,EAAQ,EAAE,SAAA,EAAW,YAAA,EAAc,QAAA,EAAS;AAAA,MAC5C,MAAA,EAAQ,EAAE,YAAA,EAAc,KAAA;AAAM;AAAA,KAC9B,CAAA;AAGD,IAAA,IAAA,CAAK,IAAA,CAAK,WAAW,QAAQ,CAAA;AAE7B,IAAA,OAAO,IAAA;AAAA,EACR;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,IAAA,CACC,IACA,OAAA,EAQO;AACP,IAAA,MAAM,EAAE,WAAA,EAAa,SAAA,EAAW,SAAA,EAAU,GAAI,OAAA;AAC9C,IAAA,MAAM,YAAA,GAAe,GAAG,EAAE,CAAA,KAAA,CAAA;AAE1B,IAAA,IAAA,CAAK,iBAAA,CAAkB,GAAA,CAAI,EAAA,EAAI,YAAY,CAAA;AAE3C,IAAA,IAAA,CAAK,gBAAgB,IAAA,CAAK,EAAE,EAAA,EAAI,WAAA,EAAa,WAAW,CAAA;AAGxD,IAAA,IAAA,CAAK,SAAA,CAAU,OAAO,IAAA,CAAK;AAAA,MAC1B,EAAA,EAAI,YAAA;AAAA,MACJ,IAAA,EAAM,iBAAA;AAAA;AAAA,MACN,MAAA,EAAQ,EAAE,SAAA,EAAU;AAAA,MACpB,MAAA,EAAQ,EAAE,YAAA,EAAc,KAAA;AAAM;AAAA,KAC9B,CAAA;AAED,IAAA,IAAA,CAAK,IAAA,CAAK,WAAW,YAAY,CAAA;AAEjC,IAAA,IAAA,CAAK,IAAA,CAAK,cAAc,WAAA,EAAa;AAAA,MACpC,MAAA,EAAQ,UAAA;AAAA,MACR,SAAA,EAAW,WAAW,SAAS,CAAA;AAAA;AAAA,KAC/B,CAAA;AAED,IAAA,OAAO,IAAA;AAAA,EACR;AAAA,EAEA,oBAAoB,EAAA,EAAoB;AACvC,IAAA,MAAM,YAAA,GAAe,IAAA,CAAK,iBAAA,CAAkB,GAAA,CAAI,EAAE,CAAA;AAClD,IAAA,IAAI,CAAC,YAAA,EAAc;AAClB,MAAA,MAAM,IAAI,KAAA,CAAM,CAAA,cAAA,EAAiB,EAAE,CAAA,iEAAA,CAAmE,CAAA;AAAA,IACvG;AACA,IAAA,OAAO,YAAA;AAAA,EACR;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,mBAAmB,MAAA,EAAsB;AACxC,IAAA,IAAA,CAAK,gBAAA,CAAiB,GAAA,CAAI,MAAA,EAAQ,MAAM,CAAA;AACxC,IAAA,OAAO,IAAA;AAA
A,EACR;AAAA,EAEA,WAAA,GAAiC;AAChC,IAAA,IAAI,CAAC,KAAK,SAAA,CAAU,KAAA,IAAS,KAAK,SAAA,CAAU,KAAA,CAAM,WAAW,CAAA,EAAG;AAC/D,MAAA,MAAM,IAAI,MAAM,yCAAyC,CAAA;AAAA,IAC1D;AAEA,IAAA,KAAA,MAAW,OAAA,IAAW,KAAK,eAAA,EAAiB;AAC3C,MAAA,MAAM,SAAA,GAAY,IAAA,CAAK,SAAA,CAAU,KAAA,EAAO,IAAA,CAAK,CAAC,CAAA,KAAM,CAAA,CAAE,EAAA,KAAO,OAAA,CAAQ,WAAW,CAAA;AAChF,MAAA,MAAM,OAAA,GAAU,IAAA,CAAK,SAAA,CAAU,KAAA,EAAO,IAAA,CAAK,CAAC,CAAA,KAAM,CAAA,CAAE,EAAA,KAAO,OAAA,CAAQ,SAAS,CAAA;AAE5E,MAAA,IAAI,CAAC,SAAA,EAAW;AACf,QAAA,MAAM,IAAI,MAAM,CAAA,MAAA,EAAS,OAAA,CAAQ,EAAE,CAAA,sCAAA,EAAyC,OAAA,CAAQ,WAAW,CAAA,EAAA,CAAI,CAAA;AAAA,MACpG;AACA,MAAA,IAAI,CAAC,OAAA,EAAS;AACb,QAAA,MAAM,IAAI,MAAM,CAAA,MAAA,EAAS,OAAA,CAAQ,EAAE,CAAA,oCAAA,EAAuC,OAAA,CAAQ,SAAS,CAAA,EAAA,CAAI,CAAA;AAAA,MAChG;AAEA,MAAA,SAAA,CAAU,SAAS,EAAE,GAAG,SAAA,CAAU,MAAA,EAAQ,cAAc,KAAA,EAAM;AAC9D,MAAA,OAAA,CAAQ,SAAS,EAAE,GAAG,OAAA,CAAQ,MAAA,EAAQ,cAAc,KAAA,EAAM;AAAA,IAC3D;AAEA,IAAA,IAAI,IAAA,CAAK,gBAAA,CAAiB,IAAA,GAAO,CAAA,EAAG;AACnC,MAAA,IAAA,CAAK,UAAU,QAAA,GAAW;AAAA,QACzB,GAAG,KAAK,SAAA,CAAU,QAAA;AAAA,QAClB,kBAAkB,KAAA,CAAM,IAAA,CAAK,IAAA,CAAK,gBAAA,CAAiB,MAAM;AAAA,OAC1D;AAAA,IACD;AAEA,IAAA,OAAO,IAAA,CAAK,SAAA;AAAA,EACb;AAAA,EAEA,mBAAA,GAAsB;AACrB,IAAA,OAAO,IAAA,CAAK,gBAAA;AAAA,EACb;AACD;AAKO,SAAS,WAGd,EAAA,EAA2C;AAC5C,EAAA,OAAO,IAAI,KAAK,EAAE,CAAA;AACnB","file":"chunk-ZCHFZBGL.js","sourcesContent":["import { isNodeClass } from './node'\nimport type { EdgeDefinition, NodeClass, NodeDefinition, NodeFunction, WorkflowBlueprint } from './types'\n\n/**\n * Generates a deterministic hash for a function based on its source code.\n */\nfunction _hashFunction(fn: NodeFunction<any, any, any, any, any> | NodeClass<any, any, any, any, any>): string {\n\tconst source = fn.toString()\n\tlet hash = 0\n\tfor (let i = 0; i < source.length; i++) {\n\t\tconst char = source.charCodeAt(i)\n\t\thash = (hash << 5) - hash + char\n\t\thash = hash & hash // Convert to 32-bit integer\n\t}\n\treturn Math.abs(hash).toString(16)\n}\n\n/**\n * A fluent API for programmatically 
constructing a WorkflowBlueprint.\n */\nexport class Flow<\n\tTContext extends Record<string, any> = Record<string, any>,\n\tTDependencies extends Record<string, any> = Record<string, any>,\n> {\n\tprivate blueprint: Partial<WorkflowBlueprint>\n\tprivate functionRegistry: Map<string, NodeFunction | NodeClass>\n\tprivate loopControllerIds: Map<string, string>\n\tprivate loopDefinitions: Array<{\n\t\tid: string\n\t\tstartNodeId: string\n\t\tendNodeId: string\n\t}>\n\tprivate cycleEntryPoints: Map<string, string>\n\n\tconstructor(id: string) {\n\t\tthis.blueprint = { id, nodes: [], edges: [] }\n\t\tthis.functionRegistry = new Map()\n\t\tthis.loopControllerIds = new Map()\n\t\tthis.loopDefinitions = []\n\t\tthis.cycleEntryPoints = new Map()\n\t}\n\n\tnode<TInput = any, TOutput = any, TAction extends string = string>(\n\t\tid: string,\n\t\timplementation:\n\t\t\t| NodeFunction<TContext, TDependencies, TInput, TOutput, TAction>\n\t\t\t| NodeClass<TContext, TDependencies, TInput, TOutput, TAction>,\n\t\toptions?: Omit<NodeDefinition, 'id' | 'uses'>,\n\t): this {\n\t\tlet usesKey: string\n\n\t\tif (isNodeClass(implementation)) {\n\t\t\tusesKey =\n\t\t\t\timplementation.name && implementation.name !== 'BaseNode'\n\t\t\t\t\t? 
implementation.name\n\t\t\t\t\t: `class_${_hashFunction(implementation)}`\n\t\t\tthis.functionRegistry.set(usesKey, implementation)\n\t\t} else {\n\t\t\tusesKey = `fn_${_hashFunction(implementation)}`\n\t\t\tthis.functionRegistry.set(usesKey, implementation as unknown as NodeFunction)\n\t\t}\n\n\t\tconst nodeDef: NodeDefinition = { id, uses: usesKey, ...options }\n\t\tthis.blueprint.nodes?.push(nodeDef)\n\t\treturn this\n\t}\n\n\tedge(source: string, target: string, options?: Omit<EdgeDefinition, 'source' | 'target'>): this {\n\t\tconst edgeDef: EdgeDefinition = { source, target, ...options }\n\t\tthis.blueprint.edges?.push(edgeDef)\n\t\treturn this\n\t}\n\n\t/**\n\t * Creates a batch processing pattern.\n\t * It takes an input array, runs a worker node on each item in parallel, and gathers the results.\n\t * This method augments the Flow's TContext with a new key for the output array.\n\t *\n\t * @param id The base ID for this batch operation.\n\t * @param worker The node implementation to run on each item.\n\t * @param options Configuration for the batch operation.\n\t * @returns The Flow instance with an updated context type for chaining.\n\t */\n\tbatch<TWorkerInput, TWorkerOutput, TWorkerAction extends string, TOutputKey extends string>(\n\t\tid: string,\n\t\tworker:\n\t\t\t| NodeFunction<TContext, TDependencies, TWorkerInput, TWorkerOutput, TWorkerAction>\n\t\t\t| NodeClass<TContext, TDependencies, TWorkerInput, TWorkerOutput, TWorkerAction>,\n\t\toptions: {\n\t\t\t/** The key in the context that holds the input array for the batch. */\n\t\t\tinputKey: keyof TContext\n\t\t\t/** The key in the context where the array of results will be stored. */\n\t\t\toutputKey: TOutputKey\n\t\t\t/** The number of items to process in each chunk to limit memory usage. 
*/\n\t\t\tchunkSize?: number\n\t\t},\n\t): Flow<TContext & { [K in TOutputKey]: TWorkerOutput[] }, TDependencies> {\n\t\tconst { inputKey, outputKey } = options\n\t\tconst scatterId = `${id}_scatter`\n\t\tconst gatherId = `${id}_gather`\n\n\t\t// register worker implementation under a unique key.\n\t\tlet workerUsesKey: string\n\t\tif (isNodeClass(worker)) {\n\t\t\tworkerUsesKey =\n\t\t\t\tworker.name && worker.name !== 'BaseNode' ? worker.name : `class_batch_worker_${_hashFunction(worker)}`\n\t\t\tthis.functionRegistry.set(workerUsesKey, worker)\n\t\t} else {\n\t\t\tworkerUsesKey = `fn_batch_worker_${_hashFunction(worker)}`\n\t\t\tthis.functionRegistry.set(workerUsesKey, worker as unknown as NodeFunction)\n\t\t}\n\n\t\t// scatter node: takes an array and dynamically schedules worker nodes\n\t\tthis.blueprint.nodes?.push({\n\t\t\tid: scatterId,\n\t\t\tuses: 'batch-scatter', // built-in\n\t\t\tinputs: inputKey as string,\n\t\t\tparams: { workerUsesKey, outputKey: outputKey as string, gatherNodeId: gatherId, chunkSize: options.chunkSize },\n\t\t})\n\n\t\t// gather node: waits for all workers to finish and collects the results\n\t\tthis.blueprint.nodes?.push({\n\t\t\tid: gatherId,\n\t\t\tuses: 'batch-gather', // built-in\n\t\t\tparams: { outputKey, gatherNodeId: gatherId },\n\t\t\tconfig: { joinStrategy: 'all' }, // important: must wait for all scattered jobs\n\t\t})\n\n\t\t// edge to connect scatter and gather nodes, orchestrator will manage dynamic workers\n\t\tthis.edge(scatterId, gatherId)\n\n\t\treturn this as unknown as Flow<TContext & { [K in TOutputKey]: TWorkerOutput[] }, TDependencies>\n\t}\n\n\t/**\n\t * Creates a loop pattern in the workflow graph.\n\t * @param id A unique identifier for the loop construct.\n\t * @param options Defines the start, end, and continuation condition of the loop.\n\t * @param options.startNodeId The ID of the first node inside the loop body.\n\t * @param options.endNodeId The ID of the last node inside the loop body.\n\t * 
@param options.condition An expression that, if true, causes the loop to run again.\n\t */\n\tloop(\n\t\tid: string,\n\t\toptions: {\n\t\t\t/** The ID of the first node inside the loop body. */\n\t\t\tstartNodeId: string\n\t\t\t/** The ID of the last node inside the loop body. */\n\t\t\tendNodeId: string\n\t\t\t/** An expression that, if true, causes the loop to run again. */\n\t\t\tcondition: string\n\t\t},\n\t): this {\n\t\tconst { startNodeId, endNodeId, condition } = options\n\t\tconst controllerId = `${id}-loop`\n\n\t\tthis.loopControllerIds.set(id, controllerId)\n\n\t\tthis.loopDefinitions.push({ id, startNodeId, endNodeId })\n\n\t\t// controller node: evaluates the loop condition\n\t\tthis.blueprint.nodes?.push({\n\t\t\tid: controllerId,\n\t\t\tuses: 'loop-controller', // built-in\n\t\t\tparams: { condition },\n\t\t\tconfig: { joinStrategy: 'any' }, // to allow re-execution on each loop iteration\n\t\t})\n\n\t\tthis.edge(endNodeId, controllerId)\n\n\t\tthis.edge(controllerId, startNodeId, {\n\t\t\taction: 'continue',\n\t\t\ttransform: `context.${endNodeId}`, // pass the end node's value to the start node\n\t\t})\n\n\t\treturn this\n\t}\n\n\tgetLoopControllerId(id: string): string {\n\t\tconst controllerId = this.loopControllerIds.get(id)\n\t\tif (!controllerId) {\n\t\t\tthrow new Error(`Loop with id '${id}' not found. 
Ensure you have defined it using the .loop() method.`)\n\t\t}\n\t\treturn controllerId\n\t}\n\n\t/**\n\t * Sets the preferred entry point for a cycle in non-DAG workflows.\n\t * This helps remove ambiguity when the runtime needs to choose a starting node for cycles.\n\t * @param nodeId The ID of the node to use as the entry point for cycles containing this node.\n\t */\n\tsetCycleEntryPoint(nodeId: string): this {\n\t\tthis.cycleEntryPoints.set(nodeId, nodeId)\n\t\treturn this\n\t}\n\n\ttoBlueprint(): WorkflowBlueprint {\n\t\tif (!this.blueprint.nodes || this.blueprint.nodes.length === 0) {\n\t\t\tthrow new Error('Cannot build a blueprint with no nodes.')\n\t\t}\n\n\t\tfor (const loopDef of this.loopDefinitions) {\n\t\t\tconst startNode = this.blueprint.nodes?.find((n) => n.id === loopDef.startNodeId)\n\t\t\tconst endNode = this.blueprint.nodes?.find((n) => n.id === loopDef.endNodeId)\n\n\t\t\tif (!startNode) {\n\t\t\t\tthrow new Error(`Loop '${loopDef.id}' references non-existent start node '${loopDef.startNodeId}'.`)\n\t\t\t}\n\t\t\tif (!endNode) {\n\t\t\t\tthrow new Error(`Loop '${loopDef.id}' references non-existent end node '${loopDef.endNodeId}'.`)\n\t\t\t}\n\n\t\t\tstartNode.config = { ...startNode.config, joinStrategy: 'any' }\n\t\t\tendNode.config = { ...endNode.config, joinStrategy: 'any' }\n\t\t}\n\n\t\tif (this.cycleEntryPoints.size > 0) {\n\t\t\tthis.blueprint.metadata = {\n\t\t\t\t...this.blueprint.metadata,\n\t\t\t\tcycleEntryPoints: Array.from(this.cycleEntryPoints.keys()),\n\t\t\t}\n\t\t}\n\n\t\treturn this.blueprint as WorkflowBlueprint\n\t}\n\n\tgetFunctionRegistry() {\n\t\treturn this.functionRegistry\n\t}\n}\n\n/**\n * Helper function to create a new Flow builder instance.\n */\nexport function createFlow<\n\tTContext extends Record<string, any> = Record<string, any>,\n\tTDependencies extends Record<string, any> = Record<string, any>,\n>(id: string): Flow<TContext, TDependencies> {\n\treturn new Flow(id)\n}\n"]}
|
package/dist/types-CsTeXTiA.d.ts
DELETED
|
@@ -1,222 +0,0 @@
|
|
|
1
|
-
/** A type guard to reliably distinguish a NodeClass from a NodeFunction. */
|
|
2
|
-
declare function isNodeClass(impl: any): impl is NodeClass;
|
|
3
|
-
/**
|
|
4
|
-
* A structured, class-based node for complex logic with a safe, granular lifecycle.
|
|
5
|
-
* This class is generic, allowing implementations to specify the exact context
|
|
6
|
-
* and dependency types they expect.
|
|
7
|
-
*/
|
|
8
|
-
declare abstract class BaseNode<TContext extends Record<string, any> = Record<string, any>, TDependencies extends RuntimeDependencies = RuntimeDependencies, TInput = any, TOutput = any, TAction extends string = string> {
|
|
9
|
-
protected params?: Record<string, any> | undefined;
|
|
10
|
-
/**
|
|
11
|
-
* @param params Static parameters for this node instance, passed from the blueprint.
|
|
12
|
-
*/
|
|
13
|
-
constructor(params?: Record<string, any> | undefined);
|
|
14
|
-
/**
|
|
15
|
-
* Phase 1: Gathers and prepares data for execution. This phase is NOT retried on failure.
|
|
16
|
-
* @param context The node's execution context.
|
|
17
|
-
* @returns The data needed for the `exec` phase.
|
|
18
|
-
*/
|
|
19
|
-
prep(context: NodeContext<TContext, TDependencies, TInput>): Promise<any>;
|
|
20
|
-
/**
|
|
21
|
-
* Phase 2: Performs the core, isolated logic. This is the ONLY phase that is retried.
|
|
22
|
-
* @param prepResult The data returned from the `prep` phase.
|
|
23
|
-
* @param context The node's execution context.
|
|
24
|
-
*/
|
|
25
|
-
abstract exec(prepResult: any, context: NodeContext<TContext, TDependencies, TInput>): Promise<Omit<NodeResult<TOutput, TAction>, 'error'>>;
|
|
26
|
-
/**
|
|
27
|
-
* Phase 3: Processes the result and saves state. This phase is NOT retried.
|
|
28
|
-
* @param execResult The successful result from the `exec` or `fallback` phase.
|
|
29
|
-
* @param _context The node's execution context.
|
|
30
|
-
*/
|
|
31
|
-
post(execResult: Omit<NodeResult<TOutput, TAction>, 'error'>, _context: NodeContext<TContext, TDependencies, TInput>): Promise<NodeResult<TOutput, TAction>>;
|
|
32
|
-
/**
|
|
33
|
-
* An optional safety net that runs if all `exec` retries fail.
|
|
34
|
-
* @param error The final error from the last `exec` attempt.
|
|
35
|
-
* @param _context The node's execution context.
|
|
36
|
-
*/
|
|
37
|
-
fallback(error: Error, _context: NodeContext<TContext, TDependencies, TInput>): Promise<Omit<NodeResult<TOutput, TAction>, 'error'>>;
|
|
38
|
-
/**
|
|
39
|
-
* An optional cleanup phase for non-retriable errors that occur outside the main `exec` method.
|
|
40
|
-
* This method is invoked in a `finally` block or equivalent construct if a fatal, unhandled exception occurs in the `prep`, `exec`, or `post` phases.
|
|
41
|
-
* Allows nodes to perform crucial cleanup, such as closing database connections or releasing locks.
|
|
42
|
-
* @param _error The error that caused the failure.
|
|
43
|
-
* @param _context The node's execution context.
|
|
44
|
-
*/
|
|
45
|
-
recover(_error: Error, _context: NodeContext<TContext, TDependencies, TInput>): Promise<void>;
|
|
46
|
-
}
|
|
47
|
-
|
|
48
|
-
/** The central, serializable representation of a workflow. */
|
|
49
|
-
interface WorkflowBlueprint {
|
|
50
|
-
id: string;
|
|
51
|
-
nodes: NodeDefinition[];
|
|
52
|
-
edges: EdgeDefinition[];
|
|
53
|
-
metadata?: Record<string, any>;
|
|
54
|
-
}
|
|
55
|
-
/** Defines a single step in the workflow. */
|
|
56
|
-
interface NodeDefinition {
|
|
57
|
-
id: string;
|
|
58
|
-
/** A key that resolves to an implementation in a registry. */
|
|
59
|
-
uses: string;
|
|
60
|
-
/** Static parameters for the node. */
|
|
61
|
-
params?: Record<string, any>;
|
|
62
|
-
/** Maps context data to this node's `input`. */
|
|
63
|
-
inputs?: string | Record<string, string>;
|
|
64
|
-
/** Configuration for retries, timeouts, etc. */
|
|
65
|
-
config?: NodeConfig;
|
|
66
|
-
}
|
|
67
|
-
/** Defines the connection and data flow between two nodes. */
|
|
68
|
-
interface EdgeDefinition {
|
|
69
|
-
source: string;
|
|
70
|
-
target: string;
|
|
71
|
-
/** An 'action' from the source node that triggers this edge. */
|
|
72
|
-
action?: string;
|
|
73
|
-
/** A condition that must be met for this edge to be taken. */
|
|
74
|
-
condition?: string;
|
|
75
|
-
/** A string expression to transform the data before passing it to the target node. */
|
|
76
|
-
transform?: string;
|
|
77
|
-
}
|
|
78
|
-
/** Configuration for a node's resiliency and behavior. */
|
|
79
|
-
interface NodeConfig {
|
|
80
|
-
maxRetries?: number;
|
|
81
|
-
retryDelay?: number;
|
|
82
|
-
timeout?: number;
|
|
83
|
-
/** The `uses` key of another node implementation for fallback. */
|
|
84
|
-
fallback?: string;
|
|
85
|
-
/** Determines how a node with multiple incoming edges should be triggered. */
|
|
86
|
-
joinStrategy?: 'all' | 'any';
|
|
87
|
-
}
|
|
88
|
-
/** The required return type for any node implementation. */
interface NodeResult<TOutput = any, TAction extends string = string> {
    /** The node's output value, made available to downstream nodes. */
    output?: TOutput;
    /** An action label that can select which outgoing edge(s) fire (see EdgeDefinition.action). */
    action?: TAction;
    /** Structured failure information; presence presumably marks the node as failed — confirm. */
    error?: {
        message: string;
        [key: string]: any;
    };
    /** Allows a node to dynamically schedule new nodes for the orchestrator to execute. */
    dynamicNodes?: NodeDefinition[];
    /** Internal flag: Indicates that this result came from a fallback execution. */
    _fallbackExecuted?: boolean;
}
|
|
101
|
-
/** The context object passed to every node's execution logic. */
interface NodeContext<TContext extends Record<string, any> = Record<string, any>, TDependencies extends RuntimeDependencies = RuntimeDependencies, TInput = any> {
    /** The async-only interface for interacting with the workflow's state. */
    context: IAsyncContext<TContext>;
    /** The primary input data for this node, typically from its predecessor. */
    input?: TInput;
    /** Static parameters defined in the blueprint (NodeDefinition.params). */
    params: Record<string, any>;
    /** Shared, runtime-level dependencies (e.g., database clients, loggers). */
    dependencies: TDependencies;
    /** A signal to gracefully cancel long-running node operations. */
    signal?: AbortSignal;
}
|
|
114
|
-
/**
 * A simple function-based node implementation: receives the execution
 * context and resolves with a NodeResult.
 */
type NodeFunction<TContext extends Record<string, any> = Record<string, any>, TDependencies extends RuntimeDependencies = RuntimeDependencies, TInput = any, TOutput = any, TAction extends string = string> = (context: NodeContext<TContext, TDependencies, TInput>) => Promise<NodeResult<TOutput, TAction>>;
|
|
116
|
-
/**
 * Represents a constructor for any concrete class that extends the abstract
 * BaseNode. Constructed with the node's static params, if any.
 */
type NodeClass<TContext extends Record<string, any> = Record<string, any>, TDependencies extends RuntimeDependencies = RuntimeDependencies, TInput = any, TOutput = any, TAction extends string = string> = new (params?: Record<string, any>) => BaseNode<TContext, TDependencies, TInput, TOutput, TAction>;
|
|
118
|
-
/** A union of all possible node implementation types (function or class based). */
type NodeImplementation = NodeFunction | NodeClass;
|
|
120
|
-
/** A registry mapping node `uses` keys to their implementations. */
type NodeRegistry = Record<string, NodeImplementation>;
|
|
122
|
-
/** A discriminated union (on `type`) of all possible context implementations. */
type ContextImplementation<T extends Record<string, any>> = ISyncContext<T> | IAsyncContext<T>;
|
|
124
|
-
/** The synchronous context interface for high-performance, in-memory state. */
interface ISyncContext<TContext extends Record<string, any> = Record<string, any>> {
    /** Discriminant distinguishing this from IAsyncContext. */
    readonly type: 'sync';
    /** Reads a value: typed overload for known keys, string overload for dynamic keys. */
    get<K extends keyof TContext>(key: K): TContext[K] | undefined;
    get(key: string): any | undefined;
    /** Writes a value: typed overload for known keys, string overload for dynamic keys. */
    set<K extends keyof TContext>(key: K, value: TContext[K]): void;
    set(key: string, value: any): void;
    /** Checks whether a key is present. */
    has<K extends keyof TContext>(key: K): boolean;
    has(key: string): boolean;
    /** Removes a key; returns whether a value was removed (presumed — confirm implementation). */
    delete<K extends keyof TContext>(key: K): boolean;
    delete(key: string): boolean;
    /** Snapshots the full state as a plain object. */
    toJSON: () => Record<string, any>;
}
|
|
137
|
-
/** The asynchronous context interface for remote or distributed state. */
interface IAsyncContext<TContext extends Record<string, any> = Record<string, any>> {
    /** Discriminant distinguishing this from ISyncContext. */
    readonly type: 'async';
    /** Reads a value: typed overload for known keys, string overload for dynamic keys. */
    get<K extends keyof TContext>(key: K): Promise<TContext[K] | undefined>;
    get(key: string): Promise<any | undefined>;
    /** Writes a value: typed overload for known keys, string overload for dynamic keys. */
    set<K extends keyof TContext>(key: K, value: TContext[K]): Promise<void>;
    set(key: string, value: any): Promise<void>;
    /** Checks whether a key is present. */
    has<K extends keyof TContext>(key: K): Promise<boolean>;
    has(key: string): Promise<boolean>;
    /** Removes a key; resolves with whether a value was removed (presumed — confirm implementation). */
    delete<K extends keyof TContext>(key: K): Promise<boolean>;
    delete(key: string): Promise<boolean>;
    /** Snapshots the full state as a plain object. */
    toJSON: () => Promise<Record<string, any>>;
}
|
|
150
|
-
/** Generic for any set of dependencies injected into nodes at runtime. */
interface RuntimeDependencies {
    [key: string]: any;
}
|
|
154
|
-
/** Configuration options for the FlowRuntime. */
interface RuntimeOptions<TDependencies extends RuntimeDependencies = RuntimeDependencies> {
    /** A registry of globally available node implementations, keyed by `uses`. */
    registry?: Record<string, NodeFunction | NodeClass | typeof BaseNode>;
    /** A registry of all available workflow blueprints for subflow execution. */
    blueprints?: Record<string, WorkflowBlueprint>;
    /** Shared dependencies to be injected into every node. */
    dependencies?: TDependencies;
    /** A pluggable logger for consistent output. */
    logger?: ILogger;
    /** A pluggable event bus for observability. */
    eventBus?: IEventBus;
    /**
     * A pluggable evaluator for edge conditions and transforms.
     * @default new PropertyEvaluator() - A safe evaluator for simple property access.
     * For complex logic, provide a custom implementation or use the `UnsafeEvaluator`
     * (not recommended for production).
     */
    evaluator?: IEvaluator;
    /** An array of middleware to wrap node execution. */
    middleware?: Middleware[];
    /** A pluggable serializer for handling complex data types in the context. */
    serializer?: ISerializer;
    /** When true, enforces stricter workflow validation/behavior (exact semantics defined by the runtime — confirm). */
    strict?: boolean;
}
|
|
180
|
-
/** Interface for a pluggable expression evaluator for conditions and transforms. */
interface IEvaluator {
    /** Evaluates `expression` against the given data and returns the result. */
    evaluate: (expression: string, context: Record<string, any>) => any;
}
|
|
184
|
-
/** Interface for a pluggable logger. Each method takes a message plus optional structured metadata. */
interface ILogger {
    debug: (message: string, meta?: Record<string, any>) => void;
    info: (message: string, meta?: Record<string, any>) => void;
    warn: (message: string, meta?: Record<string, any>) => void;
    error: (message: string, meta?: Record<string, any>) => void;
}
|
|
191
|
-
/** Interface for a pluggable event bus. Emission may be sync or async. */
interface IEventBus {
    emit: (eventName: string, payload: Record<string, any>) => void | Promise<void>;
}
|
|
195
|
-
/** Interface for a pluggable serializer. `deserialize` should invert `serialize`. */
interface ISerializer {
    serialize: (data: Record<string, any>) => string;
    deserialize: (text: string) => Record<string, any>;
}
|
|
200
|
-
/** Interface for middleware to handle cross-cutting concerns. */
interface Middleware<TContext extends Record<string, any> = Record<string, any>> {
    /** Hook invoked before a node executes. */
    beforeNode?: (ctx: ContextImplementation<TContext>, nodeId: string) => void | Promise<void>;
    /** Hook invoked after a node executes; receives its result or the error that was thrown. */
    afterNode?: (ctx: ContextImplementation<TContext>, nodeId: string, result: NodeResult | undefined, error: Error | undefined) => void | Promise<void>;
    /** Wraps the node execution; the implementation must call `next()` to run the node (and any inner middleware). */
    aroundNode?: (ctx: ContextImplementation<TContext>, nodeId: string, next: () => Promise<NodeResult>) => Promise<NodeResult>;
}
|
|
206
|
-
/** A structured error object returned from a failed workflow execution. */
interface WorkflowError {
    /** The id of the node whose execution failed. */
    nodeId: string;
    /** Human-readable description of the failure. */
    message: string;
    /** The underlying thrown value, when available. */
    originalError?: any;
    /** When the failure occurred (string-encoded; presumably ISO 8601 — confirm). */
    timestamp: string;
    /** Stack trace of the original error, when available. */
    stack?: string;
}
|
|
214
|
-
/** The final result of a workflow execution. */
interface WorkflowResult<TContext = Record<string, any>> {
    /** The final workflow state. */
    context: TContext;
    /** The final state serialized via the configured ISerializer. */
    serializedContext: string;
    /** Terminal status of the run. */
    status: 'completed' | 'failed' | 'stalled' | 'cancelled';
    /** Errors collected during the run; presumably present when status is not 'completed' — confirm. */
    errors?: WorkflowError[];
}
|
|
221
|
-
|
|
222
|
-
// Aggregate export with single-letter aliases — a bundler-generated pattern
// (e.g. tsup/rollup chunking) that keeps cross-chunk references compact.
export { BaseNode as B, type ContextImplementation as C, type EdgeDefinition as E, type ISyncContext as I, type Middleware as M, type NodeDefinition as N, type RuntimeDependencies as R, type WorkflowBlueprint as W, type NodeConfig as a, type NodeResult as b, type NodeContext as c, type NodeFunction as d, type NodeClass as e, type NodeImplementation as f, type NodeRegistry as g, type IAsyncContext as h, isNodeClass as i, type RuntimeOptions as j, type IEvaluator as k, type ILogger as l, type IEventBus as m, type ISerializer as n, type WorkflowError as o, type WorkflowResult as p };
|