@nebulaos/core 0.1.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +206 -0
- package/dist/__tests__/mocks/mock-provider.d.ts +15 -0
- package/dist/__tests__/mocks/mock-provider.js +44 -0
- package/dist/agent/Agent.d.ts +96 -0
- package/dist/agent/Agent.js +861 -0
- package/dist/agent/BaseAgent.d.ts +53 -0
- package/dist/agent/BaseAgent.js +126 -0
- package/dist/agent/events/events.d.ts +14 -0
- package/dist/agent/events/events.js +2 -0
- package/dist/agent/events/events.spec.d.ts +1 -0
- package/dist/agent/events/events.spec.js +75 -0
- package/dist/agent/instruction/index.d.ts +23 -0
- package/dist/agent/instruction/index.js +76 -0
- package/dist/agent/memory/in-memory.d.ts +24 -0
- package/dist/agent/memory/in-memory.js +78 -0
- package/dist/agent/memory/index.d.ts +2 -0
- package/dist/agent/memory/index.js +18 -0
- package/dist/agent/memory/memory.d.ts +43 -0
- package/dist/agent/memory/memory.js +7 -0
- package/dist/agent/provider/file-parts.spec.d.ts +1 -0
- package/dist/agent/provider/file-parts.spec.js +83 -0
- package/dist/agent/provider/index.d.ts +130 -0
- package/dist/agent/provider/index.js +8 -0
- package/dist/agent/skills/index.d.ts +61 -0
- package/dist/agent/skills/index.js +9 -0
- package/dist/agent/tools/index.d.ts +35 -0
- package/dist/agent/tools/index.js +87 -0
- package/dist/cost/add-cost.d.ts +10 -0
- package/dist/cost/add-cost.js +80 -0
- package/dist/cost/add-cost.spec.d.ts +1 -0
- package/dist/cost/add-cost.spec.js +36 -0
- package/dist/cost/index.d.ts +1 -0
- package/dist/cost/index.js +17 -0
- package/dist/domain-events/index.d.ts +16 -0
- package/dist/domain-events/index.js +38 -0
- package/dist/eval/index.d.ts +19 -0
- package/dist/eval/index.js +24 -0
- package/dist/events/base.d.ts +5 -0
- package/dist/events/base.js +2 -0
- package/dist/events/schemas.d.ts +3463 -0
- package/dist/events/schemas.js +244 -0
- package/dist/execution-context/index.d.ts +21 -0
- package/dist/execution-context/index.js +17 -0
- package/dist/index.cjs +2958 -0
- package/dist/index.cjs.map +1 -0
- package/dist/index.d.cts +3425 -0
- package/dist/index.d.ts +22 -0
- package/dist/index.js +53 -0
- package/dist/index.js.map +1 -0
- package/dist/lgpd/index.d.ts +7 -0
- package/dist/lgpd/index.js +21 -0
- package/dist/logger/agent-logger.d.ts +16 -0
- package/dist/logger/agent-logger.js +110 -0
- package/dist/logger/formatters.d.ts +32 -0
- package/dist/logger/formatters.js +146 -0
- package/dist/logger/index.d.ts +30 -0
- package/dist/logger/index.js +88 -0
- package/dist/logger/styles.d.ts +46 -0
- package/dist/logger/styles.js +53 -0
- package/dist/logger/workflow-logger.d.ts +16 -0
- package/dist/logger/workflow-logger.js +79 -0
- package/dist/multi-agent/agent-as-tool/AgentAsTool.d.ts +16 -0
- package/dist/multi-agent/agent-as-tool/AgentAsTool.js +54 -0
- package/dist/multi-agent/agent-as-tool/AgentAsTool.spec.d.ts +1 -0
- package/dist/multi-agent/agent-as-tool/AgentAsTool.spec.js +76 -0
- package/dist/multi-agent/committee-team/CommitteeTeam.d.ts +16 -0
- package/dist/multi-agent/committee-team/CommitteeTeam.js +150 -0
- package/dist/multi-agent/committee-team/CommitteeTeam.spec.d.ts +1 -0
- package/dist/multi-agent/committee-team/CommitteeTeam.spec.js +43 -0
- package/dist/multi-agent/handoff-team/HandoffTeam.d.ts +16 -0
- package/dist/multi-agent/handoff-team/HandoffTeam.js +185 -0
- package/dist/multi-agent/handoff-team/HandoffTeam.spec.d.ts +1 -0
- package/dist/multi-agent/handoff-team/HandoffTeam.spec.js +105 -0
- package/dist/multi-agent/hierarchical-team/HierarchicalTeam.d.ts +18 -0
- package/dist/multi-agent/hierarchical-team/HierarchicalTeam.js +164 -0
- package/dist/multi-agent/hierarchical-team/HierarchicalTeam.spec.d.ts +1 -0
- package/dist/multi-agent/hierarchical-team/HierarchicalTeam.spec.js +53 -0
- package/dist/multi-agent/index.d.ts +10 -0
- package/dist/multi-agent/index.js +26 -0
- package/dist/multi-agent/pipeline-team/PipelineTeam.d.ts +13 -0
- package/dist/multi-agent/pipeline-team/PipelineTeam.js +104 -0
- package/dist/multi-agent/pipeline-team/PipelineTeam.spec.d.ts +1 -0
- package/dist/multi-agent/pipeline-team/PipelineTeam.spec.js +54 -0
- package/dist/multi-agent/router-team/RouterTeam.d.ts +15 -0
- package/dist/multi-agent/router-team/RouterTeam.js +153 -0
- package/dist/multi-agent/router-team/RouterTeam.spec.d.ts +1 -0
- package/dist/multi-agent/router-team/RouterTeam.spec.js +69 -0
- package/dist/multi-agent/types/index.d.ts +349 -0
- package/dist/multi-agent/types/index.js +79 -0
- package/dist/multi-agent/utils/guardrails.d.ts +6 -0
- package/dist/multi-agent/utils/guardrails.js +34 -0
- package/dist/multi-agent/utils/memory.d.ts +8 -0
- package/dist/multi-agent/utils/memory.js +40 -0
- package/dist/multi-agent/utils/prompts.d.ts +4 -0
- package/dist/multi-agent/utils/prompts.js +25 -0
- package/dist/tracing/index.d.ts +89 -0
- package/dist/tracing/index.js +188 -0
- package/dist/tsup.config.d.ts +2 -0
- package/dist/tsup.config.js +11 -0
- package/dist/utils/schema-to-zod.d.ts +7 -0
- package/dist/utils/schema-to-zod.js +36 -0
- package/dist/workflow/Workflow.d.ts +106 -0
- package/dist/workflow/Workflow.js +204 -0
- package/dist/workflow/adapters.d.ts +61 -0
- package/dist/workflow/adapters.js +29 -0
- package/dist/workflow/definition/DefinitionBuilder.d.ts +9 -0
- package/dist/workflow/definition/DefinitionBuilder.js +91 -0
- package/dist/workflow/definition/DefinitionBuilder.spec.d.ts +1 -0
- package/dist/workflow/definition/DefinitionBuilder.spec.js +66 -0
- package/dist/workflow/definition/DefinitionHasher.d.ts +8 -0
- package/dist/workflow/definition/DefinitionHasher.js +11 -0
- package/dist/workflow/definition/DefinitionHasher.spec.d.ts +1 -0
- package/dist/workflow/definition/DefinitionHasher.spec.js +28 -0
- package/dist/workflow/definition/types.d.ts +27 -0
- package/dist/workflow/definition/types.js +2 -0
- package/dist/workflow/events.d.ts +9 -0
- package/dist/workflow/events.js +2 -0
- package/dist/workflow/execution/AgentNodeIntegration.spec.d.ts +1 -0
- package/dist/workflow/execution/AgentNodeIntegration.spec.js +50 -0
- package/dist/workflow/execution/NodeExecutor.d.ts +9 -0
- package/dist/workflow/execution/NodeExecutor.js +43 -0
- package/dist/workflow/execution/NodeExecutor.spec.d.ts +1 -0
- package/dist/workflow/execution/NodeExecutor.spec.js +45 -0
- package/dist/workflow/execution/WorkflowEventBus.d.ts +14 -0
- package/dist/workflow/execution/WorkflowEventBus.js +42 -0
- package/dist/workflow/execution/WorkflowEventBus.spec.d.ts +1 -0
- package/dist/workflow/execution/WorkflowEventBus.spec.js +78 -0
- package/dist/workflow/execution/WorkflowRunner.d.ts +26 -0
- package/dist/workflow/execution/WorkflowRunner.js +212 -0
- package/dist/workflow/execution/WorkflowRunner.spec.d.ts +1 -0
- package/dist/workflow/execution/WorkflowRunner.spec.js +92 -0
- package/dist/workflow/execution/WorkflowTelemetry.d.ts +13 -0
- package/dist/workflow/execution/WorkflowTelemetry.js +43 -0
- package/dist/workflow/execution/WorkflowTelemetry.spec.d.ts +1 -0
- package/dist/workflow/execution/WorkflowTelemetry.spec.js +31 -0
- package/dist/workflow/graph/NodeNameRegistry.d.ts +20 -0
- package/dist/workflow/graph/NodeNameRegistry.js +21 -0
- package/dist/workflow/graph/NodeNameRegistry.spec.d.ts +1 -0
- package/dist/workflow/graph/NodeNameRegistry.spec.js +18 -0
- package/dist/workflow/graph/WorkflowGraph.d.ts +14 -0
- package/dist/workflow/graph/WorkflowGraph.js +23 -0
- package/dist/workflow/graph/nodes.d.ts +26 -0
- package/dist/workflow/graph/nodes.js +2 -0
- package/dist/workflow/queue/WorkflowQueueService.d.ts +22 -0
- package/dist/workflow/queue/WorkflowQueueService.js +47 -0
- package/dist/workflow/state/WorkflowStateService.d.ts +7 -0
- package/dist/workflow/state/WorkflowStateService.js +20 -0
- package/dist/workflow/types.d.ts +16 -0
- package/dist/workflow/types.js +2 -0
- package/package.json +56 -0
|
@@ -0,0 +1,91 @@
|
|
|
1
|
+
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.DefinitionBuilder = void 0;
/**
 * Flattens a Workflow (and its nested branch/parallel sub-workflows) into a
 * static definition graph: a deduplicated node list plus typed edges
 * ("linear", "branch", "parallel", "merge").
 */
class DefinitionBuilder {
    // Builds { nodes, edges } from the root workflow. Relies on the
    // workflow's internal node list via `wf.__internalNodes()`.
    build(root) {
        const nodes = [];
        const edges = [];
        // Recursive walk; `parentAfterNodeId` is the node that execution
        // continues at after this (sub-)workflow finishes, used as the merge
        // target for trailing branch/parallel nodes.
        const walk = (wf, parentAfterNodeId) => {
            const wfNodes = wf.__internalNodes();
            const linearIds = wfNodes.map((n) => n.id);
            // Collect every node; agent nodes also carry agentName/agentId.
            for (const n of wfNodes) {
                nodes.push({
                    id: n.id,
                    displayName: n.displayName,
                    type: n.type,
                    agentName: n.type === "agent" ? n.agent.name : undefined,
                    agentId: n.type === "agent" ? n.agent.id : undefined,
                });
            }
            // Linear edges between consecutive nodes at this level.
            for (let i = 0; i < linearIds.length - 1; i++) {
                edges.push({ from: linearIds[i], to: linearIds[i + 1], type: "linear" });
            }
            // Fan-out / merge edges for branch and parallel nodes.
            for (let i = 0; i < wfNodes.length; i++) {
                const node = wfNodes[i];
                // Merge target: the next sibling, or the parent's continuation
                // when this node is the last one at this level.
                const nextId = wfNodes[i + 1]?.id ?? parentAfterNodeId;
                if (node.type === "branch") {
                    for (const [key, sub] of Object.entries(node.branches)) {
                        const subIds = sub.__internalNodes().map((n) => n.id);
                        if (subIds.length === 0)
                            continue; // empty branch: no edges to draw
                        // Fan-out edge labeled with the branch key.
                        edges.push({ from: node.id, to: subIds[0], type: "branch", label: key });
                        walk(sub, nextId);
                        // Merge each terminal of the sub-workflow back into nextId.
                        if (nextId) {
                            const terminals = this.findTerminalNodeIds(sub);
                            for (const t of terminals)
                                edges.push({ from: t, to: nextId, type: "merge", label: key });
                        }
                    }
                }
                if (node.type === "parallel") {
                    node.workflows.forEach((sub, index) => {
                        const subIds = sub.__internalNodes().map((n) => n.id);
                        if (subIds.length === 0)
                            return; // empty lane: nothing to connect
                        // Fan-out edge labeled with the lane index.
                        edges.push({ from: node.id, to: subIds[0], type: "parallel", label: String(index) });
                        walk(sub, nextId);
                        if (nextId) {
                            const terminals = this.findTerminalNodeIds(sub);
                            for (const t of terminals)
                                edges.push({ from: t, to: nextId, type: "merge", label: String(index) });
                        }
                    });
                }
            }
        };
        walk(root, undefined);
        // Nodes may repeat when sub-workflows share ids; dedupe keeps first.
        return { nodes: dedupeNodes(nodes), edges };
    }
    // Returns the ids of the node(s) a sub-workflow ends on. For trailing
    // branch/parallel nodes this recurses into each alternative/lane; if all
    // of them are empty, the container node itself is the terminal.
    findTerminalNodeIds(wf) {
        const wfNodes = wf.__internalNodes();
        if (wfNodes.length === 0)
            return [];
        const last = wfNodes[wfNodes.length - 1];
        if (last.type === "branch") {
            const terminals = [];
            for (const sub of Object.values(last.branches)) {
                terminals.push(...this.findTerminalNodeIds(sub));
            }
            return terminals.length ? terminals : [last.id];
        }
        if (last.type === "parallel") {
            const terminals = [];
            for (const sub of last.workflows)
                terminals.push(...this.findTerminalNodeIds(sub));
            return terminals.length ? terminals : [last.id];
        }
        return [last.id];
    }
}
exports.DefinitionBuilder = DefinitionBuilder;
// Keeps the first occurrence of each node id, preserving order.
function dedupeNodes(nodes) {
    const seen = new Set();
    const out = [];
    for (const n of nodes) {
        if (seen.has(n.id))
            continue;
        seen.add(n.id);
        out.push(n);
    }
    return out;
}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export {};
|
|
@@ -0,0 +1,66 @@
|
|
|
1
|
+
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
// Specs for DefinitionBuilder: verifies flattening of a Workflow into a
// definition graph (nodes + linear/branch/parallel/merge edges).
const Workflow_1 = require("../Workflow");
const DefinitionBuilder_1 = require("./DefinitionBuilder");
describe("DefinitionBuilder", () => {
    it("should create linear edges for linear workflows", () => {
        // start -> a -> finish with identity handlers.
        const wf = new Workflow_1.Workflow({ id: "wf" })
            .start("start", async ({ input }) => input)
            .step("a", async ({ input }) => input)
            .finish("finish", async ({ input }) => input);
        const { nodes, edges } = new DefinitionBuilder_1.DefinitionBuilder().build(wf);
        expect(nodes.map((n) => n.id)).toEqual(["start", "a", "finish"]);
        expect(edges).toEqual([
            { from: "start", to: "a", type: "linear" },
            { from: "a", to: "finish", type: "linear" },
        ]);
    });
    it("should include branch fan-out and merge edges", () => {
        // before -> route{billing|tech} -> after
        const wf = new Workflow_1.Workflow({ id: "wf" })
            .step("before", async ({ input }) => input)
            .branch("route", async () => "billing", {
            billing: (b) => b.step("billing-step", async ({ input }) => input),
            tech: (t) => t.step("tech-step", async ({ input }) => input),
        })
            .step("after", async ({ input }) => input);
        const { edges } = new DefinitionBuilder_1.DefinitionBuilder().build(wf);
        // Fan-out from branch node
        expect(edges).toEqual(expect.arrayContaining([
            { from: "before", to: "route", type: "linear" },
            { from: "route", to: "billing-step", type: "branch", label: "billing" },
            { from: "route", to: "tech-step", type: "branch", label: "tech" },
        ]));
        // Merge back into "after"
        expect(edges).toEqual(expect.arrayContaining([
            { from: "billing-step", to: "after", type: "merge", label: "billing" },
            { from: "tech-step", to: "after", type: "merge", label: "tech" },
        ]));
    });
    it("should include parallel fan-out and merge edges", () => {
        // before -> fanout[p1, p2] -> after; lanes are labeled by index.
        const wf = new Workflow_1.Workflow({ id: "wf" })
            .step("before", async ({ input }) => input)
            .parallel("fanout", [
            (p) => p.step("p1", async ({ input }) => input),
            (p) => p.step("p2", async ({ input }) => input),
        ])
            .step("after", async ({ input }) => input);
        const { edges } = new DefinitionBuilder_1.DefinitionBuilder().build(wf);
        expect(edges).toEqual(expect.arrayContaining([
            { from: "before", to: "fanout", type: "linear" },
            { from: "fanout", to: "p1", type: "parallel", label: "0" },
            { from: "fanout", to: "p2", type: "parallel", label: "1" },
            { from: "p1", to: "after", type: "merge", label: "0" },
            { from: "p2", to: "after", type: "merge", label: "1" },
        ]));
    });
    it("should dedupe nodes across traversal", () => {
        // Same display name used twice; the registry is expected to produce
        // unique ids (e.g. "same-2") so the built graph has no duplicates.
        const wf = new Workflow_1.Workflow({ id: "wf" })
            .step("same", async ({ input }) => input)
            .branch("route", async () => "a", {
            a: (w) => w.step("same", async ({ input }) => input),
        });
        const { nodes } = new DefinitionBuilder_1.DefinitionBuilder().build(wf);
        // Global registry should have deduped ids
        expect(nodes.map((n) => n.id)).toEqual(expect.arrayContaining(["same", "route", "same-2"]));
    });
});
|
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.DefinitionHasher = void 0;
const node_crypto_1 = require("node:crypto");
/**
 * Produces a short, deterministic fingerprint for a workflow definition.
 * Two definitions with the same workflowId, nodes, and edges (serialized in
 * that fixed key order) hash to the same value.
 */
class DefinitionHasher {
    // Returns the first 16 hex characters of a SHA-256 digest over the
    // identity-relevant fields of the definition.
    hash(input) {
        const identity = {
            workflowId: input.workflowId,
            nodes: input.nodes,
            edges: input.edges,
        };
        const hasher = (0, node_crypto_1.createHash)("sha256");
        hasher.update(JSON.stringify(identity));
        return hasher.digest("hex").slice(0, 16);
    }
}
exports.DefinitionHasher = DefinitionHasher;
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export {};
|
|
@@ -0,0 +1,28 @@
|
|
|
1
|
+
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
// Specs for DefinitionHasher: the hash must be stable for identical inputs
// and sensitive to changes in the node list.
const DefinitionHasher_1 = require("./DefinitionHasher");
describe("DefinitionHasher", () => {
    it("should be deterministic for same workflowId + nodes + edges", () => {
        const hasher = new DefinitionHasher_1.DefinitionHasher();
        const input = {
            workflowId: "wf",
            nodes: [{ id: "a", displayName: "a", type: "step" }],
            edges: [],
        };
        // Hashing the same object twice must yield the same value.
        expect(hasher.hash(input)).toBe(hasher.hash(input));
    });
    it("should change when nodes change", () => {
        const hasher = new DefinitionHasher_1.DefinitionHasher();
        const base = hasher.hash({
            workflowId: "wf",
            nodes: [{ id: "a", displayName: "a", type: "step" }],
            edges: [],
        });
        // Adding a node must alter the fingerprint.
        const changed = hasher.hash({
            workflowId: "wf",
            nodes: [{ id: "a", displayName: "a", type: "step" }, { id: "b", displayName: "b", type: "step" }],
            edges: [],
        });
        expect(changed).not.toBe(base);
    });
});
|
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
/** The kinds of nodes a workflow graph may contain. */
export type WorkflowNodeType = "start" | "step" | "agent" | "branch" | "parallel" | "finish";
/** A single node in a built workflow definition graph. */
export type WorkflowDefinitionNode = {
    id: string;
    displayName: string;
    type: WorkflowNodeType;
    metadata?: Record<string, unknown>;
    /** Human-readable agent name; only set when type === "agent". */
    agentName?: string;
    /**
     * Stable agent identifier (matches runtimeResourceId used at registration time).
     * This avoids UI needing to guess whether to use agentName vs agentId.
     */
    agentId?: string;
};
/** A directed edge between two definition nodes, labeled by traversal kind. */
export type WorkflowDefinitionEdge = {
    from: string;
    to: string;
    type: "linear" | "branch" | "parallel" | "merge";
    /** Branch key or parallel lane index (stringified), where applicable. */
    label?: string;
};
/** The full static definition of a workflow: identity, nodes, and edges. */
export type WorkflowDefinition = {
    workflowId: string;
    name?: string;
    description?: string;
    version: string;
    nodes: WorkflowDefinitionNode[];
    edges: WorkflowDefinitionEdge[];
};
|
|
@@ -0,0 +1,9 @@
|
|
|
1
|
+
import { WorkflowRunStart, WorkflowRunEnd, WorkflowRunError, WorkflowNodeStart, WorkflowNodeEnd } from "../events/schemas.js";
/**
 * Maps each workflow event name to its payload schema type.
 * Consumed by WorkflowEventBus to type `on`/`once`/`off`/`emit`.
 */
export type WorkflowEventMap = {
    "workflow:run:start": WorkflowRunStart;
    "workflow:run:end": WorkflowRunEnd;
    "workflow:run:error": WorkflowRunError;
    "workflow:node:start": WorkflowNodeStart;
    "workflow:node:end": WorkflowNodeEnd;
};
/** Union of all workflow event names. */
export type WorkflowEventName = keyof WorkflowEventMap;
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export {};
|
|
@@ -0,0 +1,50 @@
|
|
|
1
|
+
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
// Integration spec: a real Agent executed as a workflow node must produce a
// telemetry span nested under the workflow node span, sharing the traceId
// but using its own sub-execution id.
const Workflow_1 = require("../Workflow");
const Agent_1 = require("../../agent/Agent");
const in_memory_1 = require("../../agent/memory/in-memory");
const mock_provider_1 = require("../../__tests__/mocks/mock-provider");
const tracing_1 = require("../../tracing");
// Test exporter that simply records every batch it receives.
class CapturingExporter {
    events = [];
    async exportBatch(events) {
        this.events.push(...events);
    }
}
describe("Agent node integration (Workflow + Agent + Telemetry)", () => {
    it("should execute agent node with real Agent and nest agent span under node span", async () => {
        const exporter = new CapturingExporter();
        tracing_1.Tracing.setExporter(exporter);
        // Mock provider answers the single agent call with "AGENT_OK".
        const provider = new mock_provider_1.MockProvider();
        provider.enqueueResponse({
            content: "AGENT_OK",
            usage: { promptTokens: 1, completionTokens: 1, totalTokens: 2 },
        });
        const agent = new Agent_1.Agent({
            id: "triage-agent",
            name: "triage-agent",
            memory: new in_memory_1.InMemory(),
            model: provider,
            instructions: "Return the next step.",
        });
        // before -> agent("triage") -> after; toInput maps the step output
        // into the agent's message.
        const wf = new Workflow_1.Workflow({ id: "wf" })
            .step("before", async ({ input }) => input)
            .agent("triage", agent, { toInput: (input) => `input:${String(input)}` })
            .step("after", async ({ input }) => `after:${String(input)}`);
        const out = await wf.run("x", { executionId: "exec-agent-1" });
        expect(out).toBe("after:AGENT_OK");
        const starts = exporter.events.filter((e) => e.type === "telemetry:span:start");
        const nodeSpanStart = starts.find((e) => (e.span.kind === "workflow.node" || e.span.kind === "custom") &&
            e.span.name === "workflow:node:triage");
        expect(nodeSpanStart).toBeDefined();
        const agentSpanStart = starts.find((e) => e.span.kind === "agent" && e.span.name === "agent:triage-agent");
        expect(agentSpanStart).toBeDefined();
        // Agent span must be a child of the workflow node span.
        expect(agentSpanStart.trace.parentSpanId).toBe(nodeSpanStart.trace.spanId);
        // And share the same traceId.
        expect(agentSpanStart.trace.traceId).toBe(nodeSpanStart.trace.traceId);
        // Agent node inside workflow is a separate sub-execution.
        expect(agentSpanStart.executionId).toBeDefined();
        expect(agentSpanStart.executionId).not.toBe("exec-agent-1");
    });
});
|
|
@@ -0,0 +1,43 @@
|
|
|
1
|
+
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.NodeExecutor = void 0;
const node_crypto_1 = require("node:crypto");
const index_js_1 = require("../../execution-context/index.js");
/**
 * Executes a single workflow node of any type, delegating to the node's
 * handler, agent, selected branch sub-workflow, or parallel lanes.
 */
class NodeExecutor {
    // Runs one node once (no retry logic here) and returns its output.
    // `executionId` identifies the enclosing workflow execution.
    async executeOnce(params) {
        const { node, currentInput, state, executionId } = params;
        switch (node.type) {
            case "start":
            case "step":
            case "finish":
                // Plain handler nodes receive the current input and shared state.
                return node.handler({ input: currentInput, state });
            case "agent": {
                // Map workflow input to the agent's message; default mapping
                // passes strings through and JSON-serializes everything else.
                const toInput = node.toInput ??
                    ((input) => (typeof input === "string" ? input : JSON.stringify(input)));
                const message = toInput(currentInput, state);
                // IMPORTANT:
                // - agent nodes inside a workflow are tracked as independent sub-executions
                // - they must share the same traceId, but use a different executionId
                const agentExecutionId = (0, node_crypto_1.randomUUID)();
                const result = await index_js_1.ExecutionContext.run({
                    executionId: agentExecutionId,
                    rootExecutionId: executionId,
                    parentExecutionId: executionId,
                }, async () => node.agent.execute(message, { executionId: agentExecutionId }));
                return result.content;
            }
            case "branch": {
                // The selector picks which sub-workflow to run.
                const key = await node.selector({ input: currentInput, state });
                const wf = node.branches[key];
                if (!wf)
                    throw new Error(`Unknown branch key '${key}' for node '${node.id}'`);
                // Sub-workflow runs under the same executionId.
                return wf.run(currentInput, { executionId });
            }
            case "parallel": {
                // All lanes run concurrently with the same input; the node's
                // output is the array of lane results in declaration order.
                const results = await Promise.all(node.workflows.map((wf) => wf.run(currentInput, { executionId })));
                return results;
            }
            default: {
                // Exhaustiveness guard: previously an unrecognized node type fell
                // through and silently resolved to `undefined`. Fail loudly so a
                // malformed graph is caught at the offending node.
                throw new Error(`Unknown node type '${node.type}' for node '${node.id}'`);
            }
        }
    }
}
exports.NodeExecutor = NodeExecutor;
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export {};
|
|
@@ -0,0 +1,45 @@
|
|
|
1
|
+
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
// Specs for NodeExecutor: step handler invocation and agent-node input
// mapping via `toInput`.
const NodeExecutor_1 = require("./NodeExecutor");
describe("NodeExecutor", () => {
    it("should execute a step node", async () => {
        const exec = new NodeExecutor_1.NodeExecutor();
        // A step node's handler receives { input, state } and its return
        // value becomes the node output.
        const out = await exec.executeOnce({
            node: {
                id: "a",
                displayName: "a",
                type: "step",
                handler: async ({ input }) => `${input}-ok`,
            },
            currentInput: "x",
            state: {},
            executionId: "exec-1",
        });
        expect(out).toBe("x-ok");
    });
    it("should execute an agent node using toInput mapping", async () => {
        const exec = new NodeExecutor_1.NodeExecutor();
        const calls = [];
        // Minimal agent double: records the message and echoes it back.
        const fakeAgent = {
            name: "fake-agent",
            execute: async (input) => {
                calls.push(input);
                return { content: `resp:${input}` };
            },
        };
        const out = await exec.executeOnce({
            node: {
                id: "agent1",
                displayName: "agent1",
                type: "agent",
                agent: fakeAgent,
                toInput: (input) => `mapped:${input.value}`,
            },
            currentInput: { value: 123 },
            state: {},
            executionId: "exec-1",
        });
        // The agent receives the mapped input, and the node output is the
        // agent result's `content`.
        expect(out).toBe("resp:mapped:123");
        expect(calls).toEqual(["mapped:123"]);
    });
});
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
import { EventEmitter } from "node:events";
import type { WorkflowEventMap, WorkflowEventName } from "../events.js";
/**
 * Typed facade over a Node EventEmitter for workflow events.
 * `emit` enriches the payload (trace/execution context) before publishing;
 * listeners registered via on/once/off receive the enriched event.
 */
export declare class WorkflowEventBus {
    private readonly emitter;
    constructor(emitter: EventEmitter);
    on<E extends WorkflowEventName>(event: E, listener: (data: WorkflowEventMap[E]) => void): void;
    once<E extends WorkflowEventName>(event: E, listener: (data: WorkflowEventMap[E]) => void): void;
    off<E extends WorkflowEventName>(event: E, listener: (data: WorkflowEventMap[E]) => void): void;
    emit<E extends WorkflowEventName>(event: E, payload: {
        data: WorkflowEventMap[E]["data"];
        correlationId?: string;
        executionId?: string;
    }): void;
}
|
|
@@ -0,0 +1,42 @@
|
|
|
1
|
+
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.WorkflowEventBus = void 0;
const index_js_1 = require("../../tracing/index.js");
const index_js_2 = require("../../execution-context/index.js");
const index_js_3 = require("../../domain-events/index.js");
/**
 * Typed facade over a Node EventEmitter for workflow events.
 * `emit` enriches each payload with the active tracing and execution
 * contexts, publishes it to DomainEvents, then delivers it locally.
 */
class WorkflowEventBus {
    emitter;
    constructor(emitter) {
        this.emitter = emitter;
    }
    // Subscribe for every occurrence of `event`.
    on(event, listener) {
        this.emitter.on(event, listener);
    }
    // Subscribe for a single occurrence of `event`.
    once(event, listener) {
        this.emitter.once(event, listener);
    }
    // Remove a previously registered listener.
    off(event, listener) {
        this.emitter.off(event, listener);
    }
    // Build the enriched event, publish it globally, then emit locally.
    emit(event, payload) {
        const span = index_js_1.Tracing.getContext();
        const exec = index_js_2.ExecutionContext.getOrUndefined();
        const enriched = {
            type: event,
            timestamp: new Date().toISOString(),
            trace: {
                // Fall back to time-based placeholder ids when no span is active.
                traceId: span?.traceId ?? `fallback-${Date.now()}`,
                spanId: span?.spanId ?? `fallback-${Date.now()}`,
                parentSpanId: span?.parentId,
            },
            correlationId: payload.correlationId,
            // Explicit executionId wins over the ambient execution context.
            executionId: payload.executionId ?? exec?.executionId,
            rootExecutionId: exec?.rootExecutionId,
            parentExecutionId: exec?.parentExecutionId,
            data: payload.data,
        };
        index_js_3.DomainEvents.publish(enriched);
        this.emitter.emit(event, enriched);
    }
}
exports.WorkflowEventBus = WorkflowEventBus;
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export {};
|
|
@@ -0,0 +1,78 @@
|
|
|
1
|
+
"use strict";
// The helpers below are TypeScript-emitted interop shims (__createBinding,
// __setModuleDefault, __importStar) used by the dynamic import() further down.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
        desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || (function () {
    var ownKeys = function(o) {
        ownKeys = Object.getOwnPropertyNames || function (o) {
            var ar = [];
            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
            return ar;
        };
        return ownKeys(o);
    };
    return function (mod) {
        if (mod && mod.__esModule) return mod;
        var result = {};
        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
        __setModuleDefault(result, mod);
        return result;
    };
})();
Object.defineProperty(exports, "__esModule", { value: true });
// Specs for WorkflowEventBus: event enrichment with trace context, and
// executionId propagation through nested branch/parallel runs.
const node_events_1 = require("node:events");
const tracing_1 = require("../../tracing");
const WorkflowEventBus_1 = require("./WorkflowEventBus");
describe("WorkflowEventBus", () => {
    it("should emit enriched events with trace context", async () => {
        const emitter = new node_events_1.EventEmitter();
        const bus = new WorkflowEventBus_1.WorkflowEventBus(emitter);
        const received = [];
        emitter.on("workflow:run:start", (e) => received.push(e));
        // Emit inside an active span so the bus can pick up trace context.
        await tracing_1.Tracing.withSpan({ kind: "custom", name: "test-span", executionId: "exec-1", data: {} }, async () => {
            bus.emit("workflow:run:start", {
                executionId: "exec-1",
                data: { workflowId: "wf", definitionVersion: "v1", input: { a: 1 } },
            });
        });
        expect(received.length).toBe(1);
        expect(received[0].type).toBe("workflow:run:start");
        expect(received[0].executionId).toBe("exec-1");
        expect(received[0].trace?.traceId).toBeDefined();
        expect(received[0].trace?.spanId).toBeDefined();
    });
    it("should preserve a single executionId across nested workflow runs (branch + parallel)", async () => {
        const { Workflow } = await Promise.resolve().then(() => __importStar(require("../Workflow")));
        // before -> route{a: fanout[p1, p2]} -> after
        const wf = new Workflow({ id: "wf" })
            .step("before", async ({ input }) => input)
            .branch("route", async () => "a", {
            a: (w) => w.parallel("fanout", [
                (p) => p.step("p1", async () => "p1"),
                (p) => p.step("p2", async () => "p2"),
            ]),
        })
            .step("after", async ({ input }) => input);
        // Collect the executionId from every workflow event that fires.
        const executionIds = new Set();
        wf.on("workflow:run:start", (e) => executionIds.add(e.executionId));
        wf.on("workflow:node:start", (e) => executionIds.add(e.executionId));
        wf.on("workflow:node:end", (e) => executionIds.add(e.executionId));
        wf.on("workflow:run:end", (e) => executionIds.add(e.executionId));
        await wf.run("x", { executionId: "exec-nested-1" });
        // All workflow events in this execution must share the same executionId.
        expect(executionIds.size).toBe(1);
        expect(Array.from(executionIds)[0]).toBe("exec-nested-1");
    });
});
|
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
import type { WorkflowConfig } from "../Workflow.js";
import type { WorkflowDefinition } from "../definition/types.js";
import type { InternalNode } from "../graph/nodes.js";
import { WorkflowEventBus } from "./WorkflowEventBus.js";
import { WorkflowTelemetry } from "./WorkflowTelemetry.js";
import { WorkflowStateService } from "../state/WorkflowStateService.js";
import { NodeExecutor } from "./NodeExecutor.js";
/**
 * Drives a workflow execution: runs the node sequence via NodeExecutor,
 * publishing events through WorkflowEventBus and spans through
 * WorkflowTelemetry, with per-run state from WorkflowStateService.
 */
export declare class WorkflowRunner {
    private readonly config;
    private readonly events;
    private readonly telemetry;
    private readonly state;
    private readonly nodeExecutor;
    constructor(config: WorkflowConfig<any, any>, events: WorkflowEventBus, telemetry: WorkflowTelemetry, state: WorkflowStateService, nodeExecutor: NodeExecutor);
    /**
     * Executes the workflow over `input` and resolves with the final node's
     * output. An explicit `executionId` may be supplied via `options`.
     */
    run(params: {
        workflowId: string;
        workflowNameForTelemetry: string;
        definition: WorkflowDefinition;
        nodes: readonly InternalNode[];
        input: unknown;
        options?: {
            executionId?: string;
        };
    }): Promise<any>;
    private executeNodeWithRetry;
}
|