@specrun/cli 0.1.0-beta1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +191 -0
- package/dist/index.d.ts +1 -0
- package/dist/index.js +1281 -0
- package/package.json +55 -0
package/dist/index.js
ADDED
|
@@ -0,0 +1,1281 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
|
|
3
|
+
// src/cli/root.ts
|
|
4
|
+
import { Command as Command4 } from "commander";
|
|
5
|
+
|
|
6
|
+
// src/cli/run.ts
|
|
7
|
+
import { readFileSync as readFileSync2 } from "fs";
|
|
8
|
+
import { Command as Command2 } from "commander";
|
|
9
|
+
|
|
10
|
+
// src/spec/parser.ts
|
|
11
|
+
// src/spec/parser.ts
/**
 * Parses raw JSON (string or Buffer) into a Flow object carrying a
 * parsedNodes array. Throws when the JSON is invalid, when the top-level
 * component_type is not "Flow", when nodes is not an array, or when any
 * individual node fails to parse.
 */
function parseFlow(data) {
  const text = typeof data === "string" ? data : data.toString();
  let raw;
  try {
    raw = JSON.parse(text);
  } catch (err) {
    throw new Error(`spec: failed to parse component: ${err}`);
  }
  const base = raw;
  if (base.component_type !== "Flow") {
    throw new Error(
      `spec: expected component_type 'Flow', got "${base.component_type}"`
    );
  }
  const flow = raw;
  const parsed = {
    ...flow,
    parsedNodes: []
  };
  if (!Array.isArray(flow.nodes)) {
    throw new Error("spec: failed to parse flow: nodes must be an array");
  }
  flow.nodes.forEach((rawNode, i) => {
    try {
      parsed.parsedNodes.push(parseNode(rawNode));
    } catch (err) {
      // Wrap with the node index so callers can locate the bad entry.
      throw new Error(`spec: failed to parse node ${i}: ${err}`);
    }
  });
  return parsed;
}
|
|
42
|
+
/**
 * Validates a raw node object and returns it unchanged when its
 * component_type is one of the recognized node kinds.
 * Throws for non-objects, null, and unknown component types.
 */
function parseNode(data) {
  if (typeof data !== "object" || data === null) {
    throw new Error("spec: failed to parse node base: not an object");
  }
  const base = data;
  const knownTypes = [
    "StartNode",
    "EndNode",
    "AgentNode",
    "ToolNode",
    "LlmNode",
    "BranchingNode"
  ];
  if (knownTypes.includes(base.component_type)) {
    return data;
  }
  throw new Error(
    `spec: unknown component_type "${base.component_type}"`
  );
}
|
|
66
|
+
|
|
67
|
+
// src/spec/validate.ts
|
|
68
|
+
// src/spec/validate.ts
/**
 * Validates a parsed flow: name present, correct component_type, at least
 * one node, unique node names, a StartNode and an EndNode, edge endpoints
 * that reference existing nodes, and per-node-type required fields.
 * Collects every problem and throws one combined Error; returns undefined
 * when the flow is valid.
 */
function validateFlow(pf) {
  const errs = [];
  if (!pf.name) {
    errs.push("flow name is required");
  }
  if (pf.component_type !== "Flow") {
    errs.push(
      `expected component_type 'Flow', got "${pf.component_type}"`
    );
  }
  if (pf.parsedNodes.length === 0) {
    errs.push("flow must have at least one node");
  }
  let hasStart = false;
  let hasEnd = false;
  // name -> occurrence count, used both for duplicate detection and
  // for edge endpoint existence checks below.
  const nodeNames = /* @__PURE__ */ new Map();
  for (const n of pf.parsedNodes) {
    const name = n.name;
    const count = (nodeNames.get(name) ?? 0) + 1;
    nodeNames.set(name, count);
    if (count > 1) {
      errs.push(`duplicate node name "${name}"`);
    }
    if (n.component_type === "StartNode") {
      hasStart = true;
    }
    if (n.component_type === "EndNode") {
      hasEnd = true;
    }
  }
  if (!hasStart) {
    errs.push("flow must have a StartNode");
  }
  if (!hasEnd) {
    errs.push("flow must have an EndNode");
  }
  // Fix: tolerate a missing control_flow_connections array (consistent
  // with the data_flow_connections handling below). Previously this
  // threw a raw TypeError instead of a validation error.
  const ctrlEdges = pf.control_flow_connections ?? [];
  for (let i = 0; i < ctrlEdges.length; i++) {
    const edge = ctrlEdges[i];
    if (!nodeNames.has(edge.from_node)) {
      errs.push(
        `control_flow_connections[${i}]: from_node "${edge.from_node}" not found`
      );
    }
    if (!nodeNames.has(edge.to_node)) {
      errs.push(
        `control_flow_connections[${i}]: to_node "${edge.to_node}" not found`
      );
    }
  }
  const dataEdges = pf.data_flow_connections ?? [];
  for (let i = 0; i < dataEdges.length; i++) {
    const edge = dataEdges[i];
    if (!nodeNames.has(edge.source_node)) {
      errs.push(
        `data_flow_connections[${i}]: source_node "${edge.source_node}" not found`
      );
    }
    if (!nodeNames.has(edge.destination_node)) {
      errs.push(
        `data_flow_connections[${i}]: destination_node "${edge.destination_node}" not found`
      );
    }
  }
  // Per-node-type required field checks.
  for (const n of pf.parsedNodes) {
    if (n.component_type === "AgentNode") {
      if (!n.agent) {
        errs.push(`AgentNode "${n.name}" must have an agent`);
      } else {
        if (!n.agent.system_prompt) {
          errs.push(
            `AgentNode "${n.name}": agent must have a system_prompt`
          );
        }
        if (!n.agent.llm_config) {
          errs.push(
            `AgentNode "${n.name}": agent must have llm_config`
          );
        }
      }
    }
    if (n.component_type === "LlmNode") {
      if (!n.prompt_template) {
        errs.push(`LlmNode "${n.name}" must have a prompt_template`);
      }
      if (!n.llm_config) {
        errs.push(`LlmNode "${n.name}" must have llm_config`);
      }
    }
    if (n.component_type === "BranchingNode") {
      if (!n.mapping || Object.keys(n.mapping).length === 0) {
        errs.push(
          `BranchingNode "${n.name}" must have a non-empty mapping`
        );
      }
    }
    if (n.component_type === "ToolNode") {
      if (!n.tool) {
        errs.push(`ToolNode "${n.name}" must have a tool`);
      }
    }
  }
  if (errs.length > 0) {
    throw new Error(`validate: ${errs.join("; ")}`);
  }
}
|
|
173
|
+
|
|
174
|
+
// src/node/branching.ts
|
|
175
|
+
// src/node/branching.ts
// Mapping key that designates the fallback branch for a BranchingNode.
var DEFAULT_BRANCH = "DEFAULT_BRANCH";
/**
 * Executor for BranchingNode: picks a branch name from the node's mapping
 * using the "branching_mapping_key" input (or, failing that, the first
 * string-valued input key) and passes the input state through unchanged.
 */
var BranchingExecutor = class {
  node;
  _branch = "";
  constructor(node, _deps) {
    this.node = node;
  }
  /** Returns the branch chosen by the most recent execute() call. */
  branch() {
    return this._branch;
  }
  async execute(_signal, input) {
    let [keyValue, ok] = input.getString("branching_mapping_key");
    if (!ok) {
      // No explicit key: fall back to the first string value in the input.
      for (const candidate of input.keys()) {
        const [value, found] = input.getString(candidate);
        if (found) {
          keyValue = value;
          break;
        }
      }
      if (!keyValue) {
        throw new Error(
          `run: BranchingNode "${this.node.name}": no branching_mapping_key in input`
        );
      }
    }
    this._branch = this.resolveBranch(keyValue);
    return input.clone();
  }
  /** Maps a key value to a branch: exact match, then DEFAULT_BRANCH, then "default". */
  resolveBranch(keyValue) {
    const mapping = this.node.mapping;
    if (keyValue in mapping) {
      return mapping[keyValue];
    }
    if (DEFAULT_BRANCH in mapping) {
      return mapping[DEFAULT_BRANCH];
    }
    return "default";
  }
};
|
|
211
|
+
/** Executor for StartNode: a pass-through that forwards its input unchanged. */
var StartExecutor = class {
  node;
  constructor(node) {
    this.node = node;
  }
  /** StartNode never selects a branch. */
  branch() {
    return "";
  }
  /** Returns a copy of the input state, untouched. */
  async execute(_signal, input) {
    return input.clone();
  }
};
|
|
222
|
+
/** Executor for EndNode: a pass-through whose output is the flow's final state. */
var EndExecutor = class {
  node;
  constructor(node) {
    this.node = node;
  }
  /** EndNode never selects a branch. */
  branch() {
    return "";
  }
  /** Returns a copy of the input state, untouched. */
  async execute(_signal, input) {
    return input.clone();
  }
};
|
|
233
|
+
|
|
234
|
+
// src/spec/types.ts
|
|
235
|
+
// src/spec/types.ts
/** Returns the "title" field of a property's JSON schema, or "" when absent or not a string. */
function propertyTitle(p) {
  const title = p.json_schema["title"];
  if (typeof title === "string") {
    return title;
  }
  return "";
}
|
|
239
|
+
|
|
240
|
+
// src/state/state.ts
|
|
241
|
+
// src/state/state.ts
/**
 * Immutable key-value store passed between flow nodes. All mutating
 * operations return a fresh State; lookups return [value, found] pairs.
 */
var State = class _State {
  data;
  constructor(data) {
    // Defensive shallow copy so callers cannot mutate our backing object.
    this.data = data ? { ...data } : {};
  }
  /** Get retrieves a value by key; returns [value, found]. */
  get(key) {
    return key in this.data ? [this.data[key], true] : [void 0, false];
  }
  /** GetString retrieves a string by key; ["", false] for missing or non-string values. */
  getString(key) {
    const [value, found] = this.get(key);
    return found && typeof value === "string" ? [value, true] : ["", false];
  }
  /** Set returns a new State with the given key-value pair added/replaced. */
  set(key, value) {
    return new _State({ ...this.data, [key]: value });
  }
  /** Merge returns a new State with all pairs from other merged in (other wins). */
  merge(other) {
    return new _State({ ...this.data, ...other.data });
  }
  /** Clone returns a copy of this State (constructor copies the map). */
  clone() {
    return new _State(this.data);
  }
  /** Keys returns all keys in the state. */
  keys() {
    return Object.keys(this.data);
  }
  /** Data returns a copy of the underlying map. */
  toData() {
    return { ...this.data };
  }
  /** toJSON serializes State to a plain object for JSON.stringify. */
  toJSON() {
    return { ...this.data };
  }
  /** String returns a JSON representation, or an error marker if serialization fails. */
  toString() {
    try {
      return JSON.stringify(this.data);
    } catch (err) {
      return `<error: ${err}>`;
    }
  }
};
|
|
296
|
+
|
|
297
|
+
// src/node/agent.ts
|
|
298
|
+
// src/node/agent.ts
// Upper bound on LLM round-trips per node execution; prevents infinite tool loops.
var MAX_TOOL_ROUNDS = 10;
/**
 * Executor for AgentNode: runs a chat loop against the configured LLM
 * provider, dispatching any tool calls the model requests through the
 * tool registry/executor until the model answers without tool calls.
 */
var AgentExecutor = class {
  node;
  deps;
  constructor(node, deps) {
    this.node = node;
    this.deps = deps;
  }
  // AgentNode never selects a control-flow branch.
  branch() {
    return "";
  }
  /**
   * Runs the agent conversation. Throws when the node has no agent, no
   * LLM provider is configured, or the loop exceeds MAX_TOOL_ROUNDS.
   * Returns a State holding the final answer under "result" (plus any
   * keys merged from the answer if it parses as a JSON object).
   */
  async execute(signal, input) {
    const agent = this.node.agent;
    if (!agent) {
      throw new Error(`run: AgentNode "${this.node.name}" has no agent`);
    }
    if (!this.deps.llmProvider) {
      throw new Error(
        `run: AgentNode "${this.node.name}": no LLM provider configured`
      );
    }
    // Substitute {{key}} placeholders in the system prompt from the input state.
    const systemPrompt = substituteTemplate(
      agent.system_prompt ?? "",
      input
    );
    // Expose the agent's declared tools to the model as function definitions.
    const toolDefs = [];
    if (agent.tools) {
      for (const t of agent.tools) {
        toolDefs.push({
          name: t.name,
          description: t.description ?? "",
          parameters: buildToolSchema(t)
        });
      }
    }
    const messages = [
      { role: "system", content: systemPrompt }
    ];
    // The entire input state is handed to the model as the user message, as JSON.
    const inputJSON = JSON.stringify(input.toData());
    messages.push({ role: "user", content: inputJSON });
    let model = "gpt-4o"; // fallback when llm_config carries no model_id
    if (agent.llm_config?.model_id) {
      model = agent.llm_config.model_id;
    }
    for (let round = 0; round < MAX_TOOL_ROUNDS; round++) {
      const resp = await this.deps.llmProvider.chatCompletion(signal, {
        model,
        messages,
        tools: toolDefs.length > 0 ? toolDefs : void 0
      });
      if (!resp.tool_calls || resp.tool_calls.length === 0) {
        // Final answer: always store the raw text under "result"; if the
        // content parses as JSON, merge its entries into the output as well.
        let output = new State();
        output = output.set("result", resp.content);
        if (resp.content) {
          try {
            const parsed = JSON.parse(resp.content);
            for (const [k, v] of Object.entries(parsed)) {
              output = output.set(k, v);
            }
          } catch {
          }
        }
        return output;
      }
      // Tool round: echo the assistant turn (with its tool_calls) so the
      // transcript stays valid, then answer each requested call in order.
      messages.push({
        role: "assistant",
        content: resp.content,
        tool_calls: resp.tool_calls
      });
      for (const tc of resp.tool_calls) {
        try {
          const toolResult = await this.executeTool(signal, tc);
          const resultJSON = JSON.stringify(toolResult);
          if (this.deps.verbose) {
            console.error(` Tool "${tc.name}" result: ${resultJSON}`);
          }
          messages.push({
            role: "tool",
            tool_call_id: tc.id,
            content: resultJSON
          });
        } catch (err) {
          // Tool failures are reported back to the model rather than
          // aborting the run — the model may recover or rephrase.
          if (this.deps.verbose) {
            console.error(` Tool "${tc.name}" error: ${err}`);
          }
          messages.push({
            role: "tool",
            tool_call_id: tc.id,
            content: `Error: ${err}`
          });
        }
      }
    }
    throw new Error(
      `run: AgentNode "${this.node.name}" exceeded max tool rounds (${MAX_TOOL_ROUNDS})`
    );
  }
  /**
   * Parses one tool call's JSON arguments, resolves the tool in the
   * registry, and runs its executable. Returns the tool's output object.
   * Throws on bad arguments, unknown tool, or missing registry/executor.
   */
  async executeTool(signal, tc) {
    let args;
    try {
      args = JSON.parse(tc.arguments);
    } catch (err) {
      throw new Error(`failed to parse tool arguments: ${err}`);
    }
    if (!this.deps.toolRegistry) {
      throw new Error(`tool "${tc.name}" not found in registry`);
    }
    const [toolDef, ok] = this.deps.toolRegistry.lookup(tc.name);
    if (!ok) {
      throw new Error(`tool "${tc.name}" not found in registry`);
    }
    if (this.deps.verbose) {
      console.error(
        ` Executing tool "${tc.name}" with args: ${JSON.stringify(args)}`
      );
    }
    if (!this.deps.toolExecutor) {
      throw new Error(`no tool executor configured`);
    }
    const result = await this.deps.toolExecutor.execute(
      signal,
      toolDef.path,
      args
    );
    return result.output;
  }
};
|
|
425
|
+
/**
 * Replaces every {{key}} placeholder in template with the matching string
 * value from state s. Keys whose values are not strings substitute as "".
 */
function substituteTemplate(template, s) {
  return s.keys().reduce((rendered, key) => {
    const [value] = s.getString(key);
    return rendered.replaceAll(`{{${key}}}`, value);
  }, template);
}
|
|
433
|
+
/**
 * Builds a JSON-schema "object" describing a tool's inputs. Each input
 * whose schema has a string "title" becomes a required property named by
 * that title, carrying the rest of the input's schema (minus "title").
 */
function buildToolSchema(t) {
  const properties = {};
  const required = [];
  if (t.inputs) {
    for (const input of t.inputs) {
      const name = propertyTitle(input);
      if (!name) continue;
      // Copy the input's schema without its "title" key.
      const { title: _title, ...schema } = input.json_schema;
      properties[name] = schema;
      required.push(name);
    }
  }
  return {
    type: "object",
    properties,
    required
  };
}
|
|
456
|
+
|
|
457
|
+
// src/node/llm.ts
|
|
458
|
+
// src/node/llm.ts
/**
 * Executor for LlmNode: renders the node's prompt template against the
 * input state, performs one chat completion, and returns a State holding
 * the model's text under "generated_text".
 */
var LLMExecutor = class {
  node;
  deps;
  constructor(node, deps) {
    this.node = node;
    this.deps = deps;
  }
  /** LlmNode never selects a branch. */
  branch() {
    return "";
  }
  async execute(signal, input) {
    const provider = this.deps.llmProvider;
    if (!provider) {
      throw new Error(
        `run: LlmNode "${this.node.name}": no LLM provider configured`
      );
    }
    // Fill in {{key}} placeholders from the input state.
    const prompt = substituteTemplate(this.node.prompt_template, input);
    const model = this.node.llm_config?.model_id || "gpt-4o";
    const resp = await provider.chatCompletion(signal, {
      model,
      messages: [{ role: "user", content: prompt }]
    });
    return new State().set("generated_text", resp.content);
  }
};
|
|
488
|
+
|
|
489
|
+
// src/node/tool.ts
|
|
490
|
+
// src/node/tool.ts
/**
 * Executor for ToolNode: looks up the node's tool in the registry and
 * runs its executable with the node's input state; the tool's JSON output
 * becomes the new State.
 */
var ToolNodeExecutor = class {
  node;
  deps;
  constructor(node, deps) {
    this.node = node;
    this.deps = deps;
  }
  /** ToolNode never selects a branch. */
  branch() {
    return "";
  }
  async execute(signal, input) {
    const tool = this.node.tool;
    if (!tool) {
      throw new Error(`run: ToolNode "${this.node.name}" has no tool`);
    }
    const { toolRegistry, toolExecutor } = this.deps;
    if (!toolRegistry) {
      throw new Error(
        `run: ToolNode "${this.node.name}": no tool registry configured`
      );
    }
    const [toolDef, found] = toolRegistry.lookup(tool.name);
    if (!found) {
      throw new Error(
        `run: ToolNode "${this.node.name}": tool "${tool.name}" not found`
      );
    }
    if (!toolExecutor) {
      throw new Error(
        `run: ToolNode "${this.node.name}": no tool executor configured`
      );
    }
    const result = await toolExecutor.execute(
      signal,
      toolDef.path,
      input.toData()
    );
    return new State(result.output);
  }
};
|
|
528
|
+
|
|
529
|
+
// src/graph/types.ts
|
|
530
|
+
// src/graph/types.ts
/**
 * Executable graph: compiled nodes keyed by name, the start node's name,
 * and the flow's data-flow edges.
 */
var CompiledGraph = class {
  name;
  nodes;
  start;
  dataFlowEdges;
  constructor(name, nodes, start, dataFlowEdges) {
    this.name = name;
    this.nodes = nodes;
    this.start = start;
    this.dataFlowEdges = dataFlowEdges;
  }
  /** GetNode returns a compiled node by name as [node, found]. */
  getNode(name) {
    const node = this.nodes.get(name);
    return node ? [node, true] : [void 0, false];
  }
  /**
   * NextNode resolves the next node from current. With no branch, the
   * first unlabeled edge wins; with a branch, the first edge labeled with
   * that branch wins, and unlabeled edges serve as a fallback.
   */
  nextNode(current, branch) {
    // Returns the first edge target matching the predicate, or null.
    const follow = (matches) => {
      for (const edge of current.edges) {
        if (matches(edge)) {
          const target = this.nodes.get(edge.to_node);
          if (target) return [target, true];
        }
      }
      return null;
    };
    const isUnlabeled = (edge) => (edge.from_branch ?? "") === "";
    const direct = branch === ""
      ? follow(isUnlabeled)
      : follow((edge) => edge.from_branch === branch);
    if (direct) return direct;
    if (branch !== "") {
      const fallback = follow(isUnlabeled);
      if (fallback) return fallback;
    }
    return [void 0, false];
  }
};
|
|
570
|
+
|
|
571
|
+
// src/graph/compile.ts
|
|
572
|
+
// src/graph/compile.ts
/**
 * Compiles a parsed flow into a CompiledGraph: builds an executor per
 * node, attaches control-flow edges to their source nodes, and records
 * data-flow mappings on their destination nodes.
 * Throws when no StartNode exists or an edge references an unknown node.
 */
function compile(pf, deps) {
  const nodes = /* @__PURE__ */ new Map();
  let start = "";
  for (const specNode of pf.parsedNodes) {
    const name = specNode.name;
    nodes.set(name, {
      name,
      type: specNode.component_type,
      specNode,
      executor: buildExecutor(specNode, deps),
      edges: [],
      inputMappings: /* @__PURE__ */ new Map()
    });
    if (specNode.component_type === "StartNode") {
      start = name;
    }
  }
  if (!start) {
    throw new Error("compile: no StartNode found");
  }
  // Control-flow edges hang off their source node.
  for (const edge of pf.control_flow_connections) {
    const source = nodes.get(edge.from_node);
    if (!source) {
      throw new Error(
        `compile: control flow edge from_node "${edge.from_node}" not found`
      );
    }
    source.edges.push(edge);
  }
  // Data-flow edges are recorded on the destination node as input mappings.
  const dataEdges = pf.data_flow_connections ?? [];
  for (const edge of dataEdges) {
    const dest = nodes.get(edge.destination_node);
    if (!dest) {
      throw new Error(
        `compile: data flow edge destination_node "${edge.destination_node}" not found`
      );
    }
    dest.inputMappings.set(edge.destination_input, {
      sourceNode: edge.source_node,
      sourceOutput: edge.source_output
    });
  }
  return new CompiledGraph(pf.name, nodes, start, dataEdges);
}
|
|
618
|
+
/** Instantiates the executor implementation matching a spec node's component_type. */
function buildExecutor(n, deps) {
  switch (n.component_type) {
    case "StartNode": return new StartExecutor(n);
    case "EndNode": return new EndExecutor(n);
    case "AgentNode": return new AgentExecutor(n, deps);
    case "LlmNode": return new LLMExecutor(n, deps);
    case "ToolNode": return new ToolNodeExecutor(n, deps);
    case "BranchingNode": return new BranchingExecutor(n, deps);
  }
  throw new Error(
    `unknown node type: ${n.component_type}`
  );
}
|
|
638
|
+
|
|
639
|
+
// src/graph/validate.ts
|
|
640
|
+
// src/graph/validate.ts
/**
 * Validates a compiled graph: requires a start node, no unreachable
 * nodes, at least one reachable EndNode, and outgoing edges on every
 * reachable non-End node. Throws one combined Error listing all problems.
 */
function validate(g) {
  const problems = [];
  if (!g.start) {
    problems.push("no start node");
  }
  // Iterative DFS over control-flow edges from the start node.
  const reachable = /* @__PURE__ */ new Set();
  const pending = [g.start];
  while (pending.length > 0) {
    const name = pending.pop();
    if (reachable.has(name)) continue;
    reachable.add(name);
    const cn = g.nodes.get(name);
    if (!cn) continue;
    for (const edge of cn.edges) {
      pending.push(edge.to_node);
    }
  }
  for (const name of g.nodes.keys()) {
    if (!reachable.has(name)) {
      problems.push(`node "${name}" is unreachable from start`);
    }
  }
  const endReachable = [...g.nodes.entries()].some(
    ([name, cn]) => cn.type === "EndNode" && reachable.has(name)
  );
  if (!endReachable) {
    problems.push("no reachable EndNode from start");
  }
  for (const [name, cn] of g.nodes) {
    if (cn.type === "EndNode") continue;
    if (!reachable.has(name)) continue;
    if (cn.edges.length === 0) {
      problems.push(`node "${name}" has no outgoing edges`);
    }
  }
  if (problems.length > 0) {
    throw new Error(`validate: ${problems.join("; ")}`);
  }
}
|
|
682
|
+
|
|
683
|
+
// src/tool/registry.ts
|
|
684
|
+
import { readdirSync, statSync } from "fs";
|
|
685
|
+
import { join, extname } from "path";
|
|
686
|
+
// src/tool/registry.ts
/** Registry of tool executables discovered on disk, keyed by basename without extension. */
var FileRegistry = class _FileRegistry {
  tools;
  constructor(tools) {
    this.tools = tools;
  }
  /** Creates a registry by scanning the given directory for executables. */
  static create(toolsDir) {
    const tools = /* @__PURE__ */ new Map();
    if (!toolsDir) {
      return new _FileRegistry(tools);
    }
    let info;
    try {
      info = statSync(toolsDir);
    } catch (err) {
      throw new Error(`tool: tools directory "${toolsDir}": ${err}`);
    }
    if (!info.isDirectory()) {
      throw new Error(`tool: "${toolsDir}" is not a directory`);
    }
    for (const entry of readdirSync(toolsDir, { withFileTypes: true })) {
      if (entry.isDirectory()) continue;
      const path = join(toolsDir, entry.name);
      let entryInfo;
      try {
        entryInfo = statSync(path);
      } catch {
        continue; // entry vanished or is unreadable: skip it
      }
      // Skip files with no executable bit set (0o111 === 73).
      if ((entryInfo.mode & 0o111) === 0) continue;
      const ext = extname(entry.name);
      const name = ext ? entry.name.slice(0, -ext.length) : entry.name;
      tools.set(name, {
        name,
        description: "",
        path
      });
    }
    return new _FileRegistry(tools);
  }
  /** Looks up a tool by name; returns [tool, found]. */
  lookup(name) {
    const tool = this.tools.get(name);
    return tool ? [tool, true] : [{ name: "", description: "", path: "" }, false];
  }
  /** Returns all registered tools. */
  all() {
    return Array.from(this.tools.values());
  }
  /** Checks that all tool names in the spec have matching executables. */
  validateTools(toolNames) {
    const missing = toolNames.filter((name) => !this.tools.has(name));
    if (missing.length > 0) {
      throw new Error(
        `tool: missing executables for tools: ${missing.join(", ")}`
      );
    }
  }
};
|
|
750
|
+
|
|
751
|
+
// src/tool/executor.ts
|
|
752
|
+
import { spawn } from "child_process";
|
|
753
|
+
// src/tool/executor.ts
// Default per-invocation subprocess timeout in milliseconds (30 s).
var DEFAULT_TIMEOUT = 3e4;
/**
 * Executes tool binaries as child processes. Input is written to stdin as
 * JSON; on a zero exit code stdout is parsed as JSON. Supports an external
 * AbortSignal plus a per-call timeout, both of which kill the child.
 */
var SubprocessExecutor = class {
  timeout;
  constructor(timeout) {
    this.timeout = timeout ?? DEFAULT_TIMEOUT;
  }
  /**
   * Runs the executable at toolPath with the given input object.
   * Resolves with { output, stderr }; rejects on timeout, abort,
   * non-zero exit, spawn failure, or unparseable stdout JSON.
   */
  async execute(signal, toolPath, input) {
    const inputJSON = JSON.stringify(input);
    return new Promise((resolve, reject) => {
      // Internal controller used to kill the child on timeout or external abort.
      const ac = new AbortController();
      const timer = setTimeout(() => {
        // Timeout: kill the child and reject. The child's resulting
        // "error"/"close" events are ignored via the ac.signal.aborted checks.
        ac.abort();
        reject(
          new Error(
            `tool: execution timed out after ${this.timeout}ms`
          )
        );
      }, this.timeout);
      // Caller already aborted before anything was spawned.
      if (signal?.aborted) {
        clearTimeout(timer);
        reject(new Error("tool: execution aborted"));
        return;
      }
      const onExternalAbort = () => {
        ac.abort();
        clearTimeout(timer);
        reject(new Error("tool: execution aborted"));
      };
      signal?.addEventListener("abort", onExternalAbort, { once: true });
      const proc = spawn(toolPath, [], {
        stdio: ["pipe", "pipe", "pipe"],
        // Aborting ac terminates the child process.
        signal: ac.signal
      });
      const stdoutChunks = [];
      const stderrChunks = [];
      proc.stdout.on("data", (chunk) => stdoutChunks.push(chunk));
      proc.stderr.on("data", (chunk) => stderrChunks.push(chunk));
      proc.on("error", (err) => {
        clearTimeout(timer);
        signal?.removeEventListener("abort", onExternalAbort);
        // Abort-induced errors were already reported by the timeout/abort paths.
        if (ac.signal.aborted) return;
        const stderrStr = Buffer.concat(stderrChunks).toString();
        reject(
          new Error(`tool: execution failed: ${err.message}: ${stderrStr}`)
        );
      });
      proc.on("close", (code) => {
        clearTimeout(timer);
        signal?.removeEventListener("abort", onExternalAbort);
        const stderrStr = Buffer.concat(stderrChunks).toString();
        if (code !== 0) {
          // NOTE(review): on abort, code may be non-zero/null here, but the
          // promise was already rejected by the abort path, so this extra
          // reject is a no-op — presumably intentional.
          reject(
            new Error(
              `tool: execution failed: exit code ${code}: ${stderrStr}`
            )
          );
          return;
        }
        const stdoutStr = Buffer.concat(stdoutChunks).toString();
        let output;
        if (stdoutStr.length > 0) {
          try {
            output = JSON.parse(stdoutStr);
          } catch (err) {
            reject(
              new Error(
                `tool: failed to parse output JSON: ${err}: raw output: ${stdoutStr}`
              )
            );
            return;
          }
        } else {
          // A tool that prints nothing yields an empty result object.
          output = {};
        }
        resolve({ output, stderr: stderrStr });
      });
      // Feed the tool its input, then close stdin so it can terminate.
      proc.stdin.write(inputJSON);
      proc.stdin.end();
    });
  }
};
|
|
834
|
+
|
|
835
|
+
// src/llm/openai.ts
|
|
836
|
+
import OpenAI from "openai";
|
|
837
|
+
// src/llm/openai.ts
/** LLM provider backed by the official OpenAI SDK. */
var OpenAIProvider = class {
  client;
  constructor(opts) {
    this.client = new OpenAI(opts);
  }
  /**
   * Sends a chat completion request, translating the internal message and
   * tool shapes into the OpenAI SDK's format and mapping the response
   * back. Throws an `llm:`-prefixed Error on API failure or an empty
   * choices array; cancellation is honored via the AbortSignal.
   */
  async chatCompletion(signal, req) {
    // Map internal messages onto the SDK's role-discriminated union.
    const messages = req.messages.map((msg) => {
      switch (msg.role) {
        case "system":
          return {
            role: "system",
            content: msg.content
          };
        case "user":
          return { role: "user", content: msg.content };
        case "assistant": {
          // Assistant turns that requested tools must carry their
          // function-call entries so the transcript stays valid.
          if (msg.tool_calls && msg.tool_calls.length > 0) {
            return {
              role: "assistant",
              content: msg.content,
              tool_calls: msg.tool_calls.map((tc) => ({
                id: tc.id,
                type: "function",
                function: {
                  name: tc.name,
                  arguments: tc.arguments
                }
              }))
            };
          }
          return {
            role: "assistant",
            content: msg.content
          };
        }
        case "tool":
          return {
            role: "tool",
            tool_call_id: msg.tool_call_id,
            content: msg.content
          };
      }
      // NOTE(review): an unrecognized role maps to undefined here;
      // presumably upstream only ever produces the four roles above — verify.
    });
    // Internal tool definitions -> OpenAI "function" tools (undefined when none).
    const tools = req.tools && req.tools.length > 0 ? req.tools.map((t) => ({
      type: "function",
      function: {
        name: t.name,
        description: t.description,
        parameters: t.parameters
      }
    })) : void 0;
    try {
      const completion = await this.client.chat.completions.create(
        {
          model: req.model,
          messages,
          tools
        },
        // Request options: pass the caller's AbortSignal through to the SDK.
        { signal }
      );
      if (!completion.choices || completion.choices.length === 0) {
        throw new Error("llm: no choices in response");
      }
      const choice = completion.choices[0];
      const resp = {
        content: choice.message.content ?? "",
        finish_reason: choice.finish_reason ?? ""
      };
      if (choice.message.tool_calls && choice.message.tool_calls.length > 0) {
        // Flatten SDK tool calls back into the internal {id, name, arguments} shape.
        resp.tool_calls = choice.message.tool_calls.map(
          (tc) => ({
            id: tc.id,
            name: tc.function.name,
            arguments: tc.function.arguments
          })
        );
      }
      return resp;
    } catch (err) {
      // Re-throw our own "llm:" errors untouched; wrap SDK/network errors.
      if (err instanceof Error && err.message.startsWith("llm:")) throw err;
      throw new Error(`llm: OpenAI API error: ${err}`);
    }
  }
};
|
|
921
|
+
|
|
922
|
+
// src/runner/runner.ts
// Runner walks a compiled graph node-by-node, threading a State through each
// node's executor until an EndNode is reached, cancellation/timeout fires, or
// the iteration budget is exhausted.
var Runner = class {
  graph;
  opts;
  /**
   * @param graph Compiled graph: provides `start`, `getNode`, `nextNode`.
   * @param opts  Runner options: `maxIterations`, `timeout` (ms), `verbose`,
   *              and an optional `eventHandler` for lifecycle events.
   */
  constructor(graph, opts) {
    this.graph = graph;
    this.opts = opts;
  }
  /** Run executes the flow with the given initial inputs. */
  async run(signal, inputs) {
    const timeoutSignal = AbortSignal.timeout(this.opts.timeout);
    const combinedAc = new AbortController();
    const onTimeout = () => combinedAc.abort();
    const onExternal = () => combinedAc.abort();
    timeoutSignal.addEventListener("abort", onTimeout, { once: true });
    signal?.addEventListener("abort", onExternal, { once: true });
    // "abort" listeners never fire for a signal that was already aborted when
    // they were attached, so propagate a pre-aborted external signal by hand;
    // otherwise a cancelled caller would still run the whole flow.
    if (signal?.aborted) {
      combinedAc.abort();
    }
    const effectiveSignal = combinedAc.signal;
    try {
      return await this._run(effectiveSignal, inputs);
    } finally {
      // Detach listeners so neither signal retains the controller.
      timeoutSignal.removeEventListener("abort", onTimeout);
      signal?.removeEventListener("abort", onExternal);
    }
  }
  // Core execution loop; `signal` is the combined timeout+external signal.
  async _run(signal, inputs) {
    this.emit({ type: "flow_start" });
    const nodeOutputs = /* @__PURE__ */ new Map();
    const [startNode, ok] = this.graph.getNode(this.graph.start);
    if (!ok) {
      throw new Error(`run: start node "${this.graph.start}" not found`);
    }
    let current = startNode;
    let currentState = new State(inputs);
    for (let iteration = 0; iteration < this.opts.maxIterations; iteration++) {
      // Check for cancellation before executing each node.
      if (signal.aborted) {
        throw new Error("run: context cancelled: operation was aborted");
      }
      if (this.opts.verbose) {
        console.error(
          `Executing node: ${current.name} (${current.type})`
        );
      }
      this.emit({
        type: "node_start",
        nodeName: current.name,
        nodeType: current.type,
        state: currentState
      });
      const nodeInput = this.resolveInputs(
        current,
        nodeOutputs,
        currentState
      );
      let output;
      try {
        output = await current.executor.execute(signal, nodeInput);
      } catch (err) {
        // Normalize non-Error throws so event consumers always see an Error.
        const execErr = err instanceof Error ? err : new Error(String(err));
        this.emit({
          type: "node_error",
          nodeName: current.name,
          nodeType: current.type,
          error: execErr
        });
        throw execErr;
      }
      nodeOutputs.set(current.name, output);
      this.emit({
        type: "node_complete",
        nodeName: current.name,
        nodeType: current.type,
        state: output
      });
      // An EndNode's output IS the flow result; it is not merged back.
      if (current.type === "EndNode") {
        this.emit({ type: "flow_complete", state: output });
        return output;
      }
      currentState = currentState.merge(output);
      const branch = current.executor.branch();
      const [next, found] = this.graph.nextNode(current, branch);
      if (!found) {
        throw new Error(
          `run: no next node from "${current.name}" (branch="${branch}")`
        );
      }
      current = next;
    }
    throw new Error(
      `run: exceeded max iterations (${this.opts.maxIterations})`
    );
  }
  // Build the input State for node `cn`: when explicit input mappings exist,
  // resolve them from earlier node outputs and layer them over the running
  // state (mapped values win); otherwise pass the running state through.
  resolveInputs(cn, nodeOutputs, currentState) {
    if (cn.inputMappings.size === 0) {
      return currentState;
    }
    let resolved = new State();
    for (const [destInput, src] of cn.inputMappings) {
      const srcOutput = nodeOutputs.get(src.sourceNode);
      if (srcOutput) {
        const [val, ok] = srcOutput.get(src.sourceOutput);
        if (ok) {
          resolved = resolved.set(destInput, val);
        }
      }
    }
    return currentState.merge(resolved);
  }
  // Forward a lifecycle event to the configured handler, if any.
  emit(e) {
    if (this.opts.eventHandler) {
      this.opts.eventHandler(e);
    }
  }
};
|
|
1035
|
+
|
|
1036
|
+
// src/runner/options.ts
var DEFAULT_MAX_ITERATIONS = 50;
var DEFAULT_TIMEOUT2 = 300000;
/**
 * Build a fresh RunnerOptions object with the stock defaults:
 * 50 iterations max, a 5-minute timeout, and verbose output off.
 */
function defaultOptions() {
  const options = {
    maxIterations: DEFAULT_MAX_ITERATIONS,
    timeout: DEFAULT_TIMEOUT2,
    verbose: false
  };
  return options;
}
|
|
1046
|
+
|
|
1047
|
+
// src/cli/validate.ts
|
|
1048
|
+
import { readFileSync } from "fs";
|
|
1049
|
+
import { Command } from "commander";
|
|
1050
|
+
// `specrun validate`: parse + validate a flow file end-to-end, optionally
// checking that every referenced ServerTool resolves under --tools-dir.
var validateCommand = new Command("validate")
  .description("Validate an Agent Spec flow definition")
  .argument("<flow.json>", "Path to flow JSON file")
  .option("--tools-dir <dir>", "Directory containing tool executables")
  .action((flowPath, options) => {
    const pf = parseFlow(readFileSync(flowPath, "utf-8"));
    console.log(` Parsed flow: ${pf.name}`);
    validateFlow(pf);
    console.log(" Spec validation passed");
    // Compilation needs no live dependencies for validation purposes.
    const cg = compile(pf, {});
    console.log(" Graph compilation passed");
    validate(cg);
    console.log(" Graph validation passed");
    if (options.toolsDir) {
      const registry = FileRegistry.create(options.toolsDir);
      const toolNames = collectToolNames(pf);
      if (toolNames.length === 0) {
        console.log(" No tools to validate");
      } else {
        registry.validateTools(toolNames);
        console.log(` Tool validation passed (${toolNames.length} tools found)`);
      }
    }
    console.log(`Valid: ${flowPath}`);
  });
|
|
1075
|
+
/**
 * Collect the unique names of every ServerTool referenced by the flow's
 * AgentNodes (via agent.tools) and ToolNodes (via tool), preserving first
 * encounter order across parsedNodes.
 */
function collectToolNames(pf) {
  // Phase 1: gather every tool reference in node order.
  const candidates = [];
  for (const node of pf.parsedNodes) {
    if (node.component_type === "AgentNode") {
      candidates.push(...(node.agent?.tools ?? []));
    } else if (node.component_type === "ToolNode" && node.tool) {
      candidates.push(node.tool);
    }
  }
  // Phase 2: keep only ServerTools, deduplicated by name.
  const seen = new Set();
  const names = [];
  for (const tool of candidates) {
    if (tool.component_type !== "ServerTool" || seen.has(tool.name)) {
      continue;
    }
    seen.add(tool.name);
    names.push(tool.name);
  }
  return names;
}
|
|
1099
|
+
|
|
1100
|
+
// src/cli/run.ts
// `specrun run <flow.json>`: parse, validate, compile and execute a flow,
// printing the final state as pretty-printed JSON on stdout. All diagnostic
// output goes to stderr so stdout stays machine-readable.
var runCommand = new Command2("run").description("Run an Agent Spec flow").argument("<flow.json>", "Path to flow JSON file").option("--tools-dir <dir>", "Directory containing tool executables").option("--input <json>", "Input JSON object").action(
  async (flowPath, options, command) => {
    // --verbose is declared on the root command, so read it via the parent.
    const verbose = command.parent?.opts().verbose ?? false;
    const data = readFileSync2(flowPath, "utf-8");
    const pf = parseFlow(data);
    validateFlow(pf);
    // --tools-dir is optional; an empty string registry simply has no tools.
    const reg = FileRegistry.create(options.toolsDir ?? "");
    const toolNames = collectToolNames(pf);
    if (toolNames.length > 0) {
      // Fail fast if any referenced ServerTool is missing before executing.
      reg.validateTools(toolNames);
    }
    const provider = new OpenAIProvider();
    // Runtime dependencies injected into graph compilation.
    const deps = {
      llmProvider: provider,
      toolExecutor: new SubprocessExecutor(),
      toolRegistry: reg,
      verbose
    };
    const cg = compile(pf, deps);
    validate(cg);
    let inputs;
    if (options.input) {
      try {
        inputs = JSON.parse(options.input);
      } catch (err) {
        throw new Error(`failed to parse --input JSON: ${err}`);
      }
    } else {
      // No --input: start the flow with an empty input object.
      inputs = {};
    }
    const opts = defaultOptions();
    opts.verbose = verbose;
    if (verbose) {
      // Mirror runner lifecycle events to stderr, keyed by node name.
      opts.eventHandler = (e) => {
        switch (e.type) {
          case "node_start":
            console.error(
              `[${e.nodeName}] Starting ${e.nodeType}`
            );
            break;
          case "node_complete":
            console.error(`[${e.nodeName}] Completed`);
            break;
          case "node_error":
            console.error(
              `[${e.nodeName}] Error: ${e.error}`
            );
            break;
          case "flow_start":
            console.error("Flow started");
            break;
          case "flow_complete":
            console.error("Flow completed");
            break;
        }
      };
    }
    const runner = new Runner(cg, opts);
    // No external AbortSignal is supplied: cancellation comes only from the
    // runner's own timeout option.
    const result = await runner.run(void 0, inputs);
    // Final flow state, pretty-printed, is the command's sole stdout output.
    const output = JSON.stringify(result.toData(), null, 2);
    console.log(output);
  }
);
|
|
1164
|
+
|
|
1165
|
+
// src/cli/init.ts
|
|
1166
|
+
import { Command as Command3 } from "commander";
|
|
1167
|
+
|
|
1168
|
+
// src/scaffold/templates.ts
|
|
1169
|
+
import { mkdirSync, writeFileSync } from "fs";
|
|
1170
|
+
import { join as join2 } from "path";
|
|
1171
|
+
// Template written to the scaffolded flow.json: a three-node flow
// (start -> assistant AgentNode -> end) wired with control-flow and
// data-flow connections, where the agent exposes one example ServerTool.
// NOTE(review): the literal's whitespace is emitted verbatim to flow.json;
// formatting here is reconstructed with 2-space JSON indent — confirm
// against the original source.
var flowTemplate = `{
  "component_type": "Flow",
  "agentspec_version": "26.1.0",
  "name": "my-flow",
  "start_node": {
    "component_type": "StartNode",
    "name": "start",
    "inputs": [{"json_schema": {"title": "query", "type": "string"}}]
  },
  "nodes": [
    {
      "component_type": "StartNode",
      "name": "start",
      "inputs": [{"json_schema": {"title": "query", "type": "string"}}]
    },
    {
      "component_type": "AgentNode",
      "name": "assistant",
      "agent": {
        "component_type": "Agent",
        "name": "assistant-agent",
        "system_prompt": "You are a helpful assistant. Answer the user's question: {{query}}",
        "llm_config": {
          "component_type": "OpenAIConfig",
          "model_id": "gpt-4o"
        },
        "tools": [
          {
            "component_type": "ServerTool",
            "name": "example_tool",
            "description": "An example tool that echoes input",
            "inputs": [{"json_schema": {"title": "message", "type": "string"}}],
            "outputs": [{"json_schema": {"title": "response", "type": "string"}}]
          }
        ]
      }
    },
    {
      "component_type": "EndNode",
      "name": "end",
      "inputs": [{"json_schema": {"title": "result", "type": "string"}}]
    }
  ],
  "control_flow_connections": [
    {"from_node": "start", "to_node": "assistant"},
    {"from_node": "assistant", "to_node": "end"}
  ],
  "data_flow_connections": [
    {"source_node": "start", "source_output": "query", "destination_node": "assistant", "destination_input": "query"},
    {"source_node": "assistant", "source_output": "result", "destination_node": "end", "destination_input": "result"}
  ]
}
`;
// Template written to tools/example_tool.sh: a bash tool following the
// subprocess contract (JSON object on stdin, JSON object on stdout).
// The \\" sequences produce literal \" in the generated script.
var toolTemplate = `#!/usr/bin/env bash
# Example tool: reads JSON from stdin, writes JSON to stdout
set -euo pipefail

# Parse input
INPUT=$(cat)
MESSAGE=$(echo "$INPUT" | python3 -c "import sys,json; print(json.load(sys.stdin).get('message','no message'))" 2>/dev/null || echo "no message")

# Return JSON output
echo "{\\"response\\": \\"Echo: $MESSAGE\\"}"
`;
|
|
1235
|
+
/**
 * Scaffold a new specrun project under `dir`: creates `dir/tools/`, writes
 * `dir/flow.json` from flowTemplate, and writes an executable
 * `dir/tools/example_tool.sh` from toolTemplate.
 *
 * @param {string} dir - Target project directory (created if missing).
 * @throws {Error} with a `scaffold:` prefix when any filesystem step fails.
 */
function generate(dir) {
  const toolsDir = join2(dir, "tools");
  try {
    mkdirSync(toolsDir, { recursive: true });
  } catch (err) {
    throw new Error(`scaffold: failed to create directory: ${err}`);
  }
  const flowPath = join2(dir, "flow.json");
  try {
    // 0o644 (rw-r--r--): the flow definition is plain data.
    writeFileSync(flowPath, flowTemplate, { mode: 0o644 });
  } catch (err) {
    throw new Error(`scaffold: failed to write flow.json: ${err}`);
  }
  const toolPath = join2(toolsDir, "example_tool.sh");
  try {
    // 0o755 (rwxr-xr-x): the tool script must be executable by the runner.
    writeFileSync(toolPath, toolTemplate, { mode: 0o755 });
  } catch (err) {
    throw new Error(`scaffold: failed to write example_tool.sh: ${err}`);
  }
}
|
|
1255
|
+
|
|
1256
|
+
// src/cli/init.ts
// `specrun init <project-name>`: scaffold flow.json plus an example tool
// script, then print what was created and how to run it.
var initCommand = new Command3("init")
  .description("Scaffold a new specrun project")
  .argument("<project-name>", "Name of the project directory")
  .action((dir) => {
    generate(dir);
    const created = [
      `Created project in ${dir}/`,
      " flow.json - Agent Spec flow definition",
      " tools/example_tool.sh - Example tool script"
    ];
    for (const line of created) {
      console.log(line);
    }
    console.log();
    console.log("Next steps:");
    console.log(` 1. Edit ${dir}/flow.json to define your workflow`);
    console.log(` 2. Add tool scripts to ${dir}/tools/`);
    console.log(` 3. Run: specrun run ${dir}/flow.json --tools-dir ${dir}/tools --input '{"query": "hello"}'`);
  });
|
|
1270
|
+
|
|
1271
|
+
// src/cli/root.ts
// Root command: global --verbose/--trace flags plus the three subcommands.
var program = new Command4("specrun")
  .description("Lightweight CLI agentic workflow framework")
  .option("--verbose", "Enable verbose output", false)
  .option("--trace", "Enable trace-level output", false);
for (const subcommand of [runCommand, validateCommand, initCommand]) {
  program.addCommand(subcommand);
}

// src/index.ts
// Entry point: parse argv and surface any failure on stderr with exit code 1.
const handleFatal = (err) => {
  console.error(err.message ?? err);
  process.exit(1);
};
program.parseAsync(process.argv).catch(handleFatal);
|