agent-worker 0.18.0 → 0.19.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +49 -38
- package/dist/cli/index.mjs +208 -4320
- package/dist/client-DAKkzdOn.mjs +171 -0
- package/dist/daemon-CwaHgxs6.mjs +1071 -0
- package/dist/index.d.mts +249 -849
- package/dist/index.mjs +27 -1102
- package/dist/output-B0mwPqjv.mjs +20 -0
- package/dist/rolldown-runtime-wcPFST8Q.mjs +13 -0
- package/dist/target-9yiBRXxa.mjs +105 -0
- package/package.json +25 -37
- package/dist/backends-D7DT0uox.mjs +0 -1484
- package/dist/backends-DUvcm-ce.mjs +0 -3
- package/dist/context-CoRTddGx.mjs +0 -4
- package/dist/create-tool-gcUuI1FD.mjs +0 -32
- package/dist/display-pretty-Kyd40DEF.mjs +0 -190
- package/dist/memory-provider-Z9D8NdwS.mjs +0 -75
- package/dist/runner-BmT0Y8MD.mjs +0 -690
- package/dist/workflow-LOZUlaDo.mjs +0 -744
package/dist/cli/index.mjs
CHANGED
|
@@ -1,4000 +1,42 @@
|
|
|
1
1
|
#!/usr/bin/env node
|
|
2
|
-
import {
|
|
3
|
-
import {
|
|
4
|
-
import {
|
|
5
|
-
import {
|
|
6
|
-
import {
|
|
7
|
-
import {
|
|
8
|
-
import {
|
|
9
|
-
import {
|
|
10
|
-
import { homedir } from "node:os";
|
|
11
|
-
import { execSync, spawn } from "node:child_process";
|
|
2
|
+
import { t as __exportAll } from "../rolldown-runtime-wcPFST8Q.mjs";
|
|
3
|
+
import { a as isDaemonRunning, i as DEFAULT_PORT } from "../daemon-CwaHgxs6.mjs";
|
|
4
|
+
import { a as isDaemonActive, c as serve, d as stopWorkflow, i as health, l as shutdown, n as createAgent, o as listAgents, r as deleteAgent, s as run, u as startWorkflow } from "../client-DAKkzdOn.mjs";
|
|
5
|
+
import { t as outputJson } from "../output-B0mwPqjv.mjs";
|
|
6
|
+
import { n as parseTarget } from "../target-9yiBRXxa.mjs";
|
|
7
|
+
import { mkdirSync, readFileSync } from "node:fs";
|
|
8
|
+
import { FRONTIER_MODELS, getDefaultModel, normalizeBackendType } from "@moniro/agent-loop";
|
|
9
|
+
import { createFileContextProvider, getDefaultContextDir } from "@moniro/workspace";
|
|
12
10
|
import { Command, Option } from "commander";
|
|
13
|
-
import {
|
|
14
|
-
import { streamSSE } from "hono/streaming";
|
|
15
|
-
import { randomUUID } from "node:crypto";
|
|
16
|
-
import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
|
|
17
|
-
import { nanoid } from "nanoid";
|
|
18
|
-
import { WebStandardStreamableHTTPServerTransport } from "@modelcontextprotocol/sdk/server/webStandardStreamableHttp.js";
|
|
19
|
-
import { createServer } from "node:http";
|
|
20
|
-
import { StreamableHTTPServerTransport } from "@modelcontextprotocol/sdk/server/streamableHttp.js";
|
|
21
|
-
import { MockLanguageModelV3, mockValues } from "ai/test";
|
|
22
|
-
import { Client } from "@modelcontextprotocol/sdk/client/index.js";
|
|
23
|
-
import { StreamableHTTPClientTransport } from "@modelcontextprotocol/sdk/client/streamableHttp.js";
|
|
24
|
-
import { z as z$1 } from "zod/v4";
|
|
25
|
-
|
|
26
|
-
//#region rolldown:runtime
|
|
27
|
-
var __defProp = Object.defineProperty;
|
|
28
|
-
var __exportAll = (all, symbols) => {
|
|
29
|
-
let target = {};
|
|
30
|
-
for (var name in all) {
|
|
31
|
-
__defProp(target, name, {
|
|
32
|
-
get: all[name],
|
|
33
|
-
enumerable: true
|
|
34
|
-
});
|
|
35
|
-
}
|
|
36
|
-
if (symbols) {
|
|
37
|
-
__defProp(target, Symbol.toStringTag, { value: "Module" });
|
|
38
|
-
}
|
|
39
|
-
return target;
|
|
40
|
-
};
|
|
41
|
-
|
|
42
|
-
//#endregion
|
|
43
|
-
//#region src/daemon/registry.ts
|
|
44
|
-
/**
|
|
45
|
-
* Daemon Registry
|
|
46
|
-
*
|
|
47
|
-
* Discovery: daemon.json = { pid, host, port, startedAt }
|
|
48
|
-
* One daemon process on a fixed port. Clients read daemon.json to find it.
|
|
49
|
-
*
|
|
50
|
-
* Legacy: per-session files in sessions/{id}.json (deprecated, kept for transition)
|
|
51
|
-
*/
|
|
52
|
-
const CONFIG_DIR = join(homedir(), ".agent-worker");
|
|
53
|
-
const SESSIONS_DIR = join(CONFIG_DIR, "sessions");
|
|
54
|
-
const DEFAULT_PORT = 5099;
|
|
55
|
-
const DAEMON_FILE = join(CONFIG_DIR, "daemon.json");
|
|
56
|
-
const DEFAULT_FILE = join(CONFIG_DIR, "default");
|
|
57
|
-
/** Write daemon.json for client discovery */
|
|
58
|
-
function writeDaemonInfo(info) {
|
|
59
|
-
mkdirSync(CONFIG_DIR, { recursive: true });
|
|
60
|
-
writeFileSync(DAEMON_FILE, JSON.stringify(info, null, 2));
|
|
61
|
-
}
|
|
62
|
-
/** Read daemon.json. Returns null if missing or malformed. */
|
|
63
|
-
function readDaemonInfo() {
|
|
64
|
-
try {
|
|
65
|
-
return JSON.parse(readFileSync(DAEMON_FILE, "utf-8"));
|
|
66
|
-
} catch {
|
|
67
|
-
return null;
|
|
68
|
-
}
|
|
69
|
-
}
|
|
70
|
-
/** Remove daemon.json (on shutdown) */
|
|
71
|
-
function removeDaemonInfo() {
|
|
72
|
-
try {
|
|
73
|
-
unlinkSync(DAEMON_FILE);
|
|
74
|
-
} catch {}
|
|
75
|
-
}
|
|
76
|
-
/** Check if a daemon is already running (daemon.json exists + PID alive) */
|
|
77
|
-
function isDaemonRunning() {
|
|
78
|
-
const info = readDaemonInfo();
|
|
79
|
-
if (!info) return null;
|
|
80
|
-
try {
|
|
81
|
-
process.kill(info.pid, 0);
|
|
82
|
-
return info;
|
|
83
|
-
} catch {
|
|
84
|
-
removeDaemonInfo();
|
|
85
|
-
return null;
|
|
86
|
-
}
|
|
87
|
-
}
|
|
88
|
-
const DURATION_RE = /^(\d+(?:\.\d+)?)\s*(ms|s|m|h|d)$/;
|
|
89
|
-
/**
|
|
90
|
-
* Parse a duration string like "30s", "5m", "2h" into milliseconds.
|
|
91
|
-
* Returns null if not a valid duration format.
|
|
92
|
-
*/
|
|
93
|
-
function parseDuration(value) {
|
|
94
|
-
const match = value.match(DURATION_RE);
|
|
95
|
-
if (!match) return null;
|
|
96
|
-
return parseFloat(match[1]) * {
|
|
97
|
-
ms: 1,
|
|
98
|
-
s: 1e3,
|
|
99
|
-
m: 60 * 1e3,
|
|
100
|
-
h: 3600 * 1e3,
|
|
101
|
-
d: 1440 * 60 * 1e3
|
|
102
|
-
}[match[2]];
|
|
103
|
-
}
|
|
104
|
-
/**
|
|
105
|
-
* Resolve a wakeup value into a typed schedule.
|
|
106
|
-
* - number → interval (ms)
|
|
107
|
-
* - "30s"/"5m"/"2h" → interval (converted to ms)
|
|
108
|
-
* - cron expression → cron
|
|
109
|
-
*/
|
|
110
|
-
function resolveSchedule(config) {
|
|
111
|
-
const { wakeup, prompt } = config;
|
|
112
|
-
if (typeof wakeup === "number") {
|
|
113
|
-
if (wakeup <= 0) throw new Error("Wakeup interval must be positive");
|
|
114
|
-
return {
|
|
115
|
-
type: "interval",
|
|
116
|
-
ms: wakeup,
|
|
117
|
-
prompt
|
|
118
|
-
};
|
|
119
|
-
}
|
|
120
|
-
const ms = parseDuration(wakeup);
|
|
121
|
-
if (ms !== null) {
|
|
122
|
-
if (ms <= 0) throw new Error("Wakeup duration must be positive");
|
|
123
|
-
return {
|
|
124
|
-
type: "interval",
|
|
125
|
-
ms,
|
|
126
|
-
prompt
|
|
127
|
-
};
|
|
128
|
-
}
|
|
129
|
-
return {
|
|
130
|
-
type: "cron",
|
|
131
|
-
expr: wakeup,
|
|
132
|
-
prompt
|
|
133
|
-
};
|
|
134
|
-
}
|
|
135
|
-
|
|
136
|
-
//#endregion
|
|
137
|
-
//#region src/agent/store.ts
|
|
138
|
-
/**
|
|
139
|
-
* In-memory state store. State is lost when the daemon stops.
|
|
140
|
-
* Suitable for development and single-machine deployments.
|
|
141
|
-
*/
|
|
142
|
-
var MemoryStateStore = class {
|
|
143
|
-
states = /* @__PURE__ */ new Map();
|
|
144
|
-
async load(agentId) {
|
|
145
|
-
return this.states.get(agentId) ?? null;
|
|
146
|
-
}
|
|
147
|
-
async save(agentId, state) {
|
|
148
|
-
this.states.set(agentId, state);
|
|
149
|
-
}
|
|
150
|
-
async delete(agentId) {
|
|
151
|
-
this.states.delete(agentId);
|
|
152
|
-
}
|
|
153
|
-
};
|
|
154
|
-
|
|
155
|
-
//#endregion
|
|
156
|
-
//#region src/daemon/serve.ts
|
|
157
|
-
/**
|
|
158
|
-
* Start an HTTP server for a Hono app.
|
|
159
|
-
* Auto-detects runtime: Bun.serve() when available, @hono/node-server otherwise.
|
|
160
|
-
*/
|
|
161
|
-
async function startHttpServer(app, options) {
|
|
162
|
-
if ("Bun" in globalThis) return startBun(app, options);
|
|
163
|
-
return startNode(app, options);
|
|
164
|
-
}
|
|
165
|
-
function startBun(app, options) {
|
|
166
|
-
const server = globalThis.Bun.serve({
|
|
167
|
-
fetch: app.fetch,
|
|
168
|
-
port: options.port,
|
|
169
|
-
hostname: options.hostname
|
|
170
|
-
});
|
|
171
|
-
return {
|
|
172
|
-
port: server.port ?? options.port,
|
|
173
|
-
close: async () => server.stop(true)
|
|
174
|
-
};
|
|
175
|
-
}
|
|
176
|
-
async function startNode(app, options) {
|
|
177
|
-
const mod = await import("@hono/node-server");
|
|
178
|
-
return new Promise((resolve, reject) => {
|
|
179
|
-
const server = mod.serve({
|
|
180
|
-
fetch: app.fetch,
|
|
181
|
-
port: options.port,
|
|
182
|
-
hostname: options.hostname
|
|
183
|
-
}, (info) => {
|
|
184
|
-
resolve({
|
|
185
|
-
port: info.port,
|
|
186
|
-
close: () => new Promise((r) => server.close(() => r()))
|
|
187
|
-
});
|
|
188
|
-
});
|
|
189
|
-
server.on("error", reject);
|
|
190
|
-
});
|
|
191
|
-
}
|
|
192
|
-
|
|
193
|
-
//#endregion
|
|
194
|
-
//#region src/workflow/context/event-log.ts
|
|
195
|
-
var EventLog = class {
|
|
196
|
-
constructor(provider) {
|
|
197
|
-
this.provider = provider;
|
|
198
|
-
}
|
|
199
|
-
/** Record a tool invocation (MCP, SDK, or backend native) */
|
|
200
|
-
toolCall(agent, name, args, source) {
|
|
201
|
-
this.provider.appendChannel(agent, `${name}(${args})`, {
|
|
202
|
-
kind: "tool_call",
|
|
203
|
-
toolCall: {
|
|
204
|
-
name,
|
|
205
|
-
args,
|
|
206
|
-
source
|
|
207
|
-
}
|
|
208
|
-
}).catch(() => {});
|
|
209
|
-
}
|
|
210
|
-
/** Record an operational log (workflow lifecycle, warnings, errors) */
|
|
211
|
-
system(from, message) {
|
|
212
|
-
this.provider.appendChannel(from, message, { kind: "system" }).catch(() => {});
|
|
213
|
-
}
|
|
214
|
-
/** Record backend streaming text output (not tool calls) */
|
|
215
|
-
output(agent, text) {
|
|
216
|
-
this.provider.appendChannel(agent, text, { kind: "output" }).catch(() => {});
|
|
217
|
-
}
|
|
218
|
-
/** Record debug-level detail (only shown with --debug) */
|
|
219
|
-
debug(from, message) {
|
|
220
|
-
this.provider.appendChannel(from, message, { kind: "debug" }).catch(() => {});
|
|
221
|
-
}
|
|
222
|
-
};
|
|
223
|
-
|
|
224
|
-
//#endregion
|
|
225
|
-
//#region src/workflow/context/mcp/helpers.ts
|
|
226
|
-
/**
|
|
227
|
-
* Extract agent ID from MCP extra context.
|
|
228
|
-
* Session ID format: "agentName-uuid8chars" — extract agent name.
|
|
229
|
-
*/
|
|
230
|
-
function getAgentId(extra) {
|
|
231
|
-
if (!extra || typeof extra !== "object") return void 0;
|
|
232
|
-
if ("sessionId" in extra && typeof extra.sessionId === "string") {
|
|
233
|
-
const sid = extra.sessionId;
|
|
234
|
-
const match = sid.match(/^(.+)-[0-9a-f]{8}$/);
|
|
235
|
-
return match ? match[1] : sid;
|
|
236
|
-
}
|
|
237
|
-
if ("meta" in extra && extra.meta && typeof extra.meta === "object") {
|
|
238
|
-
const meta = extra.meta;
|
|
239
|
-
if ("agentId" in meta && typeof meta.agentId === "string") return meta.agentId;
|
|
240
|
-
}
|
|
241
|
-
}
|
|
242
|
-
/**
|
|
243
|
-
* Format inbox messages for JSON display.
|
|
244
|
-
*/
|
|
245
|
-
function formatInbox$1(messages) {
|
|
246
|
-
if (messages.length === 0) return JSON.stringify({
|
|
247
|
-
messages: [],
|
|
248
|
-
count: 0
|
|
249
|
-
});
|
|
250
|
-
return JSON.stringify({
|
|
251
|
-
messages: messages.map((m) => ({
|
|
252
|
-
id: m.entry.id,
|
|
253
|
-
from: m.entry.from,
|
|
254
|
-
content: m.entry.content,
|
|
255
|
-
timestamp: m.entry.timestamp,
|
|
256
|
-
priority: m.priority
|
|
257
|
-
})),
|
|
258
|
-
count: messages.length
|
|
259
|
-
});
|
|
260
|
-
}
|
|
261
|
-
/**
|
|
262
|
-
* Format tool call parameters as a concise string.
|
|
263
|
-
*/
|
|
264
|
-
function formatToolParams(params) {
|
|
265
|
-
return Object.entries(params).filter(([_, v]) => v !== void 0).map(([k, v]) => {
|
|
266
|
-
const val = typeof v === "string" && v.length > 50 ? v.slice(0, 50) + "..." : v;
|
|
267
|
-
return `${k}=${JSON.stringify(val)}`;
|
|
268
|
-
}).join(", ");
|
|
269
|
-
}
|
|
270
|
-
/**
|
|
271
|
-
* Create a logTool function that records tool calls via EventLog.
|
|
272
|
-
*/
|
|
273
|
-
function createLogTool(eventLog) {
|
|
274
|
-
return (tool, agent, params) => {
|
|
275
|
-
if (!agent) return;
|
|
276
|
-
const args = formatToolParams(params);
|
|
277
|
-
eventLog.toolCall(agent, tool, args, "mcp");
|
|
278
|
-
};
|
|
279
|
-
}
|
|
280
|
-
|
|
281
|
-
//#endregion
|
|
282
|
-
//#region src/workflow/context/mcp/channel.ts
|
|
283
|
-
const CHANNEL_MSG_LIMIT = 2e3;
|
|
284
|
-
function registerChannelTools(server, ctx, options) {
|
|
285
|
-
const { provider, getAgentId, logTool } = ctx;
|
|
286
|
-
const { onMention } = options ?? {};
|
|
287
|
-
server.tool("channel_send", `Send a message to the shared channel. Use @agent to mention/notify. Use "to" for private DMs. Long messages (> ${CHANNEL_MSG_LIMIT} chars) are automatically converted to resources.`, {
|
|
288
|
-
message: z.string().describe("Message content, can include @mentions like @reviewer or @coder. Long messages are auto-converted to resources."),
|
|
289
|
-
to: z.string().optional().describe("Send as DM to a specific agent (private, only you and recipient see it)")
|
|
290
|
-
}, async ({ message, to }, extra) => {
|
|
291
|
-
const from = getAgentId(extra) || "anonymous";
|
|
292
|
-
logTool("channel_send", from, {
|
|
293
|
-
message,
|
|
294
|
-
to
|
|
295
|
-
});
|
|
296
|
-
const sendOpts = to ? { to } : void 0;
|
|
297
|
-
const msg = await provider.smartSend(from, message, sendOpts);
|
|
298
|
-
for (const target of msg.mentions) onMention?.(from, target, msg);
|
|
299
|
-
if (to && !msg.mentions.includes(to)) onMention?.(from, to, msg);
|
|
300
|
-
return { content: [{
|
|
301
|
-
type: "text",
|
|
302
|
-
text: JSON.stringify({
|
|
303
|
-
status: "sent",
|
|
304
|
-
timestamp: msg.timestamp,
|
|
305
|
-
mentions: msg.mentions,
|
|
306
|
-
to: msg.to
|
|
307
|
-
})
|
|
308
|
-
}] };
|
|
309
|
-
});
|
|
310
|
-
server.tool("channel_read", "Read messages from the shared channel. DMs and logs are automatically filtered based on your identity.", {
|
|
311
|
-
since: z.string().optional().describe("Read entries after this timestamp (ISO format)"),
|
|
312
|
-
limit: z.number().optional().describe("Maximum entries to return")
|
|
313
|
-
}, async ({ since, limit }, extra) => {
|
|
314
|
-
const agent = getAgentId(extra);
|
|
315
|
-
logTool("channel_read", agent, {
|
|
316
|
-
since,
|
|
317
|
-
limit
|
|
318
|
-
});
|
|
319
|
-
const entries = await provider.readChannel({
|
|
320
|
-
since,
|
|
321
|
-
limit,
|
|
322
|
-
agent
|
|
323
|
-
});
|
|
324
|
-
return { content: [{
|
|
325
|
-
type: "text",
|
|
326
|
-
text: JSON.stringify(entries)
|
|
327
|
-
}] };
|
|
328
|
-
});
|
|
329
|
-
}
|
|
330
|
-
|
|
331
|
-
//#endregion
|
|
332
|
-
//#region src/workflow/context/mcp/resource.ts
|
|
333
|
-
function registerResourceTools(server, ctx) {
|
|
334
|
-
const { provider, getAgentId, logTool } = ctx;
|
|
335
|
-
server.tool("resource_create", "Store large content as a resource. Returns a reference (resource:id) usable in channel messages or documents.", {
|
|
336
|
-
content: z.string().describe("Content to store as resource"),
|
|
337
|
-
type: z.enum([
|
|
338
|
-
"markdown",
|
|
339
|
-
"json",
|
|
340
|
-
"text",
|
|
341
|
-
"diff"
|
|
342
|
-
]).optional().describe("Content type hint (default: text)")
|
|
343
|
-
}, async ({ content, type }, extra) => {
|
|
344
|
-
const createdBy = getAgentId(extra) || "anonymous";
|
|
345
|
-
logTool("resource_create", createdBy, {
|
|
346
|
-
type,
|
|
347
|
-
contentLen: content.length
|
|
348
|
-
});
|
|
349
|
-
const result = await provider.createResource(content, createdBy, type);
|
|
350
|
-
return { content: [{
|
|
351
|
-
type: "text",
|
|
352
|
-
text: JSON.stringify({
|
|
353
|
-
id: result.id,
|
|
354
|
-
ref: result.ref,
|
|
355
|
-
hint: `Use [description](${result.ref}) in messages or documents`
|
|
356
|
-
})
|
|
357
|
-
}] };
|
|
358
|
-
});
|
|
359
|
-
server.tool("resource_read", "Read resource content by ID. Use when you encounter resource:id references.", { id: z.string().describe("Resource ID (e.g., res_abc123)") }, async ({ id }) => {
|
|
360
|
-
const content = await provider.readResource(id);
|
|
361
|
-
if (content === null) return { content: [{
|
|
362
|
-
type: "text",
|
|
363
|
-
text: JSON.stringify({ error: `Resource not found: ${id}` })
|
|
364
|
-
}] };
|
|
365
|
-
return { content: [{
|
|
366
|
-
type: "text",
|
|
367
|
-
text: content
|
|
368
|
-
}] };
|
|
369
|
-
});
|
|
370
|
-
}
|
|
371
|
-
|
|
372
|
-
//#endregion
|
|
373
|
-
//#region src/workflow/context/mcp/inbox.ts
|
|
374
|
-
function registerInboxTools(server, ctx, options) {
|
|
375
|
-
const { provider, getAgentId, logTool } = ctx;
|
|
376
|
-
const { debugLog } = options ?? {};
|
|
377
|
-
server.tool("my_inbox", "Check your unread inbox messages. Does NOT acknowledge — use my_inbox_ack after processing.", {}, async (_args, extra) => {
|
|
378
|
-
const agent = getAgentId(extra) || "anonymous";
|
|
379
|
-
logTool("my_inbox", agent, {});
|
|
380
|
-
const messages = await provider.getInbox(agent);
|
|
381
|
-
if (debugLog && messages.length > 0) debugLog(`[mcp:${agent}] my_inbox → ${messages.length} unread`);
|
|
382
|
-
return { content: [{
|
|
383
|
-
type: "text",
|
|
384
|
-
text: formatInbox$1(messages)
|
|
385
|
-
}] };
|
|
386
|
-
});
|
|
387
|
-
server.tool("my_inbox_ack", "Acknowledge inbox messages up to a message ID. Call after processing messages.", { until: z.string().describe("Acknowledge messages up to and including this message ID") }, async ({ until }, extra) => {
|
|
388
|
-
const agent = getAgentId(extra) || "anonymous";
|
|
389
|
-
logTool("my_inbox_ack", agent, { until });
|
|
390
|
-
await provider.ackInbox(agent, until);
|
|
391
|
-
return { content: [{
|
|
392
|
-
type: "text",
|
|
393
|
-
text: JSON.stringify({
|
|
394
|
-
status: "acknowledged",
|
|
395
|
-
until
|
|
396
|
-
})
|
|
397
|
-
}] };
|
|
398
|
-
});
|
|
399
|
-
server.tool("my_status_set", "Update your status and current task. Call when starting or completing work.", {
|
|
400
|
-
task: z.string().optional().describe("Current task description (what you're working on)"),
|
|
401
|
-
state: z.enum(["idle", "running"]).optional().describe("Agent state (running = working, idle = available)"),
|
|
402
|
-
metadata: z.record(z.string(), z.unknown()).optional().describe("Additional metadata (e.g., PR number, file path)")
|
|
403
|
-
}, async (args, extra) => {
|
|
404
|
-
const agent = getAgentId(extra) || "anonymous";
|
|
405
|
-
logTool("my_status_set", agent, args);
|
|
406
|
-
const status = {};
|
|
407
|
-
if (args.task !== void 0) status.task = args.task;
|
|
408
|
-
if (args.state !== void 0) status.state = args.state;
|
|
409
|
-
if (args.metadata !== void 0) status.metadata = args.metadata;
|
|
410
|
-
await provider.setAgentStatus(agent, status);
|
|
411
|
-
return { content: [{
|
|
412
|
-
type: "text",
|
|
413
|
-
text: JSON.stringify({
|
|
414
|
-
status: "updated",
|
|
415
|
-
agent,
|
|
416
|
-
...status
|
|
417
|
-
})
|
|
418
|
-
}] };
|
|
419
|
-
});
|
|
420
|
-
}
|
|
421
|
-
|
|
422
|
-
//#endregion
|
|
423
|
-
//#region src/workflow/context/mcp/team.ts
|
|
424
|
-
function registerTeamTools(server, ctx) {
|
|
425
|
-
const { provider, validAgents, getAgentId, logTool } = ctx;
|
|
426
|
-
server.tool("team_members", "List all agents in this workflow. Use to discover who you can @mention. Optionally includes agent status (state, current task).", { includeStatus: z.boolean().optional().describe("Include agent status information") }, async (args, extra) => {
|
|
427
|
-
const currentAgent = getAgentId(extra) || "anonymous";
|
|
428
|
-
const includeStatus = args.includeStatus ?? false;
|
|
429
|
-
const agents = validAgents.map((name) => ({
|
|
430
|
-
name,
|
|
431
|
-
mention: `@${name}`,
|
|
432
|
-
isYou: name === currentAgent
|
|
433
|
-
}));
|
|
434
|
-
const result = {
|
|
435
|
-
agents,
|
|
436
|
-
count: agents.length,
|
|
437
|
-
hint: "Use @agent in channel_send to mention other agents"
|
|
438
|
-
};
|
|
439
|
-
if (includeStatus) result.status = await provider.listAgentStatus();
|
|
440
|
-
return { content: [{
|
|
441
|
-
type: "text",
|
|
442
|
-
text: JSON.stringify(result)
|
|
443
|
-
}] };
|
|
444
|
-
});
|
|
445
|
-
server.tool("team_doc_read", "Read a shared team document.", { file: z.string().optional().describe("Document file path (default: notes.md)") }, async ({ file }, extra) => {
|
|
446
|
-
logTool("team_doc_read", getAgentId(extra), { file });
|
|
447
|
-
return { content: [{
|
|
448
|
-
type: "text",
|
|
449
|
-
text: await provider.readDocument(file) || "(empty document)"
|
|
450
|
-
}] };
|
|
451
|
-
});
|
|
452
|
-
server.tool("team_doc_write", "Write/replace a shared team document.", {
|
|
453
|
-
content: z.string().describe("New document content (replaces existing)"),
|
|
454
|
-
file: z.string().optional().describe("Document file path (default: notes.md)")
|
|
455
|
-
}, async ({ content, file }, extra) => {
|
|
456
|
-
logTool("team_doc_write", getAgentId(extra), {
|
|
457
|
-
file,
|
|
458
|
-
contentLen: content.length
|
|
459
|
-
});
|
|
460
|
-
await provider.writeDocument(content, file);
|
|
461
|
-
return { content: [{
|
|
462
|
-
type: "text",
|
|
463
|
-
text: `Document ${file || "notes.md"} written successfully`
|
|
464
|
-
}] };
|
|
465
|
-
});
|
|
466
|
-
server.tool("team_doc_append", "Append content to a shared team document.", {
|
|
467
|
-
content: z.string().describe("Content to append to the document"),
|
|
468
|
-
file: z.string().optional().describe("Document file path (default: notes.md)")
|
|
469
|
-
}, async ({ content, file }, extra) => {
|
|
470
|
-
logTool("team_doc_append", getAgentId(extra), {
|
|
471
|
-
file,
|
|
472
|
-
contentLen: content.length
|
|
473
|
-
});
|
|
474
|
-
await provider.appendDocument(content, file);
|
|
475
|
-
return { content: [{
|
|
476
|
-
type: "text",
|
|
477
|
-
text: `Content appended to ${file || "notes.md"}`
|
|
478
|
-
}] };
|
|
479
|
-
});
|
|
480
|
-
server.tool("team_doc_list", "List all shared team document files.", {}, async () => {
|
|
481
|
-
const files = await provider.listDocuments();
|
|
482
|
-
return { content: [{
|
|
483
|
-
type: "text",
|
|
484
|
-
text: JSON.stringify({
|
|
485
|
-
files,
|
|
486
|
-
count: files.length
|
|
487
|
-
})
|
|
488
|
-
}] };
|
|
489
|
-
});
|
|
490
|
-
server.tool("team_doc_create", "Create a new shared team document file.", {
|
|
491
|
-
file: z.string().describe("Document file path (e.g., \"findings/auth.md\")"),
|
|
492
|
-
content: z.string().describe("Initial document content")
|
|
493
|
-
}, async ({ file, content }) => {
|
|
494
|
-
await provider.createDocument(file, content);
|
|
495
|
-
return { content: [{
|
|
496
|
-
type: "text",
|
|
497
|
-
text: `Document ${file} created successfully`
|
|
498
|
-
}] };
|
|
499
|
-
});
|
|
500
|
-
}
|
|
501
|
-
|
|
502
|
-
//#endregion
|
|
503
|
-
//#region src/workflow/context/proposals.ts
|
|
504
|
-
/**
|
|
505
|
-
* Format a proposal for display
|
|
506
|
-
*/
|
|
507
|
-
function formatProposal(proposal) {
|
|
508
|
-
const lines = [];
|
|
509
|
-
lines.push(`📋 **${proposal.title}** (${proposal.id})`);
|
|
510
|
-
lines.push(`Type: ${proposal.type} | Status: ${proposal.status}`);
|
|
511
|
-
if (proposal.description) lines.push(`\n${proposal.description}`);
|
|
512
|
-
lines.push("\nOptions:");
|
|
513
|
-
for (const option of proposal.options) {
|
|
514
|
-
const count = proposal.result?.counts[option.id] || 0;
|
|
515
|
-
const marker = proposal.result?.winner === option.id ? "✓ " : " ";
|
|
516
|
-
lines.push(`${marker}- ${option.label} (${option.id}): ${count} votes`);
|
|
517
|
-
}
|
|
518
|
-
if (proposal.result && Object.keys(proposal.result.votes).length > 0) {
|
|
519
|
-
lines.push("\nVotes:");
|
|
520
|
-
for (const [voter, choice] of Object.entries(proposal.result.votes)) lines.push(` @${voter} → ${choice}`);
|
|
521
|
-
}
|
|
522
|
-
if (proposal.status === "active" && proposal.expiresAt) {
|
|
523
|
-
const remaining = new Date(proposal.expiresAt).getTime() - Date.now();
|
|
524
|
-
const minutes = Math.max(0, Math.floor(remaining / 6e4));
|
|
525
|
-
lines.push(`\nExpires in: ${minutes} minutes`);
|
|
526
|
-
}
|
|
527
|
-
if (proposal.status === "resolved" && proposal.result?.winner) {
|
|
528
|
-
const winningOption = proposal.options.find((o) => o.id === proposal.result?.winner);
|
|
529
|
-
lines.push(`\n🏆 Winner: ${winningOption?.label || proposal.result.winner}`);
|
|
530
|
-
}
|
|
531
|
-
return lines.join("\n");
|
|
532
|
-
}
|
|
533
|
-
/**
|
|
534
|
-
* Format multiple proposals as a summary
|
|
535
|
-
*/
|
|
536
|
-
function formatProposalList(proposals) {
|
|
537
|
-
if (proposals.length === 0) return "(no proposals)";
|
|
538
|
-
return proposals.map((p) => {
|
|
539
|
-
const votes = Object.keys(p.result?.votes || {}).length;
|
|
540
|
-
return `- ${p.id}: ${p.title} [${p.status}] (${votes} votes)`;
|
|
541
|
-
}).join("\n");
|
|
542
|
-
}
|
|
543
|
-
|
|
544
|
-
//#endregion
|
|
545
|
-
//#region src/workflow/context/mcp/proposal.ts
|
|
546
|
-
function registerProposalTools(server, ctx, proposalManager) {
|
|
547
|
-
const { provider, validAgents, getAgentId } = ctx;
|
|
548
|
-
server.tool("team_proposal_create", "Create a new proposal for team voting. Use for decisions, elections, approvals, or assignments.", {
|
|
549
|
-
type: z.enum([
|
|
550
|
-
"election",
|
|
551
|
-
"decision",
|
|
552
|
-
"approval",
|
|
553
|
-
"assignment"
|
|
554
|
-
]).describe("Type of proposal"),
|
|
555
|
-
title: z.string().describe("Brief title for the proposal"),
|
|
556
|
-
description: z.string().optional().describe("Detailed description"),
|
|
557
|
-
options: z.array(z.object({
|
|
558
|
-
id: z.string().describe("Unique option identifier"),
|
|
559
|
-
label: z.string().describe("Display label for the option")
|
|
560
|
-
})).optional().describe("Voting options (required except for approval type)"),
|
|
561
|
-
resolution: z.object({
|
|
562
|
-
type: z.enum([
|
|
563
|
-
"plurality",
|
|
564
|
-
"majority",
|
|
565
|
-
"unanimous"
|
|
566
|
-
]).optional().describe("How to determine winner"),
|
|
567
|
-
quorum: z.number().optional().describe("Minimum votes required"),
|
|
568
|
-
tieBreaker: z.enum([
|
|
569
|
-
"first",
|
|
570
|
-
"random",
|
|
571
|
-
"creator-decides"
|
|
572
|
-
]).optional().describe("How to break ties")
|
|
573
|
-
}).optional().describe("Resolution rules"),
|
|
574
|
-
binding: z.boolean().optional().describe("Whether result is binding (default: true)"),
|
|
575
|
-
timeoutSeconds: z.number().optional().describe("Timeout in seconds (default: 3600)")
|
|
576
|
-
}, async (params, extra) => {
|
|
577
|
-
const createdBy = getAgentId(extra) || "anonymous";
|
|
578
|
-
try {
|
|
579
|
-
const proposal = await proposalManager.create({
|
|
580
|
-
type: params.type,
|
|
581
|
-
title: params.title,
|
|
582
|
-
description: params.description,
|
|
583
|
-
options: params.options,
|
|
584
|
-
resolution: params.resolution,
|
|
585
|
-
binding: params.binding,
|
|
586
|
-
timeoutSeconds: params.timeoutSeconds,
|
|
587
|
-
createdBy
|
|
588
|
-
});
|
|
589
|
-
const optionsList = proposal.options.map((o) => `${o.id}: ${o.label}`).join(", ");
|
|
590
|
-
const otherAgents = validAgents.filter((a) => a !== createdBy).map((a) => `@${a}`).join(" ");
|
|
591
|
-
await provider.appendChannel(createdBy, `Created proposal "${proposal.title}" (${proposal.id})\nOptions: ${optionsList}\nUse team_vote tool to cast your vote. ${otherAgents}`);
|
|
592
|
-
return { content: [{
|
|
593
|
-
type: "text",
|
|
594
|
-
text: JSON.stringify({
|
|
595
|
-
status: "created",
|
|
596
|
-
proposal: {
|
|
597
|
-
id: proposal.id,
|
|
598
|
-
title: proposal.title,
|
|
599
|
-
options: proposal.options,
|
|
600
|
-
expiresAt: proposal.expiresAt
|
|
601
|
-
}
|
|
602
|
-
})
|
|
603
|
-
}] };
|
|
604
|
-
} catch (error) {
|
|
605
|
-
return { content: [{
|
|
606
|
-
type: "text",
|
|
607
|
-
text: JSON.stringify({
|
|
608
|
-
status: "error",
|
|
609
|
-
error: error instanceof Error ? error.message : String(error)
|
|
610
|
-
})
|
|
611
|
-
}] };
|
|
612
|
-
}
|
|
613
|
-
});
|
|
614
|
-
server.tool("team_vote", "Cast your vote on a team proposal.", {
|
|
615
|
-
proposal: z.string().describe("Proposal ID (e.g., prop-1)"),
|
|
616
|
-
choice: z.string().describe("Option ID to vote for"),
|
|
617
|
-
reason: z.string().optional().describe("Optional reason for your vote")
|
|
618
|
-
}, async ({ proposal: proposalId, choice, reason }, extra) => {
|
|
619
|
-
const voter = getAgentId(extra) || "anonymous";
|
|
620
|
-
const result = await proposalManager.vote({
|
|
621
|
-
proposalId,
|
|
622
|
-
voter,
|
|
623
|
-
choice,
|
|
624
|
-
reason
|
|
625
|
-
});
|
|
626
|
-
if (!result.success) return { content: [{
|
|
627
|
-
type: "text",
|
|
628
|
-
text: JSON.stringify({
|
|
629
|
-
status: "error",
|
|
630
|
-
error: result.error
|
|
631
|
-
})
|
|
632
|
-
}] };
|
|
633
|
-
const reasonText = reason ? ` (reason: ${reason})` : "";
|
|
634
|
-
await provider.appendChannel(voter, `Voted "${choice}" on ${proposalId}${reasonText}`);
|
|
635
|
-
if (result.resolved && result.proposal) {
|
|
636
|
-
const winnerOption = result.proposal.options.find((o) => o.id === result.proposal.result?.winner);
|
|
637
|
-
const mentions = Object.keys(result.proposal.result?.votes || {}).map((v) => `@${v}`).join(" ");
|
|
638
|
-
await provider.appendChannel("system", `Proposal ${proposalId} resolved! Winner: ${winnerOption?.label || result.proposal.result?.winner || "none"} ${mentions}`);
|
|
639
|
-
}
|
|
640
|
-
return { content: [{
|
|
641
|
-
type: "text",
|
|
642
|
-
text: JSON.stringify({
|
|
643
|
-
status: "voted",
|
|
644
|
-
proposal: proposalId,
|
|
645
|
-
choice,
|
|
646
|
-
resolved: result.resolved,
|
|
647
|
-
winner: result.proposal?.result?.winner
|
|
648
|
-
})
|
|
649
|
-
}] };
|
|
650
|
-
});
|
|
651
|
-
server.tool("team_proposal_status", "Check status of team proposals. Omit proposal ID to see all active proposals.", { proposal: z.string().optional().describe("Proposal ID (omit for all active)") }, async ({ proposal: proposalId }) => {
|
|
652
|
-
if (proposalId) {
|
|
653
|
-
const proposal = await proposalManager.get(proposalId);
|
|
654
|
-
if (!proposal) return { content: [{
|
|
655
|
-
type: "text",
|
|
656
|
-
text: JSON.stringify({
|
|
657
|
-
status: "error",
|
|
658
|
-
error: `Proposal not found: ${proposalId}`
|
|
659
|
-
})
|
|
660
|
-
}] };
|
|
661
|
-
return { content: [{
|
|
662
|
-
type: "text",
|
|
663
|
-
text: formatProposal(proposal)
|
|
664
|
-
}] };
|
|
665
|
-
}
|
|
666
|
-
const activeProposals = await proposalManager.list("active");
|
|
667
|
-
return { content: [{
|
|
668
|
-
type: "text",
|
|
669
|
-
text: activeProposals.length > 0 ? formatProposalList(activeProposals) : "(no active proposals)"
|
|
670
|
-
}] };
|
|
671
|
-
});
|
|
672
|
-
server.tool("team_proposal_cancel", "Cancel a proposal you created.", { proposal: z.string().describe("Proposal ID to cancel") }, async ({ proposal: proposalId }, extra) => {
|
|
673
|
-
const cancelledBy = getAgentId(extra) || "anonymous";
|
|
674
|
-
const result = await proposalManager.cancel(proposalId, cancelledBy);
|
|
675
|
-
if (!result.success) return { content: [{
|
|
676
|
-
type: "text",
|
|
677
|
-
text: JSON.stringify({
|
|
678
|
-
status: "error",
|
|
679
|
-
error: result.error
|
|
680
|
-
})
|
|
681
|
-
}] };
|
|
682
|
-
await provider.appendChannel(cancelledBy, `Cancelled proposal ${proposalId}`);
|
|
683
|
-
return { content: [{
|
|
684
|
-
type: "text",
|
|
685
|
-
text: JSON.stringify({
|
|
686
|
-
status: "cancelled",
|
|
687
|
-
proposal: proposalId
|
|
688
|
-
})
|
|
689
|
-
}] };
|
|
690
|
-
});
|
|
691
|
-
}
|
|
692
|
-
|
|
693
|
-
//#endregion
|
|
694
|
-
//#region src/workflow/context/mcp/feedback.ts
|
|
695
|
-
/**
|
|
696
|
-
* Register the feedback_submit tool and return a getter for collected entries.
|
|
697
|
-
*/
|
|
698
|
-
/**
 * Register the `feedback_submit` tool on the MCP server and return a getter
 * for the entries collected so far.
 *
 * Entries are held in an in-memory buffer capped at 50: once full, the oldest
 * entry is dropped before a new one is appended (FIFO).
 */
function registerFeedbackTool(server, ctx) {
  const { getAgentId, logTool } = ctx;
  // Collected feedback entries, newest last. Never persisted by this module.
  const entries = [];
  server.tool("feedback_submit", "Report a workflow improvement need. Use when you hit something inconvenient — a missing tool, an awkward step, or a capability you wished you had.", {
    target: z.string().describe("The area this is about — a tool name, a workflow step, or a general area (e.g. file search, code review)."),
    type: z.enum([
      "missing",
      "friction",
      "suggestion"
    ]).describe("missing: a tool or capability you needed but didn't have. friction: something that works but is awkward or slow. suggestion: a concrete improvement idea."),
    description: z.string().describe("What you needed or what could be improved. Be specific."),
    context: z.string().optional().describe("Optional: what you were trying to do when you hit this.")
  }, async ({ target, type, description, context: ctx }, extra) => {
    // NOTE: the tool's `context` argument is destructured as `ctx`, shadowing
    // the outer registration `ctx` inside this handler.
    logTool("feedback_submit", getAgentId(extra) || "anonymous", {
      target,
      type
    });
    const entry = {
      timestamp: (/* @__PURE__ */ new Date()).toISOString(),
      target,
      type,
      description,
      // Only include `context` when the caller supplied one.
      ...ctx ? { context: ctx } : {}
    };
    // Cap the buffer at 50 entries: evict the oldest first.
    if (entries.length >= 50) entries.shift();
    entries.push(entry);
    return { content: [{
      type: "text",
      text: JSON.stringify({ status: "recorded" })
    }] };
  });
  // Return a defensive copy so callers cannot mutate the internal buffer.
  return { getFeedback: () => [...entries] };
}
|
|
731
|
-
|
|
732
|
-
//#endregion
|
|
733
|
-
//#region src/workflow/context/mcp/skills.ts
|
|
734
|
-
/**
 * Register the skills tools (`skills_list`, `skills_view`, `skills_read`)
 * on the MCP server, all backed by the given skills `provider`.
 */
function registerSkillsTools(server, provider) {
  // List available skills (name + description). `provider.list()` is synchronous here.
  server.tool("skills_list", "List all available agent skills with their descriptions.", {}, async () => {
    const skills = provider.list();
    if (skills.length === 0) return { content: [{
      type: "text",
      text: JSON.stringify({ message: "No skills available" })
    }] };
    return { content: [{
      type: "text",
      text: JSON.stringify({ skills: skills.map((s) => ({
        name: s.name,
        description: s.description
      })) })
    }] };
  });
  // Return a skill's SKILL.md verbatim. Errors from provider.view propagate to the caller.
  server.tool("skills_view", "Read the complete SKILL.md file for a skill.", { skillName: z.string().describe("Skill name to view") }, async ({ skillName }) => {
    return { content: [{
      type: "text",
      text: await provider.view(skillName)
    }] };
  });
  // Read an arbitrary file inside a skill directory by relative path.
  server.tool("skills_read", "Read a file within a skill directory (e.g., references/, scripts/, assets/).", {
    skillName: z.string().describe("Skill name"),
    filePath: z.string().describe("Relative file path within the skill (e.g., \"references/search-strategies.md\")")
  }, async ({ skillName, filePath }) => {
    return { content: [{
      type: "text",
      text: await provider.readFile(skillName, filePath)
    }] };
  });
}
|
|
765
|
-
|
|
766
|
-
//#endregion
|
|
767
|
-
//#region src/workflow/context/mcp/server.ts
|
|
768
|
-
/**
|
|
769
|
-
* Context MCP Server — thin orchestrator.
|
|
770
|
-
*
|
|
771
|
-
* Creates an McpServer and registers tools from each category.
|
|
772
|
-
* The actual tool implementations live in their own files:
|
|
773
|
-
* channel.ts, resource.ts, inbox.ts, team.ts, proposal.ts,
|
|
774
|
-
* feedback.ts, skills.ts
|
|
775
|
-
*/
|
|
776
|
-
/**
 * Context MCP Server — thin orchestrator.
 *
 * Creates an McpServer and registers tools from each category
 * (channel, resource, inbox, team; optionally proposals, feedback, skills).
 * Returns the server plus bookkeeping handles for the caller.
 */
function createContextMCPServer(options) {
  const { provider, validAgents, name = "workflow-context", version = "1.0.0", onMention, proposalManager, feedback: feedbackEnabled, skills, debugLog } = options;
  const server = new McpServer({
    name,
    version
  });
  const eventLog = new EventLog(provider);
  // Shared registration context passed to every tool registrar.
  const ctx = {
    provider,
    eventLog,
    validAgents,
    getAgentId,
    logTool: createLogTool(eventLog)
  };
  // Per-agent connection tracking; populated elsewhere by the transport layer.
  const agentConnections = /* @__PURE__ */ new Map();
  // Names of every tool this server exposes; optional groups are added below.
  const mcpToolNames = new Set([
    "channel_send",
    "channel_read",
    "resource_create",
    "resource_read",
    "my_inbox",
    "my_inbox_ack",
    "my_status_set",
    "team_members",
    "team_doc_read",
    "team_doc_write",
    "team_doc_append",
    "team_doc_list",
    "team_doc_create"
  ]);
  registerChannelTools(server, ctx, { onMention });
  registerResourceTools(server, ctx);
  registerInboxTools(server, ctx, { debugLog });
  registerTeamTools(server, ctx);
  // Proposal/vote tools are only available when a manager is supplied.
  if (proposalManager) {
    registerProposalTools(server, ctx, proposalManager);
    mcpToolNames.add("team_proposal_create");
    mcpToolNames.add("team_vote");
    mcpToolNames.add("team_proposal_status");
    mcpToolNames.add("team_proposal_cancel");
  }
  // Defaults to an empty list when the feedback tool is disabled.
  let getFeedback = () => [];
  if (feedbackEnabled) {
    getFeedback = registerFeedbackTool(server, ctx).getFeedback;
    mcpToolNames.add("feedback_submit");
  }
  if (skills) {
    registerSkillsTools(server, skills);
    mcpToolNames.add("skills_list");
    mcpToolNames.add("skills_view");
    mcpToolNames.add("skills_read");
  }
  return {
    server,
    agentConnections,
    validAgents,
    proposalManager,
    getFeedback,
    mcpToolNames,
    eventLog
  };
}
|
|
838
|
-
|
|
839
|
-
//#endregion
|
|
840
|
-
//#region src/workflow/context/types.ts
|
|
841
|
-
/** Resource ID prefix */
const RESOURCE_PREFIX$1 = "res_";
/** Resource URI scheme */
const RESOURCE_SCHEME = "resource:";
/** Message length threshold for channel messages - content longer than this should use resources or documents */
const MESSAGE_LENGTH_THRESHOLD = 1200;
/**
 * Generate a unique resource ID: prefix, base-36 timestamp, then six
 * base-36 characters of randomness.
 */
function generateResourceId() {
  const stamp = Date.now().toString(36);
  const noise = Math.random().toString(36).slice(2, 8);
  return `${RESOURCE_PREFIX$1}${stamp}${noise}`;
}
/**
 * Create a resource reference for use in markdown.
 * Example: resource:res_abc123
 */
function createResourceRef(id) {
  return RESOURCE_SCHEME + id;
}
/**
 * Check whether content is long enough that it should be stored as a
 * resource instead of inline in a channel message.
 */
function shouldUseResource(content) {
  const exceedsThreshold = content.length > MESSAGE_LENGTH_THRESHOLD;
  return exceedsThreshold;
}
|
|
866
|
-
/**
 * Default context configuration values.
 * `dir` is a template: ${{ workflow.name }} / ${{ workflow.tag }} are
 * substituted by resolveContextDir; `document` is the default notes file.
 */
const CONTEXT_DEFAULTS = {
  dir: "~/.agent-worker/workflows/${{ workflow.name }}/${{ workflow.tag }}/",
  document: "notes.md"
};
|
|
871
|
-
/** Pattern for @mentions: "@" then a letter, then letters/digits/underscore/hyphen. */
const MENTION_PATTERN = /@([a-zA-Z][a-zA-Z0-9_-]*)/g;
/**
 * Extract the unique @mentions from a message, in order of first appearance.
 * Only names present in `validAgents` are returned.
 */
function extractMentions(content, validAgents) {
  // The shared /g regex is stateful; reset before scanning.
  MENTION_PATTERN.lastIndex = 0;
  const found = new Set();
  for (const m of content.matchAll(MENTION_PATTERN)) {
    const candidate = m[1];
    if (candidate && validAgents.includes(candidate)) found.add(candidate);
  }
  return [...found];
}
|
|
886
|
-
/** Keywords that mark a message as urgent (case-insensitive, whole words). */
const URGENT_PATTERN = /\b(urgent|asap|blocked|critical)\b/i;
/**
 * Calculate priority for an inbox message: "high" when more than one agent
 * is mentioned or the content contains an urgency keyword, else "normal".
 */
function calculatePriority(msg) {
  const broadcast = msg.mentions.length > 1;
  return broadcast || URGENT_PATTERN.test(msg.content) ? "high" : "normal";
}
|
|
896
|
-
|
|
897
|
-
//#endregion
|
|
898
|
-
//#region src/workflow/context/provider.ts
|
|
899
|
-
/**
|
|
900
|
-
* Composite ContextProvider — delegates to domain-specific stores.
|
|
901
|
-
*
|
|
902
|
-
* Each store owns one concern:
|
|
903
|
-
* - ChannelStore: append-only JSONL message log
|
|
904
|
-
* - InboxStore: filtered view of channel with per-agent cursors
|
|
905
|
-
* - DocumentStore: raw text documents
|
|
906
|
-
* - ResourceStore: content-addressed blobs
|
|
907
|
-
* - StatusStore: agent status tracking
|
|
908
|
-
*
|
|
909
|
-
* smartSend is the only cross-store orchestration (channel + resource).
|
|
910
|
-
*/
|
|
911
|
-
/**
 * Composite ContextProvider — delegates to domain-specific stores.
 *
 * Each store owns one concern:
 * - channel: append-only message log
 * - inbox: filtered view of channel with per-agent cursors
 * - documents: raw text documents
 * - resources: blob storage keyed by generated IDs
 * - status: agent status tracking
 *
 * smartSend is the only cross-store orchestration (channel + resource).
 */
var ContextProviderImpl = class {
  constructor(channel, inbox, documents, resources, status, validAgents) {
    this.channel = channel;
    this.inbox = inbox;
    this.documents = documents;
    this.resources = resources;
    this.status = status;
    this.validAgents = validAgents;
  }
  // --- channel delegation ---
  appendChannel(from, content, options) {
    return this.channel.append(from, content, options);
  }
  readChannel(options) {
    return this.channel.read(options);
  }
  tailChannel(cursor) {
    return this.channel.tail(cursor);
  }
  /**
   * Smart send: automatically converts long messages to resources.
   *
   * If content exceeds MESSAGE_LENGTH_THRESHOLD:
   * 1. Creates a resource with the full content
   * 2. Logs the full content on the debug channel for visibility
   * 3. Sends a short message referencing the resource (preserving @mentions)
   */
  async smartSend(from, content, options) {
    if (!shouldUseResource(content)) return this.channel.append(from, content, options);
    // Fenced code blocks are stored as markdown, everything else as text.
    const resourceType = content.startsWith("```") || content.includes("\n```") ? "markdown" : "text";
    const resource = await this.resources.create(content, from, resourceType);
    await this.channel.append("system", `Created resource ${resource.id} (${content.length} chars) for @${from}:\n${content}`, { kind: "debug" });
    // Carry the original @mentions into the short stub so inbox routing still fires.
    const mentions = extractMentions(content, this.validAgents);
    const shortMessage = `${mentions.length > 0 ? mentions.map((m) => `@${m}`).join(" ") + " " : ""}[Long content stored as resource]\n\nRead the full content: resource_read("${resource.id}")\n\nReference: ${resource.ref}`;
    return this.channel.append(from, shortMessage, options);
  }
  // --- inbox delegation ---
  getInbox(agent) {
    return this.inbox.getInbox(agent);
  }
  markInboxSeen(agent, untilId) {
    return this.inbox.markSeen(agent, untilId);
  }
  ackInbox(agent, untilId) {
    return this.inbox.ack(agent, untilId);
  }
  // --- document delegation ---
  readDocument(file) {
    return this.documents.read(file);
  }
  writeDocument(content, file) {
    return this.documents.write(content, file);
  }
  appendDocument(content, file) {
    return this.documents.append(content, file);
  }
  listDocuments() {
    return this.documents.list();
  }
  createDocument(file, content) {
    return this.documents.create(file, content);
  }
  // --- resource delegation ---
  createResource(content, createdBy, type) {
    return this.resources.create(content, createdBy, type);
  }
  readResource(id) {
    return this.resources.read(id);
  }
  // --- status delegation ---
  setAgentStatus(agent, status) {
    return this.status.set(agent, status);
  }
  getAgentStatus(agent) {
    return this.status.get(agent);
  }
  listAgentStatus() {
    return this.status.list();
  }
  /** Record the current channel position as the start of a run (inbox baseline). */
  async markRunStart() {
    await this.inbox.markRunStart();
  }
  /** Tear down transient state; only the inbox holds any. */
  async destroy() {
    await this.inbox.destroy();
  }
};
|
|
992
|
-
|
|
993
|
-
//#endregion
|
|
994
|
-
//#region src/workflow/context/storage.ts
|
|
995
|
-
/**
|
|
996
|
-
* Storage Backend
|
|
997
|
-
* Abstract storage layer for workflow context persistence.
|
|
998
|
-
*
|
|
999
|
-
* Keys are logical paths (e.g., "channel.jsonl", "documents/notes.md", "_state/inbox.json").
|
|
1000
|
-
* Implementations map these to actual storage (filesystem, memory, DB, etc.).
|
|
1001
|
-
*/
|
|
1002
|
-
/**
|
|
1003
|
-
* In-memory storage backend for testing and ephemeral workflows.
|
|
1004
|
-
*/
|
|
1005
|
-
/**
 * In-memory storage backend for testing and ephemeral workflows.
 * Keys map to string values in a Map; offsets are character offsets.
 */
var MemoryStorage = class {
  data = new Map();
  /** Return the stored value for `key`, or null when absent. */
  async read(key) {
    const value = this.data.get(key);
    return value === undefined ? null : value;
  }
  /** Return content from `offset` to the end, plus the new end offset. */
  async readFrom(key, offset) {
    const data = this.data.get(key) ?? "";
    const end = data.length;
    if (offset >= end) {
      return { content: "", offset: end };
    }
    return { content: data.slice(offset), offset: end };
  }
  async write(key, content) {
    this.data.set(key, content);
  }
  async append(key, content) {
    const prior = this.data.get(key) ?? "";
    this.data.set(key, prior + content);
  }
  async exists(key) {
    return this.data.has(key);
  }
  /** List keys under `prefix` (treated as a directory), relative to it, sorted. */
  async list(prefix) {
    const dirPrefix = prefix.endsWith("/") ? prefix : prefix + "/";
    const matches = [];
    for (const key of this.data.keys()) {
      if (key.startsWith(dirPrefix)) {
        matches.push(key.slice(dirPrefix.length));
      }
    }
    return matches.sort();
  }
  async delete(key) {
    this.data.delete(key);
  }
  /** Clear all data (for testing) */
  clear() {
    this.data.clear();
  }
  /** Get the number of stored keys (for testing) */
  get size() {
    return this.data.size;
  }
  /** Get all keys (for testing) */
  keys() {
    return [...this.data.keys()];
  }
};
|
|
1053
|
-
/**
|
|
1054
|
-
* File-based storage backend.
|
|
1055
|
-
* Keys map to file paths relative to a base directory.
|
|
1056
|
-
*/
|
|
1057
|
-
/**
 * File-based storage backend.
 * Keys map to file paths relative to `baseDir`; parent directories are
 * created on demand for writes and appends.
 */
var FileStorage = class {
  constructor(baseDir) {
    this.baseDir = baseDir;
    // Create the root directory up front so later operations can rely on it.
    if (!existsSync(baseDir)) mkdirSync(baseDir, { recursive: true });
  }
  /** Map a logical key to its path under baseDir. */
  resolve(key) {
    return join(this.baseDir, key);
  }
  async ensureParentDir(filePath) {
    await mkdir(dirname(filePath), { recursive: true });
  }
  /** Read the whole file as UTF-8, or null when it cannot be read. */
  async read(key) {
    try {
      return await readFile(this.resolve(key), "utf-8");
    } catch {
      return null;
    }
  }
  /**
   * Read from byte `offset` to end of file, returning the chunk and the new
   * end-of-file offset. A missing file yields { content: "", offset: 0 };
   * any other failure yields no content and echoes the caller's offset.
   */
  async readFrom(key, offset) {
    let handle;
    try {
      handle = await open(this.resolve(key), "r");
      const { size } = await handle.stat();
      if (offset >= size) {
        return { content: "", offset: size };
      }
      const span = size - offset;
      const buffer = Buffer.alloc(span);
      await handle.read(buffer, 0, span, offset);
      return { content: buffer.toString("utf-8"), offset: size };
    } catch (err) {
      if (err.code === "ENOENT") {
        return { content: "", offset: 0 };
      }
      return { content: "", offset };
    } finally {
      await handle?.close();
    }
  }
  async write(key, content) {
    const target = this.resolve(key);
    await this.ensureParentDir(target);
    await writeFile(target, content);
  }
  async append(key, content) {
    const target = this.resolve(key);
    await this.ensureParentDir(target);
    await appendFile(target, content);
  }
  async exists(key) {
    try {
      await stat(this.resolve(key));
      return true;
    } catch {
      return false;
    }
  }
  /** Recursively list files under `prefix`, as sorted paths relative to it. */
  async list(prefix) {
    const root = this.resolve(prefix);
    try {
      const files = [];
      const walk = async (dir) => {
        for (const entry of await readdir(dir, { withFileTypes: true })) {
          const full = join(dir, entry.name);
          if (entry.isDirectory()) {
            await walk(full);
          } else if (entry.isFile()) {
            files.push(relative(root, full));
          }
        }
      };
      await walk(root);
      return files.sort();
    } catch {
      // Missing directory (or unreadable) — report nothing.
      return [];
    }
  }
  /** Delete the file for `key`; a missing file is a no-op. */
  async delete(key) {
    try {
      await unlink(this.resolve(key));
    } catch {}
  }
};
|
|
1148
|
-
|
|
1149
|
-
//#endregion
|
|
1150
|
-
//#region src/workflow/context/stores/channel.ts
|
|
1151
|
-
/**
|
|
1152
|
-
* Channel Store
|
|
1153
|
-
* Append-only JSONL message log with incremental sync and visibility filtering.
|
|
1154
|
-
*/
|
|
1155
|
-
/** Storage key for the channel's JSONL log. */
const CHANNEL_KEY = "channel.jsonl";
/**
 * JSONL-backed channel store.
 * Incrementally syncs from a StorageBackend using byte offsets, so repeated
 * reads only parse newly appended lines.
 */
var DefaultChannelStore = class {
  // Parsed messages accumulated so far, in append order.
  entries = [];
  // Storage offset up to which `entries` reflects the log.
  offset = 0;
  // In-flight sync, shared so concurrent callers don't double-read.
  syncPromise = null;
  constructor(storage, validAgents) {
    this.storage = storage;
    this.validAgents = validAgents;
  }
  /** Deduplicated sync: concurrent callers await the same in-flight read. */
  sync() {
    if (!this.syncPromise) this.syncPromise = this.doSync().finally(() => {
      this.syncPromise = null;
    });
    return this.syncPromise;
  }
  async doSync() {
    // Only read bytes appended since the last sync.
    const result = await this.storage.readFrom(CHANNEL_KEY, this.offset);
    if (result.content) {
      this.entries.push(...parseJsonl(result.content));
      this.offset = result.offset;
    }
    return this.entries;
  }
  // Number of messages seen as of the last sync (does not trigger a sync).
  length() {
    return this.entries.length;
  }
  /**
   * Append a message to the log. @mentions are extracted from the content;
   * `to`, `kind`, and `toolCall` are only set when provided.
   * NOTE: the message is written to storage but not pushed into `entries`
   * here — it is picked up by the next sync().
   */
  async append(from, content, options) {
    const msg = {
      id: nanoid(),
      timestamp: (/* @__PURE__ */ new Date()).toISOString(),
      from,
      content,
      mentions: extractMentions(content, this.validAgents)
    };
    if (options?.to) msg.to = options.to;
    if (options?.kind) msg.kind = options.kind;
    if (options?.toolCall) msg.toolCall = options.toolCall;
    const line = JSON.stringify(msg) + "\n";
    await this.storage.append(CHANNEL_KEY, line);
    return msg;
  }
  /**
   * Read messages, optionally filtered for one agent's view
   * (hides system/debug/output messages and DMs not involving the agent),
   * by timestamp (`since`, exclusive), and limited to the last `limit`.
   */
  async read(options) {
    let entries = await this.sync();
    if (options?.agent) {
      const agent = options.agent;
      entries = entries.filter((e) => {
        if (e.kind === "system" || e.kind === "debug" || e.kind === "output") return false;
        // Direct messages are visible only to sender and recipient.
        if (e.to) return e.to === agent || e.from === agent;
        return true;
      });
    }
    if (options?.since) entries = entries.filter((e) => e.timestamp > options.since);
    if (options?.limit && options.limit > 0) entries = entries.slice(-options.limit);
    return entries;
  }
  /** Return messages after index `cursor` plus the new cursor (unfiltered view). */
  async tail(cursor) {
    const entries = await this.sync();
    return {
      entries: entries.slice(cursor),
      cursor: entries.length
    };
  }
};
|
|
1222
|
-
/**
|
|
1223
|
-
* Parse JSONL content into an array of objects.
|
|
1224
|
-
* Skips empty lines and lines that fail to parse.
|
|
1225
|
-
*/
|
|
1226
|
-
/**
 * Parse JSONL text into an array of values.
 * Blank lines and lines that fail to parse are silently skipped.
 */
function parseJsonl(content) {
  const parsed = [];
  for (const rawLine of content.split("\n")) {
    const line = rawLine.trim();
    if (line === "") continue;
    try {
      parsed.push(JSON.parse(line));
    } catch {
      // Malformed line (e.g. a torn write) — skip and keep going.
    }
  }
  return parsed;
}
|
|
1237
|
-
|
|
1238
|
-
//#endregion
|
|
1239
|
-
//#region src/workflow/context/stores/inbox.ts
|
|
1240
|
-
/** Storage key for the inbox cursor state (ack + seen cursors per agent). */
const INBOX_STATE_KEY = "_state/inbox.json";
/**
 * Default inbox store backed by channel + JSON cursor file.
 * Inbox is a filtered view of the channel, not a separate log.
 * Two cursors per agent: `readCursors` (acked — hidden entirely) and
 * `seenCursors` (seen — still listed, flagged as seen).
 */
var DefaultInboxStore = class {
  // Channel index at the start of the current run; entries before it are excluded.
  runStartIndex = 0;
  constructor(channel, storage) {
    this.channel = channel;
    this.storage = storage;
  }
  /**
   * Build the agent's inbox: channel messages after the run start and the
   * agent's ack cursor, that mention the agent (or are addressed to it),
   * excluding its own messages and system/debug/output/tool_call entries.
   * Each item carries a computed priority and a `seen` flag.
   */
  async getInbox(agent) {
    const state = await this.loadState();
    const lastAckId = state.readCursors[agent];
    const lastSeenId = state.seenCursors?.[agent];
    let entries = await this.channel.sync();
    if (this.runStartIndex > 0) entries = entries.slice(this.runStartIndex);
    if (lastAckId) {
      const ackIdx = entries.findIndex((e) => e.id === lastAckId);
      // If the acked id is no longer found, fall back to showing everything.
      if (ackIdx >= 0) entries = entries.slice(ackIdx + 1);
    }
    let seenIdx = -1;
    if (lastSeenId) seenIdx = entries.findIndex((e) => e.id === lastSeenId);
    return entries.filter((e) => {
      if (e.kind === "system" || e.kind === "debug" || e.kind === "output" || e.kind === "tool_call") return false;
      if (e.from === agent) return false;
      return e.mentions.includes(agent) || e.to === agent;
    }).map((entry) => {
      // Position in the post-ack slice decides whether the entry was "seen".
      const entryIdx = entries.indexOf(entry);
      return {
        entry,
        priority: calculatePriority(entry),
        seen: seenIdx >= 0 && entryIdx <= seenIdx
      };
    });
  }
  /** Advance the agent's "seen" cursor (entries remain in the inbox). */
  async markSeen(agent, untilId) {
    const state = await this.loadState();
    if (!state.seenCursors) state.seenCursors = {};
    state.seenCursors[agent] = untilId;
    await this.storage.write(INBOX_STATE_KEY, JSON.stringify(state, null, 2));
  }
  /** Advance the agent's ack cursor (entries up to it are removed from the inbox). */
  async ack(agent, untilId) {
    const state = await this.loadState();
    state.readCursors[agent] = untilId;
    await this.storage.write(INBOX_STATE_KEY, JSON.stringify(state, null, 2));
  }
  /** Record the current channel length as the baseline for this run. */
  async markRunStart() {
    this.runStartIndex = (await this.channel.sync()).length;
  }
  /** Remove the persisted cursor state. */
  async destroy() {
    await this.storage.delete(INBOX_STATE_KEY);
  }
  /** Load cursor state; a missing or corrupt file yields empty cursors. */
  async loadState() {
    const raw = await this.storage.read(INBOX_STATE_KEY);
    if (!raw) return { readCursors: {} };
    try {
      const data = JSON.parse(raw);
      return {
        readCursors: data.readCursors || {},
        seenCursors: data.seenCursors
      };
    } catch {
      return { readCursors: {} };
    }
  }
};
|
|
1307
|
-
|
|
1308
|
-
//#endregion
|
|
1309
|
-
//#region src/workflow/context/stores/document.ts
|
|
1310
|
-
/** Key prefix under which all documents are stored. */
const DOCUMENT_PREFIX = "documents/";
/**
 * Default document store backed by a StorageBackend.
 * Documents are stored as raw text under a key prefix; when no file name is
 * given, CONTEXT_DEFAULTS.document is used.
 */
var DefaultDocumentStore = class {
  constructor(storage) {
    this.storage = storage;
  }
  /** Map an optional file name to its storage key (default document when omitted). */
  key(file) {
    return DOCUMENT_PREFIX + (file || CONTEXT_DEFAULTS.document);
  }
  /** Read a document; a missing document reads as the empty string. */
  async read(file) {
    return await this.storage.read(this.key(file)) ?? "";
  }
  async write(content, file) {
    await this.storage.write(this.key(file), content);
  }
  async append(content, file) {
    await this.storage.append(this.key(file), content);
  }
  /** List documents — only .md files are reported, sorted. */
  async list() {
    return (await this.storage.list(DOCUMENT_PREFIX)).filter((f) => f.endsWith(".md")).sort();
  }
  /** Create a new document; throws if it already exists. */
  async create(file, content) {
    const key = this.key(file);
    if (await this.storage.exists(key)) throw new Error(`Document already exists: ${file}`);
    await this.storage.write(key, content);
  }
};
|
|
1340
|
-
|
|
1341
|
-
//#endregion
|
|
1342
|
-
//#region src/workflow/context/stores/resource.ts
|
|
1343
|
-
/** Key prefix under which all resources are stored. */
const RESOURCE_PREFIX = "resources/";
/**
 * Default resource store backed by a StorageBackend.
 * Resources are keyed by generated ID with a type-based file extension:
 * json -> .json, diff -> .diff, everything else -> .md.
 */
var DefaultResourceStore = class {
  constructor(storage) {
    this.storage = storage;
  }
  /** Store content and return { id, ref }. `_createdBy` is accepted but unused here. */
  async create(content, _createdBy, type = "text") {
    const id = generateResourceId();
    const key = `${RESOURCE_PREFIX}${id}.${type === "json" ? "json" : type === "diff" ? "diff" : "md"}`;
    await this.storage.write(key, content);
    return {
      id,
      ref: createResourceRef(id)
    };
  }
  /**
   * Read a resource by ID, probing known extensions in order.
   * NOTE: ".txt" is probed here although create() never writes it —
   * presumably for externally created resources.
   */
  async read(id) {
    for (const ext of [
      "md",
      "json",
      "diff",
      "txt"
    ]) {
      const key = `${RESOURCE_PREFIX}${id}.${ext}`;
      const content = await this.storage.read(key);
      if (content !== null) return content;
    }
    return null;
  }
};
|
|
1375
|
-
|
|
1376
|
-
//#endregion
|
|
1377
|
-
//#region src/workflow/context/stores/status.ts
|
|
1378
|
-
/** Storage key holding all agent statuses as one JSON object. */
const STATUS_KEY = "_state/agent-status.json";
/**
 * Default status store backed by a single JSON file via StorageBackend.
 * set() merges a partial status into the existing record and maintains
 * `startedAt`/`task` bookkeeping around running/idle transitions.
 */
var DefaultStatusStore = class {
  constructor(storage) {
    this.storage = storage;
  }
  /** Merge a partial status update for `agent` and persist the whole map. */
  async set(agent, status) {
    const all = await this.loadAll();
    const now = (/* @__PURE__ */ new Date()).toISOString();
    const previous = all[agent] || {
      state: "idle",
      lastUpdate: now
    };
    const next = {
      ...previous,
      ...status,
      lastUpdate: now
    };
    // Stamp startedAt only on the transition into "running".
    if (status.state === "running" && previous.state !== "running") {
      next.startedAt = now;
    }
    // Going idle clears run-scoped fields.
    if (status.state === "idle") {
      next.startedAt = void 0;
      next.task = void 0;
    }
    all[agent] = next;
    await this.save(all);
  }
  /** Return the agent's status, or null when unknown. */
  async get(agent) {
    const all = await this.loadAll();
    return all[agent] || null;
  }
  /** Return the full agent -> status map. */
  async list() {
    return this.loadAll();
  }
  /** Load the status map; a missing or corrupt file yields an empty map. */
  async loadAll() {
    const raw = await this.storage.read(STATUS_KEY);
    if (!raw) return {};
    try {
      return JSON.parse(raw);
    } catch {
      return {};
    }
  }
  async save(statuses) {
    await this.storage.write(STATUS_KEY, JSON.stringify(statuses, null, 2));
  }
};
|
|
1424
|
-
|
|
1425
|
-
//#endregion
|
|
1426
|
-
//#region src/workflow/context/file-provider.ts
|
|
1427
|
-
/**
|
|
1428
|
-
* File Context Provider
|
|
1429
|
-
* Composes default stores with FileStorage backend.
|
|
1430
|
-
* Includes instance lock to prevent concurrent access to the same context directory.
|
|
1431
|
-
*/
|
|
1432
|
-
// Bundler-generated lazy export namespace for src/workflow/context/file-provider.ts.
// Each getter defers evaluation until the binding is accessed.
var file_provider_exports = /* @__PURE__ */ __exportAll({
  FileContextProvider: () => FileContextProvider,
  createFileContextProvider: () => createFileContextProvider,
  getDefaultContextDir: () => getDefaultContextDir,
  resolveContextDir: () => resolveContextDir
});
|
|
1438
|
-
/** Lock file name within context directory */
|
|
1439
|
-
/** Lock file name within context directory */
const LOCK_FILE = "_state/instance.lock";
/**
 * File-based ContextProvider.
 * Creates default stores backed by a shared FileStorage.
 *
 * Adds instance locking: only one process can hold the lock at a time.
 * Stale locks (from crashed processes) are automatically cleaned up.
 */
var FileContextProvider = class extends ContextProviderImpl {
  // Absolute path of this context's lock file.
  lockPath;
  constructor(storage, validAgents, contextDir) {
    const channel = new DefaultChannelStore(storage, validAgents);
    const inbox = new DefaultInboxStore(channel, storage);
    const documents = new DefaultDocumentStore(storage);
    const resources = new DefaultResourceStore(storage);
    const status = new DefaultStatusStore(storage);
    super(channel, inbox, documents, resources, status, validAgents);
    this.contextDir = contextDir;
    this.lockPath = join(contextDir, LOCK_FILE);
  }
  /**
   * Acquire instance lock.
   * Throws if another live process holds the lock.
   * Automatically cleans up stale locks from dead processes:
   * - process.kill(pid, 0) probes liveness; if the probe throws (dead pid),
   *   the inner catch swallows it and the lock is treated as stale.
   * - a corrupt lock file (JSON parse failure) is also treated as stale
   *   via the outer catch.
   * Both catches rethrow only our own "locked" error so it escapes.
   */
  acquireLock() {
    if (existsSync(this.lockPath)) try {
      const existing = JSON.parse(readFileSync(this.lockPath, "utf-8"));
      try {
        process.kill(existing.pid, 0);
        throw new Error(`Context directory is locked by another process (PID ${existing.pid}, started ${existing.startedAt}). If the process is no longer running, delete ${this.lockPath}`);
      } catch (e) {
        if (e instanceof Error && e.message.includes("Context directory is locked")) throw e;
      }
    } catch (e) {
      if (e instanceof Error && e.message.includes("Context directory is locked")) throw e;
    }
    const lock = {
      pid: process.pid,
      startedAt: (/* @__PURE__ */ new Date()).toISOString()
    };
    const stateDir = join(this.contextDir, "_state");
    if (!existsSync(stateDir)) mkdirSync(stateDir, { recursive: true });
    writeFileSync(this.lockPath, JSON.stringify(lock, null, 2));
  }
  /**
   * Release instance lock.
   * Only removes the file if this process owns it; safe to call when the
   * lock is not held (no-op), and any I/O error is ignored.
   */
  releaseLock() {
    try {
      if (existsSync(this.lockPath)) {
        if (JSON.parse(readFileSync(this.lockPath, "utf-8")).pid === process.pid) unlinkSync(this.lockPath);
      }
    } catch {}
  }
  /**
   * Override destroy to release lock and clean up transient state.
   */
  async destroy() {
    await super.destroy();
    this.releaseLock();
  }
};
|
|
1503
|
-
/**
|
|
1504
|
-
* Resolve a context directory template to an absolute path.
|
|
1505
|
-
*
|
|
1506
|
-
* Supports:
|
|
1507
|
-
* - ${{ workflow.name }} — substituted with workflowName
|
|
1508
|
-
* - ${{ workflow.tag }} — substituted with tag
|
|
1509
|
-
* - ~ expansion to home directory
|
|
1510
|
-
* - Relative paths resolved against baseDir (or cwd if not provided)
|
|
1511
|
-
* - Absolute paths used as-is
|
|
1512
|
-
*/
|
|
1513
|
-
function resolveContextDir(dirTemplate, opts) {
|
|
1514
|
-
const workflow = opts.workflow ?? opts.workflowName ?? "global";
|
|
1515
|
-
const workflowName = opts.workflowName ?? workflow;
|
|
1516
|
-
const tag = opts.tag ?? "main";
|
|
1517
|
-
let dir = dirTemplate.replace("${{ workflow.name }}", workflowName).replace("${{ workflow.tag }}", tag);
|
|
1518
|
-
if (dir.startsWith("~/")) dir = join(homedir(), dir.slice(2));
|
|
1519
|
-
else if (dir === "~") dir = homedir();
|
|
1520
|
-
else if (!isAbsolute(dir)) dir = join(opts.baseDir ?? process.cwd(), dir);
|
|
1521
|
-
return dir;
|
|
1522
|
-
}
|
|
1523
|
-
/**
|
|
1524
|
-
* Resolve context dir for a workflow:tag using default template.
|
|
1525
|
-
* Shorthand for the common case.
|
|
1526
|
-
* @param workflow Workflow name (defaults to "global")
|
|
1527
|
-
* @param tag Workflow instance tag (defaults to "main")
|
|
1528
|
-
*/
|
|
1529
|
-
function getDefaultContextDir(workflow, tag) {
|
|
1530
|
-
const wf = workflow ?? "global";
|
|
1531
|
-
const t = tag ?? "main";
|
|
1532
|
-
return resolveContextDir(CONTEXT_DEFAULTS.dir, {
|
|
1533
|
-
workflow: wf,
|
|
1534
|
-
tag: t
|
|
1535
|
-
});
|
|
1536
|
-
}
|
|
1537
|
-
/**
|
|
1538
|
-
* Create a FileContextProvider with default paths.
|
|
1539
|
-
*
|
|
1540
|
-
* Directory layout:
|
|
1541
|
-
* contextDir/
|
|
1542
|
-
* ├── channel.jsonl # Channel log (JSONL)
|
|
1543
|
-
* ├── documents/ # Team documents
|
|
1544
|
-
* │ └── notes.md # Default document
|
|
1545
|
-
* ├── resources/ # Resource blobs
|
|
1546
|
-
* ├── _state/
|
|
1547
|
-
* │ ├── inbox.json # Inbox read cursors
|
|
1548
|
-
* │ ├── instance.lock # Instance lock (PID-based)
|
|
1549
|
-
* │ └── proposals.json # Proposal state
|
|
1550
|
-
* └── ...
|
|
1551
|
-
*/
|
|
1552
|
-
function createFileContextProvider(contextDir, validAgents) {
|
|
1553
|
-
return new FileContextProvider(new FileStorage(contextDir), validAgents, contextDir);
|
|
1554
|
-
}
|
|
1555
|
-
|
|
1556
|
-
//#endregion
|
|
1557
|
-
//#region src/workflow/context/http-transport.ts
|
|
1558
|
-
/**
|
|
1559
|
-
* HTTP-based MCP Transport
|
|
1560
|
-
*
|
|
1561
|
-
* Hosts MCP server over HTTP using StreamableHTTPServerTransport.
|
|
1562
|
-
* CLI agents (cursor, claude, codex) connect directly via URL — no subprocess bridge needed.
|
|
1563
|
-
*
|
|
1564
|
-
* Each agent gets a unique URL: http://localhost:<port>/mcp?agent=<name>
|
|
1565
|
-
* The agent name is used as the MCP session ID, so tool handlers
|
|
1566
|
-
* receive it via extra.sessionId → getAgentId().
|
|
1567
|
-
*/
|
|
1568
|
-
/**
|
|
1569
|
-
* Parse request body as JSON
|
|
1570
|
-
*/
|
|
1571
|
-
function parseRequestBody(req) {
|
|
1572
|
-
return new Promise((resolve, reject) => {
|
|
1573
|
-
const chunks = [];
|
|
1574
|
-
req.on("data", (chunk) => chunks.push(chunk));
|
|
1575
|
-
req.on("end", () => {
|
|
1576
|
-
try {
|
|
1577
|
-
const body = Buffer.concat(chunks).toString();
|
|
1578
|
-
resolve(body ? JSON.parse(body) : void 0);
|
|
1579
|
-
} catch (err) {
|
|
1580
|
-
reject(err);
|
|
1581
|
-
}
|
|
1582
|
-
});
|
|
1583
|
-
req.on("error", reject);
|
|
1584
|
-
});
|
|
1585
|
-
}
|
|
1586
|
-
/**
|
|
1587
|
-
* Check if a JSON-RPC message is an initialize request
|
|
1588
|
-
*/
|
|
1589
|
-
function isInitializeRequest(body) {
|
|
1590
|
-
if (Array.isArray(body)) return body.some((msg) => msg?.method === "initialize");
|
|
1591
|
-
return body?.method === "initialize";
|
|
1592
|
-
}
|
|
1593
|
-
/**
|
|
1594
|
-
* Start an HTTP MCP server
|
|
1595
|
-
*
|
|
1596
|
-
* Agents connect via: http://localhost:<port>/mcp?agent=<name>
|
|
1597
|
-
* The server creates a per-session StreamableHTTPServerTransport and McpServer.
|
|
1598
|
-
*/
|
|
1599
|
-
async function runWithHttp(options) {
|
|
1600
|
-
const { createServerInstance, port = 0, onConnect, onDisconnect } = options;
|
|
1601
|
-
const sessions = /* @__PURE__ */ new Map();
|
|
1602
|
-
const httpServer = createServer(async (req, res) => {
|
|
1603
|
-
const reqUrl = new URL(req.url || "/", `http://localhost`);
|
|
1604
|
-
if (!reqUrl.pathname.startsWith("/mcp")) {
|
|
1605
|
-
res.writeHead(404, { "Content-Type": "application/json" });
|
|
1606
|
-
res.end(JSON.stringify({ error: "Not found" }));
|
|
1607
|
-
return;
|
|
1608
|
-
}
|
|
1609
|
-
const agentName = reqUrl.searchParams.get("agent") || "anonymous";
|
|
1610
|
-
const sessionId = req.headers["mcp-session-id"];
|
|
1611
|
-
if (sessionId && sessions.has(sessionId)) {
|
|
1612
|
-
const session = sessions.get(sessionId);
|
|
1613
|
-
if (req.method === "DELETE") {
|
|
1614
|
-
await session.transport.close();
|
|
1615
|
-
sessions.delete(sessionId);
|
|
1616
|
-
if (onDisconnect) onDisconnect(session.agentId, sessionId);
|
|
1617
|
-
res.writeHead(200);
|
|
1618
|
-
res.end();
|
|
1619
|
-
return;
|
|
1620
|
-
}
|
|
1621
|
-
const body = req.method === "POST" ? await parseRequestBody(req) : void 0;
|
|
1622
|
-
await session.transport.handleRequest(req, res, body);
|
|
1623
|
-
return;
|
|
1624
|
-
}
|
|
1625
|
-
if (req.method === "POST") {
|
|
1626
|
-
const body = await parseRequestBody(req);
|
|
1627
|
-
if (!isInitializeRequest(body)) {
|
|
1628
|
-
res.writeHead(400, { "Content-Type": "application/json" });
|
|
1629
|
-
res.end(JSON.stringify({ error: "Bad request: session required" }));
|
|
1630
|
-
return;
|
|
1631
|
-
}
|
|
1632
|
-
const transport = new StreamableHTTPServerTransport({
|
|
1633
|
-
sessionIdGenerator: () => `${agentName}-${randomUUID().slice(0, 8)}`,
|
|
1634
|
-
onsessioninitialized: (sid) => {
|
|
1635
|
-
sessions.set(sid, {
|
|
1636
|
-
transport,
|
|
1637
|
-
agentId: agentName
|
|
1638
|
-
});
|
|
1639
|
-
if (onConnect) onConnect(agentName, sid);
|
|
1640
|
-
}
|
|
1641
|
-
});
|
|
1642
|
-
Object.defineProperty(transport, "_agentId", {
|
|
1643
|
-
value: agentName,
|
|
1644
|
-
writable: true
|
|
1645
|
-
});
|
|
1646
|
-
await createServerInstance().connect(transport);
|
|
1647
|
-
await transport.handleRequest(req, res, body);
|
|
1648
|
-
return;
|
|
1649
|
-
}
|
|
1650
|
-
if (req.method === "GET") {
|
|
1651
|
-
res.writeHead(400, { "Content-Type": "application/json" });
|
|
1652
|
-
res.end(JSON.stringify({ error: "Session ID required for GET requests" }));
|
|
1653
|
-
return;
|
|
1654
|
-
}
|
|
1655
|
-
res.writeHead(405, { "Content-Type": "application/json" });
|
|
1656
|
-
res.end(JSON.stringify({ error: "Method not allowed" }));
|
|
1657
|
-
});
|
|
1658
|
-
const actualPort = await new Promise((resolve, reject) => {
|
|
1659
|
-
httpServer.on("error", reject);
|
|
1660
|
-
httpServer.listen(port, "127.0.0.1", () => {
|
|
1661
|
-
httpServer.removeListener("error", reject);
|
|
1662
|
-
const addr = httpServer.address();
|
|
1663
|
-
if (typeof addr === "object" && addr) resolve(addr.port);
|
|
1664
|
-
else reject(/* @__PURE__ */ new Error("Failed to get server address"));
|
|
1665
|
-
});
|
|
1666
|
-
});
|
|
1667
|
-
return {
|
|
1668
|
-
httpServer,
|
|
1669
|
-
url: `http://127.0.0.1:${actualPort}/mcp`,
|
|
1670
|
-
port: actualPort,
|
|
1671
|
-
sessions,
|
|
1672
|
-
async close() {
|
|
1673
|
-
for (const [sid, session] of sessions) {
|
|
1674
|
-
await session.transport.close();
|
|
1675
|
-
if (onDisconnect) onDisconnect(session.agentId, sid);
|
|
1676
|
-
}
|
|
1677
|
-
sessions.clear();
|
|
1678
|
-
await new Promise((resolve) => {
|
|
1679
|
-
httpServer.close(() => resolve());
|
|
1680
|
-
});
|
|
1681
|
-
}
|
|
1682
|
-
};
|
|
1683
|
-
}
|
|
1684
|
-
|
|
1685
|
-
//#endregion
|
|
1686
|
-
//#region src/workflow/loop/types.ts
|
|
1687
|
-
/** Default loop configuration values */
|
|
1688
|
-
const LOOP_DEFAULTS = {
|
|
1689
|
-
pollInterval: 5e3,
|
|
1690
|
-
retry: {
|
|
1691
|
-
maxAttempts: 3,
|
|
1692
|
-
backoffMs: 1e3,
|
|
1693
|
-
backoffMultiplier: 2
|
|
1694
|
-
},
|
|
1695
|
-
recentChannelLimit: 50,
|
|
1696
|
-
idleDebounceMs: 2e3
|
|
1697
|
-
};
|
|
1698
|
-
|
|
1699
|
-
//#endregion
|
|
1700
|
-
//#region src/workflow/loop/prompt.ts
|
|
1701
|
-
/**
|
|
1702
|
-
* Format inbox messages for display
|
|
1703
|
-
*/
|
|
1704
|
-
function formatInbox(inbox) {
|
|
1705
|
-
if (inbox.length === 0) return "(no messages)";
|
|
1706
|
-
return inbox.map((m) => {
|
|
1707
|
-
const priority = m.priority === "high" ? " [HIGH]" : "";
|
|
1708
|
-
const time = m.entry.timestamp.slice(11, 19);
|
|
1709
|
-
const dm = m.entry.to ? " [DM]" : "";
|
|
1710
|
-
return `- [${time}] From @${m.entry.from}${priority}${dm}: ${m.entry.content}`;
|
|
1711
|
-
}).join("\n");
|
|
1712
|
-
}
|
|
1713
|
-
/** Project context (what codebase to work on) */
|
|
1714
|
-
const projectSection = (ctx) => `## Project\nWorking on: ${ctx.projectDir}`;
|
|
1715
|
-
/** Inbox (unread messages for this agent) */
|
|
1716
|
-
const inboxSection = (ctx) => {
|
|
1717
|
-
const count = ctx.inbox.length;
|
|
1718
|
-
return `## Inbox (${count} ${count === 1 ? "message" : "messages"} for you)\n${formatInbox(ctx.inbox)}`;
|
|
1719
|
-
};
|
|
1720
|
-
/** Recent activity hint (use tool instead of injecting messages) */
|
|
1721
|
-
const activitySection = () => "## Recent Activity\nUse channel_read tool to view recent channel messages and conversation context if needed.";
|
|
1722
|
-
/** Shared document section */
|
|
1723
|
-
const documentSection = (ctx) => ctx.documentContent ? `## Shared Document\n${ctx.documentContent}` : null;
|
|
1724
|
-
/** Retry notice */
|
|
1725
|
-
const retrySection = (ctx) => ctx.retryAttempt > 1 ? `## Note\nThis is retry attempt ${ctx.retryAttempt}. Previous attempt failed.` : null;
|
|
1726
|
-
/** MCP tool instructions */
|
|
1727
|
-
const instructionsSection = (ctx) => {
|
|
1728
|
-
const lines = [];
|
|
1729
|
-
lines.push("## Instructions");
|
|
1730
|
-
lines.push("You are an agent in a multi-agent workflow. Communicate ONLY through the MCP tools below.");
|
|
1731
|
-
lines.push("Your text output is NOT seen by other agents — you MUST use channel_send to communicate.");
|
|
1732
|
-
lines.push("");
|
|
1733
|
-
lines.push("### Channel Tools");
|
|
1734
|
-
lines.push("- **channel_send**: Send a message to the shared channel. Use @agentname to mention/notify.");
|
|
1735
|
-
lines.push(" Use the \"to\" parameter for private DMs: channel_send({ message: \"...\", to: \"bob\" })");
|
|
1736
|
-
lines.push("- **channel_read**: Read recent channel messages (DMs and logs are auto-filtered).");
|
|
1737
|
-
lines.push("");
|
|
1738
|
-
lines.push("### Team Tools");
|
|
1739
|
-
lines.push("- **team_members**: List all agents you can @mention. Pass includeStatus=true to see their current state and tasks.");
|
|
1740
|
-
lines.push("- **team_doc_read/write/append/list/create**: Shared team documents.");
|
|
1741
|
-
lines.push("");
|
|
1742
|
-
lines.push("### Personal Tools");
|
|
1743
|
-
lines.push("- **my_inbox**: Check your unread messages.");
|
|
1744
|
-
lines.push("- **my_inbox_ack**: Acknowledge messages after processing (pass the latest message ID).");
|
|
1745
|
-
lines.push("- **my_status_set**: Update your status. Call when starting work (state='running', task='...') or when done (state='idle').");
|
|
1746
|
-
lines.push("");
|
|
1747
|
-
lines.push("### Proposal & Voting Tools");
|
|
1748
|
-
lines.push("- **team_proposal_create**: Create a proposal for team voting (types: election, decision, approval, assignment).");
|
|
1749
|
-
lines.push("- **team_vote**: Cast your vote on an active proposal. You can change your vote by voting again.");
|
|
1750
|
-
lines.push("- **team_proposal_status**: Check status of a proposal, or list all active proposals.");
|
|
1751
|
-
lines.push("- **team_proposal_cancel**: Cancel a proposal you created.");
|
|
1752
|
-
lines.push("");
|
|
1753
|
-
lines.push("### Resource Tools");
|
|
1754
|
-
lines.push("- **resource_create**: Store large content, get a reference (resource:id) for use anywhere.");
|
|
1755
|
-
lines.push("- **resource_read**: Read resource content by ID.");
|
|
1756
|
-
if (ctx.feedback) {
|
|
1757
|
-
lines.push("");
|
|
1758
|
-
lines.push("### Feedback Tool");
|
|
1759
|
-
lines.push("- **feedback_submit**: Report workflow improvement needs — a missing tool, an awkward step, or a capability gap.");
|
|
1760
|
-
lines.push(" Only use when you genuinely hit a pain point during your work.");
|
|
1761
|
-
}
|
|
1762
|
-
return lines.join("\n");
|
|
1763
|
-
};
|
|
1764
|
-
/** Workflow instructions (read → work → ack → exit) */
|
|
1765
|
-
const workflowSection = () => {
|
|
1766
|
-
const lines = [];
|
|
1767
|
-
lines.push("### Workflow");
|
|
1768
|
-
lines.push("1. Read your inbox messages above");
|
|
1769
|
-
lines.push("2. Do your assigned work using channel_send with @mentions");
|
|
1770
|
-
lines.push("3. Acknowledge your inbox with my_inbox_ack");
|
|
1771
|
-
lines.push("4. Exit when your task is complete");
|
|
1772
|
-
return lines.join("\n");
|
|
1773
|
-
};
|
|
1774
|
-
/** Exit guidance (when to stop) */
|
|
1775
|
-
const exitSection = () => {
|
|
1776
|
-
const lines = [];
|
|
1777
|
-
lines.push("### IMPORTANT: When to stop");
|
|
1778
|
-
lines.push("- Once your assigned task is complete, acknowledge your inbox and exit. Do NOT keep chatting.");
|
|
1779
|
-
lines.push("- Do NOT send pleasantries (\"you're welcome\", \"glad to help\", \"thanks again\") — they trigger unnecessary cycles.");
|
|
1780
|
-
lines.push("- Do NOT @mention another agent in your final message unless you need them to do more work.");
|
|
1781
|
-
lines.push("- If you receive a thank-you or acknowledgment, just call my_inbox_ack and exit. Do not reply.");
|
|
1782
|
-
return lines.join("\n");
|
|
1783
|
-
};
|
|
1784
|
-
/**
|
|
1785
|
-
* Default prompt sections — produces the same output as the original
|
|
1786
|
-
* monolithic buildAgentPrompt. New sections (soul, memory, todo) can
|
|
1787
|
-
* be inserted at specific positions without touching these.
|
|
1788
|
-
*/
|
|
1789
|
-
const DEFAULT_SECTIONS = [
|
|
1790
|
-
projectSection,
|
|
1791
|
-
inboxSection,
|
|
1792
|
-
activitySection,
|
|
1793
|
-
documentSection,
|
|
1794
|
-
retrySection,
|
|
1795
|
-
instructionsSection,
|
|
1796
|
-
workflowSection,
|
|
1797
|
-
exitSection
|
|
1798
|
-
];
|
|
1799
|
-
/**
|
|
1800
|
-
* Assemble prompt from sections. Joins non-null sections with blank lines.
|
|
1801
|
-
*/
|
|
1802
|
-
function assemblePrompt(sections, ctx) {
|
|
1803
|
-
return sections.map((section) => section(ctx)).filter((content) => content !== null).join("\n\n");
|
|
1804
|
-
}
|
|
1805
|
-
/**
|
|
1806
|
-
* Build the complete agent prompt from run context.
|
|
1807
|
-
*
|
|
1808
|
-
* Uses the default section list. For custom section lists,
|
|
1809
|
-
* use assemblePrompt() directly.
|
|
1810
|
-
*/
|
|
1811
|
-
function buildAgentPrompt(ctx) {
|
|
1812
|
-
return assemblePrompt(DEFAULT_SECTIONS, ctx);
|
|
1813
|
-
}
|
|
1814
|
-
|
|
1815
|
-
//#endregion
|
|
1816
|
-
//#region src/workflow/loop/mcp-config.ts
|
|
1817
|
-
/**
|
|
1818
|
-
* Workflow MCP Config Generation & Writing
|
|
1819
|
-
*
|
|
1820
|
-
* Two responsibilities:
|
|
1821
|
-
* 1. Generate MCP config for workflow HTTP transport
|
|
1822
|
-
* 2. Write backend-specific MCP config files to workspace
|
|
1823
|
-
*
|
|
1824
|
-
* Writing lives here (not in backends) because it's workspace infrastructure,
|
|
1825
|
-
* not a backend concern. Backends only need their cwd set — they don't
|
|
1826
|
-
* need to know about MCP config file layout.
|
|
1827
|
-
*/
|
|
1828
|
-
/**
|
|
1829
|
-
* Generate MCP config for workflow context server.
|
|
1830
|
-
*
|
|
1831
|
-
* Uses HTTP transport — CLI agents connect directly via URL:
|
|
1832
|
-
* { type: "http", url: "http://127.0.0.1:<port>/mcp?agent=<name>" }
|
|
1833
|
-
*/
|
|
1834
|
-
function generateWorkflowMCPConfig(mcpUrl, agentName) {
|
|
1835
|
-
const url = `${mcpUrl}?agent=${encodeURIComponent(agentName)}`;
|
|
1836
|
-
return { mcpServers: { "workflow-context": {
|
|
1837
|
-
type: "http",
|
|
1838
|
-
url
|
|
1839
|
-
} } };
|
|
1840
|
-
}
|
|
1841
|
-
/**
|
|
1842
|
-
* Write MCP config to a workspace directory in the format expected by a backend.
|
|
1843
|
-
*
|
|
1844
|
-
* Each CLI backend reads MCP config from a different location:
|
|
1845
|
-
* - claude: {workspace}/mcp-config.json (passed via --mcp-config flag)
|
|
1846
|
-
* - cursor: {workspace}/.cursor/mcp.json (auto-discovered by cursor)
|
|
1847
|
-
* - codex: {workspace}/.codex/config.yaml (auto-discovered by codex)
|
|
1848
|
-
* - opencode: {workspace}/opencode.json (auto-discovered by opencode)
|
|
1849
|
-
* - default/mock: no config file needed (MCP handled by loop via SDK)
|
|
1850
|
-
*/
|
|
1851
|
-
function writeBackendMcpConfig(backendType, workspaceDir, mcpConfig) {
|
|
1852
|
-
ensureDir(workspaceDir);
|
|
1853
|
-
switch (backendType) {
|
|
1854
|
-
case "claude":
|
|
1855
|
-
writeJsonConfig(join(workspaceDir, "mcp-config.json"), mcpConfig);
|
|
1856
|
-
break;
|
|
1857
|
-
case "cursor": {
|
|
1858
|
-
const cursorDir = join(workspaceDir, ".cursor");
|
|
1859
|
-
ensureDir(cursorDir);
|
|
1860
|
-
writeJsonConfig(join(cursorDir, "mcp.json"), mcpConfig);
|
|
1861
|
-
break;
|
|
1862
|
-
}
|
|
1863
|
-
case "codex": {
|
|
1864
|
-
const codexDir = join(workspaceDir, ".codex");
|
|
1865
|
-
ensureDir(codexDir);
|
|
1866
|
-
const codexConfig = { mcp_servers: mcpConfig.mcpServers };
|
|
1867
|
-
writeFileSync(join(codexDir, "config.yaml"), stringify(codexConfig));
|
|
1868
|
-
break;
|
|
1869
|
-
}
|
|
1870
|
-
case "opencode": {
|
|
1871
|
-
const opencodeMcp = {};
|
|
1872
|
-
for (const [name, config] of Object.entries(mcpConfig.mcpServers)) {
|
|
1873
|
-
const serverConfig = config;
|
|
1874
|
-
if (serverConfig.type === "http") opencodeMcp[name] = serverConfig;
|
|
1875
|
-
else opencodeMcp[name] = {
|
|
1876
|
-
type: "local",
|
|
1877
|
-
command: [serverConfig.command, ...serverConfig.args || []],
|
|
1878
|
-
enabled: true,
|
|
1879
|
-
...serverConfig.env ? { environment: serverConfig.env } : {}
|
|
1880
|
-
};
|
|
1881
|
-
}
|
|
1882
|
-
const opencodeConfig = {
|
|
1883
|
-
$schema: "https://opencode.ai/config.json",
|
|
1884
|
-
mcp: opencodeMcp
|
|
1885
|
-
};
|
|
1886
|
-
writeJsonConfig(join(workspaceDir, "opencode.json"), opencodeConfig);
|
|
1887
|
-
break;
|
|
1888
|
-
}
|
|
1889
|
-
}
|
|
1890
|
-
}
|
|
1891
|
-
function ensureDir(dir) {
|
|
1892
|
-
if (!existsSync(dir)) mkdirSync(dir, { recursive: true });
|
|
1893
|
-
}
|
|
1894
|
-
function writeJsonConfig(path, data) {
|
|
1895
|
-
writeFileSync(path, JSON.stringify(data, null, 2));
|
|
1896
|
-
}
|
|
1897
|
-
|
|
1898
|
-
//#endregion
|
|
1899
|
-
//#region src/daemon/cron.ts
|
|
1900
|
-
/**
|
|
1901
|
-
* Minimal cron expression parser.
|
|
1902
|
-
* Supports standard 5-field cron: minute hour day-of-month month day-of-week
|
|
1903
|
-
*
|
|
1904
|
-
* Field syntax:
|
|
1905
|
-
* * every value
|
|
1906
|
-
* N exact value
|
|
1907
|
-
* N-M range (inclusive)
|
|
1908
|
-
* N,M,O list
|
|
1909
|
-
* * /step every step (e.g. * /15 = 0,15,30,45) [no space — formatting only]
|
|
1910
|
-
* N-M/step range with step
|
|
1911
|
-
*/
|
|
1912
|
-
function range(min, max) {
|
|
1913
|
-
const r = [];
|
|
1914
|
-
for (let i = min; i <= max; i++) r.push(i);
|
|
1915
|
-
return r;
|
|
1916
|
-
}
|
|
1917
|
-
function parseIntStrict(s, context) {
|
|
1918
|
-
const n = parseInt(s, 10);
|
|
1919
|
-
if (isNaN(n)) throw new Error(`Invalid number "${s}" in ${context}`);
|
|
1920
|
-
return n;
|
|
1921
|
-
}
|
|
1922
|
-
function parseCronField(field, min, max) {
|
|
1923
|
-
const values = /* @__PURE__ */ new Set();
|
|
1924
|
-
for (const part of field.split(",")) if (part === "*") for (const v of range(min, max)) values.add(v);
|
|
1925
|
-
else if (part.includes("/")) {
|
|
1926
|
-
const [rangeStr, stepStr] = part.split("/");
|
|
1927
|
-
const step = parseIntStrict(stepStr, `step "${part}"`);
|
|
1928
|
-
if (step <= 0) throw new Error(`Invalid step: ${part}`);
|
|
1929
|
-
let lo = min;
|
|
1930
|
-
let hi = max;
|
|
1931
|
-
if (rangeStr !== "*") if (rangeStr.includes("-")) {
|
|
1932
|
-
const parts = rangeStr.split("-");
|
|
1933
|
-
lo = parseIntStrict(parts[0], `range "${part}"`);
|
|
1934
|
-
hi = parseIntStrict(parts[1], `range "${part}"`);
|
|
1935
|
-
} else {
|
|
1936
|
-
lo = parseIntStrict(rangeStr, `field "${part}"`);
|
|
1937
|
-
hi = max;
|
|
1938
|
-
}
|
|
1939
|
-
for (let v = lo; v <= hi; v += step) values.add(v);
|
|
1940
|
-
} else if (part.includes("-")) {
|
|
1941
|
-
const parts = part.split("-");
|
|
1942
|
-
const lo = parseIntStrict(parts[0], `range "${part}"`);
|
|
1943
|
-
const hi = parseIntStrict(parts[1], `range "${part}"`);
|
|
1944
|
-
for (const v of range(lo, hi)) values.add(v);
|
|
1945
|
-
} else values.add(parseIntStrict(part, `field "${field}"`));
|
|
1946
|
-
return values;
|
|
1947
|
-
}
|
|
1948
|
-
/**
|
|
1949
|
-
* Parse a 5-field cron expression into sets of matching values.
|
|
1950
|
-
*/
|
|
1951
|
-
function parseCron(expr) {
|
|
1952
|
-
const parts = expr.trim().split(/\s+/);
|
|
1953
|
-
if (parts.length !== 5) throw new Error(`Invalid cron expression (expected 5 fields): ${expr}`);
|
|
1954
|
-
return {
|
|
1955
|
-
minutes: parseCronField(parts[0], 0, 59),
|
|
1956
|
-
hours: parseCronField(parts[1], 0, 23),
|
|
1957
|
-
daysOfMonth: parseCronField(parts[2], 1, 31),
|
|
1958
|
-
months: parseCronField(parts[3], 1, 12),
|
|
1959
|
-
daysOfWeek: parseCronField(parts[4], 0, 6)
|
|
1960
|
-
};
|
|
1961
|
-
}
|
|
1962
|
-
/**
|
|
1963
|
-
* Check if a Date matches a parsed cron expression.
|
|
1964
|
-
*/
|
|
1965
|
-
function matchesCron(date, fields) {
|
|
1966
|
-
return fields.minutes.has(date.getMinutes()) && fields.hours.has(date.getHours()) && fields.daysOfMonth.has(date.getDate()) && fields.months.has(date.getMonth() + 1) && fields.daysOfWeek.has(date.getDay());
|
|
1967
|
-
}
|
|
1968
|
-
/**
|
|
1969
|
-
* Calculate the next occurrence of a cron expression after `from`.
|
|
1970
|
-
* Searches forward minute-by-minute, up to 1 year.
|
|
1971
|
-
* Returns the Date of the next match.
|
|
1972
|
-
*/
|
|
1973
|
-
function nextCronTime(expr, from = /* @__PURE__ */ new Date()) {
|
|
1974
|
-
const fields = parseCron(expr);
|
|
1975
|
-
const next = new Date(from);
|
|
1976
|
-
next.setSeconds(0, 0);
|
|
1977
|
-
next.setMinutes(next.getMinutes() + 1);
|
|
1978
|
-
const maxMinutes = 366 * 24 * 60;
|
|
1979
|
-
for (let i = 0; i < maxMinutes; i++) {
|
|
1980
|
-
if (matchesCron(next, fields)) return next;
|
|
1981
|
-
next.setMinutes(next.getMinutes() + 1);
|
|
1982
|
-
}
|
|
1983
|
-
throw new Error(`No matching cron time found within 1 year: ${expr}`);
|
|
1984
|
-
}
|
|
1985
|
-
/**
|
|
1986
|
-
* Calculate ms until the next cron occurrence.
|
|
1987
|
-
*/
|
|
1988
|
-
function msUntilNextCron(expr, from = /* @__PURE__ */ new Date()) {
|
|
1989
|
-
return nextCronTime(expr, from).getTime() - from.getTime();
|
|
1990
|
-
}
|
|
1991
|
-
|
|
1992
|
-
//#endregion
|
|
1993
|
-
//#region src/workflow/loop/mock-runner.ts
|
|
1994
|
-
/**
|
|
1995
|
-
* Mock Agent Runner
|
|
1996
|
-
*
|
|
1997
|
-
* Orchestrates mock agent execution for workflow integration testing.
|
|
1998
|
-
* Uses AI SDK generateText with MockLanguageModelV3 and real MCP tool calls.
|
|
1999
|
-
*
|
|
2000
|
-
* This lives in the loop layer (not backends) because it does orchestration:
|
|
2001
|
-
* connecting to MCP, building prompts, managing tool loops.
|
|
2002
|
-
* The mock backend itself is just a simple send() adapter.
|
|
2003
|
-
*/
|
|
2004
|
-
/**
|
|
2005
|
-
* Connect to workflow MCP server via HTTP and create AI SDK tool wrappers
|
|
2006
|
-
*/
|
|
2007
|
-
async function createMCPToolBridge$1(mcpUrl, agentName) {
|
|
2008
|
-
const transport = new StreamableHTTPClientTransport(new URL(`${mcpUrl}?agent=${encodeURIComponent(agentName)}`));
|
|
2009
|
-
const client = new Client({
|
|
2010
|
-
name: agentName,
|
|
2011
|
-
version: "1.0.0"
|
|
2012
|
-
});
|
|
2013
|
-
await client.connect(transport);
|
|
2014
|
-
const { tools: mcpTools } = await client.listTools();
|
|
2015
|
-
const aiTools = {};
|
|
2016
|
-
for (const mcpTool of mcpTools) {
|
|
2017
|
-
const toolName = mcpTool.name;
|
|
2018
|
-
aiTools[toolName] = createTool({
|
|
2019
|
-
description: mcpTool.description || toolName,
|
|
2020
|
-
schema: mcpTool.inputSchema,
|
|
2021
|
-
execute: async (args) => {
|
|
2022
|
-
return (await client.callTool({
|
|
2023
|
-
name: toolName,
|
|
2024
|
-
arguments: args
|
|
2025
|
-
})).content;
|
|
2026
|
-
}
|
|
2027
|
-
});
|
|
2028
|
-
}
|
|
2029
|
-
return {
|
|
2030
|
-
tools: aiTools,
|
|
2031
|
-
close: () => client.close()
|
|
2032
|
-
};
|
|
2033
|
-
}
|
|
2034
|
-
/**
|
|
2035
|
-
* Run a mock agent with AI SDK and real MCP tools.
|
|
2036
|
-
*
|
|
2037
|
-
* Used by the loop when backend.type === 'mock'.
|
|
2038
|
-
* Unlike real backends that just send(), the mock runner needs to:
|
|
2039
|
-
* 1. Connect to MCP server for real tool execution
|
|
2040
|
-
* 2. Generate scripted tool calls via MockLanguageModelV3
|
|
2041
|
-
* 3. Execute the full tool loop to test channel/document flow
|
|
2042
|
-
*/
|
|
2043
|
-
async function runMockAgent(ctx, debugLog) {
|
|
2044
|
-
const startTime = Date.now();
|
|
2045
|
-
const log = debugLog || (() => {});
|
|
2046
|
-
try {
|
|
2047
|
-
if (!ctx.mcpUrl) return {
|
|
2048
|
-
success: false,
|
|
2049
|
-
error: "Mock runner requires mcpUrl (HTTP MCP server)",
|
|
2050
|
-
duration: 0
|
|
2051
|
-
};
|
|
2052
|
-
const mcp = await createMCPToolBridge$1(ctx.mcpUrl, ctx.name);
|
|
2053
|
-
log(`MCP connected, ${Object.keys(mcp.tools).length} tools`);
|
|
2054
|
-
const inboxSummary = ctx.inbox.map((m) => `${m.entry.from}: ${m.entry.content.slice(0, 80).replace(/@/g, "")}`).join("; ");
|
|
2055
|
-
const mockModel = new MockLanguageModelV3({ doGenerate: mockValues({
|
|
2056
|
-
content: [{
|
|
2057
|
-
type: "tool-call",
|
|
2058
|
-
toolCallId: `call-${ctx.name}-${Date.now()}`,
|
|
2059
|
-
toolName: "channel_send",
|
|
2060
|
-
input: JSON.stringify({ message: `[${ctx.name}] Processed: ${inboxSummary.slice(0, 200)}` })
|
|
2061
|
-
}],
|
|
2062
|
-
finishReason: {
|
|
2063
|
-
unified: "tool-calls",
|
|
2064
|
-
raw: "tool_use"
|
|
2065
|
-
},
|
|
2066
|
-
usage: {
|
|
2067
|
-
inputTokens: {
|
|
2068
|
-
total: 100,
|
|
2069
|
-
noCache: 100,
|
|
2070
|
-
cacheRead: 0,
|
|
2071
|
-
cacheWrite: 0
|
|
2072
|
-
},
|
|
2073
|
-
outputTokens: {
|
|
2074
|
-
total: 50,
|
|
2075
|
-
text: 50,
|
|
2076
|
-
reasoning: 0
|
|
2077
|
-
}
|
|
2078
|
-
}
|
|
2079
|
-
}, {
|
|
2080
|
-
content: [{
|
|
2081
|
-
type: "text",
|
|
2082
|
-
text: `${ctx.name} done.`
|
|
2083
|
-
}],
|
|
2084
|
-
finishReason: {
|
|
2085
|
-
unified: "stop",
|
|
2086
|
-
raw: "end_turn"
|
|
2087
|
-
},
|
|
2088
|
-
usage: {
|
|
2089
|
-
inputTokens: {
|
|
2090
|
-
total: 50,
|
|
2091
|
-
noCache: 50,
|
|
2092
|
-
cacheRead: 0,
|
|
2093
|
-
cacheWrite: 0
|
|
2094
|
-
},
|
|
2095
|
-
outputTokens: {
|
|
2096
|
-
total: 10,
|
|
2097
|
-
text: 10,
|
|
2098
|
-
reasoning: 0
|
|
2099
|
-
}
|
|
2100
|
-
}
|
|
2101
|
-
}) });
|
|
2102
|
-
const prompt = buildAgentPrompt(ctx);
|
|
2103
|
-
log(`Prompt (${prompt.length} chars)`);
|
|
2104
|
-
const result = await generateText({
|
|
2105
|
-
model: mockModel,
|
|
2106
|
-
tools: mcp.tools,
|
|
2107
|
-
prompt,
|
|
2108
|
-
system: ctx.agent.resolvedSystemPrompt,
|
|
2109
|
-
stopWhen: stepCountIs(3)
|
|
2110
|
-
});
|
|
2111
|
-
const totalToolCalls = result.steps.reduce((n, s) => n + s.toolCalls.length, 0);
|
|
2112
|
-
await mcp.close();
|
|
2113
|
-
return {
|
|
2114
|
-
success: true,
|
|
2115
|
-
duration: Date.now() - startTime,
|
|
2116
|
-
steps: result.steps.length,
|
|
2117
|
-
toolCalls: totalToolCalls
|
|
2118
|
-
};
|
|
2119
|
-
} catch (error) {
|
|
2120
|
-
return {
|
|
2121
|
-
success: false,
|
|
2122
|
-
error: error instanceof Error ? error.message : String(error),
|
|
2123
|
-
duration: Date.now() - startTime
|
|
2124
|
-
};
|
|
2125
|
-
}
|
|
2126
|
-
}
|
|
2127
|
-
|
|
2128
|
-
//#endregion
|
|
2129
|
-
//#region src/workflow/loop/sdk-runner.ts
|
|
2130
|
-
/**
|
|
2131
|
-
* SDK Agent Runner
|
|
2132
|
-
*
|
|
2133
|
-
* Runs SDK agents with full tool access in workflows:
|
|
2134
|
-
* - MCP context tools (channel_send, document_write, etc.)
|
|
2135
|
-
* - Bash tool for shell commands
|
|
2136
|
-
*
|
|
2137
|
-
* Same pattern as mock-runner.ts but with real models via createModelAsync.
|
|
2138
|
-
* This is the standard execution path for SDK backends in workflows —
|
|
2139
|
-
* all agents get MCP + bash regardless of backend type.
|
|
2140
|
-
*/
|
|
2141
|
-
/** Extract useful details from AI SDK errors (statusCode, url, responseBody) */
|
|
2142
|
-
function formatError(error) {
|
|
2143
|
-
if (!(error instanceof Error)) return String(error);
|
|
2144
|
-
const e = error;
|
|
2145
|
-
const parts = [error.message];
|
|
2146
|
-
if (e.statusCode) parts[0] = `HTTP ${e.statusCode}: ${error.message}`;
|
|
2147
|
-
if (e.url) parts.push(`url=${e.url}`);
|
|
2148
|
-
if (e.responseBody && typeof e.responseBody === "string") {
|
|
2149
|
-
const body = e.responseBody.length > 200 ? e.responseBody.slice(0, 200) + "…" : e.responseBody;
|
|
2150
|
-
parts.push(`body=${body}`);
|
|
2151
|
-
}
|
|
2152
|
-
return parts.join(" ");
|
|
2153
|
-
}
|
|
2154
|
-
/** Truncate string, flatten newlines */
|
|
2155
|
-
function truncate(s, max) {
|
|
2156
|
-
const flat = s.replace(/\s+/g, " ").trim();
|
|
2157
|
-
return flat.length > max ? flat.slice(0, max) + "…" : flat;
|
|
2158
|
-
}
|
|
2159
|
-
/** Format a tool call for concise single-line debug output (function call syntax) */
|
|
2160
|
-
function formatToolCall(tc) {
|
|
2161
|
-
const input = tc.input ?? tc.args ?? {};
|
|
2162
|
-
const pairs = Object.entries(input).map(([k, v]) => {
|
|
2163
|
-
return `${k}=${truncate(typeof v === "string" ? v : JSON.stringify(v), 60)}`;
|
|
2164
|
-
});
|
|
2165
|
-
return `${tc.toolName}(${pairs.join(", ")})`;
|
|
2166
|
-
}
|
|
2167
|
-
/**
|
|
2168
|
-
* Connect to workflow MCP server and create AI SDK tool wrappers.
|
|
2169
|
-
* Same bridge as mock-runner — extracted here for SDK agents.
|
|
2170
|
-
*/
|
|
2171
|
-
async function createMCPToolBridge(mcpUrl, agentName) {
|
|
2172
|
-
const transport = new StreamableHTTPClientTransport(new URL(`${mcpUrl}?agent=${encodeURIComponent(agentName)}`));
|
|
2173
|
-
const client = new Client({
|
|
2174
|
-
name: agentName,
|
|
2175
|
-
version: "1.0.0"
|
|
2176
|
-
});
|
|
2177
|
-
await client.connect(transport);
|
|
2178
|
-
const { tools: mcpTools } = await client.listTools();
|
|
2179
|
-
const aiTools = {};
|
|
2180
|
-
for (const mcpTool of mcpTools) {
|
|
2181
|
-
const toolName = mcpTool.name;
|
|
2182
|
-
aiTools[toolName] = createTool({
|
|
2183
|
-
description: mcpTool.description || toolName,
|
|
2184
|
-
schema: mcpTool.inputSchema,
|
|
2185
|
-
execute: async (args) => {
|
|
2186
|
-
return (await client.callTool({
|
|
2187
|
-
name: toolName,
|
|
2188
|
-
arguments: args
|
|
2189
|
-
})).content;
|
|
2190
|
-
}
|
|
2191
|
-
});
|
|
2192
|
-
}
|
|
2193
|
-
return {
|
|
2194
|
-
tools: aiTools,
|
|
2195
|
-
close: () => client.close()
|
|
2196
|
-
};
|
|
2197
|
-
}
|
|
2198
|
-
function createBashTool() {
|
|
2199
|
-
return createTool({
|
|
2200
|
-
description: "Execute a shell command and return stdout/stderr.",
|
|
2201
|
-
schema: {
|
|
2202
|
-
type: "object",
|
|
2203
|
-
properties: { command: {
|
|
2204
|
-
type: "string",
|
|
2205
|
-
description: "The shell command to execute"
|
|
2206
|
-
} },
|
|
2207
|
-
required: ["command"]
|
|
2208
|
-
},
|
|
2209
|
-
execute: async (args) => {
|
|
2210
|
-
const command = args.command;
|
|
2211
|
-
try {
|
|
2212
|
-
return execSync(command, {
|
|
2213
|
-
encoding: "utf-8",
|
|
2214
|
-
timeout: 12e4
|
|
2215
|
-
}).trim() || "(no output)";
|
|
2216
|
-
} catch (error) {
|
|
2217
|
-
return `Error (exit ${error.status}): ${error.stderr || error.message}`;
|
|
2218
|
-
}
|
|
2219
|
-
}
|
|
2220
|
-
});
|
|
2221
|
-
}
|
|
2222
|
-
/**
|
|
2223
|
-
* Run an SDK agent with real model + MCP tools + bash.
|
|
2224
|
-
*
|
|
2225
|
-
* Used by the loop when backend.type === 'default'.
|
|
2226
|
-
* Unlike the simple SdkBackend.send() (text-only), this runner:
|
|
2227
|
-
* 1. Connects to MCP server for context tools (channel, document)
|
|
2228
|
-
* 2. Adds bash tool for shell access
|
|
2229
|
-
* 3. Runs generateText with full tool loop
|
|
2230
|
-
*/
|
|
2231
|
-
async function runSdkAgent(ctx, debugLog) {
|
|
2232
|
-
const startTime = Date.now();
|
|
2233
|
-
const log = debugLog || (() => {});
|
|
2234
|
-
try {
|
|
2235
|
-
if (!ctx.mcpUrl) return {
|
|
2236
|
-
success: false,
|
|
2237
|
-
error: "SDK runner requires mcpUrl",
|
|
2238
|
-
duration: 0
|
|
2239
|
-
};
|
|
2240
|
-
const mcp = await createMCPToolBridge(ctx.mcpUrl, ctx.name);
|
|
2241
|
-
log(`MCP connected, ${Object.keys(mcp.tools).length} context tools`);
|
|
2242
|
-
const model = await createModelAsync(ctx.agent.model);
|
|
2243
|
-
const tools = {
|
|
2244
|
-
...mcp.tools,
|
|
2245
|
-
bash: createBashTool()
|
|
2246
|
-
};
|
|
2247
|
-
const prompt = buildAgentPrompt(ctx);
|
|
2248
|
-
log(`Prompt (${prompt.length} chars) → sdk with ${Object.keys(tools).length} tools`);
|
|
2249
|
-
let _stepNum = 0;
|
|
2250
|
-
const result = await generateText({
|
|
2251
|
-
model,
|
|
2252
|
-
tools,
|
|
2253
|
-
system: ctx.agent.resolvedSystemPrompt,
|
|
2254
|
-
prompt,
|
|
2255
|
-
maxOutputTokens: ctx.agent.max_tokens ?? 8192,
|
|
2256
|
-
stopWhen: stepCountIs(ctx.agent.max_steps ?? 200),
|
|
2257
|
-
onStepFinish: (step) => {
|
|
2258
|
-
_stepNum++;
|
|
2259
|
-
if (step.toolCalls?.length && ctx.eventLog) {
|
|
2260
|
-
for (const tc of step.toolCalls) if (tc.toolName === "bash") ctx.eventLog.toolCall(ctx.name, tc.toolName, formatToolCall(tc), "sdk");
|
|
2261
|
-
}
|
|
2262
|
-
}
|
|
2263
|
-
});
|
|
2264
|
-
const totalToolCalls = result.steps.reduce((n, s) => n + s.toolCalls.length, 0);
|
|
2265
|
-
const lastStep = result.steps[result.steps.length - 1];
|
|
2266
|
-
if (ctx.agent.max_steps && result.steps.length >= ctx.agent.max_steps && (lastStep?.toolCalls?.length ?? 0) > 0) {
|
|
2267
|
-
const warning = `⚠️ Agent reached max_steps limit (${ctx.agent.max_steps}) but wanted to continue. Consider increasing max_steps or removing the limit.`;
|
|
2268
|
-
log(warning);
|
|
2269
|
-
await ctx.provider.appendChannel(ctx.name, warning, { kind: "system" }).catch(() => {});
|
|
2270
|
-
}
|
|
2271
|
-
await mcp.close();
|
|
2272
|
-
return {
|
|
2273
|
-
success: true,
|
|
2274
|
-
duration: Date.now() - startTime,
|
|
2275
|
-
content: result.text,
|
|
2276
|
-
steps: result.steps.length,
|
|
2277
|
-
toolCalls: totalToolCalls
|
|
2278
|
-
};
|
|
2279
|
-
} catch (error) {
|
|
2280
|
-
return {
|
|
2281
|
-
success: false,
|
|
2282
|
-
error: formatError(error),
|
|
2283
|
-
duration: Date.now() - startTime
|
|
2284
|
-
};
|
|
2285
|
-
}
|
|
2286
|
-
}
|
|
2287
|
-
|
|
2288
|
-
//#endregion
|
|
2289
|
-
//#region src/workflow/loop/loop.ts
|
|
2290
|
-
/** Check if loop should continue running */
|
|
2291
|
-
function shouldContinue(state) {
|
|
2292
|
-
return state !== "stopped";
|
|
2293
|
-
}
|
|
2294
|
-
/**
|
|
2295
|
-
* Create an agent loop
|
|
2296
|
-
*
|
|
2297
|
-
* The loop:
|
|
2298
|
-
* 1. Polls for inbox messages on an interval
|
|
2299
|
-
* 2. Runs the agent when messages are found
|
|
2300
|
-
* 3. Acknowledges inbox only on successful run
|
|
2301
|
-
* 4. Retries with exponential backoff on failure
|
|
2302
|
-
* 5. Can be woken early via wake()
|
|
2303
|
-
*/
|
|
2304
|
-
function createAgentLoop(config) {
|
|
2305
|
-
const { name, agent, contextProvider, eventLog, mcpUrl, workspaceDir, projectDir, backend, onRunComplete, log = () => {}, feedback } = config;
|
|
2306
|
-
const infoLog = config.infoLog ?? log;
|
|
2307
|
-
const errorLog = config.errorLog ?? log;
|
|
2308
|
-
const pollInterval = config.pollInterval ?? LOOP_DEFAULTS.pollInterval;
|
|
2309
|
-
const retryConfig = {
|
|
2310
|
-
maxAttempts: config.retry?.maxAttempts ?? LOOP_DEFAULTS.retry.maxAttempts,
|
|
2311
|
-
backoffMs: config.retry?.backoffMs ?? LOOP_DEFAULTS.retry.backoffMs,
|
|
2312
|
-
backoffMultiplier: config.retry?.backoffMultiplier ?? LOOP_DEFAULTS.retry.backoffMultiplier
|
|
2313
|
-
};
|
|
2314
|
-
let state = "stopped";
|
|
2315
|
-
let wakeResolver = null;
|
|
2316
|
-
let pollTimeout = null;
|
|
2317
|
-
let directRunning = false;
|
|
2318
|
-
let _hasFailures = false;
|
|
2319
|
-
let _lastError;
|
|
2320
|
-
const scheduleConfig = agent.schedule;
|
|
2321
|
-
let resolvedSchedule;
|
|
2322
|
-
if (scheduleConfig) try {
|
|
2323
|
-
resolvedSchedule = resolveSchedule(scheduleConfig);
|
|
2324
|
-
} catch (err) {
|
|
2325
|
-
const msg = err instanceof Error ? err.message : String(err);
|
|
2326
|
-
throw new Error(`Agent "${name}" has invalid schedule config: ${msg}`);
|
|
2327
|
-
}
|
|
2328
|
-
let lastActivityTime = Date.now();
|
|
2329
|
-
/**
|
|
2330
|
-
* Wait for either poll interval or wake() call
|
|
2331
|
-
*/
|
|
2332
|
-
async function waitForWakeOrPoll() {
|
|
2333
|
-
return new Promise((resolve) => {
|
|
2334
|
-
wakeResolver = resolve;
|
|
2335
|
-
pollTimeout = setTimeout(() => {
|
|
2336
|
-
wakeResolver = null;
|
|
2337
|
-
resolve();
|
|
2338
|
-
}, pollInterval);
|
|
2339
|
-
});
|
|
2340
|
-
}
|
|
2341
|
-
/**
|
|
2342
|
-
* Main poll loop
|
|
2343
|
-
*/
|
|
2344
|
-
async function runLoop() {
|
|
2345
|
-
while (shouldContinue(state)) {
|
|
2346
|
-
await waitForWakeOrPoll();
|
|
2347
|
-
if (!shouldContinue(state)) break;
|
|
2348
|
-
if (directRunning) continue;
|
|
2349
|
-
const inbox = await contextProvider.getInbox(name);
|
|
2350
|
-
if (inbox.length === 0) {
|
|
2351
|
-
if (resolvedSchedule) {
|
|
2352
|
-
const now = Date.now();
|
|
2353
|
-
let wakeupDue = false;
|
|
2354
|
-
if (resolvedSchedule.type === "interval") {
|
|
2355
|
-
if (now - lastActivityTime >= resolvedSchedule.ms) wakeupDue = true;
|
|
2356
|
-
} else if (resolvedSchedule.type === "cron") {
|
|
2357
|
-
const msTillNext = msUntilNextCron(resolvedSchedule.expr, new Date(lastActivityTime));
|
|
2358
|
-
if (now >= lastActivityTime + msTillNext) wakeupDue = true;
|
|
2359
|
-
}
|
|
2360
|
-
if (wakeupDue) {
|
|
2361
|
-
const wakeupPrompt = resolvedSchedule.prompt ?? "Scheduled wakeup. Check for any pending work or updates.";
|
|
2362
|
-
log(`Schedule wakeup triggered for ${name}`);
|
|
2363
|
-
await contextProvider.appendChannel("system", `@${name} ${wakeupPrompt}`);
|
|
2364
|
-
lastActivityTime = now;
|
|
2365
|
-
continue;
|
|
2366
|
-
}
|
|
2367
|
-
}
|
|
2368
|
-
state = "idle";
|
|
2369
|
-
await contextProvider.setAgentStatus(name, { state: "idle" });
|
|
2370
|
-
continue;
|
|
2371
|
-
}
|
|
2372
|
-
const senders = inbox.map((m) => m.entry.from);
|
|
2373
|
-
infoLog(`Inbox: ${inbox.length} message(s) from [${senders.join(", ")}]`);
|
|
2374
|
-
for (const msg of inbox) {
|
|
2375
|
-
const preview = msg.entry.content.length > 120 ? msg.entry.content.slice(0, 120) + "..." : msg.entry.content;
|
|
2376
|
-
log(` from @${msg.entry.from}: ${preview}`);
|
|
2377
|
-
}
|
|
2378
|
-
const latestId = inbox[inbox.length - 1].entry.id;
|
|
2379
|
-
await contextProvider.markInboxSeen(name, latestId);
|
|
2380
|
-
let attempt = 0;
|
|
2381
|
-
let lastResult = null;
|
|
2382
|
-
while (attempt < retryConfig.maxAttempts && shouldContinue(state)) {
|
|
2383
|
-
attempt++;
|
|
2384
|
-
state = "running";
|
|
2385
|
-
await contextProvider.setAgentStatus(name, { state: "running" });
|
|
2386
|
-
infoLog(`Running (attempt ${attempt}/${retryConfig.maxAttempts})`);
|
|
2387
|
-
lastResult = await runAgent(backend, {
|
|
2388
|
-
name,
|
|
2389
|
-
agent,
|
|
2390
|
-
inbox,
|
|
2391
|
-
recentChannel: await contextProvider.readChannel({
|
|
2392
|
-
limit: LOOP_DEFAULTS.recentChannelLimit,
|
|
2393
|
-
agent: name
|
|
2394
|
-
}),
|
|
2395
|
-
documentContent: await contextProvider.readDocument(),
|
|
2396
|
-
mcpUrl,
|
|
2397
|
-
workspaceDir,
|
|
2398
|
-
projectDir,
|
|
2399
|
-
retryAttempt: attempt,
|
|
2400
|
-
provider: contextProvider,
|
|
2401
|
-
eventLog,
|
|
2402
|
-
feedback
|
|
2403
|
-
}, log, infoLog);
|
|
2404
|
-
if (lastResult.success) {
|
|
2405
|
-
infoLog(`DONE ${lastResult.steps ? `${lastResult.steps} steps, ${lastResult.toolCalls} tool calls, ${lastResult.duration}ms` : `${lastResult.duration}ms`}`);
|
|
2406
|
-
if (lastResult.content) await contextProvider.appendChannel(name, lastResult.content);
|
|
2407
|
-
await contextProvider.ackInbox(name, latestId);
|
|
2408
|
-
lastActivityTime = Date.now();
|
|
2409
|
-
await contextProvider.setAgentStatus(name, { state: "idle" });
|
|
2410
|
-
break;
|
|
2411
|
-
}
|
|
2412
|
-
errorLog(`ERROR ${lastResult.error}`);
|
|
2413
|
-
if (attempt < retryConfig.maxAttempts && shouldContinue(state)) {
|
|
2414
|
-
const delay = retryConfig.backoffMs * Math.pow(retryConfig.backoffMultiplier, attempt - 1);
|
|
2415
|
-
log(`Retrying in ${delay}ms...`);
|
|
2416
|
-
await sleep(delay);
|
|
2417
|
-
}
|
|
2418
|
-
}
|
|
2419
|
-
if (lastResult && !lastResult.success) {
|
|
2420
|
-
_hasFailures = true;
|
|
2421
|
-
_lastError = lastResult.error;
|
|
2422
|
-
errorLog(`ERROR max retries exhausted, acknowledging to prevent loop`);
|
|
2423
|
-
await contextProvider.ackInbox(name, latestId);
|
|
2424
|
-
}
|
|
2425
|
-
if (lastResult && onRunComplete) onRunComplete(lastResult);
|
|
2426
|
-
state = "idle";
|
|
2427
|
-
await contextProvider.setAgentStatus(name, { state: "idle" });
|
|
2428
|
-
}
|
|
2429
|
-
}
|
|
2430
|
-
return {
|
|
2431
|
-
get name() {
|
|
2432
|
-
return name;
|
|
2433
|
-
},
|
|
2434
|
-
get state() {
|
|
2435
|
-
return state;
|
|
2436
|
-
},
|
|
2437
|
-
get hasFailures() {
|
|
2438
|
-
return _hasFailures;
|
|
2439
|
-
},
|
|
2440
|
-
get lastError() {
|
|
2441
|
-
return _lastError;
|
|
2442
|
-
},
|
|
2443
|
-
async start() {
|
|
2444
|
-
if (state !== "stopped") throw new Error(`Loop ${name} is already running`);
|
|
2445
|
-
state = "idle";
|
|
2446
|
-
lastActivityTime = Date.now();
|
|
2447
|
-
await contextProvider.setAgentStatus(name, { state: "idle" });
|
|
2448
|
-
if (resolvedSchedule) infoLog(`Starting (schedule: ${resolvedSchedule.type === "interval" ? `${resolvedSchedule.ms}ms interval` : `cron "${resolvedSchedule.expr}"`})`);
|
|
2449
|
-
else infoLog(`Starting`);
|
|
2450
|
-
runLoop().catch((error) => {
|
|
2451
|
-
errorLog(`ERROR ${error instanceof Error ? error.message : String(error)}`);
|
|
2452
|
-
state = "stopped";
|
|
2453
|
-
contextProvider.setAgentStatus(name, { state: "stopped" }).catch(() => {});
|
|
2454
|
-
});
|
|
2455
|
-
},
|
|
2456
|
-
async stop() {
|
|
2457
|
-
log(`Stopping`);
|
|
2458
|
-
state = "stopped";
|
|
2459
|
-
await contextProvider.setAgentStatus(name, { state: "stopped" });
|
|
2460
|
-
if (backend.abort) backend.abort();
|
|
2461
|
-
if (pollTimeout) {
|
|
2462
|
-
clearTimeout(pollTimeout);
|
|
2463
|
-
pollTimeout = null;
|
|
2464
|
-
}
|
|
2465
|
-
if (wakeResolver) {
|
|
2466
|
-
wakeResolver();
|
|
2467
|
-
wakeResolver = null;
|
|
2468
|
-
}
|
|
2469
|
-
},
|
|
2470
|
-
wake() {
|
|
2471
|
-
if (state === "idle" && wakeResolver) {
|
|
2472
|
-
log(`Waking`);
|
|
2473
|
-
if (pollTimeout) {
|
|
2474
|
-
clearTimeout(pollTimeout);
|
|
2475
|
-
pollTimeout = null;
|
|
2476
|
-
}
|
|
2477
|
-
wakeResolver();
|
|
2478
|
-
wakeResolver = null;
|
|
2479
|
-
}
|
|
2480
|
-
},
|
|
2481
|
-
async sendDirect(message) {
|
|
2482
|
-
if (directRunning) return {
|
|
2483
|
-
success: false,
|
|
2484
|
-
error: "Agent is already processing a direct request",
|
|
2485
|
-
duration: 0
|
|
2486
|
-
};
|
|
2487
|
-
if (state === "running") return {
|
|
2488
|
-
success: false,
|
|
2489
|
-
error: "Agent is currently running (poll loop)",
|
|
2490
|
-
duration: 0
|
|
2491
|
-
};
|
|
2492
|
-
directRunning = true;
|
|
2493
|
-
const prevState = state;
|
|
2494
|
-
state = "running";
|
|
2495
|
-
await contextProvider.setAgentStatus(name, { state: "running" });
|
|
2496
|
-
try {
|
|
2497
|
-
await contextProvider.appendChannel("user", `@${name} ${message}`);
|
|
2498
|
-
const inbox = await contextProvider.getInbox(name);
|
|
2499
|
-
const latestId = inbox.length > 0 ? inbox[inbox.length - 1].entry.id : void 0;
|
|
2500
|
-
if (latestId) await contextProvider.markInboxSeen(name, latestId);
|
|
2501
|
-
const runContext = {
|
|
2502
|
-
name,
|
|
2503
|
-
agent,
|
|
2504
|
-
inbox,
|
|
2505
|
-
recentChannel: await contextProvider.readChannel({
|
|
2506
|
-
limit: LOOP_DEFAULTS.recentChannelLimit,
|
|
2507
|
-
agent: name
|
|
2508
|
-
}),
|
|
2509
|
-
documentContent: await contextProvider.readDocument(),
|
|
2510
|
-
mcpUrl,
|
|
2511
|
-
workspaceDir,
|
|
2512
|
-
projectDir,
|
|
2513
|
-
retryAttempt: 1,
|
|
2514
|
-
provider: contextProvider,
|
|
2515
|
-
eventLog,
|
|
2516
|
-
feedback
|
|
2517
|
-
};
|
|
2518
|
-
infoLog(`Direct send (${message.length} chars)`);
|
|
2519
|
-
const result = await runAgent(backend, runContext, log, infoLog);
|
|
2520
|
-
if (result.success) {
|
|
2521
|
-
if (result.content) await contextProvider.appendChannel(name, result.content);
|
|
2522
|
-
if (latestId) await contextProvider.ackInbox(name, latestId);
|
|
2523
|
-
lastActivityTime = Date.now();
|
|
2524
|
-
}
|
|
2525
|
-
return result;
|
|
2526
|
-
} finally {
|
|
2527
|
-
directRunning = false;
|
|
2528
|
-
state = prevState === "stopped" ? "stopped" : "idle";
|
|
2529
|
-
await contextProvider.setAgentStatus(name, { state }).catch(() => {});
|
|
2530
|
-
}
|
|
2531
|
-
}
|
|
2532
|
-
};
|
|
2533
|
-
}
|
|
2534
|
-
/**
|
|
2535
|
-
* Run an agent: build prompt, configure workspace, call backend.send()
|
|
2536
|
-
*
|
|
2537
|
-
* This is the single orchestration function that the loop calls.
|
|
2538
|
-
* All the "how to run an agent" logic lives here — backends just send().
|
|
2539
|
-
*
|
|
2540
|
-
* SDK and mock backends get special runners with MCP tool bridge + bash,
|
|
2541
|
-
* because they can't manage tools on their own (unlike CLI backends).
|
|
2542
|
-
*/
|
|
2543
|
-
async function runAgent(backend, ctx, log, infoLog) {
|
|
2544
|
-
const info = infoLog ?? log;
|
|
2545
|
-
if (backend.type === "mock") return runMockAgent(ctx, (msg) => log(msg));
|
|
2546
|
-
if (backend.type === "default") return runSdkAgent(ctx, (msg) => log(msg));
|
|
2547
|
-
const startTime = Date.now();
|
|
2548
|
-
try {
|
|
2549
|
-
const mcpConfig = generateWorkflowMCPConfig(ctx.mcpUrl, ctx.name);
|
|
2550
|
-
writeBackendMcpConfig(backend.type, ctx.workspaceDir, mcpConfig);
|
|
2551
|
-
const prompt = buildAgentPrompt(ctx);
|
|
2552
|
-
info(`Prompt (${prompt.length} chars) → ${backend.type} backend`);
|
|
2553
|
-
const response = await backend.send(prompt, { system: ctx.agent.resolvedSystemPrompt });
|
|
2554
|
-
return {
|
|
2555
|
-
success: true,
|
|
2556
|
-
duration: Date.now() - startTime,
|
|
2557
|
-
content: response.content
|
|
2558
|
-
};
|
|
2559
|
-
} catch (error) {
|
|
2560
|
-
return {
|
|
2561
|
-
success: false,
|
|
2562
|
-
error: error instanceof Error ? error.message : String(error),
|
|
2563
|
-
duration: Date.now() - startTime
|
|
2564
|
-
};
|
|
2565
|
-
}
|
|
2566
|
-
}
|
|
2567
|
-
/**
|
|
2568
|
-
* Sleep helper
|
|
2569
|
-
*/
|
|
2570
|
-
function sleep(ms) {
|
|
2571
|
-
return new Promise((resolve) => setTimeout(resolve, ms));
|
|
2572
|
-
}
|
|
2573
|
-
/**
|
|
2574
|
-
* Check if workflow is complete (all agents idle, no pending work)
|
|
2575
|
-
*/
|
|
2576
|
-
async function checkWorkflowIdle(loops, provider, debounceMs = LOOP_DEFAULTS.idleDebounceMs) {
|
|
2577
|
-
if (![...loops.values()].every((c) => c.state === "idle")) return false;
|
|
2578
|
-
for (const [name] of loops) if ((await provider.getInbox(name)).length > 0) return false;
|
|
2579
|
-
await sleep(debounceMs);
|
|
2580
|
-
return [...loops.values()].every((c) => c.state === "idle");
|
|
2581
|
-
}
|
|
2582
|
-
|
|
2583
|
-
//#endregion
|
|
2584
|
-
//#region src/workflow/loop/backend.ts
|
|
2585
|
-
/**
|
|
2586
|
-
* Get backend by explicit backend type
|
|
2587
|
-
*
|
|
2588
|
-
* All backends are created via the canonical createBackend() factory
|
|
2589
|
-
* from backends/index.ts. Mock backend is handled specially (no model needed).
|
|
2590
|
-
*/
|
|
2591
|
-
function getBackendByType(backendType, options) {
|
|
2592
|
-
if (backendType === "mock") return createMockBackend(options?.debugLog);
|
|
2593
|
-
const backendOptions = {};
|
|
2594
|
-
if (options?.timeout) backendOptions.timeout = options.timeout;
|
|
2595
|
-
if (options?.streamCallbacks) backendOptions.streamCallbacks = options.streamCallbacks;
|
|
2596
|
-
if (options?.workspace) backendOptions.workspace = options.workspace;
|
|
2597
|
-
return createBackend({
|
|
2598
|
-
type: backendType,
|
|
2599
|
-
model: options?.model,
|
|
2600
|
-
...backendType === "default" && options?.provider ? { provider: options.provider } : {},
|
|
2601
|
-
...Object.keys(backendOptions).length > 0 ? { options: backendOptions } : {}
|
|
2602
|
-
});
|
|
2603
|
-
}
|
|
2604
|
-
/**
|
|
2605
|
-
* Get appropriate backend for a model identifier
|
|
2606
|
-
*
|
|
2607
|
-
* Infers backend type from model name and delegates to getBackendByType.
|
|
2608
|
-
* Prefer using getBackendByType with explicit backend field in workflow configs.
|
|
2609
|
-
*/
|
|
2610
|
-
function getBackendForModel(model, options) {
|
|
2611
|
-
if (options?.provider) return getBackendByType("default", {
|
|
2612
|
-
...options,
|
|
2613
|
-
model
|
|
2614
|
-
});
|
|
2615
|
-
const { provider } = parseModel(model);
|
|
2616
|
-
if (provider === "claude") return getBackendByType("claude", {
|
|
2617
|
-
...options,
|
|
2618
|
-
model
|
|
2619
|
-
});
|
|
2620
|
-
if (provider === "codex") return getBackendByType("codex", {
|
|
2621
|
-
...options,
|
|
2622
|
-
model
|
|
2623
|
-
});
|
|
2624
|
-
return getBackendByType("default", {
|
|
2625
|
-
...options,
|
|
2626
|
-
model
|
|
2627
|
-
});
|
|
2628
|
-
}
|
|
2629
|
-
|
|
2630
|
-
//#endregion
|
|
2631
|
-
//#region src/workflow/logger.ts
|
|
2632
|
-
var logger_exports = /* @__PURE__ */ __exportAll({
|
|
2633
|
-
createChannelLogger: () => createChannelLogger,
|
|
2634
|
-
createSilentLogger: () => createSilentLogger
|
|
2635
|
-
});
|
|
2636
|
-
/**
|
|
2637
|
-
* Create a silent logger (no output)
|
|
2638
|
-
*/
|
|
2639
|
-
function createSilentLogger() {
|
|
2640
|
-
const noop = () => {};
|
|
2641
|
-
return {
|
|
2642
|
-
debug: noop,
|
|
2643
|
-
info: noop,
|
|
2644
|
-
warn: noop,
|
|
2645
|
-
error: noop,
|
|
2646
|
-
isDebug: () => false,
|
|
2647
|
-
child: () => createSilentLogger()
|
|
2648
|
-
};
|
|
2649
|
-
}
|
|
2650
|
-
/**
|
|
2651
|
-
* Create a logger that writes to the channel.
|
|
2652
|
-
*
|
|
2653
|
-
* - info/warn/error → channel entry with kind="system" (always shown to user)
|
|
2654
|
-
* - debug → channel entry with kind="debug" (only shown with --debug)
|
|
2655
|
-
*
|
|
2656
|
-
* The display layer handles formatting and filtering.
|
|
2657
|
-
*/
|
|
2658
|
-
function createChannelLogger(config) {
|
|
2659
|
-
const { provider, from = "system" } = config;
|
|
2660
|
-
const formatContent = (level, message, args) => {
|
|
2661
|
-
const argsStr = args.length > 0 ? " " + args.map(formatArg).join(" ") : "";
|
|
2662
|
-
if (level === "warn") return `[WARN] ${message}${argsStr}`;
|
|
2663
|
-
if (level === "error") return `[ERROR] ${message}${argsStr}`;
|
|
2664
|
-
return `${message}${argsStr}`;
|
|
2665
|
-
};
|
|
2666
|
-
const write = (level, message, args) => {
|
|
2667
|
-
const content = formatContent(level, message, args);
|
|
2668
|
-
const kind = level === "debug" ? "debug" : "system";
|
|
2669
|
-
provider.appendChannel(from, content, { kind }).catch(() => {});
|
|
2670
|
-
};
|
|
2671
|
-
return {
|
|
2672
|
-
debug: (message, ...args) => write("debug", message, args),
|
|
2673
|
-
info: (message, ...args) => write("info", message, args),
|
|
2674
|
-
warn: (message, ...args) => write("warn", message, args),
|
|
2675
|
-
error: (message, ...args) => write("error", message, args),
|
|
2676
|
-
isDebug: () => true,
|
|
2677
|
-
child: (childPrefix) => {
|
|
2678
|
-
return createChannelLogger({
|
|
2679
|
-
provider,
|
|
2680
|
-
from: from ? `${from}:${childPrefix}` : childPrefix
|
|
2681
|
-
});
|
|
2682
|
-
}
|
|
2683
|
-
};
|
|
2684
|
-
}
|
|
2685
|
-
/** Format an argument for logging */
|
|
2686
|
-
function formatArg(arg) {
|
|
2687
|
-
if (arg === null || arg === void 0) return String(arg);
|
|
2688
|
-
if (typeof arg === "object") try {
|
|
2689
|
-
return JSON.stringify(arg);
|
|
2690
|
-
} catch {
|
|
2691
|
-
return String(arg);
|
|
2692
|
-
}
|
|
2693
|
-
return String(arg);
|
|
2694
|
-
}
|
|
2695
|
-
|
|
2696
|
-
//#endregion
|
|
2697
|
-
//#region src/workflow/factory.ts
|
|
2698
|
-
/**
|
|
2699
|
-
* Workflow Factory — Composable primitives for building workflow runtimes.
|
|
2700
|
-
*
|
|
2701
|
-
* These functions are the building blocks that both runner.ts (CLI direct)
|
|
2702
|
-
* and daemon.ts (service) use to create workflow infrastructure.
|
|
2703
|
-
*
|
|
2704
|
-
* Extracted from the monolithic runWorkflowWithLoops() so that
|
|
2705
|
-
* the daemon can create and manage workflow components independently.
|
|
2706
|
-
*
|
|
2707
|
-
* Usage:
|
|
2708
|
-
* 1. createMinimalRuntime() — context + MCP + event log (the "workspace")
|
|
2709
|
-
* 2. createWiredLoop() — backend + workspace dir + loop (per agent)
|
|
2710
|
-
* 3. Caller manages lifecycle — start/stop loops, send kickoff, shutdown
|
|
2711
|
-
*/
|
|
2712
|
-
/**
|
|
2713
|
-
* Create a minimal workflow runtime.
|
|
2714
|
-
*
|
|
2715
|
-
* Sets up the shared infrastructure (context + MCP + event log) without
|
|
2716
|
-
* creating loops or backends. The daemon can use this to create
|
|
2717
|
-
* workflow infrastructure for both standalone and multi-agent workflows.
|
|
2718
|
-
*
|
|
2719
|
-
* For standalone agents created via `POST /agents`, this gives them
|
|
2720
|
-
* the same context infrastructure that workflow agents get.
|
|
2721
|
-
*/
|
|
2722
|
-
async function createMinimalRuntime(config) {
|
|
2723
|
-
const { workflowName, tag, agentNames, onMention, feedback: feedbackEnabled, debugLog } = config;
|
|
2724
|
-
let contextProvider;
|
|
2725
|
-
let contextDir;
|
|
2726
|
-
let persistent = false;
|
|
2727
|
-
if (config.contextProvider && config.contextDir) {
|
|
2728
|
-
contextProvider = config.contextProvider;
|
|
2729
|
-
contextDir = config.contextDir;
|
|
2730
|
-
persistent = config.persistent ?? false;
|
|
2731
|
-
} else {
|
|
2732
|
-
contextDir = getDefaultContextDir(workflowName, tag);
|
|
2733
|
-
if (!existsSync(contextDir)) mkdirSync(contextDir, { recursive: true });
|
|
2734
|
-
contextProvider = createFileContextProvider(contextDir, agentNames);
|
|
2735
|
-
persistent = false;
|
|
2736
|
-
}
|
|
2737
|
-
await contextProvider.markRunStart();
|
|
2738
|
-
const projectDir = process.cwd();
|
|
2739
|
-
let mcpGetFeedback;
|
|
2740
|
-
let mcpToolNames = /* @__PURE__ */ new Set();
|
|
2741
|
-
const eventLog = new EventLog(contextProvider);
|
|
2742
|
-
const createMCPServerInstance = () => {
|
|
2743
|
-
const mcp = createContextMCPServer({
|
|
2744
|
-
provider: contextProvider,
|
|
2745
|
-
validAgents: agentNames,
|
|
2746
|
-
name: `${workflowName}-context`,
|
|
2747
|
-
version: "1.0.0",
|
|
2748
|
-
onMention,
|
|
2749
|
-
feedback: feedbackEnabled,
|
|
2750
|
-
debugLog
|
|
2751
|
-
});
|
|
2752
|
-
mcpGetFeedback = mcp.getFeedback;
|
|
2753
|
-
mcpToolNames = mcp.mcpToolNames;
|
|
2754
|
-
return mcp.server;
|
|
2755
|
-
};
|
|
2756
|
-
const httpMcpServer = await runWithHttp({
|
|
2757
|
-
createServerInstance: createMCPServerInstance,
|
|
2758
|
-
port: 0
|
|
2759
|
-
});
|
|
2760
|
-
const shutdown = async () => {
|
|
2761
|
-
if (persistent) {
|
|
2762
|
-
if (contextProvider instanceof FileContextProvider) contextProvider.releaseLock();
|
|
2763
|
-
} else await contextProvider.destroy();
|
|
2764
|
-
await httpMcpServer.close();
|
|
2765
|
-
};
|
|
2766
|
-
return {
|
|
2767
|
-
contextProvider,
|
|
2768
|
-
contextDir,
|
|
2769
|
-
persistent,
|
|
2770
|
-
eventLog,
|
|
2771
|
-
httpMcpServer,
|
|
2772
|
-
mcpUrl: httpMcpServer.url,
|
|
2773
|
-
mcpToolNames,
|
|
2774
|
-
projectDir,
|
|
2775
|
-
getFeedback: mcpGetFeedback,
|
|
2776
|
-
shutdown
|
|
2777
|
-
};
|
|
2778
|
-
}
|
|
2779
|
-
/**
|
|
2780
|
-
* Create a fully-wired agent loop.
|
|
2781
|
-
*
|
|
2782
|
-
* This handles the full setup:
|
|
2783
|
-
* 1. Create backend from agent definition (or use custom factory)
|
|
2784
|
-
* 2. Create isolated workspace directory
|
|
2785
|
-
* 3. Configure stream callbacks for structured event logging
|
|
2786
|
-
* 4. Create the AgentLoop with all wiring
|
|
2787
|
-
*
|
|
2788
|
-
* Extracted from runWorkflowWithLoops() so both runner.ts and
|
|
2789
|
-
* daemon.ts can create loops with the same quality.
|
|
2790
|
-
*/
|
|
2791
|
-
function createWiredLoop(config) {
|
|
2792
|
-
const { name, agent, runtime, pollInterval, feedback: feedbackEnabled } = config;
|
|
2793
|
-
const logger = config.logger ?? createSilentLogger();
|
|
2794
|
-
const workspaceDir = join(runtime.contextDir, "workspaces", name);
|
|
2795
|
-
if (!existsSync(workspaceDir)) mkdirSync(workspaceDir, { recursive: true });
|
|
2796
|
-
const streamCallbacks = {
|
|
2797
|
-
debugLog: (msg) => logger.debug(msg),
|
|
2798
|
-
outputLog: (msg) => runtime.eventLog.output(name, msg),
|
|
2799
|
-
toolCallLog: (toolName, args) => runtime.eventLog.toolCall(name, toolName, args, "backend"),
|
|
2800
|
-
mcpToolNames: runtime.mcpToolNames
|
|
2801
|
-
};
|
|
2802
|
-
let effectiveModel;
|
|
2803
|
-
let effectiveProvider = agent.provider;
|
|
2804
|
-
if (isAutoProvider(agent.model) || isAutoProvider(agent.provider)) {
|
|
2805
|
-
const resolved = resolveModelFallback({
|
|
2806
|
-
model: agent.model,
|
|
2807
|
-
provider: typeof agent.provider === "string" ? agent.provider : void 0
|
|
2808
|
-
});
|
|
2809
|
-
effectiveModel = resolved.model;
|
|
2810
|
-
effectiveProvider = resolved.provider;
|
|
2811
|
-
logger.info(`Model resolved: ${effectiveModel}`);
|
|
2812
|
-
} else effectiveModel = agent.model;
|
|
2813
|
-
let backend;
|
|
2814
|
-
if (config.createBackend) backend = config.createBackend(name, agent);
|
|
2815
|
-
else if (agent.backend) backend = getBackendByType(agent.backend, {
|
|
2816
|
-
model: effectiveModel,
|
|
2817
|
-
provider: effectiveProvider,
|
|
2818
|
-
debugLog: (msg) => logger.debug(msg),
|
|
2819
|
-
streamCallbacks,
|
|
2820
|
-
timeout: agent.timeout,
|
|
2821
|
-
workspace: workspaceDir
|
|
2822
|
-
});
|
|
2823
|
-
else if (effectiveModel) backend = getBackendForModel(effectiveModel, {
|
|
2824
|
-
provider: effectiveProvider,
|
|
2825
|
-
debugLog: (msg) => logger.debug(msg),
|
|
2826
|
-
streamCallbacks,
|
|
2827
|
-
workspace: workspaceDir
|
|
2828
|
-
});
|
|
2829
|
-
else throw new Error(`Agent "${name}" requires either a backend or model field`);
|
|
2830
|
-
return {
|
|
2831
|
-
loop: createAgentLoop({
|
|
2832
|
-
name,
|
|
2833
|
-
agent: effectiveModel !== agent.model || effectiveProvider !== agent.provider ? {
|
|
2834
|
-
...agent,
|
|
2835
|
-
model: effectiveModel,
|
|
2836
|
-
provider: effectiveProvider
|
|
2837
|
-
} : agent,
|
|
2838
|
-
contextProvider: runtime.contextProvider,
|
|
2839
|
-
eventLog: runtime.eventLog,
|
|
2840
|
-
mcpUrl: runtime.mcpUrl,
|
|
2841
|
-
workspaceDir,
|
|
2842
|
-
projectDir: runtime.projectDir,
|
|
2843
|
-
backend,
|
|
2844
|
-
pollInterval,
|
|
2845
|
-
log: (msg) => logger.debug(msg),
|
|
2846
|
-
infoLog: (msg) => logger.info(msg),
|
|
2847
|
-
errorLog: (msg) => logger.error(msg),
|
|
2848
|
-
feedback: feedbackEnabled
|
|
2849
|
-
}),
|
|
2850
|
-
backend
|
|
2851
|
-
};
|
|
2852
|
-
}
|
|
2853
|
-
|
|
2854
|
-
//#endregion
|
|
2855
|
-
//#region src/daemon/daemon.ts
|
|
2856
|
-
/**
|
|
2857
|
-
* Daemon — Centralized agent coordinator.
|
|
2858
|
-
*
|
|
2859
|
-
* Architecture: Interface → Daemon → Loop (three layers)
|
|
2860
|
-
* Interface: CLI/REST/MCP clients talk to daemon via HTTP
|
|
2861
|
-
* Daemon: This module — owns lifecycle, creates workflows + loops
|
|
2862
|
-
* Loop: AgentLoop + Backend — executes agent reasoning
|
|
2863
|
-
*
|
|
2864
|
-
* Data ownership:
|
|
2865
|
-
* Registry (configs) — what agents exist and their configuration
|
|
2866
|
-
* Workflows (workflows) — running workflow instances with loops + context
|
|
2867
|
-
*
|
|
2868
|
-
* Key principle: every agent lives in a workflow. Standalone agents created via
|
|
2869
|
-
* POST /agents get a 1-agent workflow (created lazily on first /run or /serve).
|
|
2870
|
-
* This unifies the runtime so there's one code path for execution.
|
|
2871
|
-
*
|
|
2872
|
-
* HTTP endpoints:
|
|
2873
|
-
* GET /health, POST /shutdown
|
|
2874
|
-
* GET/POST /agents, GET/DELETE /agents/:name
|
|
2875
|
-
* POST /run (SSE), POST /serve
|
|
2876
|
-
* GET/POST /workflows, DELETE /workflows/:name/:tag
|
|
2877
|
-
* ALL /mcp
|
|
2878
|
-
*/
|
|
2879
|
-
var daemon_exports = /* @__PURE__ */ __exportAll({
|
|
2880
|
-
createDaemonApp: () => createDaemonApp,
|
|
2881
|
-
startDaemon: () => startDaemon
|
|
2882
|
-
});
|
|
2883
|
-
/** Key prefix for standalone agent workflow handles */
|
|
2884
|
-
const STANDALONE_PREFIX = "standalone:";
|
|
2885
|
-
/** Build a workflow key for standalone agents */
|
|
2886
|
-
function standaloneKey(agentName) {
|
|
2887
|
-
return `${STANDALONE_PREFIX}${agentName}`;
|
|
2888
|
-
}
|
|
2889
|
-
let state = null;
|
|
2890
|
-
let shuttingDown = false;
|
|
2891
|
-
const mcpSessions = /* @__PURE__ */ new Map();
|
|
2892
|
-
async function gracefulShutdown() {
|
|
2893
|
-
if (shuttingDown) return;
|
|
2894
|
-
shuttingDown = true;
|
|
2895
|
-
if (state) {
|
|
2896
|
-
for (const [, loop] of state.loops) try {
|
|
2897
|
-
await loop.stop();
|
|
2898
|
-
} catch {}
|
|
2899
|
-
state.loops.clear();
|
|
2900
|
-
for (const [, wf] of state.workflows) try {
|
|
2901
|
-
await wf.shutdown();
|
|
2902
|
-
} catch {}
|
|
2903
|
-
state.workflows.clear();
|
|
2904
|
-
if (state.server) await state.server.close();
|
|
2905
|
-
}
|
|
2906
|
-
for (const [, session] of mcpSessions) try {
|
|
2907
|
-
await session.transport.close();
|
|
2908
|
-
} catch {}
|
|
2909
|
-
mcpSessions.clear();
|
|
2910
|
-
removeDaemonInfo();
|
|
2911
|
-
process.exit(0);
|
|
2912
|
-
}
|
|
2913
|
-
/** Safe JSON body parsing — returns null on malformed input */
|
|
2914
|
-
async function parseJsonBody(c) {
|
|
2915
|
-
try {
|
|
2916
|
-
return await c.req.json();
|
|
2917
|
-
} catch {
|
|
2918
|
-
return null;
|
|
2919
|
-
}
|
|
2920
|
-
}
|
|
2921
|
-
/** Map AgentConfig to the ResolvedWorkflowAgent type needed by the factory */
|
|
2922
|
-
function configToResolvedWorkflowAgent(cfg) {
|
|
2923
|
-
return {
|
|
2924
|
-
backend: cfg.backend,
|
|
2925
|
-
model: cfg.model,
|
|
2926
|
-
provider: cfg.provider,
|
|
2927
|
-
resolvedSystemPrompt: cfg.system,
|
|
2928
|
-
schedule: cfg.schedule
|
|
2929
|
-
};
|
|
2930
|
-
}
|
|
2931
|
-
/**
|
|
2932
|
-
* Find an agent's loop.
|
|
2933
|
-
* First checks daemon-level loops (standalone agents),
|
|
2934
|
-
* then falls back to workflow-scoped loops (workflow agents).
|
|
2935
|
-
*/
|
|
2936
|
-
function findLoop(s, agentName) {
|
|
2937
|
-
const daemonLoop = s.loops.get(agentName);
|
|
2938
|
-
if (daemonLoop) return {
|
|
2939
|
-
loop: daemonLoop,
|
|
2940
|
-
workflow: s.workflows.get(standaloneKey(agentName)) ?? null
|
|
2941
|
-
};
|
|
2942
|
-
for (const wf of s.workflows.values()) {
|
|
2943
|
-
const l = wf.loops.get(agentName);
|
|
2944
|
-
if (l) return {
|
|
2945
|
-
loop: l,
|
|
2946
|
-
workflow: wf
|
|
2947
|
-
};
|
|
2948
|
-
}
|
|
2949
|
-
return null;
|
|
2950
|
-
}
|
|
2951
|
-
/**
|
|
2952
|
-
* Ensure a standalone agent has a loop + runtime.
|
|
2953
|
-
* Creates the infrastructure lazily on first call (starts MCP server, etc.).
|
|
2954
|
-
*
|
|
2955
|
-
* The loop is stored in `s.loops` (daemon-owned).
|
|
2956
|
-
* A WorkflowHandle is still created for runtime resource management (MCP, context).
|
|
2957
|
-
*
|
|
2958
|
-
* This is the bridge between POST /agents (stores config only) and
|
|
2959
|
-
* POST /run or /serve (needs a loop to execute).
|
|
2960
|
-
*/
|
|
2961
|
-
async function ensureAgentLoop(s, agentName) {
|
|
2962
|
-
const existing = findLoop(s, agentName);
|
|
2963
|
-
if (existing) return existing;
|
|
2964
|
-
const cfg = s.configs.get(agentName);
|
|
2965
|
-
if (!cfg) throw new Error(`Agent not found: ${agentName}`);
|
|
2966
|
-
const agentDef = configToResolvedWorkflowAgent(cfg);
|
|
2967
|
-
const wfKey = standaloneKey(agentName);
|
|
2968
|
-
const workflowName = cfg.workflow ?? "global";
|
|
2969
|
-
const workflowTag = cfg.tag ?? "main";
|
|
2970
|
-
const runtime = await createMinimalRuntime({
|
|
2971
|
-
workflowName,
|
|
2972
|
-
tag: workflowTag,
|
|
2973
|
-
agentNames: [agentName]
|
|
2974
|
-
});
|
|
2975
|
-
let loop;
|
|
2976
|
-
try {
|
|
2977
|
-
({loop} = createWiredLoop({
|
|
2978
|
-
name: agentName,
|
|
2979
|
-
agent: agentDef,
|
|
2980
|
-
runtime
|
|
2981
|
-
}));
|
|
2982
|
-
} catch (err) {
|
|
2983
|
-
await runtime.shutdown();
|
|
2984
|
-
throw err;
|
|
2985
|
-
}
|
|
2986
|
-
s.loops.set(agentName, loop);
|
|
2987
|
-
const handle = {
|
|
2988
|
-
name: workflowName,
|
|
2989
|
-
tag: workflowTag,
|
|
2990
|
-
key: wfKey,
|
|
2991
|
-
standalone: true,
|
|
2992
|
-
agents: [agentName],
|
|
2993
|
-
loops: new Map([[agentName, loop]]),
|
|
2994
|
-
contextProvider: runtime.contextProvider,
|
|
2995
|
-
shutdown: async () => {
|
|
2996
|
-
try {
|
|
2997
|
-
await loop.stop();
|
|
2998
|
-
} finally {
|
|
2999
|
-
await runtime.shutdown();
|
|
3000
|
-
}
|
|
3001
|
-
},
|
|
3002
|
-
startedAt: (/* @__PURE__ */ new Date()).toISOString()
|
|
3003
|
-
};
|
|
3004
|
-
s.workflows.set(wfKey, handle);
|
|
3005
|
-
return {
|
|
3006
|
-
loop,
|
|
3007
|
-
workflow: handle
|
|
3008
|
-
};
|
|
3009
|
-
}
|
|
3010
|
-
/**
|
|
3011
|
-
* Create the Hono app with all daemon routes.
|
|
3012
|
-
*
|
|
3013
|
-
* Accepts a state getter so the app can be used both in production
|
|
3014
|
-
* (module-level state set by startDaemon) and in tests (injected state).
|
|
3015
|
-
*
|
|
3016
|
-
* When a token is provided, all endpoints require `Authorization: Bearer <token>`.
|
|
3017
|
-
* This prevents cross-origin attacks from malicious websites.
|
|
3018
|
-
*/
|
|
3019
|
-
function createDaemonApp(options) {
|
|
3020
|
-
const { getState, token } = options;
|
|
3021
|
-
const app = new Hono();
|
|
3022
|
-
if (token) app.use("*", async (c, next) => {
|
|
3023
|
-
if (c.req.header("authorization") !== `Bearer ${token}`) return c.json({ error: "Unauthorized" }, 401);
|
|
3024
|
-
await next();
|
|
3025
|
-
});
|
|
3026
|
-
function getWorkflowAgentNames(workflow, tag) {
|
|
3027
|
-
const s = getState();
|
|
3028
|
-
if (!s) return [];
|
|
3029
|
-
return [...s.configs.values()].filter((c) => c.workflow === workflow && c.tag === tag).map((c) => c.name);
|
|
3030
|
-
}
|
|
3031
|
-
app.get("/health", (c) => {
|
|
3032
|
-
const s = getState();
|
|
3033
|
-
if (!s) return c.json({ status: "unavailable" }, 503);
|
|
3034
|
-
const standaloneAgents = [...s.configs.keys()];
|
|
3035
|
-
const workflowList = [...s.workflows.values()].filter((wf) => !wf.standalone).map((wf) => ({
|
|
3036
|
-
name: wf.name,
|
|
3037
|
-
tag: wf.tag,
|
|
3038
|
-
agents: wf.agents
|
|
3039
|
-
}));
|
|
3040
|
-
return c.json({
|
|
3041
|
-
status: "ok",
|
|
3042
|
-
pid: process.pid,
|
|
3043
|
-
port: s.port,
|
|
3044
|
-
uptime: Date.now() - new Date(s.startedAt).getTime(),
|
|
3045
|
-
agents: standaloneAgents,
|
|
3046
|
-
workflows: workflowList
|
|
3047
|
-
});
|
|
3048
|
-
});
|
|
3049
|
-
app.post("/shutdown", (c) => {
|
|
3050
|
-
setImmediate(() => gracefulShutdown());
|
|
3051
|
-
return c.json({ success: true });
|
|
3052
|
-
});
|
|
3053
|
-
app.get("/agents", (c) => {
|
|
3054
|
-
const s = getState();
|
|
3055
|
-
if (!s) return c.json({ error: "Not ready" }, 503);
|
|
3056
|
-
const standaloneAgents = [...s.configs.values()].map((cfg) => {
|
|
3057
|
-
const found = findLoop(s, cfg.name);
|
|
3058
|
-
return {
|
|
3059
|
-
name: cfg.name,
|
|
3060
|
-
model: cfg.model,
|
|
3061
|
-
backend: cfg.backend,
|
|
3062
|
-
workflow: cfg.workflow,
|
|
3063
|
-
tag: cfg.tag,
|
|
3064
|
-
createdAt: cfg.createdAt,
|
|
3065
|
-
source: "standalone",
|
|
3066
|
-
state: found?.loop.state
|
|
3067
|
-
};
|
|
3068
|
-
});
|
|
3069
|
-
const workflowAgents = [...s.workflows.values()].flatMap((wf) => wf.agents.map((agentName) => {
|
|
3070
|
-
const loop = wf.loops.get(agentName);
|
|
3071
|
-
return {
|
|
3072
|
-
name: agentName,
|
|
3073
|
-
model: "",
|
|
3074
|
-
backend: "",
|
|
3075
|
-
workflow: wf.name,
|
|
3076
|
-
tag: wf.tag,
|
|
3077
|
-
createdAt: wf.startedAt,
|
|
3078
|
-
source: "workflow",
|
|
3079
|
-
state: loop?.state ?? "unknown"
|
|
3080
|
-
};
|
|
3081
|
-
}));
|
|
3082
|
-
return c.json({ agents: [...standaloneAgents, ...workflowAgents] });
|
|
3083
|
-
});
|
|
3084
|
-
app.post("/agents", async (c) => {
|
|
3085
|
-
const s = getState();
|
|
3086
|
-
if (!s) return c.json({ error: "Not ready" }, 503);
|
|
3087
|
-
const body = await parseJsonBody(c);
|
|
3088
|
-
if (!body || typeof body !== "object") return c.json({ error: "Invalid JSON body" }, 400);
|
|
3089
|
-
const { name, model, system, backend = "default", provider, workflow, tag, schedule } = body;
|
|
3090
|
-
if (!name || !model || !system) return c.json({ error: "name, model, system required" }, 400);
|
|
3091
|
-
if (s.configs.has(name)) return c.json({ error: `Agent already exists: ${name}` }, 409);
|
|
3092
|
-
const agentConfig = {
|
|
3093
|
-
name,
|
|
3094
|
-
model,
|
|
3095
|
-
system,
|
|
3096
|
-
backend,
|
|
3097
|
-
provider,
|
|
3098
|
-
workflow,
|
|
3099
|
-
tag,
|
|
3100
|
-
createdAt: (/* @__PURE__ */ new Date()).toISOString(),
|
|
3101
|
-
schedule
|
|
3102
|
-
};
|
|
3103
|
-
s.configs.set(name, agentConfig);
|
|
3104
|
-
return c.json({
|
|
3105
|
-
name,
|
|
3106
|
-
model,
|
|
3107
|
-
backend,
|
|
3108
|
-
workflow,
|
|
3109
|
-
tag,
|
|
3110
|
-
schedule
|
|
3111
|
-
}, 201);
|
|
3112
|
-
});
|
|
3113
|
-
app.get("/agents/:name", (c) => {
|
|
3114
|
-
const s = getState();
|
|
3115
|
-
if (!s) return c.json({ error: "Not ready" }, 503);
|
|
3116
|
-
const cfg = s.configs.get(c.req.param("name"));
|
|
3117
|
-
if (!cfg) return c.json({ error: "Agent not found" }, 404);
|
|
3118
|
-
return c.json({
|
|
3119
|
-
name: cfg.name,
|
|
3120
|
-
model: cfg.model,
|
|
3121
|
-
backend: cfg.backend,
|
|
3122
|
-
system: cfg.system,
|
|
3123
|
-
workflow: cfg.workflow,
|
|
3124
|
-
tag: cfg.tag,
|
|
3125
|
-
createdAt: cfg.createdAt,
|
|
3126
|
-
schedule: cfg.schedule
|
|
3127
|
-
});
|
|
3128
|
-
});
|
|
3129
|
-
app.delete("/agents/:name", async (c) => {
|
|
3130
|
-
const s = getState();
|
|
3131
|
-
if (!s) return c.json({ error: "Not ready" }, 503);
|
|
3132
|
-
const name = c.req.param("name");
|
|
3133
|
-
if (!s.configs.delete(name)) return c.json({ error: "Agent not found" }, 404);
|
|
3134
|
-
const daemonLoop = s.loops.get(name);
|
|
3135
|
-
if (daemonLoop) {
|
|
3136
|
-
try {
|
|
3137
|
-
await daemonLoop.stop();
|
|
3138
|
-
} catch {}
|
|
3139
|
-
s.loops.delete(name);
|
|
3140
|
-
}
|
|
3141
|
-
const wfKey = standaloneKey(name);
|
|
3142
|
-
const wf = s.workflows.get(wfKey);
|
|
3143
|
-
if (wf) {
|
|
3144
|
-
try {
|
|
3145
|
-
await wf.shutdown();
|
|
3146
|
-
} catch {}
|
|
3147
|
-
s.workflows.delete(wfKey);
|
|
3148
|
-
}
|
|
3149
|
-
return c.json({ success: true });
|
|
3150
|
-
});
|
|
3151
|
-
app.post("/run", async (c) => {
|
|
3152
|
-
const s = getState();
|
|
3153
|
-
if (!s) return c.json({ error: "Not ready" }, 503);
|
|
3154
|
-
const body = await parseJsonBody(c);
|
|
3155
|
-
if (!body || typeof body !== "object") return c.json({ error: "Invalid JSON body" }, 400);
|
|
3156
|
-
const { agent: agentName, message } = body;
|
|
3157
|
-
if (!agentName || !message) return c.json({ error: "agent and message required" }, 400);
|
|
3158
|
-
let loop;
|
|
3159
|
-
const loopResult = findLoop(s, agentName);
|
|
3160
|
-
if (loopResult) loop = loopResult.loop;
|
|
3161
|
-
else if (s.configs.has(agentName)) try {
|
|
3162
|
-
loop = (await ensureAgentLoop(s, agentName)).loop;
|
|
3163
|
-
} catch (error) {
|
|
3164
|
-
const msg = error instanceof Error ? error.message : String(error);
|
|
3165
|
-
return c.json({ error: `Failed to create agent runtime: ${msg}` }, 500);
|
|
3166
|
-
}
|
|
3167
|
-
if (!loop) return c.json({ error: `Agent not found: ${agentName}` }, 404);
|
|
3168
|
-
const agentLoop = loop;
|
|
3169
|
-
return streamSSE(c, async (stream) => {
|
|
3170
|
-
try {
|
|
3171
|
-
const result = await agentLoop.sendDirect(message);
|
|
3172
|
-
if (result.success) {
|
|
3173
|
-
if (result.content) await stream.writeSSE({
|
|
3174
|
-
event: "chunk",
|
|
3175
|
-
data: JSON.stringify({
|
|
3176
|
-
agent: agentName,
|
|
3177
|
-
text: result.content
|
|
3178
|
-
})
|
|
3179
|
-
});
|
|
3180
|
-
await stream.writeSSE({
|
|
3181
|
-
event: "done",
|
|
3182
|
-
data: JSON.stringify(result)
|
|
3183
|
-
});
|
|
3184
|
-
} else await stream.writeSSE({
|
|
3185
|
-
event: "error",
|
|
3186
|
-
data: JSON.stringify({ error: result.error })
|
|
3187
|
-
});
|
|
3188
|
-
} catch (error) {
|
|
3189
|
-
const msg = error instanceof Error ? error.message : String(error);
|
|
3190
|
-
await stream.writeSSE({
|
|
3191
|
-
event: "error",
|
|
3192
|
-
data: JSON.stringify({ error: msg })
|
|
3193
|
-
});
|
|
3194
|
-
}
|
|
3195
|
-
});
|
|
3196
|
-
});
|
|
3197
|
-
app.post("/serve", async (c) => {
|
|
3198
|
-
const s = getState();
|
|
3199
|
-
if (!s) return c.json({ error: "Not ready" }, 503);
|
|
3200
|
-
const body = await parseJsonBody(c);
|
|
3201
|
-
if (!body || typeof body !== "object") return c.json({ error: "Invalid JSON body" }, 400);
|
|
3202
|
-
const { agent: agentName, message } = body;
|
|
3203
|
-
if (!agentName || !message) return c.json({ error: "agent and message required" }, 400);
|
|
3204
|
-
let loop;
|
|
3205
|
-
const loopResult = findLoop(s, agentName);
|
|
3206
|
-
if (loopResult) loop = loopResult.loop;
|
|
3207
|
-
else if (s.configs.has(agentName)) try {
|
|
3208
|
-
loop = (await ensureAgentLoop(s, agentName)).loop;
|
|
3209
|
-
} catch (error) {
|
|
3210
|
-
const msg = error instanceof Error ? error.message : String(error);
|
|
3211
|
-
return c.json({ error: msg }, 500);
|
|
3212
|
-
}
|
|
3213
|
-
if (!loop) return c.json({ error: `Agent not found: ${agentName}` }, 404);
|
|
3214
|
-
try {
|
|
3215
|
-
const result = await loop.sendDirect(message);
|
|
3216
|
-
if (!result.success) return c.json({ error: result.error }, 500);
|
|
3217
|
-
return c.json({
|
|
3218
|
-
content: result.content ?? "",
|
|
3219
|
-
duration: result.duration,
|
|
3220
|
-
success: true
|
|
3221
|
-
});
|
|
3222
|
-
} catch (error) {
|
|
3223
|
-
const msg = error instanceof Error ? error.message : String(error);
|
|
3224
|
-
return c.json({ error: msg }, 500);
|
|
3225
|
-
}
|
|
3226
|
-
});
|
|
3227
|
-
app.all("/mcp", async (c) => {
|
|
3228
|
-
const s = getState();
|
|
3229
|
-
if (!s) return c.json({ error: "Not ready" }, 503);
|
|
3230
|
-
const req = c.req.raw;
|
|
3231
|
-
const sessionId = req.headers.get("mcp-session-id");
|
|
3232
|
-
if (sessionId && mcpSessions.has(sessionId)) {
|
|
3233
|
-
const session = mcpSessions.get(sessionId);
|
|
3234
|
-
if (req.method === "DELETE") {
|
|
3235
|
-
await session.transport.close();
|
|
3236
|
-
mcpSessions.delete(sessionId);
|
|
3237
|
-
return new Response(null, { status: 200 });
|
|
3238
|
-
}
|
|
3239
|
-
return session.transport.handleRequest(req);
|
|
3240
|
-
}
|
|
3241
|
-
if (req.method === "POST") {
|
|
3242
|
-
const body = await req.json();
|
|
3243
|
-
if (!(Array.isArray(body) ? body.some((m) => m?.method === "initialize") : body?.method === "initialize")) return c.json({ error: "Bad request: session required" }, 400);
|
|
3244
|
-
const agentName = new URL(req.url).searchParams.get("agent") || "user";
|
|
3245
|
-
const agentCfg = s.configs.get(agentName);
|
|
3246
|
-
const workflow = agentCfg?.workflow ?? "global";
|
|
3247
|
-
const tag = agentCfg?.tag ?? "main";
|
|
3248
|
-
const existingWf = findLoop(s, agentName)?.workflow ?? s.workflows.get(`${workflow}:${tag}`);
|
|
3249
|
-
const workflowAgents = getWorkflowAgentNames(workflow, tag);
|
|
3250
|
-
const allNames = [...new Set([
|
|
3251
|
-
...workflowAgents,
|
|
3252
|
-
agentName,
|
|
3253
|
-
"user"
|
|
3254
|
-
])];
|
|
3255
|
-
const provider = existingWf?.contextProvider ?? (() => {
|
|
3256
|
-
const contextDir = getDefaultContextDir(workflow, tag);
|
|
3257
|
-
mkdirSync(contextDir, { recursive: true });
|
|
3258
|
-
return createFileContextProvider(contextDir, allNames);
|
|
3259
|
-
})();
|
|
3260
|
-
const transport = new WebStandardStreamableHTTPServerTransport({
|
|
3261
|
-
sessionIdGenerator: () => `${agentName}-${randomUUID().slice(0, 8)}`,
|
|
3262
|
-
onsessioninitialized: (sid) => {
|
|
3263
|
-
mcpSessions.set(sid, {
|
|
3264
|
-
transport,
|
|
3265
|
-
agentId: agentName
|
|
3266
|
-
});
|
|
3267
|
-
},
|
|
3268
|
-
onsessionclosed: (sid) => {
|
|
3269
|
-
mcpSessions.delete(sid);
|
|
3270
|
-
},
|
|
3271
|
-
enableJsonResponse: true
|
|
3272
|
-
});
|
|
3273
|
-
await createContextMCPServer({
|
|
3274
|
-
provider,
|
|
3275
|
-
validAgents: allNames,
|
|
3276
|
-
name: `${workflow}-context`,
|
|
3277
|
-
version: "1.0.0"
|
|
3278
|
-
}).server.connect(transport);
|
|
3279
|
-
return transport.handleRequest(req, { parsedBody: body });
|
|
3280
|
-
}
|
|
3281
|
-
if (req.method === "GET") return c.json({ error: "Session ID required for GET requests" }, 400);
|
|
3282
|
-
return c.json({ error: "Method not allowed" }, 405);
|
|
3283
|
-
});
|
|
3284
|
-
app.post("/workflows", async (c) => {
|
|
3285
|
-
const s = getState();
|
|
3286
|
-
if (!s) return c.json({ error: "Not ready" }, 503);
|
|
3287
|
-
const body = await parseJsonBody(c);
|
|
3288
|
-
if (!body || typeof body !== "object") return c.json({ error: "Invalid JSON body" }, 400);
|
|
3289
|
-
const { workflow, tag = "main", feedback, pollInterval, params } = body;
|
|
3290
|
-
if (!workflow || !workflow.agents) return c.json({ error: "workflow (parsed YAML) required" }, 400);
|
|
3291
|
-
const workflowName = workflow.name || "global";
|
|
3292
|
-
const key = `${workflowName}:${tag}`;
|
|
3293
|
-
if (s.workflows.has(key)) return c.json({ error: `Workflow already running: ${key}` }, 409);
|
|
3294
|
-
try {
|
|
3295
|
-
const { runWorkflowWithLoops } = await import("../runner-BmT0Y8MD.mjs");
|
|
3296
|
-
const result = await runWorkflowWithLoops({
|
|
3297
|
-
workflow,
|
|
3298
|
-
workflowName,
|
|
3299
|
-
tag,
|
|
3300
|
-
mode: "start",
|
|
3301
|
-
headless: true,
|
|
3302
|
-
feedback,
|
|
3303
|
-
pollInterval,
|
|
3304
|
-
params,
|
|
3305
|
-
log: () => {}
|
|
3306
|
-
});
|
|
3307
|
-
if (!result.success) return c.json({ error: result.error || "Workflow failed to start" }, 500);
|
|
3308
|
-
const handle = {
|
|
3309
|
-
name: workflowName,
|
|
3310
|
-
tag,
|
|
3311
|
-
key,
|
|
3312
|
-
agents: Object.keys(workflow.agents),
|
|
3313
|
-
loops: result.loops,
|
|
3314
|
-
contextProvider: result.contextProvider,
|
|
3315
|
-
shutdown: result.shutdown,
|
|
3316
|
-
workflowPath: workflow.filePath,
|
|
3317
|
-
startedAt: (/* @__PURE__ */ new Date()).toISOString()
|
|
3318
|
-
};
|
|
3319
|
-
s.workflows.set(key, handle);
|
|
3320
|
-
return c.json({
|
|
3321
|
-
key,
|
|
3322
|
-
name: workflowName,
|
|
3323
|
-
tag,
|
|
3324
|
-
agents: handle.agents
|
|
3325
|
-
}, 201);
|
|
3326
|
-
} catch (error) {
|
|
3327
|
-
const msg = error instanceof Error ? error.message : String(error);
|
|
3328
|
-
return c.json({ error: `Failed to start workflow: ${msg}` }, 500);
|
|
3329
|
-
}
|
|
3330
|
-
});
|
|
3331
|
-
app.get("/workflows", (c) => {
|
|
3332
|
-
const s = getState();
|
|
3333
|
-
if (!s) return c.json({ error: "Not ready" }, 503);
|
|
3334
|
-
const workflows = [...s.workflows.values()].filter((wf) => !wf.standalone).map((wf) => {
|
|
3335
|
-
const agentStates = {};
|
|
3336
|
-
for (const [name, loop] of wf.loops) agentStates[name] = loop.state;
|
|
3337
|
-
return {
|
|
3338
|
-
name: wf.name,
|
|
3339
|
-
tag: wf.tag,
|
|
3340
|
-
key: wf.key,
|
|
3341
|
-
agents: wf.agents,
|
|
3342
|
-
agentStates,
|
|
3343
|
-
workflowPath: wf.workflowPath,
|
|
3344
|
-
startedAt: wf.startedAt
|
|
3345
|
-
};
|
|
3346
|
-
});
|
|
3347
|
-
return c.json({ workflows });
|
|
3348
|
-
});
|
|
3349
|
-
async function deleteWorkflow(c, name, tag) {
|
|
3350
|
-
const s = getState();
|
|
3351
|
-
if (!s) return c.json({ error: "Not ready" }, 503);
|
|
3352
|
-
const key = `${name}:${tag}`;
|
|
3353
|
-
const handle = s.workflows.get(key);
|
|
3354
|
-
if (!handle) return c.json({ error: `Workflow not found: ${key}` }, 404);
|
|
3355
|
-
try {
|
|
3356
|
-
await handle.shutdown();
|
|
3357
|
-
s.workflows.delete(key);
|
|
3358
|
-
return c.json({
|
|
3359
|
-
success: true,
|
|
3360
|
-
key
|
|
3361
|
-
});
|
|
3362
|
-
} catch (error) {
|
|
3363
|
-
const msg = error instanceof Error ? error.message : String(error);
|
|
3364
|
-
return c.json({ error: `Failed to stop workflow: ${msg}` }, 500);
|
|
3365
|
-
}
|
|
3366
|
-
}
|
|
3367
|
-
app.delete("/workflows/:name/:tag", (c) => deleteWorkflow(c, c.req.param("name"), c.req.param("tag")));
|
|
3368
|
-
app.delete("/workflows/:name", (c) => deleteWorkflow(c, c.req.param("name"), "main"));
|
|
3369
|
-
return app;
|
|
3370
|
-
}
|
|
3371
|
-
async function startDaemon(config = {}) {
|
|
3372
|
-
const existing = isDaemonRunning();
|
|
3373
|
-
if (existing) {
|
|
3374
|
-
console.error(`Daemon already running: pid=${existing.pid} port=${existing.port}`);
|
|
3375
|
-
process.exit(1);
|
|
3376
|
-
}
|
|
3377
|
-
const host = config.host ?? "127.0.0.1";
|
|
3378
|
-
const store = config.store ?? new MemoryStateStore();
|
|
3379
|
-
const token = randomUUID();
|
|
3380
|
-
const server = await startHttpServer(createDaemonApp({
|
|
3381
|
-
getState: () => state,
|
|
3382
|
-
token
|
|
3383
|
-
}), {
|
|
3384
|
-
port: config.port ?? DEFAULT_PORT,
|
|
3385
|
-
hostname: host
|
|
3386
|
-
});
|
|
3387
|
-
const actualPort = server.port;
|
|
3388
|
-
const startedAt = (/* @__PURE__ */ new Date()).toISOString();
|
|
3389
|
-
writeDaemonInfo({
|
|
3390
|
-
pid: process.pid,
|
|
3391
|
-
host,
|
|
3392
|
-
port: actualPort,
|
|
3393
|
-
startedAt,
|
|
3394
|
-
token
|
|
3395
|
-
});
|
|
3396
|
-
state = {
|
|
3397
|
-
configs: /* @__PURE__ */ new Map(),
|
|
3398
|
-
loops: /* @__PURE__ */ new Map(),
|
|
3399
|
-
workflows: /* @__PURE__ */ new Map(),
|
|
3400
|
-
store,
|
|
3401
|
-
server,
|
|
3402
|
-
port: actualPort,
|
|
3403
|
-
host,
|
|
3404
|
-
startedAt
|
|
3405
|
-
};
|
|
3406
|
-
console.log(`Daemon started: pid=${process.pid}`);
|
|
3407
|
-
console.log(`Listening: http://${host}:${actualPort}`);
|
|
3408
|
-
console.log(`MCP: http://${host}:${actualPort}/mcp`);
|
|
3409
|
-
process.on("SIGINT", () => {
|
|
3410
|
-
console.log("\nShutting down...");
|
|
3411
|
-
gracefulShutdown();
|
|
3412
|
-
});
|
|
3413
|
-
process.on("SIGTERM", () => {
|
|
3414
|
-
gracefulShutdown();
|
|
3415
|
-
});
|
|
3416
|
-
}
|
|
3417
|
-
|
|
3418
|
-
//#endregion
|
|
3419
|
-
//#region src/cli/client.ts
|
|
3420
|
-
/**
|
|
3421
|
-
* CLI client — HTTP client for daemon REST API.
|
|
3422
|
-
*
|
|
3423
|
-
* Talks to the 9-endpoint daemon:
|
|
3424
|
-
* GET /health, POST /shutdown
|
|
3425
|
-
* GET/POST /agents, GET/DELETE /agents/:name
|
|
3426
|
-
* POST /run (SSE), POST /serve
|
|
3427
|
-
* ALL /mcp
|
|
3428
|
-
*/
|
|
3429
|
-
var client_exports = /* @__PURE__ */ __exportAll({
|
|
3430
|
-
createAgent: () => createAgent,
|
|
3431
|
-
deleteAgent: () => deleteAgent,
|
|
3432
|
-
health: () => health,
|
|
3433
|
-
isDaemonActive: () => isDaemonActive,
|
|
3434
|
-
listAgents: () => listAgents,
|
|
3435
|
-
run: () => run,
|
|
3436
|
-
serve: () => serve,
|
|
3437
|
-
shutdown: () => shutdown,
|
|
3438
|
-
startWorkflow: () => startWorkflow,
|
|
3439
|
-
stopWorkflow: () => stopWorkflow
|
|
3440
|
-
});
|
|
3441
|
-
const MAX_RETRIES = 3;
|
|
3442
|
-
const BASE_DELAY_MS = 200;
|
|
3443
|
-
function isRetryableError(error) {
|
|
3444
|
-
if (error instanceof TypeError) return true;
|
|
3445
|
-
if (error instanceof Error) {
|
|
3446
|
-
const code = error.code;
|
|
3447
|
-
return code === "ECONNREFUSED" || code === "ECONNRESET";
|
|
3448
|
-
}
|
|
3449
|
-
return false;
|
|
3450
|
-
}
|
|
3451
|
-
function getDaemonConnection() {
|
|
3452
|
-
const daemon = isDaemonRunning();
|
|
3453
|
-
if (!daemon) return null;
|
|
3454
|
-
return {
|
|
3455
|
-
url: `http://${daemon.host}:${daemon.port}`,
|
|
3456
|
-
token: daemon.token
|
|
3457
|
-
};
|
|
3458
|
-
}
|
|
3459
|
-
function requireDaemon() {
|
|
3460
|
-
const conn = getDaemonConnection();
|
|
3461
|
-
if (!conn) throw new Error("No daemon running. Start one with: agent-worker daemon");
|
|
3462
|
-
return conn;
|
|
3463
|
-
}
|
|
3464
|
-
/** Build headers with auth token */
|
|
3465
|
-
function authHeaders(token, extra) {
|
|
3466
|
-
const headers = { ...extra };
|
|
3467
|
-
if (token) headers["Authorization"] = `Bearer ${token}`;
|
|
3468
|
-
return headers;
|
|
3469
|
-
}
|
|
3470
|
-
async function request(method, path, body) {
|
|
3471
|
-
const { url: baseUrl, token } = requireDaemon();
|
|
3472
|
-
let lastError;
|
|
3473
|
-
for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) try {
|
|
3474
|
-
const init = {
|
|
3475
|
-
method,
|
|
3476
|
-
headers: authHeaders(token, body !== void 0 ? { "Content-Type": "application/json" } : void 0),
|
|
3477
|
-
body: body !== void 0 ? JSON.stringify(body) : void 0,
|
|
3478
|
-
signal: AbortSignal.timeout(6e4)
|
|
3479
|
-
};
|
|
3480
|
-
return await (await fetch(`${baseUrl}${path}`, init)).json();
|
|
3481
|
-
} catch (error) {
|
|
3482
|
-
lastError = error;
|
|
3483
|
-
if (attempt < MAX_RETRIES && isRetryableError(error)) {
|
|
3484
|
-
const delay = BASE_DELAY_MS * Math.pow(2, attempt);
|
|
3485
|
-
await new Promise((resolve) => setTimeout(resolve, delay));
|
|
3486
|
-
} else break;
|
|
3487
|
-
}
|
|
3488
|
-
return {
|
|
3489
|
-
success: false,
|
|
3490
|
-
error: `Connection failed: ${lastError instanceof Error ? lastError.message : String(lastError)}`
|
|
3491
|
-
};
|
|
3492
|
-
}
|
|
3493
|
-
/** GET /health */
|
|
3494
|
-
function health() {
|
|
3495
|
-
return request("GET", "/health");
|
|
3496
|
-
}
|
|
3497
|
-
/** POST /shutdown */
|
|
3498
|
-
function shutdown() {
|
|
3499
|
-
return request("POST", "/shutdown");
|
|
3500
|
-
}
|
|
3501
|
-
/** GET /agents */
|
|
3502
|
-
function listAgents() {
|
|
3503
|
-
return request("GET", "/agents");
|
|
3504
|
-
}
|
|
3505
|
-
/** POST /agents */
|
|
3506
|
-
function createAgent(body) {
|
|
3507
|
-
return request("POST", "/agents", body);
|
|
3508
|
-
}
|
|
3509
|
-
/** DELETE /agents/:name */
|
|
3510
|
-
function deleteAgent(name) {
|
|
3511
|
-
return request("DELETE", `/agents/${encodeURIComponent(name)}`);
|
|
3512
|
-
}
|
|
3513
|
-
/** POST /serve (sync JSON response) */
|
|
3514
|
-
function serve(body) {
|
|
3515
|
-
return request("POST", "/serve", body);
|
|
3516
|
-
}
|
|
3517
|
-
/**
|
|
3518
|
-
* POST /run (SSE stream).
|
|
3519
|
-
* Calls onChunk for each chunk, returns final response.
|
|
3520
|
-
*/
|
|
3521
|
-
async function run(body, onChunk) {
|
|
3522
|
-
let baseUrl;
|
|
3523
|
-
let token;
|
|
3524
|
-
try {
|
|
3525
|
-
const conn = requireDaemon();
|
|
3526
|
-
baseUrl = conn.url;
|
|
3527
|
-
token = conn.token;
|
|
3528
|
-
} catch (error) {
|
|
3529
|
-
return {
|
|
3530
|
-
success: false,
|
|
3531
|
-
error: error instanceof Error ? error.message : String(error)
|
|
3532
|
-
};
|
|
3533
|
-
}
|
|
3534
|
-
try {
|
|
3535
|
-
const res = await fetch(`${baseUrl}/run`, {
|
|
3536
|
-
method: "POST",
|
|
3537
|
-
headers: authHeaders(token, { "Content-Type": "application/json" }),
|
|
3538
|
-
body: JSON.stringify(body)
|
|
3539
|
-
});
|
|
3540
|
-
if (!res.ok || !res.body) return await res.json();
|
|
3541
|
-
const reader = res.body.getReader();
|
|
3542
|
-
const decoder = new TextDecoder();
|
|
3543
|
-
let buffer = "";
|
|
3544
|
-
let finalResponse = { success: true };
|
|
3545
|
-
while (true) {
|
|
3546
|
-
const { value, done } = await reader.read();
|
|
3547
|
-
if (done) break;
|
|
3548
|
-
buffer += decoder.decode(value, { stream: true });
|
|
3549
|
-
const lines = buffer.split("\n");
|
|
3550
|
-
buffer = lines.pop() ?? "";
|
|
3551
|
-
let currentEvent = "";
|
|
3552
|
-
for (const line of lines) if (line.startsWith("event: ")) currentEvent = line.slice(7);
|
|
3553
|
-
else if (line.startsWith("data: ")) {
|
|
3554
|
-
const data = line.slice(6);
|
|
3555
|
-
try {
|
|
3556
|
-
const parsed = JSON.parse(data);
|
|
3557
|
-
if (currentEvent === "chunk" && onChunk) onChunk(parsed);
|
|
3558
|
-
else if (currentEvent === "done") finalResponse = parsed;
|
|
3559
|
-
else if (currentEvent === "error") return {
|
|
3560
|
-
success: false,
|
|
3561
|
-
error: parsed.error
|
|
3562
|
-
};
|
|
3563
|
-
} catch {}
|
|
3564
|
-
}
|
|
3565
|
-
}
|
|
3566
|
-
return finalResponse;
|
|
3567
|
-
} catch (error) {
|
|
3568
|
-
return {
|
|
3569
|
-
success: false,
|
|
3570
|
-
error: `Connection failed: ${error instanceof Error ? error.message : String(error)}`
|
|
3571
|
-
};
|
|
3572
|
-
}
|
|
3573
|
-
}
|
|
3574
|
-
/** POST /workflows — start a workflow via daemon */
|
|
3575
|
-
function startWorkflow(body) {
|
|
3576
|
-
return request("POST", "/workflows", body);
|
|
3577
|
-
}
|
|
3578
|
-
/** DELETE /workflows/:name/:tag — stop a workflow */
|
|
3579
|
-
function stopWorkflow(name, tag = "main") {
|
|
3580
|
-
return request("DELETE", `/workflows/${encodeURIComponent(name)}/${encodeURIComponent(tag)}`);
|
|
3581
|
-
}
|
|
3582
|
-
/** Check if daemon is running */
|
|
3583
|
-
function isDaemonActive() {
|
|
3584
|
-
return getDaemonConnection() !== null;
|
|
3585
|
-
}
|
|
3586
|
-
|
|
3587
|
-
//#endregion
|
|
3588
|
-
//#region src/cli/output.ts
|
|
3589
|
-
var output_exports = /* @__PURE__ */ __exportAll({ outputJson: () => outputJson });
|
|
3590
|
-
/**
|
|
3591
|
-
* CLI Output Utilities
|
|
3592
|
-
*
|
|
3593
|
-
* Rules:
|
|
3594
|
-
* - --json mode: stdout = pure JSON data only, everything else to stderr
|
|
3595
|
-
* - Errors: always to stderr via console.error + process.exit(1)
|
|
3596
|
-
* - Exit codes: 0 = success, 1 = failure (authoritative for agent callers)
|
|
3597
|
-
*/
|
|
3598
|
-
/**
|
|
3599
|
-
* Output JSON data to stdout.
|
|
3600
|
-
* Use this instead of raw console.log(JSON.stringify(...)) for consistency.
|
|
3601
|
-
*/
|
|
3602
|
-
function outputJson(data) {
|
|
3603
|
-
console.log(JSON.stringify(data, null, 2));
|
|
3604
|
-
}
|
|
3605
|
-
|
|
3606
|
-
//#endregion
|
|
3607
|
-
//#region src/agent/definition.ts
|
|
3608
|
-
/**
|
|
3609
|
-
* AgentDefinition — Top-level persistent agent identity.
|
|
3610
|
-
*
|
|
3611
|
-
* This is the NEW AgentDefinition from AGENT-TOP-LEVEL architecture.
|
|
3612
|
-
* It describes WHO an agent is (prompt, soul, context) — not how it runs in a workflow.
|
|
3613
|
-
*
|
|
3614
|
-
* Loaded from .agents/*.yaml files. Workflows reference agents by name.
|
|
3615
|
-
*
|
|
3616
|
-
* Distinct from:
|
|
3617
|
-
* - WorkflowAgentDef (workflow/types.ts) — inline agent config within a workflow
|
|
3618
|
-
* - AgentConfig (agent/config.ts) — runtime config for daemon-created agents
|
|
3619
|
-
*/
|
|
3620
|
-
/**
|
|
3621
|
-
* Standard subdirectories within an agent's context directory.
|
|
3622
|
-
* Created automatically when the agent is loaded.
|
|
3623
|
-
*/
|
|
3624
|
-
const CONTEXT_SUBDIRS = [
|
|
3625
|
-
"memory",
|
|
3626
|
-
"notes",
|
|
3627
|
-
"conversations",
|
|
3628
|
-
"todo"
|
|
3629
|
-
];
|
|
3630
|
-
const AgentSoulSchema = z$1.object({
|
|
3631
|
-
role: z$1.string().optional(),
|
|
3632
|
-
expertise: z$1.array(z$1.string()).optional(),
|
|
3633
|
-
style: z$1.string().optional(),
|
|
3634
|
-
principles: z$1.array(z$1.string()).optional()
|
|
3635
|
-
}).passthrough();
|
|
3636
|
-
const ProviderConfigSchema = z$1.object({
|
|
3637
|
-
name: z$1.string(),
|
|
3638
|
-
base_url: z$1.string().optional(),
|
|
3639
|
-
api_key: z$1.string().optional()
|
|
3640
|
-
}).passthrough();
|
|
3641
|
-
const AgentPromptConfigSchema = z$1.union([z$1.object({
|
|
3642
|
-
system: z$1.string(),
|
|
3643
|
-
system_file: z$1.undefined().optional()
|
|
3644
|
-
}), z$1.object({
|
|
3645
|
-
system_file: z$1.string(),
|
|
3646
|
-
system: z$1.undefined().optional()
|
|
3647
|
-
})]);
|
|
3648
|
-
const AgentContextConfigSchema = z$1.object({
|
|
3649
|
-
dir: z$1.string().optional(),
|
|
3650
|
-
thin_thread: z$1.number().int().min(1).optional()
|
|
3651
|
-
});
|
|
3652
|
-
const ScheduleConfigSchema = z$1.object({
|
|
3653
|
-
wakeup: z$1.union([z$1.string(), z$1.number()]),
|
|
3654
|
-
prompt: z$1.string().optional()
|
|
3655
|
-
});
|
|
3656
|
-
const AgentDefinitionSchema = z$1.object({
|
|
3657
|
-
name: z$1.string().min(1),
|
|
3658
|
-
model: z$1.string().min(1),
|
|
3659
|
-
backend: z$1.enum([
|
|
3660
|
-
"sdk",
|
|
3661
|
-
"claude",
|
|
3662
|
-
"cursor",
|
|
3663
|
-
"codex",
|
|
3664
|
-
"opencode",
|
|
3665
|
-
"mock"
|
|
3666
|
-
]).optional(),
|
|
3667
|
-
provider: z$1.union([z$1.string(), ProviderConfigSchema]).optional(),
|
|
3668
|
-
prompt: AgentPromptConfigSchema,
|
|
3669
|
-
soul: AgentSoulSchema.optional(),
|
|
3670
|
-
context: AgentContextConfigSchema.optional(),
|
|
3671
|
-
max_tokens: z$1.number().int().positive().optional(),
|
|
3672
|
-
max_steps: z$1.number().int().positive().optional(),
|
|
3673
|
-
schedule: ScheduleConfigSchema.optional()
|
|
3674
|
-
});
|
|
3675
|
-
|
|
3676
|
-
//#endregion
|
|
3677
|
-
//#region src/agent/agent-handle.ts
|
|
3678
|
-
/**
|
|
3679
|
-
* AgentHandle — Runtime wrapper for an agent definition + persistent context.
|
|
3680
|
-
*
|
|
3681
|
-
* Created by AgentRegistry when an agent is loaded. Provides:
|
|
3682
|
-
* - Context directory management (memory/, notes/, conversations/, todo/)
|
|
3683
|
-
* - Read/write operations for personal context
|
|
3684
|
-
* - State tracking (idle, running, stopped, error)
|
|
3685
|
-
*
|
|
3686
|
-
* Phase 1 scope: context directory + read/write ops.
|
|
3687
|
-
* Phase 3 adds: loop, workspaces, threads.
|
|
3688
|
-
*/
|
|
3689
|
-
var AgentHandle = class {
|
|
3690
|
-
/** Agent definition (from YAML) */
|
|
3691
|
-
definition;
|
|
3692
|
-
/** Absolute path to agent's persistent context directory */
|
|
3693
|
-
contextDir;
|
|
3694
|
-
/** Current agent state */
|
|
3695
|
-
state = "idle";
|
|
3696
|
-
constructor(definition, contextDir) {
|
|
3697
|
-
this.definition = definition;
|
|
3698
|
-
this.contextDir = contextDir;
|
|
3699
|
-
}
|
|
3700
|
-
/** Agent name (convenience accessor) */
|
|
3701
|
-
get name() {
|
|
3702
|
-
return this.definition.name;
|
|
3703
|
-
}
|
|
3704
|
-
/**
|
|
3705
|
-
* Ensure the context directory and all subdirectories exist.
|
|
3706
|
-
* Called on agent load/creation. Idempotent.
|
|
3707
|
-
*/
|
|
3708
|
-
ensureContextDir() {
|
|
3709
|
-
for (const sub of CONTEXT_SUBDIRS) mkdirSync(join(this.contextDir, sub), { recursive: true });
|
|
3710
|
-
}
|
|
3711
|
-
/**
|
|
3712
|
-
* Read all memory entries as key-value records.
|
|
3713
|
-
* Memory files are YAML in memory/<key>.yaml.
|
|
3714
|
-
*/
|
|
3715
|
-
async readMemory() {
|
|
3716
|
-
const memDir = join(this.contextDir, "memory");
|
|
3717
|
-
if (!existsSync(memDir)) return {};
|
|
3718
|
-
const result = {};
|
|
3719
|
-
const files = await readdir(memDir);
|
|
3720
|
-
for (const file of files) {
|
|
3721
|
-
if (!file.endsWith(".yaml") && !file.endsWith(".yml")) continue;
|
|
3722
|
-
const key = basename(file).replace(/\.ya?ml$/i, "");
|
|
3723
|
-
try {
|
|
3724
|
-
result[key] = parse(await readFile(join(memDir, file), "utf-8"));
|
|
3725
|
-
} catch (err) {
|
|
3726
|
-
console.warn(`Skipping malformed memory file ${file}:`, err);
|
|
3727
|
-
}
|
|
3728
|
-
}
|
|
3729
|
-
return result;
|
|
3730
|
-
}
|
|
3731
|
-
/**
|
|
3732
|
-
* Write a memory entry. Creates/overwrites memory/<key>.yaml.
|
|
3733
|
-
*/
|
|
3734
|
-
async writeMemory(key, value) {
|
|
3735
|
-
const memDir = join(this.contextDir, "memory");
|
|
3736
|
-
await mkdir(memDir, { recursive: true });
|
|
3737
|
-
await writeFile(join(memDir, `${key}.yaml`), stringify(value));
|
|
3738
|
-
}
|
|
3739
|
-
/**
|
|
3740
|
-
* Read agent's notes, most recent first.
|
|
3741
|
-
* Notes are markdown files in notes/.
|
|
3742
|
-
*/
|
|
3743
|
-
async readNotes(limit) {
|
|
3744
|
-
const notesDir = join(this.contextDir, "notes");
|
|
3745
|
-
if (!existsSync(notesDir)) return [];
|
|
3746
|
-
const files = (await readdir(notesDir)).filter((f) => f.endsWith(".md")).sort().reverse();
|
|
3747
|
-
const selected = limit ? files.slice(0, limit) : files;
|
|
3748
|
-
return Promise.all(selected.map((f) => readFile(join(notesDir, f), "utf-8")));
|
|
3749
|
-
}
|
|
3750
|
-
/**
|
|
3751
|
-
* Append a note. Creates notes/<date>-<slug>.md.
|
|
3752
|
-
*/
|
|
3753
|
-
async appendNote(content, slug) {
|
|
3754
|
-
const notesDir = join(this.contextDir, "notes");
|
|
3755
|
-
await mkdir(notesDir, { recursive: true });
|
|
3756
|
-
const filename = `${(/* @__PURE__ */ new Date()).toISOString().slice(0, 10)}-${slug ?? `note-${Date.now().toString(36)}`}.md`;
|
|
3757
|
-
await writeFile(join(notesDir, filename), content);
|
|
3758
|
-
return filename;
|
|
3759
|
-
}
|
|
3760
|
-
/**
|
|
3761
|
-
* Read active todos from todo/index.md.
|
|
3762
|
-
* Returns lines that look like incomplete tasks: "- [ ] ..."
|
|
3763
|
-
*/
|
|
3764
|
-
async readTodos() {
|
|
3765
|
-
const todoFile = join(this.contextDir, "todo", "index.md");
|
|
3766
|
-
if (!existsSync(todoFile)) return [];
|
|
3767
|
-
return (await readFile(todoFile, "utf-8")).split("\n").filter((line) => line.match(/^\s*-\s*\[\s*\]/)).map((line) => line.replace(/^\s*-\s*\[\s*\]\s*/, "").trim());
|
|
3768
|
-
}
|
|
3769
|
-
/**
|
|
3770
|
-
* Write the full todo list. Replaces todo/index.md.
|
|
3771
|
-
*/
|
|
3772
|
-
async writeTodos(todos) {
|
|
3773
|
-
const todoDir = join(this.contextDir, "todo");
|
|
3774
|
-
await mkdir(todoDir, { recursive: true });
|
|
3775
|
-
const content = todos.map((t) => `- [ ] ${t}`).join("\n") + "\n";
|
|
3776
|
-
await writeFile(join(todoDir, "index.md"), content);
|
|
3777
|
-
}
|
|
3778
|
-
};
|
|
3779
|
-
|
|
3780
|
-
//#endregion
|
|
3781
|
-
//#region src/agent/yaml-parser.ts
|
|
3782
|
-
/**
|
|
3783
|
-
* Agent YAML Parser — Load agent definitions from .agents/*.yaml files.
|
|
3784
|
-
*
|
|
3785
|
-
* Handles:
|
|
3786
|
-
* - Single file: parseAgentFile("path/to/alice.yaml")
|
|
3787
|
-
* - Directory: discoverAgents("path/to/project") → scans .agents/*.yaml
|
|
3788
|
-
* - Validation: Zod schema + semantic checks (system XOR system_file)
|
|
3789
|
-
* - Resolution: system_file → reads content into system (relative to YAML dir)
|
|
3790
|
-
*
|
|
3791
|
-
* The name field is optional in YAML — defaults to filename (without .yaml).
|
|
3792
|
-
*/
|
|
3793
|
-
/** Default directory for agent definitions (relative to project root) */
|
|
3794
|
-
const AGENTS_DIR = ".agents";
|
|
3795
|
-
/**
|
|
3796
|
-
* Parse an agent definition from a YAML file.
|
|
3797
|
-
*
|
|
3798
|
-
* Validates the schema, resolves system_file to inline content,
|
|
3799
|
-
* and infers name from filename if not specified.
|
|
3800
|
-
*
|
|
3801
|
-
* @throws Error if file doesn't exist, YAML is malformed, or validation fails.
|
|
3802
|
-
*/
|
|
3803
|
-
function parseAgentFile(filePath) {
|
|
3804
|
-
if (!existsSync(filePath)) throw new Error(`Agent file not found: ${filePath}`);
|
|
3805
|
-
const data = parse(readFileSync(filePath, "utf-8"));
|
|
3806
|
-
if (!data || typeof data !== "object") throw new Error(`Invalid YAML in ${filePath}: expected an object`);
|
|
3807
|
-
const obj = data;
|
|
3808
|
-
if (!obj.name) obj.name = basename(filePath).replace(/\.ya?ml$/i, "");
|
|
3809
|
-
const result = AgentDefinitionSchema.safeParse(obj);
|
|
3810
|
-
if (!result.success) {
|
|
3811
|
-
const issues = result.error.issues.map((i) => ` ${i.path.join(".")}: ${i.message}`).join("\n");
|
|
3812
|
-
throw new Error(`Invalid agent definition in ${filePath}:\n${issues}`);
|
|
3813
|
-
}
|
|
3814
|
-
const def = result.data;
|
|
3815
|
-
if (def.prompt.system_file) {
|
|
3816
|
-
const promptPath = join(dirname(filePath), def.prompt.system_file);
|
|
3817
|
-
if (!existsSync(promptPath)) throw new Error(`system_file not found: ${def.prompt.system_file} (resolved: ${promptPath})`);
|
|
3818
|
-
const content = readFileSync(promptPath, "utf-8");
|
|
3819
|
-
return {
|
|
3820
|
-
...def,
|
|
3821
|
-
prompt: { system: content }
|
|
3822
|
-
};
|
|
3823
|
-
}
|
|
3824
|
-
return def;
|
|
3825
|
-
}
|
|
3826
|
-
/**
|
|
3827
|
-
* Discover all agent YAML files in a project's .agents/ directory.
|
|
3828
|
-
* Returns parsed and validated definitions.
|
|
3829
|
-
*
|
|
3830
|
-
* Non-fatal: logs warnings for invalid files, skips them.
|
|
3831
|
-
*
|
|
3832
|
-
* @param projectDir - Project root directory
|
|
3833
|
-
* @param log - Optional warning logger (default: console.warn)
|
|
3834
|
-
* @returns Array of valid agent definitions
|
|
3835
|
-
*/
|
|
3836
|
-
function discoverAgents(projectDir, log) {
|
|
3837
|
-
const agentsDir = join(projectDir, AGENTS_DIR);
|
|
3838
|
-
if (!existsSync(agentsDir)) return [];
|
|
3839
|
-
const warn = log ?? console.warn;
|
|
3840
|
-
const agents = [];
|
|
3841
|
-
let entries;
|
|
3842
|
-
try {
|
|
3843
|
-
entries = readdirSync(agentsDir);
|
|
3844
|
-
} catch {
|
|
3845
|
-
return [];
|
|
3846
|
-
}
|
|
3847
|
-
for (const entry of entries) {
|
|
3848
|
-
if (!entry.endsWith(".yaml") && !entry.endsWith(".yml")) continue;
|
|
3849
|
-
const filePath = join(agentsDir, entry);
|
|
3850
|
-
try {
|
|
3851
|
-
agents.push(parseAgentFile(filePath));
|
|
3852
|
-
} catch (err) {
|
|
3853
|
-
warn(`Skipping ${entry}: ${err instanceof Error ? err.message : String(err)}`);
|
|
3854
|
-
}
|
|
3855
|
-
}
|
|
3856
|
-
return agents;
|
|
3857
|
-
}
|
|
3858
|
-
/**
|
|
3859
|
-
* Serialize an agent definition to YAML string.
|
|
3860
|
-
* Used by CLI `agent create` to write .agents/<name>.yaml.
|
|
3861
|
-
*/
|
|
3862
|
-
function serializeAgent(def) {
|
|
3863
|
-
const obj = {
|
|
3864
|
-
name: def.name,
|
|
3865
|
-
model: def.model
|
|
3866
|
-
};
|
|
3867
|
-
if (def.backend) obj.backend = def.backend;
|
|
3868
|
-
if (def.provider) obj.provider = def.provider;
|
|
3869
|
-
obj.prompt = def.prompt;
|
|
3870
|
-
if (def.soul) obj.soul = def.soul;
|
|
3871
|
-
if (def.context) obj.context = def.context;
|
|
3872
|
-
if (def.max_tokens) obj.max_tokens = def.max_tokens;
|
|
3873
|
-
if (def.max_steps) obj.max_steps = def.max_steps;
|
|
3874
|
-
if (def.schedule) obj.schedule = def.schedule;
|
|
3875
|
-
return stringify(obj, { lineWidth: 120 });
|
|
3876
|
-
}
|
|
3877
|
-
|
|
3878
|
-
//#endregion
|
|
3879
|
-
//#region src/agent/agent-registry.ts
|
|
3880
|
-
/**
|
|
3881
|
-
* AgentRegistry — Loads and manages top-level agent definitions.
|
|
3882
|
-
*
|
|
3883
|
-
* Responsibilities:
|
|
3884
|
-
* - Discover agents from .agents/*.yaml
|
|
3885
|
-
* - Load definitions → create AgentHandles
|
|
3886
|
-
* - Register/unregister agents at runtime
|
|
3887
|
-
* - Ensure context directories exist
|
|
3888
|
-
* - Provide agent lookup by name
|
|
3889
|
-
*
|
|
3890
|
-
* Owned by the daemon. One registry per daemon process.
|
|
3891
|
-
*/
|
|
3892
|
-
var AgentRegistry = class {
|
|
3893
|
-
/** Loaded agent handles, keyed by name */
|
|
3894
|
-
agents = /* @__PURE__ */ new Map();
|
|
3895
|
-
/** Project root directory */
|
|
3896
|
-
projectDir;
|
|
3897
|
-
/** Agents directory (.agents/) */
|
|
3898
|
-
agentsDir;
|
|
3899
|
-
constructor(projectDir) {
|
|
3900
|
-
this.projectDir = projectDir;
|
|
3901
|
-
this.agentsDir = join(projectDir, AGENTS_DIR);
|
|
3902
|
-
}
|
|
3903
|
-
/**
|
|
3904
|
-
* Load all agents from .agents/*.yaml.
|
|
3905
|
-
* Skips invalid files (logs warnings).
|
|
3906
|
-
* Creates context directories for each loaded agent.
|
|
3907
|
-
*/
|
|
3908
|
-
loadFromDisk(log) {
|
|
3909
|
-
const defs = discoverAgents(this.projectDir, log);
|
|
3910
|
-
for (const def of defs) this.registerDefinition(def);
|
|
3911
|
-
}
|
|
3912
|
-
/**
|
|
3913
|
-
* Register an agent definition. Creates AgentHandle + ensures context dir.
|
|
3914
|
-
* Overwrites existing agent with same name (reload semantics).
|
|
3915
|
-
*/
|
|
3916
|
-
registerDefinition(def) {
|
|
3917
|
-
const handle = new AgentHandle(def, this.resolveContextDir(def));
|
|
3918
|
-
handle.ensureContextDir();
|
|
3919
|
-
this.agents.set(def.name, handle);
|
|
3920
|
-
return handle;
|
|
3921
|
-
}
|
|
3922
|
-
/**
|
|
3923
|
-
* Create a new agent: write YAML file + register.
|
|
3924
|
-
* @throws Error if agent already exists on disk.
|
|
3925
|
-
*/
|
|
3926
|
-
create(def) {
|
|
3927
|
-
const yamlPath = this.agentYamlPath(def.name);
|
|
3928
|
-
if (existsSync(yamlPath)) throw new Error(`Agent file already exists: ${yamlPath}`);
|
|
3929
|
-
mkdirSync(this.agentsDir, { recursive: true });
|
|
3930
|
-
writeFileSync(yamlPath, serializeAgent(def));
|
|
3931
|
-
return this.registerDefinition(def);
|
|
3932
|
-
}
|
|
3933
|
-
/**
|
|
3934
|
-
* Delete an agent: remove YAML file + context directory + unregister.
|
|
3935
|
-
* @returns true if agent existed and was deleted.
|
|
3936
|
-
*/
|
|
3937
|
-
delete(name) {
|
|
3938
|
-
const handle = this.agents.get(name);
|
|
3939
|
-
if (!handle) return false;
|
|
3940
|
-
this.agents.delete(name);
|
|
3941
|
-
const yamlPath = this.agentYamlPath(name);
|
|
3942
|
-
if (existsSync(yamlPath)) try {
|
|
3943
|
-
unlinkSync(yamlPath);
|
|
3944
|
-
} catch {}
|
|
3945
|
-
if (existsSync(handle.contextDir)) try {
|
|
3946
|
-
rmSync(handle.contextDir, {
|
|
3947
|
-
recursive: true,
|
|
3948
|
-
force: true
|
|
3949
|
-
});
|
|
3950
|
-
} catch {}
|
|
3951
|
-
return true;
|
|
3952
|
-
}
|
|
3953
|
-
/** Get agent handle by name */
|
|
3954
|
-
get(name) {
|
|
3955
|
-
return this.agents.get(name);
|
|
3956
|
-
}
|
|
3957
|
-
/** Check if agent exists */
|
|
3958
|
-
has(name) {
|
|
3959
|
-
return this.agents.has(name);
|
|
3960
|
-
}
|
|
3961
|
-
/** List all registered agent handles */
|
|
3962
|
-
list() {
|
|
3963
|
-
return [...this.agents.values()];
|
|
3964
|
-
}
|
|
3965
|
-
/** Number of registered agents */
|
|
3966
|
-
get size() {
|
|
3967
|
-
return this.agents.size;
|
|
3968
|
-
}
|
|
3969
|
-
/** Resolve agent's context directory (absolute path) */
|
|
3970
|
-
resolveContextDir(def) {
|
|
3971
|
-
if (def.context?.dir) return join(this.projectDir, def.context.dir);
|
|
3972
|
-
return join(this.agentsDir, def.name);
|
|
3973
|
-
}
|
|
3974
|
-
/** Path to agent's YAML file */
|
|
3975
|
-
agentYamlPath(name) {
|
|
3976
|
-
return join(this.agentsDir, `${name}.yaml`);
|
|
3977
|
-
}
|
|
3978
|
-
};
|
|
3979
|
-
|
|
3980
|
-
//#endregion
|
|
11
|
+
import { spawn } from "node:child_process";
|
|
3981
12
|
//#region src/cli/commands/agent.ts
|
|
3982
13
|
var agent_exports = /* @__PURE__ */ __exportAll({
|
|
3983
14
|
ensureDaemon: () => ensureDaemon,
|
|
3984
15
|
registerAgentCommands: () => registerAgentCommands
|
|
3985
16
|
});
|
|
3986
|
-
/**
|
|
3987
|
-
|
|
3988
|
-
|
|
3989
|
-
|
|
3990
|
-
|
|
3991
|
-
|
|
17
|
+
/** Start a detached daemon child process. */
|
|
18
|
+
function spawnDaemonProcess(port, host) {
|
|
19
|
+
const args = [
|
|
20
|
+
process.argv[1] ?? "",
|
|
21
|
+
"up",
|
|
22
|
+
"-f"
|
|
23
|
+
];
|
|
3992
24
|
if (port) args.push("--port", String(port));
|
|
3993
25
|
if (host) args.push("--host", host);
|
|
3994
26
|
spawn(process.execPath, args, {
|
|
3995
27
|
detached: true,
|
|
3996
28
|
stdio: "ignore"
|
|
3997
29
|
}).unref();
|
|
30
|
+
}
|
|
31
|
+
/**
|
|
32
|
+
* Ensure daemon is running. If not, start a detached child and wait for readiness.
|
|
33
|
+
*
|
|
34
|
+
* The detached child runs `agent-worker up -f` so the daemon itself stays in the
|
|
35
|
+
* child process foreground while the parent CLI command returns immediately.
|
|
36
|
+
*/
|
|
37
|
+
async function ensureDaemon(port, host) {
|
|
38
|
+
if (isDaemonRunning()) return;
|
|
39
|
+
spawnDaemonProcess(port, host);
|
|
3998
40
|
const maxWait = 5e3;
|
|
3999
41
|
const start = Date.now();
|
|
4000
42
|
while (Date.now() - start < maxWait) {
|
|
@@ -4005,14 +47,40 @@ async function ensureDaemon(port, host) {
|
|
|
4005
47
|
process.exit(1);
|
|
4006
48
|
}
|
|
4007
49
|
function registerAgentCommands(program) {
|
|
4008
|
-
program.command("
|
|
4009
|
-
|
|
4010
|
-
|
|
4011
|
-
|
|
4012
|
-
|
|
4013
|
-
|
|
50
|
+
program.command("up").description("Start daemon, load config.yml agents").option("-f, --foreground", "Run in foreground (for debugging)").option("--port <port>", `HTTP port (default: ${DEFAULT_PORT})`).option("--host <host>", "Host to bind to (default: 127.0.0.1)").addHelpText("after", `
|
|
51
|
+
Examples:
|
|
52
|
+
$ agent-worker up # Start daemon in background
|
|
53
|
+
$ agent-worker up -f # Start daemon in foreground
|
|
54
|
+
$ agent-worker up --port 5100 # Custom port
|
|
55
|
+
`).action(async (options) => {
|
|
56
|
+
if (options.foreground) {
|
|
57
|
+
const { startDaemon } = await import("../daemon-CwaHgxs6.mjs").then((n) => n.t);
|
|
58
|
+
await startDaemon({
|
|
59
|
+
port: options.port ? parseInt(options.port, 10) : void 0,
|
|
60
|
+
host: options.host
|
|
61
|
+
});
|
|
62
|
+
} else {
|
|
63
|
+
if (isDaemonRunning()) {
|
|
64
|
+
console.log("Daemon already running");
|
|
65
|
+
return;
|
|
66
|
+
}
|
|
67
|
+
await ensureDaemon(options.port ? parseInt(options.port, 10) : void 0, options.host);
|
|
68
|
+
console.log("Daemon started");
|
|
69
|
+
}
|
|
70
|
+
});
|
|
71
|
+
program.command("down").description("Stop daemon (all agents and workspaces)").action(async () => {
|
|
72
|
+
if (!isDaemonActive()) {
|
|
73
|
+
console.log("No daemon running");
|
|
74
|
+
return;
|
|
75
|
+
}
|
|
76
|
+
const res = await shutdown();
|
|
77
|
+
if (res.success) console.log("Daemon stopped");
|
|
78
|
+
else {
|
|
79
|
+
console.error("Error:", res.error);
|
|
80
|
+
process.exit(1);
|
|
81
|
+
}
|
|
4014
82
|
});
|
|
4015
|
-
program.command("new <name>").description("Create a new agent").option("-m, --model <model>", `Model identifier (default: ${getDefaultModel()})`).addOption(new Option("-b, --backend <type>", "Backend type").choices([
|
|
83
|
+
program.command("new <name>").description("Create a new ephemeral agent").option("-m, --model <model>", `Model identifier (default: ${getDefaultModel()})`).addOption(new Option("-b, --backend <type>", "Backend type").choices([
|
|
4016
84
|
"default",
|
|
4017
85
|
"sdk",
|
|
4018
86
|
"claude",
|
|
@@ -4020,11 +88,10 @@ function registerAgentCommands(program) {
|
|
|
4020
88
|
"cursor",
|
|
4021
89
|
"opencode",
|
|
4022
90
|
"mock"
|
|
4023
|
-
]).default("default")).option("--provider <name>", "Provider SDK name (e.g., anthropic, openai)").option("--base-url <url>", "Override provider base URL").option("--api-key <ref>", "API key env var (e.g., $MINIMAX_API_KEY)").option("-s, --system <prompt>", "System prompt", "You are a helpful assistant.").option("-f, --system-file <file>", "Read system prompt from file").option("--
|
|
91
|
+
]).default("default")).option("--provider <name>", "Provider SDK name (e.g., anthropic, openai)").option("--base-url <url>", "Override provider base URL").option("--api-key <ref>", "API key env var (e.g., $MINIMAX_API_KEY)").option("-s, --system <prompt>", "System prompt", "You are a helpful assistant.").option("-f, --system-file <file>", "Read system prompt from file").option("--wakeup <interval|cron>", "Periodic wakeup schedule (e.g., 30s, 5m, 0 9 * * 1-5)").option("--wakeup-prompt <text>", "Custom prompt for wakeup events").option("--port <port>", `Daemon port if starting new daemon (default: ${DEFAULT_PORT})`).option("--host <host>", "Daemon host (default: 127.0.0.1)").option("--json", "Output as JSON").addHelpText("after", `
|
|
4024
92
|
Examples:
|
|
4025
93
|
$ agent-worker new alice -m anthropic/claude-sonnet-4-5
|
|
4026
94
|
$ agent-worker new bot -b mock
|
|
4027
|
-
$ agent-worker new reviewer --workflow review --tag pr-123
|
|
4028
95
|
$ agent-worker new monitor --wakeup 30s --system "Check status"
|
|
4029
96
|
$ agent-worker new coder -m MiniMax-M2.5 --provider anthropic --base-url https://api.minimax.io/anthropic/v1 --api-key '$MINIMAX_API_KEY'
|
|
4030
97
|
`).action(async (name, options) => {
|
|
@@ -4055,9 +122,8 @@ Examples:
|
|
|
4055
122
|
system,
|
|
4056
123
|
backend,
|
|
4057
124
|
provider,
|
|
4058
|
-
|
|
4059
|
-
|
|
4060
|
-
schedule
|
|
125
|
+
schedule,
|
|
126
|
+
ephemeral: true
|
|
4061
127
|
});
|
|
4062
128
|
if (res.error) {
|
|
4063
129
|
console.error("Error:", res.error);
|
|
@@ -4066,7 +132,29 @@ Examples:
|
|
|
4066
132
|
if (options.json) outputJson(res);
|
|
4067
133
|
else console.log(`${name} (${model})`);
|
|
4068
134
|
});
|
|
4069
|
-
program.command("
|
|
135
|
+
program.command("rm <name>").description("Remove an ephemeral agent").addHelpText("after", `
|
|
136
|
+
Removes an ephemeral agent created with 'new'.
|
|
137
|
+
Config agents (defined in config.yml) cannot be removed — edit config.yml instead.
|
|
138
|
+
|
|
139
|
+
Examples:
|
|
140
|
+
$ agent-worker rm alice
|
|
141
|
+
`).action(async (name) => {
|
|
142
|
+
if (!isDaemonActive()) {
|
|
143
|
+
console.error("No daemon running");
|
|
144
|
+
process.exit(1);
|
|
145
|
+
}
|
|
146
|
+
if (((await health()).configAgents ?? []).includes(name)) {
|
|
147
|
+
console.error(`Error: "${name}" is defined in config.yml — edit config to remove`);
|
|
148
|
+
process.exit(1);
|
|
149
|
+
}
|
|
150
|
+
const res = await deleteAgent(name);
|
|
151
|
+
if (res.success) console.log(`Removed: ${name}`);
|
|
152
|
+
else {
|
|
153
|
+
console.error("Error:", res.error);
|
|
154
|
+
process.exit(1);
|
|
155
|
+
}
|
|
156
|
+
});
|
|
157
|
+
program.command("ls").description("List running agents").option("--json", "Output as JSON").addHelpText("after", `
|
|
4070
158
|
Examples:
|
|
4071
159
|
$ agent-worker ls
|
|
4072
160
|
$ agent-worker ls --json
|
|
@@ -4091,38 +179,31 @@ Examples:
|
|
|
4091
179
|
return;
|
|
4092
180
|
}
|
|
4093
181
|
for (const a of agents) {
|
|
4094
|
-
const wf = a.workflow ? a.tag
|
|
182
|
+
const wf = a.workflow ? a.tag ? `@${a.workflow}:${a.tag}` : `@${a.workflow}` : "";
|
|
4095
183
|
const info = a.model || a.state || "";
|
|
4096
184
|
console.log(`${a.name.padEnd(12)} ${info.padEnd(30)} ${wf}`);
|
|
4097
185
|
}
|
|
4098
186
|
});
|
|
4099
|
-
program.command("stop [name]").description("Stop agent
|
|
187
|
+
program.command("stop [name]").description("Stop agent or workspace").addHelpText("after", `
|
|
4100
188
|
Examples:
|
|
4101
189
|
$ agent-worker stop alice # Stop specific agent
|
|
4102
|
-
$ agent-worker stop @review:pr-123 # Stop
|
|
4103
|
-
$ agent-worker stop @review # Stop
|
|
4104
|
-
|
|
4105
|
-
`).action(async (name, options) => {
|
|
190
|
+
$ agent-worker stop @review:pr-123 # Stop workspace
|
|
191
|
+
$ agent-worker stop @review # Stop workspace (no tag)
|
|
192
|
+
`).action(async (name) => {
|
|
4106
193
|
if (!isDaemonActive()) {
|
|
4107
194
|
console.error("No daemon running");
|
|
4108
195
|
process.exit(1);
|
|
4109
196
|
}
|
|
4110
|
-
if (options.all) {
|
|
4111
|
-
const res = await shutdown();
|
|
4112
|
-
if (res.success) console.log("Daemon stopped");
|
|
4113
|
-
else console.error("Error:", res.error);
|
|
4114
|
-
return;
|
|
4115
|
-
}
|
|
4116
197
|
if (!name) {
|
|
4117
|
-
console.error("Specify agent name
|
|
198
|
+
console.error("Specify agent name or @workspace[:tag]. Use 'down' to stop daemon.");
|
|
4118
199
|
process.exit(1);
|
|
4119
200
|
}
|
|
4120
|
-
const { parseTarget } = await
|
|
201
|
+
const { parseTarget } = await import("../target-9yiBRXxa.mjs").then((n) => n.r);
|
|
4121
202
|
const target = parseTarget(name);
|
|
4122
203
|
let res;
|
|
4123
204
|
if (target.agent === void 0) {
|
|
4124
|
-
const { stopWorkflow: stopWf } = await
|
|
4125
|
-
res = await stopWf(target.
|
|
205
|
+
const { stopWorkflow: stopWf } = await import("../client-DAKkzdOn.mjs").then((n) => n.t);
|
|
206
|
+
res = await stopWf(target.workspace, target.tag);
|
|
4126
207
|
} else res = await deleteAgent(target.agent);
|
|
4127
208
|
if (res.success) console.log(`Stopped: ${target.display}`);
|
|
4128
209
|
else {
|
|
@@ -4144,9 +225,9 @@ Examples:
|
|
|
4144
225
|
console.log(`Agents: ${agents.length > 0 ? agents.join(", ") : "(none)"}`);
|
|
4145
226
|
const workflows = res.workflows ?? [];
|
|
4146
227
|
if (workflows.length > 0) {
|
|
4147
|
-
console.log(`
|
|
228
|
+
console.log(`Workspaces:`);
|
|
4148
229
|
for (const wf of workflows) {
|
|
4149
|
-
const display = wf.tag
|
|
230
|
+
const display = wf.tag ? `@${wf.name}:${wf.tag}` : `@${wf.name}`;
|
|
4150
231
|
console.log(` ${display} → ${wf.agents.join(", ")}`);
|
|
4151
232
|
}
|
|
4152
233
|
}
|
|
@@ -4156,285 +237,97 @@ Examples:
|
|
|
4156
237
|
}
|
|
4157
238
|
}
|
|
4158
239
|
});
|
|
4159
|
-
program.command("ask <agent> <message>").description("Send message to agent (
|
|
240
|
+
program.command("ask <agent> <message>").description("Send message to agent and get response").option("--no-stream", "Sync response (no streaming)").option("--json", "Output response as JSON").addHelpText("after", `
|
|
4160
241
|
Examples:
|
|
4161
242
|
$ agent-worker ask alice "analyze this code"
|
|
243
|
+
$ agent-worker ask alice "hello" --no-stream
|
|
4162
244
|
$ agent-worker ask alice "hello" --json
|
|
4163
245
|
`).action(async (agent, message, options) => {
|
|
4164
246
|
if (!isDaemonActive()) {
|
|
4165
247
|
console.error("No daemon running");
|
|
4166
248
|
process.exit(1);
|
|
4167
249
|
}
|
|
4168
|
-
|
|
4169
|
-
|
|
4170
|
-
|
|
4171
|
-
|
|
4172
|
-
|
|
4173
|
-
|
|
4174
|
-
|
|
4175
|
-
|
|
4176
|
-
|
|
4177
|
-
|
|
4178
|
-
|
|
4179
|
-
|
|
4180
|
-
|
|
250
|
+
if (options.stream === false) {
|
|
251
|
+
const res = await serve({
|
|
252
|
+
agent,
|
|
253
|
+
message
|
|
254
|
+
});
|
|
255
|
+
if (options.json) outputJson(res);
|
|
256
|
+
else if (res.error) {
|
|
257
|
+
console.error("Error:", res.error);
|
|
258
|
+
process.exit(1);
|
|
259
|
+
} else console.log(res.content ?? JSON.stringify(res));
|
|
260
|
+
} else {
|
|
261
|
+
const res = await run({
|
|
262
|
+
agent,
|
|
263
|
+
message
|
|
264
|
+
}, (chunk) => {
|
|
265
|
+
if (!options.json) process.stdout.write(chunk.text);
|
|
266
|
+
});
|
|
267
|
+
if (options.json) outputJson(res);
|
|
268
|
+
else console.log();
|
|
4181
269
|
}
|
|
4182
|
-
const res = await serve({
|
|
4183
|
-
agent,
|
|
4184
|
-
message
|
|
4185
|
-
});
|
|
4186
|
-
if (options.json) outputJson(res);
|
|
4187
|
-
else if (res.error) {
|
|
4188
|
-
console.error("Error:", res.error);
|
|
4189
|
-
process.exit(1);
|
|
4190
|
-
} else console.log(res.content ?? JSON.stringify(res));
|
|
4191
270
|
});
|
|
4192
|
-
|
|
4193
|
-
|
|
4194
|
-
|
|
4195
|
-
"claude",
|
|
4196
|
-
"codex",
|
|
4197
|
-
"cursor",
|
|
4198
|
-
"opencode",
|
|
4199
|
-
"mock"
|
|
4200
|
-
]).default(void 0)).option("-s, --system <prompt>", "System prompt").option("-f, --system-file <file>", "Read system prompt from file").option("--role <role>", "Soul: agent role").option("--expertise <items>", "Soul: expertise (comma-separated)").option("--style <style>", "Soul: communication style").option("--dir <path>", "Project directory", ".").option("--json", "Output as JSON").addHelpText("after", `
|
|
4201
|
-
Creates .agents/<name>.yaml and context directory (.agents/<name>/).
|
|
271
|
+
program.command("onboard").description("Interactive config.yml setup").addHelpText("after", `
|
|
272
|
+
Creates or updates ~/.agent-worker/config.yml interactively.
|
|
273
|
+
Guides you through defining agents and channel bridges.
|
|
4202
274
|
|
|
4203
275
|
Examples:
|
|
4204
|
-
$ agent-worker
|
|
4205
|
-
|
|
4206
|
-
|
|
4207
|
-
|
|
4208
|
-
const
|
|
4209
|
-
|
|
4210
|
-
|
|
4211
|
-
|
|
4212
|
-
|
|
4213
|
-
|
|
4214
|
-
prompt: { system }
|
|
4215
|
-
};
|
|
4216
|
-
if (options.backend) def.backend = options.backend;
|
|
4217
|
-
if (options.role || options.expertise || options.style) {
|
|
4218
|
-
def.soul = {};
|
|
4219
|
-
if (options.role) def.soul.role = options.role;
|
|
4220
|
-
if (options.expertise) def.soul.expertise = options.expertise.split(",").map((s) => s.trim());
|
|
4221
|
-
if (options.style) def.soul.style = options.style;
|
|
4222
|
-
}
|
|
4223
|
-
try {
|
|
4224
|
-
const handle = registry.create(def);
|
|
4225
|
-
if (options.json) outputJson({
|
|
4226
|
-
name,
|
|
4227
|
-
model: def.model,
|
|
4228
|
-
contextDir: handle.contextDir
|
|
4229
|
-
});
|
|
4230
|
-
else {
|
|
4231
|
-
console.log(`Created: .agents/${name}.yaml`);
|
|
4232
|
-
console.log(`Context: ${handle.contextDir}`);
|
|
4233
|
-
}
|
|
4234
|
-
} catch (err) {
|
|
4235
|
-
const msg = err instanceof Error ? err.message : String(err);
|
|
4236
|
-
console.error(`Error: ${msg}`);
|
|
4237
|
-
process.exit(1);
|
|
4238
|
-
}
|
|
4239
|
-
});
|
|
4240
|
-
agentCmd.command("list").description("List persistent agent definitions").option("--dir <path>", "Project directory", ".").option("--json", "Output as JSON").action(async (options) => {
|
|
4241
|
-
const registry = new AgentRegistry(resolve(options.dir));
|
|
4242
|
-
registry.loadFromDisk();
|
|
4243
|
-
const agents = registry.list();
|
|
4244
|
-
if (options.json) {
|
|
4245
|
-
outputJson({ agents: agents.map((h) => ({
|
|
4246
|
-
name: h.name,
|
|
4247
|
-
model: h.definition.model,
|
|
4248
|
-
backend: h.definition.backend,
|
|
4249
|
-
soul: h.definition.soul,
|
|
4250
|
-
contextDir: h.contextDir
|
|
4251
|
-
})) });
|
|
4252
|
-
return;
|
|
4253
|
-
}
|
|
4254
|
-
if (agents.length === 0) {
|
|
4255
|
-
console.log("No agent definitions found in .agents/");
|
|
4256
|
-
return;
|
|
4257
|
-
}
|
|
4258
|
-
for (const h of agents) {
|
|
4259
|
-
const soul = h.definition.soul?.role ? ` (${h.definition.soul.role})` : "";
|
|
4260
|
-
console.log(`${h.name.padEnd(16)} ${h.definition.model}${soul}`);
|
|
4261
|
-
}
|
|
4262
|
-
});
|
|
4263
|
-
agentCmd.command("info <name>").description("Show agent definition details").option("--dir <path>", "Project directory", ".").option("--json", "Output as JSON").action(async (name, options) => {
|
|
4264
|
-
const registry = new AgentRegistry(resolve(options.dir));
|
|
4265
|
-
registry.loadFromDisk();
|
|
4266
|
-
const handle = registry.get(name);
|
|
4267
|
-
if (!handle) {
|
|
4268
|
-
console.error(`Agent not found: ${name}`);
|
|
4269
|
-
process.exit(1);
|
|
4270
|
-
}
|
|
4271
|
-
const def = handle.definition;
|
|
4272
|
-
if (options.json) {
|
|
4273
|
-
outputJson({
|
|
4274
|
-
...def,
|
|
4275
|
-
contextDir: handle.contextDir
|
|
4276
|
-
});
|
|
276
|
+
$ agent-worker onboard
|
|
277
|
+
`).action(async () => {
|
|
278
|
+
const { existsSync: exists } = await import("node:fs");
|
|
279
|
+
const { join } = await import("node:path");
|
|
280
|
+
const { homedir } = await import("node:os");
|
|
281
|
+
const configDir = join(homedir(), ".agent-worker");
|
|
282
|
+
const configPath = join(configDir, "config.yml");
|
|
283
|
+
if (exists(configPath)) {
|
|
284
|
+
console.log(`Config already exists: ${configPath}`);
|
|
285
|
+
console.log("Edit it directly to add/remove agents and channels.");
|
|
4277
286
|
return;
|
|
4278
287
|
}
|
|
4279
|
-
|
|
4280
|
-
|
|
4281
|
-
|
|
4282
|
-
|
|
4283
|
-
|
|
4284
|
-
console.log(`Prompt: ${preview}`);
|
|
4285
|
-
}
|
|
4286
|
-
if (def.soul) {
|
|
4287
|
-
if (def.soul.role) console.log(`Role: ${def.soul.role}`);
|
|
4288
|
-
if (def.soul.expertise) console.log(`Expert: ${def.soul.expertise.join(", ")}`);
|
|
4289
|
-
if (def.soul.style) console.log(`Style: ${def.soul.style}`);
|
|
4290
|
-
if (def.soul.principles) {
|
|
4291
|
-
console.log(`Principles:`);
|
|
4292
|
-
for (const p of def.soul.principles) console.log(` - ${p}`);
|
|
4293
|
-
}
|
|
4294
|
-
}
|
|
4295
|
-
console.log(`Context: ${handle.contextDir}`);
|
|
4296
|
-
});
|
|
4297
|
-
agentCmd.command("delete <name>").description("Delete agent definition and context").option("--dir <path>", "Project directory", ".").action(async (name, options) => {
|
|
4298
|
-
const registry = new AgentRegistry(resolve(options.dir));
|
|
4299
|
-
registry.loadFromDisk();
|
|
4300
|
-
if (!registry.has(name)) {
|
|
4301
|
-
console.error(`Agent not found: ${name}`);
|
|
4302
|
-
process.exit(1);
|
|
4303
|
-
}
|
|
4304
|
-
registry.delete(name);
|
|
4305
|
-
console.log(`Deleted: ${name}`);
|
|
4306
|
-
});
|
|
4307
|
-
}
|
|
288
|
+
const { mkdirSync, writeFileSync } = await import("node:fs");
|
|
289
|
+
mkdirSync(configDir, { recursive: true });
|
|
290
|
+
writeFileSync(configPath, `# agent-worker config
|
|
291
|
+
# Agents defined here are loaded when the daemon starts (agent-worker up).
|
|
292
|
+
# Edit this file to add/remove agents. Changes take effect on next 'up'.
|
|
4308
293
|
|
|
4309
|
-
|
|
4310
|
-
|
|
4311
|
-
|
|
4312
|
-
|
|
4313
|
-
|
|
4314
|
-
parseTarget: () => parseTarget
|
|
4315
|
-
});
|
|
4316
|
-
/**
|
|
4317
|
-
* Target identifier utilities
|
|
4318
|
-
*
|
|
4319
|
-
* Format: agent@workflow:tag (inspired by Docker image:tag)
|
|
4320
|
-
* - agent: agent name (optional for @workflow references)
|
|
4321
|
-
* - workflow: workflow name (optional, defaults to 'global')
|
|
4322
|
-
* - tag: workflow instance tag (optional, defaults to 'main')
|
|
4323
|
-
*
|
|
4324
|
-
* Examples:
|
|
4325
|
-
* - "alice" → { agent: "alice", workflow: "global", tag: "main", display: "alice" }
|
|
4326
|
-
* - "alice@review" → { agent: "alice", workflow: "review", tag: "main", display: "alice@review" }
|
|
4327
|
-
* - "alice@review:pr-123"→ { agent: "alice", workflow: "review", tag: "pr-123", display: "alice@review:pr-123" }
|
|
4328
|
-
* - "@review" → { agent: undefined, workflow: "review", tag: "main", display: "@review" }
|
|
4329
|
-
* - "@review:pr-123" → { agent: undefined, workflow: "review", tag: "pr-123", display: "@review:pr-123" }
|
|
4330
|
-
*
|
|
4331
|
-
* Display rules:
|
|
4332
|
-
* - Omit @global (standalone agents): "alice" not "alice@global"
|
|
4333
|
-
* - Omit :main (default tag): "alice@review" not "alice@review:main"
|
|
4334
|
-
*/
|
|
4335
|
-
const DEFAULT_WORKFLOW = "global";
|
|
4336
|
-
const DEFAULT_TAG = "main";
|
|
4337
|
-
/**
|
|
4338
|
-
* Parse target identifier from string
|
|
4339
|
-
* Supports: "agent", "agent@workflow", "agent@workflow:tag", "@workflow", "@workflow:tag"
|
|
4340
|
-
*/
|
|
4341
|
-
function parseTarget(input) {
|
|
4342
|
-
if (input.startsWith("@")) {
|
|
4343
|
-
const workflowPart = input.slice(1);
|
|
4344
|
-
const colonIndex = workflowPart.indexOf(":");
|
|
4345
|
-
if (colonIndex === -1) {
|
|
4346
|
-
const workflow = workflowPart || DEFAULT_WORKFLOW;
|
|
4347
|
-
return {
|
|
4348
|
-
agent: void 0,
|
|
4349
|
-
workflow,
|
|
4350
|
-
tag: DEFAULT_TAG,
|
|
4351
|
-
full: `@${workflow}:${DEFAULT_TAG}`,
|
|
4352
|
-
display: workflow === DEFAULT_WORKFLOW ? `@${workflow}` : `@${workflow}`
|
|
4353
|
-
};
|
|
4354
|
-
} else {
|
|
4355
|
-
const workflow = workflowPart.slice(0, colonIndex) || DEFAULT_WORKFLOW;
|
|
4356
|
-
const tag = workflowPart.slice(colonIndex + 1) || DEFAULT_TAG;
|
|
4357
|
-
return {
|
|
4358
|
-
agent: void 0,
|
|
4359
|
-
workflow,
|
|
4360
|
-
tag,
|
|
4361
|
-
full: `@${workflow}:${tag}`,
|
|
4362
|
-
display: buildDisplay(void 0, workflow, tag)
|
|
4363
|
-
};
|
|
4364
|
-
}
|
|
4365
|
-
}
|
|
4366
|
-
const atIndex = input.indexOf("@");
|
|
4367
|
-
if (atIndex === -1) return {
|
|
4368
|
-
agent: input,
|
|
4369
|
-
workflow: DEFAULT_WORKFLOW,
|
|
4370
|
-
tag: DEFAULT_TAG,
|
|
4371
|
-
full: `${input}@${DEFAULT_WORKFLOW}:${DEFAULT_TAG}`,
|
|
4372
|
-
display: input
|
|
4373
|
-
};
|
|
4374
|
-
const agent = input.slice(0, atIndex);
|
|
4375
|
-
const workflowPart = input.slice(atIndex + 1);
|
|
4376
|
-
const colonIndex = workflowPart.indexOf(":");
|
|
4377
|
-
if (colonIndex === -1) {
|
|
4378
|
-
const workflow = workflowPart || DEFAULT_WORKFLOW;
|
|
4379
|
-
return {
|
|
4380
|
-
agent,
|
|
4381
|
-
workflow,
|
|
4382
|
-
tag: DEFAULT_TAG,
|
|
4383
|
-
full: `${agent}@${workflow}:${DEFAULT_TAG}`,
|
|
4384
|
-
display: buildDisplay(agent, workflow, DEFAULT_TAG)
|
|
4385
|
-
};
|
|
4386
|
-
} else {
|
|
4387
|
-
const workflow = workflowPart.slice(0, colonIndex) || DEFAULT_WORKFLOW;
|
|
4388
|
-
const tag = workflowPart.slice(colonIndex + 1) || DEFAULT_TAG;
|
|
4389
|
-
return {
|
|
4390
|
-
agent,
|
|
4391
|
-
workflow,
|
|
4392
|
-
tag,
|
|
4393
|
-
full: `${agent}@${workflow}:${tag}`,
|
|
4394
|
-
display: buildDisplay(agent, workflow, tag)
|
|
4395
|
-
};
|
|
4396
|
-
}
|
|
4397
|
-
}
|
|
4398
|
-
/**
|
|
4399
|
-
* Build display string following display rules:
|
|
4400
|
-
* - Omit @global for standalone agents
|
|
4401
|
-
* - Omit :main for default tag
|
|
4402
|
-
*/
|
|
4403
|
-
function buildDisplay(agent, workflow, tag) {
|
|
4404
|
-
const isGlobal = workflow === DEFAULT_WORKFLOW;
|
|
4405
|
-
const isMainTag = tag === DEFAULT_TAG;
|
|
4406
|
-
if (agent === void 0) {
|
|
4407
|
-
if (isMainTag) return `@${workflow}`;
|
|
4408
|
-
return `@${workflow}:${tag}`;
|
|
4409
|
-
}
|
|
4410
|
-
if (isGlobal && isMainTag) return agent;
|
|
4411
|
-
if (isGlobal && !isMainTag) return `${agent}@${workflow}:${tag}`;
|
|
4412
|
-
if (!isGlobal && isMainTag) return `${agent}@${workflow}`;
|
|
4413
|
-
return `${agent}@${workflow}:${tag}`;
|
|
4414
|
-
}
|
|
294
|
+
agents:
|
|
295
|
+
# Example agent:
|
|
296
|
+
# assistant:
|
|
297
|
+
# model: anthropic/claude-sonnet-4-5
|
|
298
|
+
# system: You are a helpful assistant.
|
|
4415
299
|
|
|
300
|
+
# channels:
|
|
301
|
+
# telegram:
|
|
302
|
+
# type: telegram
|
|
303
|
+
# token: \${{ env.TELEGRAM_BOT_TOKEN }}
|
|
304
|
+
`);
|
|
305
|
+
console.log(`Created: ${configPath}`);
|
|
306
|
+
console.log("Edit this file to define your agents, then run: agent-worker up");
|
|
307
|
+
});
|
|
308
|
+
}
|
|
4416
309
|
//#endregion
|
|
4417
310
|
//#region src/cli/commands/workflow.ts
|
|
4418
311
|
function registerWorkflowCommands(program) {
|
|
4419
|
-
program.command("run <file>").description("Execute
|
|
312
|
+
program.command("run <file>").description("Execute workspace and exit when complete").option("--tag <tag>", "Workspace instance tag (optional)").option("-d, --debug", "Show debug details (internal logs, MCP traces, idle checks)").option("--feedback", "Enable feedback tool").option("--json", "Output results as JSON").allowExcessArguments().addHelpText("after", `
|
|
4420
313
|
Examples:
|
|
4421
|
-
$ agent-worker run review.yaml # Run
|
|
4422
|
-
$ agent-worker run review.yaml --tag pr-123 # Run
|
|
314
|
+
$ agent-worker run review.yaml # Run workspace
|
|
315
|
+
$ agent-worker run review.yaml --tag pr-123 # Run with tag
|
|
4423
316
|
$ agent-worker run review.yaml --json | jq .document # Machine-readable output
|
|
4424
|
-
$ agent-worker run review.yaml -- --target main -n 3 # With
|
|
317
|
+
$ agent-worker run review.yaml -- --target main -n 3 # With params
|
|
4425
318
|
|
|
4426
|
-
Remote
|
|
4427
|
-
$ agent-worker run github:acme/
|
|
4428
|
-
$ agent-worker run github:acme/
|
|
4429
|
-
$ agent-worker run github:acme/
|
|
4430
|
-
$ agent-worker run github:acme/
|
|
319
|
+
Remote workspaces (github:owner/repo@ref/path):
|
|
320
|
+
$ agent-worker run github:acme/workspaces/review.yml # Default branch
|
|
321
|
+
$ agent-worker run github:acme/workspaces@v1.0/review.yml # Pinned version
|
|
322
|
+
$ agent-worker run github:acme/workspaces#review # Shorthand
|
|
323
|
+
$ agent-worker run github:acme/workspaces#review -- --target main # With params
|
|
4431
324
|
|
|
4432
|
-
Note:
|
|
4433
|
-
|
|
325
|
+
Note: Workspace name is inferred from YAML 'name' field or filename.
|
|
326
|
+
Params (see 'params:' in YAML) are passed after '--'.
|
|
4434
327
|
Set GITHUB_TOKEN env var to access private repositories.
|
|
4435
328
|
`).action(async (file, options) => {
|
|
4436
|
-
const { parseWorkflowFile, parseWorkflowParams, formatParamHelp, runWorkflowWithLoops } = await import("
|
|
4437
|
-
const tag = options.tag ||
|
|
329
|
+
const { parseWorkflowFile, parseWorkflowParams, formatParamHelp, runWorkflowWithLoops } = await import("@moniro/workspace");
|
|
330
|
+
const tag = options.tag || void 0;
|
|
4438
331
|
const parsedWorkflow = await parseWorkflowFile(file, { tag });
|
|
4439
332
|
const workflowName = parsedWorkflow.name;
|
|
4440
333
|
let params;
|
|
@@ -4456,8 +349,8 @@ Note: Workflow name is inferred from YAML 'name' field or filename.
|
|
|
4456
349
|
isCleaningUp = true;
|
|
4457
350
|
console.log("\nInterrupted, cleaning up...");
|
|
4458
351
|
if (loops) {
|
|
4459
|
-
const { shutdownLoops } = await import("
|
|
4460
|
-
const { createSilentLogger } = await
|
|
352
|
+
const { shutdownLoops } = await import("@moniro/workspace");
|
|
353
|
+
const { createSilentLogger } = await import("@moniro/workspace");
|
|
4461
354
|
await shutdownLoops(loops, createSilentLogger());
|
|
4462
355
|
}
|
|
4463
356
|
process.exit(130);
|
|
@@ -4482,7 +375,7 @@ Note: Workflow name is inferred from YAML 'name' field or filename.
|
|
|
4482
375
|
process.off("SIGINT", cleanup);
|
|
4483
376
|
process.off("SIGTERM", cleanup);
|
|
4484
377
|
if (!result.success) {
|
|
4485
|
-
console.error("
|
|
378
|
+
console.error("Failed:", result.error);
|
|
4486
379
|
process.exit(1);
|
|
4487
380
|
}
|
|
4488
381
|
if (result.contextProvider) {
|
|
@@ -4494,7 +387,7 @@ Note: Workflow name is inferred from YAML 'name' field or filename.
|
|
|
4494
387
|
feedback: result.feedback
|
|
4495
388
|
}, null, 2));
|
|
4496
389
|
else if (!options.debug) {
|
|
4497
|
-
const { showWorkflowSummary } = await import("
|
|
390
|
+
const { showWorkflowSummary } = await import("@moniro/workspace");
|
|
4498
391
|
showWorkflowSummary({
|
|
4499
392
|
duration: result.duration,
|
|
4500
393
|
document: finalDoc,
|
|
@@ -4518,22 +411,22 @@ Note: Workflow name is inferred from YAML 'name' field or filename.
|
|
|
4518
411
|
process.exit(1);
|
|
4519
412
|
}
|
|
4520
413
|
});
|
|
4521
|
-
program.command("start <file>").description("Start
|
|
414
|
+
program.command("start <file>").description("Start workspace via daemon and keep agents running").option("--tag <tag>", "Workspace instance tag (optional)").option("--feedback", "Enable feedback tool").option("--json", "Output as JSON").allowExcessArguments().addHelpText("after", `
|
|
4522
415
|
Examples:
|
|
4523
|
-
$ agent-worker start review.yaml # Start
|
|
4524
|
-
$ agent-worker start review.yaml --tag pr-123 # Start
|
|
4525
|
-
$ agent-worker start review.yaml -- --target main # With
|
|
416
|
+
$ agent-worker start review.yaml # Start workspace (Ctrl+C to stop)
|
|
417
|
+
$ agent-worker start review.yaml --tag pr-123 # Start with tag
|
|
418
|
+
$ agent-worker start review.yaml -- --target main # With params
|
|
4526
419
|
|
|
4527
|
-
|
|
420
|
+
Workspace runs inside the daemon. Use ls/stop to manage:
|
|
4528
421
|
$ agent-worker ls # List all agents
|
|
4529
|
-
$ agent-worker stop @review:pr-123 # Stop
|
|
422
|
+
$ agent-worker stop @review:pr-123 # Stop workspace
|
|
4530
423
|
|
|
4531
|
-
Note:
|
|
4532
|
-
|
|
424
|
+
Note: Workspace name is inferred from YAML 'name' field or filename.
|
|
425
|
+
Params (see 'params:' in YAML) are passed after '--'.
|
|
4533
426
|
`).action(async (file, options) => {
|
|
4534
|
-
const { parseWorkflowFile, parseWorkflowParams, formatParamHelp } = await import("
|
|
427
|
+
const { parseWorkflowFile, parseWorkflowParams, formatParamHelp } = await import("@moniro/workspace");
|
|
4535
428
|
const { ensureDaemon } = await Promise.resolve().then(() => agent_exports);
|
|
4536
|
-
const tag = options.tag ||
|
|
429
|
+
const tag = options.tag || void 0;
|
|
4537
430
|
const parsedWorkflow = await parseWorkflowFile(file, { tag });
|
|
4538
431
|
const workflowName = parsedWorkflow.name;
|
|
4539
432
|
let params;
|
|
@@ -4561,7 +454,7 @@ Note: Workflow name is inferred from YAML 'name' field or filename.
|
|
|
4561
454
|
}
|
|
4562
455
|
const agents = res.agents ?? [];
|
|
4563
456
|
if (options.json) {
|
|
4564
|
-
const { outputJson } = await
|
|
457
|
+
const { outputJson } = await import("../output-B0mwPqjv.mjs").then((n) => n.n);
|
|
4565
458
|
outputJson({
|
|
4566
459
|
name: workflowName,
|
|
4567
460
|
tag,
|
|
@@ -4569,18 +462,19 @@ Note: Workflow name is inferred from YAML 'name' field or filename.
|
|
|
4569
462
|
});
|
|
4570
463
|
return;
|
|
4571
464
|
}
|
|
4572
|
-
|
|
465
|
+
const tagSuffix = tag ? `:${tag}` : "";
|
|
466
|
+
console.log(`Workspace: @${workflowName}${tagSuffix}`);
|
|
4573
467
|
console.log(`Agents: ${agents.join(", ")}`);
|
|
4574
468
|
console.log(`\nTo monitor:`);
|
|
4575
469
|
console.log(` agent-worker ls`);
|
|
4576
|
-
console.log(` agent-worker peek @${workflowName}${
|
|
470
|
+
console.log(` agent-worker peek @${workflowName}${tagSuffix}`);
|
|
4577
471
|
console.log(`\nTo stop:`);
|
|
4578
|
-
console.log(` agent-worker stop @${workflowName}${
|
|
472
|
+
console.log(` agent-worker stop @${workflowName}${tagSuffix}`);
|
|
4579
473
|
let isCleaningUp = false;
|
|
4580
474
|
const cleanup = async () => {
|
|
4581
475
|
if (isCleaningUp) return;
|
|
4582
476
|
isCleaningUp = true;
|
|
4583
|
-
console.log("\nStopping
|
|
477
|
+
console.log("\nStopping workspace...");
|
|
4584
478
|
await stopWorkflow(workflowName, tag);
|
|
4585
479
|
process.exit(0);
|
|
4586
480
|
};
|
|
@@ -4598,39 +492,38 @@ function getArgsAfterSeparator() {
|
|
|
4598
492
|
const idx = process.argv.indexOf("--");
|
|
4599
493
|
return idx === -1 ? [] : process.argv.slice(idx + 1);
|
|
4600
494
|
}
|
|
4601
|
-
|
|
4602
495
|
//#endregion
|
|
4603
496
|
//#region src/cli/commands/send.ts
|
|
4604
497
|
/**
|
|
4605
|
-
* Get agent names for a
|
|
498
|
+
* Get agent names for a workspace from the daemon.
|
|
4606
499
|
* Falls back to ["user"] if daemon is not running.
|
|
4607
500
|
*/
|
|
4608
|
-
async function
|
|
501
|
+
async function getWorkspaceAgentNames(workspace, tag) {
|
|
4609
502
|
if (!isDaemonActive()) return ["user"];
|
|
4610
503
|
try {
|
|
4611
|
-
const names = ((await listAgents()).agents ?? []).filter((a) => a.workflow ===
|
|
504
|
+
const names = ((await listAgents()).agents ?? []).filter((a) => a.workflow === workspace && a.tag === tag).map((a) => a.name);
|
|
4612
505
|
return [...new Set([...names, "user"])];
|
|
4613
506
|
} catch {
|
|
4614
507
|
return ["user"];
|
|
4615
508
|
}
|
|
4616
509
|
}
|
|
4617
510
|
/**
|
|
4618
|
-
* Get a context provider for the given
|
|
511
|
+
* Get a context provider for the given workspace.
|
|
4619
512
|
*/
|
|
4620
|
-
async function getContextProvider(
|
|
4621
|
-
const dir = getDefaultContextDir(
|
|
513
|
+
async function getContextProvider(workspace, tag) {
|
|
514
|
+
const dir = getDefaultContextDir(workspace, tag);
|
|
4622
515
|
mkdirSync(dir, { recursive: true });
|
|
4623
|
-
return createFileContextProvider(dir, await
|
|
516
|
+
return createFileContextProvider(dir, await getWorkspaceAgentNames(workspace, tag));
|
|
4624
517
|
}
|
|
4625
518
|
function registerSendCommands(program) {
|
|
4626
|
-
program.command("send <target> <message>").description("Send message to agent or
|
|
519
|
+
program.command("send <target> <message>").description("Send message to agent or workspace channel").option("--json", "Output as JSON").addHelpText("after", `
|
|
4627
520
|
Examples:
|
|
4628
521
|
$ agent-worker send alice "analyze this code"
|
|
4629
522
|
$ agent-worker send @review "team update"
|
|
4630
523
|
$ agent-worker send @review "@alice @bob discuss this"
|
|
4631
524
|
`).action(async (targetInput, message, options) => {
|
|
4632
525
|
const target = parseTarget(targetInput);
|
|
4633
|
-
const entry = await (await getContextProvider(target.
|
|
526
|
+
const entry = await (await getContextProvider(target.workspace, target.tag)).appendChannel("user", message);
|
|
4634
527
|
if (options.json) outputJson({
|
|
4635
528
|
id: entry.id,
|
|
4636
529
|
timestamp: entry.timestamp,
|
|
@@ -4646,8 +539,8 @@ Examples:
|
|
|
4646
539
|
$ agent-worker peek @review
|
|
4647
540
|
$ agent-worker peek @review:pr-123
|
|
4648
541
|
`).action(async (targetInput, options) => {
|
|
4649
|
-
const target = parseTarget(targetInput ||
|
|
4650
|
-
const provider = await getContextProvider(target.
|
|
542
|
+
const target = parseTarget(targetInput || `@global`);
|
|
543
|
+
const provider = await getContextProvider(target.workspace, target.tag);
|
|
4651
544
|
const limit = options.all ? void 0 : options.last ?? 10;
|
|
4652
545
|
let messages = await provider.readChannel({ limit });
|
|
4653
546
|
if (options.find) {
|
|
@@ -4669,7 +562,6 @@ Examples:
|
|
|
4669
562
|
}
|
|
4670
563
|
});
|
|
4671
564
|
}
|
|
4672
|
-
|
|
4673
565
|
//#endregion
|
|
4674
566
|
//#region src/cli/commands/info.ts
|
|
4675
567
|
const PROVIDER_API_KEYS = {
|
|
@@ -4730,7 +622,7 @@ function registerInfoCommands(program) {
|
|
|
4730
622
|
console.log(`Auto: AGENT_DEFAULT_MODELS="deepseek-chat, anthropic/claude-sonnet-4-5"`);
|
|
4731
623
|
});
|
|
4732
624
|
program.command("backends").description("Check available backends (SDK, CLI tools)").action(async () => {
|
|
4733
|
-
const { listBackends } = await import("
|
|
625
|
+
const { listBackends } = await import("@moniro/agent-loop");
|
|
4734
626
|
const backends = await listBackends();
|
|
4735
627
|
console.log("Backend Status:\n");
|
|
4736
628
|
for (const backend of backends) {
|
|
@@ -4749,22 +641,21 @@ function registerInfoCommands(program) {
|
|
|
4749
641
|
console.log("Tool management (add, mock, import) is only supported with SDK backend.");
|
|
4750
642
|
});
|
|
4751
643
|
}
|
|
4752
|
-
|
|
4753
644
|
//#endregion
|
|
4754
645
|
//#region src/cli/commands/doc.ts
|
|
4755
646
|
function registerDocCommands(program) {
|
|
4756
|
-
const docCmd = program.command("doc").description("Read/write
|
|
4757
|
-
docCmd.command("read <target>").description("Read the
|
|
647
|
+
const docCmd = program.command("doc").description("Read/write workspace documents");
|
|
648
|
+
docCmd.command("read <target>").description("Read the workspace document").addHelpText("after", `
|
|
4758
649
|
Examples:
|
|
4759
|
-
$ agent-worker doc read @review # Read @review
|
|
4760
|
-
$ agent-worker doc read @review:pr-123 # Read specific
|
|
650
|
+
$ agent-worker doc read @review # Read @review document
|
|
651
|
+
$ agent-worker doc read @review:pr-123 # Read specific workspace:tag document
|
|
4761
652
|
`).action(async (targetInput) => {
|
|
4762
653
|
const dir = await resolveDir(targetInput);
|
|
4763
|
-
const { createFileContextProvider } = await import("
|
|
654
|
+
const { createFileContextProvider } = await import("@moniro/workspace");
|
|
4764
655
|
const content = await createFileContextProvider(dir, []).readDocument();
|
|
4765
656
|
console.log(content || "(empty document)");
|
|
4766
657
|
});
|
|
4767
|
-
docCmd.command("write <target>").description("Write content to the
|
|
658
|
+
docCmd.command("write <target>").description("Write content to the workspace document").option("--content <text>", "Content to write").option("--file <path>", "Read content from file").addHelpText("after", `
|
|
4768
659
|
Examples:
|
|
4769
660
|
$ agent-worker doc write @review --content "Document content"
|
|
4770
661
|
$ agent-worker doc write @review:pr-123 --file content.txt
|
|
@@ -4778,11 +669,11 @@ Examples:
|
|
|
4778
669
|
process.exit(1);
|
|
4779
670
|
}
|
|
4780
671
|
const dir = await resolveDir(targetInput);
|
|
4781
|
-
const { createFileContextProvider } = await import("
|
|
672
|
+
const { createFileContextProvider } = await import("@moniro/workspace");
|
|
4782
673
|
await createFileContextProvider(dir, []).writeDocument(content);
|
|
4783
674
|
console.log("Document written");
|
|
4784
675
|
});
|
|
4785
|
-
docCmd.command("append <target>").description("Append content to the
|
|
676
|
+
docCmd.command("append <target>").description("Append content to the workspace document").option("--content <text>", "Content to append (use $'...' for newlines in bash)").option("--file <path>", "Read content from file").addHelpText("after", `
|
|
4786
677
|
Examples:
|
|
4787
678
|
$ agent-worker doc append @review --content $'\\nNew line'
|
|
4788
679
|
$ agent-worker doc append @review:pr-123 --file content.txt
|
|
@@ -4796,22 +687,20 @@ Examples:
|
|
|
4796
687
|
process.exit(1);
|
|
4797
688
|
}
|
|
4798
689
|
const dir = await resolveDir(targetInput);
|
|
4799
|
-
const { createFileContextProvider } = await import("
|
|
690
|
+
const { createFileContextProvider } = await import("@moniro/workspace");
|
|
4800
691
|
await createFileContextProvider(dir, []).appendDocument(content);
|
|
4801
692
|
console.log("Content appended");
|
|
4802
693
|
});
|
|
4803
694
|
}
|
|
4804
695
|
async function resolveDir(targetInput) {
|
|
4805
|
-
const { getDefaultContextDir } = await
|
|
4806
|
-
const { parseTarget } = await
|
|
696
|
+
const { getDefaultContextDir } = await import("@moniro/workspace");
|
|
697
|
+
const { parseTarget } = await import("../target-9yiBRXxa.mjs").then((n) => n.r);
|
|
4807
698
|
const target = parseTarget(targetInput);
|
|
4808
|
-
return getDefaultContextDir(target.
|
|
699
|
+
return getDefaultContextDir(target.workspace, target.tag);
|
|
4809
700
|
}
|
|
4810
|
-
|
|
4811
701
|
//#endregion
|
|
4812
702
|
//#region package.json
|
|
4813
|
-
var version = "0.
|
|
4814
|
-
|
|
703
|
+
var version = "0.19.0";
|
|
4815
704
|
//#endregion
|
|
4816
705
|
//#region src/cli/index.ts
|
|
4817
706
|
globalThis.AI_SDK_LOG_WARNINGS = false;
|
|
@@ -4838,6 +727,5 @@ registerSendCommands(program);
|
|
|
4838
727
|
registerInfoCommands(program);
|
|
4839
728
|
registerDocCommands(program);
|
|
4840
729
|
program.parse();
|
|
4841
|
-
|
|
4842
730
|
//#endregion
|
|
4843
|
-
export {
|
|
731
|
+
export {};
|