agent-worker 0.18.0 → 0.19.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +49 -38
- package/dist/cli/index.mjs +208 -4320
- package/dist/client-DAKkzdOn.mjs +171 -0
- package/dist/daemon-CwaHgxs6.mjs +1071 -0
- package/dist/index.d.mts +249 -849
- package/dist/index.mjs +27 -1102
- package/dist/output-B0mwPqjv.mjs +20 -0
- package/dist/rolldown-runtime-wcPFST8Q.mjs +13 -0
- package/dist/target-9yiBRXxa.mjs +105 -0
- package/package.json +25 -37
- package/dist/backends-D7DT0uox.mjs +0 -1484
- package/dist/backends-DUvcm-ce.mjs +0 -3
- package/dist/context-CoRTddGx.mjs +0 -4
- package/dist/create-tool-gcUuI1FD.mjs +0 -32
- package/dist/display-pretty-Kyd40DEF.mjs +0 -190
- package/dist/memory-provider-Z9D8NdwS.mjs +0 -75
- package/dist/runner-BmT0Y8MD.mjs +0 -690
- package/dist/workflow-LOZUlaDo.mjs +0 -744
|
@@ -0,0 +1,1071 @@
|
|
|
1
|
+
import { t as __exportAll } from "./rolldown-runtime-wcPFST8Q.mjs";
|
|
2
|
+
import { Hono } from "hono";
|
|
3
|
+
import { streamSSE } from "hono/streaming";
|
|
4
|
+
import { randomUUID } from "node:crypto";
|
|
5
|
+
import { basename, join } from "node:path";
|
|
6
|
+
import { mkdir, open, readFile, readdir, writeFile } from "node:fs/promises";
|
|
7
|
+
import { appendFileSync, existsSync, mkdirSync, readFileSync, unlinkSync, writeFileSync } from "node:fs";
|
|
8
|
+
import { parse, stringify } from "yaml";
|
|
9
|
+
import { CONTEXT_SUBDIRS } from "@moniro/agent-loop";
|
|
10
|
+
import { ConversationLog, DEFAULT_THIN_THREAD_SIZE, ThinThread } from "@moniro/agent-worker";
|
|
11
|
+
import { createChannelAdapters, createContextMCPServer, createEventLogger, createMinimalRuntime, createSilentLogger, createWiredLoop, generateInstructionId } from "@moniro/workspace";
|
|
12
|
+
import { homedir } from "node:os";
|
|
13
|
+
import { WebStandardStreamableHTTPServerTransport } from "@modelcontextprotocol/sdk/server/webStandardStreamableHttp.js";
|
|
14
|
+
import { nanoid } from "nanoid";
|
|
15
|
+
//#region src/agent/agent-handle.ts
|
|
16
|
+
/**
|
|
17
|
+
* AgentHandle — Runtime wrapper for an agent definition + persistent context.
|
|
18
|
+
*
|
|
19
|
+
* Created by AgentRegistry when an agent is loaded. Provides:
|
|
20
|
+
* - Context directory management (memory/, notes/, conversations/, todo/)
|
|
21
|
+
* - Read/write operations for personal context
|
|
22
|
+
* - State tracking (idle, running, stopped, error)
|
|
23
|
+
* - Loop ownership (lazy — created on first message)
|
|
24
|
+
* - Ephemeral vs persistent mode
|
|
25
|
+
*/
|
|
26
|
+
var AgentHandle = class {
	/** Agent definition (from YAML or API). */
	definition;
	/** Absolute path to this agent's persistent context directory. */
	contextDir;
	/**
	 * True when the agent lives only in memory (created via the daemon API,
	 * POST /agents); such agents vanish on daemon restart. Config agents
	 * come from config.yml.
	 */
	ephemeral;
	/** Current lifecycle state ("idle" until something changes it). */
	state = "idle";
	/** Execution loop — created lazily by the daemon on first run/serve. */
	loop = null;
	/** Lazily created JSONL conversation log (persistent agents only). */
	_conversationLog;
	/** Lazily created bounded in-memory conversation buffer. */
	_thinThread;
	/** Optional logger injected by the registry; absent in standalone CLI use. */
	log;
	constructor(definition, contextDir, logger, ephemeral = false) {
		this.definition = definition;
		this.contextDir = contextDir;
		this.log = logger;
		this.ephemeral = ephemeral;
	}
	/** Convenience accessor for the definition's name. */
	get name() {
		return this.definition.name;
	}
	/**
	 * JSONL-backed conversation log. Null for ephemeral agents (nothing is
	 * persisted to disk); otherwise created once and cached.
	 */
	get conversationLog() {
		if (this.ephemeral) return null;
		this._conversationLog ??= new ConversationLog(join(this.contextDir, "conversations", "personal.jsonl"));
		return this._conversationLog;
	}
	/**
	 * Bounded in-memory conversation buffer. On first access, restores from
	 * the conversation log when one already exists on disk.
	 */
	get thinThread() {
		if (this._thinThread) return this._thinThread;
		const maxMessages = this.definition.context?.thin_thread ?? DEFAULT_THIN_THREAD_SIZE;
		const log = this.conversationLog;
		this._thinThread = log?.exists ? ThinThread.fromLog(log, maxMessages) : new ThinThread(maxMessages);
		return this._thinThread;
	}
	/**
	 * Enqueue a typed instruction on this agent's loop.
	 * @throws when no loop is attached (call ensureAgentLoop first).
	 */
	send(instruction) {
		if (!this.loop) throw new Error(`Agent "${this.name}" has no loop — call ensureAgentLoop first`);
		this.loop.enqueue(instruction);
	}
	/** Shorthand: wrap a raw message string in an instruction and send it. */
	sendMessage(message, options = {}) {
		const instruction = {
			id: generateInstructionId(),
			message,
			source: options.source ?? "mention",
			priority: options.priority ?? "immediate",
			queuedAt: new Date().toISOString()
		};
		this.send(instruction);
	}
	/** Create the context directory tree (memory/, notes/, ...). Idempotent. */
	ensureContextDir() {
		CONTEXT_SUBDIRS.forEach((sub) => mkdirSync(join(this.contextDir, sub), { recursive: true }));
	}
	/**
	 * Read all memory entries (YAML files under memory/) as key-value records.
	 * Malformed files are skipped with a warning rather than failing the read.
	 */
	async readMemory() {
		const memDir = join(this.contextDir, "memory");
		if (!existsSync(memDir)) return {};
		const entries = {};
		for (const file of await readdir(memDir)) {
			const isYaml = file.endsWith(".yaml") || file.endsWith(".yml");
			if (!isYaml) continue;
			try {
				const raw = await readFile(join(memDir, file), "utf-8");
				entries[basename(file).replace(/\.ya?ml$/i, "")] = parse(raw);
			} catch (err) {
				this.log?.warn(`Skipping malformed memory file ${file}: ${err}`);
			}
		}
		return entries;
	}
	/** Create or overwrite memory/<key>.yaml with the given value. */
	async writeMemory(key, value) {
		const memDir = join(this.contextDir, "memory");
		await mkdir(memDir, { recursive: true });
		await writeFile(join(memDir, `${key}.yaml`), stringify(value));
	}
	/**
	 * Read markdown notes from notes/, most recent first (filenames sort by
	 * date prefix). An optional limit caps the number returned.
	 */
	async readNotes(limit) {
		const notesDir = join(this.contextDir, "notes");
		if (!existsSync(notesDir)) return [];
		const all = await readdir(notesDir);
		const newestFirst = all.filter((f) => f.endsWith(".md")).sort().reverse();
		const selected = limit ? newestFirst.slice(0, limit) : newestFirst;
		return Promise.all(selected.map((f) => readFile(join(notesDir, f), "utf-8")));
	}
	/** Append a note as notes/<date>-<slug>.md and return the filename. */
	async appendNote(content, slug) {
		const notesDir = join(this.contextDir, "notes");
		await mkdir(notesDir, { recursive: true });
		const datePrefix = new Date().toISOString().slice(0, 10);
		const filename = `${datePrefix}-${slug ?? `note-${Date.now().toString(36)}`}.md`;
		await writeFile(join(notesDir, filename), content);
		return filename;
	}
	/** Read incomplete tasks ("- [ ] ...") from todo/index.md, checkbox stripped. */
	async readTodos() {
		const todoFile = join(this.contextDir, "todo", "index.md");
		if (!existsSync(todoFile)) return [];
		const lines = (await readFile(todoFile, "utf-8")).split("\n");
		return lines.filter((line) => line.match(/^\s*-\s*\[\s*\]/)).map((line) => line.replace(/^\s*-\s*\[\s*\]\s*/, "").trim());
	}
	/** Replace todo/index.md with the given task list (one checkbox per task). */
	async writeTodos(todos) {
		const todoDir = join(this.contextDir, "todo");
		await mkdir(todoDir, { recursive: true });
		const rendered = todos.map((t) => `- [ ] ${t}`);
		await writeFile(join(todoDir, "index.md"), rendered.join("\n") + "\n");
	}
};
|
|
181
|
+
//#endregion
|
|
182
|
+
//#region src/agent/agent-registry.ts
|
|
183
|
+
/**
|
|
184
|
+
* AgentRegistry — In-memory agent registry.
|
|
185
|
+
*
|
|
186
|
+
* Responsibilities:
|
|
187
|
+
* - Register/unregister agents at runtime
|
|
188
|
+
* - Ensure context directories exist (for config agents)
|
|
189
|
+
* - Provide agent lookup by name
|
|
190
|
+
*
|
|
191
|
+
* All agents are registered programmatically (from config.yml or daemon API).
|
|
192
|
+
* No disk-based discovery (.agents/ is deprecated).
|
|
193
|
+
*
|
|
194
|
+
* Owned by the daemon. One registry per daemon process.
|
|
195
|
+
*/
|
|
196
|
+
var AgentRegistry = class {
	/** Loaded agent handles, keyed by agent name. */
	agents = new Map();
	/** Workspace root directory (e.g. ~/.agent-worker/ for global). */
	projectDir;
	/** Optional logger injected by the daemon. */
	log;
	constructor(projectDir, logger) {
		this.projectDir = projectDir;
		this.log = logger;
	}
	/** Shared construction path for both registration flavors. */
	#register(def, ephemeral) {
		const contextDir = this.resolveContextDir(def);
		const handle = new AgentHandle(def, contextDir, this.log?.child(def.name), ephemeral);
		if (!ephemeral) handle.ensureContextDir();
		this.agents.set(def.name, handle);
		return handle;
	}
	/**
	 * Register a config agent (from config.yml). Ensures its context
	 * directory exists on disk. Re-registering under the same name
	 * replaces the previous handle (reload semantics).
	 */
	registerDefinition(def) {
		return this.#register(def, false);
	}
	/**
	 * Register an ephemeral agent (from the daemon API).
	 * Memory-only; lost on daemon restart.
	 */
	registerEphemeral(def) {
		return this.#register(def, true);
	}
	/**
	 * Unregister an agent from memory.
	 * @returns true when the agent existed and was removed.
	 */
	delete(name) {
		return this.agents.delete(name);
	}
	/** Look up a handle by name. */
	get(name) {
		return this.agents.get(name);
	}
	/** Whether an agent with this name is registered. */
	has(name) {
		return this.agents.has(name);
	}
	/** All registered handles as a fresh array. */
	list() {
		return Array.from(this.agents.values());
	}
	/** Count of registered agents. */
	get size() {
		return this.agents.size;
	}
	/** Absolute context directory for an agent definition. */
	resolveContextDir(def) {
		const custom = def.context?.dir;
		return custom ? join(this.projectDir, custom) : join(this.projectDir, "agents", def.name);
	}
};
|
|
259
|
+
//#endregion
|
|
260
|
+
//#region src/agent/store.ts
|
|
261
|
+
/**
|
|
262
|
+
* In-memory state store. State is lost when the daemon stops.
|
|
263
|
+
* Suitable for development and single-machine deployments.
|
|
264
|
+
*/
|
|
265
|
+
var MemoryStateStore = class {
	/** agentId → state, held only for the lifetime of the process. */
	states = new Map();
	/** Return the stored state for an agent, or null when none is known. */
	async load(agentId) {
		const stored = this.states.get(agentId);
		return stored ?? null;
	}
	/** Store (or replace) the state for an agent. */
	async save(agentId, state) {
		this.states.set(agentId, state);
	}
	/** Drop any stored state for an agent. */
	async delete(agentId) {
		this.states.delete(agentId);
	}
};
|
|
277
|
+
//#endregion
|
|
278
|
+
//#region src/daemon/registry.ts
|
|
279
|
+
/**
|
|
280
|
+
* Daemon Registry
|
|
281
|
+
*
|
|
282
|
+
* Discovery: daemon.json = { pid, host, port, startedAt, token }
|
|
283
|
+
* One daemon process on a fixed port. Clients read daemon.json to find it.
|
|
284
|
+
*/
|
|
285
|
+
// Root directory for daemon configuration and discovery files.
const CONFIG_DIR = join(homedir(), ".agent-worker");
// Fixed default port the daemon listens on.
const DEFAULT_PORT = 5099;
// Discovery file written on startup: { pid, host, port, startedAt, token }.
const DAEMON_FILE = join(CONFIG_DIR, "daemon.json");
|
|
288
|
+
/** Write daemon.json for client discovery */
|
|
289
|
+
/**
 * Write daemon.json so clients can discover the running daemon.
 * Creates the config directory if needed.
 */
function writeDaemonInfo(info) {
	mkdirSync(CONFIG_DIR, { recursive: true });
	const payload = JSON.stringify(info, null, 2);
	writeFileSync(DAEMON_FILE, payload);
}
|
|
293
|
+
/** Read daemon.json. Returns null if missing or malformed. */
|
|
294
|
+
/** Read and parse daemon.json. Returns null when missing or malformed. */
function readDaemonInfo() {
	let raw;
	try {
		raw = readFileSync(DAEMON_FILE, "utf-8");
	} catch {
		return null; // file absent or unreadable
	}
	try {
		return JSON.parse(raw);
	} catch {
		return null; // file present but not valid JSON
	}
}
|
|
301
|
+
/** Remove daemon.json (on shutdown) */
|
|
302
|
+
/** Delete daemon.json on shutdown. Missing file is not an error. */
function removeDaemonInfo() {
	try {
		unlinkSync(DAEMON_FILE);
	} catch {
		// Already gone (or never written) — nothing to clean up.
	}
}
|
|
307
|
+
/** Check if a daemon is already running (daemon.json exists + PID alive) */
|
|
308
|
+
/**
 * Check whether a daemon is already running: daemon.json must exist AND
 * its recorded PID must be alive. A stale record is removed.
 * @returns the daemon info object when running, otherwise null.
 */
function isDaemonRunning() {
	const info = readDaemonInfo();
	if (info === null) return null;
	try {
		process.kill(info.pid, 0); // signal 0: liveness probe, delivers nothing
	} catch {
		removeDaemonInfo(); // PID is dead — drop the stale discovery file
		return null;
	}
	return info;
}
|
|
319
|
+
//#endregion
|
|
320
|
+
//#region src/daemon/config.ts
|
|
321
|
+
/**
|
|
322
|
+
* Daemon Config — Parse ~/.agent-worker/config.yml
|
|
323
|
+
*
|
|
324
|
+
* The daemon config IS a workflow YAML file. It defines agents (identity +
|
|
325
|
+
* model + prompt) and channels (external platforms) in the standard workflow
|
|
326
|
+
* format.
|
|
327
|
+
*
|
|
328
|
+
* Example config.yml:
|
|
329
|
+
* agents:
|
|
330
|
+
* alice:
|
|
331
|
+
* model: anthropic/claude-sonnet-4-5
|
|
332
|
+
* system_prompt: "You are a helpful assistant."
|
|
333
|
+
* wakeup: "0 9 * * *"
|
|
334
|
+
* channels:
|
|
335
|
+
* - adapter: telegram
|
|
336
|
+
* bot_token: ${TELEGRAM_BOT_TOKEN}
|
|
337
|
+
* chat_id: ${TELEGRAM_CHAT_ID}
|
|
338
|
+
*/
|
|
339
|
+
/**
|
|
340
|
+
* Load daemon config from ~/.agent-worker/config.yml.
|
|
341
|
+
* Returns null if file doesn't exist or parsing fails.
|
|
342
|
+
*
|
|
343
|
+
* Uses the standard workflow parser — same format as workflow YAML files.
|
|
344
|
+
*/
|
|
345
|
+
/**
 * Load daemon config from <configDir>/config.yml using the standard
 * workflow parser (the config IS a workflow YAML file).
 * Returns null when the file is absent or parsing fails.
 */
async function loadDaemonConfig(configDir) {
	const configPath = join(configDir, "config.yml");
	if (!existsSync(configPath)) return null;
	try {
		const workspaceModule = await import("@moniro/workspace");
		return await workspaceModule.parseWorkflowFile(configPath, { workflow: "global" });
	} catch {
		// Best-effort: an unreadable config behaves like no config at all.
		return null;
	}
}
|
|
355
|
+
//#endregion
|
|
356
|
+
//#region src/daemon/serve.ts
|
|
357
|
+
/**
|
|
358
|
+
* Start an HTTP server for a Hono app.
|
|
359
|
+
* Auto-detects runtime: Bun.serve() when available, @hono/node-server otherwise.
|
|
360
|
+
*/
|
|
361
|
+
/**
 * Start an HTTP server for a Hono app.
 * Uses Bun.serve() when running under Bun, @hono/node-server otherwise.
 */
async function startHttpServer(app, options) {
	const runningOnBun = "Bun" in globalThis;
	return runningOnBun ? startBun(app, options) : startNode(app, options);
}
|
|
365
|
+
/** Serve the app via Bun's built-in HTTP server. */
function startBun(app, options) {
	const server = globalThis.Bun.serve({
		fetch: app.fetch,
		port: options.port,
		hostname: options.hostname
	});
	const boundPort = server.port ?? options.port;
	return {
		port: boundPort,
		// stop(true) closes active connections immediately
		close: async () => server.stop(true)
	};
}
|
|
376
|
+
/**
 * Serve the app via @hono/node-server. Resolves once the listener is
 * bound (with the actual port), rejects on server error.
 */
async function startNode(app, options) {
	const mod = await import("@hono/node-server");
	return new Promise((resolve, reject) => {
		const server = mod.serve(
			{
				fetch: app.fetch,
				port: options.port,
				hostname: options.hostname
			},
			(info) => {
				const close = () => new Promise((done) => server.close(() => done()));
				resolve({ port: info.port, close });
			}
		);
		server.on("error", reject);
	});
}
|
|
392
|
+
//#endregion
|
|
393
|
+
//#region src/daemon/event-log.ts
|
|
394
|
+
/**
|
|
395
|
+
* DaemonEventLog — Daemon-level append-only JSONL event log.
|
|
396
|
+
*
|
|
397
|
+
* Persists to `~/.agent-worker/events.jsonl`.
|
|
398
|
+
* Records daemon startup/shutdown, registry operations, importer progress.
|
|
399
|
+
*
|
|
400
|
+
* Uses the same Message format as ChannelStore and TimelineStore,
|
|
401
|
+
* enabling read-time merge for unified timeline views.
|
|
402
|
+
*/
|
|
403
|
+
// Filename of the daemon-level JSONL event log inside the daemon dir.
const EVENTS_FILE = "events.jsonl";
/**
 * Append-only JSONL event log for daemon-level events.
 * Uses synchronous writes — daemon events are infrequent and must not be lost.
 */
var DaemonEventLog = class {
	/** Absolute path to the events.jsonl file. */
	filePath;
	constructor(daemonDir) {
		if (!existsSync(daemonDir)) mkdirSync(daemonDir, { recursive: true });
		this.filePath = join(daemonDir, EVENTS_FILE);
	}
	/** Append one event line. Write failures are swallowed (best-effort log). */
	append(from, content, options) {
		const record = {
			id: nanoid(),
			timestamp: new Date().toISOString(),
			from,
			content,
			mentions: [],
			kind: options?.kind ?? "system"
		};
		try {
			appendFileSync(this.filePath, `${JSON.stringify(record)}\n`);
		} catch {
			// A failed event write must never take down the daemon.
		}
	}
	/** Read every event (full file read). Returns [] when unreadable. */
	async readAll() {
		try {
			const text = await readFile(this.filePath, "utf-8");
			return text
				.split("\n")
				.filter((line) => line.trim())
				.map((line) => JSON.parse(line));
		} catch {
			return [];
		}
	}
	/**
	 * Read events starting at a byte offset (incremental sync).
	 * Returns the parsed events plus the new offset (file size); on any
	 * failure returns no events and echoes the caller's offset back.
	 */
	async readFrom(offset) {
		let fh;
		try {
			fh = await open(this.filePath, "r");
			const { size } = await fh.stat();
			if (offset >= size) return { events: [], offset: size };
			const length = size - offset;
			const chunk = Buffer.alloc(length);
			await fh.read(chunk, 0, length, offset);
			const events = [];
			for (const raw of chunk.toString("utf-8").split("\n")) {
				const line = raw.trim();
				if (!line) continue;
				try {
					events.push(JSON.parse(line));
				} catch {
					// Skip partially written / corrupt lines.
				}
			}
			return { events, offset: size };
		} catch {
			return { events: [], offset };
		} finally {
			await fh?.close();
		}
	}
};
|
|
472
|
+
//#endregion
|
|
473
|
+
//#region src/daemon/daemon.ts
|
|
474
|
+
/**
|
|
475
|
+
* Daemon — Centralized agent coordinator.
|
|
476
|
+
*
|
|
477
|
+
* Architecture: Interface → Daemon → Loop (three layers)
|
|
478
|
+
* Interface: CLI/REST/MCP clients talk to daemon via HTTP
|
|
479
|
+
* Daemon: This module — owns lifecycle, creates workspaces + loops
|
|
480
|
+
* Loop: AgentLoop + Backend — executes agent reasoning
|
|
481
|
+
*
|
|
482
|
+
* Data ownership:
|
|
483
|
+
* AgentRegistry (agents) — what agents exist + their handles (loop, state)
|
|
484
|
+
* defaultWorkspace — shared workspace for all standalone agents
|
|
485
|
+
* Workflows (workflows) — running workflow instances (each with own workspace)
|
|
486
|
+
*
|
|
487
|
+
* Key principle: agents own their loops (stored on AgentHandle). One shared
|
|
488
|
+
* default workspace provides infrastructure (context, MCP, channels) for all
|
|
489
|
+
* standalone agents. Workflows get their own isolated workspaces.
|
|
490
|
+
* Bridge config comes from ~/.agent-worker/config.yml.
|
|
491
|
+
*
|
|
492
|
+
* HTTP endpoints:
|
|
493
|
+
* GET /health, POST /shutdown
|
|
494
|
+
* GET/POST /agents, GET/DELETE /agents/:name
|
|
495
|
+
* POST /run (SSE), POST /serve
|
|
496
|
+
* GET/POST /workflows, DELETE /workflows/:name/:tag
|
|
497
|
+
* ALL /mcp
|
|
498
|
+
*/
|
|
499
|
+
// Lazy re-export table (rolldown runtime helper) for the daemon module's public API.
var daemon_exports = /* @__PURE__ */ __exportAll({
	createDaemonApp: () => createDaemonApp,
	startDaemon: () => startDaemon
});
// Module-level daemon singleton state; null until initialized (presumably by
// startDaemon — definition not visible here, verify).
let state = null;
// Re-entrancy guard for gracefulShutdown (signal handler and POST /shutdown can race).
let shuttingDown = false;
// Silent by default so route handlers can log unconditionally; presumably
// replaced with a real logger at daemon startup — verify against startDaemon.
let log = createSilentLogger();
// Active MCP sessions; each value holds a transport that is closed on shutdown.
const mcpSessions = /* @__PURE__ */ new Map();
|
|
507
|
+
/**
 * Tear everything down in order and exit the process:
 * agent loops → workflows → default workspace → HTTP server → MCP
 * transports → daemon.json. Idempotent via the shuttingDown flag; every
 * step is best-effort so one failure cannot block the rest.
 */
async function gracefulShutdown() {
	if (shuttingDown) return;
	shuttingDown = true;
	if (state) {
		// Stop each standalone agent loop, detaching it from its handle.
		for (const handle of state.agents.list()) {
			if (!handle.loop) continue;
			try {
				await handle.loop.stop();
			} catch {
				// Best-effort — keep tearing down the rest.
			}
			handle.loop = null;
		}
		// Shut down workflow instances (each owns its own workspace).
		for (const wf of state.workflows.values()) {
			try {
				await wf.shutdown();
			} catch {
				// Best-effort.
			}
		}
		state.workflows.clear();
		if (state.defaultWorkspace) {
			try {
				await state.defaultWorkspace.shutdown();
			} catch {
				// Best-effort.
			}
		}
		if (state.server) await state.server.close();
	}
	// Close any live MCP transports regardless of daemon state.
	for (const session of mcpSessions.values()) {
		try {
			await session.transport.close();
		} catch {
			// Best-effort.
		}
	}
	mcpSessions.clear();
	removeDaemonInfo();
	process.exit(0);
}
|
|
533
|
+
/** Safe JSON body parsing — returns null on malformed input */
|
|
534
|
+
/** Parse the request body as JSON; null on malformed or missing input. */
async function parseJsonBody(c) {
	let body = null;
	try {
		body = await c.req.json();
	} catch {
		// Malformed body — callers treat null as a 400.
	}
	return body;
}
|
|
541
|
+
/** Map AgentDefinition to the ResolvedWorkflowAgent type needed by the factory */
|
|
542
|
+
/**
 * Map an AgentDefinition to the ResolvedWorkflowAgent shape the loop
 * factory expects. The system prompt defaults to the empty string.
 */
function defToResolvedAgent(def) {
	const { backend, model, provider, schedule } = def;
	return {
		backend,
		model,
		provider,
		resolvedSystemPrompt: def.prompt.system ?? "",
		schedule
	};
}
|
|
551
|
+
/** Convert a ResolvedWorkflowAgent (from workflow parser) to AgentDefinition (for registry) */
|
|
552
|
+
/**
 * Convert a ResolvedWorkflowAgent (workflow parser output) into an
 * AgentDefinition for the registry. The "default" backend is normalized
 * to "sdk"; model falls back to "auto"; prompt falls back to "".
 */
function resolvedToAgentDef(name, agent) {
	const backend = agent.backend === "default" ? "sdk" : agent.backend;
	return {
		name,
		model: agent.model ?? "auto",
		backend,
		provider: agent.provider,
		prompt: { system: agent.resolvedSystemPrompt ?? "" },
		schedule: agent.schedule,
		max_tokens: agent.max_tokens,
		max_steps: agent.max_steps
	};
}
|
|
564
|
+
/**
|
|
565
|
+
* Find an agent's loop.
|
|
566
|
+
* First checks the agent handle (standalone agents own their loop),
|
|
567
|
+
* then falls back to workflow-scoped loops (workflow agents).
|
|
568
|
+
*/
|
|
569
|
+
/**
 * Locate an agent's loop: the agent handle first (standalone agents own
 * their loop), then every workflow's loop map (workflow agents).
 * Returns null when no loop exists anywhere.
 */
function findLoop(s, agentName) {
	const ownLoop = s.agents.get(agentName)?.loop;
	if (ownLoop) return ownLoop;
	for (const wf of s.workflows.values()) {
		const scoped = wf.loops.get(agentName);
		if (scoped) return scoped;
	}
	return null;
}
|
|
578
|
+
/**
|
|
579
|
+
* Ensure the default workspace exists.
|
|
580
|
+
* Creates it lazily on first call (starts MCP server, channels, etc.).
|
|
581
|
+
*/
|
|
582
|
+
/**
 * Ensure the shared default workspace exists, creating it lazily on first
 * call (starts the runtime, wires channel adapters, subscribes to the
 * message bridge). Caches the result on state so subsequent calls are free.
 */
async function ensureDefaultWorkspace(s) {
	if (s.defaultWorkspace) return s.defaultWorkspace;
	const agentNames = s.agents.list().map((h) => h.name);
	// Channel adapters come from config.yml; undefined when no channels configured.
	const channelAdapters = s.config?.channels ? createChannelAdapters(s.config.channels) : void 0;
	const workspace = await createMinimalRuntime({
		workflowName: "global",
		tag: void 0,
		// The runtime appears to require at least one agent name; "user" is the fallback.
		agentNames: agentNames.length > 0 ? agentNames : ["user"],
		resolveHandle: (name) => s.agents.get(name) ?? void 0,
		channelAdapters: channelAdapters && channelAdapters.length > 0 ? channelAdapters : void 0,
		// Wake the mentioned agent's loop (if it has one) when it is mentioned.
		onMention: (_from, target) => {
			const handle = s.agents.get(target);
			if (handle?.loop) handle.loop.wake();
		}
	});
	s.defaultWorkspace = workspace;
	// Fan incoming bridge messages out to agent loops.
	if (workspace.bridge) workspace.bridge.subscribe({}, (msg) => {
		// A ":" in the sender presumably marks channel-origin messages
		// (e.g. "telegram:123") — only those are fanned out; verify against the bridge.
		if (!msg.from.includes(":")) return;
		const targeted = /* @__PURE__ */ new Set();
		// Wake each explicitly mentioned agent.
		for (const target of msg.mentions) {
			targeted.add(target);
			const handle = s.agents.get(target);
			if (handle?.loop) handle.loop.wake();
		}
		// Also wake the direct addressee, if not already covered by a mention.
		if (msg.to && !targeted.has(msg.to)) {
			targeted.add(msg.to);
			const handle = s.agents.get(msg.to);
			if (handle?.loop) handle.loop.wake();
		}
		// Untargeted broadcast: enqueue the message on every agent that has a loop.
		if (targeted.size === 0) {
			for (const handle of s.agents.list()) if (handle.loop) handle.loop.enqueue({
				id: generateInstructionId(),
				message: `[${msg.from}]: ${msg.content}`,
				source: "channel",
				priority: "normal",
				queuedAt: (/* @__PURE__ */ new Date()).toISOString()
			});
		}
	});
	if (channelAdapters && channelAdapters.length > 0) log.info(`Channels: ${channelAdapters.map((a) => a.platform).join(", ")}`);
	return workspace;
}
|
|
624
|
+
/**
|
|
625
|
+
* Ensure a standalone agent has a loop.
|
|
626
|
+
* Uses the shared default workspace.
|
|
627
|
+
*
|
|
628
|
+
* This is the bridge between POST /agents (stores definition only) and
|
|
629
|
+
* POST /run or /serve (needs a loop to execute).
|
|
630
|
+
*/
|
|
631
|
+
/**
 * Ensure a standalone agent has a loop, creating one against the shared
 * default workspace when missing. This bridges POST /agents (which only
 * stores the definition) and POST /run or /serve (which need a loop).
 * @throws when the agent is not registered.
 */
async function ensureAgentLoop(s, agentName) {
	const existing = findLoop(s, agentName);
	if (existing) return existing;
	const handle = s.agents.get(agentName);
	if (!handle) throw new Error(`Agent not found: ${agentName}`);
	const wiring = {
		name: agentName,
		agent: defToResolvedAgent(handle.definition),
		runtime: await ensureDefaultWorkspace(s),
		conversationLog: handle.conversationLog ?? void 0,
		thinThread: handle.thinThread
	};
	const { loop } = createWiredLoop(wiring);
	handle.loop = loop;
	return loop;
}
|
|
646
|
+
/**
|
|
647
|
+
* Create the Hono app with all daemon routes.
|
|
648
|
+
*
|
|
649
|
+
* Accepts a state getter so the app can be used both in production
|
|
650
|
+
* (module-level state set by startDaemon) and in tests (injected state).
|
|
651
|
+
*
|
|
652
|
+
* When a token is provided, all endpoints require `Authorization: Bearer <token>`.
|
|
653
|
+
* This prevents cross-origin attacks from malicious websites.
|
|
654
|
+
*/
|
|
655
|
+
function createDaemonApp(options) {
|
|
656
|
+
const { getState, token } = options;
|
|
657
|
+
const app = new Hono();
|
|
658
|
+
if (token) app.use("*", async (c, next) => {
|
|
659
|
+
if (c.req.header("authorization") !== `Bearer ${token}`) return c.json({ error: "Unauthorized" }, 401);
|
|
660
|
+
await next();
|
|
661
|
+
});
|
|
662
|
+
app.get("/health", (c) => {
|
|
663
|
+
const s = getState();
|
|
664
|
+
if (!s) return c.json({ status: "unavailable" }, 503);
|
|
665
|
+
const agentNames = s.agents.list().map((h) => h.name);
|
|
666
|
+
const workflowList = [...s.workflows.values()].map((wf) => ({
|
|
667
|
+
name: wf.name,
|
|
668
|
+
tag: wf.tag,
|
|
669
|
+
agents: wf.agents
|
|
670
|
+
}));
|
|
671
|
+
const configAgentNames = s.agents.list().filter((h) => !h.ephemeral).map((h) => h.name);
|
|
672
|
+
return c.json({
|
|
673
|
+
status: "ok",
|
|
674
|
+
pid: process.pid,
|
|
675
|
+
port: s.port,
|
|
676
|
+
uptime: Date.now() - new Date(s.startedAt).getTime(),
|
|
677
|
+
agents: agentNames,
|
|
678
|
+
configAgents: configAgentNames,
|
|
679
|
+
workflows: workflowList
|
|
680
|
+
});
|
|
681
|
+
});
|
|
682
|
+
app.post("/shutdown", (c) => {
|
|
683
|
+
setImmediate(() => gracefulShutdown());
|
|
684
|
+
return c.json({ success: true });
|
|
685
|
+
});
|
|
686
|
+
app.get("/agents", (c) => {
|
|
687
|
+
const s = getState();
|
|
688
|
+
if (!s) return c.json({ error: "Not ready" }, 503);
|
|
689
|
+
const standaloneAgents = s.agents.list().map((handle) => {
|
|
690
|
+
const def = handle.definition;
|
|
691
|
+
return {
|
|
692
|
+
name: def.name,
|
|
693
|
+
model: def.model,
|
|
694
|
+
backend: def.backend ?? "default",
|
|
695
|
+
workflow: "global",
|
|
696
|
+
tag: void 0,
|
|
697
|
+
createdAt: void 0,
|
|
698
|
+
source: handle.ephemeral ? "ephemeral" : "config",
|
|
699
|
+
state: handle.loop?.state
|
|
700
|
+
};
|
|
701
|
+
});
|
|
702
|
+
const workflowAgents = [...s.workflows.values()].flatMap((wf) => wf.agents.map((agentName) => {
|
|
703
|
+
const loop = wf.loops.get(agentName);
|
|
704
|
+
return {
|
|
705
|
+
name: agentName,
|
|
706
|
+
model: "",
|
|
707
|
+
backend: "",
|
|
708
|
+
workflow: wf.name,
|
|
709
|
+
tag: wf.tag,
|
|
710
|
+
createdAt: wf.startedAt,
|
|
711
|
+
source: "workflow",
|
|
712
|
+
state: loop?.state ?? "unknown"
|
|
713
|
+
};
|
|
714
|
+
}));
|
|
715
|
+
return c.json({ agents: [...standaloneAgents, ...workflowAgents] });
|
|
716
|
+
});
|
|
717
|
+
app.post("/agents", async (c) => {
|
|
718
|
+
const s = getState();
|
|
719
|
+
if (!s) return c.json({ error: "Not ready" }, 503);
|
|
720
|
+
const body = await parseJsonBody(c);
|
|
721
|
+
if (!body || typeof body !== "object") return c.json({ error: "Invalid JSON body" }, 400);
|
|
722
|
+
const { name, model, system, backend = "default", provider, workflow, tag, schedule } = body;
|
|
723
|
+
if (!name || !model || !system) return c.json({ error: "name, model, system required" }, 400);
|
|
724
|
+
if (s.agents.has(name)) return c.json({ error: `Agent already exists: ${name}` }, 409);
|
|
725
|
+
const def = {
|
|
726
|
+
name,
|
|
727
|
+
model,
|
|
728
|
+
backend,
|
|
729
|
+
provider,
|
|
730
|
+
prompt: { system },
|
|
731
|
+
schedule
|
|
732
|
+
};
|
|
733
|
+
s.agents.registerEphemeral(def);
|
|
734
|
+
return c.json({
|
|
735
|
+
name,
|
|
736
|
+
model,
|
|
737
|
+
backend,
|
|
738
|
+
workflow,
|
|
739
|
+
tag,
|
|
740
|
+
schedule
|
|
741
|
+
}, 201);
|
|
742
|
+
});
|
|
743
|
+
// GET /agents/:name — describe one registered agent definition.
app.get("/agents/:name", (c) => {
  const st = getState();
  if (!st) {
    return c.json({ error: "Not ready" }, 503);
  }
  const handle = st.agents.get(c.req.param("name"));
  if (!handle) {
    return c.json({ error: "Agent not found" }, 404);
  }
  const { name, model, backend, prompt, schedule } = handle.definition;
  return c.json({
    name,
    model,
    backend: backend ?? "default",
    system: prompt.system,
    // Standalone agents carry no workflow metadata; fields kept for a stable shape.
    workflow: void 0,
    tag: void 0,
    createdAt: void 0,
    schedule
  });
});
|
|
760
|
+
// DELETE /agents/:name — remove an ephemeral agent; config-defined agents are protected.
app.delete("/agents/:name", async (c) => {
  const st = getState();
  if (!st) {
    return c.json({ error: "Not ready" }, 503);
  }
  const name = c.req.param("name");
  const handle = st.agents.get(name);
  if (!handle) {
    return c.json({ error: "Agent not found" }, 404);
  }
  if (!handle.ephemeral) {
    return c.json({ error: `"${name}" is defined in config.yml — edit config to remove` }, 403);
  }
  if (handle.loop) {
    // Best-effort stop: a failing loop must not block deletion.
    try {
      await handle.loop.stop();
    } catch {}
    handle.loop = null;
  }
  st.agents.delete(name);
  return c.json({ success: true });
});
|
|
776
|
+
// POST /run — send one direct message to an agent and stream the outcome as SSE.
// Events emitted: "chunk" (assistant text, if any), then "done" (full result),
// or "error" on failure.
app.post("/run", async (c) => {
  const st = getState();
  if (!st) return c.json({ error: "Not ready" }, 503);
  const body = await parseJsonBody(c);
  if (!body || typeof body !== "object") return c.json({ error: "Invalid JSON body" }, 400);
  const { agent: agentName, message } = body;
  if (!agentName || !message) return c.json({ error: "agent and message required" }, 400);
  // Reuse a running loop when one exists; otherwise lazily build one for a known agent.
  let loop = findLoop(st, agentName);
  if (!loop && st.agents.has(agentName)) {
    try {
      loop = await ensureAgentLoop(st, agentName);
    } catch (error) {
      const msg = error instanceof Error ? error.message : String(error);
      return c.json({ error: `Failed to create agent runtime: ${msg}` }, 500);
    }
  }
  if (!loop) return c.json({ error: `Agent not found: ${agentName}` }, 404);
  const agentLoop = loop;
  return streamSSE(c, async (stream) => {
    try {
      const result = await agentLoop.sendDirect(message);
      if (!result.success) {
        await stream.writeSSE({
          event: "error",
          data: JSON.stringify({ error: result.error })
        });
        return;
      }
      if (result.content) {
        await stream.writeSSE({
          event: "chunk",
          data: JSON.stringify({ agent: agentName, text: result.content })
        });
      }
      await stream.writeSSE({
        event: "done",
        data: JSON.stringify(result)
      });
    } catch (error) {
      const msg = error instanceof Error ? error.message : String(error);
      await stream.writeSSE({
        event: "error",
        data: JSON.stringify({ error: msg })
      });
    }
  });
});
|
|
822
|
+
// POST /serve — one-shot request/response variant of /run (plain JSON, no streaming).
app.post("/serve", async (c) => {
  const st = getState();
  if (!st) return c.json({ error: "Not ready" }, 503);
  const body = await parseJsonBody(c);
  if (!body || typeof body !== "object") return c.json({ error: "Invalid JSON body" }, 400);
  const { agent: agentName, message } = body;
  if (!agentName || !message) return c.json({ error: "agent and message required" }, 400);
  // Same resolution strategy as /run: prefer a live loop, else create one on demand.
  let loop = findLoop(st, agentName);
  if (!loop && st.agents.has(agentName)) {
    try {
      loop = await ensureAgentLoop(st, agentName);
    } catch (error) {
      return c.json({ error: error instanceof Error ? error.message : String(error) }, 500);
    }
  }
  if (!loop) return c.json({ error: `Agent not found: ${agentName}` }, 404);
  try {
    const result = await loop.sendDirect(message);
    if (!result.success) return c.json({ error: result.error }, 500);
    return c.json({
      content: result.content ?? "",
      duration: result.duration,
      success: true
    });
  } catch (error) {
    return c.json({ error: error instanceof Error ? error.message : String(error) }, 500);
  }
});
|
|
852
|
+
// ALL /mcp — Streamable-HTTP MCP endpoint.
// Existing sessions are routed via the "mcp-session-id" header; a POST carrying an
// `initialize` request opens a new session bound to the `agent` query parameter
// (defaults to "user"). GET without a session is rejected; other methods get 405.
app.all("/mcp", async (c) => {
  const s = getState();
  if (!s) return c.json({ error: "Not ready" }, 503);
  const req = c.req.raw;
  const sessionId = req.headers.get("mcp-session-id");
  if (sessionId && mcpSessions.has(sessionId)) {
    const session = mcpSessions.get(sessionId);
    if (req.method === "DELETE") {
      // Client-initiated teardown: close the transport and drop the session record.
      await session.transport.close();
      mcpSessions.delete(sessionId);
      return new Response(null, { status: 200 });
    }
    return session.transport.handleRequest(req);
  }
  if (req.method === "POST") {
    let body;
    try {
      body = await req.json();
    } catch {
      // Fix: malformed JSON previously escaped as an uncaught rejection here;
      // report it as a 400 client error, consistent with the other routes.
      return c.json({ error: "Invalid JSON body" }, 400);
    }
    // Only an `initialize` request (possibly inside a JSON-RPC batch) may open a session.
    if (!(Array.isArray(body) ? body.some((m) => m?.method === "initialize") : body?.method === "initialize")) return c.json({ error: "Bad request: session required" }, 400);
    const agentName = new URL(req.url).searchParams.get("agent") || "user";
    const handle = s.agents.get(agentName);
    const provider = (await ensureDefaultWorkspace(s)).contextProvider;
    const transport = new WebStandardStreamableHTTPServerTransport({
      // Session ids embed the agent name plus a short random suffix for readability.
      sessionIdGenerator: () => `${agentName}-${randomUUID().slice(0, 8)}`,
      onsessioninitialized: (sid) => {
        mcpSessions.set(sid, {
          transport,
          agentId: agentName
        });
      },
      onsessionclosed: (sid) => {
        mcpSessions.delete(sid);
      },
      enableJsonResponse: true
    });
    await createContextMCPServer({
      provider,
      // The requesting agent, every registered agent, and "user" are valid senders.
      validAgents: [...new Set([
        agentName,
        ...s.agents.list().map((h) => h.name),
        "user"
      ])],
      name: `${handle?.definition.name ?? agentName}-context`,
      version: "1.0.0",
      resolveHandle: (name) => s.agents.get(name) ?? void 0
    }).server.connect(transport);
    return transport.handleRequest(req, { parsedBody: body });
  }
  if (req.method === "GET") return c.json({ error: "Session ID required for GET requests" }, 400);
  return c.json({ error: "Method not allowed" }, 405);
});
|
|
901
|
+
// POST /workflows — start a workflow (parsed YAML supplied in the body) with one
// agent loop per declared agent. Keyed in the registry as `name:tag` (or `name`).
app.post("/workflows", async (c) => {
  const s = getState();
  if (!s) return c.json({ error: "Not ready" }, 503);
  const body = await parseJsonBody(c);
  if (!body || typeof body !== "object") return c.json({ error: "Invalid JSON body" }, 400);
  const { workflow, tag, feedback, pollInterval, params } = body;
  if (!workflow || !workflow.agents) return c.json({ error: "workflow (parsed YAML) required" }, 400);
  const workflowName = workflow.name || "global";
  // Tagged instances of the same workflow may run concurrently; untagged ones may not.
  const key = tag ? `${workflowName}:${tag}` : workflowName;
  if (s.workflows.has(key)) return c.json({ error: `Workflow already running: ${key}` }, 409);
  try {
    // Lazy import keeps workflow machinery out of the daemon's startup path.
    const { runWorkflowWithLoops } = await import("@moniro/workspace");
    const result = await runWorkflowWithLoops({
      workflow,
      workflowName,
      tag,
      mode: "start",
      headless: true,
      feedback,
      pollInterval,
      params,
      log: () => {}
    });
    if (!result.success) return c.json({ error: result.error || "Workflow failed to start" }, 500);
    // Synthesize a workspace record around the runner's context provider; the
    // empty/null fields mark capabilities this in-process workspace does not have.
    const workspace = {
      contextProvider: result.contextProvider,
      contextDir: "",
      persistent: false,
      eventLog: null,
      httpMcpServer: null,
      mcpUrl: result.mcpUrl ?? "",
      mcpToolNames: /* @__PURE__ */ new Set(),
      projectDir: process.cwd(),
      shutdown: result.shutdown
    };
    const handle = {
      name: workflowName,
      tag,
      key,
      agents: Object.keys(workflow.agents),
      loops: result.loops,
      workspace,
      // shutdown is kept on the handle as well so deleteWorkflow can call it directly.
      shutdown: result.shutdown,
      workflowPath: workflow.filePath,
      startedAt: (/* @__PURE__ */ new Date()).toISOString()
    };
    s.workflows.set(key, handle);
    return c.json({
      key,
      name: workflowName,
      tag,
      agents: handle.agents
    }, 201);
  } catch (error) {
    const msg = error instanceof Error ? error.message : String(error);
    return c.json({ error: `Failed to start workflow: ${msg}` }, 500);
  }
});
|
|
959
|
+
// GET /workflows — list running workflows with the live state of each agent loop.
app.get("/workflows", (c) => {
  const st = getState();
  if (!st) {
    return c.json({ error: "Not ready" }, 503);
  }
  const workflows = [];
  for (const wf of st.workflows.values()) {
    // Snapshot per-agent loop states as a plain { agentName: state } object.
    const agentStates = Object.fromEntries(
      [...wf.loops].map(([agentName, loop]) => [agentName, loop.state])
    );
    workflows.push({
      name: wf.name,
      tag: wf.tag,
      key: wf.key,
      agents: wf.agents,
      agentStates,
      workflowPath: wf.workflowPath,
      startedAt: wf.startedAt
    });
  }
  return c.json({ workflows });
});
|
|
977
|
+
/**
 * Stop a running workflow and remove it from the registry.
 * Shared by both DELETE /workflows routes (tagged and untagged).
 *
 * @param c Hono context used to write the JSON response.
 * @param name Workflow name.
 * @param tag Optional instance tag; the registry key is `name:tag` when present.
 * @returns JSON response: 503 when not ready, 404 when unknown, 500 on shutdown
 *   failure, otherwise `{ success: true, key }`.
 */
async function deleteWorkflow(c, name, tag) {
  const st = getState();
  if (!st) {
    return c.json({ error: "Not ready" }, 503);
  }
  const key = tag ? `${name}:${tag}` : name;
  const wf = st.workflows.get(key);
  if (!wf) {
    return c.json({ error: `Workflow not found: ${key}` }, 404);
  }
  try {
    await wf.shutdown();
    // Only forget the workflow once shutdown has completed successfully.
    st.workflows.delete(key);
    return c.json({ success: true, key });
  } catch (error) {
    const msg = error instanceof Error ? error.message : String(error);
    return c.json({ error: `Failed to stop workflow: ${msg}` }, 500);
  }
}
|
|
995
|
+
// Route both the tagged and untagged deletion forms through the shared helper.
app.delete("/workflows/:name/:tag", (c) => deleteWorkflow(c, c.req.param("name"), c.req.param("tag")));
app.delete("/workflows/:name", (c) => deleteWorkflow(c, c.req.param("name"), void 0));
|
|
997
|
+
return app;
|
|
998
|
+
}
|
|
999
|
+
/**
 * Boot the daemon: bind the HTTP server, write the discovery/pid file, load
 * agent definitions from config.yml, auto-start config-defined agents, and
 * install shutdown signal handlers.
 *
 * @param {object} [config] - Optional overrides: `host` (default "127.0.0.1"),
 *   `port` (default 5099), `store` (state store; defaults to in-memory).
 *
 * Exits the process with code 1 if another daemon instance is already running.
 * Side effects: assigns the module-level `log` and `state`, writes the daemon
 * info file, and registers SIGINT/SIGTERM handlers.
 */
async function startDaemon(config = {}) {
  log = createEventLogger(new DaemonEventLog(CONFIG_DIR), "daemon");
  const existing = isDaemonRunning();
  if (existing) {
    log.error(`Daemon already running: pid=${existing.pid} port=${existing.port}`);
    process.exit(1);
  }
  const host = config.host ?? "127.0.0.1";
  const store = config.store ?? new MemoryStateStore();
  // Per-boot auth token, shared with clients via the daemon info file.
  const token = randomUUID();
  // The server starts before `state` exists; routes read it lazily through the
  // getState closure, returning 503 until the assignment below completes.
  const server = await startHttpServer(createDaemonApp({
    getState: () => state,
    token
  }), {
    port: config.port ?? 5099,
    hostname: host
  });
  // The server reports the actual bound port (may differ from the requested one).
  const actualPort = server.port;
  const startedAt = (/* @__PURE__ */ new Date()).toISOString();
  // Persist connection details so CLI clients can discover this daemon.
  writeDaemonInfo({
    pid: process.pid,
    host,
    port: actualPort,
    startedAt,
    token
  });
  const agents = new AgentRegistry(CONFIG_DIR, log);
  const bootConfig = await loadDaemonConfig(CONFIG_DIR);
  if (bootConfig) {
    const agentNames = Object.keys(bootConfig.agents);
    for (const name of agentNames) {
      const def = resolvedToAgentDef(name, bootConfig.agents[name]);
      agents.registerDefinition(def);
    }
    if (agentNames.length > 0) log.info(`Loaded ${agentNames.length} agent(s) from config.yml: ${agentNames.join(", ")}`);
  }
  // Publishing `state` flips the HTTP API from 503 "Not ready" to live.
  state = {
    agents,
    defaultWorkspace: null,
    config: bootConfig,
    workflows: /* @__PURE__ */ new Map(),
    store,
    server,
    port: actualPort,
    host,
    startedAt
  };
  log.info(`Daemon started: pid=${process.pid}`);
  log.info(`Listening: http://${host}:${actualPort}`);
  log.info(`MCP: http://${host}:${actualPort}/mcp`);
  // Channels need the shared workspace up front; otherwise it is created on demand.
  if (bootConfig?.channels && bootConfig.channels.length > 0) await ensureDefaultWorkspace(state);
  // Auto-start every config-defined (non-ephemeral) agent; failures are logged
  // per agent and do not abort daemon startup.
  const configAgents = agents.list().filter((h) => !h.ephemeral);
  if (configAgents.length > 0) {
    log.info(`Auto-starting ${configAgents.length} agent(s)...`);
    for (const handle of configAgents) try {
      await (await ensureAgentLoop(state, handle.name)).start();
      log.info(`Agent started: ${handle.name}`);
    } catch (err) {
      const msg = err instanceof Error ? err.message : String(err);
      log.info(`Failed to start agent ${handle.name}: ${msg}`);
    }
  }
  process.on("SIGINT", () => {
    log.info("Shutting down...");
    gracefulShutdown();
  });
  process.on("SIGTERM", () => {
    log.info("Shutting down...");
    gracefulShutdown();
  });
}
|
|
1070
|
+
//#endregion
|
|
1071
|
+
export { isDaemonRunning as a, writeDaemonInfo as c, AgentHandle as d, DEFAULT_PORT as i, MemoryStateStore as l, startDaemon as n, readDaemonInfo as o, DaemonEventLog as r, removeDaemonInfo as s, daemon_exports as t, AgentRegistry as u };
|