@neotx/core 0.1.0-alpha.21 → 0.1.0-alpha.24

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -20,17 +20,32 @@ var agentToolSchema = z.enum([
20
20
  ]);
21
21
  var agentToolEntrySchema = z.union([agentToolSchema, z.literal("$inherited")]);
22
22
  var agentSandboxSchema = z.enum(["writable", "readonly"]);
23
+ var subagentDefinitionSchema = z.object({
24
+ description: z.string(),
25
+ prompt: z.string(),
26
+ tools: z.array(agentToolSchema).optional(),
27
+ model: agentModelSchema.optional()
28
+ });
23
29
  var agentConfigSchema = z.object({
24
30
  name: z.string(),
25
31
  extends: z.string().optional(),
26
32
  description: z.string().optional(),
33
+ version: z.string().optional(),
27
34
  model: agentModelSchema.optional(),
28
35
  tools: z.array(agentToolEntrySchema).optional(),
29
36
  prompt: z.string().optional(),
30
37
  promptAppend: z.string().optional(),
31
38
  sandbox: agentSandboxSchema.optional(),
32
39
  maxTurns: z.number().optional(),
33
- mcpServers: z.array(z.string()).optional()
40
+ /**
41
+ * Maximum cost in USD for this agent session.
42
+ * Checked post-session (SDK provides cost only after session ends).
43
+ * If session cost >= maxCost, a budget_exceeded error is thrown.
44
+ * Child agents can override the parent's maxCost.
45
+ */
46
+ maxCost: z.number().min(0).optional(),
47
+ mcpServers: z.array(z.string()).optional(),
48
+ agents: z.record(z.string(), subagentDefinitionSchema).optional()
34
49
  });
35
50
 
36
51
  // src/agents/loader.ts
@@ -60,8 +75,24 @@ ${issues}`);
60
75
  const promptPath = path.resolve(path.dirname(filePath), config.prompt);
61
76
  try {
62
77
  config.prompt = await readFile(promptPath, "utf-8");
63
- } catch {
64
- throw new Error(`Prompt file not found: ${promptPath} (referenced in ${filePath})`);
78
+ } catch (err) {
79
+ throw new Error(
80
+ `Prompt file not found: ${promptPath} (referenced in ${filePath}). Error: ${err instanceof Error ? err.message : String(err)}`
81
+ );
82
+ }
83
+ }
84
+ if (config.agents) {
85
+ for (const [name, subagent] of Object.entries(config.agents)) {
86
+ if (subagent.prompt.endsWith(".md")) {
87
+ const subagentPromptPath = path.resolve(path.dirname(filePath), subagent.prompt);
88
+ try {
89
+ subagent.prompt = await readFile(subagentPromptPath, "utf-8");
90
+ } catch (err) {
91
+ throw new Error(
92
+ `Subagent "${name}" prompt file not found: ${subagentPromptPath} (referenced in ${filePath}). Error: ${err instanceof Error ? err.message : String(err)}`
93
+ );
94
+ }
95
+ }
65
96
  }
66
97
  }
67
98
  return config;
@@ -89,18 +120,25 @@ function resolveExtendedAgent(config, extendsName, builtIns) {
89
120
  const tools = mergeTools(config.tools, base.tools);
90
121
  const prompt = mergePrompt(config.prompt, config.promptAppend, base.prompt);
91
122
  const mcpServers = mergeMcpServerNames(base.mcpServers, config.mcpServers);
123
+ const agents = mergeAgents(
124
+ base.agents,
125
+ config.agents
126
+ );
92
127
  const definition = {
93
128
  description: config.description ?? base.description ?? "",
94
129
  prompt,
95
130
  tools,
96
131
  model: config.model ?? base.model ?? "sonnet",
97
- ...mcpServers.length > 0 ? { mcpServers } : {}
132
+ ...mcpServers.length > 0 ? { mcpServers } : {},
133
+ ...agents ? { agents } : {}
98
134
  };
99
135
  return {
100
136
  name: config.name,
101
137
  definition,
102
138
  sandbox: config.sandbox ?? base.sandbox ?? "readonly",
103
139
  ...config.maxTurns !== void 0 ? { maxTurns: config.maxTurns } : base.maxTurns !== void 0 ? { maxTurns: base.maxTurns } : {},
140
+ ...config.maxCost !== void 0 ? { maxCost: config.maxCost } : base.maxCost !== void 0 ? { maxCost: base.maxCost } : {},
141
+ ...config.version !== void 0 ? { version: config.version } : base.version !== void 0 ? { version: base.version } : {},
104
142
  source: config.name === extendsName && !config.extends ? "built-in" : "extended"
105
143
  };
106
144
  }
@@ -142,13 +180,16 @@ ${config.promptAppend}`;
142
180
  prompt,
143
181
  tools,
144
182
  model: config.model,
145
- ...config.mcpServers?.length ? { mcpServers: config.mcpServers } : {}
183
+ ...config.mcpServers?.length ? { mcpServers: config.mcpServers } : {},
184
+ ...config.agents ? { agents: config.agents } : {}
146
185
  };
147
186
  return {
148
187
  name: config.name,
149
188
  definition,
150
189
  sandbox: config.sandbox,
151
190
  ...config.maxTurns !== void 0 ? { maxTurns: config.maxTurns } : {},
191
+ ...config.maxCost !== void 0 ? { maxCost: config.maxCost } : {},
192
+ ...config.version !== void 0 ? { version: config.version } : {},
152
193
  source: "custom"
153
194
  };
154
195
  }
@@ -173,6 +214,12 @@ function mergeMcpServerNames(base, override) {
173
214
  if (!base?.length && !override?.length) return [];
174
215
  return [.../* @__PURE__ */ new Set([...base ?? [], ...override ?? []])];
175
216
  }
217
+ function mergeAgents(base, override) {
218
+ if (!base && !override) return void 0;
219
+ if (!base) return override;
220
+ if (!override) return base;
221
+ return { ...base, ...override };
222
+ }
176
223
 
177
224
  // src/agents/registry.ts
178
225
  var AgentRegistry = class {
@@ -198,7 +245,10 @@ var AgentRegistry = class {
198
245
  let customConfigs;
199
246
  try {
200
247
  customConfigs = await loadAgentsFromDir(this.customDir);
201
- } catch {
248
+ } catch (err) {
249
+ console.debug(
250
+ `[registry] Custom agents dir not found: ${err instanceof Error ? err.message : String(err)}`
251
+ );
202
252
  customConfigs = [];
203
253
  }
204
254
  for (const config of customConfigs) {
@@ -505,6 +555,15 @@ var sessionsConfigSchema = z2.object({
505
555
  maxDurationMs: z2.number().default(36e5),
506
556
  dir: z2.string().default("/tmp/neo-sessions")
507
557
  }).default({ initTimeoutMs: 12e4, maxDurationMs: 36e5, dir: "/tmp/neo-sessions" });
558
+ var journalConfigSchema = z2.object({
559
+ maxCostJournalSizeBytes: z2.number().default(100 * 1024 * 1024),
560
+ // 100MB
561
+ maxEventJournalSizeBytes: z2.number().default(500 * 1024 * 1024)
562
+ // 500MB
563
+ }).default({
564
+ maxCostJournalSizeBytes: 100 * 1024 * 1024,
565
+ maxEventJournalSizeBytes: 500 * 1024 * 1024
566
+ });
508
567
  var supervisorConfigSchema = z2.object({
509
568
  port: z2.number().default(7777),
510
569
  secret: z2.string().optional(),
@@ -544,6 +603,7 @@ var globalConfigSchema = z2.object({
544
603
  budget: budgetConfigSchema,
545
604
  recovery: recoveryConfigSchema,
546
605
  sessions: sessionsConfigSchema,
606
+ journal: journalConfigSchema.optional(),
547
607
  webhooks: z2.array(
548
608
  z2.object({
549
609
  url: z2.string().url(),
@@ -598,6 +658,7 @@ var defaultConfig = {
598
658
  budget: budgetConfigSchema.parse(void 0),
599
659
  recovery: recoveryConfigSchema.parse(void 0),
600
660
  sessions: sessionsConfigSchema.parse(void 0),
661
+ journal: journalConfigSchema.parse(void 0),
601
662
  webhooks: [],
602
663
  supervisor: {
603
664
  port: 7777,
@@ -727,6 +788,7 @@ var ConfigStore = class {
727
788
  }
728
789
  const parsed = neoConfigSchema.safeParse(raw);
729
790
  if (!parsed.success) {
791
+ console.warn(`[neo] Failed to parse config at ${globalPath}:`, parsed.error.message);
730
792
  return null;
731
793
  }
732
794
  return parsed.data;
@@ -745,6 +807,7 @@ var ConfigStore = class {
745
807
  }
746
808
  const parsed = repoOverrideConfigSchema.safeParse(raw);
747
809
  if (!parsed.success) {
810
+ console.warn(`[neo] Failed to parse config at ${repoConfigPath}:`, parsed.error.message);
748
811
  return null;
749
812
  }
750
813
  return parsed.data;
@@ -844,6 +907,9 @@ var ConfigWatcher = class extends EventEmitter {
844
907
  this.debounceTimer = null;
845
908
  this.reloadConfig();
846
909
  }, this.debounceMs);
910
+ if (typeof this.debounceTimer === "object" && "unref" in this.debounceTimer) {
911
+ this.debounceTimer.unref();
912
+ }
847
913
  }
848
914
  /**
849
915
  * Reloads the config and emits 'change' event.
@@ -857,6 +923,51 @@ var ConfigWatcher = class extends EventEmitter {
857
923
  }
858
924
  };
859
925
 
926
+ // src/config/parser.ts
927
+ var NEO_CONFIG_KEYS = {
928
+ "": /* @__PURE__ */ new Set([
929
+ "repos",
930
+ "concurrency",
931
+ "budget",
932
+ "recovery",
933
+ "sessions",
934
+ "webhooks",
935
+ "supervisor",
936
+ "memory",
937
+ "mcpServers",
938
+ "claudeCodePath",
939
+ "idempotency"
940
+ ]),
941
+ concurrency: /* @__PURE__ */ new Set(["maxSessions", "maxPerRepo", "queueMax"]),
942
+ budget: /* @__PURE__ */ new Set(["dailyCapUsd", "alertThresholdPct"]),
943
+ recovery: /* @__PURE__ */ new Set(["maxRetries", "backoffBaseMs"]),
944
+ sessions: /* @__PURE__ */ new Set(["initTimeoutMs", "maxDurationMs", "dir"]),
945
+ supervisor: /* @__PURE__ */ new Set([
946
+ "port",
947
+ "secret",
948
+ "heartbeatTimeoutMs",
949
+ "maxConsecutiveFailures",
950
+ "maxEventsPerSec",
951
+ "dailyCapUsd",
952
+ "consolidationIntervalMs",
953
+ "compactionIntervalMs",
954
+ "eventTimeoutMs",
955
+ "instructions",
956
+ "idleSkipMax",
957
+ "activeWorkSkipMax",
958
+ "autoDecide"
959
+ ]),
960
+ memory: /* @__PURE__ */ new Set(["embeddings"]),
961
+ idempotency: /* @__PURE__ */ new Set(["enabled", "key", "ttlMs"])
962
+ };
963
+ var REPO_OVERRIDE_KEYS = {
964
+ "": /* @__PURE__ */ new Set(["concurrency", "budget", "recovery", "sessions"]),
965
+ concurrency: NEO_CONFIG_KEYS.concurrency,
966
+ budget: NEO_CONFIG_KEYS.budget,
967
+ recovery: NEO_CONFIG_KEYS.recovery,
968
+ sessions: NEO_CONFIG_KEYS.sessions
969
+ };
970
+
860
971
  // src/config.ts
861
972
  var DEFAULT_GLOBAL_CONFIG = {
862
973
  repos: [],
@@ -888,8 +999,10 @@ async function loadConfig(configPath) {
888
999
  let raw;
889
1000
  try {
890
1001
  raw = await readFile2(configPath, "utf-8");
891
- } catch {
892
- throw new Error(`Config file not found: ${configPath}. Run 'neo init' to get started.`);
1002
+ } catch (err) {
1003
+ throw new Error(
1004
+ `Config file not found: ${configPath}. Run 'neo init' to get started. (${err instanceof Error ? err.message : String(err)})`
1005
+ );
893
1006
  }
894
1007
  const parsed = parseYamlFile(raw, configPath);
895
1008
  const result = neoConfigSchema.safeParse(parsed);
@@ -944,7 +1057,9 @@ async function listReposFromGlobalConfig() {
944
1057
  }
945
1058
 
946
1059
  // src/cost/journal.ts
947
- import { appendFile, readFile as readFile3 } from "fs/promises";
1060
+ import { createReadStream } from "fs";
1061
+ import { appendFile, stat } from "fs/promises";
1062
+ import { createInterface } from "readline";
948
1063
 
949
1064
  // src/shared/date.ts
950
1065
  import path5 from "path";
@@ -968,12 +1083,25 @@ async function ensureDir(dirPath, cache) {
968
1083
  }
969
1084
 
970
1085
  // src/cost/journal.ts
1086
+ var JournalFileSizeError = class extends Error {
1087
+ constructor(filePath, fileSizeBytes, maxSizeBytes) {
1088
+ super(
1089
+ `Journal file exceeds maximum size: ${filePath} (${(fileSizeBytes / 1024 / 1024).toFixed(2)}MB > ${(maxSizeBytes / 1024 / 1024).toFixed(2)}MB)`
1090
+ );
1091
+ this.filePath = filePath;
1092
+ this.fileSizeBytes = fileSizeBytes;
1093
+ this.maxSizeBytes = maxSizeBytes;
1094
+ this.name = "JournalFileSizeError";
1095
+ }
1096
+ };
971
1097
  var CostJournal = class {
972
1098
  dir;
973
1099
  dirCache = /* @__PURE__ */ new Set();
974
1100
  dayCache = null;
1101
+ maxFileSizeBytes;
975
1102
  constructor(options) {
976
1103
  this.dir = options.dir;
1104
+ this.maxFileSizeBytes = options.maxFileSizeBytes ?? 100 * 1024 * 1024;
977
1105
  }
978
1106
  async append(entry) {
979
1107
  await ensureDir(this.dir, this.dirCache);
@@ -991,8 +1119,13 @@ var CostJournal = class {
991
1119
  const file = fileForDate(d, "cost", this.dir);
992
1120
  let total = 0;
993
1121
  try {
994
- const content = await readFile3(file, "utf-8");
995
- for (const line of content.split("\n")) {
1122
+ const stats = await stat(file);
1123
+ if (stats.size > this.maxFileSizeBytes) {
1124
+ throw new JournalFileSizeError(file, stats.size, this.maxFileSizeBytes);
1125
+ }
1126
+ const stream = createReadStream(file, { encoding: "utf-8" });
1127
+ const rl = createInterface({ input: stream, crlfDelay: Number.POSITIVE_INFINITY });
1128
+ for await (const line of rl) {
996
1129
  if (!line.trim()) continue;
997
1130
  const entry = JSON.parse(line);
998
1131
  if (toDateKey(new Date(entry.timestamp)) === dayKey) {
@@ -1038,7 +1171,11 @@ var NeoEventEmitter = class {
1038
1171
  if (eventType !== "error") {
1039
1172
  try {
1040
1173
  this.emitter.emit("error", error);
1041
- } catch {
1174
+ } catch (nestedErr) {
1175
+ console.error(
1176
+ "[emitter] Error handler threw:",
1177
+ nestedErr instanceof Error ? nestedErr.message : String(nestedErr)
1178
+ );
1042
1179
  }
1043
1180
  }
1044
1181
  }
@@ -1046,16 +1183,37 @@ var NeoEventEmitter = class {
1046
1183
  };
1047
1184
 
1048
1185
  // src/events/journal.ts
1049
- import { appendFile as appendFile2 } from "fs/promises";
1186
+ import { appendFile as appendFile2, stat as stat2 } from "fs/promises";
1187
+ var JournalFileSizeError2 = class extends Error {
1188
+ constructor(filePath, fileSizeBytes, maxSizeBytes) {
1189
+ super(
1190
+ `Journal file exceeds maximum size: ${filePath} (${(fileSizeBytes / 1024 / 1024).toFixed(2)}MB > ${(maxSizeBytes / 1024 / 1024).toFixed(2)}MB)`
1191
+ );
1192
+ this.filePath = filePath;
1193
+ this.fileSizeBytes = fileSizeBytes;
1194
+ this.maxSizeBytes = maxSizeBytes;
1195
+ this.name = "JournalFileSizeError";
1196
+ }
1197
+ };
1050
1198
  var EventJournal = class {
1051
1199
  dir;
1052
1200
  dirCache = /* @__PURE__ */ new Set();
1201
+ maxFileSizeBytes;
1053
1202
  constructor(options) {
1054
1203
  this.dir = options.dir;
1204
+ this.maxFileSizeBytes = options.maxFileSizeBytes ?? 500 * 1024 * 1024;
1055
1205
  }
1056
1206
  async append(event) {
1057
1207
  await ensureDir(this.dir, this.dirCache);
1058
1208
  const file = fileForDate(new Date(event.timestamp), "events", this.dir);
1209
+ try {
1210
+ const stats = await stat2(file);
1211
+ if (stats.size > this.maxFileSizeBytes) {
1212
+ throw new JournalFileSizeError2(file, stats.size, this.maxFileSizeBytes);
1213
+ }
1214
+ } catch (error) {
1215
+ if (error.code !== "ENOENT") throw error;
1216
+ }
1059
1217
  await appendFile2(file, `${JSON.stringify(event)}
1060
1218
  `, "utf-8");
1061
1219
  }
@@ -1127,7 +1285,10 @@ async function sendWithRetry(url, headers, body, timeoutMs) {
1127
1285
  signal: AbortSignal.timeout(timeoutMs)
1128
1286
  });
1129
1287
  if (res.ok) return;
1130
- } catch {
1288
+ } catch (err) {
1289
+ console.debug(
1290
+ `[webhook] Network error on attempt ${attempt}: ${err instanceof Error ? err.message : String(err)}`
1291
+ );
1131
1292
  }
1132
1293
  if (attempt < RETRY_MAX_ATTEMPTS) {
1133
1294
  const delay = RETRY_BASE_DELAY_MS * 2 ** (attempt - 1);
@@ -1163,14 +1324,38 @@ import { dirname, resolve } from "path";
1163
1324
  import { promisify } from "util";
1164
1325
  var execFileAsync = promisify(execFile);
1165
1326
  var GIT_TIMEOUT = 6e4;
1327
+ function validateGitRef(refName, paramName) {
1328
+ if (!refName || typeof refName !== "string") {
1329
+ throw new Error(`${paramName} must be a non-empty string`);
1330
+ }
1331
+ if (refName.includes("..")) {
1332
+ throw new Error(`${paramName} contains invalid pattern '..' (directory traversal)`);
1333
+ }
1334
+ if (refName.startsWith("-")) {
1335
+ throw new Error(`${paramName} cannot start with '-' (option injection)`);
1336
+ }
1337
+ const validRefPattern = /^[a-zA-Z0-9/_+.-]+$/;
1338
+ if (!validRefPattern.test(refName)) {
1339
+ throw new Error(
1340
+ `${paramName} contains invalid characters. Only alphanumeric, dash, underscore, slash, plus, and dot are allowed. Got: ${refName}`
1341
+ );
1342
+ }
1343
+ }
1166
1344
  async function createSessionClone(options) {
1345
+ validateGitRef(options.branch, "branch");
1346
+ validateGitRef(options.baseBranch, "baseBranch");
1167
1347
  const repoPath = resolve(options.repoPath);
1168
1348
  const sessionDir = resolve(options.sessionDir);
1169
1349
  await mkdir3(dirname(sessionDir), { recursive: true });
1170
1350
  const remoteUrl = await execFileAsync("git", ["config", "--get", "remote.origin.url"], {
1171
1351
  cwd: repoPath,
1172
1352
  timeout: GIT_TIMEOUT
1173
- }).then(({ stdout }) => stdout.trim()).catch(() => "");
1353
+ }).then(({ stdout }) => stdout.trim()).catch((err) => {
1354
+ console.debug(
1355
+ `[neo] No remote.origin.url for ${repoPath}: ${err instanceof Error ? err.message : String(err)}`
1356
+ );
1357
+ return "";
1358
+ });
1174
1359
  const cloneSource = remoteUrl || repoPath;
1175
1360
  await execFileAsync("git", ["clone", "--branch", options.baseBranch, cloneSource, sessionDir], {
1176
1361
  timeout: GIT_TIMEOUT
@@ -1180,7 +1365,12 @@ async function createSessionClone(options) {
1180
1365
  "git",
1181
1366
  ["ls-remote", "--heads", "origin", options.branch],
1182
1367
  { cwd: sessionDir, timeout: GIT_TIMEOUT }
1183
- ).then(({ stdout }) => stdout.trim().length > 0).catch(() => false);
1368
+ ).then(({ stdout }) => stdout.trim().length > 0).catch((err) => {
1369
+ console.debug(
1370
+ `[neo] ls-remote failed for branch ${options.branch}: ${err instanceof Error ? err.message : String(err)}`
1371
+ );
1372
+ return false;
1373
+ });
1184
1374
  if (branchExists) {
1185
1375
  await execFileAsync("git", ["fetch", "origin", options.branch], {
1186
1376
  cwd: sessionDir,
@@ -1234,14 +1424,20 @@ async function listSessionClones(sessionsBaseDir) {
1234
1424
  );
1235
1425
  const url = originUrl.trim();
1236
1426
  if (url) repoPath = resolve(clonePath, url);
1237
- } catch {
1427
+ } catch (err) {
1428
+ console.debug(
1429
+ `[neo] Failed to get origin URL for ${clonePath}: ${err instanceof Error ? err.message : String(err)}`
1430
+ );
1238
1431
  }
1239
1432
  clones.push({
1240
1433
  path: clonePath,
1241
1434
  branch: branchOut.trim(),
1242
1435
  repoPath
1243
1436
  });
1244
- } catch {
1437
+ } catch (err) {
1438
+ console.debug(
1439
+ `[neo] Skipping ${clonePath}, not a valid git repo: ${err instanceof Error ? err.message : String(err)}`
1440
+ );
1245
1441
  }
1246
1442
  }
1247
1443
  return clones;
@@ -1261,15 +1457,21 @@ async function git(repoPath, args) {
1261
1457
  return stdout.trim();
1262
1458
  }
1263
1459
  async function createBranch(repoPath, branch, baseBranch) {
1460
+ validateGitRef(branch, "branch");
1461
+ validateGitRef(baseBranch, "baseBranch");
1264
1462
  await git(repoPath, ["branch", branch, baseBranch]);
1265
1463
  }
1266
1464
  async function pushBranch(repoPath, branch, remote) {
1465
+ validateGitRef(branch, "branch");
1466
+ validateGitRef(remote, "remote");
1267
1467
  await git(repoPath, ["push", remote, branch]);
1268
1468
  }
1269
1469
  async function fetchRemote(repoPath, remote) {
1470
+ validateGitRef(remote, "remote");
1270
1471
  await git(repoPath, ["fetch", remote]);
1271
1472
  }
1272
1473
  async function deleteBranch(repoPath, branch) {
1474
+ validateGitRef(branch, "branch");
1273
1475
  await git(repoPath, ["branch", "-D", branch]);
1274
1476
  }
1275
1477
  async function getCurrentBranch(repoPath) {
@@ -1282,6 +1484,8 @@ function getBranchName(config, runId, branch) {
1282
1484
  return `${prefix}/run-${sanitized}`;
1283
1485
  }
1284
1486
  async function pushSessionBranch(sessionPath, branch, remote) {
1487
+ validateGitRef(branch, "branch");
1488
+ validateGitRef(remote, "remote");
1285
1489
  await git(sessionPath, ["push", "-u", remote, branch]);
1286
1490
  }
1287
1491
 
@@ -1343,15 +1547,21 @@ function auditLog(options) {
1343
1547
  await appendFile3(filePath, lines.join(""), "utf-8");
1344
1548
  buffers.delete(sessionId);
1345
1549
  }
1550
+ function stopTimer() {
1551
+ if (flushTimer !== void 0) {
1552
+ clearInterval(flushTimer);
1553
+ flushTimer = void 0;
1554
+ }
1555
+ }
1346
1556
  return {
1347
1557
  name: "audit-log",
1348
1558
  on: "PostToolUse",
1349
1559
  async flush() {
1560
+ stopTimer();
1350
1561
  await flushAll();
1351
- if (flushTimer !== void 0) {
1352
- clearInterval(flushTimer);
1353
- flushTimer = void 0;
1354
- }
1562
+ },
1563
+ cleanup() {
1564
+ stopTimer();
1355
1565
  },
1356
1566
  async handler(event, context) {
1357
1567
  const entry = {
@@ -1506,12 +1716,12 @@ function loopDetection(options) {
1506
1716
  // src/orchestrator.ts
1507
1717
  import { randomUUID as randomUUID3 } from "crypto";
1508
1718
  import { existsSync as existsSync6 } from "fs";
1509
- import { mkdir as mkdir6, readFile as readFile6 } from "fs/promises";
1719
+ import { mkdir as mkdir6, readFile as readFile5 } from "fs/promises";
1510
1720
  import path10 from "path";
1511
1721
 
1512
1722
  // src/orchestrator/run-store.ts
1513
1723
  import { existsSync as existsSync4 } from "fs";
1514
- import { mkdir as mkdir5, readdir as readdir3, readFile as readFile4, writeFile as writeFile2 } from "fs/promises";
1724
+ import { mkdir as mkdir5, readdir as readdir3, readFile as readFile3, writeFile as writeFile2 } from "fs/promises";
1515
1725
  import path7 from "path";
1516
1726
 
1517
1727
  // src/shared/process.ts
@@ -1597,7 +1807,7 @@ var RunStore = class {
1597
1807
  * If so, update its status to "failed" and return it.
1598
1808
  */
1599
1809
  async recoverRunIfOrphaned(filePath) {
1600
- const content = await readFile4(filePath, "utf-8");
1810
+ const content = await readFile3(filePath, "utf-8");
1601
1811
  const run = JSON.parse(content);
1602
1812
  if (run.status !== "running") return null;
1603
1813
  if (run.pid && run.pid === process.pid) return null;
@@ -1612,13 +1822,13 @@ var RunStore = class {
1612
1822
  };
1613
1823
 
1614
1824
  // src/orchestrator/prompt-builder.ts
1615
- import { readFile as readFile5 } from "fs/promises";
1825
+ import { readFile as readFile4 } from "fs/promises";
1616
1826
  import path8 from "path";
1617
1827
  var INSTRUCTIONS_PATH = ".neo/INSTRUCTIONS.md";
1618
1828
  async function loadRepoInstructions(repoPath) {
1619
1829
  const filePath = path8.join(repoPath, INSTRUCTIONS_PATH);
1620
1830
  try {
1621
- return await readFile5(filePath, "utf-8");
1831
+ return await readFile4(filePath, "utf-8");
1622
1832
  } catch {
1623
1833
  return void 0;
1624
1834
  }
@@ -1831,7 +2041,7 @@ function buildQueryOptions(options) {
1831
2041
  // Always pass cwd: session clone for writable agents, repo root for readonly.
1832
2042
  // Without this, readonly agents default to process.cwd() and may write to main tree.
1833
2043
  cwd: sessionPath ?? options.repoPath,
1834
- // maxTurns: agent.maxTurns,
2044
+ ...options.maxTurns ? { maxTurns: options.maxTurns } : {},
1835
2045
  allowedTools: sandboxConfig.allowedTools,
1836
2046
  // Workers run detached without a TTY — bypass interactive permission prompts.
1837
2047
  // Required pair: permissionMode alone is not enough, SDK also needs the flag.
@@ -1848,6 +2058,9 @@ function buildQueryOptions(options) {
1848
2058
  if (options.mcpServers && Object.keys(options.mcpServers).length > 0) {
1849
2059
  queryOptions.mcpServers = options.mcpServers;
1850
2060
  }
2061
+ if (options.agents && Object.keys(options.agents).length > 0) {
2062
+ queryOptions.agents = options.agents;
2063
+ }
1851
2064
  if (options.env && Object.keys(options.env).length > 0) {
1852
2065
  queryOptions.env = { ...process.env, ...options.env };
1853
2066
  }
@@ -2076,14 +2289,23 @@ ALWAYS run commands from this directory. NEVER cd to or operate on any other rep
2076
2289
  sandboxConfig,
2077
2290
  hooks,
2078
2291
  env: agentEnv,
2292
+ agents: agent.definition.agents,
2079
2293
  initTimeoutMs: this.config.initTimeoutMs,
2080
2294
  maxDurationMs: this.config.maxDurationMs,
2081
2295
  maxRetries: this.config.maxRetries,
2082
2296
  backoffBaseMs: this.config.backoffBaseMs,
2083
2297
  ...sessionPath ? { sessionPath } : {},
2084
2298
  ...mcpServers ? { mcpServers } : {},
2085
- ...onAttempt ? { onAttempt } : {}
2299
+ ...onAttempt ? { onAttempt } : {},
2300
+ ...agent.maxTurns ? { maxTurns: agent.maxTurns } : {}
2086
2301
  });
2302
+ if (agent.maxCost !== void 0 && sessionResult.costUsd >= agent.maxCost) {
2303
+ throw new SessionError(
2304
+ `Agent session exceeded budget: $${sessionResult.costUsd.toFixed(4)} >= $${agent.maxCost.toFixed(4)} limit`,
2305
+ "budget_exceeded",
2306
+ sessionResult.sessionId
2307
+ );
2308
+ }
2087
2309
  const parsed = parseOutput(sessionResult.output);
2088
2310
  const result = {
2089
2311
  status: "success",
@@ -2684,6 +2906,7 @@ var Orchestrator = class extends NeoEventEmitter {
2684
2906
  return {
2685
2907
  paused: this._paused,
2686
2908
  activeSessions: [...this._activeSessions.values()],
2909
+ activeRunCount: this.activeRunCount,
2687
2910
  queueDepth: this.semaphore.queueDepth(),
2688
2911
  costToday: this._costToday,
2689
2912
  budgetCapUsd: this.config.budget.dailyCapUsd,
@@ -2694,6 +2917,15 @@ var Orchestrator = class extends NeoEventEmitter {
2694
2917
  get activeSessions() {
2695
2918
  return [...this._activeSessions.values()];
2696
2919
  }
2920
+ get activeRunCount() {
2921
+ let count = 0;
2922
+ for (const session of this._activeSessions.values()) {
2923
+ if (session.status === "running") {
2924
+ count++;
2925
+ }
2926
+ }
2927
+ return count;
2928
+ }
2697
2929
  // ─── Lifecycle ─────────────────────────────────────────
2698
2930
  async start() {
2699
2931
  this._startedAt = Date.now();
@@ -2865,7 +3097,10 @@ var Orchestrator = class extends NeoEventEmitter {
2865
3097
  outcome: "failure",
2866
3098
  runId
2867
3099
  });
2868
- } catch {
3100
+ } catch (err) {
3101
+ console.debug(
3102
+ `[orchestrator] Failed to write failure episode to memory: ${err instanceof Error ? err.message : String(err)}`
3103
+ );
2869
3104
  }
2870
3105
  return failResult;
2871
3106
  } finally {
@@ -2895,14 +3130,17 @@ var Orchestrator = class extends NeoEventEmitter {
2895
3130
  const branch = ctx.input.branch;
2896
3131
  const remote = ctx.repoConfig.pushRemote ?? "origin";
2897
3132
  try {
2898
- await pushSessionBranch(sessionPath, branch, remote).catch(() => {
3133
+ await pushSessionBranch(sessionPath, branch, remote).catch((err) => {
3134
+ console.debug("[neo] Push failed:", err);
2899
3135
  });
2900
- } catch {
3136
+ } catch (err) {
3137
+ console.debug("[neo] Finalization error:", err);
2901
3138
  }
2902
3139
  }
2903
3140
  try {
2904
3141
  await removeSessionClone(sessionPath);
2905
- } catch {
3142
+ } catch (err) {
3143
+ console.debug("[neo] Session cleanup failed:", err);
2906
3144
  }
2907
3145
  }
2908
3146
  async runAgentSession(ctx, sessionPath) {
@@ -2979,7 +3217,10 @@ var Orchestrator = class extends NeoEventEmitter {
2979
3217
  outcome: isSuccess ? "success" : "failure",
2980
3218
  runId
2981
3219
  });
2982
- } catch {
3220
+ } catch (err) {
3221
+ console.debug(
3222
+ `[orchestrator] Failed to write completion episode to memory: ${err instanceof Error ? err.message : String(err)}`
3223
+ );
2983
3224
  }
2984
3225
  return result;
2985
3226
  }
@@ -3046,7 +3287,10 @@ var Orchestrator = class extends NeoEventEmitter {
3046
3287
  if (memories.length === 0) return void 0;
3047
3288
  store.markAccessed(memories.map((m) => m.id));
3048
3289
  return formatMemoriesForPrompt(memories);
3049
- } catch {
3290
+ } catch (err) {
3291
+ console.debug(
3292
+ `[orchestrator] Failed to load memories: ${err instanceof Error ? err.message : String(err)}`
3293
+ );
3050
3294
  return void 0;
3051
3295
  }
3052
3296
  }
@@ -3197,17 +3441,17 @@ var Orchestrator = class extends NeoEventEmitter {
3197
3441
  // ─── Private: Supervisor discovery ─────────────────────
3198
3442
  /** Discover running supervisor daemons and return webhook configs for their endpoints. */
3199
3443
  async discoverSupervisorWebhooks() {
3200
- const { readdir: readdir6 } = await import("fs/promises");
3444
+ const { readdir: readdir7 } = await import("fs/promises");
3201
3445
  const supervisorsDir = getSupervisorsDir();
3202
3446
  if (!existsSync6(supervisorsDir)) return [];
3203
3447
  const webhooks = [];
3204
3448
  try {
3205
- const entries = await readdir6(supervisorsDir, { withFileTypes: true });
3449
+ const entries = await readdir7(supervisorsDir, { withFileTypes: true });
3206
3450
  for (const entry of entries) {
3207
3451
  if (!entry.isDirectory()) continue;
3208
3452
  try {
3209
3453
  const statePath = path10.join(supervisorsDir, entry.name, "state.json");
3210
- const raw = await readFile6(statePath, "utf-8");
3454
+ const raw = await readFile5(statePath, "utf-8");
3211
3455
  const state = JSON.parse(raw);
3212
3456
  if (state.status !== "running" || !state.port) continue;
3213
3457
  if (state.pid && !isProcessAlive(state.pid)) continue;
@@ -3217,10 +3461,16 @@ var Orchestrator = class extends NeoEventEmitter {
3217
3461
  secret: this.config.supervisor.secret,
3218
3462
  timeoutMs: 5e3
3219
3463
  });
3220
- } catch {
3464
+ } catch (err) {
3465
+ console.debug(
3466
+ `[orchestrator] Failed to load supervisor webhook config: ${err instanceof Error ? err.message : String(err)}`
3467
+ );
3221
3468
  }
3222
3469
  }
3223
- } catch {
3470
+ } catch (err) {
3471
+ console.debug(
3472
+ `[orchestrator] Failed to read supervisors directory: ${err instanceof Error ? err.message : String(err)}`
3473
+ );
3224
3474
  }
3225
3475
  return webhooks;
3226
3476
  }
@@ -3263,9 +3513,20 @@ import { z as z5 } from "zod";
3263
3513
 
3264
3514
  // src/supervisor/decisions.ts
3265
3515
  import { randomUUID as randomUUID4 } from "crypto";
3266
- import { appendFile as appendFile4, readFile as readFile7, writeFile as writeFile3 } from "fs/promises";
3516
+ import { appendFile as appendFile4, readFile as readFile6, stat as stat3, writeFile as writeFile3 } from "fs/promises";
3267
3517
  import path11 from "path";
3268
3518
  import { z as z4 } from "zod";
3519
+ var DecisionFileSizeError = class extends Error {
3520
+ constructor(filePath, fileSizeBytes, maxSizeBytes) {
3521
+ super(
3522
+ `Decision file exceeds maximum size: ${filePath} (${(fileSizeBytes / 1024 / 1024).toFixed(2)}MB > ${(maxSizeBytes / 1024 / 1024).toFixed(2)}MB)`
3523
+ );
3524
+ this.filePath = filePath;
3525
+ this.fileSizeBytes = fileSizeBytes;
3526
+ this.maxSizeBytes = maxSizeBytes;
3527
+ this.name = "DecisionFileSizeError";
3528
+ }
3529
+ };
3269
3530
  var decisionOptionSchema = z4.object({
3270
3531
  key: z4.string(),
3271
3532
  label: z4.string(),
@@ -3279,21 +3540,49 @@ var decisionSchema = z4.object({
3279
3540
  type: z4.string().default("generic"),
3280
3541
  source: z4.string(),
3281
3542
  metadata: z4.record(z4.string(), z4.unknown()).optional(),
3282
- createdAt: z4.string(),
3283
- expiresAt: z4.string().optional(),
3543
+ createdAt: z4.coerce.string(),
3544
+ expiresAt: z4.coerce.string().optional(),
3284
3545
  defaultAnswer: z4.string().optional(),
3285
- answeredAt: z4.string().optional(),
3546
+ answeredAt: z4.coerce.string().optional(),
3286
3547
  answer: z4.string().optional(),
3287
- expiredAt: z4.string().optional()
3548
+ expiredAt: z4.coerce.string().optional()
3549
+ });
3550
+ var tombstoneSchema = z4.object({
3551
+ action: z4.literal("tombstone"),
3552
+ id: z4.string(),
3553
+ createdAt: z4.coerce.string(),
3554
+ reason: z4.enum(["deleted", "expired", "purged"])
3288
3555
  });
3289
3556
  var DecisionStore = class {
3290
3557
  filePath;
3291
3558
  dir;
3292
3559
  dirCache = /* @__PURE__ */ new Set();
3560
+ /** Promise-based mutex to serialize write operations */
3561
+ writeLock = Promise.resolve();
3562
+ /** Maximum file size in bytes before validation fails (default: 10MB) */
3563
+ maxFileSizeBytes = 10 * 1024 * 1024;
3293
3564
  constructor(filePath) {
3294
3565
  this.filePath = filePath;
3295
3566
  this.dir = path11.dirname(filePath);
3296
3567
  }
3568
+ /**
3569
+ * Acquire the write lock and execute a callback.
3570
+ * Serializes all write operations to prevent race conditions.
3571
+ */
3572
+ async withWriteLock(fn) {
3573
+ const release = this.writeLock;
3574
+ let releaseLock = () => {
3575
+ };
3576
+ this.writeLock = new Promise((r) => {
3577
+ releaseLock = r;
3578
+ });
3579
+ try {
3580
+ await release;
3581
+ return await fn();
3582
+ } finally {
3583
+ releaseLock();
3584
+ }
3585
+ }
3297
3586
  /**
3298
3587
  * Create a new decision and persist it.
3299
3588
  * @returns The generated decision ID
@@ -3313,19 +3602,22 @@ var DecisionStore = class {
3313
3602
  /**
3314
3603
  * Answer a decision by ID.
3315
3604
  * Reads all entries, updates the matching one, and rewrites the file.
3605
+ * Uses a mutex to serialize concurrent calls and prevent race conditions.
3316
3606
  */
3317
3607
  async answer(id, answer) {
3318
- const decisions = await this.readAll();
3319
- const decision = decisions.find((d) => d.id === id);
3320
- if (!decision) {
3321
- throw new Error(`Decision not found: ${id}`);
3322
- }
3323
- if (decision.answer !== void 0) {
3324
- throw new Error(`Decision already answered: ${id}`);
3325
- }
3326
- decision.answer = answer;
3327
- decision.answeredAt = (/* @__PURE__ */ new Date()).toISOString();
3328
- await this.writeAll(decisions);
3608
+ await this.withWriteLock(async () => {
3609
+ const decisions = await this.readAll();
3610
+ const decision = decisions.find((d) => d.id === id);
3611
+ if (!decision) {
3612
+ throw new Error(`Decision not found: ${id}`);
3613
+ }
3614
+ if (decision.answer !== void 0) {
3615
+ throw new Error(`Decision already answered: ${id}`);
3616
+ }
3617
+ decision.answer = answer;
3618
+ decision.answeredAt = (/* @__PURE__ */ new Date()).toISOString();
3619
+ await this.writeAll(decisions);
3620
+ });
3329
3621
  }
3330
3622
  /**
3331
3623
  * Get all pending decisions (unanswered, not expired, not timed out).
@@ -3362,33 +3654,40 @@ var DecisionStore = class {
3362
3654
  /**
3363
3655
  * Auto-answer expired decisions with their defaultAnswer.
3364
3656
  * Decisions without defaultAnswer are marked as expired (expiredAt).
3657
+ * Uses a mutex to serialize concurrent calls and prevent race conditions.
3365
3658
  * @returns The decisions that were auto-answered or marked expired
3366
3659
  */
3367
3660
  async expire() {
3368
- const decisions = await this.readAll();
3369
- const now = (/* @__PURE__ */ new Date()).toISOString();
3370
- const expired = [];
3371
- for (const decision of decisions) {
3372
- if (decision.answer === void 0 && decision.expiredAt === void 0 && decision.expiresAt && decision.expiresAt < now) {
3373
- if (decision.defaultAnswer !== void 0) {
3374
- decision.answer = decision.defaultAnswer;
3375
- decision.answeredAt = now;
3376
- } else {
3377
- decision.expiredAt = now;
3661
+ return this.withWriteLock(async () => {
3662
+ const decisions = await this.readAll();
3663
+ const now = (/* @__PURE__ */ new Date()).toISOString();
3664
+ const expired = [];
3665
+ for (const decision of decisions) {
3666
+ if (decision.answer === void 0 && decision.expiredAt === void 0 && decision.expiresAt && decision.expiresAt < now) {
3667
+ if (decision.defaultAnswer !== void 0) {
3668
+ decision.answer = decision.defaultAnswer;
3669
+ decision.answeredAt = now;
3670
+ } else {
3671
+ decision.expiredAt = now;
3672
+ }
3673
+ expired.push(decision);
3378
3674
  }
3379
- expired.push(decision);
3380
3675
  }
3381
- }
3382
- if (expired.length > 0) {
3383
- await this.writeAll(decisions);
3384
- }
3385
- return expired;
3676
+ if (expired.length > 0) {
3677
+ await this.writeAll(decisions);
3678
+ }
3679
+ return expired;
3680
+ });
3386
3681
  }
3387
3682
  // ─── Private helpers ─────────────────────────────────────
3388
3683
  async readAll() {
3389
3684
  let content;
3390
3685
  try {
3391
- content = await readFile7(this.filePath, "utf-8");
3686
+ const stats = await stat3(this.filePath);
3687
+ if (stats.size > this.maxFileSizeBytes) {
3688
+ throw new DecisionFileSizeError(this.filePath, stats.size, this.maxFileSizeBytes);
3689
+ }
3690
+ content = await readFile6(this.filePath, "utf-8");
3392
3691
  } catch (error) {
3393
3692
  if (error.code === "ENOENT") {
3394
3693
  return [];
@@ -3514,7 +3813,7 @@ var activityQueryOptionsSchema = z5.object({
3514
3813
 
3515
3814
  // src/supervisor/activity-log.ts
3516
3815
  import { randomUUID as randomUUID5 } from "crypto";
3517
- import { appendFile as appendFile5, readFile as readFile8, rename, stat } from "fs/promises";
3816
+ import { appendFile as appendFile5, readFile as readFile7, rename, stat as stat4 } from "fs/promises";
3518
3817
  import path12 from "path";
3519
3818
  var ACTIVITY_FILE = "activity.jsonl";
3520
3819
  var MAX_SIZE_BYTES = 10 * 1024 * 1024;
@@ -3553,8 +3852,11 @@ var ActivityLog = class {
3553
3852
  async tail(n) {
3554
3853
  let content;
3555
3854
  try {
3556
- content = await readFile8(this.filePath, "utf-8");
3557
- } catch {
3855
+ content = await readFile7(this.filePath, "utf-8");
3856
+ } catch (err) {
3857
+ console.debug(
3858
+ `[ActivityLog] Failed to read activity log: ${err instanceof Error ? err.message : String(err)}`
3859
+ );
3558
3860
  return [];
3559
3861
  }
3560
3862
  const lines = content.trim().split("\n").filter(Boolean);
@@ -3563,14 +3865,17 @@ var ActivityLog = class {
3563
3865
  for (const line of lastLines) {
3564
3866
  try {
3565
3867
  entries.push(JSON.parse(line));
3566
- } catch {
3868
+ } catch (err) {
3869
+ console.debug(
3870
+ `[ActivityLog] Skipping malformed line: ${err instanceof Error ? err.message : String(err)}`
3871
+ );
3567
3872
  }
3568
3873
  }
3569
3874
  return entries;
3570
3875
  }
3571
3876
  async checkRotation() {
3572
3877
  try {
3573
- const stats = await stat(this.filePath);
3878
+ const stats = await stat4(this.filePath);
3574
3879
  if (stats.size > MAX_SIZE_BYTES) {
3575
3880
  const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
3576
3881
  const rotatedPath = path12.join(this.dir, `activity-${timestamp}.jsonl`);
@@ -3584,13 +3889,13 @@ var ActivityLog = class {
3584
3889
  // src/supervisor/daemon.ts
3585
3890
  import { randomUUID as randomUUID7 } from "crypto";
3586
3891
  import { existsSync as existsSync8 } from "fs";
3587
- import { mkdir as mkdir7, readFile as readFile12, rm as rm2, writeFile as writeFile7 } from "fs/promises";
3892
+ import { mkdir as mkdir7, readFile as readFile11, rm as rm2, writeFile as writeFile7 } from "fs/promises";
3588
3893
  import { homedir as homedir5 } from "os";
3589
3894
  import path15 from "path";
3590
3895
 
3591
3896
  // src/supervisor/event-queue.ts
3592
3897
  import { watch as watch2 } from "fs";
3593
- import { readFile as readFile9, writeFile as writeFile4 } from "fs/promises";
3898
+ import { readFile as readFile8, writeFile as writeFile4 } from "fs/promises";
3594
3899
  var EventQueue = class {
3595
3900
  queue = [];
3596
3901
  seenIds = /* @__PURE__ */ new Set();
@@ -3743,7 +4048,7 @@ var EventQueue = class {
3743
4048
  async readNewLines(filePath, kind) {
3744
4049
  let content;
3745
4050
  try {
3746
- content = await readFile9(filePath, "utf-8");
4051
+ content = await readFile8(filePath, "utf-8");
3747
4052
  } catch (_err) {
3748
4053
  return;
3749
4054
  }
@@ -3768,7 +4073,7 @@ var EventQueue = class {
3768
4073
  async replayFile(filePath, kind) {
3769
4074
  let content;
3770
4075
  try {
3771
- content = await readFile9(filePath, "utf-8");
4076
+ content = await readFile8(filePath, "utf-8");
3772
4077
  } catch (_err) {
3773
4078
  return;
3774
4079
  }
@@ -3804,7 +4109,7 @@ var EventQueue = class {
3804
4109
  }
3805
4110
  async markInFile(filePath, matchTimestamp, processedAt) {
3806
4111
  try {
3807
- const content = await readFile9(filePath, "utf-8");
4112
+ const content = await readFile8(filePath, "utf-8");
3808
4113
  const lines = content.split("\n");
3809
4114
  let changed = false;
3810
4115
  const updated = lines.map((line) => {
@@ -3832,7 +4137,7 @@ var EventQueue = class {
3832
4137
  // src/supervisor/heartbeat.ts
3833
4138
  import { randomUUID as randomUUID6 } from "crypto";
3834
4139
  import { existsSync as existsSync7 } from "fs";
3835
- import { readdir as readdir4, readFile as readFile11, writeFile as writeFile6 } from "fs/promises";
4140
+ import { readdir as readdir4, readFile as readFile10, writeFile as writeFile6 } from "fs/promises";
3836
4141
  import { homedir as homedir4 } from "os";
3837
4142
  import path14 from "path";
3838
4143
 
@@ -3888,7 +4193,7 @@ var IdleDetector = class {
3888
4193
  };
3889
4194
 
3890
4195
  // src/supervisor/log-buffer.ts
3891
- import { appendFile as appendFile6, readFile as readFile10, stat as stat2, writeFile as writeFile5 } from "fs/promises";
4196
+ import { appendFile as appendFile6, readFile as readFile9, stat as stat5, writeFile as writeFile5 } from "fs/promises";
3892
4197
  import path13 from "path";
3893
4198
  var LOG_BUFFER_FILE = "log-buffer.jsonl";
3894
4199
  var MAX_FILE_BYTES = 1024 * 1024;
@@ -3902,16 +4207,22 @@ function parseLines(content) {
3902
4207
  for (const line of lines) {
3903
4208
  try {
3904
4209
  entries.push(JSON.parse(line));
3905
- } catch {
4210
+ } catch (err) {
4211
+ console.debug(
4212
+ `[log-buffer] Skipping malformed line: ${err instanceof Error ? err.message : String(err)}`
4213
+ );
3906
4214
  }
3907
4215
  }
3908
4216
  return entries;
3909
4217
  }
3910
4218
  async function readLogBuffer(dir) {
3911
4219
  try {
3912
- const content = await readFile10(bufferPath(dir), "utf-8");
4220
+ const content = await readFile9(bufferPath(dir), "utf-8");
3913
4221
  return parseLines(content);
3914
- } catch {
4222
+ } catch (err) {
4223
+ console.debug(
4224
+ `[log-buffer] Failed to read buffer: ${err instanceof Error ? err.message : String(err)}`
4225
+ );
3915
4226
  return [];
3916
4227
  }
3917
4228
  }
@@ -3923,8 +4234,11 @@ async function markConsolidated(dir, ids) {
3923
4234
  const filePath = bufferPath(dir);
3924
4235
  let content;
3925
4236
  try {
3926
- content = await readFile10(filePath, "utf-8");
3927
- } catch {
4237
+ content = await readFile9(filePath, "utf-8");
4238
+ } catch (err) {
4239
+ console.debug(
4240
+ `[log-buffer] Failed to read for consolidation: ${err instanceof Error ? err.message : String(err)}`
4241
+ );
3928
4242
  return;
3929
4243
  }
3930
4244
  const idSet = new Set(ids);
@@ -3938,7 +4252,10 @@ async function markConsolidated(dir, ids) {
3938
4252
  entry.consolidatedAt = now;
3939
4253
  }
3940
4254
  updated.push(JSON.stringify(entry));
3941
- } catch {
4255
+ } catch (err) {
4256
+ console.debug(
4257
+ `[log-buffer] Preserving malformed line during consolidation: ${err instanceof Error ? err.message : String(err)}`
4258
+ );
3942
4259
  updated.push(line);
3943
4260
  }
3944
4261
  }
@@ -3949,8 +4266,11 @@ async function compactLogBuffer(dir) {
3949
4266
  const filePath = bufferPath(dir);
3950
4267
  let content;
3951
4268
  try {
3952
- content = await readFile10(filePath, "utf-8");
3953
- } catch {
4269
+ content = await readFile9(filePath, "utf-8");
4270
+ } catch (err) {
4271
+ console.debug(
4272
+ `[log-buffer] Failed to read for compaction: ${err instanceof Error ? err.message : String(err)}`
4273
+ );
3954
4274
  return;
3955
4275
  }
3956
4276
  const now = Date.now();
@@ -3966,7 +4286,10 @@ async function compactLogBuffer(dir) {
3966
4286
  }
3967
4287
  }
3968
4288
  kept.push(JSON.stringify(entry));
3969
- } catch {
4289
+ } catch (err) {
4290
+ console.debug(
4291
+ `[log-buffer] Dropping malformed line during compaction: ${err instanceof Error ? err.message : String(err)}`
4292
+ );
3970
4293
  }
3971
4294
  }
3972
4295
  let result = `${kept.join("\n")}
@@ -3982,7 +4305,10 @@ async function appendLogBuffer(dir, entry) {
3982
4305
  try {
3983
4306
  await appendFile6(bufferPath(dir), `${JSON.stringify(entry)}
3984
4307
  `, "utf-8");
3985
- } catch {
4308
+ } catch (err) {
4309
+ console.debug(
4310
+ `[log-buffer] Failed to append entry: ${err instanceof Error ? err.message : String(err)}`
4311
+ );
3986
4312
  }
3987
4313
  }
3988
4314
 
@@ -4000,7 +4326,10 @@ var OPERATING_PRINCIPLES = `### Operating principles
4000
4326
  - Keep initiative boundaries strict: decisions for initiative A must not be influenced by unrelated state from B.
4001
4327
  - Your user-visible channel is \`neo log\` only; produce concise tool calls (not reasoning/explanations) and avoid wasted tokens.
4002
4328
  - You may inspect repositories available via \`neo repos\`, read-only to launch agents.
4003
- - Task hygiene is non-negotiable: update task outcomes EVERY heartbeat. A task without a current outcome is a blind spot.`;
4329
+ - Task hygiene is non-negotiable: update task outcomes EVERY heartbeat. A task without a current outcome is a blind spot.
4330
+ - **No duplicate dispatches**: before dispatching a \`developer\` for any finding, ALWAYS check for open or recently merged PRs on the same topic: \`gh pr list --repo <repo> --search "<keywords>" --state open\` and \`--state merged --limit 5\`. If a similar PR exists \u2192 skip and log with \`neo log discovery\`. Dispatching duplicate agents wastes budget and pollutes the PR list.
4331
+ - **Decision routing**: when a pending decision arrives from an agent, answer within 1-2 heartbeats. Route: (1) answer directly if strategic/scope/priority, (2) dispatch scout to investigate if codebase context needed, (3) wait for human if autoDecide is off or genuinely uncertain. Agents are BLOCKED waiting \u2014 stale decisions waste session budget.
4332
+ - **Verify agent output**: always read agent output with \`neo runs <runId>\` before dispatching follow-up work. Route based on agent output contracts documented in SUPERVISOR.md.`;
4004
4333
  var COMMANDS = `### Dispatching agents
4005
4334
  \`\`\`bash
4006
4335
  neo run <agent> --prompt "..." --repo <path> --branch <name> [--priority critical|high|medium|low] [--meta '<json>']
@@ -4025,7 +4354,7 @@ neo runs <runId> # full run details + agent output (MUST READ
4025
4354
  neo cost --short [--all] # check budget
4026
4355
  \`\`\`
4027
4356
 
4028
- \`neo runs <runId>\` returns the agent's full output. **ALWAYS read it when a run completes** \u2014 it contains structured JSON (PR URLs, issues, plans, milestones) that you need to decide next steps.
4357
+ \`neo runs <runId>\` returns the agent's full output. **ALWAYS read it when a run completes** \u2014 it contains the agent's results that you need to decide next steps per SUPERVISOR.md routing rules.
4029
4358
 
4030
4359
  ### Memory
4031
4360
  \`\`\`bash
@@ -4076,8 +4405,9 @@ var HEARTBEAT_RULES = `### Heartbeat lifecycle
4076
4405
  1. DEDUP FIRST \u2014 check focus for PROCESSED entries. Skip any runId already processed.
4077
4406
  2. MONITOR RUNS \u2014 \`neo runs --short\` to check active run status. If a run completed since last HB, read its output with \`neo runs <runId>\` BEFORE doing anything else.
4078
4407
  3. PENDING TASKS? \u2014 dispatch the next eligible task from work queue. Do not re-plan.
4079
- 4. EVENTS? \u2014 process run completions, messages, webhooks. Parse agent JSON output.
4408
+ 4. EVENTS? \u2014 process run completions, messages, webhooks. Read agent output and route per SUPERVISOR.md contracts.
4080
4409
  5. FOLLOW-UPS? \u2014 check CI (\`gh pr checks\`), deferred dispatches.
4410
+ 5b. DECISIONS? \u2014 check \`neo decision list\` for pending decisions from agents. Route each: answer directly, dispatch scout to investigate, or wait for human. Agents are blocked waiting \u2014 prioritize these.
4081
4411
  6. DISPATCH \u2014 route work to agents. Mark tasks \`in_progress\`, add ACTIVE to focus.
4082
4412
  7. UPDATE TASKS \u2014 review ALL in_progress/blocked tasks. For each: confirm status matches reality (run still active? PR merged? blocked resolved?). Update outcomes immediately \u2014 do not defer to next heartbeat.
4083
4413
  8. SERIALIZE & YIELD \u2014 rewrite focus (see <focus>), log your decisions, and yield. Do not poll.
@@ -4086,15 +4416,15 @@ var HEARTBEAT_RULES = `### Heartbeat lifecycle
4086
4416
  <run-monitoring>
4087
4417
  Runs are your agents in the field. You MUST actively track them:
4088
4418
  - **On dispatch**: include a label in \`--meta\` for identification: \`--meta '{"label":"T6-csv-export","ticketId":"YC-42",...}'\`
4089
- - **On completion**: ALWAYS run \`neo runs <runId>\` to read the full output. Parse structured JSON (PR URLs, issues, plans). This is NOT optional \u2014 you cannot decide next steps without reading the output.
4419
+ - **On completion**: ALWAYS run \`neo runs <runId>\` to read the full output. This is NOT optional \u2014 you cannot decide next steps without reading the output.
4090
4420
  - **On failure**: read the output to understand why. Decide: retry (blocked), abandon, or escalate.
4091
4421
  - **Active runs**: check \`neo runs --short --status running\` to verify your runs are still alive. If a run disappeared, investigate.
4092
4422
  </run-monitoring>
4093
4423
 
4094
4424
  <multi-task-initiatives>
4095
- **Branch strategy:** one branch per initiative \u2014 all tasks push to the same branch sequentially (never in parallel). First task creates the branch; open PR after it completes. Later tasks add commits to the same PR. Independent initiatives CAN run in parallel on different branches.
4425
+ **Branch strategy:** one branch per initiative. Architect produces a plan; developer executes all tasks on that branch. Independent initiatives CAN run in parallel on different branches.
4096
4426
 
4097
- **Dispatch quality:** write a detailed \`--prompt\` with acceptance criteria, files to modify, and context from completed sibling tasks (commits, APIs added, files changed). When dispatching task N, summarize what tasks 1..N-1 produced.
4427
+ **Dispatch quality:** when dispatching developer with a plan, include the plan path and any context from completed prior work (PR numbers, APIs added). For direct tasks (no plan), write a detailed \`--prompt\` with acceptance criteria.
4098
4428
 
4099
4429
  **Post-completion:** if agent opened a PR, dispatch \`reviewer\` in parallel with CI (do not wait). Update task outcome with concrete details (PR#, what was done) and update the initiative note.
4100
4430
 
@@ -4183,7 +4513,7 @@ function buildMemoryRulesExamples(supervisorDir) {
4183
4513
  neo memory write --type focus --expires 2h "ACTIVE: 5900a64a developer 'T1' branch:feat/x (cat ${notesDir}/plan-YC-2670-kanban.md)"
4184
4514
  neo memory write --type fact --scope /repo "main branch uses protected merges \u2014 agents must create PRs, never push directly"
4185
4515
  neo memory write --type fact --scope /repo "pnpm build must pass before push \u2014 CI does not rebuild, run 2g589f34a5a failed without it"
4186
- neo memory write --type procedure --scope /repo "After architect run: parse milestones from JSON output, create one task per milestone with --tags initiative:<name>"
4516
+ neo memory write --type procedure --scope /repo "After architect run: read plan path from output, dispatch developer with plan per SUPERVISOR.md routing"
4187
4517
  neo memory write --type procedure --scope /repo "When developer run fails with ENOSPC: the repo has large fixtures \u2014 use --branch with shallow clone flag"
4188
4518
  neo memory write --type feedback --scope /repo "User wants PR descriptions in French even though code is in English"
4189
4519
  neo memory write --type task --scope /repo --severity high --category "neo runs 2g589f34a5a" --tags "initiative:auth-v2,depends:mem_xyz" "T1: Auth middleware"
@@ -4216,7 +4546,7 @@ ${lines}
4216
4546
  }
4217
4547
  return "<focus>\n(empty \u2014 use neo memory write --type focus to set working context)\n</focus>";
4218
4548
  }
4219
- function buildPendingDecisionsSection(decisions, autoDecide = false) {
4549
+ function buildPendingDecisionsSection(decisions) {
4220
4550
  if (!decisions || decisions.length === 0) {
4221
4551
  return "";
4222
4552
  }
@@ -4235,7 +4565,7 @@ function buildPendingDecisionsSection(decisions, autoDecide = false) {
4235
4565
  lines.push(` Context: ${d.context}`);
4236
4566
  }
4237
4567
  }
4238
- const instruction = autoDecide ? `You are in **autoDecide** mode \u2014 answer each pending decision yourself based on available context, project knowledge, and best engineering judgment.
4568
+ const instruction = `You are in **autoDecide** mode \u2014 answer each pending decision yourself based on available context, project knowledge, and best engineering judgment.
4239
4569
 
4240
4570
  \`\`\`bash
4241
4571
  neo decision answer <decision_id> <answer>
@@ -4243,10 +4573,7 @@ neo decision answer <decision_id> <answer>
4243
4573
 
4244
4574
  For each decision: analyze the options, consider the project context and risk, then answer decisively. Prefer safe, incremental choices when uncertain. Log your reasoning before answering.
4245
4575
 
4246
- **Merge authority:** In autoDecide mode you MAY merge branches when the PR is ready (CI green, reviews approved). Use \`gh pr merge\` with the appropriate merge strategy.` : `To answer a decision, emit a \`decision:answer\` event:
4247
- \`\`\`bash
4248
- neo event emit decision:answer --data '{"id":"<decision_id>","answer":"<option_key>"}'
4249
- \`\`\``;
4576
+ **Merge authority:** In autoDecide mode you MAY merge branches when the PR is ready (CI green, reviews approved). Use \`gh pr merge\` with the appropriate merge strategy.`;
4250
4577
  return `Pending decisions (${decisions.length}):
4251
4578
  ${lines.join("\n")}
4252
4579
 
@@ -4278,7 +4605,7 @@ ${opts.activeRuns.map((r) => `- ${r}`).join("\n")}`);
4278
4605
  if (recentActions) {
4279
4606
  parts.push(recentActions);
4280
4607
  }
4281
- const pendingDecisions = buildPendingDecisionsSection(opts.pendingDecisions, opts.autoDecide);
4608
+ const pendingDecisions = buildPendingDecisionsSection(opts.pendingDecisions);
4282
4609
  if (pendingDecisions) {
4283
4610
  parts.push(pendingDecisions);
4284
4611
  }
@@ -4589,7 +4916,7 @@ function buildIdlePrompt(opts) {
4589
4916
  const budgetLine = `Budget: $${opts.budgetStatus.todayUsd.toFixed(2)} / $${opts.budgetStatus.capUsd.toFixed(2)} (${opts.budgetStatus.remainingPct.toFixed(0)}% remaining)`;
4590
4917
  const hasRepos = opts.repos.length > 0;
4591
4918
  const hasBudget = opts.budgetStatus.remainingPct > 10;
4592
- const hasPendingDecisions = (opts.pendingDecisions?.length ?? 0) > 0;
4919
+ const hasPendingDecisions = opts.hasPendingDecisions ?? (opts.pendingDecisions?.length ?? 0) > 0;
4593
4920
  if (!hasRepos || !hasBudget) {
4594
4921
  return `${buildRoleSection(opts.heartbeatCount)}
4595
4922
 
@@ -4604,7 +4931,7 @@ Nothing to do. Run \`neo log discovery "idle"\` and yield. Do not produce any ot
4604
4931
  }
4605
4932
  const repoList = opts.repos.map((r) => `- ${r.path} (branch: ${r.defaultBranch})`).join("\n");
4606
4933
  if (hasPendingDecisions) {
4607
- const pendingSection = buildPendingDecisionsSection(opts.pendingDecisions, opts.autoDecide);
4934
+ const pendingSection = buildPendingDecisionsSection(opts.pendingDecisions);
4608
4935
  if (opts.autoDecide) {
4609
4936
  return `${buildRoleSection(opts.heartbeatCount)}
4610
4937
 
@@ -4653,29 +4980,8 @@ Repositories:
4653
4980
  ${repoList}
4654
4981
  </context>
4655
4982
 
4656
- <reference>
4657
- ${getCommandsSection(opts.heartbeatCount)}
4658
- </reference>
4659
-
4660
4983
  <directive>
4661
- Idle \u2014 no work in progress. Use this downtime to dispatch a \`scout\` agent on one of your repositories.
4662
-
4663
- The scout explores the codebase and surfaces bugs, improvements, security issues, and tech debt. It creates decisions (via \`neo decision create\`) for each critical or high-impact finding, so the user can choose what to act on.
4664
-
4665
- **Rules:**
4666
- - Pick the repo that was least recently scouted (check your memory for previous scout runs).
4667
- - Only ONE scout at a time \u2014 never dispatch multiple scouts in parallel.
4668
- - Use \`--branch main\` (or the repo's default branch) \u2014 scouts are read-only.
4669
- - Log your decision before dispatching.
4670
-
4671
- **Example:**
4672
- \`\`\`bash
4673
- neo log decision "Idle \u2014 dispatching scout on <repo>"
4674
- neo run scout --prompt "Explore this repository. Surface bugs, improvements, security issues, and tech debt. Create decisions for critical and high-impact findings." \\
4675
- --repo <path> \\
4676
- --branch <default-branch> \\
4677
- --meta '{"stage":"scout","label":"scout-<repo-name>"}'
4678
- \`\`\`
4984
+ Nothing to do. Run \`neo log discovery "idle"\` and yield. Do not produce any other output.
4679
4985
  </directive>`;
4680
4986
  }
4681
4987
  function buildStandardPrompt(opts) {
@@ -4944,7 +5250,8 @@ var HeartbeatLoop = class {
4944
5250
  if (this.configStore) {
4945
5251
  this.config = this.configStore.getAll();
4946
5252
  }
4947
- this.activityLog.log("event", "Configuration reloaded (hot-reload)").catch(() => {
5253
+ this.activityLog.log("event", "Configuration reloaded (hot-reload)").catch((err) => {
5254
+ console.debug("[neo] Config reload log failed:", err);
4948
5255
  });
4949
5256
  this.eventQueue.interrupt();
4950
5257
  }
@@ -4955,21 +5262,14 @@ var HeartbeatLoop = class {
4955
5262
  const today = (/* @__PURE__ */ new Date()).toISOString().slice(0, 10);
4956
5263
  const budgetCheck = await this.checkBudgetExceeded(state, today);
4957
5264
  if (budgetCheck.exceeded) return;
4958
- const { grouped, rawEvents } = this.eventQueue.drainAndGroup();
4959
- const totalEventCount = grouped.messages.length + grouped.webhooks.length + grouped.runCompletions.length;
4960
- const activeRuns = await this.getActiveRuns();
4961
- const decisionStore = this.getDecisionStore();
4962
- await this.processDecisionAnswers(rawEvents, decisionStore);
4963
- const expiredDecisions = await decisionStore.expire();
4964
- const hasExpiredDecisions = expiredDecisions.length > 0;
4965
- const pendingDecisions = this.config.supervisor.autoDecide ? await decisionStore.pending() : [];
4966
- const answeredDecisions = this.config.supervisor.autoDecide ? await decisionStore.answered(state?.lastHeartbeat) : [];
5265
+ const eventCtx = await this.gatherEventContext();
5266
+ const { pendingDecisions, answeredDecisions, hasExpiredDecisions, hasPendingDecisions } = await this.processDecisions(eventCtx.rawEvents, state?.lastHeartbeat);
4967
5267
  const unconsolidatedEntries = await readUnconsolidated(this.supervisorDir);
4968
5268
  const hasPendingConsolidation = unconsolidatedEntries.length > 0;
4969
5269
  const skipResult = await this.handleSkipLogic({
4970
5270
  state,
4971
- totalEventCount,
4972
- activeRuns,
5271
+ totalEventCount: eventCtx.totalEventCount,
5272
+ activeRuns: eventCtx.activeRuns,
4973
5273
  hasPendingConsolidation,
4974
5274
  hasExpiredDecisions
4975
5275
  });
@@ -4979,27 +5279,31 @@ var HeartbeatLoop = class {
4979
5279
  }
4980
5280
  const modeResult = await this.determineHeartbeatMode(state);
4981
5281
  const { prompt, modeLabel } = await this.buildHeartbeatModePrompt({
4982
- grouped,
5282
+ grouped: eventCtx.grouped,
4983
5283
  todayCost: budgetCheck.todayCost,
4984
5284
  heartbeatCount: modeResult.heartbeatCount,
4985
5285
  unconsolidated: modeResult.unconsolidated,
4986
5286
  isCompaction: modeResult.isCompaction,
4987
5287
  isConsolidation: modeResult.isConsolidation,
4988
- activeRuns,
5288
+ activeRuns: eventCtx.activeRuns,
4989
5289
  pendingDecisions,
4990
5290
  answeredDecisions,
5291
+ hasPendingDecisions,
4991
5292
  lastHeartbeat: state?.lastHeartbeat,
4992
- lastConsolidationTimestamp: modeResult.lastConsolidationTs
5293
+ lastConsolidationTimestamp: modeResult.lastConsolidationTs,
5294
+ memories: eventCtx.memories,
5295
+ recentActions: eventCtx.recentActions,
5296
+ mcpServerNames: eventCtx.mcpServerNames
4993
5297
  });
4994
5298
  await this.activityLog.log(
4995
5299
  "heartbeat",
4996
5300
  `Heartbeat #${modeResult.heartbeatCount} starting (${modeLabel})`,
4997
5301
  {
4998
5302
  heartbeatId,
4999
- eventCount: totalEventCount,
5000
- messages: grouped.messages.length,
5001
- webhooks: grouped.webhooks.length,
5002
- runCompletions: grouped.runCompletions.length,
5303
+ eventCount: eventCtx.totalEventCount,
5304
+ messages: eventCtx.grouped.messages.length,
5305
+ webhooks: eventCtx.grouped.webhooks.length,
5306
+ runCompletions: eventCtx.grouped.runCompletions.length,
5003
5307
  isConsolidation: modeResult.isConsolidation
5004
5308
  }
5005
5309
  );
@@ -5011,17 +5315,11 @@ var HeartbeatLoop = class {
5011
5315
  { heartbeatId }
5012
5316
  );
5013
5317
  }
5014
- if (rawEvents.length > 0) {
5015
- const inboxPath = path14.join(this.supervisorDir, "inbox.jsonl");
5016
- await this.eventQueue.markProcessed(inboxPath, this.eventsPath, rawEvents);
5017
- }
5018
- if (modeResult.isConsolidation) {
5019
- const allIds = modeResult.unconsolidated.map((e) => e.id);
5020
- if (allIds.length > 0) {
5021
- await markConsolidated(this.supervisorDir, allIds);
5022
- }
5023
- await compactLogBuffer(this.supervisorDir);
5024
- }
5318
+ await this.handlePostSdkProcessing({
5319
+ rawEvents: eventCtx.rawEvents,
5320
+ isConsolidation: modeResult.isConsolidation,
5321
+ unconsolidated: modeResult.unconsolidated
5322
+ });
5025
5323
  const durationMs = Date.now() - startTime;
5026
5324
  const { stateUpdate } = this.buildStateUpdate({
5027
5325
  state,
@@ -5044,13 +5342,92 @@ var HeartbeatLoop = class {
5044
5342
  isConsolidation: modeResult.isConsolidation
5045
5343
  }
5046
5344
  );
5345
+ await this.emitCompletionEvents({
5346
+ heartbeatCount: modeResult.heartbeatCount,
5347
+ activeRuns: eventCtx.activeRuns,
5348
+ todayCost: budgetCheck.todayCost,
5349
+ costUsd,
5350
+ rawEvents: eventCtx.rawEvents
5351
+ });
5352
+ }
5353
+ /**
5354
+ * Check if supervisor daily budget is exceeded.
5355
+ */
5356
+ async checkBudgetExceeded(state, today) {
5357
+ const todayCost = state?.costResetDate === today ? state.todayCostUsd ?? 0 : 0;
5358
+ if (todayCost >= this.config.supervisor.dailyCapUsd) {
5359
+ await this.activityLog.log(
5360
+ "error",
5361
+ `Supervisor daily budget exceeded ($${todayCost.toFixed(2)} / $${this.config.supervisor.dailyCapUsd}). Skipping heartbeat.`
5362
+ );
5363
+ await this.sleep(this.config.supervisor.eventTimeoutMs);
5364
+ return { todayCost, exceeded: true };
5365
+ }
5366
+ return { todayCost, exceeded: false };
5367
+ }
5368
+ /**
5369
+ * Process decision answers from inbox and expire old decisions.
5370
+ * Returns pending, answered, and expiry status for prompt context.
5371
+ */
5372
+ async processDecisions(rawEvents, lastHeartbeat) {
5373
+ const decisionStore = this.getDecisionStore();
5374
+ await this.processDecisionAnswers(rawEvents, decisionStore);
5375
+ const expiredDecisions = await decisionStore.expire();
5376
+ const hasExpiredDecisions = expiredDecisions.length > 0;
5377
+ const allPending = await decisionStore.pending();
5378
+ const hasPendingDecisions = allPending.length > 0;
5379
+ const pendingDecisions = this.config.supervisor.autoDecide ? allPending : [];
5380
+ const answeredDecisions = this.config.supervisor.autoDecide ? await decisionStore.answered(lastHeartbeat) : [];
5381
+ return { pendingDecisions, answeredDecisions, hasExpiredDecisions, hasPendingDecisions };
5382
+ }
5383
+ /**
5384
+ * Gather event context: drain queue, fetch active runs, memories, and recent actions.
5385
+ */
5386
+ async gatherEventContext() {
5387
+ const { grouped, rawEvents } = this.eventQueue.drainAndGroup();
5388
+ const totalEventCount = grouped.messages.length + grouped.webhooks.length + grouped.runCompletions.length;
5389
+ const activeRuns = await this.getActiveRuns();
5390
+ const mcpServerNames = this.config.mcpServers ? Object.keys(this.config.mcpServers) : [];
5391
+ const store = this.getMemoryStore();
5392
+ const memories = store ? store.query({ limit: 40, sortBy: "relevance" }) : [];
5393
+ const recentActions = await this.activityLog.tail(20);
5394
+ return {
5395
+ grouped,
5396
+ rawEvents,
5397
+ totalEventCount,
5398
+ activeRuns,
5399
+ memories,
5400
+ recentActions,
5401
+ mcpServerNames
5402
+ };
5403
+ }
5404
+ /**
5405
+ * Handle post-SDK processing: mark events as processed, consolidate log buffer.
5406
+ */
5407
+ async handlePostSdkProcessing(input) {
5408
+ if (input.rawEvents.length > 0) {
5409
+ const inboxPath = path14.join(this.supervisorDir, "inbox.jsonl");
5410
+ await this.eventQueue.markProcessed(inboxPath, this.eventsPath, input.rawEvents);
5411
+ }
5412
+ if (input.isConsolidation) {
5413
+ const allIds = input.unconsolidated.map((e) => e.id);
5414
+ if (allIds.length > 0) {
5415
+ await markConsolidated(this.supervisorDir, allIds);
5416
+ }
5417
+ await compactLogBuffer(this.supervisorDir);
5418
+ }
5419
+ }
5420
+ /**
5421
+ * Emit completion webhook events: heartbeat completed and run completed events.
5422
+ */
5423
+ async emitCompletionEvents(input) {
5047
5424
  await this.emitHeartbeatCompleted({
5048
- heartbeatNumber: modeResult.heartbeatCount + 1,
5049
- runsActive: activeRuns.length,
5050
- todayUsd: budgetCheck.todayCost + costUsd,
5425
+ heartbeatNumber: input.heartbeatCount + 1,
5426
+ runsActive: input.activeRuns.length,
5427
+ todayUsd: input.todayCost + input.costUsd,
5051
5428
  limitUsd: this.config.supervisor.dailyCapUsd
5052
5429
  });
5053
- for (const event of rawEvents) {
5430
+ for (const event of input.rawEvents) {
5054
5431
  if (event.kind === "run_complete") {
5055
5432
  const runData = await this.readPersistedRun(event.runId);
5056
5433
  const emitOpts = {
@@ -5066,21 +5443,6 @@ var HeartbeatLoop = class {
5066
5443
  }
5067
5444
  }
5068
5445
  }
5069
- /**
5070
- * Check if supervisor daily budget is exceeded.
5071
- */
5072
- async checkBudgetExceeded(state, today) {
5073
- const todayCost = state?.costResetDate === today ? state.todayCostUsd ?? 0 : 0;
5074
- if (todayCost >= this.config.supervisor.dailyCapUsd) {
5075
- await this.activityLog.log(
5076
- "error",
5077
- `Supervisor daily budget exceeded ($${todayCost.toFixed(2)} / $${this.config.supervisor.dailyCapUsd}). Skipping heartbeat.`
5078
- );
5079
- await this.sleep(this.config.supervisor.eventTimeoutMs);
5080
- return { todayCost, exceeded: true };
5081
- }
5082
- return { todayCost, exceeded: false };
5083
- }
5084
5446
  /**
5085
5447
  * Handle skip logic for idle and active-work scenarios.
5086
5448
  * Uses IdleDetector to make skip decisions based on context.
@@ -5183,10 +5545,6 @@ var HeartbeatLoop = class {
5183
5545
  * Build the prompt for the current heartbeat mode.
5184
5546
  */
5185
5547
  async buildHeartbeatModePrompt(opts) {
5186
- const mcpServerNames = this.config.mcpServers ? Object.keys(this.config.mcpServers) : [];
5187
- const store = this.getMemoryStore();
5188
- const memories = store ? store.query({ limit: 40, sortBy: "relevance" }) : [];
5189
- const recentActions = await this.activityLog.tail(20);
5190
5548
  const sharedOpts = {
5191
5549
  repos: this.config.repos,
5192
5550
  grouped: opts.grouped,
@@ -5197,13 +5555,14 @@ var HeartbeatLoop = class {
5197
5555
  },
5198
5556
  activeRuns: opts.activeRuns,
5199
5557
  heartbeatCount: opts.heartbeatCount,
5200
- mcpServerNames,
5558
+ mcpServerNames: opts.mcpServerNames,
5201
5559
  customInstructions: this.customInstructions,
5202
5560
  supervisorDir: this.supervisorDir,
5203
- memories,
5204
- recentActions,
5561
+ memories: opts.memories,
5562
+ recentActions: opts.recentActions,
5205
5563
  pendingDecisions: opts.pendingDecisions,
5206
5564
  answeredDecisions: opts.answeredDecisions,
5565
+ hasPendingDecisions: opts.hasPendingDecisions,
5207
5566
  autoDecide: this.config.supervisor.autoDecide
5208
5567
  };
5209
5568
  if (opts.isCompaction) {
@@ -5313,7 +5672,7 @@ var HeartbeatLoop = class {
5313
5672
  }
5314
5673
  async readState() {
5315
5674
  try {
5316
- const raw = await readFile11(this.statePath, "utf-8");
5675
+ const raw = await readFile10(this.statePath, "utf-8");
5317
5676
  return JSON.parse(raw);
5318
5677
  } catch {
5319
5678
  return null;
@@ -5321,7 +5680,7 @@ var HeartbeatLoop = class {
5321
5680
  }
5322
5681
  async updateState(updates) {
5323
5682
  try {
5324
- const raw = await readFile11(this.statePath, "utf-8");
5683
+ const raw = await readFile10(this.statePath, "utf-8");
5325
5684
  const state = JSON.parse(raw);
5326
5685
  Object.assign(state, updates);
5327
5686
  await writeFile6(this.statePath, JSON.stringify(state, null, 2), "utf-8");
@@ -5346,7 +5705,7 @@ var HeartbeatLoop = class {
5346
5705
  for (const f of files) {
5347
5706
  if (!f.endsWith(".json")) continue;
5348
5707
  try {
5349
- const raw = await readFile11(path14.join(subDir, f), "utf-8");
5708
+ const raw = await readFile10(path14.join(subDir, f), "utf-8");
5350
5709
  const run = JSON.parse(raw);
5351
5710
  if (isRunActive(run)) {
5352
5711
  active.push(
@@ -5380,7 +5739,7 @@ var HeartbeatLoop = class {
5380
5739
  }
5381
5740
  for (const filePath of candidates) {
5382
5741
  try {
5383
- const content = await readFile11(filePath, "utf-8");
5742
+ const content = await readFile10(filePath, "utf-8");
5384
5743
  await this.activityLog.log("event", `Loaded instructions from ${filePath}`);
5385
5744
  return content;
5386
5745
  } catch {
@@ -5495,7 +5854,7 @@ var HeartbeatLoop = class {
5495
5854
  const subDir = path14.join(runsDir, entry.name);
5496
5855
  const runPath = path14.join(subDir, `${runId}.json`);
5497
5856
  if (existsSync7(runPath)) {
5498
- const raw = await readFile11(runPath, "utf-8");
5857
+ const raw = await readFile10(runPath, "utf-8");
5499
5858
  const run = JSON.parse(raw);
5500
5859
  const totalCostUsd = Object.values(run.steps).reduce(
5501
5860
  (sum, step) => sum + (step.costUsd ?? 0),
@@ -5873,9 +6232,12 @@ var SupervisorDaemon = class {
5873
6232
  async readState() {
5874
6233
  const statePath = path15.join(this.dir, "state.json");
5875
6234
  try {
5876
- const raw = await readFile12(statePath, "utf-8");
6235
+ const raw = await readFile11(statePath, "utf-8");
5877
6236
  return JSON.parse(raw);
5878
- } catch {
6237
+ } catch (err) {
6238
+ console.debug(
6239
+ `[SupervisorDaemon] Failed to read state: ${err instanceof Error ? err.message : String(err)}`
6240
+ );
5879
6241
  return null;
5880
6242
  }
5881
6243
  }
@@ -5885,10 +6247,13 @@ var SupervisorDaemon = class {
5885
6247
  }
5886
6248
  async readLockPid(lockPath) {
5887
6249
  try {
5888
- const raw = await readFile12(lockPath, "utf-8");
6250
+ const raw = await readFile11(lockPath, "utf-8");
5889
6251
  const pid = Number.parseInt(raw.trim(), 10);
5890
6252
  return Number.isNaN(pid) ? null : pid;
5891
- } catch {
6253
+ } catch (err) {
6254
+ console.debug(
6255
+ `[SupervisorDaemon] Failed to read lock PID from ${lockPath}: ${err instanceof Error ? err.message : String(err)}`
6256
+ );
5892
6257
  return null;
5893
6258
  }
5894
6259
  }
@@ -5924,8 +6289,8 @@ var SupervisorDaemon = class {
5924
6289
  };
5925
6290
 
5926
6291
  // src/supervisor/StatusReader.ts
5927
- import { readFileSync as readFileSync2 } from "fs";
5928
- import { readFile as readFile13 } from "fs/promises";
6292
+ import { existsSync as existsSync9, readFileSync as readFileSync2 } from "fs";
6293
+ import { readdir as readdir5, readFile as readFile12 } from "fs/promises";
5929
6294
  import path16 from "path";
5930
6295
  var STATE_FILE = "state.json";
5931
6296
  var ACTIVITY_FILE2 = "activity.jsonl";
@@ -5945,14 +6310,20 @@ var StatusReader = class {
5945
6310
  async getStatus() {
5946
6311
  let raw;
5947
6312
  try {
5948
- raw = await readFile13(this.statePath, "utf-8");
5949
- } catch {
6313
+ raw = await readFile12(this.statePath, "utf-8");
6314
+ } catch (err) {
6315
+ console.debug(
6316
+ `[StatusReader] State file not found: ${err instanceof Error ? err.message : String(err)}`
6317
+ );
5950
6318
  return null;
5951
6319
  }
5952
6320
  let parsed;
5953
6321
  try {
5954
6322
  parsed = JSON.parse(raw);
5955
- } catch {
6323
+ } catch (err) {
6324
+ console.debug(
6325
+ `[StatusReader] Malformed state JSON: ${err instanceof Error ? err.message : String(err)}`
6326
+ );
5956
6327
  return null;
5957
6328
  }
5958
6329
  const result = supervisorDaemonStateSchema.safeParse(parsed);
@@ -5966,6 +6337,7 @@ var StatusReader = class {
5966
6337
  stopped: "idle"
5967
6338
  };
5968
6339
  const recentActivity = this.queryActivity({ limit: 5 });
6340
+ const activeRunCount = await this.countActiveRuns();
5969
6341
  return {
5970
6342
  pid: daemon.pid,
5971
6343
  sessionId: daemon.sessionId,
@@ -5975,8 +6347,7 @@ var StatusReader = class {
5975
6347
  todayCostUsd: daemon.todayCostUsd,
5976
6348
  status: statusMap[daemon.status],
5977
6349
  lastHeartbeat: daemon.lastHeartbeat ?? daemon.startedAt,
5978
- activeRunCount: 0,
5979
- // TODO: count active runs from .neo/runs/
6350
+ activeRunCount,
5980
6351
  recentActivitySummary: recentActivity.map((e) => `[${e.type}] ${e.summary}`)
5981
6352
  };
5982
6353
  }
@@ -5989,7 +6360,10 @@ var StatusReader = class {
5989
6360
  let content;
5990
6361
  try {
5991
6362
  content = readFileSync2(this.activityPath, "utf-8");
5992
- } catch {
6363
+ } catch (err) {
6364
+ console.debug(
6365
+ `[StatusReader] Activity file not found: ${err instanceof Error ? err.message : String(err)}`
6366
+ );
5993
6367
  return [];
5994
6368
  }
5995
6369
  const lines = content.trim().split("\n").filter(Boolean);
@@ -6001,7 +6375,10 @@ var StatusReader = class {
6001
6375
  if (result.success) {
6002
6376
  entries.push(result.data);
6003
6377
  }
6004
- } catch {
6378
+ } catch (err) {
6379
+ console.debug(
6380
+ `[StatusReader] Skipping malformed activity line: ${err instanceof Error ? err.message : String(err)}`
6381
+ );
6005
6382
  }
6006
6383
  }
6007
6384
  if (type) {
@@ -6017,16 +6394,80 @@ var StatusReader = class {
6017
6394
  }
6018
6395
  return entries.slice(offset, offset + limit);
6019
6396
  }
6397
+ /**
6398
+ * Count runs with status "running" from .neo/runs/.
6399
+ * Fails silently — returns 0 if the runs directory doesn't exist.
6400
+ */
6401
+ async countActiveRuns() {
6402
+ const runsDir = getRunsDir();
6403
+ if (!existsSync9(runsDir)) return 0;
6404
+ try {
6405
+ const runFiles = await this.collectRunFiles(runsDir);
6406
+ let count = 0;
6407
+ for (const filePath of runFiles) {
6408
+ if (await this.isRunning(filePath)) count++;
6409
+ }
6410
+ return count;
6411
+ } catch (err) {
6412
+ console.debug(
6413
+ `[StatusReader] Failed to count active runs: ${err instanceof Error ? err.message : String(err)}`
6414
+ );
6415
+ return 0;
6416
+ }
6417
+ }
6418
+ /**
6419
+ * Collect all run JSON files from the runs directory tree.
6420
+ * Searches both top-level and repo subdirectories.
6421
+ */
6422
+ async collectRunFiles(runsDir) {
6423
+ const entries = await readdir5(runsDir, { withFileTypes: true });
6424
+ const jsonFiles = [];
6425
+ for (const entry of entries) {
6426
+ if (entry.isDirectory()) {
6427
+ const subDir = path16.join(runsDir, entry.name);
6428
+ const subFiles = await readdir5(subDir);
6429
+ for (const f of subFiles) {
6430
+ if (this.isRunFile(f)) {
6431
+ jsonFiles.push(path16.join(subDir, f));
6432
+ }
6433
+ }
6434
+ } else if (this.isRunFile(entry.name)) {
6435
+ jsonFiles.push(path16.join(runsDir, entry.name));
6436
+ }
6437
+ }
6438
+ return jsonFiles;
6439
+ }
6440
+ /**
6441
+ * Check if a filename is a run file (JSON but not dispatch).
6442
+ */
6443
+ isRunFile(filename) {
6444
+ return filename.endsWith(".json") && !filename.endsWith(".dispatch.json");
6445
+ }
6446
+ /**
6447
+ * Check if a run file represents an active (running) run.
6448
+ */
6449
+ async isRunning(filePath) {
6450
+ try {
6451
+ const content = await readFile12(filePath, "utf-8");
6452
+ const run = JSON.parse(content);
6453
+ return run.status === "running";
6454
+ } catch (err) {
6455
+ console.debug(
6456
+ `[StatusReader] Failed to read run file ${filePath}: ${err instanceof Error ? err.message : String(err)}`
6457
+ );
6458
+ return false;
6459
+ }
6460
+ }
6020
6461
  };
6021
6462
 
6022
6463
  // src/supervisor/shutdown.ts
6023
- import { existsSync as existsSync9 } from "fs";
6024
- import { readdir as readdir5, readFile as readFile14, writeFile as writeFile8 } from "fs/promises";
6464
+ import { existsSync as existsSync10 } from "fs";
6465
+ import { readdir as readdir6, readFile as readFile13, writeFile as writeFile8 } from "fs/promises";
6025
6466
  import path17 from "path";
6026
6467
 
6027
6468
  // src/webhook-config.ts
6028
- import { existsSync as existsSync10 } from "fs";
6029
- import { mkdir as mkdir8, readFile as readFile15, writeFile as writeFile9 } from "fs/promises";
6469
+ import { existsSync as existsSync11 } from "fs";
6470
+ import { mkdir as mkdir8, readFile as readFile14, writeFile as writeFile9 } from "fs/promises";
6030
6471
  import path18 from "path";
6031
6472
  import { z as z8 } from "zod";
6032
6473
  var webhookEntrySchema = z8.object({
@@ -6044,10 +6485,10 @@ function getWebhooksConfigPath() {
6044
6485
  }
6045
6486
/**
 * Load and validate the webhooks config file.
 * Returns an empty webhook list when the config file does not exist.
 * Throws if the file holds invalid JSON or fails schema validation.
 */
async function loadWebhooksConfig() {
  const configPath = getWebhooksConfigPath();
  if (!existsSync11(configPath)) {
    return { webhooks: [] };
  }
  const contents = await readFile14(configPath, "utf-8");
  return webhooksConfigSchema.parse(JSON.parse(contents));
}