@neotx/core 0.1.0-alpha.1 → 0.1.0-alpha.10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +567 -0
- package/dist/index.d.ts +369 -123
- package/dist/index.js +2446 -878
- package/dist/index.js.map +1 -1
- package/package.json +7 -2
package/dist/index.js
CHANGED
|
@@ -74,51 +74,37 @@ import path2 from "path";
|
|
|
74
74
|
// src/agents/resolver.ts
|
|
75
75
|
function resolveAgent(config, builtIns) {
|
|
76
76
|
const extendsName = config.extends ?? (builtIns.has(config.name) && config.extends === void 0 ? config.name : void 0);
|
|
77
|
-
|
|
78
|
-
|
|
79
|
-
|
|
80
|
-
|
|
81
|
-
|
|
82
|
-
|
|
83
|
-
|
|
84
|
-
|
|
85
|
-
|
|
86
|
-
|
|
87
|
-
|
|
88
|
-
const baseTols = base.tools ?? [];
|
|
89
|
-
const newTools = config.tools.filter((t) => t !== "$inherited");
|
|
90
|
-
tools2 = [...baseTols, ...newTools];
|
|
91
|
-
} else {
|
|
92
|
-
tools2 = config.tools;
|
|
93
|
-
}
|
|
94
|
-
} else {
|
|
95
|
-
tools2 = base.tools ?? [];
|
|
96
|
-
}
|
|
97
|
-
let prompt2;
|
|
98
|
-
if (config.prompt) {
|
|
99
|
-
prompt2 = config.prompt;
|
|
100
|
-
} else {
|
|
101
|
-
prompt2 = base.prompt ?? "";
|
|
102
|
-
}
|
|
103
|
-
if (config.promptAppend) {
|
|
104
|
-
prompt2 = `${prompt2}
|
|
105
|
-
|
|
106
|
-
${config.promptAppend}`;
|
|
107
|
-
}
|
|
108
|
-
const definition2 = {
|
|
109
|
-
description: config.description ?? base.description ?? "",
|
|
110
|
-
prompt: prompt2,
|
|
111
|
-
tools: tools2,
|
|
112
|
-
model: config.model ?? base.model ?? "sonnet"
|
|
113
|
-
};
|
|
114
|
-
return {
|
|
115
|
-
name: config.name,
|
|
116
|
-
definition: definition2,
|
|
117
|
-
sandbox: config.sandbox ?? base.sandbox ?? "readonly",
|
|
118
|
-
...config.maxTurns !== void 0 ? { maxTurns: config.maxTurns } : base.maxTurns !== void 0 ? { maxTurns: base.maxTurns } : {},
|
|
119
|
-
source: config.name === extendsName && !config.extends ? "built-in" : "extended"
|
|
120
|
-
};
|
|
77
|
+
if (extendsName !== void 0) {
|
|
78
|
+
return resolveExtendedAgent(config, extendsName, builtIns);
|
|
79
|
+
}
|
|
80
|
+
return resolveCustomAgent(config);
|
|
81
|
+
}
|
|
82
|
+
function resolveExtendedAgent(config, extendsName, builtIns) {
|
|
83
|
+
const base = builtIns.get(extendsName);
|
|
84
|
+
if (!base) {
|
|
85
|
+
throw new Error(
|
|
86
|
+
`Agent "${config.name}" extends "${extendsName}", but no built-in agent with that name exists.`
|
|
87
|
+
);
|
|
121
88
|
}
|
|
89
|
+
const tools = mergeTools(config.tools, base.tools);
|
|
90
|
+
const prompt = mergePrompt(config.prompt, config.promptAppend, base.prompt);
|
|
91
|
+
const mcpServers = mergeMcpServerNames(base.mcpServers, config.mcpServers);
|
|
92
|
+
const definition = {
|
|
93
|
+
description: config.description ?? base.description ?? "",
|
|
94
|
+
prompt,
|
|
95
|
+
tools,
|
|
96
|
+
model: config.model ?? base.model ?? "sonnet",
|
|
97
|
+
...mcpServers.length > 0 ? { mcpServers } : {}
|
|
98
|
+
};
|
|
99
|
+
return {
|
|
100
|
+
name: config.name,
|
|
101
|
+
definition,
|
|
102
|
+
sandbox: config.sandbox ?? base.sandbox ?? "readonly",
|
|
103
|
+
...config.maxTurns !== void 0 ? { maxTurns: config.maxTurns } : base.maxTurns !== void 0 ? { maxTurns: base.maxTurns } : {},
|
|
104
|
+
source: config.name === extendsName && !config.extends ? "built-in" : "extended"
|
|
105
|
+
};
|
|
106
|
+
}
|
|
107
|
+
function resolveCustomAgent(config) {
|
|
122
108
|
if (!config.description) {
|
|
123
109
|
throw new Error(
|
|
124
110
|
`Agent "${config.name}" has no "extends" and no "description". Add a 'description' field to the agent YAML.`
|
|
@@ -155,7 +141,8 @@ ${config.promptAppend}`;
|
|
|
155
141
|
description: config.description,
|
|
156
142
|
prompt,
|
|
157
143
|
tools,
|
|
158
|
-
model: config.model
|
|
144
|
+
model: config.model,
|
|
145
|
+
...config.mcpServers?.length ? { mcpServers: config.mcpServers } : {}
|
|
159
146
|
};
|
|
160
147
|
return {
|
|
161
148
|
name: config.name,
|
|
@@ -165,6 +152,27 @@ ${config.promptAppend}`;
|
|
|
165
152
|
source: "custom"
|
|
166
153
|
};
|
|
167
154
|
}
|
|
155
|
+
function mergeTools(configTools, baseTools) {
|
|
156
|
+
if (!configTools) return baseTools ?? [];
|
|
157
|
+
if (configTools.includes("$inherited")) {
|
|
158
|
+
const newTools = configTools.filter((t) => t !== "$inherited");
|
|
159
|
+
return [...baseTools ?? [], ...newTools];
|
|
160
|
+
}
|
|
161
|
+
return configTools;
|
|
162
|
+
}
|
|
163
|
+
function mergePrompt(configPrompt, promptAppend, basePrompt) {
|
|
164
|
+
let prompt = configPrompt ?? basePrompt ?? "";
|
|
165
|
+
if (promptAppend) {
|
|
166
|
+
prompt = `${prompt}
|
|
167
|
+
|
|
168
|
+
${promptAppend}`;
|
|
169
|
+
}
|
|
170
|
+
return prompt;
|
|
171
|
+
}
|
|
172
|
+
function mergeMcpServerNames(base, override) {
|
|
173
|
+
if (!base?.length && !override?.length) return [];
|
|
174
|
+
return [.../* @__PURE__ */ new Set([...base ?? [], ...override ?? []])];
|
|
175
|
+
}
|
|
168
176
|
|
|
169
177
|
// src/agents/registry.ts
|
|
170
178
|
var AgentRegistry = class {
|
|
@@ -438,9 +446,6 @@ function getSupervisorDir(name) {
|
|
|
438
446
|
function getSupervisorStatePath(name) {
|
|
439
447
|
return path3.join(getSupervisorDir(name), "state.json");
|
|
440
448
|
}
|
|
441
|
-
function getSupervisorMemoryPath(name) {
|
|
442
|
-
return path3.join(getSupervisorDir(name), "memory.md");
|
|
443
|
-
}
|
|
444
449
|
function getSupervisorActivityPath(name) {
|
|
445
450
|
return path3.join(getSupervisorDir(name), "activity.jsonl");
|
|
446
451
|
}
|
|
@@ -470,22 +475,22 @@ var mcpServerConfigSchema = z2.discriminatedUnion("type", [
|
|
|
470
475
|
httpMcpServerSchema,
|
|
471
476
|
stdioMcpServerSchema
|
|
472
477
|
]);
|
|
478
|
+
var gitStrategySchema = z2.enum(["pr", "branch"]).default("branch");
|
|
473
479
|
var repoConfigSchema = z2.object({
|
|
474
480
|
path: z2.string(),
|
|
475
481
|
name: z2.string().optional(),
|
|
476
482
|
defaultBranch: z2.string().default("main"),
|
|
477
483
|
branchPrefix: z2.string().default("feat"),
|
|
478
484
|
pushRemote: z2.string().default("origin"),
|
|
479
|
-
|
|
480
|
-
prBaseBranch: z2.string().optional()
|
|
485
|
+
gitStrategy: gitStrategySchema
|
|
481
486
|
});
|
|
482
487
|
var globalConfigSchema = z2.object({
|
|
483
488
|
repos: z2.array(repoConfigSchema).default([]),
|
|
484
489
|
concurrency: z2.object({
|
|
485
490
|
maxSessions: z2.number().default(5),
|
|
486
|
-
maxPerRepo: z2.number().default(
|
|
491
|
+
maxPerRepo: z2.number().default(4),
|
|
487
492
|
queueMax: z2.number().default(50)
|
|
488
|
-
}).default({ maxSessions: 5, maxPerRepo:
|
|
493
|
+
}).default({ maxSessions: 5, maxPerRepo: 4, queueMax: 50 }),
|
|
489
494
|
budget: z2.object({
|
|
490
495
|
dailyCapUsd: z2.number().default(500),
|
|
491
496
|
alertThresholdPct: z2.number().default(80)
|
|
@@ -496,8 +501,9 @@ var globalConfigSchema = z2.object({
|
|
|
496
501
|
}).default({ maxRetries: 3, backoffBaseMs: 3e4 }),
|
|
497
502
|
sessions: z2.object({
|
|
498
503
|
initTimeoutMs: z2.number().default(12e4),
|
|
499
|
-
maxDurationMs: z2.number().default(36e5)
|
|
500
|
-
|
|
504
|
+
maxDurationMs: z2.number().default(36e5),
|
|
505
|
+
dir: z2.string().default("/tmp/neo-sessions")
|
|
506
|
+
}).default({ initTimeoutMs: 12e4, maxDurationMs: 36e5, dir: "/tmp/neo-sessions" }),
|
|
501
507
|
webhooks: z2.array(
|
|
502
508
|
z2.object({
|
|
503
509
|
url: z2.string().url(),
|
|
@@ -509,22 +515,30 @@ var globalConfigSchema = z2.object({
|
|
|
509
515
|
supervisor: z2.object({
|
|
510
516
|
port: z2.number().default(7777),
|
|
511
517
|
secret: z2.string().optional(),
|
|
512
|
-
idleIntervalMs: z2.number().default(6e4),
|
|
513
|
-
idleSkipMax: z2.number().default(20),
|
|
514
518
|
heartbeatTimeoutMs: z2.number().default(3e5),
|
|
515
519
|
maxConsecutiveFailures: z2.number().default(3),
|
|
516
520
|
maxEventsPerSec: z2.number().default(10),
|
|
517
521
|
dailyCapUsd: z2.number().default(50),
|
|
522
|
+
/** How often consolidation runs (ms) */
|
|
523
|
+
consolidationIntervalMs: z2.number().default(3e5),
|
|
524
|
+
/** How often compaction runs (ms) */
|
|
525
|
+
compactionIntervalMs: z2.number().default(36e5),
|
|
526
|
+
/** Safety timeout for waitForWork (ms) */
|
|
527
|
+
eventTimeoutMs: z2.number().default(3e5),
|
|
518
528
|
instructions: z2.string().optional()
|
|
519
529
|
}).default({
|
|
520
530
|
port: 7777,
|
|
521
|
-
idleIntervalMs: 6e4,
|
|
522
|
-
idleSkipMax: 20,
|
|
523
531
|
heartbeatTimeoutMs: 3e5,
|
|
524
532
|
maxConsecutiveFailures: 3,
|
|
525
533
|
maxEventsPerSec: 10,
|
|
526
|
-
dailyCapUsd: 50
|
|
534
|
+
dailyCapUsd: 50,
|
|
535
|
+
consolidationIntervalMs: 3e5,
|
|
536
|
+
compactionIntervalMs: 36e5,
|
|
537
|
+
eventTimeoutMs: 3e5
|
|
527
538
|
}),
|
|
539
|
+
memory: z2.object({
|
|
540
|
+
embeddings: z2.boolean().default(true)
|
|
541
|
+
}).default({ embeddings: true }),
|
|
528
542
|
mcpServers: z2.record(z2.string(), mcpServerConfigSchema).optional(),
|
|
529
543
|
claudeCodePath: z2.string().optional(),
|
|
530
544
|
idempotency: z2.object({
|
|
@@ -538,7 +552,7 @@ var DEFAULT_GLOBAL_CONFIG = {
|
|
|
538
552
|
repos: [],
|
|
539
553
|
concurrency: {
|
|
540
554
|
maxSessions: 5,
|
|
541
|
-
maxPerRepo:
|
|
555
|
+
maxPerRepo: 4,
|
|
542
556
|
queueMax: 50
|
|
543
557
|
},
|
|
544
558
|
budget: {
|
|
@@ -620,18 +634,40 @@ async function listReposFromGlobalConfig() {
|
|
|
620
634
|
}
|
|
621
635
|
|
|
622
636
|
// src/cost/journal.ts
|
|
623
|
-
import { appendFile,
|
|
637
|
+
import { appendFile, readFile as readFile3 } from "fs/promises";
|
|
638
|
+
|
|
639
|
+
// src/shared/date.ts
|
|
624
640
|
import path5 from "path";
|
|
641
|
+
function toDateKey(date) {
|
|
642
|
+
return date.toISOString().slice(0, 10);
|
|
643
|
+
}
|
|
644
|
+
function fileForDate(date, prefix, dir) {
|
|
645
|
+
const yyyy = date.getUTCFullYear();
|
|
646
|
+
const mm = String(date.getUTCMonth() + 1).padStart(2, "0");
|
|
647
|
+
return path5.join(dir, `${prefix}-${yyyy}-${mm}.jsonl`);
|
|
648
|
+
}
|
|
649
|
+
|
|
650
|
+
// src/shared/fs.ts
|
|
651
|
+
import { mkdir as mkdir2 } from "fs/promises";
|
|
652
|
+
async function ensureDir(dirPath, cache) {
|
|
653
|
+
if (cache?.has(dirPath)) {
|
|
654
|
+
return;
|
|
655
|
+
}
|
|
656
|
+
await mkdir2(dirPath, { recursive: true });
|
|
657
|
+
cache?.add(dirPath);
|
|
658
|
+
}
|
|
659
|
+
|
|
660
|
+
// src/cost/journal.ts
|
|
625
661
|
var CostJournal = class {
|
|
626
662
|
dir;
|
|
627
|
-
|
|
663
|
+
dirCache = /* @__PURE__ */ new Set();
|
|
628
664
|
dayCache = null;
|
|
629
665
|
constructor(options) {
|
|
630
666
|
this.dir = options.dir;
|
|
631
667
|
}
|
|
632
668
|
async append(entry) {
|
|
633
|
-
await this.
|
|
634
|
-
const file =
|
|
669
|
+
await ensureDir(this.dir, this.dirCache);
|
|
670
|
+
const file = fileForDate(new Date(entry.timestamp), "cost", this.dir);
|
|
635
671
|
await appendFile(file, `${JSON.stringify(entry)}
|
|
636
672
|
`, "utf-8");
|
|
637
673
|
this.dayCache = null;
|
|
@@ -642,7 +678,7 @@ var CostJournal = class {
|
|
|
642
678
|
if (this.dayCache?.key === dayKey) {
|
|
643
679
|
return this.dayCache.total;
|
|
644
680
|
}
|
|
645
|
-
const file =
|
|
681
|
+
const file = fileForDate(d, "cost", this.dir);
|
|
646
682
|
let total = 0;
|
|
647
683
|
try {
|
|
648
684
|
const content = await readFile3(file, "utf-8");
|
|
@@ -659,20 +695,7 @@ var CostJournal = class {
|
|
|
659
695
|
this.dayCache = { key: dayKey, total };
|
|
660
696
|
return total;
|
|
661
697
|
}
|
|
662
|
-
fileForDate(date) {
|
|
663
|
-
const yyyy = date.getUTCFullYear();
|
|
664
|
-
const mm = String(date.getUTCMonth() + 1).padStart(2, "0");
|
|
665
|
-
return path5.join(this.dir, `cost-${yyyy}-${mm}.jsonl`);
|
|
666
|
-
}
|
|
667
|
-
async ensureDir() {
|
|
668
|
-
if (this.dirCreated) return;
|
|
669
|
-
await mkdir2(this.dir, { recursive: true });
|
|
670
|
-
this.dirCreated = true;
|
|
671
|
-
}
|
|
672
698
|
};
|
|
673
|
-
function toDateKey(date) {
|
|
674
|
-
return date.toISOString().slice(0, 10);
|
|
675
|
-
}
|
|
676
699
|
|
|
677
700
|
// src/events/emitter.ts
|
|
678
701
|
import { EventEmitter } from "events";
|
|
@@ -713,36 +736,29 @@ var NeoEventEmitter = class {
|
|
|
713
736
|
};
|
|
714
737
|
|
|
715
738
|
// src/events/journal.ts
|
|
716
|
-
import { appendFile as appendFile2
|
|
717
|
-
import path6 from "path";
|
|
739
|
+
import { appendFile as appendFile2 } from "fs/promises";
|
|
718
740
|
var EventJournal = class {
|
|
719
741
|
dir;
|
|
720
|
-
|
|
742
|
+
dirCache = /* @__PURE__ */ new Set();
|
|
721
743
|
constructor(options) {
|
|
722
744
|
this.dir = options.dir;
|
|
723
745
|
}
|
|
724
746
|
async append(event) {
|
|
725
|
-
await this.
|
|
726
|
-
const file =
|
|
747
|
+
await ensureDir(this.dir, this.dirCache);
|
|
748
|
+
const file = fileForDate(new Date(event.timestamp), "events", this.dir);
|
|
727
749
|
await appendFile2(file, `${JSON.stringify(event)}
|
|
728
750
|
`, "utf-8");
|
|
729
751
|
}
|
|
730
|
-
fileForDate(date) {
|
|
731
|
-
const yyyy = date.getUTCFullYear();
|
|
732
|
-
const mm = String(date.getUTCMonth() + 1).padStart(2, "0");
|
|
733
|
-
return path6.join(this.dir, `events-${yyyy}-${mm}.jsonl`);
|
|
734
|
-
}
|
|
735
|
-
async ensureDir() {
|
|
736
|
-
if (this.dirCreated) return;
|
|
737
|
-
await mkdir3(this.dir, { recursive: true });
|
|
738
|
-
this.dirCreated = true;
|
|
739
|
-
}
|
|
740
752
|
};
|
|
741
753
|
|
|
742
754
|
// src/events/webhook.ts
|
|
743
|
-
import { createHmac } from "crypto";
|
|
755
|
+
import { createHmac, randomUUID } from "crypto";
|
|
756
|
+
var RETRY_EVENT_TYPES = /* @__PURE__ */ new Set(["session:complete", "session:fail", "budget:alert"]);
|
|
757
|
+
var RETRY_MAX_ATTEMPTS = 3;
|
|
758
|
+
var RETRY_BASE_DELAY_MS = 500;
|
|
744
759
|
var WebhookDispatcher = class {
|
|
745
760
|
webhooks;
|
|
761
|
+
pending = /* @__PURE__ */ new Set();
|
|
746
762
|
constructor(webhooks) {
|
|
747
763
|
this.webhooks = webhooks;
|
|
748
764
|
}
|
|
@@ -751,8 +767,10 @@ var WebhookDispatcher = class {
|
|
|
751
767
|
for (const webhook of this.webhooks) {
|
|
752
768
|
if (!matchesFilter(event.type, webhook.events)) continue;
|
|
753
769
|
const payload = {
|
|
770
|
+
id: randomUUID(),
|
|
754
771
|
version: 1,
|
|
755
|
-
event:
|
|
772
|
+
event: event.type,
|
|
773
|
+
payload: toSerializable(event),
|
|
756
774
|
source: "neo",
|
|
757
775
|
deliveredAt: (/* @__PURE__ */ new Date()).toISOString()
|
|
758
776
|
};
|
|
@@ -763,16 +781,45 @@ var WebhookDispatcher = class {
|
|
|
763
781
|
if (webhook.secret) {
|
|
764
782
|
headers["X-Neo-Signature"] = sign(body, webhook.secret);
|
|
765
783
|
}
|
|
766
|
-
|
|
784
|
+
if (RETRY_EVENT_TYPES.has(event.type)) {
|
|
785
|
+
const p = sendWithRetry(webhook.url, headers, body, webhook.timeoutMs).catch(() => {
|
|
786
|
+
}).finally(() => this.pending.delete(p));
|
|
787
|
+
this.pending.add(p);
|
|
788
|
+
} else {
|
|
789
|
+
fetch(webhook.url, {
|
|
790
|
+
method: "POST",
|
|
791
|
+
headers,
|
|
792
|
+
body,
|
|
793
|
+
signal: AbortSignal.timeout(webhook.timeoutMs)
|
|
794
|
+
}).catch(() => {
|
|
795
|
+
});
|
|
796
|
+
}
|
|
797
|
+
}
|
|
798
|
+
}
|
|
799
|
+
/** Wait for all pending terminal webhook deliveries to complete. */
|
|
800
|
+
async flush() {
|
|
801
|
+
if (this.pending.size === 0) return;
|
|
802
|
+
await Promise.allSettled([...this.pending]);
|
|
803
|
+
}
|
|
804
|
+
};
|
|
805
|
+
async function sendWithRetry(url, headers, body, timeoutMs) {
|
|
806
|
+
for (let attempt = 1; attempt <= RETRY_MAX_ATTEMPTS; attempt++) {
|
|
807
|
+
try {
|
|
808
|
+
const res = await fetch(url, {
|
|
767
809
|
method: "POST",
|
|
768
810
|
headers,
|
|
769
811
|
body,
|
|
770
|
-
signal: AbortSignal.timeout(
|
|
771
|
-
}).catch(() => {
|
|
812
|
+
signal: AbortSignal.timeout(timeoutMs)
|
|
772
813
|
});
|
|
814
|
+
if (res.ok) return;
|
|
815
|
+
} catch {
|
|
816
|
+
}
|
|
817
|
+
if (attempt < RETRY_MAX_ATTEMPTS) {
|
|
818
|
+
const delay = RETRY_BASE_DELAY_MS * 2 ** (attempt - 1);
|
|
819
|
+
await new Promise((resolve4) => setTimeout(resolve4, delay));
|
|
773
820
|
}
|
|
774
821
|
}
|
|
775
|
-
}
|
|
822
|
+
}
|
|
776
823
|
function matchesFilter(eventType, filters) {
|
|
777
824
|
if (!filters || filters.length === 0) return true;
|
|
778
825
|
return filters.some((f) => {
|
|
@@ -793,74 +840,145 @@ function toSerializable(event) {
|
|
|
793
840
|
return obj;
|
|
794
841
|
}
|
|
795
842
|
|
|
796
|
-
// src/isolation/
|
|
843
|
+
// src/isolation/clone.ts
|
|
797
844
|
import { execFile } from "child_process";
|
|
798
|
-
import {
|
|
845
|
+
import { existsSync as existsSync2 } from "fs";
|
|
846
|
+
import { mkdir as mkdir3, readdir as readdir2, rm } from "fs/promises";
|
|
847
|
+
import { dirname, resolve } from "path";
|
|
799
848
|
import { promisify } from "util";
|
|
800
|
-
|
|
801
|
-
|
|
802
|
-
|
|
803
|
-
|
|
804
|
-
const
|
|
805
|
-
|
|
806
|
-
const
|
|
807
|
-
|
|
849
|
+
var execFileAsync = promisify(execFile);
|
|
850
|
+
var GIT_TIMEOUT = 6e4;
|
|
851
|
+
async function createSessionClone(options) {
|
|
852
|
+
const repoPath = resolve(options.repoPath);
|
|
853
|
+
const sessionDir = resolve(options.sessionDir);
|
|
854
|
+
await mkdir3(dirname(sessionDir), { recursive: true });
|
|
855
|
+
const remoteUrl = await execFileAsync("git", ["config", "--get", "remote.origin.url"], {
|
|
856
|
+
cwd: repoPath,
|
|
857
|
+
timeout: GIT_TIMEOUT
|
|
858
|
+
}).then(({ stdout }) => stdout.trim()).catch(() => "");
|
|
859
|
+
const cloneSource = remoteUrl || repoPath;
|
|
860
|
+
await execFileAsync("git", ["clone", "--branch", options.baseBranch, cloneSource, sessionDir], {
|
|
861
|
+
timeout: GIT_TIMEOUT
|
|
808
862
|
});
|
|
809
|
-
|
|
810
|
-
|
|
811
|
-
|
|
812
|
-
|
|
813
|
-
|
|
814
|
-
|
|
815
|
-
if (
|
|
816
|
-
|
|
863
|
+
if (options.branch !== options.baseBranch) {
|
|
864
|
+
const branchExists = await execFileAsync(
|
|
865
|
+
"git",
|
|
866
|
+
["ls-remote", "--heads", "origin", options.branch],
|
|
867
|
+
{ cwd: sessionDir, timeout: GIT_TIMEOUT }
|
|
868
|
+
).then(({ stdout }) => stdout.trim().length > 0).catch(() => false);
|
|
869
|
+
if (branchExists) {
|
|
870
|
+
await execFileAsync("git", ["fetch", "origin", options.branch], {
|
|
871
|
+
cwd: sessionDir,
|
|
872
|
+
timeout: GIT_TIMEOUT
|
|
873
|
+
});
|
|
874
|
+
await execFileAsync("git", ["checkout", "-b", options.branch, `origin/${options.branch}`], {
|
|
875
|
+
cwd: sessionDir,
|
|
876
|
+
timeout: GIT_TIMEOUT
|
|
877
|
+
});
|
|
878
|
+
} else {
|
|
879
|
+
await execFileAsync("git", ["checkout", "-b", options.branch], {
|
|
880
|
+
cwd: sessionDir,
|
|
881
|
+
timeout: GIT_TIMEOUT
|
|
882
|
+
});
|
|
883
|
+
}
|
|
884
|
+
}
|
|
885
|
+
return { path: sessionDir, branch: options.branch, repoPath };
|
|
886
|
+
}
|
|
887
|
+
async function removeSessionClone(sessionPath) {
|
|
888
|
+
const absPath = resolve(sessionPath);
|
|
889
|
+
if (!existsSync2(absPath)) {
|
|
890
|
+
return;
|
|
891
|
+
}
|
|
892
|
+
await rm(absPath, { recursive: true, force: true });
|
|
893
|
+
}
|
|
894
|
+
async function listSessionClones(sessionsBaseDir) {
|
|
895
|
+
const absBase = resolve(sessionsBaseDir);
|
|
896
|
+
if (!existsSync2(absBase)) {
|
|
897
|
+
return [];
|
|
898
|
+
}
|
|
899
|
+
const entries = await readdir2(absBase, { withFileTypes: true });
|
|
900
|
+
const clones = [];
|
|
901
|
+
for (const entry of entries) {
|
|
902
|
+
if (!entry.isDirectory()) continue;
|
|
903
|
+
const clonePath = resolve(absBase, entry.name);
|
|
904
|
+
try {
|
|
905
|
+
const { stdout: branchOut } = await execFileAsync(
|
|
906
|
+
"git",
|
|
907
|
+
["rev-parse", "--abbrev-ref", "HEAD"],
|
|
908
|
+
{
|
|
909
|
+
cwd: clonePath,
|
|
910
|
+
timeout: GIT_TIMEOUT
|
|
911
|
+
}
|
|
912
|
+
);
|
|
913
|
+
let repoPath = clonePath;
|
|
914
|
+
try {
|
|
915
|
+
const { stdout: originUrl } = await execFileAsync(
|
|
916
|
+
"git",
|
|
917
|
+
["config", "--get", "remote.origin.url"],
|
|
918
|
+
{ cwd: clonePath, timeout: GIT_TIMEOUT }
|
|
919
|
+
);
|
|
920
|
+
const url = originUrl.trim();
|
|
921
|
+
if (url) repoPath = resolve(clonePath, url);
|
|
922
|
+
} catch {
|
|
923
|
+
}
|
|
924
|
+
clones.push({
|
|
925
|
+
path: clonePath,
|
|
926
|
+
branch: branchOut.trim(),
|
|
927
|
+
repoPath
|
|
928
|
+
});
|
|
929
|
+
} catch {
|
|
817
930
|
}
|
|
818
931
|
}
|
|
932
|
+
return clones;
|
|
819
933
|
}
|
|
820
934
|
|
|
821
935
|
// src/isolation/git.ts
|
|
822
|
-
|
|
823
|
-
|
|
936
|
+
import { execFile as execFile2 } from "child_process";
|
|
937
|
+
import { resolve as resolve2 } from "path";
|
|
938
|
+
import { promisify as promisify2 } from "util";
|
|
939
|
+
var execFileAsync2 = promisify2(execFile2);
|
|
940
|
+
var GIT_TIMEOUT2 = 6e4;
|
|
824
941
|
async function git(repoPath, args) {
|
|
825
|
-
const { stdout } = await
|
|
826
|
-
cwd:
|
|
827
|
-
timeout:
|
|
942
|
+
const { stdout } = await execFileAsync2("git", args, {
|
|
943
|
+
cwd: resolve2(repoPath),
|
|
944
|
+
timeout: GIT_TIMEOUT2
|
|
828
945
|
});
|
|
829
946
|
return stdout.trim();
|
|
830
947
|
}
|
|
831
948
|
async function createBranch(repoPath, branch, baseBranch) {
|
|
832
|
-
await
|
|
949
|
+
await git(repoPath, ["branch", branch, baseBranch]);
|
|
833
950
|
}
|
|
834
951
|
async function pushBranch(repoPath, branch, remote) {
|
|
835
|
-
await
|
|
952
|
+
await git(repoPath, ["push", remote, branch]);
|
|
836
953
|
}
|
|
837
954
|
async function fetchRemote(repoPath, remote) {
|
|
838
|
-
await
|
|
955
|
+
await git(repoPath, ["fetch", remote]);
|
|
839
956
|
}
|
|
840
957
|
async function deleteBranch(repoPath, branch) {
|
|
841
|
-
await
|
|
958
|
+
await git(repoPath, ["branch", "-D", branch]);
|
|
842
959
|
}
|
|
843
960
|
async function getCurrentBranch(repoPath) {
|
|
844
|
-
return
|
|
961
|
+
return git(repoPath, ["rev-parse", "--abbrev-ref", "HEAD"]);
|
|
845
962
|
}
|
|
846
|
-
function getBranchName(config, runId) {
|
|
963
|
+
function getBranchName(config, runId, branch) {
|
|
964
|
+
if (branch) return branch;
|
|
847
965
|
const prefix = config.branchPrefix ?? "feat";
|
|
848
966
|
const sanitized = runId.toLowerCase().replace(/[^a-z0-9-]/g, "-");
|
|
849
967
|
return `${prefix}/run-${sanitized}`;
|
|
850
968
|
}
|
|
851
|
-
async function
|
|
852
|
-
await
|
|
969
|
+
async function pushSessionBranch(sessionPath, branch, remote) {
|
|
970
|
+
await git(sessionPath, ["push", "-u", remote, branch]);
|
|
853
971
|
}
|
|
854
972
|
|
|
855
973
|
// src/isolation/sandbox.ts
|
|
856
|
-
import { resolve as
|
|
974
|
+
import { resolve as resolve3 } from "path";
|
|
857
975
|
var WRITE_TOOLS = /* @__PURE__ */ new Set(["Write", "Edit", "NotebookEdit"]);
|
|
858
|
-
function buildSandboxConfig(agent,
|
|
976
|
+
function buildSandboxConfig(agent, sessionPath) {
|
|
859
977
|
const isWritable = agent.sandbox === "writable";
|
|
860
|
-
const
|
|
978
|
+
const absSession = sessionPath ? resolve3(sessionPath) : void 0;
|
|
861
979
|
const allowedTools = isWritable ? agent.definition.tools : agent.definition.tools.filter((t) => !WRITE_TOOLS.has(t));
|
|
862
|
-
const readablePaths =
|
|
863
|
-
const writablePaths = isWritable &&
|
|
980
|
+
const readablePaths = absSession ? [absSession] : [];
|
|
981
|
+
const writablePaths = isWritable && absSession ? [absSession] : [];
|
|
864
982
|
return {
|
|
865
983
|
allowedTools,
|
|
866
984
|
readablePaths,
|
|
@@ -869,112 +987,9 @@ function buildSandboxConfig(agent, worktreePath) {
|
|
|
869
987
|
};
|
|
870
988
|
}
|
|
871
989
|
|
|
872
|
-
// src/isolation/worktree.ts
|
|
873
|
-
import { execFile as execFile2 } from "child_process";
|
|
874
|
-
import { existsSync as existsSync2 } from "fs";
|
|
875
|
-
import { readdir as readdir2, rm } from "fs/promises";
|
|
876
|
-
import { resolve as resolve3 } from "path";
|
|
877
|
-
import { promisify as promisify2 } from "util";
|
|
878
|
-
var execFileAsync2 = promisify2(execFile2);
|
|
879
|
-
var GIT_TIMEOUT2 = 6e4;
|
|
880
|
-
async function createWorktree(options) {
|
|
881
|
-
const repoPath = resolve3(options.repoPath);
|
|
882
|
-
const worktreeDir = resolve3(options.worktreeDir);
|
|
883
|
-
await withGitLock(repoPath, async () => {
|
|
884
|
-
await execFileAsync2(
|
|
885
|
-
"git",
|
|
886
|
-
["worktree", "add", "-b", options.branch, worktreeDir, options.baseBranch],
|
|
887
|
-
{ cwd: repoPath, timeout: GIT_TIMEOUT2 }
|
|
888
|
-
);
|
|
889
|
-
});
|
|
890
|
-
await execFileAsync2("git", ["config", "core.hooksPath", "/dev/null"], {
|
|
891
|
-
cwd: worktreeDir,
|
|
892
|
-
timeout: GIT_TIMEOUT2
|
|
893
|
-
});
|
|
894
|
-
return { path: worktreeDir, branch: options.branch, repoPath };
|
|
895
|
-
}
|
|
896
|
-
async function removeWorktree(worktreePath) {
|
|
897
|
-
const absPath = resolve3(worktreePath);
|
|
898
|
-
if (!existsSync2(absPath)) {
|
|
899
|
-
return;
|
|
900
|
-
}
|
|
901
|
-
const repoPath = await findRepoForWorktree(absPath);
|
|
902
|
-
if (repoPath) {
|
|
903
|
-
await withGitLock(repoPath, async () => {
|
|
904
|
-
try {
|
|
905
|
-
await execFileAsync2("git", ["worktree", "remove", absPath, "--force"], {
|
|
906
|
-
cwd: repoPath,
|
|
907
|
-
timeout: GIT_TIMEOUT2
|
|
908
|
-
});
|
|
909
|
-
} catch {
|
|
910
|
-
await rm(absPath, { recursive: true, force: true });
|
|
911
|
-
await execFileAsync2("git", ["worktree", "prune"], {
|
|
912
|
-
cwd: repoPath,
|
|
913
|
-
timeout: GIT_TIMEOUT2
|
|
914
|
-
}).catch(() => {
|
|
915
|
-
});
|
|
916
|
-
}
|
|
917
|
-
await execFileAsync2("git", ["update-index", "--refresh"], {
|
|
918
|
-
cwd: repoPath,
|
|
919
|
-
timeout: GIT_TIMEOUT2
|
|
920
|
-
}).catch(() => {
|
|
921
|
-
});
|
|
922
|
-
});
|
|
923
|
-
} else {
|
|
924
|
-
await rm(absPath, { recursive: true, force: true });
|
|
925
|
-
}
|
|
926
|
-
}
|
|
927
|
-
async function listWorktrees(repoPath) {
|
|
928
|
-
const absRepoPath = resolve3(repoPath);
|
|
929
|
-
const { stdout } = await execFileAsync2("git", ["worktree", "list", "--porcelain"], {
|
|
930
|
-
cwd: absRepoPath,
|
|
931
|
-
timeout: GIT_TIMEOUT2
|
|
932
|
-
});
|
|
933
|
-
const worktrees = [];
|
|
934
|
-
let current;
|
|
935
|
-
for (const line of stdout.split("\n")) {
|
|
936
|
-
if (line.startsWith("worktree ")) {
|
|
937
|
-
if (current) {
|
|
938
|
-
worktrees.push({ ...current, repoPath: absRepoPath });
|
|
939
|
-
}
|
|
940
|
-
current = { path: line.slice(9), branch: "" };
|
|
941
|
-
} else if (line.startsWith("branch ") && current) {
|
|
942
|
-
current.branch = line.slice(7).replace("refs/heads/", "");
|
|
943
|
-
}
|
|
944
|
-
}
|
|
945
|
-
if (current) {
|
|
946
|
-
worktrees.push({ ...current, repoPath: absRepoPath });
|
|
947
|
-
}
|
|
948
|
-
return worktrees;
|
|
949
|
-
}
|
|
950
|
-
async function cleanupOrphanedWorktrees(worktreeBaseDir) {
|
|
951
|
-
const absBase = resolve3(worktreeBaseDir);
|
|
952
|
-
if (!existsSync2(absBase)) {
|
|
953
|
-
return;
|
|
954
|
-
}
|
|
955
|
-
const entries = await readdir2(absBase, { withFileTypes: true });
|
|
956
|
-
for (const entry of entries) {
|
|
957
|
-
if (!entry.isDirectory()) continue;
|
|
958
|
-
const worktreePath = resolve3(absBase, entry.name);
|
|
959
|
-
await removeWorktree(worktreePath);
|
|
960
|
-
}
|
|
961
|
-
}
|
|
962
|
-
async function findRepoForWorktree(worktreePath) {
|
|
963
|
-
try {
|
|
964
|
-
const { stdout } = await execFileAsync2("git", ["rev-parse", "--git-common-dir"], {
|
|
965
|
-
cwd: worktreePath,
|
|
966
|
-
timeout: GIT_TIMEOUT2
|
|
967
|
-
});
|
|
968
|
-
const gitCommonDir = resolve3(worktreePath, stdout.trim());
|
|
969
|
-
return resolve3(gitCommonDir, "..");
|
|
970
|
-
} catch {
|
|
971
|
-
return void 0;
|
|
972
|
-
}
|
|
973
|
-
}
|
|
974
|
-
|
|
975
990
|
// src/middleware/audit-log.ts
|
|
976
991
|
import { appendFile as appendFile3, mkdir as mkdir4 } from "fs/promises";
|
|
977
|
-
import
|
|
992
|
+
import path6 from "path";
|
|
978
993
|
var DEFAULT_FLUSH_INTERVAL_MS = 500;
|
|
979
994
|
var DEFAULT_FLUSH_SIZE = 20;
|
|
980
995
|
function auditLog(options) {
|
|
@@ -988,7 +1003,7 @@ function auditLog(options) {
|
|
|
988
1003
|
let dirCreated = false;
|
|
989
1004
|
const buffers = /* @__PURE__ */ new Map();
|
|
990
1005
|
let flushTimer;
|
|
991
|
-
async function
|
|
1006
|
+
async function ensureDir2() {
|
|
992
1007
|
if (!dirCreated) {
|
|
993
1008
|
await mkdir4(dir, { recursive: true });
|
|
994
1009
|
dirCreated = true;
|
|
@@ -996,10 +1011,10 @@ function auditLog(options) {
|
|
|
996
1011
|
}
|
|
997
1012
|
async function flushAll() {
|
|
998
1013
|
if (buffers.size === 0) return;
|
|
999
|
-
await
|
|
1014
|
+
await ensureDir2();
|
|
1000
1015
|
const writes = [];
|
|
1001
1016
|
for (const [sessionId, lines] of buffers) {
|
|
1002
|
-
const filePath =
|
|
1017
|
+
const filePath = path6.join(dir, `${sessionId}.jsonl`);
|
|
1003
1018
|
writes.push(appendFile3(filePath, lines.join(""), "utf-8"));
|
|
1004
1019
|
}
|
|
1005
1020
|
buffers.clear();
|
|
@@ -1008,8 +1023,8 @@ function auditLog(options) {
|
|
|
1008
1023
|
async function flushSession(sessionId) {
|
|
1009
1024
|
const lines = buffers.get(sessionId);
|
|
1010
1025
|
if (!lines || lines.length === 0) return;
|
|
1011
|
-
await
|
|
1012
|
-
const filePath =
|
|
1026
|
+
await ensureDir2();
|
|
1027
|
+
const filePath = path6.join(dir, `${sessionId}.jsonl`);
|
|
1013
1028
|
await appendFile3(filePath, lines.join(""), "utf-8");
|
|
1014
1029
|
buffers.delete(sessionId);
|
|
1015
1030
|
}
|
|
@@ -1174,10 +1189,116 @@ function loopDetection(options) {
|
|
|
1174
1189
|
}
|
|
1175
1190
|
|
|
1176
1191
|
// src/orchestrator.ts
|
|
1177
|
-
import { randomUUID } from "crypto";
|
|
1178
|
-
import { existsSync as
|
|
1179
|
-
import { mkdir as
|
|
1180
|
-
import
|
|
1192
|
+
import { randomUUID as randomUUID3 } from "crypto";
|
|
1193
|
+
import { existsSync as existsSync6 } from "fs";
|
|
1194
|
+
import { mkdir as mkdir6, readFile as readFile7 } from "fs/promises";
|
|
1195
|
+
import path11 from "path";
|
|
1196
|
+
|
|
1197
|
+
// src/orchestrator/run-store.ts
|
|
1198
|
+
import { existsSync as existsSync3 } from "fs";
|
|
1199
|
+
import { mkdir as mkdir5, readdir as readdir3, readFile as readFile4, writeFile as writeFile2 } from "fs/promises";
|
|
1200
|
+
import path7 from "path";
|
|
1201
|
+
|
|
1202
|
+
// src/shared/process.ts
|
|
1203
|
+
function isProcessAlive(pid) {
|
|
1204
|
+
if (!Number.isInteger(pid) || pid <= 0) {
|
|
1205
|
+
return false;
|
|
1206
|
+
}
|
|
1207
|
+
try {
|
|
1208
|
+
process.kill(pid, 0);
|
|
1209
|
+
return true;
|
|
1210
|
+
} catch (error) {
|
|
1211
|
+
if (error instanceof Error && "code" in error && error.code === "EPERM") {
|
|
1212
|
+
return true;
|
|
1213
|
+
}
|
|
1214
|
+
return false;
|
|
1215
|
+
}
|
|
1216
|
+
}
|
|
1217
|
+
|
|
1218
|
+
// src/orchestrator/run-store.ts
|
|
1219
|
+
var ORPHAN_GRACE_PERIOD_MS = 3e4;
|
|
1220
|
+
var RunStore = class {
|
|
1221
|
+
runsDir;
|
|
1222
|
+
createdDirs = /* @__PURE__ */ new Set();
|
|
1223
|
+
constructor(options = {}) {
|
|
1224
|
+
this.runsDir = options.runsDir ?? getRunsDir();
|
|
1225
|
+
}
|
|
1226
|
+
/**
|
|
1227
|
+
* Persist a run to disk. Creates the repo subdirectory if needed.
|
|
1228
|
+
* Fails silently — run persistence is non-critical.
|
|
1229
|
+
*/
|
|
1230
|
+
async persistRun(run) {
|
|
1231
|
+
try {
|
|
1232
|
+
const slug = toRepoSlug({ path: run.repo });
|
|
1233
|
+
const repoDir = getRepoRunsDir(slug);
|
|
1234
|
+
if (!this.createdDirs.has(repoDir)) {
|
|
1235
|
+
await mkdir5(repoDir, { recursive: true });
|
|
1236
|
+
this.createdDirs.add(repoDir);
|
|
1237
|
+
}
|
|
1238
|
+
const filePath = path7.join(repoDir, `${run.runId}.json`);
|
|
1239
|
+
await writeFile2(filePath, JSON.stringify(run, null, 2), "utf-8");
|
|
1240
|
+
} catch {
|
|
1241
|
+
}
|
|
1242
|
+
}
|
|
1243
|
+
/**
|
|
1244
|
+
* Find all runs that were left in "running" state but whose process died.
|
|
1245
|
+
* Returns them so the caller can emit failure events and update status.
|
|
1246
|
+
*/
|
|
1247
|
+
async recoverOrphanedRuns() {
|
|
1248
|
+
if (!existsSync3(this.runsDir)) return [];
|
|
1249
|
+
const orphaned = [];
|
|
1250
|
+
try {
|
|
1251
|
+
const jsonFiles = await this.collectRunFiles();
|
|
1252
|
+
for (const filePath of jsonFiles) {
|
|
1253
|
+
const run = await this.recoverRunIfOrphaned(filePath);
|
|
1254
|
+
if (run) orphaned.push(run);
|
|
1255
|
+
}
|
|
1256
|
+
} catch {
|
|
1257
|
+
}
|
|
1258
|
+
return orphaned;
|
|
1259
|
+
}
|
|
1260
|
+
/**
|
|
1261
|
+
* Collect all .json run files from the runs directory tree.
|
|
1262
|
+
* Searches both top-level and repo subdirectories.
|
|
1263
|
+
*/
|
|
1264
|
+
async collectRunFiles() {
|
|
1265
|
+
const entries = await readdir3(this.runsDir, { withFileTypes: true });
|
|
1266
|
+
const jsonFiles = [];
|
|
1267
|
+
for (const entry of entries) {
|
|
1268
|
+
if (entry.isDirectory()) {
|
|
1269
|
+
const subDir = path7.join(this.runsDir, entry.name);
|
|
1270
|
+
const subFiles = await readdir3(subDir);
|
|
1271
|
+
for (const f of subFiles) {
|
|
1272
|
+
if (f.endsWith(".json")) jsonFiles.push(path7.join(subDir, f));
|
|
1273
|
+
}
|
|
1274
|
+
} else if (entry.name.endsWith(".json")) {
|
|
1275
|
+
jsonFiles.push(path7.join(this.runsDir, entry.name));
|
|
1276
|
+
}
|
|
1277
|
+
}
|
|
1278
|
+
return jsonFiles;
|
|
1279
|
+
}
|
|
1280
|
+
/**
|
|
1281
|
+
* Check if a run file represents an orphaned run.
|
|
1282
|
+
* If so, update its status to "failed" and return it.
|
|
1283
|
+
*/
|
|
1284
|
+
async recoverRunIfOrphaned(filePath) {
|
|
1285
|
+
const content = await readFile4(filePath, "utf-8");
|
|
1286
|
+
const run = JSON.parse(content);
|
|
1287
|
+
if (run.status !== "running") return null;
|
|
1288
|
+
if (run.pid && run.pid === process.pid) return null;
|
|
1289
|
+
if (run.pid && isProcessAlive(run.pid)) return null;
|
|
1290
|
+
const ageMs = Date.now() - new Date(run.createdAt).getTime();
|
|
1291
|
+
if (ageMs < ORPHAN_GRACE_PERIOD_MS) return null;
|
|
1292
|
+
run.status = "failed";
|
|
1293
|
+
run.updatedAt = (/* @__PURE__ */ new Date()).toISOString();
|
|
1294
|
+
await writeFile2(filePath, JSON.stringify(run, null, 2), "utf-8");
|
|
1295
|
+
return run;
|
|
1296
|
+
}
|
|
1297
|
+
};
|
|
1298
|
+
|
|
1299
|
+
// src/runner/session-executor.ts
|
|
1300
|
+
import { readFile as readFile5 } from "fs/promises";
|
|
1301
|
+
import path8 from "path";
|
|
1181
1302
|
|
|
1182
1303
|
// src/runner/output-parser.ts
|
|
1183
1304
|
function extractJson(raw) {
|
|
@@ -1195,37 +1316,61 @@ function extractJson(raw) {
|
|
|
1195
1316
|
}
|
|
1196
1317
|
return void 0;
|
|
1197
1318
|
}
|
|
1319
|
+
var PR_URL_REGEX = /^PR_URL:\s*(https?:\/\/\S+)/m;
|
|
1320
|
+
function extractPrUrl(raw) {
|
|
1321
|
+
const match = raw.match(PR_URL_REGEX);
|
|
1322
|
+
if (!match?.[1]) return void 0;
|
|
1323
|
+
const prUrl = match[1];
|
|
1324
|
+
const numberMatch = prUrl.match(/\/pull\/(\d+)/);
|
|
1325
|
+
if (numberMatch?.[1]) {
|
|
1326
|
+
return { prUrl, prNumber: Number.parseInt(numberMatch[1], 10) };
|
|
1327
|
+
}
|
|
1328
|
+
return { prUrl };
|
|
1329
|
+
}
|
|
1198
1330
|
function parseOutput(raw, schema) {
|
|
1331
|
+
const prInfo = extractPrUrl(raw);
|
|
1332
|
+
const base = { rawOutput: raw };
|
|
1333
|
+
if (prInfo) {
|
|
1334
|
+
base.prUrl = prInfo.prUrl;
|
|
1335
|
+
if (prInfo.prNumber !== void 0) {
|
|
1336
|
+
base.prNumber = prInfo.prNumber;
|
|
1337
|
+
}
|
|
1338
|
+
}
|
|
1199
1339
|
if (!schema) {
|
|
1200
|
-
return
|
|
1340
|
+
return base;
|
|
1201
1341
|
}
|
|
1202
1342
|
const extracted = extractJson(raw);
|
|
1203
1343
|
if (extracted === void 0) {
|
|
1204
|
-
|
|
1205
|
-
|
|
1206
|
-
parseError: "Failed to extract JSON from output"
|
|
1207
|
-
};
|
|
1344
|
+
base.parseError = "Failed to extract JSON from output";
|
|
1345
|
+
return base;
|
|
1208
1346
|
}
|
|
1209
1347
|
const result = schema.safeParse(extracted);
|
|
1210
1348
|
if (!result.success) {
|
|
1211
|
-
|
|
1212
|
-
|
|
1213
|
-
parseError: `Schema validation failed: ${result.error.message}`
|
|
1214
|
-
};
|
|
1349
|
+
base.parseError = `Schema validation failed: ${result.error.message}`;
|
|
1350
|
+
return base;
|
|
1215
1351
|
}
|
|
1216
|
-
|
|
1217
|
-
|
|
1218
|
-
output: result.data
|
|
1219
|
-
};
|
|
1352
|
+
base.output = result.data;
|
|
1353
|
+
return base;
|
|
1220
1354
|
}
|
|
1221
1355
|
|
|
1222
|
-
// src/
|
|
1356
|
+
// src/sdk-types.ts
|
|
1223
1357
|
function isInitMessage(msg) {
|
|
1224
1358
|
return msg.type === "system" && msg.subtype === "init";
|
|
1225
1359
|
}
|
|
1226
1360
|
function isResultMessage(msg) {
|
|
1227
1361
|
return msg.type === "result";
|
|
1228
1362
|
}
|
|
1363
|
+
function isAssistantMessage(msg) {
|
|
1364
|
+
return msg.type === "assistant" && !msg.subtype;
|
|
1365
|
+
}
|
|
1366
|
+
function isToolUseMessage(msg) {
|
|
1367
|
+
return msg.type === "assistant" && msg.subtype === "tool_use";
|
|
1368
|
+
}
|
|
1369
|
+
function isToolResultMessage(msg) {
|
|
1370
|
+
return msg.type === "assistant" && msg.subtype === "tool_result";
|
|
1371
|
+
}
|
|
1372
|
+
|
|
1373
|
+
// src/runner/session.ts
|
|
1229
1374
|
function checkAborted(signal) {
|
|
1230
1375
|
if (signal.aborted) {
|
|
1231
1376
|
const reason = signal.reason;
|
|
@@ -1237,8 +1382,36 @@ function toSessionError(error, isTimeout, sessionId) {
|
|
|
1237
1382
|
const message = error instanceof Error ? error.message : String(error);
|
|
1238
1383
|
return new SessionError(message, isTimeout ? "timeout" : "unknown", sessionId);
|
|
1239
1384
|
}
|
|
1385
|
+
function buildQueryOptions(options) {
|
|
1386
|
+
const { sessionPath, sandboxConfig } = options;
|
|
1387
|
+
const queryOptions = {
|
|
1388
|
+
// Always pass cwd: session clone for writable agents, repo root for readonly.
|
|
1389
|
+
// Without this, readonly agents default to process.cwd() and may write to main tree.
|
|
1390
|
+
cwd: sessionPath ?? options.repoPath,
|
|
1391
|
+
// maxTurns: agent.maxTurns,
|
|
1392
|
+
allowedTools: sandboxConfig.allowedTools,
|
|
1393
|
+
// Workers run detached without a TTY — bypass interactive permission prompts.
|
|
1394
|
+
// Required pair: permissionMode alone is not enough, SDK also needs the flag.
|
|
1395
|
+
permissionMode: "bypassPermissions",
|
|
1396
|
+
allowDangerouslySkipPermissions: true,
|
|
1397
|
+
// Load project-level CLAUDE.md so agents inherit project rules and conventions.
|
|
1398
|
+
settingSources: ["user", "project", "local"],
|
|
1399
|
+
// Don't persist agent sessions — they are ephemeral clones.
|
|
1400
|
+
persistSession: false
|
|
1401
|
+
};
|
|
1402
|
+
if (options.resumeSessionId) {
|
|
1403
|
+
queryOptions.resume = options.resumeSessionId;
|
|
1404
|
+
}
|
|
1405
|
+
if (options.mcpServers && Object.keys(options.mcpServers).length > 0) {
|
|
1406
|
+
queryOptions.mcpServers = options.mcpServers;
|
|
1407
|
+
}
|
|
1408
|
+
if (options.env && Object.keys(options.env).length > 0) {
|
|
1409
|
+
queryOptions.env = { ...process.env, ...options.env };
|
|
1410
|
+
}
|
|
1411
|
+
return queryOptions;
|
|
1412
|
+
}
|
|
1240
1413
|
async function runSession(options) {
|
|
1241
|
-
const {
|
|
1414
|
+
const { prompt, initTimeoutMs, maxDurationMs, onEvent } = options;
|
|
1242
1415
|
const startTime = Date.now();
|
|
1243
1416
|
let sessionId = "";
|
|
1244
1417
|
const abortController = new AbortController();
|
|
@@ -1250,30 +1423,11 @@ async function runSession(options) {
|
|
|
1250
1423
|
}, maxDurationMs);
|
|
1251
1424
|
try {
|
|
1252
1425
|
const sdk = await import("@anthropic-ai/claude-agent-sdk");
|
|
1253
|
-
const queryOptions =
|
|
1254
|
-
// Always pass cwd: worktree for writable agents, repo root for readonly.
|
|
1255
|
-
// Without this, readonly agents default to process.cwd() and may write to main tree.
|
|
1256
|
-
cwd: worktreePath ?? options.repoPath,
|
|
1257
|
-
maxTurns: agent.maxTurns,
|
|
1258
|
-
allowedTools: sandboxConfig.allowedTools
|
|
1259
|
-
};
|
|
1260
|
-
if (options.resumeSessionId) {
|
|
1261
|
-
queryOptions.resume = options.resumeSessionId;
|
|
1262
|
-
}
|
|
1263
|
-
if (options.mcpServers?.length) {
|
|
1264
|
-
queryOptions.mcpServers = options.mcpServers;
|
|
1265
|
-
}
|
|
1426
|
+
const queryOptions = buildQueryOptions(options);
|
|
1266
1427
|
let output = "";
|
|
1267
1428
|
let costUsd = 0;
|
|
1268
1429
|
let turnCount = 0;
|
|
1269
|
-
const
|
|
1270
|
-
|
|
1271
|
-
---
|
|
1272
|
-
|
|
1273
|
-
## Task
|
|
1274
|
-
|
|
1275
|
-
${prompt}` : prompt;
|
|
1276
|
-
const stream = sdk.query({ prompt: fullPrompt, options: queryOptions });
|
|
1430
|
+
const stream = sdk.query({ prompt, options: queryOptions });
|
|
1277
1431
|
for await (const message of stream) {
|
|
1278
1432
|
checkAborted(abortController.signal);
|
|
1279
1433
|
const msg = message;
|
|
@@ -1387,44 +1541,725 @@ async function runWithRecovery(options) {
|
|
|
1387
1541
|
throw new Error("Recovery failed: unreachable");
|
|
1388
1542
|
}
|
|
1389
1543
|
|
|
1390
|
-
// src/
|
|
1391
|
-
|
|
1392
|
-
|
|
1393
|
-
|
|
1544
|
+
// src/runner/session-executor.ts
|
|
1545
|
+
var INSTRUCTIONS_PATH = ".neo/INSTRUCTIONS.md";
|
|
1546
|
+
async function loadRepoInstructions(repoPath) {
|
|
1547
|
+
const filePath = path8.join(repoPath, INSTRUCTIONS_PATH);
|
|
1548
|
+
try {
|
|
1549
|
+
return await readFile5(filePath, "utf-8");
|
|
1550
|
+
} catch {
|
|
1551
|
+
return void 0;
|
|
1552
|
+
}
|
|
1553
|
+
}
|
|
1554
|
+
function buildGitStrategyInstructions(strategy, agent, branch, baseBranch, remote, metadata) {
|
|
1555
|
+
const prNumber = metadata?.prNumber;
|
|
1556
|
+
if (agent.sandbox !== "writable") {
|
|
1557
|
+
if (prNumber) {
|
|
1558
|
+
return `## Pull Request
|
|
1394
1559
|
|
|
1395
|
-
|
|
1396
|
-
|
|
1397
|
-
|
|
1398
|
-
|
|
1399
|
-
|
|
1400
|
-
|
|
1401
|
-
|
|
1402
|
-
|
|
1403
|
-
|
|
1404
|
-
|
|
1405
|
-
|
|
1406
|
-
|
|
1407
|
-
|
|
1408
|
-
|
|
1409
|
-
|
|
1410
|
-
|
|
1411
|
-
|
|
1412
|
-
}
|
|
1413
|
-
|
|
1414
|
-
|
|
1415
|
-
|
|
1416
|
-
|
|
1417
|
-
|
|
1418
|
-
|
|
1419
|
-
|
|
1420
|
-
|
|
1421
|
-
|
|
1422
|
-
|
|
1423
|
-
|
|
1424
|
-
|
|
1425
|
-
|
|
1426
|
-
|
|
1427
|
-
|
|
1560
|
+
PR #${String(prNumber)} is open for this task. After your review, leave your findings as a comment: \`gh pr comment ${String(prNumber)} --body "..."\`.`;
|
|
1561
|
+
}
|
|
1562
|
+
return null;
|
|
1563
|
+
}
|
|
1564
|
+
if (strategy === "pr") {
|
|
1565
|
+
if (prNumber) {
|
|
1566
|
+
return `## Git workflow
|
|
1567
|
+
|
|
1568
|
+
You are on branch \`${branch}\`.
|
|
1569
|
+
An open PR exists: #${String(prNumber)}.
|
|
1570
|
+
After committing, push your changes to the branch. The PR will be updated automatically.
|
|
1571
|
+
Leave a review comment on the PR summarizing what you did: \`gh pr comment ${String(prNumber)} --body "..."\`.`;
|
|
1572
|
+
}
|
|
1573
|
+
return `## Git workflow
|
|
1574
|
+
|
|
1575
|
+
You are on branch \`${branch}\` (base: \`${baseBranch}\`).
|
|
1576
|
+
After committing:
|
|
1577
|
+
1. Push: \`git push -u ${remote} ${branch}\`
|
|
1578
|
+
2. Create a PR against \`${baseBranch}\` \u2014 choose a title and description that reflect the work you completed. End the PR body with: \`\u{1F916} Generated with [neo](https://neotx.dev)\`
|
|
1579
|
+
3. Output the PR URL on a dedicated line: \`PR_URL: <url>\``;
|
|
1580
|
+
}
|
|
1581
|
+
return `## Git workflow
|
|
1582
|
+
|
|
1583
|
+
You are on branch \`${branch}\` (base: \`${baseBranch}\`).
|
|
1584
|
+
Commit your changes. The branch will be pushed automatically.`;
|
|
1585
|
+
}
|
|
1586
|
+
function buildReportingInstructions(_runId) {
|
|
1587
|
+
return `## Reporting & Memory
|
|
1588
|
+
|
|
1589
|
+
### Progress reporting (real-time, visible in TUI)
|
|
1590
|
+
Chain \`neo log\` with the command that triggered it \u2014 never standalone:
|
|
1591
|
+
\`\`\`bash
|
|
1592
|
+
pnpm test && neo log milestone "all tests passing" || neo log blocker "tests failing"
|
|
1593
|
+
git push origin HEAD && neo log action "pushed to branch"
|
|
1594
|
+
neo log decision "chose JWT over sessions \u2014 simpler for MVP"
|
|
1595
|
+
\`\`\`
|
|
1596
|
+
|
|
1597
|
+
### Memory (persistent, injected into future agent prompts)
|
|
1598
|
+
Write discoveries so the next agent on this repo starts smarter.
|
|
1599
|
+
|
|
1600
|
+
**Be selective** \u2014 only write a memory if it would change HOW you or future agents approach work:
|
|
1601
|
+
\`\`\`bash
|
|
1602
|
+
# GOOD: affects workflow decisions
|
|
1603
|
+
neo memory write --type fact --scope $NEO_REPOSITORY "CI requires pnpm build before push \u2014 no auto-rebuild in pipeline"
|
|
1604
|
+
neo memory write --type fact --scope $NEO_REPOSITORY "Biome enforces complexity max 20 \u2014 extract helpers for large functions"
|
|
1605
|
+
neo memory write --type procedure --scope $NEO_REPOSITORY "Integration tests require DATABASE_URL env var \u2014 set before running"
|
|
1606
|
+
|
|
1607
|
+
# BAD: trivial or derivable \u2014 do NOT write these
|
|
1608
|
+
# "packages/core has 71 files" \u2014 derivable from ls
|
|
1609
|
+
# "Uses React 19" \u2014 visible in package.json
|
|
1610
|
+
# "apps/web has no test framework" \u2014 derivable from ls/cat
|
|
1611
|
+
\`\`\`
|
|
1612
|
+
|
|
1613
|
+
**The test**: if \`cat package.json\`, \`ls\`, or reading the README can answer it, do NOT memorize it. Only memorize truths that affect decisions or non-obvious workflows learned from failure.
|
|
1614
|
+
|
|
1615
|
+
Write at key moments: after resolving a non-obvious issue, after discovering a build/CI quirk, before finishing.`;
|
|
1616
|
+
}
|
|
1617
|
+
function buildFullPrompt(agentPrompt, repoInstructions, gitInstructions, taskPrompt, memoryContext, cwdInstructions, reportingInstructions) {
|
|
1618
|
+
const sections = [];
|
|
1619
|
+
if (agentPrompt) sections.push(agentPrompt);
|
|
1620
|
+
if (cwdInstructions) sections.push(cwdInstructions);
|
|
1621
|
+
if (memoryContext) sections.push(memoryContext);
|
|
1622
|
+
if (repoInstructions) sections.push(`## Repository instructions
|
|
1623
|
+
|
|
1624
|
+
${repoInstructions}`);
|
|
1625
|
+
if (gitInstructions) sections.push(gitInstructions);
|
|
1626
|
+
if (reportingInstructions) sections.push(reportingInstructions);
|
|
1627
|
+
sections.push(`## Task
|
|
1628
|
+
|
|
1629
|
+
${taskPrompt}`);
|
|
1630
|
+
return sections.join("\n\n---\n\n");
|
|
1631
|
+
}
|
|
1632
|
+
function buildMiddlewareContext(runId, workflow, step, agent, repo, getContextValue) {
|
|
1633
|
+
const store = /* @__PURE__ */ new Map();
|
|
1634
|
+
return {
|
|
1635
|
+
runId,
|
|
1636
|
+
workflow,
|
|
1637
|
+
step,
|
|
1638
|
+
agent,
|
|
1639
|
+
repo,
|
|
1640
|
+
get: ((key) => {
|
|
1641
|
+
const value = getContextValue(key);
|
|
1642
|
+
if (value !== void 0) return value;
|
|
1643
|
+
return store.get(key);
|
|
1644
|
+
}),
|
|
1645
|
+
set: ((key, value) => {
|
|
1646
|
+
store.set(key, value);
|
|
1647
|
+
})
|
|
1648
|
+
};
|
|
1649
|
+
}
|
|
1650
|
+
var SessionExecutor = class {
|
|
1651
|
+
constructor(config, getContextValue) {
|
|
1652
|
+
this.config = config;
|
|
1653
|
+
this.getContextValue = getContextValue;
|
|
1654
|
+
}
|
|
1655
|
+
/**
|
|
1656
|
+
* Execute an agent session with the given input and dependencies.
|
|
1657
|
+
* Handles prompt building, SDK invocation via recovery wrapper, and output parsing.
|
|
1658
|
+
*/
|
|
1659
|
+
async execute(input, deps) {
|
|
1660
|
+
const {
|
|
1661
|
+
runId,
|
|
1662
|
+
agent,
|
|
1663
|
+
stepDef,
|
|
1664
|
+
repoConfig,
|
|
1665
|
+
repoPath,
|
|
1666
|
+
prompt: taskPrompt,
|
|
1667
|
+
branch,
|
|
1668
|
+
gitStrategy,
|
|
1669
|
+
sessionPath,
|
|
1670
|
+
metadata,
|
|
1671
|
+
startedAt
|
|
1672
|
+
} = input;
|
|
1673
|
+
const { middleware, mcpServers, memoryContext, onAttempt } = deps;
|
|
1674
|
+
if (agent.sandbox === "writable" && !branch) {
|
|
1675
|
+
throw new Error(
|
|
1676
|
+
"Validation error: --branch is required for writable agents. Provide an explicit branch name (e.g. --branch feat/PROJ-42-description)."
|
|
1677
|
+
);
|
|
1678
|
+
}
|
|
1679
|
+
const branchName = agent.sandbox === "writable" ? branch : "";
|
|
1680
|
+
const sandboxConfig = buildSandboxConfig(agent, sessionPath);
|
|
1681
|
+
const chain = buildMiddlewareChain(middleware);
|
|
1682
|
+
const middlewareContext = buildMiddlewareContext(
|
|
1683
|
+
runId,
|
|
1684
|
+
stepDef.prompt ? "workflow" : "direct",
|
|
1685
|
+
"execute",
|
|
1686
|
+
agent.name,
|
|
1687
|
+
repoPath,
|
|
1688
|
+
this.getContextValue
|
|
1689
|
+
);
|
|
1690
|
+
const hooks = buildSDKHooks(chain, middlewareContext, middleware);
|
|
1691
|
+
const repoInstructions = await loadRepoInstructions(repoPath);
|
|
1692
|
+
const gitInstructions = buildGitStrategyInstructions(
|
|
1693
|
+
gitStrategy,
|
|
1694
|
+
agent,
|
|
1695
|
+
branchName,
|
|
1696
|
+
repoConfig.defaultBranch,
|
|
1697
|
+
repoConfig.pushRemote ?? "origin",
|
|
1698
|
+
metadata
|
|
1699
|
+
);
|
|
1700
|
+
const cwdInstructions = sessionPath ? `## Working directory
|
|
1701
|
+
|
|
1702
|
+
You are working in an isolated clone at: \`${sessionPath}\`
|
|
1703
|
+
ALWAYS run commands from this directory. NEVER cd to or operate on any other repository.` : void 0;
|
|
1704
|
+
const reportingInstructions = buildReportingInstructions(runId);
|
|
1705
|
+
const fullPrompt = buildFullPrompt(
|
|
1706
|
+
agent.definition.prompt,
|
|
1707
|
+
repoInstructions,
|
|
1708
|
+
gitInstructions,
|
|
1709
|
+
stepDef.prompt ?? taskPrompt,
|
|
1710
|
+
memoryContext,
|
|
1711
|
+
cwdInstructions,
|
|
1712
|
+
reportingInstructions
|
|
1713
|
+
);
|
|
1714
|
+
const recoveryOpts = stepDef.recovery;
|
|
1715
|
+
const agentEnv = {
|
|
1716
|
+
NEO_RUN_ID: runId,
|
|
1717
|
+
NEO_AGENT_NAME: agent.name,
|
|
1718
|
+
NEO_REPOSITORY: repoPath
|
|
1719
|
+
};
|
|
1720
|
+
const sessionResult = await runWithRecovery({
|
|
1721
|
+
agent,
|
|
1722
|
+
prompt: fullPrompt,
|
|
1723
|
+
repoPath,
|
|
1724
|
+
sandboxConfig,
|
|
1725
|
+
hooks,
|
|
1726
|
+
env: agentEnv,
|
|
1727
|
+
initTimeoutMs: this.config.initTimeoutMs,
|
|
1728
|
+
maxDurationMs: this.config.maxDurationMs,
|
|
1729
|
+
maxRetries: recoveryOpts?.maxRetries ?? this.config.maxRetries,
|
|
1730
|
+
backoffBaseMs: this.config.backoffBaseMs,
|
|
1731
|
+
...sessionPath ? { sessionPath } : {},
|
|
1732
|
+
...mcpServers ? { mcpServers } : {},
|
|
1733
|
+
...recoveryOpts?.nonRetryable ? { nonRetryable: recoveryOpts.nonRetryable } : {},
|
|
1734
|
+
...onAttempt ? { onAttempt } : {}
|
|
1735
|
+
});
|
|
1736
|
+
const parsed = parseOutput(sessionResult.output);
|
|
1737
|
+
const result = {
|
|
1738
|
+
status: "success",
|
|
1739
|
+
sessionId: sessionResult.sessionId,
|
|
1740
|
+
output: parsed.output ?? parsed.rawOutput,
|
|
1741
|
+
rawOutput: sessionResult.output,
|
|
1742
|
+
costUsd: sessionResult.costUsd,
|
|
1743
|
+
durationMs: sessionResult.durationMs,
|
|
1744
|
+
agent: agent.name,
|
|
1745
|
+
startedAt,
|
|
1746
|
+
completedAt: (/* @__PURE__ */ new Date()).toISOString(),
|
|
1747
|
+
attempt: 1,
|
|
1748
|
+
parsed
|
|
1749
|
+
};
|
|
1750
|
+
if (parsed.prUrl) {
|
|
1751
|
+
result.prUrl = parsed.prUrl;
|
|
1752
|
+
}
|
|
1753
|
+
if (parsed.prNumber !== void 0) {
|
|
1754
|
+
result.prNumber = parsed.prNumber;
|
|
1755
|
+
}
|
|
1756
|
+
return result;
|
|
1757
|
+
}
|
|
1758
|
+
};
|
|
1759
|
+
|
|
1760
|
+
// src/supervisor/memory/embedder.ts
|
|
1761
|
+
var extractorPromise = null;
|
|
1762
|
+
function getExtractor() {
|
|
1763
|
+
if (!extractorPromise) {
|
|
1764
|
+
extractorPromise = (async () => {
|
|
1765
|
+
const { pipeline } = await import("@huggingface/transformers");
|
|
1766
|
+
return pipeline("feature-extraction", "Xenova/all-MiniLM-L6-v2", {
|
|
1767
|
+
dtype: "fp32"
|
|
1768
|
+
});
|
|
1769
|
+
})();
|
|
1770
|
+
}
|
|
1771
|
+
return extractorPromise;
|
|
1772
|
+
}
|
|
1773
|
+
var LocalEmbedder = class {
|
|
1774
|
+
dimensions = 384;
|
|
1775
|
+
async embed(texts) {
|
|
1776
|
+
const extractor = await getExtractor();
|
|
1777
|
+
const output = await extractor(texts, { pooling: "mean", normalize: true });
|
|
1778
|
+
return output.tolist();
|
|
1779
|
+
}
|
|
1780
|
+
};
|
|
1781
|
+
|
|
1782
|
+
// src/supervisor/memory/entry.ts
|
|
1783
|
+
import { z as z3 } from "zod";
|
|
1784
|
+
var memoryTypeSchema = z3.enum([
|
|
1785
|
+
"fact",
|
|
1786
|
+
"procedure",
|
|
1787
|
+
"episode",
|
|
1788
|
+
"focus",
|
|
1789
|
+
"feedback",
|
|
1790
|
+
"task"
|
|
1791
|
+
]);
|
|
1792
|
+
var memoryEntrySchema = z3.object({
|
|
1793
|
+
id: z3.string(),
|
|
1794
|
+
type: memoryTypeSchema,
|
|
1795
|
+
scope: z3.string(),
|
|
1796
|
+
// "global" | repo path
|
|
1797
|
+
content: z3.string(),
|
|
1798
|
+
source: z3.string(),
|
|
1799
|
+
// "developer" | "reviewer" | "supervisor" | "user"
|
|
1800
|
+
tags: z3.array(z3.string()).default([]),
|
|
1801
|
+
// Lifecycle
|
|
1802
|
+
createdAt: z3.string(),
|
|
1803
|
+
lastAccessedAt: z3.string(),
|
|
1804
|
+
accessCount: z3.number().default(0),
|
|
1805
|
+
// Optional per-type fields
|
|
1806
|
+
expiresAt: z3.string().optional(),
|
|
1807
|
+
// focus TTL
|
|
1808
|
+
outcome: z3.string().optional(),
|
|
1809
|
+
// episode: success/failure/blocked
|
|
1810
|
+
runId: z3.string().optional(),
|
|
1811
|
+
category: z3.string().optional(),
|
|
1812
|
+
// feedback: reviewer issue category
|
|
1813
|
+
severity: z3.string().optional(),
|
|
1814
|
+
supersedes: z3.string().optional()
|
|
1815
|
+
// contradiction resolution
|
|
1816
|
+
});
|
|
1817
|
+
var memoryWriteInputSchema = z3.object({
|
|
1818
|
+
type: memoryTypeSchema,
|
|
1819
|
+
scope: z3.string().default("global"),
|
|
1820
|
+
content: z3.string(),
|
|
1821
|
+
source: z3.string().default("user"),
|
|
1822
|
+
tags: z3.array(z3.string()).default([]),
|
|
1823
|
+
expiresAt: z3.string().optional(),
|
|
1824
|
+
outcome: z3.string().optional(),
|
|
1825
|
+
runId: z3.string().optional(),
|
|
1826
|
+
category: z3.string().optional(),
|
|
1827
|
+
severity: z3.string().optional(),
|
|
1828
|
+
supersedes: z3.string().optional()
|
|
1829
|
+
});
|
|
1830
|
+
|
|
1831
|
+
// src/supervisor/memory/format.ts
|
|
1832
|
+
var TYPE_LABELS = {
|
|
1833
|
+
fact: "Fact",
|
|
1834
|
+
procedure: "How-to",
|
|
1835
|
+
episode: "Past run",
|
|
1836
|
+
focus: "Current focus",
|
|
1837
|
+
feedback: "Recurring issue"
|
|
1838
|
+
};
|
|
1839
|
+
var TYPE_ICONS = {
|
|
1840
|
+
fact: "\xB7",
|
|
1841
|
+
procedure: "\u2192",
|
|
1842
|
+
episode: "\u25C7",
|
|
1843
|
+
focus: "\u2605",
|
|
1844
|
+
feedback: "\u26A0"
|
|
1845
|
+
};
|
|
1846
|
+
function formatMemoriesForPrompt(memories) {
|
|
1847
|
+
if (memories.length === 0) return "";
|
|
1848
|
+
const grouped = /* @__PURE__ */ new Map();
|
|
1849
|
+
for (const m of memories) {
|
|
1850
|
+
const group = grouped.get(m.type) ?? [];
|
|
1851
|
+
group.push(m);
|
|
1852
|
+
grouped.set(m.type, group);
|
|
1853
|
+
}
|
|
1854
|
+
const sections = [];
|
|
1855
|
+
for (const [type, entries] of grouped) {
|
|
1856
|
+
const label = TYPE_LABELS[type] ?? type;
|
|
1857
|
+
const icon = TYPE_ICONS[type] ?? "\xB7";
|
|
1858
|
+
const lines = entries.map((e) => {
|
|
1859
|
+
const confidence = e.accessCount >= 3 ? "" : " (unconfirmed)";
|
|
1860
|
+
return `${icon} ${e.content}${confidence}`;
|
|
1861
|
+
});
|
|
1862
|
+
sections.push(`### ${label}s
|
|
1863
|
+
${lines.join("\n")}`);
|
|
1864
|
+
}
|
|
1865
|
+
return `## Known context for this repository
|
|
1866
|
+
|
|
1867
|
+
${sections.join("\n\n")}`;
|
|
1868
|
+
}
|
|
1869
|
+
|
|
1870
|
+
// src/supervisor/memory/store.ts
|
|
1871
|
+
import { randomUUID as randomUUID2 } from "crypto";
|
|
1872
|
+
import { existsSync as existsSync4, mkdirSync } from "fs";
|
|
1873
|
+
import { createRequire } from "module";
|
|
1874
|
+
import path9 from "path";
|
|
1875
|
+
var esmRequire = createRequire(import.meta.url);
|
|
1876
|
+
var MemoryStore = class {
|
|
1877
|
+
db;
|
|
1878
|
+
embedder;
|
|
1879
|
+
hasVec;
|
|
1880
|
+
constructor(dbPath, embedder) {
|
|
1881
|
+
const dir = path9.dirname(dbPath);
|
|
1882
|
+
if (!existsSync4(dir)) {
|
|
1883
|
+
mkdirSync(dir, { recursive: true });
|
|
1884
|
+
}
|
|
1885
|
+
const Database = esmRequire("better-sqlite3");
|
|
1886
|
+
this.db = new Database(dbPath);
|
|
1887
|
+
this.db.pragma("journal_mode = WAL");
|
|
1888
|
+
this.db.pragma("foreign_keys = ON");
|
|
1889
|
+
this.embedder = embedder ?? null;
|
|
1890
|
+
this.hasVec = false;
|
|
1891
|
+
this.initSchema();
|
|
1892
|
+
}
|
|
1893
|
+
// ─── Schema initialization ───────────────────────────
|
|
1894
|
+
initSchema() {
|
|
1895
|
+
this.db.exec(`
|
|
1896
|
+
CREATE TABLE IF NOT EXISTS memories (
|
|
1897
|
+
id TEXT PRIMARY KEY,
|
|
1898
|
+
type TEXT NOT NULL CHECK(type IN ('fact','procedure','episode','focus','feedback','task')),
|
|
1899
|
+
scope TEXT NOT NULL,
|
|
1900
|
+
content TEXT NOT NULL,
|
|
1901
|
+
source TEXT NOT NULL,
|
|
1902
|
+
tags TEXT DEFAULT '[]',
|
|
1903
|
+
created_at TEXT NOT NULL,
|
|
1904
|
+
last_accessed_at TEXT NOT NULL,
|
|
1905
|
+
access_count INTEGER DEFAULT 0,
|
|
1906
|
+
expires_at TEXT,
|
|
1907
|
+
outcome TEXT,
|
|
1908
|
+
run_id TEXT,
|
|
1909
|
+
category TEXT,
|
|
1910
|
+
severity TEXT,
|
|
1911
|
+
supersedes TEXT
|
|
1912
|
+
);
|
|
1913
|
+
|
|
1914
|
+
CREATE INDEX IF NOT EXISTS idx_mem_type_scope ON memories(type, scope);
|
|
1915
|
+
CREATE INDEX IF NOT EXISTS idx_mem_created ON memories(created_at);
|
|
1916
|
+
`);
|
|
1917
|
+
this.migrateCheckConstraint();
|
|
1918
|
+
this.db.exec(`
|
|
1919
|
+
CREATE VIRTUAL TABLE IF NOT EXISTS memories_fts USING fts5(
|
|
1920
|
+
content,
|
|
1921
|
+
content='memories',
|
|
1922
|
+
content_rowid='rowid',
|
|
1923
|
+
tokenize='porter'
|
|
1924
|
+
);
|
|
1925
|
+
`);
|
|
1926
|
+
this.db.exec(`
|
|
1927
|
+
CREATE TRIGGER IF NOT EXISTS memories_ai AFTER INSERT ON memories BEGIN
|
|
1928
|
+
INSERT INTO memories_fts(rowid, content) VALUES (new.rowid, new.content);
|
|
1929
|
+
END;
|
|
1930
|
+
CREATE TRIGGER IF NOT EXISTS memories_ad AFTER DELETE ON memories BEGIN
|
|
1931
|
+
INSERT INTO memories_fts(memories_fts, rowid, content) VALUES('delete', old.rowid, old.content);
|
|
1932
|
+
END;
|
|
1933
|
+
CREATE TRIGGER IF NOT EXISTS memories_au AFTER UPDATE ON memories BEGIN
|
|
1934
|
+
INSERT INTO memories_fts(memories_fts, rowid, content) VALUES('delete', old.rowid, old.content);
|
|
1935
|
+
INSERT INTO memories_fts(rowid, content) VALUES (new.rowid, new.content);
|
|
1936
|
+
END;
|
|
1937
|
+
`);
|
|
1938
|
+
if (this.embedder) {
|
|
1939
|
+
try {
|
|
1940
|
+
const sqliteVec = esmRequire("sqlite-vec");
|
|
1941
|
+
sqliteVec.load(this.db);
|
|
1942
|
+
this.db.exec(`
|
|
1943
|
+
CREATE VIRTUAL TABLE IF NOT EXISTS memories_vec USING vec0(
|
|
1944
|
+
memory_id TEXT,
|
|
1945
|
+
embedding float[${this.embedder.dimensions}]
|
|
1946
|
+
);
|
|
1947
|
+
`);
|
|
1948
|
+
this.hasVec = true;
|
|
1949
|
+
} catch {
|
|
1950
|
+
this.hasVec = false;
|
|
1951
|
+
}
|
|
1952
|
+
}
|
|
1953
|
+
}
|
|
1954
|
+
/**
|
|
1955
|
+
* Migrate existing tables whose CHECK constraint predates the 'task' type.
|
|
1956
|
+
* SQLite doesn't allow ALTER CHECK, so we recreate the table if needed.
|
|
1957
|
+
*/
|
|
1958
|
+
migrateCheckConstraint() {
|
|
1959
|
+
const tableInfo = this.db.prepare("SELECT sql FROM sqlite_master WHERE type='table' AND name='memories'").get();
|
|
1960
|
+
if (!tableInfo || tableInfo.sql.includes("'task'")) return;
|
|
1961
|
+
this.db.exec(`
|
|
1962
|
+
ALTER TABLE memories RENAME TO memories_old;
|
|
1963
|
+
|
|
1964
|
+
CREATE TABLE memories (
|
|
1965
|
+
id TEXT PRIMARY KEY,
|
|
1966
|
+
type TEXT NOT NULL CHECK(type IN ('fact','procedure','episode','focus','feedback','task')),
|
|
1967
|
+
scope TEXT NOT NULL,
|
|
1968
|
+
content TEXT NOT NULL,
|
|
1969
|
+
source TEXT NOT NULL,
|
|
1970
|
+
tags TEXT DEFAULT '[]',
|
|
1971
|
+
created_at TEXT NOT NULL,
|
|
1972
|
+
last_accessed_at TEXT NOT NULL,
|
|
1973
|
+
access_count INTEGER DEFAULT 0,
|
|
1974
|
+
expires_at TEXT,
|
|
1975
|
+
outcome TEXT,
|
|
1976
|
+
run_id TEXT,
|
|
1977
|
+
category TEXT,
|
|
1978
|
+
severity TEXT,
|
|
1979
|
+
supersedes TEXT
|
|
1980
|
+
);
|
|
1981
|
+
|
|
1982
|
+
INSERT INTO memories SELECT * FROM memories_old;
|
|
1983
|
+
DROP TABLE memories_old;
|
|
1984
|
+
`);
|
|
1985
|
+
}
|
|
1986
|
+
// ─── Write ───────────────────────────────────────────
|
|
1987
|
+
async write(input) {
|
|
1988
|
+
const id = `mem_${randomUUID2().slice(0, 12)}`;
|
|
1989
|
+
const now = (/* @__PURE__ */ new Date()).toISOString();
|
|
1990
|
+
this.db.prepare(
|
|
1991
|
+
`INSERT INTO memories (id, type, scope, content, source, tags, created_at, last_accessed_at, access_count, expires_at, outcome, run_id, category, severity, supersedes)
|
|
1992
|
+
VALUES (?, ?, ?, ?, ?, ?, ?, ?, 0, ?, ?, ?, ?, ?, ?)`
|
|
1993
|
+
).run(
|
|
1994
|
+
id,
|
|
1995
|
+
input.type,
|
|
1996
|
+
input.scope ?? "global",
|
|
1997
|
+
input.content,
|
|
1998
|
+
input.source ?? "user",
|
|
1999
|
+
JSON.stringify(input.tags ?? []),
|
|
2000
|
+
now,
|
|
2001
|
+
now,
|
|
2002
|
+
input.expiresAt ?? null,
|
|
2003
|
+
input.outcome ?? null,
|
|
2004
|
+
input.runId ?? null,
|
|
2005
|
+
input.category ?? null,
|
|
2006
|
+
input.severity ?? null,
|
|
2007
|
+
input.supersedes ?? null
|
|
2008
|
+
);
|
|
2009
|
+
if (this.embedder && this.hasVec) {
|
|
2010
|
+
try {
|
|
2011
|
+
const [vector] = await this.embedder.embed([input.content]);
|
|
2012
|
+
const rowid = this.db.prepare("SELECT rowid FROM memories WHERE id = ?").get(id);
|
|
2013
|
+
if (rowid && vector) {
|
|
2014
|
+
this.db.prepare("INSERT INTO memories_vec (rowid, memory_id, embedding) VALUES (?, ?, ?)").run(rowid.rowid, id, new Float32Array(vector));
|
|
2015
|
+
}
|
|
2016
|
+
} catch {
|
|
2017
|
+
}
|
|
2018
|
+
}
|
|
2019
|
+
return id;
|
|
2020
|
+
}
|
|
2021
|
+
// ─── Update ──────────────────────────────────────────
|
|
2022
|
+
update(id, content) {
|
|
2023
|
+
this.db.prepare("UPDATE memories SET content = ? WHERE id = ?").run(content, id);
|
|
2024
|
+
if (this.hasVec) {
|
|
2025
|
+
const row = this.db.prepare("SELECT rowid FROM memories WHERE id = ?").get(id);
|
|
2026
|
+
if (row) {
|
|
2027
|
+
this.db.prepare("DELETE FROM memories_vec WHERE rowid = ?").run(row.rowid);
|
|
2028
|
+
}
|
|
2029
|
+
}
|
|
2030
|
+
}
|
|
2031
|
+
// ─── Update fields ───────────────────────────────────
|
|
2032
|
+
updateFields(id, fields) {
|
|
2033
|
+
const sets = [];
|
|
2034
|
+
const params = [];
|
|
2035
|
+
if (fields.content !== void 0) {
|
|
2036
|
+
sets.push("content = ?");
|
|
2037
|
+
params.push(fields.content);
|
|
2038
|
+
}
|
|
2039
|
+
if (fields.outcome !== void 0) {
|
|
2040
|
+
sets.push("outcome = ?");
|
|
2041
|
+
params.push(fields.outcome);
|
|
2042
|
+
}
|
|
2043
|
+
if (fields.runId !== void 0) {
|
|
2044
|
+
sets.push("run_id = ?");
|
|
2045
|
+
params.push(fields.runId);
|
|
2046
|
+
}
|
|
2047
|
+
if (sets.length === 0) return;
|
|
2048
|
+
params.push(id);
|
|
2049
|
+
this.db.prepare(`UPDATE memories SET ${sets.join(", ")} WHERE id = ?`).run(...params);
|
|
2050
|
+
}
|
|
2051
|
+
// ─── Forget ──────────────────────────────────────────
|
|
2052
|
+
forget(id) {
|
|
2053
|
+
const row = this.db.prepare("SELECT rowid FROM memories WHERE id = ?").get(id);
|
|
2054
|
+
if (row && this.hasVec) {
|
|
2055
|
+
this.db.prepare("DELETE FROM memories_vec WHERE rowid = ?").run(row.rowid);
|
|
2056
|
+
}
|
|
2057
|
+
this.db.prepare("DELETE FROM memories WHERE id = ?").run(id);
|
|
2058
|
+
}
|
|
2059
|
+
// ─── Query (synchronous — structured filters) ───────
|
|
2060
|
+
query(opts = {}) {
|
|
2061
|
+
const conditions = [];
|
|
2062
|
+
const params = [];
|
|
2063
|
+
if (opts.scope) {
|
|
2064
|
+
conditions.push("(scope = ? OR scope = 'global')");
|
|
2065
|
+
params.push(opts.scope);
|
|
2066
|
+
}
|
|
2067
|
+
if (opts.types && opts.types.length > 0) {
|
|
2068
|
+
const placeholders = opts.types.map(() => "?").join(",");
|
|
2069
|
+
conditions.push(`type IN (${placeholders})`);
|
|
2070
|
+
params.push(...opts.types);
|
|
2071
|
+
}
|
|
2072
|
+
if (opts.since) {
|
|
2073
|
+
conditions.push("created_at > ?");
|
|
2074
|
+
params.push(opts.since);
|
|
2075
|
+
}
|
|
2076
|
+
const where = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
|
|
2077
|
+
let orderBy;
|
|
2078
|
+
switch (opts.sortBy) {
|
|
2079
|
+
case "accessCount":
|
|
2080
|
+
orderBy = "ORDER BY access_count DESC";
|
|
2081
|
+
break;
|
|
2082
|
+
case "createdAt":
|
|
2083
|
+
orderBy = "ORDER BY created_at DESC";
|
|
2084
|
+
break;
|
|
2085
|
+
case "relevance":
|
|
2086
|
+
default:
|
|
2087
|
+
orderBy = "ORDER BY (access_count * MAX(0, 1.0 - (julianday('now') - julianday(last_accessed_at)) / 60.0)) DESC";
|
|
2088
|
+
break;
|
|
2089
|
+
}
|
|
2090
|
+
const limit = opts.limit ? `LIMIT ${opts.limit}` : "LIMIT 50";
|
|
2091
|
+
const rows = this.db.prepare(`SELECT * FROM memories ${where} ${orderBy} ${limit}`).all(...params);
|
|
2092
|
+
return rows.map(rowToEntry);
|
|
2093
|
+
}
|
|
2094
|
+
// ─── Search (async — semantic or FTS) ────────────────
// Free-text search over memories. Tries vector similarity first (when an
// embedder and the sqlite-vec virtual table are both available), then
// falls back to FTS5 keyword matching, and finally to a plain structured
// query() when the FTS query is empty or FTS errors out.
async search(text, opts = {}) {
  if (this.embedder && this.hasVec) {
    try {
      const [queryVec] = await this.embedder.embed([text]);
      const limit2 = opts.limit ?? 20;
      // Over-fetch 3x the requested limit so scope/type post-filtering
      // still leaves enough candidates to fill the result set.
      const candidates = this.db.prepare(
        `SELECT m.*, v.distance
        FROM memories_vec v
        JOIN memories m ON m.rowid = v.rowid
        WHERE v.embedding MATCH ?
        ORDER BY v.distance
        LIMIT ?`
      ).all(new Float32Array(queryVec), limit2 * 3);
      const filtered = candidates.filter((row) => {
        if (opts.scope && row.scope !== opts.scope && row.scope !== "global") return false;
        if (opts.types && opts.types.length > 0 && !opts.types.includes(row.type))
          return false;
        return true;
      });
      return filtered.slice(0, limit2).map((row) => rowToEntry(row));
    } catch {
      // Best-effort: any embedding/vec failure falls through to FTS below.
    }
  }
  const limit = opts.limit ?? 20;
  // Quote each whitespace-separated word and OR them together so FTS5
  // treats them as literal terms rather than query syntax.
  const ftsQuery = text.split(/\s+/).filter(Boolean).map((w) => `"${w}"`).join(" OR ");
  if (!ftsQuery) return this.query(opts);
  try {
    const rows = this.db.prepare(
      `SELECT m.*, rank
      FROM memories_fts fts
      JOIN memories m ON m.rowid = fts.rowid
      WHERE memories_fts MATCH ?
      ORDER BY rank
      LIMIT ?`
    ).all(ftsQuery, limit);
    const filtered = rows.filter((row) => {
      if (opts.scope && row.scope !== opts.scope && row.scope !== "global") return false;
      if (opts.types && opts.types.length > 0 && !opts.types.includes(row.type))
        return false;
      return true;
    });
    return filtered.map(rowToEntry);
  } catch {
    // FTS table missing or query unparsable — degrade to structured query.
    return this.query(opts);
  }
}
|
|
2141
|
+
// ─── Lifecycle ───────────────────────────────────────
|
|
2142
|
+
markAccessed(ids) {
|
|
2143
|
+
if (ids.length === 0) return;
|
|
2144
|
+
const now = (/* @__PURE__ */ new Date()).toISOString();
|
|
2145
|
+
const stmt = this.db.prepare(
|
|
2146
|
+
"UPDATE memories SET access_count = access_count + 1, last_accessed_at = ? WHERE id = ?"
|
|
2147
|
+
);
|
|
2148
|
+
const transaction = this.db.transaction(() => {
|
|
2149
|
+
for (const id of ids) {
|
|
2150
|
+
stmt.run(now, id);
|
|
2151
|
+
}
|
|
2152
|
+
});
|
|
2153
|
+
transaction();
|
|
2154
|
+
}
|
|
2155
|
+
decay(maxAgeDays = 30, minAccessCount = 3) {
|
|
2156
|
+
const staleResult = this.db.prepare(
|
|
2157
|
+
`DELETE FROM memories
|
|
2158
|
+
WHERE access_count < ?
|
|
2159
|
+
AND julianday('now') - julianday(last_accessed_at) > ?
|
|
2160
|
+
AND type NOT IN ('focus', 'task')`
|
|
2161
|
+
).run(minAccessCount, maxAgeDays);
|
|
2162
|
+
const taskResult = this.db.prepare(
|
|
2163
|
+
`DELETE FROM memories
|
|
2164
|
+
WHERE type = 'task'
|
|
2165
|
+
AND outcome = 'done'
|
|
2166
|
+
AND julianday('now') - julianday(last_accessed_at) > 7`
|
|
2167
|
+
).run();
|
|
2168
|
+
return staleResult.changes + taskResult.changes;
|
|
2169
|
+
}
|
|
2170
|
+
expireEphemeral() {
|
|
2171
|
+
const result = this.db.prepare(
|
|
2172
|
+
`DELETE FROM memories
|
|
2173
|
+
WHERE type = 'focus'
|
|
2174
|
+
AND expires_at IS NOT NULL
|
|
2175
|
+
AND expires_at < ?`
|
|
2176
|
+
).run((/* @__PURE__ */ new Date()).toISOString());
|
|
2177
|
+
return result.changes;
|
|
2178
|
+
}
|
|
2179
|
+
// ─── Stats ───────────────────────────────────────────
|
|
2180
|
+
stats() {
|
|
2181
|
+
const total = this.db.prepare("SELECT COUNT(*) as count FROM memories").get().count;
|
|
2182
|
+
const byTypeRows = this.db.prepare("SELECT type, COUNT(*) as count FROM memories GROUP BY type").all();
|
|
2183
|
+
const byType = {};
|
|
2184
|
+
for (const row of byTypeRows) {
|
|
2185
|
+
byType[row.type] = row.count;
|
|
2186
|
+
}
|
|
2187
|
+
const byScopeRows = this.db.prepare("SELECT scope, COUNT(*) as count FROM memories GROUP BY scope").all();
|
|
2188
|
+
const byScope = {};
|
|
2189
|
+
for (const row of byScopeRows) {
|
|
2190
|
+
byScope[row.scope] = row.count;
|
|
2191
|
+
}
|
|
2192
|
+
return { total, byType, byScope };
|
|
2193
|
+
}
|
|
2194
|
+
// ─── Cleanup ─────────────────────────────────────────
// Close the underlying SQLite handle; the store is unusable afterwards.
close() {
  this.db.close();
}
|
|
2198
|
+
};
|
|
2199
|
+
// Convert a raw `memories` table row (snake_case SQLite columns) into a
// camelCase memory-entry object.
//
// Nullable columns (expires_at, outcome, run_id, category, severity,
// supersedes) map to `undefined` when NULL. The `tags` column is stored
// as a JSON string; malformed JSON — or valid JSON that is not actually
// an array — yields an empty tags list.
function rowToEntry(row) {
  let tags = [];
  try {
    const parsed = JSON.parse(row.tags);
    // Fix: JSON.parse succeeds on non-array input too (e.g. "42" or
    // '"a"'), which previously leaked a non-array value into `tags`.
    // Only accept a real array so the field has a consistent shape.
    if (Array.isArray(parsed)) {
      tags = parsed;
    }
  } catch {
    tags = [];
  }
  return {
    id: row.id,
    type: row.type,
    scope: row.scope,
    content: row.content,
    source: row.source,
    tags,
    createdAt: row.created_at,
    lastAccessedAt: row.last_accessed_at,
    accessCount: row.access_count,
    expiresAt: row.expires_at ?? void 0,
    outcome: row.outcome ?? void 0,
    runId: row.run_id ?? void 0,
    category: row.category ?? void 0,
    severity: row.severity ?? void 0,
    supersedes: row.supersedes ?? void 0
  };
}
|
|
2224
|
+
|
|
2225
|
+
// src/workflows/registry.ts
|
|
2226
|
+
import { existsSync as existsSync5 } from "fs";
|
|
2227
|
+
import { readdir as readdir4 } from "fs/promises";
|
|
2228
|
+
import path10 from "path";
|
|
2229
|
+
|
|
2230
|
+
// src/workflows/loader.ts
|
|
2231
|
+
import { readFile as readFile6 } from "fs/promises";
|
|
2232
|
+
import { parse } from "yaml";
|
|
2233
|
+
import { z as z4 } from "zod";
|
|
2234
|
+
// Schema for a regular agent-executing workflow step. `type` defaults to
// "step" so plain step entries may omit it (gates must declare theirs —
// see workflowGateDefSchema).
var workflowStepDefSchema = z4.object({
  type: z4.literal("step").optional().default("step"),
  agent: z4.string(),
  dependsOn: z4.array(z4.string()).optional(),
  prompt: z4.string().optional(),
  sandbox: z4.enum(["writable", "readonly"]).optional(),
  maxTurns: z4.number().int().positive().optional(),
  mcpServers: z4.array(z4.string()).optional(),
  // Per-step overrides for retry behavior.
  recovery: z4.object({
    maxRetries: z4.number().int().nonnegative().optional(),
    nonRetryable: z4.array(z4.string()).optional()
  }).optional(),
  condition: z4.string().optional()
});
|
|
2248
|
+
// Schema for an approval-gate step within a workflow. Unlike regular
// steps, the "gate" type must be stated explicitly.
var workflowGateDefSchema = z4.object({
  type: z4.literal("gate"),
  dependsOn: z4.array(z4.string()).optional(),
  description: z4.string(),
  timeout: z4.string().optional(),
  autoApprove: z4.boolean().optional()
});
|
|
2255
|
+
// Top-level shape of a workflow YAML file. Step values stay `unknown`
// here because each entry is validated individually against the step or
// gate schema (see parseStepEntry).
var workflowHeaderSchema = z4.object({
  name: z4.string().min(1),
  description: z4.string().optional(),
  steps: z4.record(z4.string(), z4.unknown())
});
|
|
2260
|
+
function parseStepEntry(stepName, stepValue) {
|
|
2261
|
+
const obj = stepValue;
|
|
2262
|
+
const schema = obj.type === "gate" ? workflowGateDefSchema : workflowStepDefSchema;
|
|
1428
2263
|
const result = schema.safeParse(stepValue);
|
|
1429
2264
|
if (result.success) {
|
|
1430
2265
|
return { step: result.data, errors: [] };
|
|
@@ -1460,7 +2295,7 @@ ${errors.join("\n")}`);
|
|
|
1460
2295
|
return steps;
|
|
1461
2296
|
}
|
|
1462
2297
|
async function loadWorkflow(filePath) {
|
|
1463
|
-
const content = await
|
|
2298
|
+
const content = await readFile6(filePath, "utf-8");
|
|
1464
2299
|
const raw = parse(content);
|
|
1465
2300
|
const headerResult = workflowHeaderSchema.safeParse(raw);
|
|
1466
2301
|
if (!headerResult.success) {
|
|
@@ -1498,11 +2333,11 @@ var WorkflowRegistry = class {
|
|
|
1498
2333
|
return this.workflows.has(name);
|
|
1499
2334
|
}
|
|
1500
2335
|
async loadFromDir(dir) {
|
|
1501
|
-
if (!
|
|
1502
|
-
const files = await
|
|
2336
|
+
if (!existsSync5(dir)) return;
|
|
2337
|
+
const files = await readdir4(dir);
|
|
1503
2338
|
for (const file of files) {
|
|
1504
2339
|
if (!file.endsWith(".yml") && !file.endsWith(".yaml")) continue;
|
|
1505
|
-
const filePath =
|
|
2340
|
+
const filePath = path10.join(dir, file);
|
|
1506
2341
|
const workflow = await loadWorkflow(filePath);
|
|
1507
2342
|
this.workflows.set(workflow.name, workflow);
|
|
1508
2343
|
}
|
|
@@ -1513,7 +2348,6 @@ var WorkflowRegistry = class {
|
|
|
1513
2348
|
var MAX_PROMPT_SIZE = 100 * 1024;
|
|
1514
2349
|
var MAX_METADATA_DEPTH = 5;
|
|
1515
2350
|
var SHUTDOWN_TIMEOUT_MS = 5 * 60 * 1e3;
|
|
1516
|
-
var WORKTREES_DIR = ".neo/worktrees";
|
|
1517
2351
|
var textEncoder = new TextEncoder();
|
|
1518
2352
|
var Orchestrator = class extends NeoEventEmitter {
|
|
1519
2353
|
config;
|
|
@@ -1525,17 +2359,19 @@ var Orchestrator = class extends NeoEventEmitter {
|
|
|
1525
2359
|
idempotencyCache = /* @__PURE__ */ new Map();
|
|
1526
2360
|
abortControllers = /* @__PURE__ */ new Map();
|
|
1527
2361
|
repoIndex = /* @__PURE__ */ new Map();
|
|
1528
|
-
|
|
2362
|
+
runStore = new RunStore();
|
|
1529
2363
|
journalDir;
|
|
1530
2364
|
builtInWorkflowDir;
|
|
1531
2365
|
customWorkflowDir;
|
|
1532
2366
|
costJournal = null;
|
|
1533
2367
|
eventJournal = null;
|
|
1534
2368
|
webhookDispatcher = null;
|
|
2369
|
+
memoryStore = null;
|
|
1535
2370
|
_paused = false;
|
|
1536
2371
|
_costToday = 0;
|
|
1537
2372
|
_startedAt = 0;
|
|
1538
2373
|
_drainResolve = null;
|
|
2374
|
+
skipOrphanRecovery;
|
|
1539
2375
|
constructor(config, options = {}) {
|
|
1540
2376
|
super();
|
|
1541
2377
|
this.config = config;
|
|
@@ -1543,8 +2379,9 @@ var Orchestrator = class extends NeoEventEmitter {
|
|
|
1543
2379
|
this.journalDir = options.journalDir ?? getJournalsDir();
|
|
1544
2380
|
this.builtInWorkflowDir = options.builtInWorkflowDir;
|
|
1545
2381
|
this.customWorkflowDir = options.customWorkflowDir;
|
|
2382
|
+
this.skipOrphanRecovery = options.skipOrphanRecovery ?? false;
|
|
1546
2383
|
for (const repo of config.repos) {
|
|
1547
|
-
const resolvedPath =
|
|
2384
|
+
const resolvedPath = path11.resolve(repo.path);
|
|
1548
2385
|
const normalizedRepo = { ...repo, path: resolvedPath };
|
|
1549
2386
|
this.repoIndex.set(resolvedPath, normalizedRepo);
|
|
1550
2387
|
}
|
|
@@ -1642,8 +2479,15 @@ var Orchestrator = class extends NeoEventEmitter {
|
|
|
1642
2479
|
this._startedAt = Date.now();
|
|
1643
2480
|
this.costJournal = new CostJournal({ dir: this.journalDir });
|
|
1644
2481
|
this.eventJournal = new EventJournal({ dir: this.journalDir });
|
|
1645
|
-
|
|
1646
|
-
|
|
2482
|
+
const supervisorWebhooks = await this.discoverSupervisorWebhooks();
|
|
2483
|
+
const allWebhooks = [...this.config.webhooks, ...supervisorWebhooks];
|
|
2484
|
+
if (allWebhooks.length > 0) {
|
|
2485
|
+
this.webhookDispatcher = new WebhookDispatcher(allWebhooks);
|
|
2486
|
+
}
|
|
2487
|
+
if (supervisorWebhooks.length > 0) {
|
|
2488
|
+
console.log(
|
|
2489
|
+
`[neo] Discovered ${supervisorWebhooks.length} supervisor webhook(s): ${supervisorWebhooks.map((w) => w.url).join(", ")}`
|
|
2490
|
+
);
|
|
1647
2491
|
}
|
|
1648
2492
|
this._costToday = await this.costJournal.getDayTotal();
|
|
1649
2493
|
if (this.builtInWorkflowDir) {
|
|
@@ -1653,12 +2497,10 @@ var Orchestrator = class extends NeoEventEmitter {
|
|
|
1653
2497
|
this.registerWorkflow(workflow);
|
|
1654
2498
|
}
|
|
1655
2499
|
}
|
|
1656
|
-
|
|
1657
|
-
|
|
1658
|
-
const worktreeBase = path9.join(repo.path, WORKTREES_DIR);
|
|
1659
|
-
await cleanupOrphanedWorktrees(worktreeBase).catch(() => {
|
|
1660
|
-
});
|
|
2500
|
+
if (!this.skipOrphanRecovery) {
|
|
2501
|
+
await this.recoverOrphanedRuns();
|
|
1661
2502
|
}
|
|
2503
|
+
await mkdir6(this.config.sessions.dir, { recursive: true });
|
|
1662
2504
|
}
|
|
1663
2505
|
async shutdown() {
|
|
1664
2506
|
this._paused = true;
|
|
@@ -1682,6 +2524,9 @@ var Orchestrator = class extends NeoEventEmitter {
|
|
|
1682
2524
|
type: "orchestrator:shutdown",
|
|
1683
2525
|
timestamp: (/* @__PURE__ */ new Date()).toISOString()
|
|
1684
2526
|
});
|
|
2527
|
+
if (this.webhookDispatcher) {
|
|
2528
|
+
await this.webhookDispatcher.flush();
|
|
2529
|
+
}
|
|
1685
2530
|
}
|
|
1686
2531
|
// ─── Emit override (journal events) ───────────────────
|
|
1687
2532
|
emit(event) {
|
|
@@ -1721,8 +2566,8 @@ var Orchestrator = class extends NeoEventEmitter {
|
|
|
1721
2566
|
return idempotencyKey;
|
|
1722
2567
|
}
|
|
1723
2568
|
buildDispatchContext(input) {
|
|
1724
|
-
const runId = input.runId ??
|
|
1725
|
-
const sessionId =
|
|
2569
|
+
const runId = input.runId ?? randomUUID3();
|
|
2570
|
+
const sessionId = randomUUID3();
|
|
1726
2571
|
const workflow = this.workflows.get(input.workflow);
|
|
1727
2572
|
if (!workflow) {
|
|
1728
2573
|
const available = [...this.workflows.keys()].join(", ") || "none";
|
|
@@ -1758,28 +2603,39 @@ var Orchestrator = class extends NeoEventEmitter {
|
|
|
1758
2603
|
}
|
|
1759
2604
|
async executeStep(ctx) {
|
|
1760
2605
|
const { input, runId, sessionId, startedAt, agent, repoConfig, activeSession } = ctx;
|
|
1761
|
-
let
|
|
2606
|
+
let sessionPath;
|
|
2607
|
+
await this.persistRun({
|
|
2608
|
+
version: 1,
|
|
2609
|
+
runId,
|
|
2610
|
+
workflow: input.workflow,
|
|
2611
|
+
repo: input.repo,
|
|
2612
|
+
prompt: input.prompt,
|
|
2613
|
+
pid: process.pid,
|
|
2614
|
+
status: "running",
|
|
2615
|
+
steps: {},
|
|
2616
|
+
createdAt: activeSession.startedAt,
|
|
2617
|
+
updatedAt: (/* @__PURE__ */ new Date()).toISOString(),
|
|
2618
|
+
metadata: input.metadata
|
|
2619
|
+
});
|
|
1762
2620
|
try {
|
|
1763
|
-
|
|
1764
|
-
|
|
1765
|
-
|
|
1766
|
-
|
|
1767
|
-
|
|
1768
|
-
|
|
1769
|
-
|
|
1770
|
-
|
|
1771
|
-
|
|
1772
|
-
|
|
1773
|
-
|
|
1774
|
-
}
|
|
1775
|
-
const stepResult = await this.runAgentSession(ctx, worktreePath);
|
|
2621
|
+
const branchName = input.branch || repoConfig.defaultBranch;
|
|
2622
|
+
const sessionDir = path11.join(this.config.sessions.dir, runId);
|
|
2623
|
+
const info = await createSessionClone({
|
|
2624
|
+
repoPath: input.repo,
|
|
2625
|
+
branch: branchName,
|
|
2626
|
+
baseBranch: repoConfig.defaultBranch,
|
|
2627
|
+
sessionDir
|
|
2628
|
+
});
|
|
2629
|
+
sessionPath = info.path;
|
|
2630
|
+
activeSession.sessionPath = sessionPath;
|
|
2631
|
+
const stepResult = await this.runAgentSession(ctx, sessionPath);
|
|
1776
2632
|
this.emitCostEvents(sessionId, stepResult.costUsd, ctx);
|
|
1777
2633
|
this.emitSessionComplete(ctx, stepResult);
|
|
1778
2634
|
return stepResult;
|
|
1779
2635
|
} catch (error) {
|
|
1780
2636
|
const errorMsg = error instanceof Error ? error.message : String(error);
|
|
1781
2637
|
this.emitSessionFail(ctx, errorMsg);
|
|
1782
|
-
|
|
2638
|
+
const failResult = {
|
|
1783
2639
|
status: "failure",
|
|
1784
2640
|
sessionId,
|
|
1785
2641
|
costUsd: 0,
|
|
@@ -1790,9 +2646,22 @@ var Orchestrator = class extends NeoEventEmitter {
|
|
|
1790
2646
|
error: errorMsg,
|
|
1791
2647
|
attempt: 1
|
|
1792
2648
|
};
|
|
2649
|
+
try {
|
|
2650
|
+
const store = this.getMemoryStore();
|
|
2651
|
+
await store.write({
|
|
2652
|
+
type: "episode",
|
|
2653
|
+
scope: input.repo,
|
|
2654
|
+
content: `Run ${runId.slice(0, 8)} (${agent.name}): failed${failResult.error ? ` \u2014 ${failResult.error.slice(0, 150)}` : ""}`,
|
|
2655
|
+
source: agent.name,
|
|
2656
|
+
outcome: "failure",
|
|
2657
|
+
runId
|
|
2658
|
+
});
|
|
2659
|
+
} catch {
|
|
2660
|
+
}
|
|
2661
|
+
return failResult;
|
|
1793
2662
|
} finally {
|
|
1794
|
-
if (
|
|
1795
|
-
await this.
|
|
2663
|
+
if (sessionPath) {
|
|
2664
|
+
await this.finalizeSession(sessionPath, ctx);
|
|
1796
2665
|
}
|
|
1797
2666
|
this.semaphore.release(sessionId);
|
|
1798
2667
|
this._activeSessions.delete(sessionId);
|
|
@@ -1804,35 +2673,26 @@ var Orchestrator = class extends NeoEventEmitter {
|
|
|
1804
2673
|
}
|
|
1805
2674
|
}
|
|
1806
2675
|
/**
|
|
1807
|
-
* Push the branch, then remove the
|
|
2676
|
+
* Push the branch (writable only), then remove the session clone.
|
|
1808
2677
|
* Runs in `finally` so it executes on both success and failure.
|
|
1809
2678
|
*/
|
|
1810
|
-
async
|
|
1811
|
-
|
|
1812
|
-
|
|
1813
|
-
|
|
1814
|
-
|
|
1815
|
-
|
|
1816
|
-
|
|
1817
|
-
|
|
2679
|
+
async finalizeSession(sessionPath, ctx) {
|
|
2680
|
+
if (ctx.agent.sandbox === "writable") {
|
|
2681
|
+
const branch = ctx.input.branch;
|
|
2682
|
+
const remote = ctx.repoConfig.pushRemote ?? "origin";
|
|
2683
|
+
try {
|
|
2684
|
+
await pushSessionBranch(sessionPath, branch, remote).catch(() => {
|
|
2685
|
+
});
|
|
2686
|
+
} catch {
|
|
2687
|
+
}
|
|
1818
2688
|
}
|
|
1819
2689
|
try {
|
|
1820
|
-
await
|
|
2690
|
+
await removeSessionClone(sessionPath);
|
|
1821
2691
|
} catch {
|
|
1822
2692
|
}
|
|
1823
2693
|
}
|
|
1824
|
-
async runAgentSession(ctx,
|
|
1825
|
-
const { input, runId, sessionId, stepName, stepDef, agent, activeSession } = ctx;
|
|
1826
|
-
const sandboxConfig = buildSandboxConfig(agent, worktreePath);
|
|
1827
|
-
const chain = buildMiddlewareChain(this.userMiddleware);
|
|
1828
|
-
const middlewareContext = this.buildMiddlewareContext(
|
|
1829
|
-
runId,
|
|
1830
|
-
input.workflow,
|
|
1831
|
-
stepName,
|
|
1832
|
-
agent.name,
|
|
1833
|
-
input.repo
|
|
1834
|
-
);
|
|
1835
|
-
const hooks = buildSDKHooks(chain, middlewareContext, this.userMiddleware);
|
|
2694
|
+
async runAgentSession(ctx, sessionPath) {
|
|
2695
|
+
const { input, runId, sessionId, stepName, stepDef, agent, repoConfig, activeSession } = ctx;
|
|
1836
2696
|
this.emit({
|
|
1837
2697
|
type: "session:start",
|
|
1838
2698
|
sessionId,
|
|
@@ -1844,69 +2704,101 @@ var Orchestrator = class extends NeoEventEmitter {
|
|
|
1844
2704
|
metadata: input.metadata,
|
|
1845
2705
|
timestamp: (/* @__PURE__ */ new Date()).toISOString()
|
|
1846
2706
|
});
|
|
2707
|
+
const executor = new SessionExecutor(
|
|
2708
|
+
{
|
|
2709
|
+
initTimeoutMs: this.config.sessions.initTimeoutMs,
|
|
2710
|
+
maxDurationMs: this.config.sessions.maxDurationMs,
|
|
2711
|
+
maxRetries: this.config.recovery.maxRetries,
|
|
2712
|
+
backoffBaseMs: this.config.recovery.backoffBaseMs
|
|
2713
|
+
},
|
|
2714
|
+
(key) => {
|
|
2715
|
+
if (key === "costToday") return this._costToday;
|
|
2716
|
+
if (key === "budgetCapUsd") return this.config.budget.dailyCapUsd;
|
|
2717
|
+
return void 0;
|
|
2718
|
+
}
|
|
2719
|
+
);
|
|
2720
|
+
const strategy = input.gitStrategy ?? repoConfig.gitStrategy ?? "branch";
|
|
2721
|
+
const mcpServers = this.resolveMcpServers(stepDef, agent);
|
|
2722
|
+
const memoryContext = this.loadMemoryContext(input.repo);
|
|
1847
2723
|
const recoveryOpts = stepDef.recovery;
|
|
1848
|
-
const
|
|
1849
|
-
|
|
1850
|
-
|
|
1851
|
-
|
|
1852
|
-
|
|
1853
|
-
|
|
1854
|
-
|
|
1855
|
-
|
|
1856
|
-
|
|
1857
|
-
|
|
1858
|
-
|
|
1859
|
-
|
|
1860
|
-
|
|
1861
|
-
|
|
1862
|
-
|
|
1863
|
-
|
|
1864
|
-
|
|
1865
|
-
|
|
1866
|
-
|
|
1867
|
-
|
|
1868
|
-
|
|
1869
|
-
|
|
1870
|
-
|
|
1871
|
-
|
|
1872
|
-
|
|
2724
|
+
const result = await executor.execute(
|
|
2725
|
+
{
|
|
2726
|
+
runId,
|
|
2727
|
+
sessionId,
|
|
2728
|
+
agent,
|
|
2729
|
+
stepDef,
|
|
2730
|
+
repoConfig,
|
|
2731
|
+
repoPath: input.repo,
|
|
2732
|
+
prompt: input.prompt,
|
|
2733
|
+
branch: input.branch,
|
|
2734
|
+
gitStrategy: strategy,
|
|
2735
|
+
sessionPath,
|
|
2736
|
+
metadata: input.metadata,
|
|
2737
|
+
startedAt: activeSession.startedAt
|
|
2738
|
+
},
|
|
2739
|
+
{
|
|
2740
|
+
middleware: this.userMiddleware,
|
|
2741
|
+
mcpServers,
|
|
2742
|
+
memoryContext,
|
|
2743
|
+
onAttempt: (attempt, strategy2) => {
|
|
2744
|
+
if (attempt > 1) {
|
|
2745
|
+
this.emit({
|
|
2746
|
+
type: "session:fail",
|
|
2747
|
+
sessionId,
|
|
2748
|
+
runId,
|
|
2749
|
+
error: `Retrying with strategy: ${strategy2}`,
|
|
2750
|
+
attempt: attempt - 1,
|
|
2751
|
+
maxRetries: recoveryOpts?.maxRetries ?? this.config.recovery.maxRetries,
|
|
2752
|
+
willRetry: true,
|
|
2753
|
+
metadata: input.metadata,
|
|
2754
|
+
timestamp: (/* @__PURE__ */ new Date()).toISOString()
|
|
2755
|
+
});
|
|
2756
|
+
}
|
|
1873
2757
|
}
|
|
1874
2758
|
}
|
|
1875
|
-
|
|
1876
|
-
|
|
1877
|
-
|
|
1878
|
-
status
|
|
1879
|
-
|
|
1880
|
-
|
|
1881
|
-
|
|
1882
|
-
|
|
1883
|
-
|
|
1884
|
-
|
|
1885
|
-
|
|
1886
|
-
|
|
1887
|
-
|
|
1888
|
-
}
|
|
2759
|
+
);
|
|
2760
|
+
try {
|
|
2761
|
+
const store = this.getMemoryStore();
|
|
2762
|
+
const isSuccess = result.status === "success";
|
|
2763
|
+
await store.write({
|
|
2764
|
+
type: "episode",
|
|
2765
|
+
scope: input.repo,
|
|
2766
|
+
content: `Run ${runId.slice(0, 8)} (${agent.name}): ${isSuccess ? "completed" : "failed"}${result.error ? ` \u2014 ${result.error.slice(0, 150)}` : ""}`,
|
|
2767
|
+
source: agent.name,
|
|
2768
|
+
outcome: isSuccess ? "success" : "failure",
|
|
2769
|
+
runId
|
|
2770
|
+
});
|
|
2771
|
+
} catch {
|
|
2772
|
+
}
|
|
2773
|
+
return result;
|
|
1889
2774
|
}
|
|
1890
2775
|
async finalizeDispatch(ctx, stepResult, idempotencyKey) {
|
|
1891
|
-
const { input, runId, stepName,
|
|
2776
|
+
const { input, runId, stepName, activeSession } = ctx;
|
|
1892
2777
|
const taskResult = {
|
|
1893
2778
|
runId,
|
|
1894
2779
|
workflow: input.workflow,
|
|
1895
2780
|
repo: input.repo,
|
|
1896
2781
|
status: stepResult.status === "success" ? "success" : "failure",
|
|
1897
2782
|
steps: { [stepName]: stepResult },
|
|
1898
|
-
branch: stepResult.status === "success" && activeSession.
|
|
2783
|
+
branch: stepResult.status === "success" && activeSession.sessionPath ? input.branch : void 0,
|
|
1899
2784
|
costUsd: stepResult.costUsd,
|
|
1900
2785
|
durationMs: Date.now() - ctx.startedAt,
|
|
1901
2786
|
timestamp: (/* @__PURE__ */ new Date()).toISOString(),
|
|
1902
2787
|
metadata: input.metadata
|
|
1903
2788
|
};
|
|
2789
|
+
if (stepResult.prUrl) {
|
|
2790
|
+
taskResult.prUrl = stepResult.prUrl;
|
|
2791
|
+
}
|
|
2792
|
+
if (stepResult.prNumber !== void 0) {
|
|
2793
|
+
taskResult.prNumber = stepResult.prNumber;
|
|
2794
|
+
}
|
|
1904
2795
|
await this.persistRun({
|
|
1905
2796
|
version: 1,
|
|
1906
2797
|
runId,
|
|
1907
2798
|
workflow: input.workflow,
|
|
1908
2799
|
repo: input.repo,
|
|
1909
2800
|
prompt: input.prompt,
|
|
2801
|
+
pid: process.pid,
|
|
1910
2802
|
branch: taskResult.branch,
|
|
1911
2803
|
status: taskResult.status === "success" ? "completed" : "failed",
|
|
1912
2804
|
steps: taskResult.steps,
|
|
@@ -1923,6 +2815,30 @@ var Orchestrator = class extends NeoEventEmitter {
|
|
|
1923
2815
|
}
|
|
1924
2816
|
return taskResult;
|
|
1925
2817
|
}
|
|
2818
|
+
// ─── Private: Memory injection ──────────────────────────
// Lazily open (and cache) the MemoryStore backed by the supervisor's
// memory.sqlite under the supervisors directory.
getMemoryStore() {
  if (!this.memoryStore) {
    const supervisorDir = path11.join(getSupervisorsDir(), "supervisor");
    this.memoryStore = new MemoryStore(path11.join(supervisorDir, "memory.sqlite"));
  }
  return this.memoryStore;
}
|
|
2826
|
+
// Pull up to 25 relevance-sorted fact/procedure/feedback memories for a
// repo, mark them accessed, and format them for prompt injection.
// Best-effort: any store failure yields `undefined` (no memory context)
// rather than blocking the run.
loadMemoryContext(repoPath) {
  try {
    const store = this.getMemoryStore();
    const memories = store.query({
      scope: repoPath,
      types: ["fact", "procedure", "feedback"],
      limit: 25,
      sortBy: "relevance"
    });
    if (memories.length === 0) return void 0;
    store.markAccessed(memories.map((m) => m.id));
    return formatMemoriesForPrompt(memories);
  } catch {
    return void 0;
  }
}
|
|
1926
2842
|
// ─── Private: Event helpers ────────────────────────────
|
|
1927
2843
|
emitCostEvents(sessionId, sessionCost, ctx) {
|
|
1928
2844
|
this._costToday += sessionCost;
|
|
@@ -1997,7 +2913,7 @@ var Orchestrator = class extends NeoEventEmitter {
|
|
|
1997
2913
|
`Validation error: prompt exceeds maximum size of ${String(MAX_PROMPT_SIZE)} bytes`
|
|
1998
2914
|
);
|
|
1999
2915
|
}
|
|
2000
|
-
if (!
|
|
2916
|
+
if (!existsSync6(input.repo)) {
|
|
2001
2917
|
throw new Error(`Validation error: repo path does not exist: ${input.repo}`);
|
|
2002
2918
|
}
|
|
2003
2919
|
if (!this.workflows.has(input.workflow)) {
|
|
@@ -2070,32 +2986,14 @@ var Orchestrator = class extends NeoEventEmitter {
|
|
|
2070
2986
|
return agent;
|
|
2071
2987
|
}
|
|
2072
2988
|
resolveRepo(repoPath) {
|
|
2073
|
-
const repo = this.repoIndex.get(
|
|
2989
|
+
const repo = this.repoIndex.get(path11.resolve(repoPath));
|
|
2074
2990
|
if (repo) return repo;
|
|
2075
2991
|
return {
|
|
2076
2992
|
path: repoPath,
|
|
2077
2993
|
defaultBranch: "main",
|
|
2078
2994
|
branchPrefix: "feat",
|
|
2079
2995
|
pushRemote: "origin",
|
|
2080
|
-
|
|
2081
|
-
};
|
|
2082
|
-
}
|
|
2083
|
-
buildMiddlewareContext(runId, workflow, step, agent, repo) {
|
|
2084
|
-
const store = /* @__PURE__ */ new Map();
|
|
2085
|
-
return {
|
|
2086
|
-
runId,
|
|
2087
|
-
workflow,
|
|
2088
|
-
step,
|
|
2089
|
-
agent,
|
|
2090
|
-
repo,
|
|
2091
|
-
get: ((key) => {
|
|
2092
|
-
if (key === "costToday") return this._costToday;
|
|
2093
|
-
if (key === "budgetCapUsd") return this.config.budget.dailyCapUsd;
|
|
2094
|
-
return store.get(key);
|
|
2095
|
-
}),
|
|
2096
|
-
set: ((key, value) => {
|
|
2097
|
-
store.set(key, value);
|
|
2098
|
-
})
|
|
2996
|
+
gitStrategy: "branch"
|
|
2099
2997
|
};
|
|
2100
2998
|
}
|
|
2101
2999
|
computeBudgetRemainingPct() {
|
|
@@ -2103,48 +3001,76 @@ var Orchestrator = class extends NeoEventEmitter {
|
|
|
2103
3001
|
if (cap <= 0) return 0;
|
|
2104
3002
|
return Math.max(0, (cap - this._costToday) / cap * 100);
|
|
2105
3003
|
}
|
|
2106
|
-
// ─── Private:
|
|
2107
|
-
|
|
2108
|
-
|
|
2109
|
-
|
|
2110
|
-
|
|
2111
|
-
|
|
2112
|
-
|
|
2113
|
-
|
|
3004
|
+
// ─── Private: MCP server resolution ────────────────────
|
|
3005
|
+
resolveMcpServers(stepDef, agent) {
|
|
3006
|
+
const configServers = this.config.mcpServers;
|
|
3007
|
+
if (!configServers) return void 0;
|
|
3008
|
+
const names = /* @__PURE__ */ new Set();
|
|
3009
|
+
if (stepDef.mcpServers) {
|
|
3010
|
+
for (const name of stepDef.mcpServers) names.add(name);
|
|
3011
|
+
}
|
|
3012
|
+
if (agent.definition.mcpServers) {
|
|
3013
|
+
for (const name of agent.definition.mcpServers) names.add(name);
|
|
3014
|
+
}
|
|
3015
|
+
if (names.size === 0) return void 0;
|
|
3016
|
+
const resolved = {};
|
|
3017
|
+
for (const name of names) {
|
|
3018
|
+
const serverConfig = configServers[name];
|
|
3019
|
+
if (serverConfig) {
|
|
3020
|
+
resolved[name] = serverConfig;
|
|
2114
3021
|
}
|
|
2115
|
-
const filePath = path9.join(runsDir, `${run.runId}.json`);
|
|
2116
|
-
await writeFile2(filePath, JSON.stringify(run, null, 2), "utf-8");
|
|
2117
|
-
} catch {
|
|
2118
3022
|
}
|
|
2119
|
-
|
|
2120
|
-
|
|
2121
|
-
|
|
2122
|
-
|
|
3023
|
+
return Object.keys(resolved).length > 0 ? resolved : void 0;
|
|
3024
|
+
}
|
|
3025
|
+
// ─── Private: Supervisor discovery ─────────────────────
|
|
3026
|
+
/** Discover running supervisor daemons and return webhook configs for their endpoints. */
|
|
3027
|
+
async discoverSupervisorWebhooks() {
|
|
3028
|
+
const { readdir: readdir6 } = await import("fs/promises");
|
|
3029
|
+
const supervisorsDir = getSupervisorsDir();
|
|
3030
|
+
if (!existsSync6(supervisorsDir)) return [];
|
|
3031
|
+
const webhooks = [];
|
|
2123
3032
|
try {
|
|
2124
|
-
const entries = await
|
|
2125
|
-
const jsonFiles = [];
|
|
3033
|
+
const entries = await readdir6(supervisorsDir, { withFileTypes: true });
|
|
2126
3034
|
for (const entry of entries) {
|
|
2127
|
-
if (entry.isDirectory())
|
|
2128
|
-
|
|
2129
|
-
const
|
|
2130
|
-
|
|
2131
|
-
|
|
2132
|
-
|
|
2133
|
-
|
|
2134
|
-
|
|
2135
|
-
|
|
2136
|
-
|
|
2137
|
-
|
|
2138
|
-
|
|
2139
|
-
|
|
2140
|
-
|
|
2141
|
-
run.status = "failed";
|
|
2142
|
-
run.updatedAt = (/* @__PURE__ */ new Date()).toISOString();
|
|
2143
|
-
await writeFile2(filePath, JSON.stringify(run, null, 2), "utf-8");
|
|
3035
|
+
if (!entry.isDirectory()) continue;
|
|
3036
|
+
try {
|
|
3037
|
+
const statePath = path11.join(supervisorsDir, entry.name, "state.json");
|
|
3038
|
+
const raw = await readFile7(statePath, "utf-8");
|
|
3039
|
+
const state = JSON.parse(raw);
|
|
3040
|
+
if (state.status !== "running" || !state.port) continue;
|
|
3041
|
+
if (state.pid && !isProcessAlive(state.pid)) continue;
|
|
3042
|
+
webhooks.push({
|
|
3043
|
+
url: `http://localhost:${String(state.port)}/webhook`,
|
|
3044
|
+
events: ["session:complete", "session:fail", "budget:alert"],
|
|
3045
|
+
secret: this.config.supervisor.secret,
|
|
3046
|
+
timeoutMs: 5e3
|
|
3047
|
+
});
|
|
3048
|
+
} catch {
|
|
2144
3049
|
}
|
|
2145
3050
|
}
|
|
2146
3051
|
} catch {
|
|
2147
3052
|
}
|
|
3053
|
+
return webhooks;
|
|
3054
|
+
}
|
|
3055
|
+
// ─── Private: Run persistence ──────────────────────────
// Thin delegation wrapper: persist a run record through the RunStore.
async persistRun(run) {
  await this.runStore.persistRun(run);
}
|
|
3059
|
+
// Fetch orphaned runs from the RunStore (presumably runs whose recorded
// process died before completion — confirm semantics in RunStore) and
// emit a terminal session:fail event for each so listeners are notified.
async recoverOrphanedRuns() {
  const orphanedRuns = await this.runStore.recoverOrphanedRuns();
  for (const run of orphanedRuns) {
    this.emit({
      type: "session:fail",
      // No distinct session id survives a crash; reuse the run id.
      sessionId: run.runId,
      runId: run.runId,
      error: "Orphaned run: process died without completing",
      attempt: 1,
      maxRetries: this.config.recovery.maxRetries,
      willRetry: false,
      metadata: run.metadata,
      timestamp: (/* @__PURE__ */ new Date()).toISOString()
    });
  }
}
|
|
2148
3074
|
}
|
|
2149
3075
|
};
|
|
2150
3076
|
function isPlainObject(value) {
|
|
@@ -2161,40 +3087,45 @@ function objectDepth(obj, current = 0) {
|
|
|
2161
3087
|
}
|
|
2162
3088
|
|
|
2163
3089
|
// src/supervisor/schemas.ts
|
|
2164
|
-
import { z as
|
|
2165
|
-
var
|
|
2166
|
-
|
|
2167
|
-
|
|
2168
|
-
sessionId:
|
|
2169
|
-
port:
|
|
2170
|
-
cwd:
|
|
2171
|
-
startedAt:
|
|
2172
|
-
lastHeartbeat:
|
|
2173
|
-
heartbeatCount:
|
|
2174
|
-
totalCostUsd:
|
|
2175
|
-
todayCostUsd:
|
|
2176
|
-
costResetDate:
|
|
2177
|
-
idleSkipCount:
|
|
2178
|
-
|
|
3090
|
+
import { z as z5 } from "zod";
|
|
3091
|
+
// Why the supervisor daemon last woke up.
var wakeReasonSchema = z5.enum(["events", "timer", "active_runs", "forced"]);
// Persisted supervisor daemon state (state.json; read back elsewhere,
// e.g. when discovering supervisor webhook endpoints). Counters carry
// defaults so older state files still parse.
var supervisorDaemonStateSchema = z5.object({
  pid: z5.number(),
  sessionId: z5.string(),
  port: z5.number(),
  cwd: z5.string(),
  startedAt: z5.string(),
  lastHeartbeat: z5.string().optional(),
  heartbeatCount: z5.number().default(0),
  totalCostUsd: z5.number().default(0),
  todayCostUsd: z5.number().default(0),
  costResetDate: z5.string().optional(),
  idleSkipCount: z5.number().default(0),
  activeWorkSkipCount: z5.number().default(0),
  status: z5.enum(["running", "draining", "stopped"]).default("running"),
  lastConsolidationHeartbeat: z5.number().default(0),
  lastCompactionHeartbeat: z5.number().default(0),
  lastConsolidationTimestamp: z5.string().optional(),
  wakeReason: wakeReasonSchema.optional()
});
|
|
2180
|
-
var webhookIncomingEventSchema =
|
|
2181
|
-
id:
|
|
2182
|
-
source:
|
|
2183
|
-
event:
|
|
2184
|
-
payload:
|
|
2185
|
-
receivedAt:
|
|
2186
|
-
processedAt:
|
|
3111
|
+
var webhookIncomingEventSchema = z5.object({
|
|
3112
|
+
id: z5.string().optional(),
|
|
3113
|
+
source: z5.string().optional(),
|
|
3114
|
+
event: z5.string().optional(),
|
|
3115
|
+
payload: z5.record(z5.string(), z5.unknown()).optional(),
|
|
3116
|
+
receivedAt: z5.string(),
|
|
3117
|
+
processedAt: z5.string().optional()
|
|
2187
3118
|
});
|
|
2188
|
-
var inboxMessageSchema =
|
|
2189
|
-
id:
|
|
2190
|
-
from:
|
|
2191
|
-
text:
|
|
2192
|
-
timestamp:
|
|
2193
|
-
processedAt:
|
|
3119
|
+
var inboxMessageSchema = z5.object({
|
|
3120
|
+
id: z5.string(),
|
|
3121
|
+
from: z5.enum(["tui", "api", "external", "agent"]),
|
|
3122
|
+
text: z5.string(),
|
|
3123
|
+
timestamp: z5.string(),
|
|
3124
|
+
processedAt: z5.string().optional()
|
|
2194
3125
|
});
|
|
2195
|
-
var activityEntrySchema =
|
|
2196
|
-
id:
|
|
2197
|
-
type:
|
|
3126
|
+
var activityEntrySchema = z5.object({
|
|
3127
|
+
id: z5.string(),
|
|
3128
|
+
type: z5.enum([
|
|
2198
3129
|
"heartbeat",
|
|
2199
3130
|
"decision",
|
|
2200
3131
|
"action",
|
|
@@ -2206,15 +3137,27 @@ var activityEntrySchema = z4.object({
|
|
|
2206
3137
|
"dispatch",
|
|
2207
3138
|
"tool_use"
|
|
2208
3139
|
]),
|
|
2209
|
-
summary:
|
|
2210
|
-
detail:
|
|
2211
|
-
timestamp:
|
|
3140
|
+
summary: z5.string(),
|
|
3141
|
+
detail: z5.unknown().optional(),
|
|
3142
|
+
timestamp: z5.string()
|
|
3143
|
+
});
|
|
3144
|
+
var logBufferEntrySchema = z5.object({
|
|
3145
|
+
id: z5.string(),
|
|
3146
|
+
type: z5.enum(["progress", "action", "decision", "blocker", "milestone", "discovery"]),
|
|
3147
|
+
message: z5.string(),
|
|
3148
|
+
agent: z5.string().optional(),
|
|
3149
|
+
runId: z5.string().optional(),
|
|
3150
|
+
repo: z5.string().optional(),
|
|
3151
|
+
target: z5.enum(["memory", "knowledge", "digest"]),
|
|
3152
|
+
timestamp: z5.string(),
|
|
3153
|
+
consolidatedAt: z5.string().optional()
|
|
2212
3154
|
});
|
|
3155
|
+
var internalEventKindSchema = z5.enum(["consolidation_timer", "active_run_check"]);
|
|
2213
3156
|
|
|
2214
3157
|
// src/supervisor/activity-log.ts
|
|
2215
|
-
import { randomUUID as
|
|
2216
|
-
import { appendFile as appendFile4, readFile as
|
|
2217
|
-
import
|
|
3158
|
+
import { randomUUID as randomUUID4 } from "crypto";
|
|
3159
|
+
import { appendFile as appendFile4, readFile as readFile8, rename, stat } from "fs/promises";
|
|
3160
|
+
import path12 from "path";
|
|
2218
3161
|
var ACTIVITY_FILE = "activity.jsonl";
|
|
2219
3162
|
var MAX_SIZE_BYTES = 10 * 1024 * 1024;
|
|
2220
3163
|
var ActivityLog = class {
|
|
@@ -2222,7 +3165,7 @@ var ActivityLog = class {
|
|
|
2222
3165
|
dir;
|
|
2223
3166
|
constructor(dir) {
|
|
2224
3167
|
this.dir = dir;
|
|
2225
|
-
this.filePath =
|
|
3168
|
+
this.filePath = path12.join(dir, ACTIVITY_FILE);
|
|
2226
3169
|
}
|
|
2227
3170
|
/**
|
|
2228
3171
|
* Append a structured entry to the activity log.
|
|
@@ -2239,7 +3182,7 @@ var ActivityLog = class {
|
|
|
2239
3182
|
*/
|
|
2240
3183
|
async log(type, summary, detail) {
|
|
2241
3184
|
await this.append({
|
|
2242
|
-
id:
|
|
3185
|
+
id: randomUUID4(),
|
|
2243
3186
|
type,
|
|
2244
3187
|
summary,
|
|
2245
3188
|
detail,
|
|
@@ -2252,7 +3195,7 @@ var ActivityLog = class {
|
|
|
2252
3195
|
async tail(n) {
|
|
2253
3196
|
let content;
|
|
2254
3197
|
try {
|
|
2255
|
-
content = await
|
|
3198
|
+
content = await readFile8(this.filePath, "utf-8");
|
|
2256
3199
|
} catch {
|
|
2257
3200
|
return [];
|
|
2258
3201
|
}
|
|
@@ -2272,7 +3215,7 @@ var ActivityLog = class {
|
|
|
2272
3215
|
const stats = await stat(this.filePath);
|
|
2273
3216
|
if (stats.size > MAX_SIZE_BYTES) {
|
|
2274
3217
|
const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
|
|
2275
|
-
const rotatedPath =
|
|
3218
|
+
const rotatedPath = path12.join(this.dir, `activity-${timestamp}.jsonl`);
|
|
2276
3219
|
await rename(this.filePath, rotatedPath);
|
|
2277
3220
|
}
|
|
2278
3221
|
} catch {
|
|
@@ -2281,15 +3224,15 @@ var ActivityLog = class {
|
|
|
2281
3224
|
};
|
|
2282
3225
|
|
|
2283
3226
|
// src/supervisor/daemon.ts
|
|
2284
|
-
import { randomUUID as
|
|
2285
|
-
import { existsSync as
|
|
2286
|
-
import { mkdir as
|
|
3227
|
+
import { randomUUID as randomUUID6 } from "crypto";
|
|
3228
|
+
import { existsSync as existsSync8 } from "fs";
|
|
3229
|
+
import { mkdir as mkdir7, readFile as readFile12, rm as rm2, writeFile as writeFile6 } from "fs/promises";
|
|
2287
3230
|
import { homedir as homedir3 } from "os";
|
|
2288
|
-
import
|
|
3231
|
+
import path15 from "path";
|
|
2289
3232
|
|
|
2290
3233
|
// src/supervisor/event-queue.ts
|
|
2291
3234
|
import { watch } from "fs";
|
|
2292
|
-
import { readFile as
|
|
3235
|
+
import { readFile as readFile9, writeFile as writeFile3 } from "fs/promises";
|
|
2293
3236
|
var EventQueue = class {
|
|
2294
3237
|
queue = [];
|
|
2295
3238
|
seenIds = /* @__PURE__ */ new Set();
|
|
@@ -2373,7 +3316,14 @@ var EventQueue = class {
|
|
|
2373
3316
|
* Start watching inbox.jsonl and events.jsonl for new entries.
|
|
2374
3317
|
* New lines are parsed and pushed into the queue.
|
|
2375
3318
|
*/
|
|
2376
|
-
startWatching(inboxPath, eventsPath) {
|
|
3319
|
+
async startWatching(inboxPath, eventsPath) {
|
|
3320
|
+
for (const p of [inboxPath, eventsPath]) {
|
|
3321
|
+
try {
|
|
3322
|
+
await writeFile3(p, "", { flag: "a" });
|
|
3323
|
+
} catch (err) {
|
|
3324
|
+
console.error(`[EventQueue] Failed to ensure file exists: ${p}`, err);
|
|
3325
|
+
}
|
|
3326
|
+
}
|
|
2377
3327
|
this.watchJsonlFile(inboxPath, "message");
|
|
2378
3328
|
this.watchJsonlFile(eventsPath, "webhook");
|
|
2379
3329
|
}
|
|
@@ -2421,18 +3371,20 @@ var EventQueue = class {
|
|
|
2421
3371
|
watchJsonlFile(filePath, kind) {
|
|
2422
3372
|
try {
|
|
2423
3373
|
const watcher = watch(filePath, () => {
|
|
2424
|
-
this.readNewLines(filePath, kind).catch(() => {
|
|
3374
|
+
this.readNewLines(filePath, kind).catch((err) => {
|
|
3375
|
+
console.error(`[EventQueue] Failed to read new lines from ${filePath}:`, err);
|
|
2425
3376
|
});
|
|
2426
3377
|
});
|
|
2427
3378
|
this.watchers.push(watcher);
|
|
2428
|
-
} catch {
|
|
3379
|
+
} catch (err) {
|
|
3380
|
+
console.error(`[EventQueue] Cannot watch file (may not exist yet): ${filePath}`, err);
|
|
2429
3381
|
}
|
|
2430
3382
|
}
|
|
2431
3383
|
async readNewLines(filePath, kind) {
|
|
2432
3384
|
let content;
|
|
2433
3385
|
try {
|
|
2434
|
-
content = await
|
|
2435
|
-
} catch {
|
|
3386
|
+
content = await readFile9(filePath, "utf-8");
|
|
3387
|
+
} catch (_err) {
|
|
2436
3388
|
return;
|
|
2437
3389
|
}
|
|
2438
3390
|
const offset = this.fileOffsets.get(filePath) ?? 0;
|
|
@@ -2449,15 +3401,15 @@ var EventQueue = class {
|
|
|
2449
3401
|
} else {
|
|
2450
3402
|
this.push({ kind: "message", data: parsed });
|
|
2451
3403
|
}
|
|
2452
|
-
} catch {
|
|
3404
|
+
} catch (_err) {
|
|
2453
3405
|
}
|
|
2454
3406
|
}
|
|
2455
3407
|
}
|
|
2456
3408
|
async replayFile(filePath, kind) {
|
|
2457
3409
|
let content;
|
|
2458
3410
|
try {
|
|
2459
|
-
content = await
|
|
2460
|
-
} catch {
|
|
3411
|
+
content = await readFile9(filePath, "utf-8");
|
|
3412
|
+
} catch (_err) {
|
|
2461
3413
|
return;
|
|
2462
3414
|
}
|
|
2463
3415
|
this.fileOffsets.set(filePath, content.length);
|
|
@@ -2473,7 +3425,7 @@ var EventQueue = class {
|
|
|
2473
3425
|
this.push({ kind: "message", data: parsed });
|
|
2474
3426
|
}
|
|
2475
3427
|
unprocessed.push(line);
|
|
2476
|
-
} catch {
|
|
3428
|
+
} catch (_err) {
|
|
2477
3429
|
}
|
|
2478
3430
|
}
|
|
2479
3431
|
}
|
|
@@ -2492,7 +3444,7 @@ var EventQueue = class {
|
|
|
2492
3444
|
}
|
|
2493
3445
|
async markInFile(filePath, matchTimestamp, processedAt) {
|
|
2494
3446
|
try {
|
|
2495
|
-
const content = await
|
|
3447
|
+
const content = await readFile9(filePath, "utf-8");
|
|
2496
3448
|
const lines = content.split("\n");
|
|
2497
3449
|
let changed = false;
|
|
2498
3450
|
const updated = lines.map((line) => {
|
|
@@ -2504,7 +3456,7 @@ var EventQueue = class {
|
|
|
2504
3456
|
changed = true;
|
|
2505
3457
|
return JSON.stringify(parsed);
|
|
2506
3458
|
}
|
|
2507
|
-
} catch {
|
|
3459
|
+
} catch (_err) {
|
|
2508
3460
|
}
|
|
2509
3461
|
return line;
|
|
2510
3462
|
});
|
|
@@ -2512,269 +3464,689 @@ var EventQueue = class {
|
|
|
2512
3464
|
await writeFile3(filePath, updated.join("\n"), "utf-8");
|
|
2513
3465
|
this.fileOffsets.set(filePath, updated.join("\n").length);
|
|
2514
3466
|
}
|
|
2515
|
-
} catch {
|
|
3467
|
+
} catch (err) {
|
|
3468
|
+
console.error(`[EventQueue] Failed to mark events as processed in ${filePath}:`, err);
|
|
2516
3469
|
}
|
|
2517
3470
|
}
|
|
2518
3471
|
};
|
|
2519
3472
|
|
|
2520
3473
|
// src/supervisor/heartbeat.ts
|
|
2521
|
-
import { randomUUID as
|
|
2522
|
-
import {
|
|
3474
|
+
import { randomUUID as randomUUID5 } from "crypto";
|
|
3475
|
+
import { existsSync as existsSync7 } from "fs";
|
|
3476
|
+
import { readdir as readdir5, readFile as readFile11, writeFile as writeFile5 } from "fs/promises";
|
|
2523
3477
|
import { homedir as homedir2 } from "os";
|
|
2524
|
-
import
|
|
2525
|
-
import { fileURLToPath } from "url";
|
|
3478
|
+
import path14 from "path";
|
|
2526
3479
|
|
|
2527
|
-
// src/supervisor/
|
|
2528
|
-
import { appendFile as appendFile5, readFile as
|
|
2529
|
-
import
|
|
2530
|
-
var
|
|
2531
|
-
var
|
|
2532
|
-
var
|
|
2533
|
-
|
|
2534
|
-
|
|
2535
|
-
var MAX_DECISIONS = 10;
|
|
2536
|
-
function parseStructuredMemory(raw) {
|
|
2537
|
-
if (!raw.trim()) {
|
|
2538
|
-
return emptyMemory();
|
|
2539
|
-
}
|
|
2540
|
-
try {
|
|
2541
|
-
const parsed = JSON.parse(raw);
|
|
2542
|
-
return {
|
|
2543
|
-
activeWork: parsed.activeWork ?? [],
|
|
2544
|
-
blockers: parsed.blockers ?? [],
|
|
2545
|
-
repoNotes: parsed.repoNotes ?? {},
|
|
2546
|
-
recentDecisions: parsed.recentDecisions ?? [],
|
|
2547
|
-
trackerSync: parsed.trackerSync ?? {},
|
|
2548
|
-
notes: parsed.notes ?? ""
|
|
2549
|
-
};
|
|
2550
|
-
} catch {
|
|
2551
|
-
return { ...emptyMemory(), notes: raw };
|
|
2552
|
-
}
|
|
3480
|
+
// src/supervisor/log-buffer.ts
|
|
3481
|
+
import { appendFile as appendFile5, readFile as readFile10, stat as stat2, writeFile as writeFile4 } from "fs/promises";
|
|
3482
|
+
import path13 from "path";
|
|
3483
|
+
var LOG_BUFFER_FILE = "log-buffer.jsonl";
|
|
3484
|
+
var MAX_FILE_BYTES = 1024 * 1024;
|
|
3485
|
+
var COMPACTION_AGE_MS = 24 * 60 * 60 * 1e3;
|
|
3486
|
+
function bufferPath(dir) {
|
|
3487
|
+
return path13.join(dir, LOG_BUFFER_FILE);
|
|
2553
3488
|
}
|
|
2554
|
-
function
|
|
2555
|
-
|
|
2556
|
-
|
|
2557
|
-
|
|
2558
|
-
|
|
2559
|
-
|
|
2560
|
-
|
|
2561
|
-
|
|
2562
|
-
}
|
|
3489
|
+
function parseLines(content) {
|
|
3490
|
+
const entries = [];
|
|
3491
|
+
const lines = content.trim().split("\n").filter(Boolean);
|
|
3492
|
+
for (const line of lines) {
|
|
3493
|
+
try {
|
|
3494
|
+
entries.push(JSON.parse(line));
|
|
3495
|
+
} catch {
|
|
3496
|
+
}
|
|
3497
|
+
}
|
|
3498
|
+
return entries;
|
|
2563
3499
|
}
|
|
2564
|
-
async function
|
|
3500
|
+
async function readLogBuffer(dir) {
|
|
2565
3501
|
try {
|
|
2566
|
-
|
|
3502
|
+
const content = await readFile10(bufferPath(dir), "utf-8");
|
|
3503
|
+
return parseLines(content);
|
|
2567
3504
|
} catch {
|
|
2568
|
-
return
|
|
3505
|
+
return [];
|
|
2569
3506
|
}
|
|
2570
3507
|
}
|
|
2571
|
-
async function
|
|
2572
|
-
await
|
|
3508
|
+
async function readUnconsolidated(dir) {
|
|
3509
|
+
const entries = await readLogBuffer(dir);
|
|
3510
|
+
return entries.filter((e) => !e.consolidatedAt);
|
|
2573
3511
|
}
|
|
2574
|
-
async function
|
|
3512
|
+
async function markConsolidated(dir, ids) {
|
|
3513
|
+
const filePath = bufferPath(dir);
|
|
3514
|
+
let content;
|
|
2575
3515
|
try {
|
|
2576
|
-
|
|
3516
|
+
content = await readFile10(filePath, "utf-8");
|
|
2577
3517
|
} catch {
|
|
3518
|
+
return;
|
|
2578
3519
|
}
|
|
2579
|
-
|
|
2580
|
-
|
|
2581
|
-
|
|
2582
|
-
|
|
2583
|
-
|
|
2584
|
-
|
|
3520
|
+
const idSet = new Set(ids);
|
|
3521
|
+
const now = (/* @__PURE__ */ new Date()).toISOString();
|
|
3522
|
+
const lines = content.trim().split("\n").filter(Boolean);
|
|
3523
|
+
const updated = [];
|
|
3524
|
+
for (const line of lines) {
|
|
3525
|
+
try {
|
|
3526
|
+
const entry = JSON.parse(line);
|
|
3527
|
+
if (idSet.has(entry.id) && !entry.consolidatedAt) {
|
|
3528
|
+
entry.consolidatedAt = now;
|
|
3529
|
+
}
|
|
3530
|
+
updated.push(JSON.stringify(entry));
|
|
3531
|
+
} catch {
|
|
3532
|
+
updated.push(line);
|
|
2585
3533
|
}
|
|
2586
|
-
} catch {
|
|
2587
3534
|
}
|
|
2588
|
-
|
|
2589
|
-
|
|
2590
|
-
async function saveMemory(dir, content) {
|
|
2591
|
-
const compacted = await compactMemory(dir, content);
|
|
2592
|
-
await writeFile4(path11.join(dir, MEMORY_FILE), compacted, "utf-8");
|
|
3535
|
+
await writeFile4(filePath, `${updated.join("\n")}
|
|
3536
|
+
`, "utf-8");
|
|
2593
3537
|
}
|
|
2594
|
-
function
|
|
2595
|
-
const
|
|
2596
|
-
|
|
2597
|
-
|
|
2598
|
-
|
|
3538
|
+
async function compactLogBuffer(dir) {
|
|
3539
|
+
const filePath = bufferPath(dir);
|
|
3540
|
+
let content;
|
|
3541
|
+
try {
|
|
3542
|
+
content = await readFile10(filePath, "utf-8");
|
|
3543
|
+
} catch {
|
|
3544
|
+
return;
|
|
3545
|
+
}
|
|
3546
|
+
const now = Date.now();
|
|
3547
|
+
const lines = content.trim().split("\n").filter(Boolean);
|
|
3548
|
+
const kept = [];
|
|
3549
|
+
for (const line of lines) {
|
|
2599
3550
|
try {
|
|
2600
|
-
JSON.parse(
|
|
2601
|
-
|
|
3551
|
+
const entry = JSON.parse(line);
|
|
3552
|
+
if (entry.consolidatedAt) {
|
|
3553
|
+
const consolidatedTime = new Date(entry.consolidatedAt).getTime();
|
|
3554
|
+
if (now - consolidatedTime > COMPACTION_AGE_MS) {
|
|
3555
|
+
continue;
|
|
3556
|
+
}
|
|
3557
|
+
}
|
|
3558
|
+
kept.push(JSON.stringify(entry));
|
|
2602
3559
|
} catch {
|
|
2603
3560
|
}
|
|
2604
3561
|
}
|
|
2605
|
-
|
|
2606
|
-
|
|
2607
|
-
|
|
2608
|
-
|
|
2609
|
-
|
|
2610
|
-
|
|
2611
|
-
}
|
|
2612
|
-
|
|
2613
|
-
const sizeKB = Buffer.byteLength(content, "utf-8") / 1024;
|
|
2614
|
-
return { ok: sizeKB <= MAX_SIZE_KB, sizeKB: Math.round(sizeKB * 10) / 10 };
|
|
3562
|
+
let result = `${kept.join("\n")}
|
|
3563
|
+
`;
|
|
3564
|
+
while (Buffer.byteLength(result, "utf-8") > MAX_FILE_BYTES && kept.length > 0) {
|
|
3565
|
+
kept.shift();
|
|
3566
|
+
result = `${kept.join("\n")}
|
|
3567
|
+
`;
|
|
3568
|
+
}
|
|
3569
|
+
await writeFile4(filePath, result, "utf-8");
|
|
2615
3570
|
}
|
|
2616
|
-
async function
|
|
2617
|
-
if (!content.startsWith("{")) return content;
|
|
2618
|
-
let parsed;
|
|
3571
|
+
async function appendLogBuffer(dir, entry) {
|
|
2619
3572
|
try {
|
|
2620
|
-
|
|
2621
|
-
} catch {
|
|
2622
|
-
return content;
|
|
2623
|
-
}
|
|
2624
|
-
let changed = false;
|
|
2625
|
-
if (parsed.recentDecisions.length > MAX_DECISIONS) {
|
|
2626
|
-
const toArchive = parsed.recentDecisions.slice(0, -MAX_DECISIONS);
|
|
2627
|
-
parsed.recentDecisions = parsed.recentDecisions.slice(-MAX_DECISIONS);
|
|
2628
|
-
changed = true;
|
|
2629
|
-
const archivePath = path11.join(dir, ARCHIVE_FILE);
|
|
2630
|
-
const entry = {
|
|
2631
|
-
type: "decisions_archived",
|
|
2632
|
-
timestamp: (/* @__PURE__ */ new Date()).toISOString(),
|
|
2633
|
-
decisions: toArchive
|
|
2634
|
-
};
|
|
2635
|
-
await appendFile5(archivePath, `${JSON.stringify(entry)}
|
|
3573
|
+
await appendFile5(bufferPath(dir), `${JSON.stringify(entry)}
|
|
2636
3574
|
`, "utf-8");
|
|
3575
|
+
} catch {
|
|
2637
3576
|
}
|
|
2638
|
-
const result = changed ? JSON.stringify(parsed, null, 2) : content;
|
|
2639
|
-
const sizeKB = Buffer.byteLength(result, "utf-8") / 1024;
|
|
2640
|
-
if (sizeKB > MAX_SIZE_KB && parsed.notes.length > 200) {
|
|
2641
|
-
const archivePath = path11.join(dir, ARCHIVE_FILE);
|
|
2642
|
-
const entry = {
|
|
2643
|
-
type: "notes_archived",
|
|
2644
|
-
timestamp: (/* @__PURE__ */ new Date()).toISOString(),
|
|
2645
|
-
notes: parsed.notes
|
|
2646
|
-
};
|
|
2647
|
-
await appendFile5(archivePath, `${JSON.stringify(entry)}
|
|
2648
|
-
`, "utf-8");
|
|
2649
|
-
parsed.notes = "(archived \u2014 see memory-archive.jsonl)";
|
|
2650
|
-
return JSON.stringify(parsed, null, 2);
|
|
2651
|
-
}
|
|
2652
|
-
return result;
|
|
2653
3577
|
}
|
|
2654
3578
|
|
|
2655
3579
|
// src/supervisor/prompt-builder.ts
|
|
2656
|
-
|
|
2657
|
-
|
|
2658
|
-
|
|
2659
|
-
|
|
2660
|
-
|
|
2661
|
-
Your
|
|
2662
|
-
|
|
2663
|
-
|
|
2664
|
-
|
|
2665
|
-
|
|
2666
|
-
|
|
2667
|
-
|
|
2668
|
-
|
|
2669
|
-
|
|
2670
|
-
|
|
2671
|
-
|
|
2672
|
-
|
|
2673
|
-
|
|
2674
|
-
|
|
2675
|
-
|
|
2676
|
-
|
|
2677
|
-
|
|
2678
|
-
|
|
2679
|
-
|
|
2680
|
-
|
|
2681
|
-
|
|
2682
|
-
|
|
3580
|
+
var ROLE = `You are the neo autonomous supervisor \u2014 a stateless dispatch controller.
|
|
3581
|
+
|
|
3582
|
+
You receive state (events, memory, work queue) and produce actions (tool calls).
|
|
3583
|
+
|
|
3584
|
+
<behavioral-contract>
|
|
3585
|
+
- Your ONLY visible output is \`neo log\` commands. The TUI shows these and nothing else.
|
|
3586
|
+
- Your text output is NEVER shown to anyone \u2014 every token of text is wasted cost.
|
|
3587
|
+
- Produce tool calls, not explanations. Do not narrate your reasoning.
|
|
3588
|
+
- You NEVER modify code \u2014 that is the agents' job.
|
|
3589
|
+
</behavioral-contract>`;
|
|
3590
|
+
var COMMANDS = `### Dispatching agents
|
|
3591
|
+
\`\`\`bash
|
|
3592
|
+
neo run <agent> --prompt "..." --repo <path> --branch <name> [--priority critical|high|medium|low] [--meta '<json>']
|
|
3593
|
+
\`\`\`
|
|
3594
|
+
|
|
3595
|
+
| Flag | Required | Description |
|
|
3596
|
+
|------|----------|-------------|
|
|
3597
|
+
| \`--prompt\` | always | Task description for the agent |
|
|
3598
|
+
| \`--repo\` | always | Target repository path |
|
|
3599
|
+
| \`--branch\` | always | Branch name for the isolated clone |
|
|
3600
|
+
| \`--priority\` | no | \`critical\`, \`high\`, \`medium\`, \`low\` |
|
|
3601
|
+
| \`--meta\` | **always** | JSON with \`"label"\` for identification + \`"ticketId"\`, \`"stage"\`, etc. |
|
|
3602
|
+
|
|
3603
|
+
All agents require \`--branch\`. Each agent session runs in an isolated clone on that branch.
|
|
3604
|
+
Always include \`--meta '{"label":"T1-auth-middleware","ticketId":"YC-42","stage":"develop"}'\` so you can identify runs later.
|
|
3605
|
+
|
|
3606
|
+
### Monitoring & reading agent output
|
|
3607
|
+
\`\`\`bash
|
|
3608
|
+
neo runs --short # check recent runs
|
|
3609
|
+
neo runs --short --status running # check active runs are alive
|
|
3610
|
+
neo runs <runId> # full run details + agent output (MUST READ on completion)
|
|
3611
|
+
neo cost --short [--all] # check budget
|
|
3612
|
+
\`\`\`
|
|
3613
|
+
|
|
3614
|
+
\`neo runs <runId>\` returns the agent's full output. **ALWAYS read it when a run completes** \u2014 it contains structured JSON (PR URLs, issues, plans, milestones) that you need to decide next steps.
|
|
3615
|
+
|
|
3616
|
+
### Memory
|
|
3617
|
+
\`\`\`bash
|
|
3618
|
+
neo memory write --type fact --scope /path "Stable fact about repo"
|
|
3619
|
+
neo memory write --type focus --expires 2h "Current working context"
|
|
3620
|
+
neo memory write --type procedure --scope /path "How to do X"
|
|
3621
|
+
neo memory write --type task --scope /path --severity high --category "neo runs <id>" "Task description"
|
|
3622
|
+
neo memory update <id> --outcome in_progress|done|blocked|abandoned
|
|
3623
|
+
neo memory forget <id>
|
|
3624
|
+
neo memory search "keyword"
|
|
3625
|
+
neo memory list --type fact
|
|
3626
|
+
\`\`\`
|
|
3627
|
+
|
|
3628
|
+
### Reporting
|
|
3629
|
+
\`\`\`bash
|
|
3630
|
+
neo log <type> "<message>" # visible in TUI only
|
|
3631
|
+
\`\`\``;
|
|
3632
|
+
var COMMANDS_COMPACT = `### Commands (reference)
|
|
3633
|
+
\`neo run <agent> --prompt "..." --repo <path> --branch <name> --meta '{"label":"T1-auth",...}'\`
|
|
3634
|
+
\`neo runs [--short | <runId>]\` \xB7 \`neo runs --short --status running\` \xB7 \`neo cost --short\`
|
|
3635
|
+
\`neo memory write|update|forget|search|list\` \xB7 \`neo log <type> "<msg>"\`
|
|
3636
|
+
ALWAYS read run output on completion: \`neo runs <runId>\` \u2014 it contains the agent's structured result.`;
|
|
3637
|
+
var HEARTBEAT_RULES = `### Heartbeat lifecycle
|
|
3638
|
+
|
|
3639
|
+
<decision-tree>
|
|
3640
|
+
1. DEDUP FIRST \u2014 check focus for PROCESSED entries. Skip any runId already processed.
|
|
3641
|
+
2. MONITOR RUNS \u2014 \`neo runs --short\` to check active run status. If a run completed since last HB, read its output with \`neo runs <runId>\` BEFORE doing anything else.
|
|
3642
|
+
3. PENDING TASKS? \u2014 dispatch the next eligible task from work queue. Do not re-plan.
|
|
3643
|
+
4. EVENTS? \u2014 process run completions, messages, webhooks. Parse agent JSON output.
|
|
3644
|
+
5. FOLLOW-UPS? \u2014 check CI (\`gh pr checks\`), deferred dispatches.
|
|
3645
|
+
6. DISPATCH \u2014 route work to agents. Mark tasks \`in_progress\`, add ACTIVE to focus.
|
|
3646
|
+
7. YIELD \u2014 log your decisions and yield. Do not poll. Completions arrive at future heartbeats.
|
|
3647
|
+
</decision-tree>
|
|
3648
|
+
|
|
3649
|
+
<run-monitoring>
|
|
3650
|
+
Runs are your agents in the field. You MUST track them:
|
|
3651
|
+
- **On dispatch**: always include a label in \`--meta\` for identification: \`--meta '{"label":"T6-csv-export","ticketId":"YC-42",...}'\`
|
|
3652
|
+
- **On completion**: ALWAYS run \`neo runs <runId>\` to read the agent's full output. The output contains structured JSON (PR URLs, issues, plans) \u2014 you need it to decide next steps.
|
|
3653
|
+
- **On failure**: read the output to understand why. Check if the task should be retried, blocked, or abandoned.
|
|
3654
|
+
- **Active runs**: check \`neo runs --short --status running\` to verify your runs are still alive. If a run disappeared, investigate.
|
|
3655
|
+
</run-monitoring>
|
|
3656
|
+
|
|
3657
|
+
<rules>
|
|
3658
|
+
- Work queue IS your plan. Never re-plan existing tasks.
|
|
3659
|
+
- Maximize parallelism: dispatch independent tasks in the same heartbeat.
|
|
3660
|
+
- After dispatch: update focus, yield immediately. Do NOT wait for results.
|
|
3661
|
+
- Deferred work (CI pending): MUST check at next heartbeat.
|
|
3662
|
+
- Before dispatching a task, run the \`--category\` command from the task to retrieve context.
|
|
3663
|
+
</rules>`;
|
|
3664
|
+
var REPORTING_RULES = `### Reporting
|
|
3665
|
+
|
|
3666
|
+
\`neo log\` is your ONLY visible output. Use telegraphic format.
|
|
3667
|
+
|
|
3668
|
+
<log-format>
|
|
3669
|
+
neo log decision "<ticket> \u2192 <action> | <1-line reason>"
|
|
3670
|
+
neo log action "<agent> <repo>:<branch> run:<runId> | <context>"
|
|
3671
|
+
neo log discovery "<what> in <where>"
|
|
3672
|
+
</log-format>
|
|
3673
|
+
|
|
3674
|
+
<examples>
|
|
3675
|
+
<example type="good">
|
|
3676
|
+
neo log decision "YC-42 \u2192 developer | clear spec, complexity 3"
|
|
3677
|
+
neo log action "developer standards:feat/YC-42-auth run:5900a64a | task T1"
|
|
3678
|
+
neo log discovery "CI requires node 20 in api-service"
|
|
3679
|
+
</example>
|
|
3680
|
+
<example type="bad">
|
|
3681
|
+
neo log plan "Good! Now let me check the status and update things accordingly."
|
|
3682
|
+
neo log decision "Heartbeat #309: Idle cycle - no action required. All 4 repositories stable."
|
|
3683
|
+
neo log action "I've dispatched a developer agent to work on the authentication feature."
|
|
3684
|
+
</example>
|
|
3685
|
+
</examples>`;
|
|
3686
|
+
var MEMORY_RULES_CORE = `### Memory
|
|
3687
|
+
|
|
3688
|
+
<memory-types>
|
|
3689
|
+
| Type | Store when | TTL |
|
|
3690
|
+
|------|-----------|-----|
|
|
3691
|
+
| \`fact\` | Stable truth affecting dispatch decisions | Permanent (decays) |
|
|
3692
|
+
| \`procedure\` | Same failure 3+ times | Permanent |
|
|
3693
|
+
| \`focus\` | After every dispatch/deferral | --expires required |
|
|
3694
|
+
| \`task\` | Any planned work (tickets, decompositions, follow-ups) | Until done/abandoned |
|
|
3695
|
+
| \`feedback\` | Same review complaint 3+ times | Permanent |
|
|
3696
|
+
</memory-types>
|
|
3697
|
+
|
|
3698
|
+
<memory-rules>
|
|
3699
|
+
- Focus MUST use structured format: ACTIVE/PENDING/WAITING/PROCESSED lines only.
|
|
3700
|
+
- NEVER store: file counts, line numbers, completed work details, data available via \`neo runs <id>\`.
|
|
3701
|
+
- After PR merge: forget related facts unless they are reusable architectural truths.
|
|
3702
|
+
- Pattern escalation: same failure 3+ times \u2192 write a \`procedure\`.
|
|
3703
|
+
- Every memory that references external context MUST include a retrieval command (in \`--category\` for tasks, in content for facts/procedures). You are stateless \u2014 if you can't retrieve it later, don't store it.
|
|
3704
|
+
</memory-rules>
|
|
3705
|
+
|
|
3706
|
+
<task-workflow>
|
|
3707
|
+
Tasks are your work queue. The work queue section above shows them with markers (\`\u25CB\` pending, \`[ACTIVE]\` in_progress, \`[BLOCKED]\` blocked).
|
|
3708
|
+
|
|
3709
|
+
Create a task for any planned work: incoming tickets, architect decompositions, refiner sub-tickets, follow-up actions, CI fixes.
|
|
3710
|
+
- \`--severity critical|high|medium|low\` \u2014 dispatch highest severity first
|
|
3711
|
+
- \`--tags "initiative:<name>"\` \u2014 groups related tasks (shown as [initiative] headers in queue)
|
|
3712
|
+
- \`--tags "depends:mem_<id>"\` \u2014 task cannot start until dependency is done
|
|
3713
|
+
- \`--category\` \u2014 **MANDATORY** \u2014 the command to retrieve context for this task (shown as \`\u2192 <command>\` in queue)
|
|
3714
|
+
|
|
3715
|
+
**Context retrieval rule**: every task and relevant memory MUST include a way for you to access its source context at a future heartbeat. You are stateless \u2014 without this, you lose the context.
|
|
3716
|
+
- Agent output: \`--category "neo runs <runId>"\`
|
|
3717
|
+
- Note/plan: \`--category "cat notes/plan-feature.md"\`
|
|
3718
|
+
- Notion ticket: \`--category "API-retrieve-a-page <notionPageId>"\`
|
|
3719
|
+
- Architect decomposition: \`--category "neo runs <architectRunId>"\` (contains milestones + tasks)
|
|
3720
|
+
|
|
3721
|
+
Lifecycle: create \u2192 \`neo memory update <id> --outcome in_progress\` (on dispatch) \u2192 \`done\` (on success) / \`blocked\` (on failure, will retry) / \`abandoned\` (terminal, won't retry)
|
|
3722
|
+
|
|
3723
|
+
Dispatch rule: pick the highest-severity task with no unmet dependencies. Dispatch independent tasks in parallel. Before dispatching, run the \`--category\` command to retrieve task context.
|
|
3724
|
+
</task-workflow>
|
|
3725
|
+
|
|
3726
|
+
<focus-format>
|
|
3727
|
+
ACTIVE: <runId> <agent> "<task>" branch:<name>
|
|
3728
|
+
PENDING: <taskId> "<description>" depends:<taskId>
|
|
3729
|
+
WAITING: <what> since:HB<N>
|
|
3730
|
+
PROCESSED: <runId> \u2192 <outcome> PR#<N>
|
|
3731
|
+
</focus-format>
|
|
3732
|
+
|
|
3733
|
+
**Notes** (\`notes/\`, via Bash): use for detailed multi-page plans that span multiple heartbeats. After creating a plan, write a focus summary with \`--category "cat notes/<file>"\`. Delete notes when done.`;
|
|
3734
|
+
var MEMORY_RULES_EXAMPLES = `<memory-commands>
|
|
3735
|
+
neo memory write --type focus --expires 2h "ACTIVE: 5900a64a developer 'T1' branch:feat/x"
|
|
3736
|
+
neo memory write --type fact --scope /repo "CI requires pnpm build \u2014 discovered in run abc123"
|
|
3737
|
+
neo memory write --type procedure --scope /repo "Check gh pr view before re-dispatch"
|
|
3738
|
+
neo memory write --type task --scope /repo --severity high --category "neo runs abc123" --tags "initiative:auth-v2,depends:mem_xyz" "T1: Auth middleware"
|
|
3739
|
+
neo memory update <id> --outcome in_progress|done|blocked|abandoned
|
|
3740
|
+
neo memory forget <id>
|
|
3741
|
+
</memory-commands>`;
|
|
3742
|
+
function getCommandsSection(heartbeatCount) {
|
|
3743
|
+
return heartbeatCount <= 3 ? COMMANDS : COMMANDS_COMPACT;
|
|
3744
|
+
}
|
|
3745
|
+
function buildContextSections(opts) {
|
|
3746
|
+
const parts = [];
|
|
2683
3747
|
if (opts.repos.length > 0) {
|
|
2684
3748
|
const repoList = opts.repos.map((r) => `- ${r.path} (branch: ${r.defaultBranch})`).join("\n");
|
|
2685
|
-
|
|
3749
|
+
parts.push(`Repositories:
|
|
2686
3750
|
${repoList}`);
|
|
2687
|
-
} else {
|
|
2688
|
-
sections.push("## Registered repositories\n(none \u2014 run 'neo init' in a repo to register it)");
|
|
2689
3751
|
}
|
|
2690
3752
|
if (opts.mcpServerNames.length > 0) {
|
|
2691
3753
|
const mcpList = opts.mcpServerNames.map((n) => `- ${n}`).join("\n");
|
|
2692
|
-
|
|
2693
|
-
|
|
2694
|
-
${mcpList}
|
|
2695
|
-
|
|
2696
|
-
You can use these tools directly to query external systems.`
|
|
2697
|
-
);
|
|
3754
|
+
parts.push(`Integrations (MCP):
|
|
3755
|
+
${mcpList}`);
|
|
2698
3756
|
}
|
|
2699
|
-
|
|
2700
|
-
|
|
2701
|
-
- Today: $${opts.budgetStatus.todayUsd.toFixed(2)} / $${opts.budgetStatus.capUsd.toFixed(2)} (${opts.budgetStatus.remainingPct.toFixed(0)}% remaining)`
|
|
3757
|
+
parts.push(
|
|
3758
|
+
`Budget: $${opts.budgetStatus.todayUsd.toFixed(2)} / $${opts.budgetStatus.capUsd.toFixed(2)} (${opts.budgetStatus.remainingPct.toFixed(0)}% remaining)`
|
|
2702
3759
|
);
|
|
2703
|
-
|
|
2704
|
-
|
|
2705
|
-
|
|
3760
|
+
return parts;
|
|
3761
|
+
}
|
|
3762
|
+
function buildMemorySection(memories, supervisorDir) {
|
|
3763
|
+
const focusEntries = memories.filter((m) => m.type === "focus");
|
|
3764
|
+
const factEntries = memories.filter((m) => m.type === "fact");
|
|
3765
|
+
const procedureEntries = memories.filter((m) => m.type === "procedure");
|
|
3766
|
+
const feedbackEntries = memories.filter((m) => m.type === "feedback");
|
|
3767
|
+
const parts = [];
|
|
3768
|
+
if (focusEntries.length > 0) {
|
|
3769
|
+
const lines = focusEntries.map((m) => `- ${m.content}`).join("\n");
|
|
3770
|
+
parts.push(`<focus>
|
|
3771
|
+
${lines}
|
|
3772
|
+
</focus>`);
|
|
3773
|
+
} else {
|
|
3774
|
+
parts.push(
|
|
3775
|
+
"<focus>\n(empty \u2014 use neo memory write --type focus to set working context)\n</focus>"
|
|
3776
|
+
);
|
|
2706
3777
|
}
|
|
2707
|
-
|
|
2708
|
-
|
|
2709
|
-
|
|
2710
|
-
|
|
2711
|
-
|
|
2712
|
-
|
|
2713
|
-
|
|
3778
|
+
if (factEntries.length > 0) {
|
|
3779
|
+
const byScope = /* @__PURE__ */ new Map();
|
|
3780
|
+
for (const m of factEntries) {
|
|
3781
|
+
const scope = m.scope === "global" ? "global" : m.scope.split("/").pop() ?? m.scope;
|
|
3782
|
+
const group = byScope.get(scope) ?? [];
|
|
3783
|
+
group.push(m);
|
|
3784
|
+
byScope.set(scope, group);
|
|
2714
3785
|
}
|
|
2715
|
-
|
|
2716
|
-
|
|
3786
|
+
const scopeSections = [];
|
|
3787
|
+
for (const [scope, entries] of byScope) {
|
|
3788
|
+
const oldestAccess = Math.min(
|
|
3789
|
+
...entries.map((m) => Date.now() - new Date(m.lastAccessedAt).getTime())
|
|
3790
|
+
);
|
|
3791
|
+
const daysAgo = Math.floor(oldestAccess / 864e5);
|
|
3792
|
+
const staleHint = daysAgo >= 5 ? ` (last accessed ${daysAgo}d ago)` : "";
|
|
3793
|
+
const lines = entries.map((m) => {
|
|
3794
|
+
const confidence = m.accessCount >= 3 ? "" : " (unconfirmed)";
|
|
3795
|
+
return ` - ${m.content}${confidence}`;
|
|
3796
|
+
}).join("\n");
|
|
3797
|
+
scopeSections.push(` [${scope}]${staleHint} (${entries.length})
|
|
3798
|
+
${lines}`);
|
|
2717
3799
|
}
|
|
2718
|
-
|
|
2719
|
-
|
|
3800
|
+
parts.push(`Known facts:
|
|
3801
|
+
${scopeSections.join("\n")}`);
|
|
3802
|
+
}
|
|
3803
|
+
if (procedureEntries.length > 0) {
|
|
3804
|
+
const lines = procedureEntries.map((m) => `- ${m.content}`).join("\n");
|
|
3805
|
+
parts.push(`Procedures:
|
|
3806
|
+
${lines}`);
|
|
3807
|
+
}
|
|
3808
|
+
if (feedbackEntries.length > 0) {
|
|
3809
|
+
const lines = feedbackEntries.map((m) => `- [${m.category ?? "general"}] ${m.content}`).join("\n");
|
|
3810
|
+
parts.push(`Recurring review issues:
|
|
3811
|
+
${lines}`);
|
|
3812
|
+
}
|
|
3813
|
+
parts.push(`For detailed plans and checklists, use notes:
|
|
3814
|
+
\`\`\`bash
|
|
3815
|
+
cat > ${supervisorDir}/notes/plan-feature.md << 'EOF'
|
|
3816
|
+
<your detailed plan here>
|
|
3817
|
+
EOF
|
|
3818
|
+
\`\`\``);
|
|
3819
|
+
return parts.join("\n\n");
|
|
3820
|
+
}
|
|
3821
|
+
var DONE_OUTCOMES = /* @__PURE__ */ new Set(["done", "abandoned"]);
|
|
3822
|
+
var MAX_TASKS = 15;
|
|
3823
|
+
function buildWorkQueueSection(memories) {
|
|
3824
|
+
const tasks = memories.filter((m) => m.type === "task" && !DONE_OUTCOMES.has(m.outcome ?? ""));
|
|
3825
|
+
const doneCount = countDoneTasks(memories);
|
|
3826
|
+
if (tasks.length === 0) {
|
|
3827
|
+
if (doneCount > 0) {
|
|
3828
|
+
return `Work queue (0 remaining, ${doneCount} done) \u2014 all tasks complete. Pick up new work or wait for events.`;
|
|
2720
3829
|
}
|
|
2721
|
-
|
|
2722
|
-
${parts.join("\n\n")}`);
|
|
2723
|
-
} else {
|
|
2724
|
-
sections.push(
|
|
2725
|
-
"## Pending events\nNo new events. This is an idle heartbeat \u2014 check on active runs if any, or wait."
|
|
2726
|
-
);
|
|
3830
|
+
return "";
|
|
2727
3831
|
}
|
|
2728
|
-
|
|
2729
|
-
|
|
2730
|
-
|
|
2731
|
-
|
|
2732
|
-
To update knowledge, output a \`<knowledge>...</knowledge>\` block. Only update when reference data changes (API IDs, workspace config, etc.).`);
|
|
3832
|
+
const groups = groupTasksByInitiative(tasks);
|
|
3833
|
+
const lines = renderTaskGroups(groups);
|
|
3834
|
+
if (tasks.length > MAX_TASKS) {
|
|
3835
|
+
lines.push(` ... and ${tasks.length - MAX_TASKS} more pending`);
|
|
2733
3836
|
}
|
|
2734
|
-
|
|
2735
|
-
return
|
|
3837
|
+
const header = `Work queue (${tasks.length} remaining, ${doneCount} done) \u2014 dispatch the next eligible task:`;
|
|
3838
|
+
return `${header}
|
|
3839
|
+
${lines.join("\n")}`;
|
|
2736
3840
|
}
|
|
2737
|
-
function
|
|
2738
|
-
|
|
2739
|
-
|
|
2740
|
-
|
|
2741
|
-
|
|
2742
|
-
|
|
2743
|
-
|
|
2744
|
-
|
|
2745
|
-
|
|
2746
|
-
|
|
2747
|
-
|
|
2748
|
-
(
|
|
2749
|
-
|
|
2750
|
-
|
|
2751
|
-
|
|
2752
|
-
|
|
2753
|
-
\`\`\`
|
|
2754
|
-
Keep under 8KB. Prune old decisions (keep last 10).`;
|
|
3841
|
+
function countDoneTasks(memories) {
|
|
3842
|
+
return memories.filter((m) => m.type === "task" && DONE_OUTCOMES.has(m.outcome ?? "")).length;
|
|
3843
|
+
}
|
|
3844
|
+
function groupTasksByInitiative(tasks) {
|
|
3845
|
+
const initiativeMap = /* @__PURE__ */ new Map();
|
|
3846
|
+
const noInitiative = [];
|
|
3847
|
+
for (const task of tasks) {
|
|
3848
|
+
const tag = task.tags.find((t) => t.startsWith("initiative:"));
|
|
3849
|
+
if (tag) {
|
|
3850
|
+
const key = tag.slice("initiative:".length);
|
|
3851
|
+
const group = initiativeMap.get(key) ?? [];
|
|
3852
|
+
group.push(task);
|
|
3853
|
+
initiativeMap.set(key, group);
|
|
3854
|
+
} else {
|
|
3855
|
+
noInitiative.push(task);
|
|
3856
|
+
}
|
|
2755
3857
|
}
|
|
2756
|
-
const
|
|
2757
|
-
|
|
2758
|
-
|
|
2759
|
-
|
|
2760
|
-
|
|
2761
|
-
|
|
3858
|
+
const groups = [];
|
|
3859
|
+
for (const [initiative, taskList] of initiativeMap) {
|
|
3860
|
+
groups.push({ initiative, tasks: taskList });
|
|
3861
|
+
}
|
|
3862
|
+
if (noInitiative.length > 0) {
|
|
3863
|
+
groups.push({ initiative: null, tasks: noInitiative });
|
|
3864
|
+
}
|
|
3865
|
+
return groups;
|
|
3866
|
+
}
|
|
3867
|
+
function renderTaskGroups(groups) {
|
|
3868
|
+
const lines = [];
|
|
3869
|
+
let rendered = 0;
|
|
3870
|
+
for (const group of groups) {
|
|
3871
|
+
if (rendered >= MAX_TASKS) break;
|
|
3872
|
+
if (group.initiative && groups.length > 1) {
|
|
3873
|
+
lines.push(` [${group.initiative}]`);
|
|
3874
|
+
}
|
|
3875
|
+
for (const task of group.tasks) {
|
|
3876
|
+
if (rendered >= MAX_TASKS) break;
|
|
3877
|
+
lines.push(` ${formatTaskLine(task)}`);
|
|
3878
|
+
rendered++;
|
|
3879
|
+
}
|
|
3880
|
+
}
|
|
3881
|
+
return lines;
|
|
3882
|
+
}
|
|
3883
|
+
function formatTaskLine(task) {
|
|
3884
|
+
const marker = formatTaskMarker(task.outcome);
|
|
3885
|
+
const severity = task.severity ? `[${task.severity}] ` : "";
|
|
3886
|
+
const scope = task.scope !== "global" ? ` (${getBasename(task.scope)})` : "";
|
|
3887
|
+
const run = task.runId ? ` [run ${task.runId.slice(0, 8)}]` : "";
|
|
3888
|
+
const cat = task.category ? ` \u2192 ${task.category}` : "";
|
|
3889
|
+
return `${marker} ${severity}${task.content}${scope}${run}${cat}`;
|
|
3890
|
+
}
|
|
3891
|
+
function formatTaskMarker(outcome) {
|
|
3892
|
+
switch (outcome) {
|
|
3893
|
+
case "in_progress":
|
|
3894
|
+
return "[ACTIVE]";
|
|
3895
|
+
case "blocked":
|
|
3896
|
+
return "[BLOCKED]";
|
|
3897
|
+
default:
|
|
3898
|
+
return "\u25CB";
|
|
3899
|
+
}
|
|
3900
|
+
}
|
|
3901
|
+
function getBasename(scopePath) {
|
|
3902
|
+
const parts = scopePath.split("/");
|
|
3903
|
+
return parts[parts.length - 1] || scopePath;
|
|
3904
|
+
}
|
|
3905
|
+
var SIGNIFICANT_TYPES = /* @__PURE__ */ new Set(["decision", "action", "dispatch", "error"]);
|
|
3906
|
+
function buildRecentActionsSection(entries) {
|
|
3907
|
+
const significant = entries.filter((e) => SIGNIFICANT_TYPES.has(e.type));
|
|
3908
|
+
if (significant.length === 0) return "";
|
|
3909
|
+
const lines = significant.map((e) => {
|
|
3910
|
+
const ago = formatTimeAgo(Date.now() - new Date(e.timestamp).getTime());
|
|
3911
|
+
return `- [${e.type}] ${e.summary} (${ago})`;
|
|
3912
|
+
});
|
|
3913
|
+
return `Recent actions (your last heartbeats):
|
|
3914
|
+
${lines.join("\n")}`;
|
|
3915
|
+
}
|
|
3916
|
+
function formatTimeAgo(ms) {
|
|
3917
|
+
if (ms < 6e4) return "just now";
|
|
3918
|
+
const minutes = Math.floor(ms / 6e4);
|
|
3919
|
+
if (minutes < 60) return `${minutes}m ago`;
|
|
3920
|
+
const hours = Math.floor(minutes / 60);
|
|
3921
|
+
if (hours < 24) return `${hours}h${minutes % 60}m ago`;
|
|
3922
|
+
return `${Math.floor(hours / 24)}d ago`;
|
|
3923
|
+
}
|
|
3924
|
+
function buildEventsSection(grouped) {
|
|
3925
|
+
const { messages, webhooks, runCompletions } = grouped;
|
|
3926
|
+
const totalEvents = messages.length + webhooks.length + runCompletions.length;
|
|
3927
|
+
if (totalEvents === 0) {
|
|
3928
|
+
return "No new events.";
|
|
3929
|
+
}
|
|
3930
|
+
const parts = [];
|
|
3931
|
+
for (const msg of messages) {
|
|
3932
|
+
const countSuffix = msg.count > 1 ? ` (x${msg.count})` : "";
|
|
3933
|
+
parts.push(`Message from ${msg.from}${countSuffix}: ${msg.text}`);
|
|
3934
|
+
}
|
|
3935
|
+
for (const evt of webhooks) {
|
|
3936
|
+
parts.push(formatEvent(evt));
|
|
3937
|
+
}
|
|
3938
|
+
for (const evt of runCompletions) {
|
|
3939
|
+
parts.push(formatEvent(evt));
|
|
3940
|
+
}
|
|
3941
|
+
return `${totalEvents} pending event(s):
|
|
3942
|
+
${parts.join("\n\n")}`;
|
|
2762
3943
|
}
|
|
2763
3944
|
function formatEvent(event) {
|
|
2764
3945
|
switch (event.kind) {
|
|
2765
3946
|
case "webhook":
|
|
2766
|
-
return
|
|
3947
|
+
return `Webhook [${event.data.source ?? "unknown"}] ${event.data.event ?? ""}
|
|
2767
3948
|
\`\`\`json
|
|
2768
3949
|
${JSON.stringify(event.data.payload ?? {}, null, 2)}
|
|
2769
3950
|
\`\`\``;
|
|
2770
3951
|
case "message":
|
|
2771
|
-
return
|
|
3952
|
+
return `Message from ${event.data.from}: ${event.data.text}`;
|
|
2772
3953
|
case "run_complete":
|
|
2773
|
-
return
|
|
3954
|
+
return `Run completed: ${event.runId} (check with \`neo runs\`)`;
|
|
3955
|
+
case "internal":
|
|
3956
|
+
return `Internal event: ${event.eventKind}`;
|
|
2774
3957
|
}
|
|
2775
3958
|
}
|
|
3959
|
+
function isIdleHeartbeat(opts) {
|
|
3960
|
+
const { messages, webhooks, runCompletions } = opts.grouped;
|
|
3961
|
+
const totalEvents = messages.length + webhooks.length + runCompletions.length;
|
|
3962
|
+
const hasWork = buildWorkQueueSection(opts.memories) !== "";
|
|
3963
|
+
return totalEvents === 0 && opts.activeRuns.length === 0 && !hasWork;
|
|
3964
|
+
}
|
|
3965
|
+
function buildIdlePrompt(opts) {
|
|
3966
|
+
return `<role>
|
|
3967
|
+
${ROLE}
|
|
3968
|
+
Heartbeat #${opts.heartbeatCount}
|
|
3969
|
+
</role>
|
|
3970
|
+
|
|
3971
|
+
<context>
|
|
3972
|
+
No events. No active runs. No pending tasks.
|
|
3973
|
+
Budget: $${opts.budgetStatus.todayUsd.toFixed(2)} / $${opts.budgetStatus.capUsd.toFixed(2)} (${opts.budgetStatus.remainingPct.toFixed(0)}% remaining)
|
|
3974
|
+
</context>
|
|
3975
|
+
|
|
3976
|
+
<directive>
|
|
3977
|
+
Nothing to do. Run \`neo log discovery "idle"\` and yield. Do not produce any other output.
|
|
3978
|
+
</directive>`;
|
|
3979
|
+
}
|
|
3980
|
+
function buildStandardPrompt(opts) {
|
|
3981
|
+
const sections = [];
|
|
3982
|
+
sections.push(`<role>
|
|
3983
|
+
${ROLE}
|
|
3984
|
+
Heartbeat #${opts.heartbeatCount}
|
|
3985
|
+
</role>`);
|
|
3986
|
+
const contextParts = [];
|
|
3987
|
+
const workQueue = buildWorkQueueSection(opts.memories);
|
|
3988
|
+
if (workQueue) {
|
|
3989
|
+
contextParts.push(workQueue);
|
|
3990
|
+
}
|
|
3991
|
+
if (opts.activeRuns.length > 0) {
|
|
3992
|
+
contextParts.push(`Active runs:
|
|
3993
|
+
${opts.activeRuns.map((r) => `- ${r}`).join("\n")}`);
|
|
3994
|
+
}
|
|
3995
|
+
contextParts.push(...buildContextSections(opts));
|
|
3996
|
+
contextParts.push(buildMemorySection(opts.memories, opts.supervisorDir));
|
|
3997
|
+
const recentActions = buildRecentActionsSection(opts.recentActions);
|
|
3998
|
+
if (recentActions) {
|
|
3999
|
+
contextParts.push(recentActions);
|
|
4000
|
+
}
|
|
4001
|
+
contextParts.push(`Events:
|
|
4002
|
+
${buildEventsSection(opts.grouped)}`);
|
|
4003
|
+
sections.push(`<context>
|
|
4004
|
+
${contextParts.join("\n\n")}
|
|
4005
|
+
</context>`);
|
|
4006
|
+
sections.push(`<reference>
|
|
4007
|
+
${getCommandsSection(opts.heartbeatCount)}
|
|
4008
|
+
</reference>`);
|
|
4009
|
+
const instructionParts = [];
|
|
4010
|
+
instructionParts.push(HEARTBEAT_RULES);
|
|
4011
|
+
instructionParts.push(REPORTING_RULES);
|
|
4012
|
+
instructionParts.push(MEMORY_RULES_CORE);
|
|
4013
|
+
if (opts.customInstructions) {
|
|
4014
|
+
instructionParts.push(`### Custom instructions
|
|
4015
|
+
${opts.customInstructions}`);
|
|
4016
|
+
}
|
|
4017
|
+
const { messages, webhooks, runCompletions } = opts.grouped;
|
|
4018
|
+
const hasEvents = messages.length + webhooks.length + runCompletions.length > 0;
|
|
4019
|
+
instructionParts.push(
|
|
4020
|
+
hasEvents ? "Process events, dispatch eligible work, yield. Each heartbeat costs ~$0.10 \u2014 be efficient." : "No events. If pending work exists, dispatch it. Otherwise yield immediately."
|
|
4021
|
+
);
|
|
4022
|
+
sections.push(`<instructions>
|
|
4023
|
+
${instructionParts.join("\n\n")}
|
|
4024
|
+
</instructions>`);
|
|
4025
|
+
return sections.join("\n\n");
|
|
4026
|
+
}
|
|
4027
|
+
function buildConsolidationPrompt(opts) {
|
|
4028
|
+
const sections = [];
|
|
4029
|
+
sections.push(`<role>
|
|
4030
|
+
${ROLE}
|
|
4031
|
+
Heartbeat #${opts.heartbeatCount} (CONSOLIDATION)
|
|
4032
|
+
</role>`);
|
|
4033
|
+
const contextParts = [];
|
|
4034
|
+
const workQueueConsolidation = buildWorkQueueSection(opts.memories);
|
|
4035
|
+
if (workQueueConsolidation) {
|
|
4036
|
+
contextParts.push(workQueueConsolidation);
|
|
4037
|
+
}
|
|
4038
|
+
if (opts.activeRuns.length > 0) {
|
|
4039
|
+
contextParts.push(`Active runs:
|
|
4040
|
+
${opts.activeRuns.map((r) => `- ${r}`).join("\n")}`);
|
|
4041
|
+
}
|
|
4042
|
+
contextParts.push(...buildContextSections(opts));
|
|
4043
|
+
contextParts.push(buildMemorySection(opts.memories, opts.supervisorDir));
|
|
4044
|
+
const recentActions = buildRecentActionsSection(opts.recentActions);
|
|
4045
|
+
if (recentActions) {
|
|
4046
|
+
contextParts.push(recentActions);
|
|
4047
|
+
}
|
|
4048
|
+
contextParts.push(`Events:
|
|
4049
|
+
${buildEventsSection(opts.grouped)}`);
|
|
4050
|
+
sections.push(`<context>
|
|
4051
|
+
${contextParts.join("\n\n")}
|
|
4052
|
+
</context>`);
|
|
4053
|
+
sections.push(`<reference>
|
|
4054
|
+
${getCommandsSection(opts.heartbeatCount)}
|
|
4055
|
+
</reference>`);
|
|
4056
|
+
const instructionParts = [];
|
|
4057
|
+
instructionParts.push(HEARTBEAT_RULES);
|
|
4058
|
+
instructionParts.push(REPORTING_RULES);
|
|
4059
|
+
instructionParts.push(MEMORY_RULES_CORE);
|
|
4060
|
+
instructionParts.push(MEMORY_RULES_EXAMPLES);
|
|
4061
|
+
if (opts.customInstructions) {
|
|
4062
|
+
instructionParts.push(`### Custom instructions
|
|
4063
|
+
${opts.customInstructions}`);
|
|
4064
|
+
}
|
|
4065
|
+
instructionParts.push(
|
|
4066
|
+
`### Consolidation
|
|
4067
|
+
This is a CONSOLIDATION heartbeat.
|
|
4068
|
+
|
|
4069
|
+
**Idle guard**: if there are NO active runs AND no new events since last consolidation, log "idle, no changes" and yield immediately. Do NOT re-validate facts you already reviewed.
|
|
4070
|
+
|
|
4071
|
+
If there IS active work, your job:
|
|
4072
|
+
|
|
4073
|
+
1. **Review memory** \u2014 check facts and procedures for accuracy. Remove outdated entries. Resolve contradictions (keep newer). Remove facts about completed work (merged PRs, finished initiatives).
|
|
4074
|
+
2. **Update focus** \u2014 rewrite focus using the MANDATORY structured format (ACTIVE/PENDING/WAITING/PROCESSED). Remove resolved items. Add new context.
|
|
4075
|
+
3. **Pattern escalation** \u2014 if agents hit the same issue 3+ times (check recent actions), write a \`procedure\` to prevent recurrence.
|
|
4076
|
+
4. **Prune completed work** \u2014 if a PR is merged or an initiative is done, forget related facts that are no longer actionable. Keep only reusable architectural truths.
|
|
4077
|
+
5. **Prune done tasks** \u2014 forget tasks with outcome \`done\` or \`abandoned\` older than 7 days.`
|
|
4078
|
+
);
|
|
4079
|
+
sections.push(`<instructions>
|
|
4080
|
+
${instructionParts.join("\n\n")}
|
|
4081
|
+
</instructions>`);
|
|
4082
|
+
return sections.join("\n\n");
|
|
4083
|
+
}
|
|
4084
|
+
function buildCompactionPrompt(opts) {
|
|
4085
|
+
const sections = [];
|
|
4086
|
+
sections.push(`<role>
|
|
4087
|
+
${ROLE}
|
|
4088
|
+
Heartbeat #${opts.heartbeatCount} (COMPACTION)
|
|
4089
|
+
</role>`);
|
|
4090
|
+
const contextParts = [];
|
|
4091
|
+
contextParts.push(...buildContextSections(opts));
|
|
4092
|
+
contextParts.push(buildMemorySection(opts.memories, opts.supervisorDir));
|
|
4093
|
+
const workQueueCompaction = buildWorkQueueSection(opts.memories);
|
|
4094
|
+
if (workQueueCompaction) {
|
|
4095
|
+
contextParts.push(workQueueCompaction);
|
|
4096
|
+
}
|
|
4097
|
+
sections.push(`<context>
|
|
4098
|
+
${contextParts.join("\n\n")}
|
|
4099
|
+
</context>`);
|
|
4100
|
+
sections.push(`<reference>
|
|
4101
|
+
${getCommandsSection(opts.heartbeatCount)}
|
|
4102
|
+
</reference>`);
|
|
4103
|
+
const instructionParts = [];
|
|
4104
|
+
instructionParts.push(HEARTBEAT_RULES);
|
|
4105
|
+
instructionParts.push(REPORTING_RULES);
|
|
4106
|
+
instructionParts.push(MEMORY_RULES_CORE);
|
|
4107
|
+
instructionParts.push(MEMORY_RULES_EXAMPLES);
|
|
4108
|
+
if (opts.customInstructions) {
|
|
4109
|
+
instructionParts.push(`### Custom instructions
|
|
4110
|
+
${opts.customInstructions}`);
|
|
4111
|
+
}
|
|
4112
|
+
instructionParts.push(`### Compaction
|
|
4113
|
+
This is a COMPACTION heartbeat. Deep-clean your ENTIRE memory.
|
|
4114
|
+
|
|
4115
|
+
1. **Remove stale facts** \u2014 facts >7 days old with no recent reinforcement. Check the "(last accessed Xd ago)" hints in the facts section.
|
|
4116
|
+
2. **Remove completed-work facts** \u2014 if all PRs for a repo initiative are merged/closed, forget related facts. Keep only reusable architectural truths (build system, CI config, tooling).
|
|
4117
|
+
3. **Remove trivial facts** \u2014 file counts, line numbers, structural details that \`ls\` or \`cat package.json\` can answer. These waste context.
|
|
4118
|
+
4. **Merge duplicates** \u2014 combine similar facts within the same scope into one.
|
|
4119
|
+
5. **Clean up focus** \u2014 forget resolved items, rewrite remaining in structured format.
|
|
4120
|
+
6. **Prune done tasks** \u2014 forget tasks with outcome \`done\` or \`abandoned\` older than 7 days.
|
|
4121
|
+
7. **Delete completed notes** from notes/ directory.
|
|
4122
|
+
8. **Stay under 15 facts per scope** \u2014 prioritize facts that affect dispatch decisions.
|
|
4123
|
+
|
|
4124
|
+
Flag contradictions: if two facts contradict, keep the newer one.
|
|
4125
|
+
|
|
4126
|
+
\`\`\`bash
|
|
4127
|
+
neo memory list --type fact
|
|
4128
|
+
neo memory forget <stale-id>
|
|
4129
|
+
\`\`\``);
|
|
4130
|
+
sections.push(`<instructions>
|
|
4131
|
+
${instructionParts.join("\n\n")}
|
|
4132
|
+
</instructions>`);
|
|
4133
|
+
return sections.join("\n\n");
|
|
4134
|
+
}
|
|
2776
4135
|
|
|
2777
4136
|
// src/supervisor/heartbeat.ts
|
|
4137
|
+
var DEFAULT_IDLE_SKIP_MAX = 20;
|
|
4138
|
+
var DEFAULT_ACTIVE_WORK_SKIP_MAX = 3;
|
|
4139
|
+
var DEFAULT_CONSOLIDATION_INTERVAL = 5;
|
|
4140
|
+
function shouldConsolidate(heartbeatCount, lastConsolidationHeartbeat, consolidationInterval, hasPendingEntries) {
|
|
4141
|
+
const since = heartbeatCount - lastConsolidationHeartbeat;
|
|
4142
|
+
if (since >= consolidationInterval) return true;
|
|
4143
|
+
if (hasPendingEntries && since >= 2) return true;
|
|
4144
|
+
return false;
|
|
4145
|
+
}
|
|
4146
|
+
function shouldCompact(heartbeatCount, lastCompactionHeartbeat, compactionInterval = 50) {
|
|
4147
|
+
const since = heartbeatCount - lastCompactionHeartbeat;
|
|
4148
|
+
return since >= compactionInterval;
|
|
4149
|
+
}
|
|
2778
4150
|
var HeartbeatLoop = class {
|
|
2779
4151
|
stopping = false;
|
|
2780
4152
|
consecutiveFailures = 0;
|
|
@@ -2786,6 +4158,9 @@ var HeartbeatLoop = class {
|
|
|
2786
4158
|
eventQueue;
|
|
2787
4159
|
activityLog;
|
|
2788
4160
|
customInstructions;
|
|
4161
|
+
defaultInstructionsPath;
|
|
4162
|
+
memoryStore = null;
|
|
4163
|
+
memoryDbPath;
|
|
2789
4164
|
constructor(options) {
|
|
2790
4165
|
this.config = options.config;
|
|
2791
4166
|
this.supervisorDir = options.supervisorDir;
|
|
@@ -2793,6 +4168,17 @@ var HeartbeatLoop = class {
|
|
|
2793
4168
|
this.sessionId = options.sessionId;
|
|
2794
4169
|
this.eventQueue = options.eventQueue;
|
|
2795
4170
|
this.activityLog = options.activityLog;
|
|
4171
|
+
this.defaultInstructionsPath = options.defaultInstructionsPath;
|
|
4172
|
+
this.memoryDbPath = options.memoryDbPath;
|
|
4173
|
+
}
|
|
4174
|
+
getMemoryStore() {
|
|
4175
|
+
if (!this.memoryStore && this.memoryDbPath) {
|
|
4176
|
+
try {
|
|
4177
|
+
this.memoryStore = new MemoryStore(this.memoryDbPath);
|
|
4178
|
+
} catch {
|
|
4179
|
+
}
|
|
4180
|
+
}
|
|
4181
|
+
return this.memoryStore;
|
|
2796
4182
|
}
|
|
2797
4183
|
async start() {
|
|
2798
4184
|
this.customInstructions = await this.loadInstructions();
|
|
@@ -2807,7 +4193,7 @@ var HeartbeatLoop = class {
|
|
|
2807
4193
|
await this.activityLog.log("error", `Heartbeat failed: ${msg}`, { error: msg });
|
|
2808
4194
|
if (this.consecutiveFailures >= this.config.supervisor.maxConsecutiveFailures) {
|
|
2809
4195
|
const backoffMs = Math.min(
|
|
2810
|
-
this.config.supervisor.
|
|
4196
|
+
this.config.supervisor.eventTimeoutMs * 2 ** (this.consecutiveFailures - this.config.supervisor.maxConsecutiveFailures),
|
|
2811
4197
|
15 * 60 * 1e3
|
|
2812
4198
|
// max 15 minutes
|
|
2813
4199
|
);
|
|
@@ -2820,7 +4206,7 @@ var HeartbeatLoop = class {
|
|
|
2820
4206
|
}
|
|
2821
4207
|
}
|
|
2822
4208
|
if (this.stopping) break;
|
|
2823
|
-
await this.eventQueue.waitForEvent(this.config.supervisor.
|
|
4209
|
+
await this.eventQueue.waitForEvent(this.config.supervisor.eventTimeoutMs);
|
|
2824
4210
|
}
|
|
2825
4211
|
await this.activityLog.log("heartbeat", "Supervisor heartbeat loop stopped");
|
|
2826
4212
|
}
|
|
@@ -2831,57 +4217,234 @@ var HeartbeatLoop = class {
|
|
|
2831
4217
|
}
|
|
2832
4218
|
async runHeartbeat() {
|
|
2833
4219
|
const startTime = Date.now();
|
|
2834
|
-
const heartbeatId =
|
|
4220
|
+
const heartbeatId = randomUUID5();
|
|
2835
4221
|
const state = await this.readState();
|
|
2836
4222
|
const today = (/* @__PURE__ */ new Date()).toISOString().slice(0, 10);
|
|
4223
|
+
const budgetCheck = await this.checkBudgetExceeded(state, today);
|
|
4224
|
+
if (budgetCheck.exceeded) return;
|
|
4225
|
+
const grouped = this.eventQueue.drainAndGroup();
|
|
4226
|
+
const totalEventCount = grouped.messages.length + grouped.webhooks.length + grouped.runCompletions.length;
|
|
4227
|
+
const activeRuns = await this.getActiveRuns();
|
|
4228
|
+
const skipResult = await this.handleSkipLogic({
|
|
4229
|
+
state,
|
|
4230
|
+
totalEventCount,
|
|
4231
|
+
activeRuns
|
|
4232
|
+
});
|
|
4233
|
+
if (skipResult.shouldSkip) return;
|
|
4234
|
+
if (skipResult.resetCounters) {
|
|
4235
|
+
await this.updateState({ idleSkipCount: 0, activeWorkSkipCount: 0 });
|
|
4236
|
+
}
|
|
4237
|
+
const modeResult = await this.determineHeartbeatMode(state);
|
|
4238
|
+
const { prompt, modeLabel } = await this.buildHeartbeatModePrompt({
|
|
4239
|
+
grouped,
|
|
4240
|
+
todayCost: budgetCheck.todayCost,
|
|
4241
|
+
heartbeatCount: modeResult.heartbeatCount,
|
|
4242
|
+
unconsolidated: modeResult.unconsolidated,
|
|
4243
|
+
isCompaction: modeResult.isCompaction,
|
|
4244
|
+
isConsolidation: modeResult.isConsolidation,
|
|
4245
|
+
activeRuns,
|
|
4246
|
+
lastHeartbeat: state?.lastHeartbeat,
|
|
4247
|
+
lastConsolidationTimestamp: modeResult.lastConsolidationTs
|
|
4248
|
+
});
|
|
4249
|
+
await this.activityLog.log(
|
|
4250
|
+
"heartbeat",
|
|
4251
|
+
`Heartbeat #${modeResult.heartbeatCount} starting (${modeLabel})`,
|
|
4252
|
+
{
|
|
4253
|
+
heartbeatId,
|
|
4254
|
+
eventCount: totalEventCount,
|
|
4255
|
+
messages: grouped.messages.length,
|
|
4256
|
+
webhooks: grouped.webhooks.length,
|
|
4257
|
+
runCompletions: grouped.runCompletions.length,
|
|
4258
|
+
isConsolidation: modeResult.isConsolidation
|
|
4259
|
+
}
|
|
4260
|
+
);
|
|
4261
|
+
const { costUsd, turnCount } = await this.callSdk(prompt, heartbeatId);
|
|
4262
|
+
if (modeResult.isConsolidation) {
|
|
4263
|
+
const allIds = modeResult.unconsolidated.map((e) => e.id);
|
|
4264
|
+
if (allIds.length > 0) {
|
|
4265
|
+
await markConsolidated(this.supervisorDir, allIds);
|
|
4266
|
+
}
|
|
4267
|
+
await compactLogBuffer(this.supervisorDir);
|
|
4268
|
+
}
|
|
4269
|
+
const durationMs = Date.now() - startTime;
|
|
4270
|
+
const { stateUpdate } = this.buildStateUpdate({
|
|
4271
|
+
state,
|
|
4272
|
+
today,
|
|
4273
|
+
todayCost: budgetCheck.todayCost,
|
|
4274
|
+
costUsd,
|
|
4275
|
+
heartbeatCount: modeResult.heartbeatCount,
|
|
4276
|
+
isConsolidation: modeResult.isConsolidation,
|
|
4277
|
+
isCompaction: modeResult.isCompaction
|
|
4278
|
+
});
|
|
4279
|
+
await this.updateState(stateUpdate);
|
|
4280
|
+
await this.activityLog.log(
|
|
4281
|
+
"heartbeat",
|
|
4282
|
+
`Heartbeat #${modeResult.heartbeatCount + 1} complete (${modeLabel})`,
|
|
4283
|
+
{
|
|
4284
|
+
heartbeatId,
|
|
4285
|
+
costUsd,
|
|
4286
|
+
durationMs,
|
|
4287
|
+
turnCount,
|
|
4288
|
+
isConsolidation: modeResult.isConsolidation
|
|
4289
|
+
}
|
|
4290
|
+
);
|
|
4291
|
+
}
|
|
4292
|
+
/**
|
|
4293
|
+
* Check if supervisor daily budget is exceeded.
|
|
4294
|
+
*/
|
|
4295
|
+
async checkBudgetExceeded(state, today) {
|
|
2837
4296
|
const todayCost = state?.costResetDate === today ? state.todayCostUsd ?? 0 : 0;
|
|
2838
4297
|
if (todayCost >= this.config.supervisor.dailyCapUsd) {
|
|
2839
4298
|
await this.activityLog.log(
|
|
2840
4299
|
"error",
|
|
2841
4300
|
`Supervisor daily budget exceeded ($${todayCost.toFixed(2)} / $${this.config.supervisor.dailyCapUsd}). Skipping heartbeat.`
|
|
2842
4301
|
);
|
|
2843
|
-
await this.sleep(this.config.supervisor.
|
|
2844
|
-
return;
|
|
4302
|
+
await this.sleep(this.config.supervisor.eventTimeoutMs);
|
|
4303
|
+
return { todayCost, exceeded: true };
|
|
2845
4304
|
}
|
|
2846
|
-
|
|
2847
|
-
|
|
4305
|
+
return { todayCost, exceeded: false };
|
|
4306
|
+
}
|
|
4307
|
+
/**
|
|
4308
|
+
* Handle skip logic for idle and active-work scenarios.
|
|
4309
|
+
*/
|
|
4310
|
+
async handleSkipLogic(opts) {
|
|
4311
|
+
const { state, totalEventCount, activeRuns } = opts;
|
|
2848
4312
|
const idleSkipCount = state?.idleSkipCount ?? 0;
|
|
2849
|
-
|
|
2850
|
-
|
|
2851
|
-
|
|
2852
|
-
|
|
4313
|
+
const activeWorkSkipCount = state?.activeWorkSkipCount ?? 0;
|
|
4314
|
+
const hasActiveWork = activeRuns.length > 0;
|
|
4315
|
+
if (totalEventCount === 0) {
|
|
4316
|
+
if (hasActiveWork) {
|
|
4317
|
+
if (activeWorkSkipCount < DEFAULT_ACTIVE_WORK_SKIP_MAX) {
|
|
4318
|
+
await this.updateState({
|
|
4319
|
+
activeWorkSkipCount: activeWorkSkipCount + 1,
|
|
4320
|
+
idleSkipCount: 0
|
|
4321
|
+
});
|
|
4322
|
+
await this.activityLog.log(
|
|
4323
|
+
"heartbeat",
|
|
4324
|
+
`Active-work skip #${activeWorkSkipCount + 1}/${DEFAULT_ACTIVE_WORK_SKIP_MAX} \u2014 ${activeRuns.length} runs active, no events`
|
|
4325
|
+
);
|
|
4326
|
+
return { shouldSkip: true, resetCounters: false };
|
|
4327
|
+
}
|
|
4328
|
+
} else {
|
|
4329
|
+
if (idleSkipCount < DEFAULT_IDLE_SKIP_MAX) {
|
|
4330
|
+
await this.updateState({
|
|
4331
|
+
idleSkipCount: idleSkipCount + 1,
|
|
4332
|
+
activeWorkSkipCount: 0
|
|
4333
|
+
});
|
|
4334
|
+
await this.activityLog.log("heartbeat", `Idle skip #${idleSkipCount + 1} \u2014 no events`);
|
|
4335
|
+
return { shouldSkip: true, resetCounters: false };
|
|
4336
|
+
}
|
|
4337
|
+
}
|
|
2853
4338
|
}
|
|
2854
|
-
|
|
2855
|
-
|
|
4339
|
+
const needsReset = idleSkipCount > 0 || activeWorkSkipCount > 0;
|
|
4340
|
+
return { shouldSkip: false, resetCounters: needsReset };
|
|
4341
|
+
}
|
|
4342
|
+
/**
|
|
4343
|
+
* Determine heartbeat mode: compaction > consolidation > standard.
|
|
4344
|
+
*/
|
|
4345
|
+
async determineHeartbeatMode(state) {
|
|
4346
|
+
const heartbeatCount = state?.heartbeatCount ?? 0;
|
|
4347
|
+
const lastConsolidation = state?.lastConsolidationHeartbeat ?? 0;
|
|
4348
|
+
const lastCompaction = state?.lastCompactionHeartbeat ?? 0;
|
|
4349
|
+
const lastConsolidationTs = state?.lastConsolidationTimestamp;
|
|
4350
|
+
const unconsolidated = await readUnconsolidated(this.supervisorDir);
|
|
4351
|
+
const hasNewEntriesSinceLastConsolidation = lastConsolidationTs ? unconsolidated.some((e) => e.timestamp > lastConsolidationTs) : unconsolidated.length > 0;
|
|
4352
|
+
const hasPendingEntries = unconsolidated.length > 0;
|
|
4353
|
+
const isCompaction = shouldCompact(heartbeatCount, lastCompaction);
|
|
4354
|
+
const wouldConsolidate = shouldConsolidate(
|
|
4355
|
+
heartbeatCount,
|
|
4356
|
+
lastConsolidation,
|
|
4357
|
+
DEFAULT_CONSOLIDATION_INTERVAL,
|
|
4358
|
+
hasPendingEntries
|
|
4359
|
+
);
|
|
4360
|
+
const isConsolidation = isCompaction || wouldConsolidate && hasNewEntriesSinceLastConsolidation;
|
|
4361
|
+
return {
|
|
4362
|
+
isConsolidation,
|
|
4363
|
+
isCompaction,
|
|
4364
|
+
unconsolidated,
|
|
4365
|
+
heartbeatCount,
|
|
4366
|
+
lastConsolidation,
|
|
4367
|
+
lastConsolidationTs
|
|
4368
|
+
};
|
|
4369
|
+
}
|
|
4370
|
+
/**
|
|
4371
|
+
* Build the state update object after heartbeat completion.
|
|
4372
|
+
*/
|
|
4373
|
+
buildStateUpdate(opts) {
|
|
4374
|
+
const stateUpdate = {
|
|
4375
|
+
sessionId: this.sessionId,
|
|
4376
|
+
lastHeartbeat: (/* @__PURE__ */ new Date()).toISOString(),
|
|
4377
|
+
heartbeatCount: opts.heartbeatCount + 1,
|
|
4378
|
+
totalCostUsd: (opts.state?.totalCostUsd ?? 0) + opts.costUsd,
|
|
4379
|
+
todayCostUsd: opts.todayCost + opts.costUsd,
|
|
4380
|
+
costResetDate: opts.today
|
|
4381
|
+
};
|
|
4382
|
+
if (opts.isConsolidation) {
|
|
4383
|
+
stateUpdate.lastConsolidationHeartbeat = opts.heartbeatCount + 1;
|
|
4384
|
+
stateUpdate.lastConsolidationTimestamp = (/* @__PURE__ */ new Date()).toISOString();
|
|
2856
4385
|
}
|
|
2857
|
-
|
|
2858
|
-
|
|
2859
|
-
|
|
4386
|
+
if (opts.isCompaction) {
|
|
4387
|
+
stateUpdate.lastCompactionHeartbeat = opts.heartbeatCount + 1;
|
|
4388
|
+
}
|
|
4389
|
+
return { stateUpdate };
|
|
4390
|
+
}
|
|
4391
|
+
/**
|
|
4392
|
+
* Build the prompt for the current heartbeat mode.
|
|
4393
|
+
*/
|
|
4394
|
+
async buildHeartbeatModePrompt(opts) {
|
|
2860
4395
|
const mcpServerNames = this.config.mcpServers ? Object.keys(this.config.mcpServers) : [];
|
|
2861
|
-
const
|
|
4396
|
+
const store = this.getMemoryStore();
|
|
4397
|
+
const memories = store ? store.query({ limit: 40, sortBy: "relevance" }) : [];
|
|
4398
|
+
const recentActions = await this.activityLog.tail(20);
|
|
4399
|
+
const sharedOpts = {
|
|
2862
4400
|
repos: this.config.repos,
|
|
2863
|
-
|
|
2864
|
-
knowledge,
|
|
2865
|
-
memorySizeKB: memoryCheck.sizeKB,
|
|
2866
|
-
grouped,
|
|
4401
|
+
grouped: opts.grouped,
|
|
2867
4402
|
budgetStatus: {
|
|
2868
|
-
todayUsd: todayCost,
|
|
4403
|
+
todayUsd: opts.todayCost,
|
|
2869
4404
|
capUsd: this.config.supervisor.dailyCapUsd,
|
|
2870
|
-
remainingPct: (this.config.supervisor.dailyCapUsd - todayCost) / this.config.supervisor.dailyCapUsd * 100
|
|
4405
|
+
remainingPct: (this.config.supervisor.dailyCapUsd - opts.todayCost) / this.config.supervisor.dailyCapUsd * 100
|
|
2871
4406
|
},
|
|
2872
|
-
activeRuns:
|
|
2873
|
-
|
|
2874
|
-
heartbeatCount: state?.heartbeatCount ?? 0,
|
|
4407
|
+
activeRuns: opts.activeRuns,
|
|
4408
|
+
heartbeatCount: opts.heartbeatCount,
|
|
2875
4409
|
mcpServerNames,
|
|
2876
|
-
customInstructions: this.customInstructions
|
|
2877
|
-
|
|
2878
|
-
|
|
2879
|
-
|
|
2880
|
-
|
|
2881
|
-
|
|
2882
|
-
|
|
2883
|
-
|
|
2884
|
-
|
|
4410
|
+
customInstructions: this.customInstructions,
|
|
4411
|
+
supervisorDir: this.supervisorDir,
|
|
4412
|
+
memories,
|
|
4413
|
+
recentActions
|
|
4414
|
+
};
|
|
4415
|
+
if (opts.isCompaction) {
|
|
4416
|
+
return {
|
|
4417
|
+
prompt: buildCompactionPrompt({
|
|
4418
|
+
...sharedOpts,
|
|
4419
|
+
lastConsolidationTimestamp: opts.lastConsolidationTimestamp
|
|
4420
|
+
}),
|
|
4421
|
+
modeLabel: "compaction"
|
|
4422
|
+
};
|
|
4423
|
+
}
|
|
4424
|
+
if (opts.isConsolidation) {
|
|
4425
|
+
return {
|
|
4426
|
+
prompt: buildConsolidationPrompt({
|
|
4427
|
+
...sharedOpts,
|
|
4428
|
+
lastConsolidationTimestamp: opts.lastConsolidationTimestamp
|
|
4429
|
+
}),
|
|
4430
|
+
modeLabel: "consolidation"
|
|
4431
|
+
};
|
|
4432
|
+
}
|
|
4433
|
+
if (isIdleHeartbeat(sharedOpts)) {
|
|
4434
|
+
return {
|
|
4435
|
+
prompt: buildIdlePrompt(sharedOpts),
|
|
4436
|
+
modeLabel: "idle"
|
|
4437
|
+
};
|
|
4438
|
+
}
|
|
4439
|
+
return {
|
|
4440
|
+
prompt: buildStandardPrompt(sharedOpts),
|
|
4441
|
+
modeLabel: "standard"
|
|
4442
|
+
};
|
|
4443
|
+
}
|
|
4444
|
+
/**
|
|
4445
|
+
* Call the Claude SDK and stream results.
|
|
4446
|
+
*/
|
|
4447
|
+
async callSdk(prompt, heartbeatId) {
|
|
2885
4448
|
const abortController = new AbortController();
|
|
2886
4449
|
this.activeAbort = abortController;
|
|
2887
4450
|
const timeout = setTimeout(() => {
|
|
@@ -2892,41 +4455,27 @@ var HeartbeatLoop = class {
|
|
|
2892
4455
|
let turnCount = 0;
|
|
2893
4456
|
try {
|
|
2894
4457
|
const sdk = await import("@anthropic-ai/claude-agent-sdk");
|
|
2895
|
-
const allowedTools = ["Bash", "Read"
|
|
4458
|
+
const allowedTools = ["Bash", "Read"];
|
|
2896
4459
|
if (this.config.mcpServers) {
|
|
2897
4460
|
for (const name of Object.keys(this.config.mcpServers)) {
|
|
2898
4461
|
allowedTools.push(`mcp__${name}__*`);
|
|
2899
4462
|
}
|
|
2900
4463
|
}
|
|
2901
|
-
const mcpInternalPath = path12.join(
|
|
2902
|
-
path12.dirname(fileURLToPath(import.meta.url)),
|
|
2903
|
-
"mcp-internal.js"
|
|
2904
|
-
);
|
|
2905
|
-
const mcpServers = {
|
|
2906
|
-
neo: {
|
|
2907
|
-
type: "stdio",
|
|
2908
|
-
command: "node",
|
|
2909
|
-
args: [mcpInternalPath],
|
|
2910
|
-
env: { NEO_ACTIVITY_PATH: this.activityLog.filePath }
|
|
2911
|
-
},
|
|
2912
|
-
...this.config.mcpServers ?? {}
|
|
2913
|
-
};
|
|
2914
4464
|
const queryOptions = {
|
|
2915
4465
|
cwd: homedir2(),
|
|
2916
|
-
maxTurns: 50,
|
|
2917
4466
|
allowedTools,
|
|
2918
4467
|
permissionMode: "bypassPermissions",
|
|
2919
4468
|
allowDangerouslySkipPermissions: true,
|
|
2920
|
-
mcpServers
|
|
4469
|
+
mcpServers: this.config.mcpServers ?? {}
|
|
2921
4470
|
};
|
|
2922
4471
|
const stream = sdk.query({ prompt, options: queryOptions });
|
|
2923
4472
|
for await (const message of stream) {
|
|
2924
4473
|
if (abortController.signal.aborted) break;
|
|
2925
4474
|
const msg = message;
|
|
2926
|
-
if (msg
|
|
4475
|
+
if (isInitMessage(msg)) {
|
|
2927
4476
|
this.sessionId = msg.session_id;
|
|
2928
4477
|
}
|
|
2929
|
-
if (msg
|
|
4478
|
+
if (isResultMessage(msg)) {
|
|
2930
4479
|
output = msg.result ?? "";
|
|
2931
4480
|
costUsd = msg.total_cost_usd ?? 0;
|
|
2932
4481
|
turnCount = msg.num_turns ?? 0;
|
|
@@ -2937,39 +4486,11 @@ var HeartbeatLoop = class {
|
|
|
2937
4486
|
clearTimeout(timeout);
|
|
2938
4487
|
this.activeAbort = null;
|
|
2939
4488
|
}
|
|
2940
|
-
|
|
2941
|
-
if (newMemory) {
|
|
2942
|
-
await saveMemory(this.supervisorDir, newMemory);
|
|
2943
|
-
}
|
|
2944
|
-
const newKnowledge = extractKnowledgeFromResponse(output);
|
|
2945
|
-
if (newKnowledge) {
|
|
2946
|
-
await saveKnowledge(this.supervisorDir, newKnowledge);
|
|
2947
|
-
}
|
|
2948
|
-
const durationMs = Date.now() - startTime;
|
|
2949
|
-
await this.updateState({
|
|
2950
|
-
sessionId: this.sessionId,
|
|
2951
|
-
lastHeartbeat: (/* @__PURE__ */ new Date()).toISOString(),
|
|
2952
|
-
heartbeatCount: (state?.heartbeatCount ?? 0) + 1,
|
|
2953
|
-
totalCostUsd: (state?.totalCostUsd ?? 0) + costUsd,
|
|
2954
|
-
todayCostUsd: todayCost + costUsd,
|
|
2955
|
-
costResetDate: today
|
|
2956
|
-
});
|
|
2957
|
-
await this.activityLog.log(
|
|
2958
|
-
"heartbeat",
|
|
2959
|
-
`Heartbeat #${(state?.heartbeatCount ?? 0) + 1} complete`,
|
|
2960
|
-
{
|
|
2961
|
-
heartbeatId,
|
|
2962
|
-
costUsd,
|
|
2963
|
-
durationMs,
|
|
2964
|
-
turnCount,
|
|
2965
|
-
memoryUpdated: !!newMemory,
|
|
2966
|
-
responseSummary: output
|
|
2967
|
-
}
|
|
2968
|
-
);
|
|
4489
|
+
return { output, costUsd, turnCount };
|
|
2969
4490
|
}
|
|
2970
4491
|
async readState() {
|
|
2971
4492
|
try {
|
|
2972
|
-
const raw = await
|
|
4493
|
+
const raw = await readFile11(this.statePath, "utf-8");
|
|
2973
4494
|
return JSON.parse(raw);
|
|
2974
4495
|
} catch {
|
|
2975
4496
|
return null;
|
|
@@ -2977,29 +4498,63 @@ var HeartbeatLoop = class {
|
|
|
2977
4498
|
}
|
|
2978
4499
|
async updateState(updates) {
|
|
2979
4500
|
try {
|
|
2980
|
-
const raw = await
|
|
4501
|
+
const raw = await readFile11(this.statePath, "utf-8");
|
|
2981
4502
|
const state = JSON.parse(raw);
|
|
2982
4503
|
Object.assign(state, updates);
|
|
2983
4504
|
await writeFile5(this.statePath, JSON.stringify(state, null, 2), "utf-8");
|
|
2984
4505
|
} catch {
|
|
2985
4506
|
}
|
|
2986
4507
|
}
|
|
4508
|
+
/** Read persisted run files and return summaries of active (running/paused) runs. */
|
|
4509
|
+
async getActiveRuns() {
|
|
4510
|
+
const runsDir = getRunsDir();
|
|
4511
|
+
if (!existsSync7(runsDir)) return [];
|
|
4512
|
+
try {
|
|
4513
|
+
const entries = await readdir5(runsDir, { withFileTypes: true });
|
|
4514
|
+
const active = [];
|
|
4515
|
+
for (const entry of entries) {
|
|
4516
|
+
if (!entry.isDirectory()) continue;
|
|
4517
|
+
const subDir = path14.join(runsDir, entry.name);
|
|
4518
|
+
const files = await readdir5(subDir);
|
|
4519
|
+
for (const f of files) {
|
|
4520
|
+
if (!f.endsWith(".json")) continue;
|
|
4521
|
+
try {
|
|
4522
|
+
const raw = await readFile11(path14.join(subDir, f), "utf-8");
|
|
4523
|
+
const run = JSON.parse(raw);
|
|
4524
|
+
if (run.status === "running" || run.status === "paused") {
|
|
4525
|
+
active.push(
|
|
4526
|
+
`${run.runId} [${run.status}] ${run.workflow} on ${path14.basename(run.repo)}`
|
|
4527
|
+
);
|
|
4528
|
+
}
|
|
4529
|
+
} catch {
|
|
4530
|
+
}
|
|
4531
|
+
}
|
|
4532
|
+
}
|
|
4533
|
+
return active;
|
|
4534
|
+
} catch {
|
|
4535
|
+
return [];
|
|
4536
|
+
}
|
|
4537
|
+
}
|
|
2987
4538
|
/**
|
|
2988
4539
|
* Load custom instructions from SUPERVISOR.md.
|
|
2989
4540
|
* Resolution order:
|
|
2990
4541
|
* 1. Explicit path via `supervisor.instructions` in config
|
|
2991
|
-
* 2.
|
|
4542
|
+
* 2. User default: ~/.neo/SUPERVISOR.md
|
|
4543
|
+
* 3. Bundled default from @neotx/agents (if path provided)
|
|
2992
4544
|
*/
|
|
2993
4545
|
async loadInstructions() {
|
|
2994
4546
|
const candidates = [];
|
|
2995
4547
|
if (this.config.supervisor.instructions) {
|
|
2996
|
-
candidates.push(
|
|
4548
|
+
candidates.push(path14.resolve(this.config.supervisor.instructions));
|
|
4549
|
+
}
|
|
4550
|
+
candidates.push(path14.join(getDataDir(), "SUPERVISOR.md"));
|
|
4551
|
+
if (this.defaultInstructionsPath) {
|
|
4552
|
+
candidates.push(this.defaultInstructionsPath);
|
|
2997
4553
|
}
|
|
2998
|
-
candidates.push(path12.join(getDataDir(), "SUPERVISOR.md"));
|
|
2999
4554
|
for (const filePath of candidates) {
|
|
3000
4555
|
try {
|
|
3001
|
-
const content = await
|
|
3002
|
-
await this.activityLog.log("event", `Loaded
|
|
4556
|
+
const content = await readFile11(filePath, "utf-8");
|
|
4557
|
+
await this.activityLog.log("event", `Loaded instructions from ${filePath}`);
|
|
3003
4558
|
return content;
|
|
3004
4559
|
} catch {
|
|
3005
4560
|
}
|
|
@@ -3008,17 +4563,17 @@ var HeartbeatLoop = class {
|
|
|
3008
4563
|
}
|
|
3009
4564
|
/** Route a single SDK stream message to the appropriate log handler. */
|
|
3010
4565
|
async logStreamMessage(msg, heartbeatId) {
|
|
3011
|
-
if (msg
|
|
3012
|
-
if (!msg.subtype) {
|
|
4566
|
+
if (isAssistantMessage(msg)) {
|
|
3013
4567
|
await this.logContentBlocks(msg, heartbeatId);
|
|
3014
|
-
} else if (msg
|
|
4568
|
+
} else if (isToolUseMessage(msg)) {
|
|
3015
4569
|
await this.logToolUse(msg, heartbeatId);
|
|
3016
|
-
} else if (msg
|
|
4570
|
+
} else if (isToolResultMessage(msg)) {
|
|
3017
4571
|
await this.logToolResult(msg, heartbeatId);
|
|
3018
4572
|
}
|
|
3019
4573
|
}
|
|
3020
4574
|
/** Log thinking and plan blocks from assistant content — no truncation. */
|
|
3021
4575
|
async logContentBlocks(msg, heartbeatId) {
|
|
4576
|
+
if (!isAssistantMessage(msg)) return;
|
|
3022
4577
|
const content = msg.message?.content;
|
|
3023
4578
|
if (!content) return;
|
|
3024
4579
|
for (const block of content) {
|
|
@@ -3033,7 +4588,8 @@ var HeartbeatLoop = class {
|
|
|
3033
4588
|
}
|
|
3034
4589
|
/** Log tool use events — distinguish MCP tools from built-in tools. */
|
|
3035
4590
|
async logToolUse(msg, heartbeatId) {
|
|
3036
|
-
|
|
4591
|
+
if (!isToolUseMessage(msg)) return;
|
|
4592
|
+
const toolName = msg.tool;
|
|
3037
4593
|
const isMcp = toolName.startsWith("mcp__");
|
|
3038
4594
|
await this.activityLog.log(
|
|
3039
4595
|
isMcp ? "tool_use" : "action",
|
|
@@ -3043,7 +4599,8 @@ var HeartbeatLoop = class {
|
|
|
3043
4599
|
}
|
|
3044
4600
|
/** Detect agent dispatches from bash tool results. */
|
|
3045
4601
|
async logToolResult(msg, heartbeatId) {
|
|
3046
|
-
|
|
4602
|
+
if (!isToolResultMessage(msg)) return;
|
|
4603
|
+
const result = msg.result ?? "";
|
|
3047
4604
|
const runMatch = /Run\s+(\S+)\s+dispatched/i.exec(result);
|
|
3048
4605
|
if (runMatch) {
|
|
3049
4606
|
await this.activityLog.log("dispatch", `Agent dispatched: ${runMatch[1]}`, {
|
|
@@ -3058,7 +4615,7 @@ var HeartbeatLoop = class {
|
|
|
3058
4615
|
};
|
|
3059
4616
|
|
|
3060
4617
|
// src/supervisor/webhook-server.ts
|
|
3061
|
-
import { timingSafeEqual } from "crypto";
|
|
4618
|
+
import { createHmac as createHmac2, timingSafeEqual } from "crypto";
|
|
3062
4619
|
import { appendFile as appendFile6 } from "fs/promises";
|
|
3063
4620
|
import { createServer } from "http";
|
|
3064
4621
|
var MAX_BODY_SIZE = 1024 * 1024;
|
|
@@ -3111,24 +4668,25 @@ var WebhookServer = class {
|
|
|
3111
4668
|
this.sendJson(res, 404, { error: "Not found" });
|
|
3112
4669
|
}
|
|
3113
4670
|
async handleWebhook(req, res) {
|
|
4671
|
+
const body = await this.readBody(req);
|
|
4672
|
+
if (body === null) {
|
|
4673
|
+
this.sendJson(res, 413, { error: "Payload too large (max 1MB)" });
|
|
4674
|
+
return;
|
|
4675
|
+
}
|
|
3114
4676
|
if (this.secret) {
|
|
3115
|
-
const
|
|
3116
|
-
if (!
|
|
3117
|
-
this.sendJson(res, 401, { error: "Missing X-Neo-
|
|
4677
|
+
const signature = req.headers["x-neo-signature"];
|
|
4678
|
+
if (!signature) {
|
|
4679
|
+
this.sendJson(res, 401, { error: "Missing X-Neo-Signature header" });
|
|
3118
4680
|
return;
|
|
3119
4681
|
}
|
|
3120
|
-
const expected =
|
|
3121
|
-
const
|
|
3122
|
-
|
|
3123
|
-
|
|
4682
|
+
const expected = createHmac2("sha256", this.secret).update(body).digest("hex");
|
|
4683
|
+
const expectedBuf = Buffer.from(expected, "utf-8");
|
|
4684
|
+
const actualBuf = Buffer.from(signature, "utf-8");
|
|
4685
|
+
if (expectedBuf.length !== actualBuf.length || !timingSafeEqual(expectedBuf, actualBuf)) {
|
|
4686
|
+
this.sendJson(res, 403, { error: "Invalid signature" });
|
|
3124
4687
|
return;
|
|
3125
4688
|
}
|
|
3126
4689
|
}
|
|
3127
|
-
const body = await this.readBody(req);
|
|
3128
|
-
if (body === null) {
|
|
3129
|
-
this.sendJson(res, 413, { error: "Payload too large (max 1MB)" });
|
|
3130
|
-
return;
|
|
3131
|
-
}
|
|
3132
4690
|
let parsed;
|
|
3133
4691
|
try {
|
|
3134
4692
|
parsed = JSON.parse(body);
|
|
@@ -3178,6 +4736,7 @@ var SupervisorDaemon = class {
|
|
|
3178
4736
|
name;
|
|
3179
4737
|
config;
|
|
3180
4738
|
dir;
|
|
4739
|
+
defaultInstructionsPath;
|
|
3181
4740
|
webhookServer = null;
|
|
3182
4741
|
eventQueue = null;
|
|
3183
4742
|
heartbeatLoop = null;
|
|
@@ -3187,13 +4746,14 @@ var SupervisorDaemon = class {
|
|
|
3187
4746
|
this.name = options.name;
|
|
3188
4747
|
this.config = options.config;
|
|
3189
4748
|
this.dir = getSupervisorDir(options.name);
|
|
4749
|
+
this.defaultInstructionsPath = options.defaultInstructionsPath;
|
|
3190
4750
|
}
|
|
3191
4751
|
async start() {
|
|
3192
|
-
await
|
|
3193
|
-
const lockPath =
|
|
3194
|
-
if (
|
|
4752
|
+
await mkdir7(this.dir, { recursive: true });
|
|
4753
|
+
const lockPath = path15.join(this.dir, "daemon.lock");
|
|
4754
|
+
if (existsSync8(lockPath)) {
|
|
3195
4755
|
const lockPid = await this.readLockPid(lockPath);
|
|
3196
|
-
if (lockPid &&
|
|
4756
|
+
if (lockPid && isProcessAlive(lockPid)) {
|
|
3197
4757
|
throw new Error(
|
|
3198
4758
|
`Supervisor "${this.name}" already running (PID ${lockPid}). Use --kill first.`
|
|
3199
4759
|
);
|
|
@@ -3202,35 +4762,44 @@ var SupervisorDaemon = class {
|
|
|
3202
4762
|
}
|
|
3203
4763
|
const tempLock = `${lockPath}.${process.pid}`;
|
|
3204
4764
|
await writeFile6(tempLock, String(process.pid), "utf-8");
|
|
3205
|
-
const { rename:
|
|
3206
|
-
await
|
|
4765
|
+
const { rename: rename2 } = await import("fs/promises");
|
|
4766
|
+
await rename2(tempLock, lockPath);
|
|
3207
4767
|
const existingState = await this.readState();
|
|
3208
4768
|
if (existingState?.sessionId && existingState.status !== "stopped") {
|
|
3209
4769
|
this.sessionId = existingState.sessionId;
|
|
3210
4770
|
} else {
|
|
3211
|
-
this.sessionId =
|
|
4771
|
+
this.sessionId = randomUUID6();
|
|
3212
4772
|
}
|
|
3213
4773
|
this.activityLog = new ActivityLog(this.dir);
|
|
3214
4774
|
this.eventQueue = new EventQueue({
|
|
3215
4775
|
maxEventsPerSec: this.config.supervisor.maxEventsPerSec
|
|
3216
4776
|
});
|
|
3217
|
-
const inboxPath =
|
|
3218
|
-
const eventsPath =
|
|
4777
|
+
const inboxPath = path15.join(this.dir, "inbox.jsonl");
|
|
4778
|
+
const eventsPath = path15.join(this.dir, "events.jsonl");
|
|
3219
4779
|
await this.eventQueue.replayUnprocessed(inboxPath, eventsPath);
|
|
3220
|
-
this.eventQueue.startWatching(inboxPath, eventsPath);
|
|
4780
|
+
await this.eventQueue.startWatching(inboxPath, eventsPath);
|
|
3221
4781
|
this.webhookServer = new WebhookServer({
|
|
3222
4782
|
port: this.config.supervisor.port,
|
|
3223
4783
|
secret: this.config.supervisor.secret,
|
|
3224
4784
|
eventsPath,
|
|
3225
4785
|
onEvent: (event) => {
|
|
3226
4786
|
this.eventQueue?.push({ kind: "webhook", data: event });
|
|
4787
|
+
if ((event.event === "session:complete" || event.event === "session:fail") && event.payload) {
|
|
4788
|
+
const runId = typeof event.payload.runId === "string" ? event.payload.runId : void 0;
|
|
4789
|
+
if (runId) {
|
|
4790
|
+
this.eventQueue?.push({
|
|
4791
|
+
kind: "run_complete",
|
|
4792
|
+
runId,
|
|
4793
|
+
timestamp: event.receivedAt
|
|
4794
|
+
});
|
|
4795
|
+
}
|
|
4796
|
+
}
|
|
3227
4797
|
},
|
|
3228
4798
|
getHealth: () => this.getHealthInfo()
|
|
3229
4799
|
});
|
|
3230
4800
|
await this.webhookServer.start();
|
|
3231
4801
|
await this.writeState({
|
|
3232
4802
|
pid: process.pid,
|
|
3233
|
-
tmuxSession: `neo-${this.name}`,
|
|
3234
4803
|
sessionId: this.sessionId,
|
|
3235
4804
|
port: this.config.supervisor.port,
|
|
3236
4805
|
cwd: homedir3(),
|
|
@@ -3241,7 +4810,11 @@ var SupervisorDaemon = class {
|
|
|
3241
4810
|
todayCostUsd: existingState?.todayCostUsd ?? 0,
|
|
3242
4811
|
costResetDate: existingState?.costResetDate,
|
|
3243
4812
|
idleSkipCount: existingState?.idleSkipCount ?? 0,
|
|
3244
|
-
|
|
4813
|
+
activeWorkSkipCount: existingState?.activeWorkSkipCount ?? 0,
|
|
4814
|
+
status: "running",
|
|
4815
|
+
lastConsolidationHeartbeat: existingState?.lastConsolidationHeartbeat ?? 0,
|
|
4816
|
+
lastCompactionHeartbeat: existingState?.lastCompactionHeartbeat ?? 0,
|
|
4817
|
+
lastConsolidationTimestamp: existingState?.lastConsolidationTimestamp
|
|
3245
4818
|
});
|
|
3246
4819
|
const shutdown = () => {
|
|
3247
4820
|
this.stop().catch(console.error);
|
|
@@ -3252,14 +4825,15 @@ var SupervisorDaemon = class {
|
|
|
3252
4825
|
"event",
|
|
3253
4826
|
`Supervisor "${this.name}" started on port ${this.config.supervisor.port}`
|
|
3254
4827
|
);
|
|
3255
|
-
const statePath =
|
|
4828
|
+
const statePath = path15.join(this.dir, "state.json");
|
|
3256
4829
|
this.heartbeatLoop = new HeartbeatLoop({
|
|
3257
4830
|
config: this.config,
|
|
3258
4831
|
supervisorDir: this.dir,
|
|
3259
4832
|
statePath,
|
|
3260
4833
|
sessionId: this.sessionId,
|
|
3261
4834
|
eventQueue: this.eventQueue,
|
|
3262
|
-
activityLog: this.activityLog
|
|
4835
|
+
activityLog: this.activityLog,
|
|
4836
|
+
defaultInstructionsPath: this.defaultInstructionsPath
|
|
3263
4837
|
});
|
|
3264
4838
|
await this.heartbeatLoop.start();
|
|
3265
4839
|
}
|
|
@@ -3274,7 +4848,7 @@ var SupervisorDaemon = class {
|
|
|
3274
4848
|
state.status = "stopped";
|
|
3275
4849
|
await this.writeState(state);
|
|
3276
4850
|
}
|
|
3277
|
-
const lockPath =
|
|
4851
|
+
const lockPath = path15.join(this.dir, "daemon.lock");
|
|
3278
4852
|
await rm2(lockPath, { force: true });
|
|
3279
4853
|
if (this.activityLog) {
|
|
3280
4854
|
await this.activityLog.log("event", `Supervisor "${this.name}" stopped`);
|
|
@@ -3291,35 +4865,27 @@ var SupervisorDaemon = class {
|
|
|
3291
4865
|
};
|
|
3292
4866
|
}
|
|
3293
4867
|
async readState() {
|
|
3294
|
-
const statePath =
|
|
4868
|
+
const statePath = path15.join(this.dir, "state.json");
|
|
3295
4869
|
try {
|
|
3296
|
-
const raw = await
|
|
4870
|
+
const raw = await readFile12(statePath, "utf-8");
|
|
3297
4871
|
return JSON.parse(raw);
|
|
3298
4872
|
} catch {
|
|
3299
4873
|
return null;
|
|
3300
4874
|
}
|
|
3301
4875
|
}
|
|
3302
4876
|
async writeState(state) {
|
|
3303
|
-
const statePath =
|
|
4877
|
+
const statePath = path15.join(this.dir, "state.json");
|
|
3304
4878
|
await writeFile6(statePath, JSON.stringify(state, null, 2), "utf-8");
|
|
3305
4879
|
}
|
|
3306
4880
|
async readLockPid(lockPath) {
|
|
3307
4881
|
try {
|
|
3308
|
-
const raw = await
|
|
4882
|
+
const raw = await readFile12(lockPath, "utf-8");
|
|
3309
4883
|
const pid = Number.parseInt(raw.trim(), 10);
|
|
3310
4884
|
return Number.isNaN(pid) ? null : pid;
|
|
3311
4885
|
} catch {
|
|
3312
4886
|
return null;
|
|
3313
4887
|
}
|
|
3314
4888
|
}
|
|
3315
|
-
isProcessAlive(pid) {
|
|
3316
|
-
try {
|
|
3317
|
-
process.kill(pid, 0);
|
|
3318
|
-
return true;
|
|
3319
|
-
} catch {
|
|
3320
|
-
return false;
|
|
3321
|
-
}
|
|
3322
|
-
}
|
|
3323
4889
|
};
|
|
3324
4890
|
|
|
3325
4891
|
// src/index.ts
|
|
@@ -3331,10 +4897,13 @@ export {
|
|
|
3331
4897
|
EventJournal,
|
|
3332
4898
|
EventQueue,
|
|
3333
4899
|
HeartbeatLoop,
|
|
4900
|
+
LocalEmbedder,
|
|
4901
|
+
MemoryStore,
|
|
3334
4902
|
NeoEventEmitter,
|
|
3335
4903
|
Orchestrator,
|
|
3336
4904
|
Semaphore,
|
|
3337
4905
|
SessionError,
|
|
4906
|
+
SessionExecutor,
|
|
3338
4907
|
SupervisorDaemon,
|
|
3339
4908
|
VERSION,
|
|
3340
4909
|
WebhookDispatcher,
|
|
@@ -3347,18 +4916,18 @@ export {
|
|
|
3347
4916
|
agentSandboxSchema,
|
|
3348
4917
|
agentToolEntrySchema,
|
|
3349
4918
|
agentToolSchema,
|
|
4919
|
+
appendLogBuffer,
|
|
3350
4920
|
auditLog,
|
|
3351
4921
|
budgetGuard,
|
|
3352
|
-
|
|
4922
|
+
buildFullPrompt,
|
|
4923
|
+
buildGitStrategyInstructions,
|
|
3353
4924
|
buildMiddlewareChain,
|
|
4925
|
+
buildReportingInstructions,
|
|
3354
4926
|
buildSDKHooks,
|
|
3355
4927
|
buildSandboxConfig,
|
|
3356
|
-
checkMemorySize,
|
|
3357
|
-
cleanupOrphanedWorktrees,
|
|
3358
4928
|
createBranch,
|
|
3359
|
-
|
|
4929
|
+
createSessionClone,
|
|
3360
4930
|
deleteBranch,
|
|
3361
|
-
extractMemoryFromResponse,
|
|
3362
4931
|
fetchRemote,
|
|
3363
4932
|
getBranchName,
|
|
3364
4933
|
getCurrentBranch,
|
|
@@ -3373,17 +4942,17 @@ export {
|
|
|
3373
4942
|
getSupervisorEventsPath,
|
|
3374
4943
|
getSupervisorInboxPath,
|
|
3375
4944
|
getSupervisorLockPath,
|
|
3376
|
-
getSupervisorMemoryPath,
|
|
3377
4945
|
getSupervisorStatePath,
|
|
3378
4946
|
getSupervisorsDir,
|
|
3379
4947
|
globalConfigSchema,
|
|
3380
4948
|
inboxMessageSchema,
|
|
4949
|
+
isProcessAlive,
|
|
3381
4950
|
listReposFromGlobalConfig,
|
|
3382
|
-
|
|
4951
|
+
listSessionClones,
|
|
3383
4952
|
loadAgentFile,
|
|
3384
4953
|
loadConfig,
|
|
3385
4954
|
loadGlobalConfig,
|
|
3386
|
-
|
|
4955
|
+
loadRepoInstructions,
|
|
3387
4956
|
loadWorkflow,
|
|
3388
4957
|
loopDetection,
|
|
3389
4958
|
matchesFilter,
|
|
@@ -3391,18 +4960,17 @@ export {
|
|
|
3391
4960
|
neoConfigSchema,
|
|
3392
4961
|
parseOutput,
|
|
3393
4962
|
pushBranch,
|
|
4963
|
+
pushSessionBranch,
|
|
3394
4964
|
removeRepoFromGlobalConfig,
|
|
3395
|
-
|
|
4965
|
+
removeSessionClone,
|
|
3396
4966
|
repoConfigSchema,
|
|
3397
4967
|
resolveAgent,
|
|
3398
4968
|
runSession,
|
|
3399
4969
|
runWithRecovery,
|
|
3400
|
-
saveMemory,
|
|
3401
4970
|
supervisorDaemonStateSchema,
|
|
3402
4971
|
supervisorDaemonStateSchema as supervisorStateSchema,
|
|
3403
4972
|
toRepoSlug,
|
|
3404
4973
|
webhookIncomingEventSchema,
|
|
3405
|
-
withGitLock,
|
|
3406
4974
|
workflowGateDefSchema,
|
|
3407
4975
|
workflowStepDefSchema
|
|
3408
4976
|
};
|