@agntk/agent-harness 0.1.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/NOTICE +41 -0
- package/README.md +445 -0
- package/defaults/agents/summarizer.md +49 -0
- package/defaults/instincts/lead-with-answer.md +24 -0
- package/defaults/instincts/qualify-before-recommending.md +40 -0
- package/defaults/instincts/read-before-edit.md +23 -0
- package/defaults/instincts/search-before-create.md +23 -0
- package/defaults/playbooks/ship-feature.md +31 -0
- package/defaults/rules/ask-before-assuming.md +35 -0
- package/defaults/rules/operations.md +35 -0
- package/defaults/rules/respect-the-user.md +39 -0
- package/defaults/skills/business-analyst.md +181 -0
- package/defaults/skills/content-marketer.md +184 -0
- package/defaults/skills/research.md +34 -0
- package/defaults/tools/example-web-search.md +60 -0
- package/defaults/workflows/daily-reflection.md +54 -0
- package/dist/agent-framework-K4GUIICH.js +344 -0
- package/dist/agent-framework-K4GUIICH.js.map +1 -0
- package/dist/analytics-RPT73WNM.js +12 -0
- package/dist/analytics-RPT73WNM.js.map +1 -0
- package/dist/auto-processor-OLE45UI3.js +13 -0
- package/dist/auto-processor-OLE45UI3.js.map +1 -0
- package/dist/chunk-274RV3YO.js +162 -0
- package/dist/chunk-274RV3YO.js.map +1 -0
- package/dist/chunk-4CWAGBNS.js +168 -0
- package/dist/chunk-4CWAGBNS.js.map +1 -0
- package/dist/chunk-4FDUOGSZ.js +69 -0
- package/dist/chunk-4FDUOGSZ.js.map +1 -0
- package/dist/chunk-5H34JPMB.js +199 -0
- package/dist/chunk-5H34JPMB.js.map +1 -0
- package/dist/chunk-6EMOEYGU.js +102 -0
- package/dist/chunk-6EMOEYGU.js.map +1 -0
- package/dist/chunk-A7BJPQQ6.js +236 -0
- package/dist/chunk-A7BJPQQ6.js.map +1 -0
- package/dist/chunk-AGAAFJEO.js +76 -0
- package/dist/chunk-AGAAFJEO.js.map +1 -0
- package/dist/chunk-BSKDOFRT.js +65 -0
- package/dist/chunk-BSKDOFRT.js.map +1 -0
- package/dist/chunk-CHJ5GNZC.js +100 -0
- package/dist/chunk-CHJ5GNZC.js.map +1 -0
- package/dist/chunk-CSL3ERUI.js +307 -0
- package/dist/chunk-CSL3ERUI.js.map +1 -0
- package/dist/chunk-DA7IKHC4.js +229 -0
- package/dist/chunk-DA7IKHC4.js.map +1 -0
- package/dist/chunk-DGUM43GV.js +11 -0
- package/dist/chunk-DGUM43GV.js.map +1 -0
- package/dist/chunk-DTTXPHFW.js +211 -0
- package/dist/chunk-DTTXPHFW.js.map +1 -0
- package/dist/chunk-FD55B3IO.js +204 -0
- package/dist/chunk-FD55B3IO.js.map +1 -0
- package/dist/chunk-FLZU44SV.js +230 -0
- package/dist/chunk-FLZU44SV.js.map +1 -0
- package/dist/chunk-GJNNR2RA.js +200 -0
- package/dist/chunk-GJNNR2RA.js.map +1 -0
- package/dist/chunk-GNUSHD2Y.js +111 -0
- package/dist/chunk-GNUSHD2Y.js.map +1 -0
- package/dist/chunk-GUJTBGVS.js +2212 -0
- package/dist/chunk-GUJTBGVS.js.map +1 -0
- package/dist/chunk-IZ6UZ3ZL.js +207 -0
- package/dist/chunk-IZ6UZ3ZL.js.map +1 -0
- package/dist/chunk-JKMGYWXB.js +197 -0
- package/dist/chunk-JKMGYWXB.js.map +1 -0
- package/dist/chunk-KFX54TQM.js +165 -0
- package/dist/chunk-KFX54TQM.js.map +1 -0
- package/dist/chunk-M7NXUK55.js +199 -0
- package/dist/chunk-M7NXUK55.js.map +1 -0
- package/dist/chunk-MPZ3BPUI.js +374 -0
- package/dist/chunk-MPZ3BPUI.js.map +1 -0
- package/dist/chunk-OC6YSTDX.js +119 -0
- package/dist/chunk-OC6YSTDX.js.map +1 -0
- package/dist/chunk-RC6MEZB6.js +469 -0
- package/dist/chunk-RC6MEZB6.js.map +1 -0
- package/dist/chunk-RY3ZFII7.js +3440 -0
- package/dist/chunk-RY3ZFII7.js.map +1 -0
- package/dist/chunk-TAT6JU3X.js +167 -0
- package/dist/chunk-TAT6JU3X.js.map +1 -0
- package/dist/chunk-UDZIS2AQ.js +79 -0
- package/dist/chunk-UDZIS2AQ.js.map +1 -0
- package/dist/chunk-UPLBF4RZ.js +115 -0
- package/dist/chunk-UPLBF4RZ.js.map +1 -0
- package/dist/chunk-UWQTZMNI.js +154 -0
- package/dist/chunk-UWQTZMNI.js.map +1 -0
- package/dist/chunk-W4T7PGI2.js +346 -0
- package/dist/chunk-W4T7PGI2.js.map +1 -0
- package/dist/chunk-XTBKL5BI.js +111 -0
- package/dist/chunk-XTBKL5BI.js.map +1 -0
- package/dist/chunk-YIJY5DBV.js +399 -0
- package/dist/chunk-YIJY5DBV.js.map +1 -0
- package/dist/chunk-YUFNYN2H.js +242 -0
- package/dist/chunk-YUFNYN2H.js.map +1 -0
- package/dist/chunk-Z2PUCXTZ.js +94 -0
- package/dist/chunk-Z2PUCXTZ.js.map +1 -0
- package/dist/chunk-ZZJOFKAT.js +13 -0
- package/dist/chunk-ZZJOFKAT.js.map +1 -0
- package/dist/cli/index.js +3661 -0
- package/dist/cli/index.js.map +1 -0
- package/dist/config-WVMRUOCA.js +13 -0
- package/dist/config-WVMRUOCA.js.map +1 -0
- package/dist/context-loader-3ORBPMHJ.js +13 -0
- package/dist/context-loader-3ORBPMHJ.js.map +1 -0
- package/dist/conversation-QDEIDQPH.js +22 -0
- package/dist/conversation-QDEIDQPH.js.map +1 -0
- package/dist/cost-tracker-RS3W7SVY.js +24 -0
- package/dist/cost-tracker-RS3W7SVY.js.map +1 -0
- package/dist/delegate-VJCJLYEK.js +29 -0
- package/dist/delegate-VJCJLYEK.js.map +1 -0
- package/dist/emotional-state-VQVRA6ED.js +206 -0
- package/dist/emotional-state-VQVRA6ED.js.map +1 -0
- package/dist/env-discovery-2BLVMAIM.js +251 -0
- package/dist/env-discovery-2BLVMAIM.js.map +1 -0
- package/dist/export-6GCYHEHQ.js +165 -0
- package/dist/export-6GCYHEHQ.js.map +1 -0
- package/dist/graph-YUIPOSOO.js +14 -0
- package/dist/graph-YUIPOSOO.js.map +1 -0
- package/dist/harness-LCHA3DWP.js +10 -0
- package/dist/harness-LCHA3DWP.js.map +1 -0
- package/dist/harness-WE4SLCML.js +26 -0
- package/dist/harness-WE4SLCML.js.map +1 -0
- package/dist/health-NZ6WNIMV.js +23 -0
- package/dist/health-NZ6WNIMV.js.map +1 -0
- package/dist/index.d.ts +3612 -0
- package/dist/index.js +13501 -0
- package/dist/index.js.map +1 -0
- package/dist/indexer-LONANRRM.js +16 -0
- package/dist/indexer-LONANRRM.js.map +1 -0
- package/dist/instinct-learner-SRM72DHF.js +20 -0
- package/dist/instinct-learner-SRM72DHF.js.map +1 -0
- package/dist/intake-4M3HNU43.js +21 -0
- package/dist/intake-4M3HNU43.js.map +1 -0
- package/dist/intelligence-HJOCA4SJ.js +1081 -0
- package/dist/intelligence-HJOCA4SJ.js.map +1 -0
- package/dist/journal-WANJL3MI.js +24 -0
- package/dist/journal-WANJL3MI.js.map +1 -0
- package/dist/loader-C3TKIKZR.js +23 -0
- package/dist/loader-C3TKIKZR.js.map +1 -0
- package/dist/mcp-WTQJJZAO.js +15 -0
- package/dist/mcp-WTQJJZAO.js.map +1 -0
- package/dist/mcp-discovery-WPAQFL6S.js +377 -0
- package/dist/mcp-discovery-WPAQFL6S.js.map +1 -0
- package/dist/mcp-installer-6O2XXD3V.js +394 -0
- package/dist/mcp-installer-6O2XXD3V.js.map +1 -0
- package/dist/metrics-KXGNFAAB.js +20 -0
- package/dist/metrics-KXGNFAAB.js.map +1 -0
- package/dist/primitive-registry-I6VTIR4W.js +512 -0
- package/dist/primitive-registry-I6VTIR4W.js.map +1 -0
- package/dist/project-discovery-C4UMD7JI.js +246 -0
- package/dist/project-discovery-C4UMD7JI.js.map +1 -0
- package/dist/provider-LQHQX7Z7.js +26 -0
- package/dist/provider-LQHQX7Z7.js.map +1 -0
- package/dist/provider-SXPQZ74H.js +28 -0
- package/dist/provider-SXPQZ74H.js.map +1 -0
- package/dist/rate-limiter-RLRVM325.js +22 -0
- package/dist/rate-limiter-RLRVM325.js.map +1 -0
- package/dist/rule-engine-YGQ3RYZM.js +182 -0
- package/dist/rule-engine-YGQ3RYZM.js.map +1 -0
- package/dist/scaffold-A3VRRCBV.js +347 -0
- package/dist/scaffold-A3VRRCBV.js.map +1 -0
- package/dist/scheduler-XHHIVHRI.js +397 -0
- package/dist/scheduler-XHHIVHRI.js.map +1 -0
- package/dist/search-V3W5JMJG.js +75 -0
- package/dist/search-V3W5JMJG.js.map +1 -0
- package/dist/semantic-search-2DTOO5UX.js +241 -0
- package/dist/semantic-search-2DTOO5UX.js.map +1 -0
- package/dist/serve-DTQ3HENY.js +291 -0
- package/dist/serve-DTQ3HENY.js.map +1 -0
- package/dist/sessions-CZGVXKQE.js +21 -0
- package/dist/sessions-CZGVXKQE.js.map +1 -0
- package/dist/sources-RW5DT56F.js +32 -0
- package/dist/sources-RW5DT56F.js.map +1 -0
- package/dist/starter-packs-76YUVHEU.js +893 -0
- package/dist/starter-packs-76YUVHEU.js.map +1 -0
- package/dist/state-GMXILIHW.js +13 -0
- package/dist/state-GMXILIHW.js.map +1 -0
- package/dist/state-merge-NKO5FRBA.js +174 -0
- package/dist/state-merge-NKO5FRBA.js.map +1 -0
- package/dist/telemetry-UC6PBXC7.js +22 -0
- package/dist/telemetry-UC6PBXC7.js.map +1 -0
- package/dist/tool-executor-MJ7IG7PQ.js +28 -0
- package/dist/tool-executor-MJ7IG7PQ.js.map +1 -0
- package/dist/tools-DZ4KETET.js +20 -0
- package/dist/tools-DZ4KETET.js.map +1 -0
- package/dist/types-EW7AIB3R.js +18 -0
- package/dist/types-EW7AIB3R.js.map +1 -0
- package/dist/types-WGDLSPO6.js +16 -0
- package/dist/types-WGDLSPO6.js.map +1 -0
- package/dist/universal-installer-QGS4SJGX.js +578 -0
- package/dist/universal-installer-QGS4SJGX.js.map +1 -0
- package/dist/validator-7WXMDIHH.js +22 -0
- package/dist/validator-7WXMDIHH.js.map +1 -0
- package/dist/verification-gate-FYXUX6LH.js +246 -0
- package/dist/verification-gate-FYXUX6LH.js.map +1 -0
- package/dist/versioning-Z3XNE2Q2.js +271 -0
- package/dist/versioning-Z3XNE2Q2.js.map +1 -0
- package/dist/watcher-ISJC7YKL.js +109 -0
- package/dist/watcher-ISJC7YKL.js.map +1 -0
- package/dist/web-server-DD7ZOP46.js +28 -0
- package/dist/web-server-DD7ZOP46.js.map +1 -0
- package/package.json +76 -0
- package/sources.yaml +121 -0
- package/templates/assistant/CORE.md +24 -0
- package/templates/assistant/SYSTEM.md +24 -0
- package/templates/assistant/config.yaml +51 -0
- package/templates/base/CORE.md +17 -0
- package/templates/base/SYSTEM.md +24 -0
- package/templates/base/config.yaml +51 -0
- package/templates/claude-opus/config.yaml +51 -0
- package/templates/code-reviewer/CORE.md +25 -0
- package/templates/code-reviewer/SYSTEM.md +30 -0
- package/templates/code-reviewer/config.yaml +51 -0
- package/templates/gpt4/config.yaml +51 -0
- package/templates/local/config.yaml +51 -0
|
@@ -0,0 +1,167 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
|
|
3
|
+
import {
|
|
4
|
+
getSpending
|
|
5
|
+
} from "./chunk-JKMGYWXB.js";
|
|
6
|
+
|
|
7
|
+
// src/runtime/health.ts
|
|
8
|
+
import { readFileSync, writeFileSync, existsSync, mkdirSync } from "fs";
|
|
9
|
+
import { join } from "path";
|
|
10
|
+
// File name of the persisted health record (lives under <harnessDir>/memory/).
var HEALTH_FILE = "health.json";

// Absolute path of the health file for a given harness directory.
function getHealthPath(harnessDir) {
  const memoryDir = join(harnessDir, "memory");
  return join(memoryDir, HEALTH_FILE);
}
|
|
14
|
+
// Build a pristine metrics record: no runs recorded, all counters at zero,
// stamped with the current time.
function defaultMetrics() {
  const nowIso = new Date().toISOString();
  return {
    lastSuccessfulRun: null,
    lastFailedRun: null,
    lastError: null,
    consecutiveFailures: 0,
    totalRuns: 0,
    totalSuccesses: 0,
    totalFailures: 0,
    bootedAt: null,
    updatedAt: nowIso
  };
}
|
|
27
|
+
// Read the persisted health metrics for a harness. Falls back to a fresh
// default record when the file is absent, unreadable, or does not look like
// a metrics object (missing the "totalRuns" marker field).
function loadHealth(harnessDir) {
  const healthPath = getHealthPath(harnessDir);
  if (!existsSync(healthPath)) {
    return defaultMetrics();
  }
  try {
    const parsed = JSON.parse(readFileSync(healthPath, "utf-8"));
    const looksLikeMetrics = typeof parsed === "object" && parsed !== null && "totalRuns" in parsed;
    return looksLikeMetrics ? parsed : defaultMetrics();
  } catch {
    // Corrupt JSON on disk: treat as if no record existed.
    return defaultMetrics();
  }
}
|
|
43
|
+
/**
 * Persist health metrics to <harnessDir>/memory/health.json, creating the
 * memory directory if it does not exist yet.
 *
 * The stored record carries a fresh `updatedAt` timestamp. The timestamp is
 * applied to a shallow copy so the caller-owned `metrics` object is not
 * mutated (the original assigned `metrics.updatedAt` in place).
 *
 * @param {string} harnessDir - Harness root directory.
 * @param {object} metrics - Health metrics record to write.
 */
function saveHealth(harnessDir, metrics) {
  const memoryDir = join(harnessDir, "memory");
  if (!existsSync(memoryDir)) {
    mkdirSync(memoryDir, { recursive: true });
  }
  // Copy before stamping so the argument is left untouched.
  const stamped = { ...metrics, updatedAt: new Date().toISOString() };
  writeFileSync(getHealthPath(harnessDir), JSON.stringify(stamped, null, 2), "utf-8");
}
|
|
51
|
+
// Count a successful run: bump the run/success totals, clear the failure
// streak, and stamp the success time, then persist.
function recordSuccess(harnessDir) {
  const current = loadHealth(harnessDir);
  current.totalRuns += 1;
  current.totalSuccesses += 1;
  current.consecutiveFailures = 0;
  current.lastSuccessfulRun = new Date().toISOString();
  saveHealth(harnessDir, current);
}
|
|
59
|
+
// Count a failed run: bump the run/failure totals and the failure streak,
// stamp the failure time, and remember the error message (if any).
function recordFailure(harnessDir, error) {
  const current = loadHealth(harnessDir);
  current.totalRuns += 1;
  current.totalFailures += 1;
  current.consecutiveFailures += 1;
  current.lastFailedRun = new Date().toISOString();
  current.lastError = error == null ? null : error;
  saveHealth(harnessDir, current);
}
|
|
68
|
+
// Stamp the time the harness process booted and persist it.
function recordBoot(harnessDir) {
  const current = loadHealth(harnessDir);
  current.bootedAt = new Date().toISOString();
  saveHealth(harnessDir, current);
}
|
|
73
|
+
// Run every health check against a harness directory and roll the results up
// into an overall status: any "fail" => unhealthy, else any "warn" =>
// degraded, else healthy. Also reports best-effort spend figures.
function getHealthStatus(harnessDir) {
  const metrics = loadHealth(harnessDir);
  const checks = [];
  const addCheck = (name, status, message) => checks.push({ name, status, message });

  // Check 1: required core files are all present.
  const requiredFiles = ["CORE.md", "config.yaml", "state.md"];
  const missingFiles = requiredFiles.filter((file) => !existsSync(join(harnessDir, file)));
  if (missingFiles.length === 0) {
    addCheck("core-files", "pass", "All core files present");
  } else {
    addCheck("core-files", "fail", `Missing: ${missingFiles.join(", ")}`);
  }

  // Check 2: the memory directory exists.
  const memoryDir = join(harnessDir, "memory");
  const hasMemoryDir = existsSync(memoryDir);
  if (hasMemoryDir) {
    addCheck("memory-dir", "pass", "Memory directory exists");
  } else {
    addCheck("memory-dir", "fail", "Memory directory missing");
  }

  // Check 3: at least one provider API key is configured.
  const apiKeys = [
    { name: "OpenRouter", envVar: "OPENROUTER_API_KEY" },
    { name: "Anthropic", envVar: "ANTHROPIC_API_KEY" },
    { name: "OpenAI", envVar: "OPENAI_API_KEY" }
  ];
  const presentKeys = apiKeys.filter((key) => process.env[key.envVar]);
  if (presentKeys.length > 0) {
    addCheck("api-keys", "pass", `API keys: ${presentKeys.map((key) => key.name).join(", ")}`);
  } else {
    addCheck("api-keys", "warn", "No API keys found in environment");
  }

  // Check 4: consecutive-failure streak (warn under 3, fail at 3 or more).
  const streak = metrics.consecutiveFailures;
  if (streak === 0) {
    addCheck("run-health", "pass", "No consecutive failures");
  } else if (streak < 3) {
    addCheck("run-health", "warn", `${streak} consecutive failure(s)`);
  } else {
    addCheck("run-health", "fail", `${streak} consecutive failures \u2014 last error: ${metrics.lastError ?? "unknown"}`);
  }

  // Check 5: recency of the last successful run (warn after 24h, or when
  // runs have happened but none ever succeeded).
  if (metrics.lastSuccessfulRun) {
    const elapsedMs = Date.now() - new Date(metrics.lastSuccessfulRun).getTime();
    const hoursSinceSuccess = elapsedMs / 3600000;
    if (hoursSinceSuccess < 24) {
      addCheck("last-success", "pass", `Last success: ${metrics.lastSuccessfulRun}`);
    } else {
      addCheck("last-success", "warn", `Last success was ${Math.round(hoursSinceSuccess)}h ago`);
    }
  } else if (metrics.totalRuns > 0) {
    addCheck("last-success", "warn", "No successful runs recorded");
  }

  // Spend figures are best-effort: cost data may simply not exist yet.
  let costToday = 0;
  let costThisMonth = 0;
  try {
    const now = new Date();
    const today = now.toISOString().split("T")[0];
    const monthStart = `${now.getFullYear()}-${String(now.getMonth() + 1).padStart(2, "0")}-01`;
    costToday = getSpending(harnessDir, today).total_cost_usd;
    costThisMonth = getSpending(harnessDir, monthStart).total_cost_usd;
  } catch {
    // Ignore: spending tracker file absent or unreadable.
  }

  // Roll up the individual checks into the overall status.
  const hasFail = checks.some((check) => check.status === "fail");
  const hasWarn = checks.some((check) => check.status === "warn");
  let status;
  if (hasFail) {
    status = "unhealthy";
  } else if (hasWarn) {
    status = "degraded";
  } else {
    status = "healthy";
  }

  return { status, checks, metrics, costToday, costThisMonth };
}
|
|
154
|
+
// Wipe the persisted metrics back to a pristine record (used for testing or
// a fresh start).
function resetHealth(harnessDir) {
  const pristine = defaultMetrics();
  saveHealth(harnessDir, pristine);
}
|
|
157
|
+
|
|
158
|
+
export {
|
|
159
|
+
loadHealth,
|
|
160
|
+
saveHealth,
|
|
161
|
+
recordSuccess,
|
|
162
|
+
recordFailure,
|
|
163
|
+
recordBoot,
|
|
164
|
+
getHealthStatus,
|
|
165
|
+
resetHealth
|
|
166
|
+
};
|
|
167
|
+
//# sourceMappingURL=chunk-TAT6JU3X.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/runtime/health.ts"],"sourcesContent":["import { readFileSync, writeFileSync, existsSync, mkdirSync } from 'fs';\nimport { join } from 'path';\nimport { getSpending } from './cost-tracker.js';\n\n/** Individual health check result */\nexport interface HealthCheck {\n name: string;\n status: 'pass' | 'warn' | 'fail';\n message: string;\n}\n\n/** Persisted health metrics */\nexport interface HealthMetrics {\n lastSuccessfulRun: string | null;\n lastFailedRun: string | null;\n lastError: string | null;\n consecutiveFailures: number;\n totalRuns: number;\n totalSuccesses: number;\n totalFailures: number;\n bootedAt: string | null;\n updatedAt: string;\n}\n\n/** Overall health status */\nexport interface HealthStatus {\n status: 'healthy' | 'degraded' | 'unhealthy';\n checks: HealthCheck[];\n metrics: HealthMetrics;\n costToday: number;\n costThisMonth: number;\n}\n\nconst HEALTH_FILE = 'health.json';\n\nfunction getHealthPath(harnessDir: string): string {\n return join(harnessDir, 'memory', HEALTH_FILE);\n}\n\nfunction defaultMetrics(): HealthMetrics {\n return {\n lastSuccessfulRun: null,\n lastFailedRun: null,\n lastError: null,\n consecutiveFailures: 0,\n totalRuns: 0,\n totalSuccesses: 0,\n totalFailures: 0,\n bootedAt: null,\n updatedAt: new Date().toISOString(),\n };\n}\n\n/**\n * Load health metrics from disk.\n */\nexport function loadHealth(harnessDir: string): HealthMetrics {\n const healthPath = getHealthPath(harnessDir);\n if (!existsSync(healthPath)) {\n return defaultMetrics();\n }\n\n try {\n const content = readFileSync(healthPath, 'utf-8');\n const parsed: unknown = JSON.parse(content);\n if (\n typeof parsed === 'object' &&\n parsed !== null &&\n 'totalRuns' in parsed\n ) {\n return parsed as HealthMetrics;\n }\n return defaultMetrics();\n } catch {\n return defaultMetrics();\n }\n}\n\n/**\n * Save health metrics to disk.\n */\nexport function saveHealth(harnessDir: string, metrics: HealthMetrics): void {\n const 
memoryDir = join(harnessDir, 'memory');\n if (!existsSync(memoryDir)) {\n mkdirSync(memoryDir, { recursive: true });\n }\n\n metrics.updatedAt = new Date().toISOString();\n writeFileSync(getHealthPath(harnessDir), JSON.stringify(metrics, null, 2), 'utf-8');\n}\n\n/**\n * Record a successful run.\n */\nexport function recordSuccess(harnessDir: string): void {\n const metrics = loadHealth(harnessDir);\n metrics.totalRuns++;\n metrics.totalSuccesses++;\n metrics.consecutiveFailures = 0;\n metrics.lastSuccessfulRun = new Date().toISOString();\n saveHealth(harnessDir, metrics);\n}\n\n/**\n * Record a failed run.\n */\nexport function recordFailure(harnessDir: string, error?: string): void {\n const metrics = loadHealth(harnessDir);\n metrics.totalRuns++;\n metrics.totalFailures++;\n metrics.consecutiveFailures++;\n metrics.lastFailedRun = new Date().toISOString();\n metrics.lastError = error ?? null;\n saveHealth(harnessDir, metrics);\n}\n\n/**\n * Record boot time.\n */\nexport function recordBoot(harnessDir: string): void {\n const metrics = loadHealth(harnessDir);\n metrics.bootedAt = new Date().toISOString();\n saveHealth(harnessDir, metrics);\n}\n\n/**\n * Run all health checks and return overall status.\n */\nexport function getHealthStatus(harnessDir: string): HealthStatus {\n const metrics = loadHealth(harnessDir);\n const checks: HealthCheck[] = [];\n\n // Check 1: Required files exist\n const requiredFiles = ['CORE.md', 'config.yaml', 'state.md'];\n const missingFiles = requiredFiles.filter((f) => !existsSync(join(harnessDir, f)));\n if (missingFiles.length === 0) {\n checks.push({ name: 'core-files', status: 'pass', message: 'All core files present' });\n } else {\n checks.push({ name: 'core-files', status: 'fail', message: `Missing: ${missingFiles.join(', ')}` });\n }\n\n // Check 2: Memory directory exists\n const memoryDir = join(harnessDir, 'memory');\n if (existsSync(memoryDir)) {\n checks.push({ name: 'memory-dir', status: 'pass', message: 'Memory 
directory exists' });\n } else {\n checks.push({ name: 'memory-dir', status: 'fail', message: 'Memory directory missing' });\n }\n\n // Check 3: API key availability\n const apiKeys: Array<{ name: string; envVar: string }> = [\n { name: 'OpenRouter', envVar: 'OPENROUTER_API_KEY' },\n { name: 'Anthropic', envVar: 'ANTHROPIC_API_KEY' },\n { name: 'OpenAI', envVar: 'OPENAI_API_KEY' },\n ];\n const presentKeys = apiKeys.filter((k) => process.env[k.envVar]);\n if (presentKeys.length > 0) {\n checks.push({\n name: 'api-keys',\n status: 'pass',\n message: `API keys: ${presentKeys.map((k) => k.name).join(', ')}`,\n });\n } else {\n checks.push({ name: 'api-keys', status: 'warn', message: 'No API keys found in environment' });\n }\n\n // Check 4: Consecutive failures\n if (metrics.consecutiveFailures === 0) {\n checks.push({ name: 'run-health', status: 'pass', message: 'No consecutive failures' });\n } else if (metrics.consecutiveFailures < 3) {\n checks.push({\n name: 'run-health',\n status: 'warn',\n message: `${metrics.consecutiveFailures} consecutive failure(s)`,\n });\n } else {\n checks.push({\n name: 'run-health',\n status: 'fail',\n message: `${metrics.consecutiveFailures} consecutive failures — last error: ${metrics.lastError ?? 
'unknown'}`,\n });\n }\n\n // Check 5: Last run recency (warn if no successful run in 24h when there have been runs)\n if (metrics.lastSuccessfulRun) {\n const hoursSinceSuccess = (Date.now() - new Date(metrics.lastSuccessfulRun).getTime()) / 3600000;\n if (hoursSinceSuccess < 24) {\n checks.push({ name: 'last-success', status: 'pass', message: `Last success: ${metrics.lastSuccessfulRun}` });\n } else {\n checks.push({\n name: 'last-success',\n status: 'warn',\n message: `Last success was ${Math.round(hoursSinceSuccess)}h ago`,\n });\n }\n } else if (metrics.totalRuns > 0) {\n checks.push({ name: 'last-success', status: 'warn', message: 'No successful runs recorded' });\n }\n\n // Cost checks\n let costToday = 0;\n let costThisMonth = 0;\n\n try {\n const today = new Date().toISOString().split('T')[0];\n const monthStart = `${new Date().getFullYear()}-${String(new Date().getMonth() + 1).padStart(2, '0')}-01`;\n costToday = getSpending(harnessDir, today).total_cost_usd;\n costThisMonth = getSpending(harnessDir, monthStart).total_cost_usd;\n } catch {\n // Cost data may not exist\n }\n\n // Determine overall status\n const failCount = checks.filter((c) => c.status === 'fail').length;\n const warnCount = checks.filter((c) => c.status === 'warn').length;\n\n let status: HealthStatus['status'];\n if (failCount > 0) {\n status = 'unhealthy';\n } else if (warnCount > 0) {\n status = 'degraded';\n } else {\n status = 'healthy';\n }\n\n return { status, checks, metrics, costToday, costThisMonth };\n}\n\n/**\n * Reset health metrics (for testing or fresh start).\n */\nexport function resetHealth(harnessDir: string): void {\n saveHealth(harnessDir, 
defaultMetrics());\n}\n"],"mappings":";;;;;;;AAAA,SAAS,cAAc,eAAe,YAAY,iBAAiB;AACnE,SAAS,YAAY;AAgCrB,IAAM,cAAc;AAEpB,SAAS,cAAc,YAA4B;AACjD,SAAO,KAAK,YAAY,UAAU,WAAW;AAC/C;AAEA,SAAS,iBAAgC;AACvC,SAAO;AAAA,IACL,mBAAmB;AAAA,IACnB,eAAe;AAAA,IACf,WAAW;AAAA,IACX,qBAAqB;AAAA,IACrB,WAAW;AAAA,IACX,gBAAgB;AAAA,IAChB,eAAe;AAAA,IACf,UAAU;AAAA,IACV,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,EACpC;AACF;AAKO,SAAS,WAAW,YAAmC;AAC5D,QAAM,aAAa,cAAc,UAAU;AAC3C,MAAI,CAAC,WAAW,UAAU,GAAG;AAC3B,WAAO,eAAe;AAAA,EACxB;AAEA,MAAI;AACF,UAAM,UAAU,aAAa,YAAY,OAAO;AAChD,UAAM,SAAkB,KAAK,MAAM,OAAO;AAC1C,QACE,OAAO,WAAW,YAClB,WAAW,QACX,eAAe,QACf;AACA,aAAO;AAAA,IACT;AACA,WAAO,eAAe;AAAA,EACxB,QAAQ;AACN,WAAO,eAAe;AAAA,EACxB;AACF;AAKO,SAAS,WAAW,YAAoB,SAA8B;AAC3E,QAAM,YAAY,KAAK,YAAY,QAAQ;AAC3C,MAAI,CAAC,WAAW,SAAS,GAAG;AAC1B,cAAU,WAAW,EAAE,WAAW,KAAK,CAAC;AAAA,EAC1C;AAEA,UAAQ,aAAY,oBAAI,KAAK,GAAE,YAAY;AAC3C,gBAAc,cAAc,UAAU,GAAG,KAAK,UAAU,SAAS,MAAM,CAAC,GAAG,OAAO;AACpF;AAKO,SAAS,cAAc,YAA0B;AACtD,QAAM,UAAU,WAAW,UAAU;AACrC,UAAQ;AACR,UAAQ;AACR,UAAQ,sBAAsB;AAC9B,UAAQ,qBAAoB,oBAAI,KAAK,GAAE,YAAY;AACnD,aAAW,YAAY,OAAO;AAChC;AAKO,SAAS,cAAc,YAAoB,OAAsB;AACtE,QAAM,UAAU,WAAW,UAAU;AACrC,UAAQ;AACR,UAAQ;AACR,UAAQ;AACR,UAAQ,iBAAgB,oBAAI,KAAK,GAAE,YAAY;AAC/C,UAAQ,YAAY,SAAS;AAC7B,aAAW,YAAY,OAAO;AAChC;AAKO,SAAS,WAAW,YAA0B;AACnD,QAAM,UAAU,WAAW,UAAU;AACrC,UAAQ,YAAW,oBAAI,KAAK,GAAE,YAAY;AAC1C,aAAW,YAAY,OAAO;AAChC;AAKO,SAAS,gBAAgB,YAAkC;AAChE,QAAM,UAAU,WAAW,UAAU;AACrC,QAAM,SAAwB,CAAC;AAG/B,QAAM,gBAAgB,CAAC,WAAW,eAAe,UAAU;AAC3D,QAAM,eAAe,cAAc,OAAO,CAAC,MAAM,CAAC,WAAW,KAAK,YAAY,CAAC,CAAC,CAAC;AACjF,MAAI,aAAa,WAAW,GAAG;AAC7B,WAAO,KAAK,EAAE,MAAM,cAAc,QAAQ,QAAQ,SAAS,yBAAyB,CAAC;AAAA,EACvF,OAAO;AACL,WAAO,KAAK,EAAE,MAAM,cAAc,QAAQ,QAAQ,SAAS,YAAY,aAAa,KAAK,IAAI,CAAC,GAAG,CAAC;AAAA,EACpG;AAGA,QAAM,YAAY,KAAK,YAAY,QAAQ;AAC3C,MAAI,WAAW,SAAS,GAAG;AACzB,WAAO,KAAK,EAAE,MAAM,cAAc,QAAQ,QAAQ,SAAS,0BAA0B,CAAC;AAAA,EACxF,OAAO;AACL,WAAO,KAAK,EAAE,MAAM,cAAc,QAAQ,QAAQ,SAAS,2BAA2B,CAAC;AAAA,EACzF;AAGA,QAAM,UAAmD;AAAA,IACvD,EAAE,MAAM,cAAc,QAAQ,qBAAqB;AAAA,IACnD,EAAE,MAAM,
aAAa,QAAQ,oBAAoB;AAAA,IACjD,EAAE,MAAM,UAAU,QAAQ,iBAAiB;AAAA,EAC7C;AACA,QAAM,cAAc,QAAQ,OAAO,CAAC,MAAM,QAAQ,IAAI,EAAE,MAAM,CAAC;AAC/D,MAAI,YAAY,SAAS,GAAG;AAC1B,WAAO,KAAK;AAAA,MACV,MAAM;AAAA,MACN,QAAQ;AAAA,MACR,SAAS,aAAa,YAAY,IAAI,CAAC,MAAM,EAAE,IAAI,EAAE,KAAK,IAAI,CAAC;AAAA,IACjE,CAAC;AAAA,EACH,OAAO;AACL,WAAO,KAAK,EAAE,MAAM,YAAY,QAAQ,QAAQ,SAAS,mCAAmC,CAAC;AAAA,EAC/F;AAGA,MAAI,QAAQ,wBAAwB,GAAG;AACrC,WAAO,KAAK,EAAE,MAAM,cAAc,QAAQ,QAAQ,SAAS,0BAA0B,CAAC;AAAA,EACxF,WAAW,QAAQ,sBAAsB,GAAG;AAC1C,WAAO,KAAK;AAAA,MACV,MAAM;AAAA,MACN,QAAQ;AAAA,MACR,SAAS,GAAG,QAAQ,mBAAmB;AAAA,IACzC,CAAC;AAAA,EACH,OAAO;AACL,WAAO,KAAK;AAAA,MACV,MAAM;AAAA,MACN,QAAQ;AAAA,MACR,SAAS,GAAG,QAAQ,mBAAmB,4CAAuC,QAAQ,aAAa,SAAS;AAAA,IAC9G,CAAC;AAAA,EACH;AAGA,MAAI,QAAQ,mBAAmB;AAC7B,UAAM,qBAAqB,KAAK,IAAI,IAAI,IAAI,KAAK,QAAQ,iBAAiB,EAAE,QAAQ,KAAK;AACzF,QAAI,oBAAoB,IAAI;AAC1B,aAAO,KAAK,EAAE,MAAM,gBAAgB,QAAQ,QAAQ,SAAS,iBAAiB,QAAQ,iBAAiB,GAAG,CAAC;AAAA,IAC7G,OAAO;AACL,aAAO,KAAK;AAAA,QACV,MAAM;AAAA,QACN,QAAQ;AAAA,QACR,SAAS,oBAAoB,KAAK,MAAM,iBAAiB,CAAC;AAAA,MAC5D,CAAC;AAAA,IACH;AAAA,EACF,WAAW,QAAQ,YAAY,GAAG;AAChC,WAAO,KAAK,EAAE,MAAM,gBAAgB,QAAQ,QAAQ,SAAS,8BAA8B,CAAC;AAAA,EAC9F;AAGA,MAAI,YAAY;AAChB,MAAI,gBAAgB;AAEpB,MAAI;AACF,UAAM,SAAQ,oBAAI,KAAK,GAAE,YAAY,EAAE,MAAM,GAAG,EAAE,CAAC;AACnD,UAAM,aAAa,IAAG,oBAAI,KAAK,GAAE,YAAY,CAAC,IAAI,QAAO,oBAAI,KAAK,GAAE,SAAS,IAAI,CAAC,EAAE,SAAS,GAAG,GAAG,CAAC;AACpG,gBAAY,YAAY,YAAY,KAAK,EAAE;AAC3C,oBAAgB,YAAY,YAAY,UAAU,EAAE;AAAA,EACtD,QAAQ;AAAA,EAER;AAGA,QAAM,YAAY,OAAO,OAAO,CAAC,MAAM,EAAE,WAAW,MAAM,EAAE;AAC5D,QAAM,YAAY,OAAO,OAAO,CAAC,MAAM,EAAE,WAAW,MAAM,EAAE;AAE5D,MAAI;AACJ,MAAI,YAAY,GAAG;AACjB,aAAS;AAAA,EACX,WAAW,YAAY,GAAG;AACxB,aAAS;AAAA,EACX,OAAO;AACL,aAAS;AAAA,EACX;AAEA,SAAO,EAAE,QAAQ,QAAQ,SAAS,WAAW,cAAc;AAC7D;AAKO,SAAS,YAAY,YAA0B;AACpD,aAAW,YAAY,eAAe,CAAC;AACzC;","names":[]}
|
|
@@ -0,0 +1,79 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
|
|
3
|
+
import {
|
|
4
|
+
withFileLockSync
|
|
5
|
+
} from "./chunk-Z2PUCXTZ.js";
|
|
6
|
+
|
|
7
|
+
// src/runtime/state.ts
|
|
8
|
+
import { readFileSync, writeFileSync, existsSync } from "fs";
|
|
9
|
+
import { join } from "path";
|
|
10
|
+
// Baseline agent state used when state.md is missing or a section fails to
// parse. NOTE(review): `last_interaction` is stamped once at module load,
// not per call, and the empty arrays here are shared by reference wherever
// this object is shallow-spread — confirm callers never mutate them in place.
var DEFAULT_STATE = {
  mode: "idle",
  goals: [],
  active_workflows: [],
  last_interaction: (/* @__PURE__ */ new Date()).toISOString(),
  unfinished_business: []
};
|
|
17
|
+
/**
 * Load agent state from <harnessDir>/state.md.
 *
 * When the file does not exist, a fresh default state is returned. The
 * default's array fields are copied rather than aliased: the original
 * `{ ...DEFAULT_STATE }` shallow spread shared `goals`,
 * `active_workflows`, and `unfinished_business` with the module-level
 * default, so a caller pushing into them would corrupt every later load.
 *
 * @param {string} harnessDir - Harness root directory.
 * @returns {object} Parsed agent state.
 */
function loadState(harnessDir) {
  const statePath = join(harnessDir, "state.md");
  if (!existsSync(statePath)) {
    return {
      ...DEFAULT_STATE,
      goals: [...DEFAULT_STATE.goals],
      active_workflows: [...DEFAULT_STATE.active_workflows],
      unfinished_business: [...DEFAULT_STATE.unfinished_business]
    };
  }
  const content = readFileSync(statePath, "utf-8");
  return parseStateMd(content);
}
|
|
25
|
+
// Render the agent state to markdown and write it to state.md under a file
// lock, so concurrent writers cannot interleave partial content.
function saveState(harnessDir, state) {
  const statePath = join(harnessDir, "state.md");
  const rendered = renderStateMd(state);
  withFileLockSync(harnessDir, statePath, () => writeFileSync(statePath, rendered, "utf-8"));
}
|
|
32
|
+
/**
 * Parse a state.md document back into an agent-state object.
 *
 * Sections are located by their `## Heading` markers; list sections collect
 * `- item` bullet lines. Unmatched sections keep the default value. The
 * defaults' array fields are copied up front: the original
 * `{ ...DEFAULT_STATE }` shallow spread aliased them, so mutating a parsed
 * state whose section was missing would corrupt the module-level defaults.
 *
 * @param {string} content - Raw markdown text of state.md.
 * @returns {object} Parsed agent state.
 */
function parseStateMd(content) {
  // Shared bullet-list extractor ("- item" lines, trimmed).
  const parseBullets = (sectionBody) => sectionBody
    .split("\n")
    .filter((line) => line.startsWith("- "))
    .map((line) => line.replace(/^- /, "").trim());

  const state = {
    ...DEFAULT_STATE,
    goals: [...DEFAULT_STATE.goals],
    active_workflows: [...DEFAULT_STATE.active_workflows],
    unfinished_business: [...DEFAULT_STATE.unfinished_business]
  };

  const modeMatch = content.match(/## Mode\s*\n(.+)/);
  if (modeMatch) state.mode = modeMatch[1].trim();

  const goalsMatch = content.match(/## Goals\s*\n([\s\S]*?)(?=\n## |\n$|$)/);
  if (goalsMatch) state.goals = parseBullets(goalsMatch[1]);

  const workflowsMatch = content.match(/## Active Workflows\s*\n([\s\S]*?)(?=\n## |\n$|$)/);
  if (workflowsMatch) state.active_workflows = parseBullets(workflowsMatch[1]);

  const lastMatch = content.match(/## Last Interaction\s*\n(.+)/);
  if (lastMatch) state.last_interaction = lastMatch[1].trim();

  const unfinishedMatch = content.match(/## Unfinished Business\s*\n([\s\S]*?)(?=\n## |\n$|$)/);
  if (unfinishedMatch) state.unfinished_business = parseBullets(unfinishedMatch[1]);

  return state;
}
|
|
52
|
+
// Serialize an agent state into the markdown layout parseStateMd expects:
// a title, then one "## Heading" section per field, list fields as bullets.
function renderStateMd(state) {
  const bullets = (items) => items.map((item) => `- ${item}`);
  const sections = [
    ["# Agent State", ""],
    ["## Mode", state.mode, ""],
    ["## Goals", ...bullets(state.goals), ""],
    ["## Active Workflows", ...bullets(state.active_workflows), ""],
    ["## Last Interaction", state.last_interaction, ""],
    ["## Unfinished Business", ...bullets(state.unfinished_business), ""]
  ];
  return sections.flat().join("\n");
}
|
|
74
|
+
|
|
75
|
+
export {
|
|
76
|
+
loadState,
|
|
77
|
+
saveState
|
|
78
|
+
};
|
|
79
|
+
//# sourceMappingURL=chunk-UDZIS2AQ.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/runtime/state.ts"],"sourcesContent":["import { readFileSync, writeFileSync, existsSync } from 'fs';\nimport { join } from 'path';\nimport type { AgentState } from '../core/types.js';\nimport { withFileLockSync } from './file-lock.js';\n\nconst DEFAULT_STATE: AgentState = {\n mode: 'idle',\n goals: [],\n active_workflows: [],\n last_interaction: new Date().toISOString(),\n unfinished_business: [],\n};\n\nexport function loadState(harnessDir: string): AgentState {\n const statePath = join(harnessDir, 'state.md');\n\n if (!existsSync(statePath)) {\n return { ...DEFAULT_STATE };\n }\n\n const content = readFileSync(statePath, 'utf-8');\n return parseStateMd(content);\n}\n\nexport function saveState(harnessDir: string, state: AgentState): void {\n const statePath = join(harnessDir, 'state.md');\n const content = renderStateMd(state);\n withFileLockSync(harnessDir, statePath, () => {\n writeFileSync(statePath, content, 'utf-8');\n });\n}\n\nfunction parseStateMd(content: string): AgentState {\n const state = { ...DEFAULT_STATE };\n\n const modeMatch = content.match(/## Mode\\s*\\n(.+)/);\n if (modeMatch) state.mode = modeMatch[1].trim();\n\n const goalsMatch = content.match(/## Goals\\s*\\n([\\s\\S]*?)(?=\\n## |\\n$|$)/);\n if (goalsMatch) {\n state.goals = goalsMatch[1]\n .split('\\n')\n .filter(l => l.startsWith('- '))\n .map(l => l.replace(/^- /, '').trim());\n }\n\n const workflowsMatch = content.match(/## Active Workflows\\s*\\n([\\s\\S]*?)(?=\\n## |\\n$|$)/);\n if (workflowsMatch) {\n state.active_workflows = workflowsMatch[1]\n .split('\\n')\n .filter(l => l.startsWith('- '))\n .map(l => l.replace(/^- /, '').trim());\n }\n\n const lastMatch = content.match(/## Last Interaction\\s*\\n(.+)/);\n if (lastMatch) state.last_interaction = lastMatch[1].trim();\n\n const unfinishedMatch = content.match(/## Unfinished Business\\s*\\n([\\s\\S]*?)(?=\\n## |\\n$|$)/);\n if (unfinishedMatch) {\n state.unfinished_business = unfinishedMatch[1]\n 
.split('\\n')\n .filter(l => l.startsWith('- '))\n .map(l => l.replace(/^- /, '').trim());\n }\n\n return state;\n}\n\nfunction renderStateMd(state: AgentState): string {\n const lines: string[] = [\n '# Agent State',\n '',\n '## Mode',\n state.mode,\n '',\n '## Goals',\n ...state.goals.map(g => `- ${g}`),\n '',\n '## Active Workflows',\n ...state.active_workflows.map(w => `- ${w}`),\n '',\n '## Last Interaction',\n state.last_interaction,\n '',\n '## Unfinished Business',\n ...state.unfinished_business.map(u => `- ${u}`),\n '',\n ];\n\n return lines.join('\\n');\n}\n"],"mappings":";;;;;;;AAAA,SAAS,cAAc,eAAe,kBAAkB;AACxD,SAAS,YAAY;AAIrB,IAAM,gBAA4B;AAAA,EAChC,MAAM;AAAA,EACN,OAAO,CAAC;AAAA,EACR,kBAAkB,CAAC;AAAA,EACnB,mBAAkB,oBAAI,KAAK,GAAE,YAAY;AAAA,EACzC,qBAAqB,CAAC;AACxB;AAEO,SAAS,UAAU,YAAgC;AACxD,QAAM,YAAY,KAAK,YAAY,UAAU;AAE7C,MAAI,CAAC,WAAW,SAAS,GAAG;AAC1B,WAAO,EAAE,GAAG,cAAc;AAAA,EAC5B;AAEA,QAAM,UAAU,aAAa,WAAW,OAAO;AAC/C,SAAO,aAAa,OAAO;AAC7B;AAEO,SAAS,UAAU,YAAoB,OAAyB;AACrE,QAAM,YAAY,KAAK,YAAY,UAAU;AAC7C,QAAM,UAAU,cAAc,KAAK;AACnC,mBAAiB,YAAY,WAAW,MAAM;AAC5C,kBAAc,WAAW,SAAS,OAAO;AAAA,EAC3C,CAAC;AACH;AAEA,SAAS,aAAa,SAA6B;AACjD,QAAM,QAAQ,EAAE,GAAG,cAAc;AAEjC,QAAM,YAAY,QAAQ,MAAM,kBAAkB;AAClD,MAAI,UAAW,OAAM,OAAO,UAAU,CAAC,EAAE,KAAK;AAE9C,QAAM,aAAa,QAAQ,MAAM,wCAAwC;AACzE,MAAI,YAAY;AACd,UAAM,QAAQ,WAAW,CAAC,EACvB,MAAM,IAAI,EACV,OAAO,OAAK,EAAE,WAAW,IAAI,CAAC,EAC9B,IAAI,OAAK,EAAE,QAAQ,OAAO,EAAE,EAAE,KAAK,CAAC;AAAA,EACzC;AAEA,QAAM,iBAAiB,QAAQ,MAAM,mDAAmD;AACxF,MAAI,gBAAgB;AAClB,UAAM,mBAAmB,eAAe,CAAC,EACtC,MAAM,IAAI,EACV,OAAO,OAAK,EAAE,WAAW,IAAI,CAAC,EAC9B,IAAI,OAAK,EAAE,QAAQ,OAAO,EAAE,EAAE,KAAK,CAAC;AAAA,EACzC;AAEA,QAAM,YAAY,QAAQ,MAAM,8BAA8B;AAC9D,MAAI,UAAW,OAAM,mBAAmB,UAAU,CAAC,EAAE,KAAK;AAE1D,QAAM,kBAAkB,QAAQ,MAAM,sDAAsD;AAC5F,MAAI,iBAAiB;AACnB,UAAM,sBAAsB,gBAAgB,CAAC,EAC1C,MAAM,IAAI,EACV,OAAO,OAAK,EAAE,WAAW,IAAI,CAAC,EAC9B,IAAI,OAAK,EAAE,QAAQ,OAAO,EAAE,EAAE,KAAK,CAAC;AAAA,EACzC;AAEA,SAAO;AACT;AAEA,SAAS,cAAc,OAA2B;AAChD,QAAM,QAAkB;AAAA,IACtB;AAAA,IACA;AAAA,IACA;AAAA,IACA
,MAAM;AAAA,IACN;AAAA,IACA;AAAA,IACA,GAAG,MAAM,MAAM,IAAI,OAAK,KAAK,CAAC,EAAE;AAAA,IAChC;AAAA,IACA;AAAA,IACA,GAAG,MAAM,iBAAiB,IAAI,OAAK,KAAK,CAAC,EAAE;AAAA,IAC3C;AAAA,IACA;AAAA,IACA,MAAM;AAAA,IACN;AAAA,IACA;AAAA,IACA,GAAG,MAAM,oBAAoB,IAAI,OAAK,KAAK,CAAC,EAAE;AAAA,IAC9C;AAAA,EACF;AAEA,SAAO,MAAM,KAAK,IAAI;AACxB;","names":[]}
|
|
@@ -0,0 +1,115 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
|
|
3
|
+
import {
|
|
4
|
+
CORE_PRIMITIVE_DIRS,
|
|
5
|
+
FrontmatterSchema
|
|
6
|
+
} from "./chunk-4CWAGBNS.js";
|
|
7
|
+
|
|
8
|
+
// src/primitives/loader.ts
|
|
9
|
+
import { readFileSync, readdirSync, existsSync } from "fs";
|
|
10
|
+
import { join, extname } from "path";
|
|
11
|
+
import matter from "gray-matter";
|
|
12
|
+
var L0_REGEX = /<!--\s*L0:\s*([\s\S]*?)\s*-->/;
|
|
13
|
+
var L1_REGEX = /<!--\s*L1:\s*([\s\S]*?)\s*-->/;
|
|
14
|
+
function parseHarnessDocument(filePath) {
|
|
15
|
+
const raw = readFileSync(filePath, "utf-8");
|
|
16
|
+
const { data, content } = matter(raw);
|
|
17
|
+
const normalized = { ...data };
|
|
18
|
+
for (const key of ["created", "updated"]) {
|
|
19
|
+
if (normalized[key] instanceof Date) {
|
|
20
|
+
normalized[key] = normalized[key].toISOString().split("T")[0];
|
|
21
|
+
}
|
|
22
|
+
}
|
|
23
|
+
let frontmatter;
|
|
24
|
+
try {
|
|
25
|
+
frontmatter = FrontmatterSchema.parse(normalized);
|
|
26
|
+
} catch {
|
|
27
|
+
const id = filePath.split("/").pop()?.replace(".md", "") || "unknown";
|
|
28
|
+
frontmatter = FrontmatterSchema.parse({ id });
|
|
29
|
+
}
|
|
30
|
+
const l0Match = content.match(L0_REGEX);
|
|
31
|
+
const l1Match = content.match(L1_REGEX);
|
|
32
|
+
const l0 = l0Match ? l0Match[1].trim() : "";
|
|
33
|
+
const l1 = l1Match ? l1Match[1].trim() : "";
|
|
34
|
+
const body = content.replace(L0_REGEX, "").replace(L1_REGEX, "").trim();
|
|
35
|
+
return {
|
|
36
|
+
path: filePath,
|
|
37
|
+
frontmatter,
|
|
38
|
+
l0,
|
|
39
|
+
l1,
|
|
40
|
+
body,
|
|
41
|
+
raw
|
|
42
|
+
};
|
|
43
|
+
}
|
|
44
|
+
function loadDirectory(dirPath) {
|
|
45
|
+
return loadDirectoryWithErrors(dirPath).docs;
|
|
46
|
+
}
|
|
47
|
+
function loadDirectoryWithErrors(dirPath) {
|
|
48
|
+
if (!existsSync(dirPath)) return { docs: [], errors: [] };
|
|
49
|
+
const files = readdirSync(dirPath);
|
|
50
|
+
const docs = [];
|
|
51
|
+
const errors = [];
|
|
52
|
+
for (const file of files) {
|
|
53
|
+
if (extname(file) !== ".md") continue;
|
|
54
|
+
if (file.startsWith("_")) continue;
|
|
55
|
+
if (file.startsWith(".")) continue;
|
|
56
|
+
const filePath = join(dirPath, file);
|
|
57
|
+
try {
|
|
58
|
+
const doc = parseHarnessDocument(filePath);
|
|
59
|
+
if (doc.frontmatter.status !== "archived" && doc.frontmatter.status !== "deprecated") {
|
|
60
|
+
docs.push(doc);
|
|
61
|
+
}
|
|
62
|
+
} catch (err) {
|
|
63
|
+
errors.push({
|
|
64
|
+
path: filePath,
|
|
65
|
+
error: err instanceof Error ? err.message : String(err)
|
|
66
|
+
});
|
|
67
|
+
}
|
|
68
|
+
}
|
|
69
|
+
return { docs, errors };
|
|
70
|
+
}
|
|
71
|
+
function loadAllPrimitives(harnessDir, extraDirs) {
|
|
72
|
+
return loadAllPrimitivesWithErrors(harnessDir, extraDirs).primitives;
|
|
73
|
+
}
|
|
74
|
+
function loadAllPrimitivesWithErrors(harnessDir, extraDirs) {
|
|
75
|
+
const primitives = /* @__PURE__ */ new Map();
|
|
76
|
+
const allErrors = [];
|
|
77
|
+
const directories = [...CORE_PRIMITIVE_DIRS];
|
|
78
|
+
if (extraDirs) {
|
|
79
|
+
for (const dir of extraDirs) {
|
|
80
|
+
if (!directories.includes(dir)) {
|
|
81
|
+
directories.push(dir);
|
|
82
|
+
}
|
|
83
|
+
}
|
|
84
|
+
}
|
|
85
|
+
for (const dir of directories) {
|
|
86
|
+
const { docs, errors } = loadDirectoryWithErrors(join(harnessDir, dir));
|
|
87
|
+
primitives.set(dir, docs);
|
|
88
|
+
allErrors.push(...errors);
|
|
89
|
+
}
|
|
90
|
+
return { primitives, errors: allErrors };
|
|
91
|
+
}
|
|
92
|
+
function estimateTokens(text) {
|
|
93
|
+
return Math.ceil(text.length / 4);
|
|
94
|
+
}
|
|
95
|
+
function getAtLevel(doc, level) {
|
|
96
|
+
switch (level) {
|
|
97
|
+
case 0:
|
|
98
|
+
return doc.l0 || doc.frontmatter.id;
|
|
99
|
+
case 1:
|
|
100
|
+
return doc.l1 || doc.l0 || doc.body.slice(0, 400);
|
|
101
|
+
case 2:
|
|
102
|
+
return doc.body;
|
|
103
|
+
}
|
|
104
|
+
}
|
|
105
|
+
|
|
106
|
+
export {
|
|
107
|
+
parseHarnessDocument,
|
|
108
|
+
loadDirectory,
|
|
109
|
+
loadDirectoryWithErrors,
|
|
110
|
+
loadAllPrimitives,
|
|
111
|
+
loadAllPrimitivesWithErrors,
|
|
112
|
+
estimateTokens,
|
|
113
|
+
getAtLevel
|
|
114
|
+
};
|
|
115
|
+
//# sourceMappingURL=chunk-UPLBF4RZ.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/primitives/loader.ts"],"sourcesContent":["import { readFileSync, readdirSync, existsSync } from 'fs';\nimport { join, extname } from 'path';\nimport matter from 'gray-matter';\nimport { FrontmatterSchema, CORE_PRIMITIVE_DIRS, type HarnessDocument, type Frontmatter } from '../core/types.js';\n\nexport interface ParseError {\n path: string;\n error: string;\n}\n\nexport interface LoadResult {\n docs: HarnessDocument[];\n errors: ParseError[];\n}\n\n// Extract L0 and L1 from HTML comments at the top of the markdown body\n// Format: <!-- L0: one-line summary -->\n// <!-- L1: paragraph summary -->\nconst L0_REGEX = /<!--\\s*L0:\\s*([\\s\\S]*?)\\s*-->/;\nconst L1_REGEX = /<!--\\s*L1:\\s*([\\s\\S]*?)\\s*-->/;\n\nexport function parseHarnessDocument(filePath: string): HarnessDocument {\n const raw = readFileSync(filePath, 'utf-8');\n const { data, content } = matter(raw);\n\n // Parse frontmatter with defaults\n // Normalize dates: gray-matter converts date strings to Date objects\n const normalized = { ...data };\n for (const key of ['created', 'updated']) {\n if (normalized[key] instanceof Date) {\n normalized[key] = (normalized[key] as Date).toISOString().split('T')[0];\n }\n }\n\n let frontmatter: Frontmatter;\n try {\n frontmatter = FrontmatterSchema.parse(normalized);\n } catch {\n // Fallback: create minimal frontmatter from filename\n const id = filePath.split('/').pop()?.replace('.md', '') || 'unknown';\n frontmatter = FrontmatterSchema.parse({ id });\n }\n\n // Extract L0 and L1 from content\n const l0Match = content.match(L0_REGEX);\n const l1Match = content.match(L1_REGEX);\n\n const l0 = l0Match ? l0Match[1].trim() : '';\n const l1 = l1Match ? 
l1Match[1].trim() : '';\n\n // Body is the content without L0/L1 comments\n const body = content\n .replace(L0_REGEX, '')\n .replace(L1_REGEX, '')\n .trim();\n\n return {\n path: filePath,\n frontmatter,\n l0,\n l1,\n body,\n raw,\n };\n}\n\nexport function loadDirectory(dirPath: string): HarnessDocument[] {\n return loadDirectoryWithErrors(dirPath).docs;\n}\n\nexport function loadDirectoryWithErrors(dirPath: string): LoadResult {\n if (!existsSync(dirPath)) return { docs: [], errors: [] };\n\n const files = readdirSync(dirPath);\n const docs: HarnessDocument[] = [];\n const errors: ParseError[] = [];\n\n for (const file of files) {\n if (extname(file) !== '.md') continue;\n if (file.startsWith('_')) continue; // Skip index files\n if (file.startsWith('.')) continue; // Skip hidden files\n\n const filePath = join(dirPath, file);\n try {\n const doc = parseHarnessDocument(filePath);\n if (doc.frontmatter.status !== 'archived' && doc.frontmatter.status !== 'deprecated') {\n docs.push(doc);\n }\n } catch (err) {\n errors.push({\n path: filePath,\n error: err instanceof Error ? 
err.message : String(err),\n });\n }\n }\n\n return { docs, errors };\n}\n\nexport interface LoadAllResult {\n primitives: Map<string, HarnessDocument[]>;\n errors: ParseError[];\n}\n\nexport function loadAllPrimitives(harnessDir: string, extraDirs?: string[]): Map<string, HarnessDocument[]> {\n return loadAllPrimitivesWithErrors(harnessDir, extraDirs).primitives;\n}\n\nexport function loadAllPrimitivesWithErrors(harnessDir: string, extraDirs?: string[]): LoadAllResult {\n const primitives = new Map<string, HarnessDocument[]>();\n const allErrors: ParseError[] = [];\n\n const directories: string[] = [...CORE_PRIMITIVE_DIRS];\n if (extraDirs) {\n for (const dir of extraDirs) {\n if (!directories.includes(dir)) {\n directories.push(dir);\n }\n }\n }\n\n for (const dir of directories) {\n const { docs, errors } = loadDirectoryWithErrors(join(harnessDir, dir));\n primitives.set(dir, docs);\n allErrors.push(...errors);\n }\n\n return { primitives, errors: allErrors };\n}\n\n// Estimate token count (rough: 1 token ≈ 4 chars)\nexport function estimateTokens(text: string): number {\n return Math.ceil(text.length / 4);\n}\n\n// Load a file at a specific disclosure level\nexport function getAtLevel(doc: HarnessDocument, level: 0 | 1 | 2): string {\n switch (level) {\n case 0:\n return doc.l0 || doc.frontmatter.id;\n case 1:\n return doc.l1 || doc.l0 || doc.body.slice(0, 400);\n case 2:\n return doc.body;\n 
}\n}\n"],"mappings":";;;;;;;;AAAA,SAAS,cAAc,aAAa,kBAAkB;AACtD,SAAS,MAAM,eAAe;AAC9B,OAAO,YAAY;AAgBnB,IAAM,WAAW;AACjB,IAAM,WAAW;AAEV,SAAS,qBAAqB,UAAmC;AACtE,QAAM,MAAM,aAAa,UAAU,OAAO;AAC1C,QAAM,EAAE,MAAM,QAAQ,IAAI,OAAO,GAAG;AAIpC,QAAM,aAAa,EAAE,GAAG,KAAK;AAC7B,aAAW,OAAO,CAAC,WAAW,SAAS,GAAG;AACxC,QAAI,WAAW,GAAG,aAAa,MAAM;AACnC,iBAAW,GAAG,IAAK,WAAW,GAAG,EAAW,YAAY,EAAE,MAAM,GAAG,EAAE,CAAC;AAAA,IACxE;AAAA,EACF;AAEA,MAAI;AACJ,MAAI;AACF,kBAAc,kBAAkB,MAAM,UAAU;AAAA,EAClD,QAAQ;AAEN,UAAM,KAAK,SAAS,MAAM,GAAG,EAAE,IAAI,GAAG,QAAQ,OAAO,EAAE,KAAK;AAC5D,kBAAc,kBAAkB,MAAM,EAAE,GAAG,CAAC;AAAA,EAC9C;AAGA,QAAM,UAAU,QAAQ,MAAM,QAAQ;AACtC,QAAM,UAAU,QAAQ,MAAM,QAAQ;AAEtC,QAAM,KAAK,UAAU,QAAQ,CAAC,EAAE,KAAK,IAAI;AACzC,QAAM,KAAK,UAAU,QAAQ,CAAC,EAAE,KAAK,IAAI;AAGzC,QAAM,OAAO,QACV,QAAQ,UAAU,EAAE,EACpB,QAAQ,UAAU,EAAE,EACpB,KAAK;AAER,SAAO;AAAA,IACL,MAAM;AAAA,IACN;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;AAEO,SAAS,cAAc,SAAoC;AAChE,SAAO,wBAAwB,OAAO,EAAE;AAC1C;AAEO,SAAS,wBAAwB,SAA6B;AACnE,MAAI,CAAC,WAAW,OAAO,EAAG,QAAO,EAAE,MAAM,CAAC,GAAG,QAAQ,CAAC,EAAE;AAExD,QAAM,QAAQ,YAAY,OAAO;AACjC,QAAM,OAA0B,CAAC;AACjC,QAAM,SAAuB,CAAC;AAE9B,aAAW,QAAQ,OAAO;AACxB,QAAI,QAAQ,IAAI,MAAM,MAAO;AAC7B,QAAI,KAAK,WAAW,GAAG,EAAG;AAC1B,QAAI,KAAK,WAAW,GAAG,EAAG;AAE1B,UAAM,WAAW,KAAK,SAAS,IAAI;AACnC,QAAI;AACF,YAAM,MAAM,qBAAqB,QAAQ;AACzC,UAAI,IAAI,YAAY,WAAW,cAAc,IAAI,YAAY,WAAW,cAAc;AACpF,aAAK,KAAK,GAAG;AAAA,MACf;AAAA,IACF,SAAS,KAAK;AACZ,aAAO,KAAK;AAAA,QACV,MAAM;AAAA,QACN,OAAO,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG;AAAA,MACxD,CAAC;AAAA,IACH;AAAA,EACF;AAEA,SAAO,EAAE,MAAM,OAAO;AACxB;AAOO,SAAS,kBAAkB,YAAoB,WAAsD;AAC1G,SAAO,4BAA4B,YAAY,SAAS,EAAE;AAC5D;AAEO,SAAS,4BAA4B,YAAoB,WAAqC;AACnG,QAAM,aAAa,oBAAI,IAA+B;AACtD,QAAM,YAA0B,CAAC;AAEjC,QAAM,cAAwB,CAAC,GAAG,mBAAmB;AACrD,MAAI,WAAW;AACb,eAAW,OAAO,WAAW;AAC3B,UAAI,CAAC,YAAY,SAAS,GAAG,GAAG;AAC9B,oBAAY,KAAK,GAAG;AAAA,MACtB;AAAA,IACF;AAAA,EACF;AAEA,aAAW,OAAO,aAAa;AAC7B,UAAM,EAAE,MAAM,OAAO,IAAI,wBAAwB,KAAK,YAAY,GAAG,CAAC;AACtE,eAAW,IAAI,KAAK,IAAI;AACxB,cAAU,KAAK,GAAG,MAAM;AAAA,EAC1B;AAEA,SAAO,
EAAE,YAAY,QAAQ,UAAU;AACzC;AAGO,SAAS,eAAe,MAAsB;AACnD,SAAO,KAAK,KAAK,KAAK,SAAS,CAAC;AAClC;AAGO,SAAS,WAAW,KAAsB,OAA0B;AACzE,UAAQ,OAAO;AAAA,IACb,KAAK;AACH,aAAO,IAAI,MAAM,IAAI,YAAY;AAAA,IACnC,KAAK;AACH,aAAO,IAAI,MAAM,IAAI,MAAM,IAAI,KAAK,MAAM,GAAG,GAAG;AAAA,IAClD,KAAK;AACH,aAAO,IAAI;AAAA,EACf;AACF;","names":[]}
|
|
@@ -0,0 +1,154 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
|
|
3
|
+
import {
|
|
4
|
+
estimateTokens,
|
|
5
|
+
getAtLevel,
|
|
6
|
+
loadAllPrimitivesWithErrors
|
|
7
|
+
} from "./chunk-UPLBF4RZ.js";
|
|
8
|
+
import {
|
|
9
|
+
log
|
|
10
|
+
} from "./chunk-BSKDOFRT.js";
|
|
11
|
+
|
|
12
|
+
// src/runtime/context-loader.ts
|
|
13
|
+
import { readFileSync, existsSync } from "fs";
|
|
14
|
+
import { join } from "path";
|
|
15
|
+
function buildSystemPrompt(harnessDir, config) {
|
|
16
|
+
const maxTokens = config.model.max_tokens;
|
|
17
|
+
const budget = {
|
|
18
|
+
max_tokens: maxTokens,
|
|
19
|
+
used_tokens: 0,
|
|
20
|
+
remaining: maxTokens,
|
|
21
|
+
loaded_files: []
|
|
22
|
+
};
|
|
23
|
+
const warnings = [];
|
|
24
|
+
const sections = [];
|
|
25
|
+
const corePath = join(harnessDir, "CORE.md");
|
|
26
|
+
if (existsSync(corePath)) {
|
|
27
|
+
const core = readFileSync(corePath, "utf-8");
|
|
28
|
+
sections.push(`# CORE IDENTITY
|
|
29
|
+
|
|
30
|
+
${core}`);
|
|
31
|
+
budget.used_tokens += estimateTokens(core);
|
|
32
|
+
budget.loaded_files.push("CORE.md");
|
|
33
|
+
}
|
|
34
|
+
const statePath = join(harnessDir, "state.md");
|
|
35
|
+
if (existsSync(statePath)) {
|
|
36
|
+
const state = readFileSync(statePath, "utf-8");
|
|
37
|
+
sections.push(`# CURRENT STATE
|
|
38
|
+
|
|
39
|
+
${state}`);
|
|
40
|
+
budget.used_tokens += estimateTokens(state);
|
|
41
|
+
budget.loaded_files.push("state.md");
|
|
42
|
+
}
|
|
43
|
+
const systemPath = join(harnessDir, "SYSTEM.md");
|
|
44
|
+
if (existsSync(systemPath)) {
|
|
45
|
+
const system = readFileSync(systemPath, "utf-8");
|
|
46
|
+
sections.push(`# SYSTEM
|
|
47
|
+
|
|
48
|
+
${system}`);
|
|
49
|
+
budget.used_tokens += estimateTokens(system);
|
|
50
|
+
budget.loaded_files.push("SYSTEM.md");
|
|
51
|
+
}
|
|
52
|
+
const extDirs = config.extensions?.directories ?? [];
|
|
53
|
+
const { primitives, errors: parseErrors } = loadAllPrimitivesWithErrors(harnessDir, extDirs);
|
|
54
|
+
if (parseErrors.length > 0) {
|
|
55
|
+
for (const pe of parseErrors) {
|
|
56
|
+
log.warn(`Failed to parse primitive: ${pe.path} \u2014 ${pe.error}`);
|
|
57
|
+
}
|
|
58
|
+
warnings.push(`${parseErrors.length} primitive file(s) failed to parse`);
|
|
59
|
+
}
|
|
60
|
+
const targetBudget = maxTokens * 0.15;
|
|
61
|
+
const priorityOrder = ["rules", "instincts", "skills", "playbooks", "tools", "workflows", "agents"];
|
|
62
|
+
for (const dir of extDirs) {
|
|
63
|
+
if (!priorityOrder.includes(dir)) {
|
|
64
|
+
priorityOrder.push(dir);
|
|
65
|
+
}
|
|
66
|
+
}
|
|
67
|
+
const allDocs = [];
|
|
68
|
+
for (const category of priorityOrder) {
|
|
69
|
+
const docs = primitives.get(category);
|
|
70
|
+
if (!docs || docs.length === 0) continue;
|
|
71
|
+
for (const doc of docs) {
|
|
72
|
+
allDocs.push({ category, doc });
|
|
73
|
+
}
|
|
74
|
+
}
|
|
75
|
+
if (allDocs.length === 0) {
|
|
76
|
+
warnings.push("No primitives found \u2014 add rules, instincts, or skills to improve agent behavior");
|
|
77
|
+
}
|
|
78
|
+
const primitiveBudget = targetBudget - budget.used_tokens;
|
|
79
|
+
let totalL2Demand = 0;
|
|
80
|
+
for (const { doc } of allDocs) {
|
|
81
|
+
totalL2Demand += estimateTokens(getAtLevel(doc, 2));
|
|
82
|
+
}
|
|
83
|
+
let globalLevel;
|
|
84
|
+
if (totalL2Demand <= primitiveBudget) {
|
|
85
|
+
globalLevel = 2;
|
|
86
|
+
} else {
|
|
87
|
+
let totalL1Demand = 0;
|
|
88
|
+
for (const { doc } of allDocs) {
|
|
89
|
+
totalL1Demand += estimateTokens(getAtLevel(doc, 1));
|
|
90
|
+
}
|
|
91
|
+
globalLevel = totalL1Demand <= primitiveBudget ? 1 : 0;
|
|
92
|
+
}
|
|
93
|
+
for (const category of priorityOrder) {
|
|
94
|
+
const docs = primitives.get(category);
|
|
95
|
+
if (!docs || docs.length === 0) continue;
|
|
96
|
+
const categoryLabel = category.toUpperCase();
|
|
97
|
+
const categoryDocs = [];
|
|
98
|
+
for (const doc of docs) {
|
|
99
|
+
let level = globalLevel;
|
|
100
|
+
let content = getAtLevel(doc, level);
|
|
101
|
+
let tokens = estimateTokens(content);
|
|
102
|
+
while (budget.used_tokens + tokens > targetBudget && level > 0) {
|
|
103
|
+
level = level - 1;
|
|
104
|
+
content = getAtLevel(doc, level);
|
|
105
|
+
tokens = estimateTokens(content);
|
|
106
|
+
}
|
|
107
|
+
categoryDocs.push(`### ${doc.frontmatter.id}
|
|
108
|
+
${content}`);
|
|
109
|
+
budget.used_tokens += tokens;
|
|
110
|
+
budget.loaded_files.push(doc.path);
|
|
111
|
+
}
|
|
112
|
+
if (categoryDocs.length > 0) {
|
|
113
|
+
sections.push(`# ${categoryLabel}
|
|
114
|
+
|
|
115
|
+
${categoryDocs.join("\n\n")}`);
|
|
116
|
+
}
|
|
117
|
+
}
|
|
118
|
+
const scratchPath = join(harnessDir, "memory", "scratch.md");
|
|
119
|
+
if (existsSync(scratchPath)) {
|
|
120
|
+
const scratch = readFileSync(scratchPath, "utf-8");
|
|
121
|
+
if (scratch.trim()) {
|
|
122
|
+
sections.push(`# SCRATCH (Current Working Memory)
|
|
123
|
+
|
|
124
|
+
${scratch}`);
|
|
125
|
+
budget.used_tokens += estimateTokens(scratch);
|
|
126
|
+
budget.loaded_files.push("memory/scratch.md");
|
|
127
|
+
}
|
|
128
|
+
}
|
|
129
|
+
budget.remaining = maxTokens - budget.used_tokens;
|
|
130
|
+
const usagePercent = budget.used_tokens / maxTokens * 100;
|
|
131
|
+
if (usagePercent > 12) {
|
|
132
|
+
warnings.push(
|
|
133
|
+
`System prompt using ${usagePercent.toFixed(1)}% of total context (${budget.used_tokens}/${maxTokens} tokens) \u2014 some primitives may be truncated`
|
|
134
|
+
);
|
|
135
|
+
log.warn(
|
|
136
|
+
`Context budget high: ${budget.used_tokens}/${maxTokens} tokens (${usagePercent.toFixed(1)}%), ${budget.loaded_files.length} files loaded`
|
|
137
|
+
);
|
|
138
|
+
}
|
|
139
|
+
if (globalLevel < 2) {
|
|
140
|
+
const levelName = globalLevel === 0 ? "L0 (summary only)" : "L1 (paragraph summary)";
|
|
141
|
+
warnings.push(`Primitives loaded at ${levelName} due to budget constraints`);
|
|
142
|
+
}
|
|
143
|
+
return {
|
|
144
|
+
systemPrompt: sections.join("\n\n---\n\n"),
|
|
145
|
+
budget,
|
|
146
|
+
parseErrors,
|
|
147
|
+
warnings
|
|
148
|
+
};
|
|
149
|
+
}
|
|
150
|
+
|
|
151
|
+
export {
|
|
152
|
+
buildSystemPrompt
|
|
153
|
+
};
|
|
154
|
+
//# sourceMappingURL=chunk-UWQTZMNI.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/runtime/context-loader.ts"],"sourcesContent":["import { readFileSync, existsSync } from 'fs';\nimport { join } from 'path';\nimport { loadAllPrimitivesWithErrors, estimateTokens, getAtLevel } from '../primitives/loader.js';\nimport type { ParseError } from '../primitives/loader.js';\nimport type { HarnessConfig, HarnessDocument, ContextBudget } from '../core/types.js';\nimport { log } from '../core/logger.js';\n\nexport interface LoadedContext {\n systemPrompt: string;\n budget: ContextBudget;\n parseErrors: ParseError[];\n warnings: string[];\n}\n\nexport function buildSystemPrompt(harnessDir: string, config: HarnessConfig): LoadedContext {\n const maxTokens = config.model.max_tokens;\n const budget: ContextBudget = {\n max_tokens: maxTokens,\n used_tokens: 0,\n remaining: maxTokens,\n loaded_files: [],\n };\n\n const warnings: string[] = [];\n const sections: string[] = [];\n\n // --- Step 1: Load CORE.md (always, full content) ---\n const corePath = join(harnessDir, 'CORE.md');\n if (existsSync(corePath)) {\n const core = readFileSync(corePath, 'utf-8');\n sections.push(`# CORE IDENTITY\\n\\n${core}`);\n budget.used_tokens += estimateTokens(core);\n budget.loaded_files.push('CORE.md');\n }\n\n // --- Step 2: Load state.md ---\n const statePath = join(harnessDir, 'state.md');\n if (existsSync(statePath)) {\n const state = readFileSync(statePath, 'utf-8');\n sections.push(`# CURRENT STATE\\n\\n${state}`);\n budget.used_tokens += estimateTokens(state);\n budget.loaded_files.push('state.md');\n }\n\n // --- Step 3: Load SYSTEM.md (boot instructions) ---\n const systemPath = join(harnessDir, 'SYSTEM.md');\n if (existsSync(systemPath)) {\n const system = readFileSync(systemPath, 'utf-8');\n sections.push(`# SYSTEM\\n\\n${system}`);\n budget.used_tokens += estimateTokens(system);\n budget.loaded_files.push('SYSTEM.md');\n }\n\n // --- Step 4: Load all primitives at appropriate level ---\n const extDirs = config.extensions?.directories ?? 
[];\n const { primitives, errors: parseErrors } = loadAllPrimitivesWithErrors(harnessDir, extDirs);\n\n // Report parse errors\n if (parseErrors.length > 0) {\n for (const pe of parseErrors) {\n log.warn(`Failed to parse primitive: ${pe.path} — ${pe.error}`);\n }\n warnings.push(`${parseErrors.length} primitive file(s) failed to parse`);\n }\n\n const targetBudget = maxTokens * 0.15; // Use 15% of context for harness\n\n // Priority order for loading primitives (core dirs first, extensions appended)\n const priorityOrder = ['rules', 'instincts', 'skills', 'playbooks', 'tools', 'workflows', 'agents'];\n for (const dir of extDirs) {\n if (!priorityOrder.includes(dir)) {\n priorityOrder.push(dir);\n }\n }\n\n // Collect all docs to estimate total demand before deciding levels\n const allDocs: { category: string; doc: HarnessDocument }[] = [];\n for (const category of priorityOrder) {\n const docs = primitives.get(category);\n if (!docs || docs.length === 0) continue;\n for (const doc of docs) {\n allDocs.push({ category, doc });\n }\n }\n\n if (allDocs.length === 0) {\n warnings.push('No primitives found — add rules, instincts, or skills to improve agent behavior');\n }\n\n // Estimate total L2 demand vs available budget for primitives\n const primitiveBudget = targetBudget - budget.used_tokens;\n let totalL2Demand = 0;\n for (const { doc } of allDocs) {\n totalL2Demand += estimateTokens(getAtLevel(doc, 2));\n }\n\n // Choose a global disclosure level based on how much fits\n let globalLevel: 0 | 1 | 2;\n if (totalL2Demand <= primitiveBudget) {\n globalLevel = 2; // Everything fits at full\n } else {\n // Estimate L1 demand\n let totalL1Demand = 0;\n for (const { doc } of allDocs) {\n totalL1Demand += estimateTokens(getAtLevel(doc, 1));\n }\n globalLevel = totalL1Demand <= primitiveBudget ? 
1 : 0;\n }\n\n for (const category of priorityOrder) {\n const docs = primitives.get(category);\n if (!docs || docs.length === 0) continue;\n\n const categoryLabel = category.toUpperCase();\n const categoryDocs: string[] = [];\n\n for (const doc of docs) {\n // Start from global level, fall back if this doc would exceed budget\n let level = globalLevel;\n let content = getAtLevel(doc, level);\n let tokens = estimateTokens(content);\n\n while (budget.used_tokens + tokens > targetBudget && level > 0) {\n level = (level - 1) as 0 | 1;\n content = getAtLevel(doc, level);\n tokens = estimateTokens(content);\n }\n\n categoryDocs.push(`### ${doc.frontmatter.id}\\n${content}`);\n budget.used_tokens += tokens;\n budget.loaded_files.push(doc.path);\n }\n\n if (categoryDocs.length > 0) {\n sections.push(`# ${categoryLabel}\\n\\n${categoryDocs.join('\\n\\n')}`);\n }\n }\n\n // --- Step 5: Load scratch.md if exists ---\n const scratchPath = join(harnessDir, 'memory', 'scratch.md');\n if (existsSync(scratchPath)) {\n const scratch = readFileSync(scratchPath, 'utf-8');\n if (scratch.trim()) {\n sections.push(`# SCRATCH (Current Working Memory)\\n\\n${scratch}`);\n budget.used_tokens += estimateTokens(scratch);\n budget.loaded_files.push('memory/scratch.md');\n }\n }\n\n budget.remaining = maxTokens - budget.used_tokens;\n\n // --- Step 6: Budget warnings ---\n const usagePercent = (budget.used_tokens / maxTokens) * 100;\n if (usagePercent > 12) {\n // System prompt using more than 80% of its 15% allocation\n warnings.push(\n `System prompt using ${usagePercent.toFixed(1)}% of total context ` +\n `(${budget.used_tokens}/${maxTokens} tokens) — some primitives may be truncated`,\n );\n log.warn(\n `Context budget high: ${budget.used_tokens}/${maxTokens} tokens ` +\n `(${usagePercent.toFixed(1)}%), ${budget.loaded_files.length} files loaded`,\n );\n }\n\n if (globalLevel < 2) {\n const levelName = globalLevel === 0 ? 
'L0 (summary only)' : 'L1 (paragraph summary)';\n warnings.push(`Primitives loaded at ${levelName} due to budget constraints`);\n }\n\n return {\n systemPrompt: sections.join('\\n\\n---\\n\\n'),\n budget,\n parseErrors,\n warnings,\n };\n}\n"],"mappings":";;;;;;;;;;;;AAAA,SAAS,cAAc,kBAAkB;AACzC,SAAS,YAAY;AAad,SAAS,kBAAkB,YAAoB,QAAsC;AAC1F,QAAM,YAAY,OAAO,MAAM;AAC/B,QAAM,SAAwB;AAAA,IAC5B,YAAY;AAAA,IACZ,aAAa;AAAA,IACb,WAAW;AAAA,IACX,cAAc,CAAC;AAAA,EACjB;AAEA,QAAM,WAAqB,CAAC;AAC5B,QAAM,WAAqB,CAAC;AAG5B,QAAM,WAAW,KAAK,YAAY,SAAS;AAC3C,MAAI,WAAW,QAAQ,GAAG;AACxB,UAAM,OAAO,aAAa,UAAU,OAAO;AAC3C,aAAS,KAAK;AAAA;AAAA,EAAsB,IAAI,EAAE;AAC1C,WAAO,eAAe,eAAe,IAAI;AACzC,WAAO,aAAa,KAAK,SAAS;AAAA,EACpC;AAGA,QAAM,YAAY,KAAK,YAAY,UAAU;AAC7C,MAAI,WAAW,SAAS,GAAG;AACzB,UAAM,QAAQ,aAAa,WAAW,OAAO;AAC7C,aAAS,KAAK;AAAA;AAAA,EAAsB,KAAK,EAAE;AAC3C,WAAO,eAAe,eAAe,KAAK;AAC1C,WAAO,aAAa,KAAK,UAAU;AAAA,EACrC;AAGA,QAAM,aAAa,KAAK,YAAY,WAAW;AAC/C,MAAI,WAAW,UAAU,GAAG;AAC1B,UAAM,SAAS,aAAa,YAAY,OAAO;AAC/C,aAAS,KAAK;AAAA;AAAA,EAAe,MAAM,EAAE;AACrC,WAAO,eAAe,eAAe,MAAM;AAC3C,WAAO,aAAa,KAAK,WAAW;AAAA,EACtC;AAGA,QAAM,UAAU,OAAO,YAAY,eAAe,CAAC;AACnD,QAAM,EAAE,YAAY,QAAQ,YAAY,IAAI,4BAA4B,YAAY,OAAO;AAG3F,MAAI,YAAY,SAAS,GAAG;AAC1B,eAAW,MAAM,aAAa;AAC5B,UAAI,KAAK,8BAA8B,GAAG,IAAI,WAAM,GAAG,KAAK,EAAE;AAAA,IAChE;AACA,aAAS,KAAK,GAAG,YAAY,MAAM,oCAAoC;AAAA,EACzE;AAEA,QAAM,eAAe,YAAY;AAGjC,QAAM,gBAAgB,CAAC,SAAS,aAAa,UAAU,aAAa,SAAS,aAAa,QAAQ;AAClG,aAAW,OAAO,SAAS;AACzB,QAAI,CAAC,cAAc,SAAS,GAAG,GAAG;AAChC,oBAAc,KAAK,GAAG;AAAA,IACxB;AAAA,EACF;AAGA,QAAM,UAAwD,CAAC;AAC/D,aAAW,YAAY,eAAe;AACpC,UAAM,OAAO,WAAW,IAAI,QAAQ;AACpC,QAAI,CAAC,QAAQ,KAAK,WAAW,EAAG;AAChC,eAAW,OAAO,MAAM;AACtB,cAAQ,KAAK,EAAE,UAAU,IAAI,CAAC;AAAA,IAChC;AAAA,EACF;AAEA,MAAI,QAAQ,WAAW,GAAG;AACxB,aAAS,KAAK,sFAAiF;AAAA,EACjG;AAGA,QAAM,kBAAkB,eAAe,OAAO;AAC9C,MAAI,gBAAgB;AACpB,aAAW,EAAE,IAAI,KAAK,SAAS;AAC7B,qBAAiB,eAAe,WAAW,KAAK,CAAC,CAAC;AAAA,EACpD;AAGA,MAAI;AACJ,MAAI,iBAAiB,iBAAiB;AACpC,kBAAc;AAAA,EAChB,OAAO;AAEL,QAAI,gBAAgB;AACpB,eAAW,EAAE,IAAI,KAAK,SAAS;AAC7B,uBAAiB,eAAe,WA
AW,KAAK,CAAC,CAAC;AAAA,IACpD;AACA,kBAAc,iBAAiB,kBAAkB,IAAI;AAAA,EACvD;AAEA,aAAW,YAAY,eAAe;AACpC,UAAM,OAAO,WAAW,IAAI,QAAQ;AACpC,QAAI,CAAC,QAAQ,KAAK,WAAW,EAAG;AAEhC,UAAM,gBAAgB,SAAS,YAAY;AAC3C,UAAM,eAAyB,CAAC;AAEhC,eAAW,OAAO,MAAM;AAEtB,UAAI,QAAQ;AACZ,UAAI,UAAU,WAAW,KAAK,KAAK;AACnC,UAAI,SAAS,eAAe,OAAO;AAEnC,aAAO,OAAO,cAAc,SAAS,gBAAgB,QAAQ,GAAG;AAC9D,gBAAS,QAAQ;AACjB,kBAAU,WAAW,KAAK,KAAK;AAC/B,iBAAS,eAAe,OAAO;AAAA,MACjC;AAEA,mBAAa,KAAK,OAAO,IAAI,YAAY,EAAE;AAAA,EAAK,OAAO,EAAE;AACzD,aAAO,eAAe;AACtB,aAAO,aAAa,KAAK,IAAI,IAAI;AAAA,IACnC;AAEA,QAAI,aAAa,SAAS,GAAG;AAC3B,eAAS,KAAK,KAAK,aAAa;AAAA;AAAA,EAAO,aAAa,KAAK,MAAM,CAAC,EAAE;AAAA,IACpE;AAAA,EACF;AAGA,QAAM,cAAc,KAAK,YAAY,UAAU,YAAY;AAC3D,MAAI,WAAW,WAAW,GAAG;AAC3B,UAAM,UAAU,aAAa,aAAa,OAAO;AACjD,QAAI,QAAQ,KAAK,GAAG;AAClB,eAAS,KAAK;AAAA;AAAA,EAAyC,OAAO,EAAE;AAChE,aAAO,eAAe,eAAe,OAAO;AAC5C,aAAO,aAAa,KAAK,mBAAmB;AAAA,IAC9C;AAAA,EACF;AAEA,SAAO,YAAY,YAAY,OAAO;AAGtC,QAAM,eAAgB,OAAO,cAAc,YAAa;AACxD,MAAI,eAAe,IAAI;AAErB,aAAS;AAAA,MACP,uBAAuB,aAAa,QAAQ,CAAC,CAAC,uBAC1C,OAAO,WAAW,IAAI,SAAS;AAAA,IACrC;AACA,QAAI;AAAA,MACF,wBAAwB,OAAO,WAAW,IAAI,SAAS,YACnD,aAAa,QAAQ,CAAC,CAAC,OAAO,OAAO,aAAa,MAAM;AAAA,IAC9D;AAAA,EACF;AAEA,MAAI,cAAc,GAAG;AACnB,UAAM,YAAY,gBAAgB,IAAI,sBAAsB;AAC5D,aAAS,KAAK,wBAAwB,SAAS,4BAA4B;AAAA,EAC7E;AAEA,SAAO;AAAA,IACL,cAAc,SAAS,KAAK,aAAa;AAAA,IACzC;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;","names":[]}
|