@agntk/agent-harness 0.1.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/NOTICE +41 -0
- package/README.md +445 -0
- package/defaults/agents/summarizer.md +49 -0
- package/defaults/instincts/lead-with-answer.md +24 -0
- package/defaults/instincts/qualify-before-recommending.md +40 -0
- package/defaults/instincts/read-before-edit.md +23 -0
- package/defaults/instincts/search-before-create.md +23 -0
- package/defaults/playbooks/ship-feature.md +31 -0
- package/defaults/rules/ask-before-assuming.md +35 -0
- package/defaults/rules/operations.md +35 -0
- package/defaults/rules/respect-the-user.md +39 -0
- package/defaults/skills/business-analyst.md +181 -0
- package/defaults/skills/content-marketer.md +184 -0
- package/defaults/skills/research.md +34 -0
- package/defaults/tools/example-web-search.md +60 -0
- package/defaults/workflows/daily-reflection.md +54 -0
- package/dist/agent-framework-K4GUIICH.js +344 -0
- package/dist/agent-framework-K4GUIICH.js.map +1 -0
- package/dist/analytics-RPT73WNM.js +12 -0
- package/dist/analytics-RPT73WNM.js.map +1 -0
- package/dist/auto-processor-OLE45UI3.js +13 -0
- package/dist/auto-processor-OLE45UI3.js.map +1 -0
- package/dist/chunk-274RV3YO.js +162 -0
- package/dist/chunk-274RV3YO.js.map +1 -0
- package/dist/chunk-4CWAGBNS.js +168 -0
- package/dist/chunk-4CWAGBNS.js.map +1 -0
- package/dist/chunk-4FDUOGSZ.js +69 -0
- package/dist/chunk-4FDUOGSZ.js.map +1 -0
- package/dist/chunk-5H34JPMB.js +199 -0
- package/dist/chunk-5H34JPMB.js.map +1 -0
- package/dist/chunk-6EMOEYGU.js +102 -0
- package/dist/chunk-6EMOEYGU.js.map +1 -0
- package/dist/chunk-A7BJPQQ6.js +236 -0
- package/dist/chunk-A7BJPQQ6.js.map +1 -0
- package/dist/chunk-AGAAFJEO.js +76 -0
- package/dist/chunk-AGAAFJEO.js.map +1 -0
- package/dist/chunk-BSKDOFRT.js +65 -0
- package/dist/chunk-BSKDOFRT.js.map +1 -0
- package/dist/chunk-CHJ5GNZC.js +100 -0
- package/dist/chunk-CHJ5GNZC.js.map +1 -0
- package/dist/chunk-CSL3ERUI.js +307 -0
- package/dist/chunk-CSL3ERUI.js.map +1 -0
- package/dist/chunk-DA7IKHC4.js +229 -0
- package/dist/chunk-DA7IKHC4.js.map +1 -0
- package/dist/chunk-DGUM43GV.js +11 -0
- package/dist/chunk-DGUM43GV.js.map +1 -0
- package/dist/chunk-DTTXPHFW.js +211 -0
- package/dist/chunk-DTTXPHFW.js.map +1 -0
- package/dist/chunk-FD55B3IO.js +204 -0
- package/dist/chunk-FD55B3IO.js.map +1 -0
- package/dist/chunk-FLZU44SV.js +230 -0
- package/dist/chunk-FLZU44SV.js.map +1 -0
- package/dist/chunk-GJNNR2RA.js +200 -0
- package/dist/chunk-GJNNR2RA.js.map +1 -0
- package/dist/chunk-GNUSHD2Y.js +111 -0
- package/dist/chunk-GNUSHD2Y.js.map +1 -0
- package/dist/chunk-GUJTBGVS.js +2212 -0
- package/dist/chunk-GUJTBGVS.js.map +1 -0
- package/dist/chunk-IZ6UZ3ZL.js +207 -0
- package/dist/chunk-IZ6UZ3ZL.js.map +1 -0
- package/dist/chunk-JKMGYWXB.js +197 -0
- package/dist/chunk-JKMGYWXB.js.map +1 -0
- package/dist/chunk-KFX54TQM.js +165 -0
- package/dist/chunk-KFX54TQM.js.map +1 -0
- package/dist/chunk-M7NXUK55.js +199 -0
- package/dist/chunk-M7NXUK55.js.map +1 -0
- package/dist/chunk-MPZ3BPUI.js +374 -0
- package/dist/chunk-MPZ3BPUI.js.map +1 -0
- package/dist/chunk-OC6YSTDX.js +119 -0
- package/dist/chunk-OC6YSTDX.js.map +1 -0
- package/dist/chunk-RC6MEZB6.js +469 -0
- package/dist/chunk-RC6MEZB6.js.map +1 -0
- package/dist/chunk-RY3ZFII7.js +3440 -0
- package/dist/chunk-RY3ZFII7.js.map +1 -0
- package/dist/chunk-TAT6JU3X.js +167 -0
- package/dist/chunk-TAT6JU3X.js.map +1 -0
- package/dist/chunk-UDZIS2AQ.js +79 -0
- package/dist/chunk-UDZIS2AQ.js.map +1 -0
- package/dist/chunk-UPLBF4RZ.js +115 -0
- package/dist/chunk-UPLBF4RZ.js.map +1 -0
- package/dist/chunk-UWQTZMNI.js +154 -0
- package/dist/chunk-UWQTZMNI.js.map +1 -0
- package/dist/chunk-W4T7PGI2.js +346 -0
- package/dist/chunk-W4T7PGI2.js.map +1 -0
- package/dist/chunk-XTBKL5BI.js +111 -0
- package/dist/chunk-XTBKL5BI.js.map +1 -0
- package/dist/chunk-YIJY5DBV.js +399 -0
- package/dist/chunk-YIJY5DBV.js.map +1 -0
- package/dist/chunk-YUFNYN2H.js +242 -0
- package/dist/chunk-YUFNYN2H.js.map +1 -0
- package/dist/chunk-Z2PUCXTZ.js +94 -0
- package/dist/chunk-Z2PUCXTZ.js.map +1 -0
- package/dist/chunk-ZZJOFKAT.js +13 -0
- package/dist/chunk-ZZJOFKAT.js.map +1 -0
- package/dist/cli/index.js +3661 -0
- package/dist/cli/index.js.map +1 -0
- package/dist/config-WVMRUOCA.js +13 -0
- package/dist/config-WVMRUOCA.js.map +1 -0
- package/dist/context-loader-3ORBPMHJ.js +13 -0
- package/dist/context-loader-3ORBPMHJ.js.map +1 -0
- package/dist/conversation-QDEIDQPH.js +22 -0
- package/dist/conversation-QDEIDQPH.js.map +1 -0
- package/dist/cost-tracker-RS3W7SVY.js +24 -0
- package/dist/cost-tracker-RS3W7SVY.js.map +1 -0
- package/dist/delegate-VJCJLYEK.js +29 -0
- package/dist/delegate-VJCJLYEK.js.map +1 -0
- package/dist/emotional-state-VQVRA6ED.js +206 -0
- package/dist/emotional-state-VQVRA6ED.js.map +1 -0
- package/dist/env-discovery-2BLVMAIM.js +251 -0
- package/dist/env-discovery-2BLVMAIM.js.map +1 -0
- package/dist/export-6GCYHEHQ.js +165 -0
- package/dist/export-6GCYHEHQ.js.map +1 -0
- package/dist/graph-YUIPOSOO.js +14 -0
- package/dist/graph-YUIPOSOO.js.map +1 -0
- package/dist/harness-LCHA3DWP.js +10 -0
- package/dist/harness-LCHA3DWP.js.map +1 -0
- package/dist/harness-WE4SLCML.js +26 -0
- package/dist/harness-WE4SLCML.js.map +1 -0
- package/dist/health-NZ6WNIMV.js +23 -0
- package/dist/health-NZ6WNIMV.js.map +1 -0
- package/dist/index.d.ts +3612 -0
- package/dist/index.js +13501 -0
- package/dist/index.js.map +1 -0
- package/dist/indexer-LONANRRM.js +16 -0
- package/dist/indexer-LONANRRM.js.map +1 -0
- package/dist/instinct-learner-SRM72DHF.js +20 -0
- package/dist/instinct-learner-SRM72DHF.js.map +1 -0
- package/dist/intake-4M3HNU43.js +21 -0
- package/dist/intake-4M3HNU43.js.map +1 -0
- package/dist/intelligence-HJOCA4SJ.js +1081 -0
- package/dist/intelligence-HJOCA4SJ.js.map +1 -0
- package/dist/journal-WANJL3MI.js +24 -0
- package/dist/journal-WANJL3MI.js.map +1 -0
- package/dist/loader-C3TKIKZR.js +23 -0
- package/dist/loader-C3TKIKZR.js.map +1 -0
- package/dist/mcp-WTQJJZAO.js +15 -0
- package/dist/mcp-WTQJJZAO.js.map +1 -0
- package/dist/mcp-discovery-WPAQFL6S.js +377 -0
- package/dist/mcp-discovery-WPAQFL6S.js.map +1 -0
- package/dist/mcp-installer-6O2XXD3V.js +394 -0
- package/dist/mcp-installer-6O2XXD3V.js.map +1 -0
- package/dist/metrics-KXGNFAAB.js +20 -0
- package/dist/metrics-KXGNFAAB.js.map +1 -0
- package/dist/primitive-registry-I6VTIR4W.js +512 -0
- package/dist/primitive-registry-I6VTIR4W.js.map +1 -0
- package/dist/project-discovery-C4UMD7JI.js +246 -0
- package/dist/project-discovery-C4UMD7JI.js.map +1 -0
- package/dist/provider-LQHQX7Z7.js +26 -0
- package/dist/provider-LQHQX7Z7.js.map +1 -0
- package/dist/provider-SXPQZ74H.js +28 -0
- package/dist/provider-SXPQZ74H.js.map +1 -0
- package/dist/rate-limiter-RLRVM325.js +22 -0
- package/dist/rate-limiter-RLRVM325.js.map +1 -0
- package/dist/rule-engine-YGQ3RYZM.js +182 -0
- package/dist/rule-engine-YGQ3RYZM.js.map +1 -0
- package/dist/scaffold-A3VRRCBV.js +347 -0
- package/dist/scaffold-A3VRRCBV.js.map +1 -0
- package/dist/scheduler-XHHIVHRI.js +397 -0
- package/dist/scheduler-XHHIVHRI.js.map +1 -0
- package/dist/search-V3W5JMJG.js +75 -0
- package/dist/search-V3W5JMJG.js.map +1 -0
- package/dist/semantic-search-2DTOO5UX.js +241 -0
- package/dist/semantic-search-2DTOO5UX.js.map +1 -0
- package/dist/serve-DTQ3HENY.js +291 -0
- package/dist/serve-DTQ3HENY.js.map +1 -0
- package/dist/sessions-CZGVXKQE.js +21 -0
- package/dist/sessions-CZGVXKQE.js.map +1 -0
- package/dist/sources-RW5DT56F.js +32 -0
- package/dist/sources-RW5DT56F.js.map +1 -0
- package/dist/starter-packs-76YUVHEU.js +893 -0
- package/dist/starter-packs-76YUVHEU.js.map +1 -0
- package/dist/state-GMXILIHW.js +13 -0
- package/dist/state-GMXILIHW.js.map +1 -0
- package/dist/state-merge-NKO5FRBA.js +174 -0
- package/dist/state-merge-NKO5FRBA.js.map +1 -0
- package/dist/telemetry-UC6PBXC7.js +22 -0
- package/dist/telemetry-UC6PBXC7.js.map +1 -0
- package/dist/tool-executor-MJ7IG7PQ.js +28 -0
- package/dist/tool-executor-MJ7IG7PQ.js.map +1 -0
- package/dist/tools-DZ4KETET.js +20 -0
- package/dist/tools-DZ4KETET.js.map +1 -0
- package/dist/types-EW7AIB3R.js +18 -0
- package/dist/types-EW7AIB3R.js.map +1 -0
- package/dist/types-WGDLSPO6.js +16 -0
- package/dist/types-WGDLSPO6.js.map +1 -0
- package/dist/universal-installer-QGS4SJGX.js +578 -0
- package/dist/universal-installer-QGS4SJGX.js.map +1 -0
- package/dist/validator-7WXMDIHH.js +22 -0
- package/dist/validator-7WXMDIHH.js.map +1 -0
- package/dist/verification-gate-FYXUX6LH.js +246 -0
- package/dist/verification-gate-FYXUX6LH.js.map +1 -0
- package/dist/versioning-Z3XNE2Q2.js +271 -0
- package/dist/versioning-Z3XNE2Q2.js.map +1 -0
- package/dist/watcher-ISJC7YKL.js +109 -0
- package/dist/watcher-ISJC7YKL.js.map +1 -0
- package/dist/web-server-DD7ZOP46.js +28 -0
- package/dist/web-server-DD7ZOP46.js.map +1 -0
- package/package.json +76 -0
- package/sources.yaml +121 -0
- package/templates/assistant/CORE.md +24 -0
- package/templates/assistant/SYSTEM.md +24 -0
- package/templates/assistant/config.yaml +51 -0
- package/templates/base/CORE.md +17 -0
- package/templates/base/SYSTEM.md +24 -0
- package/templates/base/config.yaml +51 -0
- package/templates/claude-opus/config.yaml +51 -0
- package/templates/code-reviewer/CORE.md +25 -0
- package/templates/code-reviewer/SYSTEM.md +30 -0
- package/templates/code-reviewer/config.yaml +51 -0
- package/templates/gpt4/config.yaml +51 -0
- package/templates/local/config.yaml +51 -0
|
@@ -0,0 +1,230 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
|
|
3
|
+
import {
|
|
4
|
+
fixCapability
|
|
5
|
+
} from "./chunk-W4T7PGI2.js";
|
|
6
|
+
import {
|
|
7
|
+
validateMcpConfig
|
|
8
|
+
} from "./chunk-5H34JPMB.js";
|
|
9
|
+
import {
|
|
10
|
+
buildSystemPrompt
|
|
11
|
+
} from "./chunk-UWQTZMNI.js";
|
|
12
|
+
import {
|
|
13
|
+
loadState
|
|
14
|
+
} from "./chunk-UDZIS2AQ.js";
|
|
15
|
+
import {
|
|
16
|
+
loadDirectoryWithErrors
|
|
17
|
+
} from "./chunk-UPLBF4RZ.js";
|
|
18
|
+
import {
|
|
19
|
+
loadConfig
|
|
20
|
+
} from "./chunk-CHJ5GNZC.js";
|
|
21
|
+
import {
|
|
22
|
+
getPrimitiveDirs
|
|
23
|
+
} from "./chunk-4CWAGBNS.js";
|
|
24
|
+
|
|
25
|
+
// src/runtime/validator.ts
|
|
26
|
+
import { existsSync, readdirSync, mkdirSync } from "fs";
|
|
27
|
+
import { join, relative } from "path";
|
|
28
|
+
/**
 * Comprehensive harness validation. Checks, in order:
 *  - required files (CORE.md) and optional files (SYSTEM.md, state.md, config.yaml)
 *  - config and state parseability
 *  - primitive directories, collecting per-file parse errors
 *  - cross-reference integrity of `related:` frontmatter fields
 *  - missing L0/L1 summaries
 *  - context budget usage (only when config loaded)
 *  - API-key presence in the environment
 *  - MCP server configuration
 *  - expected memory directory structure
 *
 * @param {string} dir - Harness root directory.
 * @returns {{ok: string[], warnings: string[], errors: string[],
 *            parseErrors: Array, primitiveCounts: Map<string, number>,
 *            totalPrimitives: number}} Aggregated validation report.
 *          Never throws: every failure is folded into errors/warnings.
 */
function validateHarness(dir) {
  const result = {
    ok: [],
    warnings: [],
    errors: [],
    parseErrors: [],
    primitiveCounts: /* @__PURE__ */ new Map(),
    totalPrimitives: 0
  };

  // --- Required files: absence is an error ---
  const requiredFiles = ["CORE.md"];
  for (const file of requiredFiles) {
    if (existsSync(join(dir, file))) {
      result.ok.push(`${file} exists`);
    } else {
      result.errors.push(`Missing required file: ${file}`);
    }
  }

  // --- Optional files: absence is only a warning ---
  const optionalFiles = ["SYSTEM.md", "state.md", "config.yaml"];
  for (const file of optionalFiles) {
    if (existsSync(join(dir, file))) {
      result.ok.push(`${file} exists`);
    } else {
      result.warnings.push(`Optional file missing: ${file}`);
    }
  }

  // --- Config validation. On failure `config` stays undefined and the
  //     config-dependent sections below are skipped. ---
  let config;
  try {
    config = loadConfig(dir);
    result.ok.push(`Config valid (agent: ${config.agent.name}, model: ${config.model.id})`);
  } catch (err) {
    result.errors.push(`Config error: ${err instanceof Error ? err.message : String(err)}`);
  }

  // --- State validation: a parse failure is only a warning ---
  try {
    const state = loadState(dir);
    result.ok.push(`State valid (mode: ${state.mode})`);
  } catch (err) {
    result.warnings.push(`State parse issue: ${err instanceof Error ? err.message : String(err)}`);
  }

  // --- Primitive loading + parse-error collection ---
  const primitiveDirs = getPrimitiveDirs(config);
  const allDocs = [];
  for (const primDir of primitiveDirs) {
    const fullPath = join(dir, primDir);
    if (!existsSync(fullPath)) {
      result.primitiveCounts.set(primDir, 0);
      continue;
    }
    const { docs, errors } = loadDirectoryWithErrors(fullPath);
    result.primitiveCounts.set(primDir, docs.length);
    result.totalPrimitives += docs.length;
    allDocs.push(...docs);
    if (errors.length > 0) {
      result.parseErrors.push(...errors);
      for (const pe of errors) {
        const relPath = relative(dir, pe.path);
        result.errors.push(`Parse error in ${relPath}: ${pe.error}`);
      }
    }
    if (docs.length > 0) {
      result.ok.push(`${primDir}/: ${docs.length} valid file(s)`);
    }
  }

  // --- Cross-reference integrity (frontmatter `related:` fields).
  //     A reference resolves if it is a known primitive id, an existing
  //     path, or an existing path once ".md" is appended. ---
  const knownIds = new Set(allDocs.map((d) => d.frontmatter.id));
  for (const doc of allDocs) {
    const related = doc.frontmatter.related;
    if (!related || related.length === 0) continue;
    for (const ref of related) {
      if (knownIds.has(ref)) continue;
      const refPath = join(dir, ref);
      if (existsSync(refPath)) continue;
      if (existsSync(refPath + ".md")) continue;
      const docRel = relative(dir, doc.path);
      result.warnings.push(`Broken reference in ${docRel}: "${ref}" not found (related: field)`);
    }
  }

  // --- Missing L0/L1 summary warnings ---
  let missingL0 = 0;
  let missingL1 = 0;
  for (const doc of allDocs) {
    if (!doc.l0) missingL0++;
    if (!doc.l1) missingL1++;
  }
  if (missingL0 > 0) {
    result.warnings.push(`${missingL0} primitive(s) missing L0 summary`);
  }
  if (missingL1 > 0) {
    result.warnings.push(`${missingL1} primitive(s) missing L1 summary`);
  }

  // --- Context budget (requires a valid config) ---
  if (config) {
    try {
      const ctx = buildSystemPrompt(dir, config);
      const usagePercent = (ctx.budget.used_tokens / ctx.budget.max_tokens * 100).toFixed(1);
      result.ok.push(
        `Context budget: ${ctx.budget.used_tokens}/${ctx.budget.max_tokens} tokens (${usagePercent}%)`
      );
      // Surface context-loader warnings alongside our own.
      for (const warning of ctx.warnings) {
        result.warnings.push(warning);
      }
    } catch (err) {
      // Best-effort check: only report in debug mode.
      if (process.env.DEBUG) console.error(`Validator config load: ${err instanceof Error ? err.message : String(err)}`);
    }
  }

  // --- API key presence ---
  if (process.env.OPENROUTER_API_KEY || process.env.ANTHROPIC_API_KEY || process.env.OPENAI_API_KEY) {
    const keys = [];
    if (process.env.OPENROUTER_API_KEY) keys.push("OPENROUTER_API_KEY");
    if (process.env.ANTHROPIC_API_KEY) keys.push("ANTHROPIC_API_KEY");
    if (process.env.OPENAI_API_KEY) keys.push("OPENAI_API_KEY");
    result.ok.push(`API key(s) configured: ${keys.join(", ")}`);
  } else {
    result.warnings.push("No API key set (OPENROUTER_API_KEY, ANTHROPIC_API_KEY, or OPENAI_API_KEY)");
  }

  // --- MCP server validation ---
  if (config) {
    const servers = config.mcp?.servers ?? {};
    const serverCount = Object.keys(servers).length;
    if (serverCount > 0) {
      const mcpErrors = validateMcpConfig(config);
      const enabledCount = Object.values(servers).filter((s) => s.enabled !== false).length;
      // The status line was duplicated in both branches of the original
      // `mcpErrors.length` check; it is unconditional, so push it once.
      result.ok.push(`MCP: ${serverCount} server(s) configured (${enabledCount} enabled)`);
      for (const err of mcpErrors) {
        result.errors.push(`MCP server "${err.server}": ${err.error}`);
      }
    }
  }

  // --- Memory directory structure ---
  const memoryDirs = ["memory", "memory/sessions", "memory/journal"];
  for (const memDir of memoryDirs) {
    if (!existsSync(join(dir, memDir))) {
      result.warnings.push(`Missing directory: ${memDir}/`);
    }
  }

  return result;
}
|
|
161
|
+
/**
 * Run validation, then auto-fix everything fixable:
 *  - create missing memory/intake directories
 *  - repair primitives via fixCapability (missing id/status/L0/L1/tags)
 * After fixing, stale L0/L1 warnings are recomputed so the report
 * reflects the post-fix state.
 *
 * @param {string} dir - Harness root directory.
 * @returns Validation result extended with `fixes: string[]` and
 *          `directoriesCreated: string[]`.
 */
function doctorHarness(dir) {
  // Phase 1: validate. Errors/warnings are carried into the doctor result.
  const validation = validateHarness(dir);
  const result = {
    ...validation,
    fixes: [],
    directoriesCreated: []
  };

  // Phase 2: create missing directories.
  const dirsToCreate = ["memory", "memory/sessions", "memory/journal", "intake"];
  for (const d of dirsToCreate) {
    const fullPath = join(dir, d);
    if (!existsSync(fullPath)) {
      mkdirSync(fullPath, { recursive: true });
      result.directoriesCreated.push(d);
      result.fixes.push(`Created directory: ${d}/`);
      // Drop the now-resolved warning about this directory.
      result.warnings = result.warnings.filter((w) => !w.includes(`Missing directory: ${d}/`));
    }
  }

  // Phase 3: auto-fix primitives with fixable issues. Load the config
  // best-effort so custom primitive directories are fixed too, matching
  // validateHarness (the original always used the default dirs here);
  // on failure we fall back to the defaults, as before.
  let config;
  try {
    config = loadConfig(dir);
  } catch (_configErr) {
    config = void 0;
  }
  const primitiveDirs = getPrimitiveDirs(config);
  for (const primDir of primitiveDirs) {
    const fullPath = join(dir, primDir);
    if (!existsSync(fullPath)) continue;
    let files;
    try {
      files = readdirSync(fullPath).filter(
        (f) => f.endsWith(".md") && !f.startsWith(".") && !f.startsWith("_")
      );
    } catch (_readErr) {
      // Unreadable directory: skip it rather than abort the doctor run.
      continue;
    }
    for (const file of files) {
      const filePath = join(fullPath, file);
      const fixResult = fixCapability(filePath);
      if (fixResult.fixes_applied.length > 0) {
        const relPath = relative(dir, filePath);
        for (const fix of fixResult.fixes_applied) {
          result.fixes.push(`${relPath}: ${fix}`);
        }
      }
    }
  }

  // Recalculate L0/L1 warnings after fixes: remove the pre-fix counts
  // and re-scan so only still-missing summaries are reported.
  if (result.fixes.length > 0) {
    result.warnings = result.warnings.filter(
      (w) => !w.includes("missing L0") && !w.includes("missing L1")
    );
    let missingL0 = 0;
    let missingL1 = 0;
    for (const primDir of primitiveDirs) {
      const fullPath = join(dir, primDir);
      if (!existsSync(fullPath)) continue;
      const { docs } = loadDirectoryWithErrors(fullPath);
      for (const doc of docs) {
        if (!doc.l0) missingL0++;
        if (!doc.l1) missingL1++;
      }
    }
    if (missingL0 > 0) {
      result.warnings.push(`${missingL0} primitive(s) still missing L0 summary`);
    }
    if (missingL1 > 0) {
      result.warnings.push(`${missingL1} primitive(s) still missing L1 summary`);
    }
  }

  return result;
}
|
|
225
|
+
|
|
226
|
+
// Public surface of this chunk: full validation and the auto-fixing doctor.
export {
  validateHarness,
  doctorHarness
};
//# sourceMappingURL=chunk-FLZU44SV.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/runtime/validator.ts"],"sourcesContent":["import { existsSync, readdirSync, mkdirSync } from 'fs';\nimport { join, relative } from 'path';\nimport { loadDirectoryWithErrors } from '../primitives/loader.js';\nimport { fixCapability } from './intake.js';\nimport { buildSystemPrompt } from './context-loader.js';\nimport { loadConfig } from '../core/config.js';\nimport { loadState } from './state.js';\nimport { validateMcpConfig } from './mcp.js';\nimport { getPrimitiveDirs } from '../core/types.js';\nimport type { HarnessConfig, HarnessDocument } from '../core/types.js';\nimport type { ParseError } from '../primitives/loader.js';\n\nexport interface ValidationResult {\n ok: string[];\n warnings: string[];\n errors: string[];\n parseErrors: ParseError[];\n primitiveCounts: Map<string, number>;\n totalPrimitives: number;\n}\n\n/**\n * Comprehensive harness validation:\n * - Required/optional files\n * - Config validation\n * - State validation\n * - Primitive loading with parse error collection\n * - Cross-reference integrity (related: fields)\n * - Context budget check with warnings\n * - Memory directory structure\n * - API key presence\n */\nexport function validateHarness(dir: string): ValidationResult {\n const result: ValidationResult = {\n ok: [],\n warnings: [],\n errors: [],\n parseErrors: [],\n primitiveCounts: new Map(),\n totalPrimitives: 0,\n };\n\n // --- Required files ---\n const requiredFiles = ['CORE.md'];\n for (const file of requiredFiles) {\n if (existsSync(join(dir, file))) {\n result.ok.push(`${file} exists`);\n } else {\n result.errors.push(`Missing required file: ${file}`);\n }\n }\n\n const optionalFiles = ['SYSTEM.md', 'state.md', 'config.yaml'];\n for (const file of optionalFiles) {\n if (existsSync(join(dir, file))) {\n result.ok.push(`${file} exists`);\n } else {\n result.warnings.push(`Optional file missing: ${file}`);\n }\n }\n\n // --- Config validation ---\n let config: HarnessConfig | undefined;\n try 
{\n config = loadConfig(dir);\n result.ok.push(`Config valid (agent: ${config.agent.name}, model: ${config.model.id})`);\n } catch (err: unknown) {\n result.errors.push(`Config error: ${err instanceof Error ? err.message : String(err)}`);\n }\n\n // --- State validation ---\n try {\n const state = loadState(dir);\n result.ok.push(`State valid (mode: ${state.mode})`);\n } catch (err: unknown) {\n result.warnings.push(`State parse issue: ${err instanceof Error ? err.message : String(err)}`);\n }\n\n // --- Primitive loading + parse errors ---\n const primitiveDirs = getPrimitiveDirs(config);\n const allDocs: HarnessDocument[] = [];\n\n for (const primDir of primitiveDirs) {\n const fullPath = join(dir, primDir);\n if (!existsSync(fullPath)) {\n result.primitiveCounts.set(primDir, 0);\n continue;\n }\n\n const { docs, errors } = loadDirectoryWithErrors(fullPath);\n result.primitiveCounts.set(primDir, docs.length);\n result.totalPrimitives += docs.length;\n allDocs.push(...docs);\n\n if (errors.length > 0) {\n result.parseErrors.push(...errors);\n for (const pe of errors) {\n const relPath = relative(dir, pe.path);\n result.errors.push(`Parse error in ${relPath}: ${pe.error}`);\n }\n }\n\n if (docs.length > 0) {\n result.ok.push(`${primDir}/: ${docs.length} valid file(s)`);\n }\n }\n\n // --- Cross-reference integrity ---\n const knownIds = new Set(allDocs.map((d) => d.frontmatter.id));\n for (const doc of allDocs) {\n const related = doc.frontmatter.related;\n if (!related || related.length === 0) continue;\n\n for (const ref of related) {\n // Check if reference is a known primitive ID\n if (knownIds.has(ref)) continue;\n\n // Check if reference is a valid file path\n const refPath = join(dir, ref);\n if (existsSync(refPath)) continue;\n\n // Check if reference is a file path with .md extension\n if (existsSync(refPath + '.md')) continue;\n\n const docRel = relative(dir, doc.path);\n result.warnings.push(`Broken reference in ${docRel}: \"${ref}\" not found (related: 
field)`);\n }\n }\n\n // --- Missing L0/L1 warnings ---\n let missingL0 = 0;\n let missingL1 = 0;\n for (const doc of allDocs) {\n if (!doc.l0) missingL0++;\n if (!doc.l1) missingL1++;\n }\n if (missingL0 > 0) {\n result.warnings.push(`${missingL0} primitive(s) missing L0 summary`);\n }\n if (missingL1 > 0) {\n result.warnings.push(`${missingL1} primitive(s) missing L1 summary`);\n }\n\n // --- Context budget ---\n if (config) {\n try {\n const ctx = buildSystemPrompt(dir, config);\n const usagePercent = ((ctx.budget.used_tokens / ctx.budget.max_tokens) * 100).toFixed(1);\n result.ok.push(\n `Context budget: ${ctx.budget.used_tokens}/${ctx.budget.max_tokens} tokens (${usagePercent}%)`,\n );\n\n // Surface context-loader warnings\n for (const warning of ctx.warnings) {\n result.warnings.push(warning);\n }\n } catch (err) {\n if (process.env.DEBUG) console.error(`Validator config load: ${err instanceof Error ? err.message : String(err)}`);\n }\n }\n\n // --- API key ---\n if (process.env.OPENROUTER_API_KEY || process.env.ANTHROPIC_API_KEY || process.env.OPENAI_API_KEY) {\n const keys: string[] = [];\n if (process.env.OPENROUTER_API_KEY) keys.push('OPENROUTER_API_KEY');\n if (process.env.ANTHROPIC_API_KEY) keys.push('ANTHROPIC_API_KEY');\n if (process.env.OPENAI_API_KEY) keys.push('OPENAI_API_KEY');\n result.ok.push(`API key(s) configured: ${keys.join(', ')}`);\n } else {\n result.warnings.push('No API key set (OPENROUTER_API_KEY, ANTHROPIC_API_KEY, or OPENAI_API_KEY)');\n }\n\n // --- MCP server validation ---\n if (config) {\n const servers = config.mcp?.servers ?? 
{};\n const serverCount = Object.keys(servers).length;\n if (serverCount > 0) {\n const mcpErrors = validateMcpConfig(config);\n const enabledCount = Object.values(servers).filter((s) => s.enabled !== false).length;\n\n if (mcpErrors.length === 0) {\n result.ok.push(`MCP: ${serverCount} server(s) configured (${enabledCount} enabled)`);\n } else {\n result.ok.push(`MCP: ${serverCount} server(s) configured (${enabledCount} enabled)`);\n for (const err of mcpErrors) {\n result.errors.push(`MCP server \"${err.server}\": ${err.error}`);\n }\n }\n }\n }\n\n // --- Memory directories ---\n const memoryDirs = ['memory', 'memory/sessions', 'memory/journal'];\n for (const memDir of memoryDirs) {\n if (!existsSync(join(dir, memDir))) {\n result.warnings.push(`Missing directory: ${memDir}/`);\n }\n }\n\n return result;\n}\n\nexport interface DoctorResult extends ValidationResult {\n fixes: string[];\n directoriesCreated: string[];\n}\n\n/**\n * Run validation then auto-fix all fixable issues:\n * - Fix primitives with missing id/status/L0/L1/tags\n * - Create missing memory directories\n */\nexport function doctorHarness(dir: string): DoctorResult {\n // Phase 1: Validate\n const validation = validateHarness(dir);\n const result: DoctorResult = {\n ...validation,\n fixes: [],\n directoriesCreated: [],\n };\n\n // Phase 2: Create missing directories\n const dirsToCreate = ['memory', 'memory/sessions', 'memory/journal', 'intake'];\n for (const d of dirsToCreate) {\n const fullPath = join(dir, d);\n if (!existsSync(fullPath)) {\n mkdirSync(fullPath, { recursive: true });\n result.directoriesCreated.push(d);\n result.fixes.push(`Created directory: ${d}/`);\n // Remove the warning about this missing dir\n result.warnings = result.warnings.filter((w) => !w.includes(`Missing directory: ${d}/`));\n }\n }\n\n // Phase 3: Auto-fix primitives with fixable issues\n const primitiveDirs = getPrimitiveDirs();\n for (const primDir of primitiveDirs) {\n const fullPath = join(dir, primDir);\n 
if (!existsSync(fullPath)) continue;\n\n let files: string[];\n try {\n files = readdirSync(fullPath).filter(\n (f) => f.endsWith('.md') && !f.startsWith('.') && !f.startsWith('_'),\n );\n } catch (_readErr) {\n continue;\n }\n\n for (const file of files) {\n const filePath = join(fullPath, file);\n const fixResult = fixCapability(filePath);\n\n if (fixResult.fixes_applied.length > 0) {\n const relPath = relative(dir, filePath);\n for (const fix of fixResult.fixes_applied) {\n result.fixes.push(`${relPath}: ${fix}`);\n }\n // Remove stale L0/L1 warnings since we just fixed them\n }\n }\n }\n\n // Recalculate L0/L1 warnings after fixes\n if (result.fixes.length > 0) {\n result.warnings = result.warnings.filter(\n (w) => !w.includes('missing L0') && !w.includes('missing L1'),\n );\n // Re-check L0/L1 counts\n let missingL0 = 0;\n let missingL1 = 0;\n for (const primDir of primitiveDirs) {\n const fullPath = join(dir, primDir);\n if (!existsSync(fullPath)) continue;\n const { docs } = loadDirectoryWithErrors(fullPath);\n for (const doc of docs) {\n if (!doc.l0) missingL0++;\n if (!doc.l1) missingL1++;\n }\n }\n if (missingL0 > 0) {\n result.warnings.push(`${missingL0} primitive(s) still missing L0 summary`);\n }\n if (missingL1 > 0) {\n result.warnings.push(`${missingL1} primitive(s) still missing L1 summary`);\n }\n }\n\n return 
result;\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAAA,SAAS,YAAY,aAAa,iBAAiB;AACnD,SAAS,MAAM,gBAAgB;AA+BxB,SAAS,gBAAgB,KAA+B;AAC7D,QAAM,SAA2B;AAAA,IAC/B,IAAI,CAAC;AAAA,IACL,UAAU,CAAC;AAAA,IACX,QAAQ,CAAC;AAAA,IACT,aAAa,CAAC;AAAA,IACd,iBAAiB,oBAAI,IAAI;AAAA,IACzB,iBAAiB;AAAA,EACnB;AAGA,QAAM,gBAAgB,CAAC,SAAS;AAChC,aAAW,QAAQ,eAAe;AAChC,QAAI,WAAW,KAAK,KAAK,IAAI,CAAC,GAAG;AAC/B,aAAO,GAAG,KAAK,GAAG,IAAI,SAAS;AAAA,IACjC,OAAO;AACL,aAAO,OAAO,KAAK,0BAA0B,IAAI,EAAE;AAAA,IACrD;AAAA,EACF;AAEA,QAAM,gBAAgB,CAAC,aAAa,YAAY,aAAa;AAC7D,aAAW,QAAQ,eAAe;AAChC,QAAI,WAAW,KAAK,KAAK,IAAI,CAAC,GAAG;AAC/B,aAAO,GAAG,KAAK,GAAG,IAAI,SAAS;AAAA,IACjC,OAAO;AACL,aAAO,SAAS,KAAK,0BAA0B,IAAI,EAAE;AAAA,IACvD;AAAA,EACF;AAGA,MAAI;AACJ,MAAI;AACF,aAAS,WAAW,GAAG;AACvB,WAAO,GAAG,KAAK,wBAAwB,OAAO,MAAM,IAAI,YAAY,OAAO,MAAM,EAAE,GAAG;AAAA,EACxF,SAAS,KAAc;AACrB,WAAO,OAAO,KAAK,iBAAiB,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC,EAAE;AAAA,EACxF;AAGA,MAAI;AACF,UAAM,QAAQ,UAAU,GAAG;AAC3B,WAAO,GAAG,KAAK,sBAAsB,MAAM,IAAI,GAAG;AAAA,EACpD,SAAS,KAAc;AACrB,WAAO,SAAS,KAAK,sBAAsB,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC,EAAE;AAAA,EAC/F;AAGA,QAAM,gBAAgB,iBAAiB,MAAM;AAC7C,QAAM,UAA6B,CAAC;AAEpC,aAAW,WAAW,eAAe;AACnC,UAAM,WAAW,KAAK,KAAK,OAAO;AAClC,QAAI,CAAC,WAAW,QAAQ,GAAG;AACzB,aAAO,gBAAgB,IAAI,SAAS,CAAC;AACrC;AAAA,IACF;AAEA,UAAM,EAAE,MAAM,OAAO,IAAI,wBAAwB,QAAQ;AACzD,WAAO,gBAAgB,IAAI,SAAS,KAAK,MAAM;AAC/C,WAAO,mBAAmB,KAAK;AAC/B,YAAQ,KAAK,GAAG,IAAI;AAEpB,QAAI,OAAO,SAAS,GAAG;AACrB,aAAO,YAAY,KAAK,GAAG,MAAM;AACjC,iBAAW,MAAM,QAAQ;AACvB,cAAM,UAAU,SAAS,KAAK,GAAG,IAAI;AACrC,eAAO,OAAO,KAAK,kBAAkB,OAAO,KAAK,GAAG,KAAK,EAAE;AAAA,MAC7D;AAAA,IACF;AAEA,QAAI,KAAK,SAAS,GAAG;AACnB,aAAO,GAAG,KAAK,GAAG,OAAO,MAAM,KAAK,MAAM,gBAAgB;AAAA,IAC5D;AAAA,EACF;AAGA,QAAM,WAAW,IAAI,IAAI,QAAQ,IAAI,CAAC,MAAM,EAAE,YAAY,EAAE,CAAC;AAC7D,aAAW,OAAO,SAAS;AACzB,UAAM,UAAU,IAAI,YAAY;AAChC,QAAI,CAAC,WAAW,QAAQ,WAAW,EAAG;AAEtC,eAAW,OAAO,SAAS;AAEzB,UAAI,SAAS,IAAI,GAAG,EAAG;AAGvB,YAAM,UAAU,KAAK,KAAK,GAAG;AAC7B,UAAI,WAAW,OAAO,EAAG;AAGzB,UAAI,WAAW,UAAU,KAAK,EAAG;AAEjC,YAAM,SAAS,SAAS,KAAK,IAAI,IAAI;
AACrC,aAAO,SAAS,KAAK,uBAAuB,MAAM,MAAM,GAAG,8BAA8B;AAAA,IAC3F;AAAA,EACF;AAGA,MAAI,YAAY;AAChB,MAAI,YAAY;AAChB,aAAW,OAAO,SAAS;AACzB,QAAI,CAAC,IAAI,GAAI;AACb,QAAI,CAAC,IAAI,GAAI;AAAA,EACf;AACA,MAAI,YAAY,GAAG;AACjB,WAAO,SAAS,KAAK,GAAG,SAAS,kCAAkC;AAAA,EACrE;AACA,MAAI,YAAY,GAAG;AACjB,WAAO,SAAS,KAAK,GAAG,SAAS,kCAAkC;AAAA,EACrE;AAGA,MAAI,QAAQ;AACV,QAAI;AACF,YAAM,MAAM,kBAAkB,KAAK,MAAM;AACzC,YAAM,gBAAiB,IAAI,OAAO,cAAc,IAAI,OAAO,aAAc,KAAK,QAAQ,CAAC;AACvF,aAAO,GAAG;AAAA,QACR,mBAAmB,IAAI,OAAO,WAAW,IAAI,IAAI,OAAO,UAAU,YAAY,YAAY;AAAA,MAC5F;AAGA,iBAAW,WAAW,IAAI,UAAU;AAClC,eAAO,SAAS,KAAK,OAAO;AAAA,MAC9B;AAAA,IACF,SAAS,KAAK;AACZ,UAAI,QAAQ,IAAI,MAAO,SAAQ,MAAM,0BAA0B,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC,EAAE;AAAA,IACnH;AAAA,EACF;AAGA,MAAI,QAAQ,IAAI,sBAAsB,QAAQ,IAAI,qBAAqB,QAAQ,IAAI,gBAAgB;AACjG,UAAM,OAAiB,CAAC;AACxB,QAAI,QAAQ,IAAI,mBAAoB,MAAK,KAAK,oBAAoB;AAClE,QAAI,QAAQ,IAAI,kBAAmB,MAAK,KAAK,mBAAmB;AAChE,QAAI,QAAQ,IAAI,eAAgB,MAAK,KAAK,gBAAgB;AAC1D,WAAO,GAAG,KAAK,0BAA0B,KAAK,KAAK,IAAI,CAAC,EAAE;AAAA,EAC5D,OAAO;AACL,WAAO,SAAS,KAAK,2EAA2E;AAAA,EAClG;AAGA,MAAI,QAAQ;AACV,UAAM,UAAU,OAAO,KAAK,WAAW,CAAC;AACxC,UAAM,cAAc,OAAO,KAAK,OAAO,EAAE;AACzC,QAAI,cAAc,GAAG;AACnB,YAAM,YAAY,kBAAkB,MAAM;AAC1C,YAAM,eAAe,OAAO,OAAO,OAAO,EAAE,OAAO,CAAC,MAAM,EAAE,YAAY,KAAK,EAAE;AAE/E,UAAI,UAAU,WAAW,GAAG;AAC1B,eAAO,GAAG,KAAK,QAAQ,WAAW,0BAA0B,YAAY,WAAW;AAAA,MACrF,OAAO;AACL,eAAO,GAAG,KAAK,QAAQ,WAAW,0BAA0B,YAAY,WAAW;AACnF,mBAAW,OAAO,WAAW;AAC3B,iBAAO,OAAO,KAAK,eAAe,IAAI,MAAM,MAAM,IAAI,KAAK,EAAE;AAAA,QAC/D;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAGA,QAAM,aAAa,CAAC,UAAU,mBAAmB,gBAAgB;AACjE,aAAW,UAAU,YAAY;AAC/B,QAAI,CAAC,WAAW,KAAK,KAAK,MAAM,CAAC,GAAG;AAClC,aAAO,SAAS,KAAK,sBAAsB,MAAM,GAAG;AAAA,IACtD;AAAA,EACF;AAEA,SAAO;AACT;AAYO,SAAS,cAAc,KAA2B;AAEvD,QAAM,aAAa,gBAAgB,GAAG;AACtC,QAAM,SAAuB;AAAA,IAC3B,GAAG;AAAA,IACH,OAAO,CAAC;AAAA,IACR,oBAAoB,CAAC;AAAA,EACvB;AAGA,QAAM,eAAe,CAAC,UAAU,mBAAmB,kBAAkB,QAAQ;AAC7E,aAAW,KAAK,cAAc;AAC5B,UAAM,WAAW,KAAK,KAAK,CAAC;AAC5B,QAAI,CAAC,WAAW,QAAQ,GAAG;AACzB,gBAAU,UAAU,EAAE,WAAW,KAAK,CAAC;AACvC,aAAO,mBAAmB
,KAAK,CAAC;AAChC,aAAO,MAAM,KAAK,sBAAsB,CAAC,GAAG;AAE5C,aAAO,WAAW,OAAO,SAAS,OAAO,CAAC,MAAM,CAAC,EAAE,SAAS,sBAAsB,CAAC,GAAG,CAAC;AAAA,IACzF;AAAA,EACF;AAGA,QAAM,gBAAgB,iBAAiB;AACvC,aAAW,WAAW,eAAe;AACnC,UAAM,WAAW,KAAK,KAAK,OAAO;AAClC,QAAI,CAAC,WAAW,QAAQ,EAAG;AAE3B,QAAI;AACJ,QAAI;AACF,cAAQ,YAAY,QAAQ,EAAE;AAAA,QAC5B,CAAC,MAAM,EAAE,SAAS,KAAK,KAAK,CAAC,EAAE,WAAW,GAAG,KAAK,CAAC,EAAE,WAAW,GAAG;AAAA,MACrE;AAAA,IACF,SAAS,UAAU;AACjB;AAAA,IACF;AAEA,eAAW,QAAQ,OAAO;AACxB,YAAM,WAAW,KAAK,UAAU,IAAI;AACpC,YAAM,YAAY,cAAc,QAAQ;AAExC,UAAI,UAAU,cAAc,SAAS,GAAG;AACtC,cAAM,UAAU,SAAS,KAAK,QAAQ;AACtC,mBAAW,OAAO,UAAU,eAAe;AACzC,iBAAO,MAAM,KAAK,GAAG,OAAO,KAAK,GAAG,EAAE;AAAA,QACxC;AAAA,MAEF;AAAA,IACF;AAAA,EACF;AAGA,MAAI,OAAO,MAAM,SAAS,GAAG;AAC3B,WAAO,WAAW,OAAO,SAAS;AAAA,MAChC,CAAC,MAAM,CAAC,EAAE,SAAS,YAAY,KAAK,CAAC,EAAE,SAAS,YAAY;AAAA,IAC9D;AAEA,QAAI,YAAY;AAChB,QAAI,YAAY;AAChB,eAAW,WAAW,eAAe;AACnC,YAAM,WAAW,KAAK,KAAK,OAAO;AAClC,UAAI,CAAC,WAAW,QAAQ,EAAG;AAC3B,YAAM,EAAE,KAAK,IAAI,wBAAwB,QAAQ;AACjD,iBAAW,OAAO,MAAM;AACtB,YAAI,CAAC,IAAI,GAAI;AACb,YAAI,CAAC,IAAI,GAAI;AAAA,MACf;AAAA,IACF;AACA,QAAI,YAAY,GAAG;AACjB,aAAO,SAAS,KAAK,GAAG,SAAS,wCAAwC;AAAA,IAC3E;AACA,QAAI,YAAY,GAAG;AACjB,aAAO,SAAS,KAAK,GAAG,SAAS,wCAAwC;AAAA,IAC3E;AAAA,EACF;AAEA,SAAO;AACT;","names":[]}
|
|
@@ -0,0 +1,200 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
|
|
3
|
+
import {
|
|
4
|
+
loadDirectory
|
|
5
|
+
} from "./chunk-UPLBF4RZ.js";
|
|
6
|
+
import {
|
|
7
|
+
generate,
|
|
8
|
+
getModel
|
|
9
|
+
} from "./chunk-IZ6UZ3ZL.js";
|
|
10
|
+
import {
|
|
11
|
+
loadConfig
|
|
12
|
+
} from "./chunk-CHJ5GNZC.js";
|
|
13
|
+
|
|
14
|
+
// src/runtime/instinct-learner.ts
|
|
15
|
+
import { writeFileSync, readdirSync, readFileSync, existsSync, mkdirSync } from "fs";
|
|
16
|
+
import { join } from "path";
|
|
17
|
+
async function proposeInstincts(harnessDir, fromJournalDate, apiKey) {
|
|
18
|
+
const config = loadConfig(harnessDir);
|
|
19
|
+
const model = getModel(config, apiKey);
|
|
20
|
+
const existingInstincts = loadDirectory(join(harnessDir, "instincts"));
|
|
21
|
+
const existingBehaviors = existingInstincts.map((d) => d.l0 || d.frontmatter.id).join("\n- ");
|
|
22
|
+
let recentContext = "";
|
|
23
|
+
if (fromJournalDate) {
|
|
24
|
+
const journalPath = join(harnessDir, "memory", "journal", `${fromJournalDate}.md`);
|
|
25
|
+
if (existsSync(journalPath)) {
|
|
26
|
+
recentContext = readFileSync(journalPath, "utf-8");
|
|
27
|
+
}
|
|
28
|
+
}
|
|
29
|
+
if (!recentContext) {
|
|
30
|
+
const sessionsDir = join(harnessDir, "memory", "sessions");
|
|
31
|
+
if (existsSync(sessionsDir)) {
|
|
32
|
+
const files = readdirSync(sessionsDir).filter((f) => f.endsWith(".md") && !f.startsWith(".")).sort().reverse().slice(0, 10);
|
|
33
|
+
recentContext = files.map((f) => readFileSync(join(sessionsDir, f), "utf-8")).join("\n\n---\n\n");
|
|
34
|
+
}
|
|
35
|
+
}
|
|
36
|
+
if (!recentContext) {
|
|
37
|
+
return [];
|
|
38
|
+
}
|
|
39
|
+
const prompt = `Analyze these recent agent interactions and identify potential instincts \u2014 reflexive behaviors that should become automatic.
|
|
40
|
+
|
|
41
|
+
Existing instincts (DO NOT duplicate):
|
|
42
|
+
- ${existingBehaviors || "none yet"}
|
|
43
|
+
|
|
44
|
+
Recent context:
|
|
45
|
+
${recentContext.slice(0, 4e3)}
|
|
46
|
+
|
|
47
|
+
For each candidate instinct, respond with EXACTLY this JSON format (one per line):
|
|
48
|
+
{"id": "kebab-case-id", "behavior": "One sentence describing the behavior", "provenance": "Where this was learned from", "confidence": 0.8}
|
|
49
|
+
|
|
50
|
+
Only propose instincts with confidence >= 0.7. Only propose genuinely useful behaviors, not obvious ones.
|
|
51
|
+
If there are no good candidates, respond with: NONE`;
|
|
52
|
+
const result = await generate({
|
|
53
|
+
model,
|
|
54
|
+
system: "You are an instinct analyzer. Extract behavioral patterns. Be selective \u2014 only propose high-value instincts.",
|
|
55
|
+
prompt
|
|
56
|
+
});
|
|
57
|
+
const candidates = [];
|
|
58
|
+
for (const line of result.text.split("\n")) {
|
|
59
|
+
const trimmed = line.trim();
|
|
60
|
+
if (trimmed === "NONE") break;
|
|
61
|
+
if (!trimmed.startsWith("{")) continue;
|
|
62
|
+
try {
|
|
63
|
+
const parsed = JSON.parse(trimmed);
|
|
64
|
+
if (parsed.id && parsed.behavior && parsed.confidence >= 0.7) {
|
|
65
|
+
candidates.push({
|
|
66
|
+
id: parsed.id,
|
|
67
|
+
behavior: parsed.behavior,
|
|
68
|
+
provenance: parsed.provenance || "auto-detected",
|
|
69
|
+
confidence: parsed.confidence
|
|
70
|
+
});
|
|
71
|
+
}
|
|
72
|
+
} catch {
|
|
73
|
+
}
|
|
74
|
+
}
|
|
75
|
+
return candidates;
|
|
76
|
+
}
|
|
77
|
+
function installInstinct(harnessDir, candidate) {
|
|
78
|
+
const instinctsDir = join(harnessDir, "instincts");
|
|
79
|
+
if (!existsSync(instinctsDir)) {
|
|
80
|
+
mkdirSync(instinctsDir, { recursive: true });
|
|
81
|
+
}
|
|
82
|
+
const today = (/* @__PURE__ */ new Date()).toISOString().split("T")[0];
|
|
83
|
+
const filePath = join(instinctsDir, `${candidate.id}.md`);
|
|
84
|
+
if (existsSync(filePath)) {
|
|
85
|
+
return "";
|
|
86
|
+
}
|
|
87
|
+
const content = `---
|
|
88
|
+
id: ${candidate.id}
|
|
89
|
+
tags: [instinct, auto-learned]
|
|
90
|
+
created: ${today}
|
|
91
|
+
updated: ${today}
|
|
92
|
+
author: agent
|
|
93
|
+
status: active
|
|
94
|
+
source: auto-detected
|
|
95
|
+
---
|
|
96
|
+
|
|
97
|
+
<!-- L0: ${candidate.behavior} -->
|
|
98
|
+
<!-- L1: ${candidate.behavior} Learned from: ${candidate.provenance}. Confidence: ${candidate.confidence}. -->
|
|
99
|
+
|
|
100
|
+
# Instinct: ${candidate.id.split("-").map((w) => w.charAt(0).toUpperCase() + w.slice(1)).join(" ")}
|
|
101
|
+
|
|
102
|
+
${candidate.behavior}
|
|
103
|
+
|
|
104
|
+
**Provenance:** ${candidate.provenance}
|
|
105
|
+
**Confidence:** ${candidate.confidence}
|
|
106
|
+
**Auto-learned:** ${today}
|
|
107
|
+
`;
|
|
108
|
+
writeFileSync(filePath, content, "utf-8");
|
|
109
|
+
return filePath;
|
|
110
|
+
}
|
|
111
|
+
async function learnFromSessions(harnessDir, autoInstall = false, apiKey) {
|
|
112
|
+
const candidates = await proposeInstincts(harnessDir, void 0, apiKey);
|
|
113
|
+
const installed = [];
|
|
114
|
+
const skipped = [];
|
|
115
|
+
if (autoInstall) {
|
|
116
|
+
for (const candidate of candidates) {
|
|
117
|
+
const path = installInstinct(harnessDir, candidate);
|
|
118
|
+
if (path) {
|
|
119
|
+
installed.push(candidate.id);
|
|
120
|
+
} else {
|
|
121
|
+
skipped.push(candidate.id);
|
|
122
|
+
}
|
|
123
|
+
}
|
|
124
|
+
}
|
|
125
|
+
return { candidates, installed, skipped };
|
|
126
|
+
}
|
|
127
|
+
function harvestInstincts(harnessDir, options) {
|
|
128
|
+
const journalDir = join(harnessDir, "memory", "journal");
|
|
129
|
+
if (!existsSync(journalDir)) {
|
|
130
|
+
return { candidates: [], installed: [], skipped: [], journalsScanned: 0 };
|
|
131
|
+
}
|
|
132
|
+
const files = readdirSync(journalDir).filter((f) => f.endsWith(".md") && !f.startsWith(".") && !f.startsWith("_")).sort();
|
|
133
|
+
const from = options?.from;
|
|
134
|
+
const to = options?.to;
|
|
135
|
+
const filtered = files.filter((f) => {
|
|
136
|
+
const dateMatch = f.match(/^(\d{4}-\d{2}-\d{2})/);
|
|
137
|
+
if (!dateMatch) return false;
|
|
138
|
+
const d = dateMatch[1];
|
|
139
|
+
if (from && d < from) return false;
|
|
140
|
+
if (to && d > to) return false;
|
|
141
|
+
return true;
|
|
142
|
+
});
|
|
143
|
+
const instinctsDir = join(harnessDir, "instincts");
|
|
144
|
+
const existingIds = /* @__PURE__ */ new Set();
|
|
145
|
+
const existingBehaviors = /* @__PURE__ */ new Set();
|
|
146
|
+
if (existsSync(instinctsDir)) {
|
|
147
|
+
const docs = loadDirectory(instinctsDir);
|
|
148
|
+
for (const doc of docs) {
|
|
149
|
+
existingIds.add(doc.frontmatter.id);
|
|
150
|
+
if (doc.l0) existingBehaviors.add(doc.l0.toLowerCase());
|
|
151
|
+
}
|
|
152
|
+
}
|
|
153
|
+
const candidates = [];
|
|
154
|
+
const seenIds = /* @__PURE__ */ new Set();
|
|
155
|
+
for (const file of filtered) {
|
|
156
|
+
const content = readFileSync(join(journalDir, file), "utf-8");
|
|
157
|
+
const sectionMatch = content.match(/## Instinct Candidates\n([\s\S]*?)(?=\n## |\n*$)/);
|
|
158
|
+
if (!sectionMatch) continue;
|
|
159
|
+
const lines = sectionMatch[1].split("\n").filter((l) => l.startsWith("- ")).map((l) => l.slice(2).trim().replace(/^INSTINCT:\s*/i, ""));
|
|
160
|
+
const dateMatch = file.match(/^(\d{4}-\d{2}-\d{2})/);
|
|
161
|
+
const journalDate = dateMatch ? dateMatch[1] : "unknown";
|
|
162
|
+
for (const line of lines) {
|
|
163
|
+
if (!line) continue;
|
|
164
|
+
const id = line.toLowerCase().replace(/[^a-z0-9\s-]/g, "").replace(/\s+/g, "-").slice(0, 50).replace(/-+$/, "");
|
|
165
|
+
if (!id) continue;
|
|
166
|
+
if (seenIds.has(id)) continue;
|
|
167
|
+
if (existingIds.has(id)) continue;
|
|
168
|
+
const behaviorLower = line.toLowerCase();
|
|
169
|
+
if (existingBehaviors.has(behaviorLower)) continue;
|
|
170
|
+
seenIds.add(id);
|
|
171
|
+
candidates.push({
|
|
172
|
+
id,
|
|
173
|
+
behavior: line,
|
|
174
|
+
provenance: `journal:${journalDate}`,
|
|
175
|
+
confidence: 0.75
|
|
176
|
+
});
|
|
177
|
+
}
|
|
178
|
+
}
|
|
179
|
+
const installed = [];
|
|
180
|
+
const skipped = [];
|
|
181
|
+
if (options?.install) {
|
|
182
|
+
for (const candidate of candidates) {
|
|
183
|
+
const path = installInstinct(harnessDir, candidate);
|
|
184
|
+
if (path) {
|
|
185
|
+
installed.push(candidate.id);
|
|
186
|
+
} else {
|
|
187
|
+
skipped.push(candidate.id);
|
|
188
|
+
}
|
|
189
|
+
}
|
|
190
|
+
}
|
|
191
|
+
return { candidates, installed, skipped, journalsScanned: filtered.length };
|
|
192
|
+
}
|
|
193
|
+
|
|
194
|
+
export {
|
|
195
|
+
proposeInstincts,
|
|
196
|
+
installInstinct,
|
|
197
|
+
learnFromSessions,
|
|
198
|
+
harvestInstincts
|
|
199
|
+
};
|
|
200
|
+
//# sourceMappingURL=chunk-GJNNR2RA.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/runtime/instinct-learner.ts"],"sourcesContent":["import { writeFileSync, readdirSync, readFileSync, existsSync, mkdirSync } from 'fs';\nimport { join } from 'path';\nimport { getModel, generate } from '../llm/provider.js';\nimport { loadConfig } from '../core/config.js';\nimport { loadDirectory } from '../primitives/loader.js';\n\nexport interface InstinctCandidate {\n id: string;\n behavior: string;\n provenance: string;\n confidence: number;\n}\n\nexport interface LearnResult {\n candidates: InstinctCandidate[];\n installed: string[];\n skipped: string[];\n}\n\nexport async function proposeInstincts(\n harnessDir: string,\n fromJournalDate?: string,\n apiKey?: string,\n): Promise<InstinctCandidate[]> {\n const config = loadConfig(harnessDir);\n const model = getModel(config, apiKey);\n\n // Load existing instincts to avoid duplicates\n const existingInstincts = loadDirectory(join(harnessDir, 'instincts'));\n const existingBehaviors = existingInstincts.map((d) => d.l0 || d.frontmatter.id).join('\\n- ');\n\n // Load recent sessions or journal\n let recentContext = '';\n if (fromJournalDate) {\n const journalPath = join(harnessDir, 'memory', 'journal', `${fromJournalDate}.md`);\n if (existsSync(journalPath)) {\n recentContext = readFileSync(journalPath, 'utf-8');\n }\n }\n\n if (!recentContext) {\n // Fall back to recent sessions\n const sessionsDir = join(harnessDir, 'memory', 'sessions');\n if (existsSync(sessionsDir)) {\n const files = readdirSync(sessionsDir)\n .filter((f) => f.endsWith('.md') && !f.startsWith('.'))\n .sort()\n .reverse()\n .slice(0, 10);\n\n recentContext = files\n .map((f) => readFileSync(join(sessionsDir, f), 'utf-8'))\n .join('\\n\\n---\\n\\n');\n }\n }\n\n if (!recentContext) {\n return [];\n }\n\n const prompt = `Analyze these recent agent interactions and identify potential instincts — reflexive behaviors that should become automatic.\n\nExisting instincts (DO NOT duplicate):\n- ${existingBehaviors || 'none 
yet'}\n\nRecent context:\n${recentContext.slice(0, 4000)}\n\nFor each candidate instinct, respond with EXACTLY this JSON format (one per line):\n{\"id\": \"kebab-case-id\", \"behavior\": \"One sentence describing the behavior\", \"provenance\": \"Where this was learned from\", \"confidence\": 0.8}\n\nOnly propose instincts with confidence >= 0.7. Only propose genuinely useful behaviors, not obvious ones.\nIf there are no good candidates, respond with: NONE`;\n\n const result = await generate({\n model,\n system: 'You are an instinct analyzer. Extract behavioral patterns. Be selective — only propose high-value instincts.',\n prompt,\n });\n\n const candidates: InstinctCandidate[] = [];\n\n for (const line of result.text.split('\\n')) {\n const trimmed = line.trim();\n if (trimmed === 'NONE') break;\n if (!trimmed.startsWith('{')) continue;\n\n try {\n const parsed = JSON.parse(trimmed);\n if (parsed.id && parsed.behavior && parsed.confidence >= 0.7) {\n candidates.push({\n id: parsed.id,\n behavior: parsed.behavior,\n provenance: parsed.provenance || 'auto-detected',\n confidence: parsed.confidence,\n });\n }\n } catch {\n // Skip malformed lines\n }\n }\n\n return candidates;\n}\n\nexport function installInstinct(harnessDir: string, candidate: InstinctCandidate): string {\n const instinctsDir = join(harnessDir, 'instincts');\n if (!existsSync(instinctsDir)) {\n mkdirSync(instinctsDir, { recursive: true });\n }\n\n const today = new Date().toISOString().split('T')[0];\n const filePath = join(instinctsDir, `${candidate.id}.md`);\n\n // Don't overwrite existing\n if (existsSync(filePath)) {\n return '';\n }\n\n const content = `---\nid: ${candidate.id}\ntags: [instinct, auto-learned]\ncreated: ${today}\nupdated: ${today}\nauthor: agent\nstatus: active\nsource: auto-detected\n---\n\n<!-- L0: ${candidate.behavior} -->\n<!-- L1: ${candidate.behavior} Learned from: ${candidate.provenance}. Confidence: ${candidate.confidence}. 
-->\n\n# Instinct: ${candidate.id.split('-').map((w) => w.charAt(0).toUpperCase() + w.slice(1)).join(' ')}\n\n${candidate.behavior}\n\n**Provenance:** ${candidate.provenance}\n**Confidence:** ${candidate.confidence}\n**Auto-learned:** ${today}\n`;\n\n writeFileSync(filePath, content, 'utf-8');\n return filePath;\n}\n\nexport async function learnFromSessions(\n harnessDir: string,\n autoInstall: boolean = false,\n apiKey?: string,\n): Promise<LearnResult> {\n const candidates = await proposeInstincts(harnessDir, undefined, apiKey);\n const installed: string[] = [];\n const skipped: string[] = [];\n\n if (autoInstall) {\n for (const candidate of candidates) {\n const path = installInstinct(harnessDir, candidate);\n if (path) {\n installed.push(candidate.id);\n } else {\n skipped.push(candidate.id);\n }\n }\n }\n\n return { candidates, installed, skipped };\n}\n\nexport interface HarvestResult {\n candidates: InstinctCandidate[];\n installed: string[];\n skipped: string[];\n journalsScanned: number;\n}\n\n/**\n * Harvest instinct candidates from journal entries.\n * Scans all journals (or journals within a date range) for instinct candidate\n * sections, deduplicates against existing instincts, and optionally installs them.\n *\n * Unlike learnFromSessions which uses LLM calls, harvestInstincts is pure file-based —\n * it extracts already-identified candidates from journal synthesis output.\n */\nexport function harvestInstincts(\n harnessDir: string,\n options?: { from?: string; to?: string; install?: boolean },\n): HarvestResult {\n const journalDir = join(harnessDir, 'memory', 'journal');\n if (!existsSync(journalDir)) {\n return { candidates: [], installed: [], skipped: [], journalsScanned: 0 };\n }\n\n const files = readdirSync(journalDir)\n .filter((f) => f.endsWith('.md') && !f.startsWith('.') && !f.startsWith('_'))\n .sort();\n\n // Filter by date range\n const from = options?.from;\n const to = options?.to;\n const filtered = files.filter((f) => {\n const 
dateMatch = f.match(/^(\\d{4}-\\d{2}-\\d{2})/);\n if (!dateMatch) return false;\n const d = dateMatch[1];\n if (from && d < from) return false;\n if (to && d > to) return false;\n return true;\n });\n\n // Load existing instinct IDs to deduplicate\n const instinctsDir = join(harnessDir, 'instincts');\n const existingIds = new Set<string>();\n const existingBehaviors = new Set<string>();\n if (existsSync(instinctsDir)) {\n const docs = loadDirectory(instinctsDir);\n for (const doc of docs) {\n existingIds.add(doc.frontmatter.id);\n if (doc.l0) existingBehaviors.add(doc.l0.toLowerCase());\n }\n }\n\n const candidates: InstinctCandidate[] = [];\n const seenIds = new Set<string>();\n\n for (const file of filtered) {\n const content = readFileSync(join(journalDir, file), 'utf-8');\n\n // Extract instinct candidates section\n const sectionMatch = content.match(/## Instinct Candidates\\n([\\s\\S]*?)(?=\\n## |\\n*$)/);\n if (!sectionMatch) continue;\n\n const lines = sectionMatch[1]\n .split('\\n')\n .filter((l) => l.startsWith('- '))\n .map((l) => l.slice(2).trim().replace(/^INSTINCT:\\s*/i, ''));\n\n const dateMatch = file.match(/^(\\d{4}-\\d{2}-\\d{2})/);\n const journalDate = dateMatch ? 
dateMatch[1] : 'unknown';\n\n for (const line of lines) {\n if (!line) continue;\n\n // Generate a kebab-case id from the behavior text\n const id = line\n .toLowerCase()\n .replace(/[^a-z0-9\\s-]/g, '')\n .replace(/\\s+/g, '-')\n .slice(0, 50)\n .replace(/-+$/, '');\n\n if (!id) continue;\n if (seenIds.has(id)) continue;\n if (existingIds.has(id)) continue;\n\n // Fuzzy dedup: skip if behavior text closely matches existing instinct L0\n const behaviorLower = line.toLowerCase();\n if (existingBehaviors.has(behaviorLower)) continue;\n\n seenIds.add(id);\n candidates.push({\n id,\n behavior: line,\n provenance: `journal:${journalDate}`,\n confidence: 0.75,\n });\n }\n }\n\n const installed: string[] = [];\n const skipped: string[] = [];\n\n if (options?.install) {\n for (const candidate of candidates) {\n const path = installInstinct(harnessDir, candidate);\n if (path) {\n installed.push(candidate.id);\n } else {\n skipped.push(candidate.id);\n }\n }\n }\n\n return { candidates, installed, skipped, journalsScanned: filtered.length 
};\n}\n"],"mappings":";;;;;;;;;;;;;;AAAA,SAAS,eAAe,aAAa,cAAc,YAAY,iBAAiB;AAChF,SAAS,YAAY;AAkBrB,eAAsB,iBACpB,YACA,iBACA,QAC8B;AAC9B,QAAM,SAAS,WAAW,UAAU;AACpC,QAAM,QAAQ,SAAS,QAAQ,MAAM;AAGrC,QAAM,oBAAoB,cAAc,KAAK,YAAY,WAAW,CAAC;AACrE,QAAM,oBAAoB,kBAAkB,IAAI,CAAC,MAAM,EAAE,MAAM,EAAE,YAAY,EAAE,EAAE,KAAK,MAAM;AAG5F,MAAI,gBAAgB;AACpB,MAAI,iBAAiB;AACnB,UAAM,cAAc,KAAK,YAAY,UAAU,WAAW,GAAG,eAAe,KAAK;AACjF,QAAI,WAAW,WAAW,GAAG;AAC3B,sBAAgB,aAAa,aAAa,OAAO;AAAA,IACnD;AAAA,EACF;AAEA,MAAI,CAAC,eAAe;AAElB,UAAM,cAAc,KAAK,YAAY,UAAU,UAAU;AACzD,QAAI,WAAW,WAAW,GAAG;AAC3B,YAAM,QAAQ,YAAY,WAAW,EAClC,OAAO,CAAC,MAAM,EAAE,SAAS,KAAK,KAAK,CAAC,EAAE,WAAW,GAAG,CAAC,EACrD,KAAK,EACL,QAAQ,EACR,MAAM,GAAG,EAAE;AAEd,sBAAgB,MACb,IAAI,CAAC,MAAM,aAAa,KAAK,aAAa,CAAC,GAAG,OAAO,CAAC,EACtD,KAAK,aAAa;AAAA,IACvB;AAAA,EACF;AAEA,MAAI,CAAC,eAAe;AAClB,WAAO,CAAC;AAAA,EACV;AAEA,QAAM,SAAS;AAAA;AAAA;AAAA,IAGb,qBAAqB,UAAU;AAAA;AAAA;AAAA,EAGjC,cAAc,MAAM,GAAG,GAAI,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAQ5B,QAAM,SAAS,MAAM,SAAS;AAAA,IAC5B;AAAA,IACA,QAAQ;AAAA,IACR;AAAA,EACF,CAAC;AAED,QAAM,aAAkC,CAAC;AAEzC,aAAW,QAAQ,OAAO,KAAK,MAAM,IAAI,GAAG;AAC1C,UAAM,UAAU,KAAK,KAAK;AAC1B,QAAI,YAAY,OAAQ;AACxB,QAAI,CAAC,QAAQ,WAAW,GAAG,EAAG;AAE9B,QAAI;AACF,YAAM,SAAS,KAAK,MAAM,OAAO;AACjC,UAAI,OAAO,MAAM,OAAO,YAAY,OAAO,cAAc,KAAK;AAC5D,mBAAW,KAAK;AAAA,UACd,IAAI,OAAO;AAAA,UACX,UAAU,OAAO;AAAA,UACjB,YAAY,OAAO,cAAc;AAAA,UACjC,YAAY,OAAO;AAAA,QACrB,CAAC;AAAA,MACH;AAAA,IACF,QAAQ;AAAA,IAER;AAAA,EACF;AAEA,SAAO;AACT;AAEO,SAAS,gBAAgB,YAAoB,WAAsC;AACxF,QAAM,eAAe,KAAK,YAAY,WAAW;AACjD,MAAI,CAAC,WAAW,YAAY,GAAG;AAC7B,cAAU,cAAc,EAAE,WAAW,KAAK,CAAC;AAAA,EAC7C;AAEA,QAAM,SAAQ,oBAAI,KAAK,GAAE,YAAY,EAAE,MAAM,GAAG,EAAE,CAAC;AACnD,QAAM,WAAW,KAAK,cAAc,GAAG,UAAU,EAAE,KAAK;AAGxD,MAAI,WAAW,QAAQ,GAAG;AACxB,WAAO;AAAA,EACT;AAEA,QAAM,UAAU;AAAA,MACZ,UAAU,EAAE;AAAA;AAAA,WAEP,KAAK;AAAA,WACL,KAAK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,WAML,UAAU,QAAQ;AAAA,WAClB,UAAU,QAAQ,kBAAkB,UAAU,UAAU,iBAAiB,UAAU,UAAU;AAAA;AAAA,cAE1F,UAAU,GAAG,MAAM,GAAG,EAAE,IAAI,CAAC,MAAM,EAAE,OAAO,CAAC,EAAE,YAAY,IAAI,EAAE,MA
AM,CAAC,CAAC,EAAE,KAAK,GAAG,CAAC;AAAA;AAAA,EAEhG,UAAU,QAAQ;AAAA;AAAA,kBAEF,UAAU,UAAU;AAAA,kBACpB,UAAU,UAAU;AAAA,oBAClB,KAAK;AAAA;AAGvB,gBAAc,UAAU,SAAS,OAAO;AACxC,SAAO;AACT;AAEA,eAAsB,kBACpB,YACA,cAAuB,OACvB,QACsB;AACtB,QAAM,aAAa,MAAM,iBAAiB,YAAY,QAAW,MAAM;AACvE,QAAM,YAAsB,CAAC;AAC7B,QAAM,UAAoB,CAAC;AAE3B,MAAI,aAAa;AACf,eAAW,aAAa,YAAY;AAClC,YAAM,OAAO,gBAAgB,YAAY,SAAS;AAClD,UAAI,MAAM;AACR,kBAAU,KAAK,UAAU,EAAE;AAAA,MAC7B,OAAO;AACL,gBAAQ,KAAK,UAAU,EAAE;AAAA,MAC3B;AAAA,IACF;AAAA,EACF;AAEA,SAAO,EAAE,YAAY,WAAW,QAAQ;AAC1C;AAiBO,SAAS,iBACd,YACA,SACe;AACf,QAAM,aAAa,KAAK,YAAY,UAAU,SAAS;AACvD,MAAI,CAAC,WAAW,UAAU,GAAG;AAC3B,WAAO,EAAE,YAAY,CAAC,GAAG,WAAW,CAAC,GAAG,SAAS,CAAC,GAAG,iBAAiB,EAAE;AAAA,EAC1E;AAEA,QAAM,QAAQ,YAAY,UAAU,EACjC,OAAO,CAAC,MAAM,EAAE,SAAS,KAAK,KAAK,CAAC,EAAE,WAAW,GAAG,KAAK,CAAC,EAAE,WAAW,GAAG,CAAC,EAC3E,KAAK;AAGR,QAAM,OAAO,SAAS;AACtB,QAAM,KAAK,SAAS;AACpB,QAAM,WAAW,MAAM,OAAO,CAAC,MAAM;AACnC,UAAM,YAAY,EAAE,MAAM,sBAAsB;AAChD,QAAI,CAAC,UAAW,QAAO;AACvB,UAAM,IAAI,UAAU,CAAC;AACrB,QAAI,QAAQ,IAAI,KAAM,QAAO;AAC7B,QAAI,MAAM,IAAI,GAAI,QAAO;AACzB,WAAO;AAAA,EACT,CAAC;AAGD,QAAM,eAAe,KAAK,YAAY,WAAW;AACjD,QAAM,cAAc,oBAAI,IAAY;AACpC,QAAM,oBAAoB,oBAAI,IAAY;AAC1C,MAAI,WAAW,YAAY,GAAG;AAC5B,UAAM,OAAO,cAAc,YAAY;AACvC,eAAW,OAAO,MAAM;AACtB,kBAAY,IAAI,IAAI,YAAY,EAAE;AAClC,UAAI,IAAI,GAAI,mBAAkB,IAAI,IAAI,GAAG,YAAY,CAAC;AAAA,IACxD;AAAA,EACF;AAEA,QAAM,aAAkC,CAAC;AACzC,QAAM,UAAU,oBAAI,IAAY;AAEhC,aAAW,QAAQ,UAAU;AAC3B,UAAM,UAAU,aAAa,KAAK,YAAY,IAAI,GAAG,OAAO;AAG5D,UAAM,eAAe,QAAQ,MAAM,kDAAkD;AACrF,QAAI,CAAC,aAAc;AAEnB,UAAM,QAAQ,aAAa,CAAC,EACzB,MAAM,IAAI,EACV,OAAO,CAAC,MAAM,EAAE,WAAW,IAAI,CAAC,EAChC,IAAI,CAAC,MAAM,EAAE,MAAM,CAAC,EAAE,KAAK,EAAE,QAAQ,kBAAkB,EAAE,CAAC;AAE7D,UAAM,YAAY,KAAK,MAAM,sBAAsB;AACnD,UAAM,cAAc,YAAY,UAAU,CAAC,IAAI;AAE/C,eAAW,QAAQ,OAAO;AACxB,UAAI,CAAC,KAAM;AAGX,YAAM,KAAK,KACR,YAAY,EACZ,QAAQ,iBAAiB,EAAE,EAC3B,QAAQ,QAAQ,GAAG,EACnB,MAAM,GAAG,EAAE,EACX,QAAQ,OAAO,EAAE;AAEpB,UAAI,CAAC,GAAI;AACT,UAAI,QAAQ,IAAI,EAAE,EAAG;AACrB,UAAI,YAAY,IAAI,EAAE,EAAG;AAGzB,YAAM,gBAAgB,KAAK,YAAY;AACvC,U
AAI,kBAAkB,IAAI,aAAa,EAAG;AAE1C,cAAQ,IAAI,EAAE;AACd,iBAAW,KAAK;AAAA,QACd;AAAA,QACA,UAAU;AAAA,QACV,YAAY,WAAW,WAAW;AAAA,QAClC,YAAY;AAAA,MACd,CAAC;AAAA,IACH;AAAA,EACF;AAEA,QAAM,YAAsB,CAAC;AAC7B,QAAM,UAAoB,CAAC;AAE3B,MAAI,SAAS,SAAS;AACpB,eAAW,aAAa,YAAY;AAClC,YAAM,OAAO,gBAAgB,YAAY,SAAS;AAClD,UAAI,MAAM;AACR,kBAAU,KAAK,UAAU,EAAE;AAAA,MAC7B,OAAO;AACL,gBAAQ,KAAK,UAAU,EAAE;AAAA,MAC3B;AAAA,IACF;AAAA,EACF;AAEA,SAAO,EAAE,YAAY,WAAW,SAAS,iBAAiB,SAAS,OAAO;AAC5E;","names":[]}
|
|
@@ -0,0 +1,111 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
|
|
3
|
+
|
|
4
|
+
// src/runtime/analytics.ts
|
|
5
|
+
import { readFileSync, existsSync, readdirSync } from "fs";
|
|
6
|
+
import { join } from "path";
|
|
7
|
+
import matter from "gray-matter";
|
|
8
|
+
function parseSessionFile(filePath) {
|
|
9
|
+
try {
|
|
10
|
+
const content = readFileSync(filePath, "utf-8");
|
|
11
|
+
const { data, content: body } = matter(content);
|
|
12
|
+
const id = typeof data.id === "string" ? data.id : "";
|
|
13
|
+
const created = typeof data.created === "string" ? data.created : data.created instanceof Date ? data.created.toISOString() : "";
|
|
14
|
+
const updated = typeof data.updated === "string" ? data.updated : data.updated instanceof Date ? data.updated.toISOString() : "";
|
|
15
|
+
const durationMinutes = typeof data.duration_minutes === "number" ? data.duration_minutes : 0;
|
|
16
|
+
const dateMatch = id.match(/^(\d{4}-\d{2}-\d{2})/);
|
|
17
|
+
const date = dateMatch ? dateMatch[1] : "";
|
|
18
|
+
const tokensMatch = body.match(/\*\*Tokens:\*\*\s*(\d+)/);
|
|
19
|
+
const stepsMatch = body.match(/\*\*Steps:\*\*\s*(\d+)/);
|
|
20
|
+
const modelMatch = body.match(/\*\*Model:\*\*\s*(.+)/);
|
|
21
|
+
const delegateMatch = body.match(/\*\*Delegated to:\*\*\s*(.+)/);
|
|
22
|
+
const tokens = tokensMatch ? parseInt(tokensMatch[1], 10) : 0;
|
|
23
|
+
const steps = stepsMatch ? parseInt(stepsMatch[1], 10) : 0;
|
|
24
|
+
const model = modelMatch ? modelMatch[1].trim() : void 0;
|
|
25
|
+
const delegatedTo = delegateMatch ? delegateMatch[1].trim() : void 0;
|
|
26
|
+
return {
|
|
27
|
+
id,
|
|
28
|
+
date,
|
|
29
|
+
started: created,
|
|
30
|
+
ended: updated,
|
|
31
|
+
tokens,
|
|
32
|
+
steps,
|
|
33
|
+
durationMinutes,
|
|
34
|
+
model,
|
|
35
|
+
delegatedTo
|
|
36
|
+
};
|
|
37
|
+
} catch {
|
|
38
|
+
return null;
|
|
39
|
+
}
|
|
40
|
+
}
|
|
41
|
+
function getSessionAnalytics(harnessDir) {
|
|
42
|
+
const sessionsDir = join(harnessDir, "memory", "sessions");
|
|
43
|
+
const sessions = [];
|
|
44
|
+
if (existsSync(sessionsDir)) {
|
|
45
|
+
const files = readdirSync(sessionsDir).filter((f) => f.endsWith(".md") && !f.startsWith(".") && !f.startsWith("_"));
|
|
46
|
+
for (const file of files) {
|
|
47
|
+
const data = parseSessionFile(join(sessionsDir, file));
|
|
48
|
+
if (data) sessions.push(data);
|
|
49
|
+
}
|
|
50
|
+
}
|
|
51
|
+
const sessionsPerDay = /* @__PURE__ */ new Map();
|
|
52
|
+
const tokensPerDay = /* @__PURE__ */ new Map();
|
|
53
|
+
const modelUsage = /* @__PURE__ */ new Map();
|
|
54
|
+
let totalTokens = 0;
|
|
55
|
+
let totalDurationMinutes = 0;
|
|
56
|
+
let delegationCount = 0;
|
|
57
|
+
let earliest = "";
|
|
58
|
+
let latest = "";
|
|
59
|
+
for (const session of sessions) {
|
|
60
|
+
totalTokens += session.tokens;
|
|
61
|
+
totalDurationMinutes += session.durationMinutes;
|
|
62
|
+
if (session.delegatedTo) delegationCount++;
|
|
63
|
+
if (session.date) {
|
|
64
|
+
sessionsPerDay.set(session.date, (sessionsPerDay.get(session.date) ?? 0) + 1);
|
|
65
|
+
tokensPerDay.set(session.date, (tokensPerDay.get(session.date) ?? 0) + session.tokens);
|
|
66
|
+
if (!earliest || session.date < earliest) earliest = session.date;
|
|
67
|
+
if (!latest || session.date > latest) latest = session.date;
|
|
68
|
+
}
|
|
69
|
+
if (session.model) {
|
|
70
|
+
modelUsage.set(session.model, (modelUsage.get(session.model) ?? 0) + 1);
|
|
71
|
+
}
|
|
72
|
+
}
|
|
73
|
+
const topDays = Array.from(sessionsPerDay.entries()).map(([date, count]) => ({
|
|
74
|
+
date,
|
|
75
|
+
sessions: count,
|
|
76
|
+
tokens: tokensPerDay.get(date) ?? 0
|
|
77
|
+
})).sort((a, b) => b.sessions - a.sessions || b.tokens - a.tokens).slice(0, 7);
|
|
78
|
+
return {
|
|
79
|
+
totalSessions: sessions.length,
|
|
80
|
+
totalTokens,
|
|
81
|
+
totalDurationMinutes,
|
|
82
|
+
avgTokensPerSession: sessions.length > 0 ? Math.round(totalTokens / sessions.length) : 0,
|
|
83
|
+
avgDurationMinutes: sessions.length > 0 ? Math.round(totalDurationMinutes / sessions.length) : 0,
|
|
84
|
+
sessionsPerDay,
|
|
85
|
+
tokensPerDay,
|
|
86
|
+
modelUsage,
|
|
87
|
+
delegationCount,
|
|
88
|
+
dateRange: earliest && latest ? { earliest, latest } : null,
|
|
89
|
+
topDays
|
|
90
|
+
};
|
|
91
|
+
}
|
|
92
|
+
function getSessionsInRange(harnessDir, from, to) {
|
|
93
|
+
const sessionsDir = join(harnessDir, "memory", "sessions");
|
|
94
|
+
if (!existsSync(sessionsDir)) return [];
|
|
95
|
+
const files = readdirSync(sessionsDir).filter((f) => f.endsWith(".md") && !f.startsWith(".") && !f.startsWith("_"));
|
|
96
|
+
const sessions = [];
|
|
97
|
+
for (const file of files) {
|
|
98
|
+
const data = parseSessionFile(join(sessionsDir, file));
|
|
99
|
+
if (!data || !data.date) continue;
|
|
100
|
+
if (from && data.date < from) continue;
|
|
101
|
+
if (to && data.date > to) continue;
|
|
102
|
+
sessions.push(data);
|
|
103
|
+
}
|
|
104
|
+
return sessions.sort((a, b) => a.date.localeCompare(b.date));
|
|
105
|
+
}
|
|
106
|
+
|
|
107
|
+
export {
|
|
108
|
+
getSessionAnalytics,
|
|
109
|
+
getSessionsInRange
|
|
110
|
+
};
|
|
111
|
+
//# sourceMappingURL=chunk-GNUSHD2Y.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/runtime/analytics.ts"],"sourcesContent":["import { readFileSync, existsSync, readdirSync } from 'fs';\nimport { join } from 'path';\nimport matter from 'gray-matter';\n\nexport interface SessionData {\n id: string;\n date: string;\n started: string;\n ended: string;\n tokens: number;\n steps: number;\n durationMinutes: number;\n model?: string;\n delegatedTo?: string;\n}\n\nexport interface SessionAnalytics {\n totalSessions: number;\n totalTokens: number;\n totalDurationMinutes: number;\n avgTokensPerSession: number;\n avgDurationMinutes: number;\n sessionsPerDay: Map<string, number>;\n tokensPerDay: Map<string, number>;\n modelUsage: Map<string, number>;\n delegationCount: number;\n dateRange: { earliest: string; latest: string } | null;\n topDays: Array<{ date: string; sessions: number; tokens: number }>;\n}\n\n/**\n * Parse a session markdown file to extract structured data.\n */\nfunction parseSessionFile(filePath: string): SessionData | null {\n try {\n const content = readFileSync(filePath, 'utf-8');\n const { data, content: body } = matter(content);\n\n const id = typeof data.id === 'string' ? data.id : '';\n const created = typeof data.created === 'string'\n ? data.created\n : data.created instanceof Date\n ? data.created.toISOString()\n : '';\n const updated = typeof data.updated === 'string'\n ? data.updated\n : data.updated instanceof Date\n ? data.updated.toISOString()\n : '';\n const durationMinutes = typeof data.duration_minutes === 'number' ? data.duration_minutes : 0;\n\n // Extract date from id (YYYY-MM-DD-xxxxxxxx format)\n const dateMatch = id.match(/^(\\d{4}-\\d{2}-\\d{2})/);\n const date = dateMatch ? 
dateMatch[1] : '';\n\n // Extract tokens and steps from body\n const tokensMatch = body.match(/\\*\\*Tokens:\\*\\*\\s*(\\d+)/);\n const stepsMatch = body.match(/\\*\\*Steps:\\*\\*\\s*(\\d+)/);\n const modelMatch = body.match(/\\*\\*Model:\\*\\*\\s*(.+)/);\n const delegateMatch = body.match(/\\*\\*Delegated to:\\*\\*\\s*(.+)/);\n\n const tokens = tokensMatch ? parseInt(tokensMatch[1], 10) : 0;\n const steps = stepsMatch ? parseInt(stepsMatch[1], 10) : 0;\n const model = modelMatch ? modelMatch[1].trim() : undefined;\n const delegatedTo = delegateMatch ? delegateMatch[1].trim() : undefined;\n\n return {\n id,\n date,\n started: created,\n ended: updated,\n tokens,\n steps,\n durationMinutes,\n model,\n delegatedTo,\n };\n } catch {\n return null;\n }\n}\n\n/**\n * Load all sessions and compute analytics.\n */\nexport function getSessionAnalytics(harnessDir: string): SessionAnalytics {\n const sessionsDir = join(harnessDir, 'memory', 'sessions');\n const sessions: SessionData[] = [];\n\n if (existsSync(sessionsDir)) {\n const files = readdirSync(sessionsDir)\n .filter((f) => f.endsWith('.md') && !f.startsWith('.') && !f.startsWith('_'));\n\n for (const file of files) {\n const data = parseSessionFile(join(sessionsDir, file));\n if (data) sessions.push(data);\n }\n }\n\n const sessionsPerDay = new Map<string, number>();\n const tokensPerDay = new Map<string, number>();\n const modelUsage = new Map<string, number>();\n let totalTokens = 0;\n let totalDurationMinutes = 0;\n let delegationCount = 0;\n let earliest = '';\n let latest = '';\n\n for (const session of sessions) {\n totalTokens += session.tokens;\n totalDurationMinutes += session.durationMinutes;\n\n if (session.delegatedTo) delegationCount++;\n\n if (session.date) {\n sessionsPerDay.set(session.date, (sessionsPerDay.get(session.date) ?? 0) + 1);\n tokensPerDay.set(session.date, (tokensPerDay.get(session.date) ?? 
0) + session.tokens);\n\n if (!earliest || session.date < earliest) earliest = session.date;\n if (!latest || session.date > latest) latest = session.date;\n }\n\n if (session.model) {\n modelUsage.set(session.model, (modelUsage.get(session.model) ?? 0) + 1);\n }\n }\n\n // Top days by session count\n const topDays = Array.from(sessionsPerDay.entries())\n .map(([date, count]) => ({\n date,\n sessions: count,\n tokens: tokensPerDay.get(date) ?? 0,\n }))\n .sort((a, b) => b.sessions - a.sessions || b.tokens - a.tokens)\n .slice(0, 7);\n\n return {\n totalSessions: sessions.length,\n totalTokens,\n totalDurationMinutes,\n avgTokensPerSession: sessions.length > 0 ? Math.round(totalTokens / sessions.length) : 0,\n avgDurationMinutes: sessions.length > 0 ? Math.round(totalDurationMinutes / sessions.length) : 0,\n sessionsPerDay,\n tokensPerDay,\n modelUsage,\n delegationCount,\n dateRange: earliest && latest ? { earliest, latest } : null,\n topDays,\n };\n}\n\n/**\n * Load raw session data for a date range.\n */\nexport function getSessionsInRange(\n harnessDir: string,\n from?: string,\n to?: string,\n): SessionData[] {\n const sessionsDir = join(harnessDir, 'memory', 'sessions');\n if (!existsSync(sessionsDir)) return [];\n\n const files = readdirSync(sessionsDir)\n .filter((f) => f.endsWith('.md') && !f.startsWith('.') && !f.startsWith('_'));\n\n const sessions: SessionData[] = [];\n for (const file of files) {\n const data = parseSessionFile(join(sessionsDir, file));\n if (!data || !data.date) continue;\n\n if (from && data.date < from) continue;\n if (to && data.date > to) continue;\n\n sessions.push(data);\n }\n\n return sessions.sort((a, b) => 
a.date.localeCompare(b.date));\n}\n"],"mappings":";;;;AAAA,SAAS,cAAc,YAAY,mBAAmB;AACtD,SAAS,YAAY;AACrB,OAAO,YAAY;AA+BnB,SAAS,iBAAiB,UAAsC;AAC9D,MAAI;AACF,UAAM,UAAU,aAAa,UAAU,OAAO;AAC9C,UAAM,EAAE,MAAM,SAAS,KAAK,IAAI,OAAO,OAAO;AAE9C,UAAM,KAAK,OAAO,KAAK,OAAO,WAAW,KAAK,KAAK;AACnD,UAAM,UAAU,OAAO,KAAK,YAAY,WACpC,KAAK,UACL,KAAK,mBAAmB,OACtB,KAAK,QAAQ,YAAY,IACzB;AACN,UAAM,UAAU,OAAO,KAAK,YAAY,WACpC,KAAK,UACL,KAAK,mBAAmB,OACtB,KAAK,QAAQ,YAAY,IACzB;AACN,UAAM,kBAAkB,OAAO,KAAK,qBAAqB,WAAW,KAAK,mBAAmB;AAG5F,UAAM,YAAY,GAAG,MAAM,sBAAsB;AACjD,UAAM,OAAO,YAAY,UAAU,CAAC,IAAI;AAGxC,UAAM,cAAc,KAAK,MAAM,yBAAyB;AACxD,UAAM,aAAa,KAAK,MAAM,wBAAwB;AACtD,UAAM,aAAa,KAAK,MAAM,uBAAuB;AACrD,UAAM,gBAAgB,KAAK,MAAM,8BAA8B;AAE/D,UAAM,SAAS,cAAc,SAAS,YAAY,CAAC,GAAG,EAAE,IAAI;AAC5D,UAAM,QAAQ,aAAa,SAAS,WAAW,CAAC,GAAG,EAAE,IAAI;AACzD,UAAM,QAAQ,aAAa,WAAW,CAAC,EAAE,KAAK,IAAI;AAClD,UAAM,cAAc,gBAAgB,cAAc,CAAC,EAAE,KAAK,IAAI;AAE9D,WAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA,SAAS;AAAA,MACT,OAAO;AAAA,MACP;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAKO,SAAS,oBAAoB,YAAsC;AACxE,QAAM,cAAc,KAAK,YAAY,UAAU,UAAU;AACzD,QAAM,WAA0B,CAAC;AAEjC,MAAI,WAAW,WAAW,GAAG;AAC3B,UAAM,QAAQ,YAAY,WAAW,EAClC,OAAO,CAAC,MAAM,EAAE,SAAS,KAAK,KAAK,CAAC,EAAE,WAAW,GAAG,KAAK,CAAC,EAAE,WAAW,GAAG,CAAC;AAE9E,eAAW,QAAQ,OAAO;AACxB,YAAM,OAAO,iBAAiB,KAAK,aAAa,IAAI,CAAC;AACrD,UAAI,KAAM,UAAS,KAAK,IAAI;AAAA,IAC9B;AAAA,EACF;AAEA,QAAM,iBAAiB,oBAAI,IAAoB;AAC/C,QAAM,eAAe,oBAAI,IAAoB;AAC7C,QAAM,aAAa,oBAAI,IAAoB;AAC3C,MAAI,cAAc;AAClB,MAAI,uBAAuB;AAC3B,MAAI,kBAAkB;AACtB,MAAI,WAAW;AACf,MAAI,SAAS;AAEb,aAAW,WAAW,UAAU;AAC9B,mBAAe,QAAQ;AACvB,4BAAwB,QAAQ;AAEhC,QAAI,QAAQ,YAAa;AAEzB,QAAI,QAAQ,MAAM;AAChB,qBAAe,IAAI,QAAQ,OAAO,eAAe,IAAI,QAAQ,IAAI,KAAK,KAAK,CAAC;AAC5E,mBAAa,IAAI,QAAQ,OAAO,aAAa,IAAI,QAAQ,IAAI,KAAK,KAAK,QAAQ,MAAM;AAErF,UAAI,CAAC,YAAY,QAAQ,OAAO,SAAU,YAAW,QAAQ;AAC7D,UAAI,CAAC,UAAU,QAAQ,OAAO,OAAQ,UAAS,QAAQ;AAAA,IACzD;AAEA,QAAI,QAAQ,OAAO;AACjB,iBAAW,IAAI,QAAQ,QAAQ,WAAW,IAAI,QAAQ,KAAK,KAAK,KAAK,CAAC;AAAA,IACxE;AAAA,EACF;AAGA,QA
AM,UAAU,MAAM,KAAK,eAAe,QAAQ,CAAC,EAChD,IAAI,CAAC,CAAC,MAAM,KAAK,OAAO;AAAA,IACvB;AAAA,IACA,UAAU;AAAA,IACV,QAAQ,aAAa,IAAI,IAAI,KAAK;AAAA,EACpC,EAAE,EACD,KAAK,CAAC,GAAG,MAAM,EAAE,WAAW,EAAE,YAAY,EAAE,SAAS,EAAE,MAAM,EAC7D,MAAM,GAAG,CAAC;AAEb,SAAO;AAAA,IACL,eAAe,SAAS;AAAA,IACxB;AAAA,IACA;AAAA,IACA,qBAAqB,SAAS,SAAS,IAAI,KAAK,MAAM,cAAc,SAAS,MAAM,IAAI;AAAA,IACvF,oBAAoB,SAAS,SAAS,IAAI,KAAK,MAAM,uBAAuB,SAAS,MAAM,IAAI;AAAA,IAC/F;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,WAAW,YAAY,SAAS,EAAE,UAAU,OAAO,IAAI;AAAA,IACvD;AAAA,EACF;AACF;AAKO,SAAS,mBACd,YACA,MACA,IACe;AACf,QAAM,cAAc,KAAK,YAAY,UAAU,UAAU;AACzD,MAAI,CAAC,WAAW,WAAW,EAAG,QAAO,CAAC;AAEtC,QAAM,QAAQ,YAAY,WAAW,EAClC,OAAO,CAAC,MAAM,EAAE,SAAS,KAAK,KAAK,CAAC,EAAE,WAAW,GAAG,KAAK,CAAC,EAAE,WAAW,GAAG,CAAC;AAE9E,QAAM,WAA0B,CAAC;AACjC,aAAW,QAAQ,OAAO;AACxB,UAAM,OAAO,iBAAiB,KAAK,aAAa,IAAI,CAAC;AACrD,QAAI,CAAC,QAAQ,CAAC,KAAK,KAAM;AAEzB,QAAI,QAAQ,KAAK,OAAO,KAAM;AAC9B,QAAI,MAAM,KAAK,OAAO,GAAI;AAE1B,aAAS,KAAK,IAAI;AAAA,EACpB;AAEA,SAAO,SAAS,KAAK,CAAC,GAAG,MAAM,EAAE,KAAK,cAAc,EAAE,IAAI,CAAC;AAC7D;","names":[]}
|