@agntk/agent-harness 0.1.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/NOTICE +41 -0
- package/README.md +445 -0
- package/defaults/agents/summarizer.md +49 -0
- package/defaults/instincts/lead-with-answer.md +24 -0
- package/defaults/instincts/qualify-before-recommending.md +40 -0
- package/defaults/instincts/read-before-edit.md +23 -0
- package/defaults/instincts/search-before-create.md +23 -0
- package/defaults/playbooks/ship-feature.md +31 -0
- package/defaults/rules/ask-before-assuming.md +35 -0
- package/defaults/rules/operations.md +35 -0
- package/defaults/rules/respect-the-user.md +39 -0
- package/defaults/skills/business-analyst.md +181 -0
- package/defaults/skills/content-marketer.md +184 -0
- package/defaults/skills/research.md +34 -0
- package/defaults/tools/example-web-search.md +60 -0
- package/defaults/workflows/daily-reflection.md +54 -0
- package/dist/agent-framework-K4GUIICH.js +344 -0
- package/dist/agent-framework-K4GUIICH.js.map +1 -0
- package/dist/analytics-RPT73WNM.js +12 -0
- package/dist/analytics-RPT73WNM.js.map +1 -0
- package/dist/auto-processor-OLE45UI3.js +13 -0
- package/dist/auto-processor-OLE45UI3.js.map +1 -0
- package/dist/chunk-274RV3YO.js +162 -0
- package/dist/chunk-274RV3YO.js.map +1 -0
- package/dist/chunk-4CWAGBNS.js +168 -0
- package/dist/chunk-4CWAGBNS.js.map +1 -0
- package/dist/chunk-4FDUOGSZ.js +69 -0
- package/dist/chunk-4FDUOGSZ.js.map +1 -0
- package/dist/chunk-5H34JPMB.js +199 -0
- package/dist/chunk-5H34JPMB.js.map +1 -0
- package/dist/chunk-6EMOEYGU.js +102 -0
- package/dist/chunk-6EMOEYGU.js.map +1 -0
- package/dist/chunk-A7BJPQQ6.js +236 -0
- package/dist/chunk-A7BJPQQ6.js.map +1 -0
- package/dist/chunk-AGAAFJEO.js +76 -0
- package/dist/chunk-AGAAFJEO.js.map +1 -0
- package/dist/chunk-BSKDOFRT.js +65 -0
- package/dist/chunk-BSKDOFRT.js.map +1 -0
- package/dist/chunk-CHJ5GNZC.js +100 -0
- package/dist/chunk-CHJ5GNZC.js.map +1 -0
- package/dist/chunk-CSL3ERUI.js +307 -0
- package/dist/chunk-CSL3ERUI.js.map +1 -0
- package/dist/chunk-DA7IKHC4.js +229 -0
- package/dist/chunk-DA7IKHC4.js.map +1 -0
- package/dist/chunk-DGUM43GV.js +11 -0
- package/dist/chunk-DGUM43GV.js.map +1 -0
- package/dist/chunk-DTTXPHFW.js +211 -0
- package/dist/chunk-DTTXPHFW.js.map +1 -0
- package/dist/chunk-FD55B3IO.js +204 -0
- package/dist/chunk-FD55B3IO.js.map +1 -0
- package/dist/chunk-FLZU44SV.js +230 -0
- package/dist/chunk-FLZU44SV.js.map +1 -0
- package/dist/chunk-GJNNR2RA.js +200 -0
- package/dist/chunk-GJNNR2RA.js.map +1 -0
- package/dist/chunk-GNUSHD2Y.js +111 -0
- package/dist/chunk-GNUSHD2Y.js.map +1 -0
- package/dist/chunk-GUJTBGVS.js +2212 -0
- package/dist/chunk-GUJTBGVS.js.map +1 -0
- package/dist/chunk-IZ6UZ3ZL.js +207 -0
- package/dist/chunk-IZ6UZ3ZL.js.map +1 -0
- package/dist/chunk-JKMGYWXB.js +197 -0
- package/dist/chunk-JKMGYWXB.js.map +1 -0
- package/dist/chunk-KFX54TQM.js +165 -0
- package/dist/chunk-KFX54TQM.js.map +1 -0
- package/dist/chunk-M7NXUK55.js +199 -0
- package/dist/chunk-M7NXUK55.js.map +1 -0
- package/dist/chunk-MPZ3BPUI.js +374 -0
- package/dist/chunk-MPZ3BPUI.js.map +1 -0
- package/dist/chunk-OC6YSTDX.js +119 -0
- package/dist/chunk-OC6YSTDX.js.map +1 -0
- package/dist/chunk-RC6MEZB6.js +469 -0
- package/dist/chunk-RC6MEZB6.js.map +1 -0
- package/dist/chunk-RY3ZFII7.js +3440 -0
- package/dist/chunk-RY3ZFII7.js.map +1 -0
- package/dist/chunk-TAT6JU3X.js +167 -0
- package/dist/chunk-TAT6JU3X.js.map +1 -0
- package/dist/chunk-UDZIS2AQ.js +79 -0
- package/dist/chunk-UDZIS2AQ.js.map +1 -0
- package/dist/chunk-UPLBF4RZ.js +115 -0
- package/dist/chunk-UPLBF4RZ.js.map +1 -0
- package/dist/chunk-UWQTZMNI.js +154 -0
- package/dist/chunk-UWQTZMNI.js.map +1 -0
- package/dist/chunk-W4T7PGI2.js +346 -0
- package/dist/chunk-W4T7PGI2.js.map +1 -0
- package/dist/chunk-XTBKL5BI.js +111 -0
- package/dist/chunk-XTBKL5BI.js.map +1 -0
- package/dist/chunk-YIJY5DBV.js +399 -0
- package/dist/chunk-YIJY5DBV.js.map +1 -0
- package/dist/chunk-YUFNYN2H.js +242 -0
- package/dist/chunk-YUFNYN2H.js.map +1 -0
- package/dist/chunk-Z2PUCXTZ.js +94 -0
- package/dist/chunk-Z2PUCXTZ.js.map +1 -0
- package/dist/chunk-ZZJOFKAT.js +13 -0
- package/dist/chunk-ZZJOFKAT.js.map +1 -0
- package/dist/cli/index.js +3661 -0
- package/dist/cli/index.js.map +1 -0
- package/dist/config-WVMRUOCA.js +13 -0
- package/dist/config-WVMRUOCA.js.map +1 -0
- package/dist/context-loader-3ORBPMHJ.js +13 -0
- package/dist/context-loader-3ORBPMHJ.js.map +1 -0
- package/dist/conversation-QDEIDQPH.js +22 -0
- package/dist/conversation-QDEIDQPH.js.map +1 -0
- package/dist/cost-tracker-RS3W7SVY.js +24 -0
- package/dist/cost-tracker-RS3W7SVY.js.map +1 -0
- package/dist/delegate-VJCJLYEK.js +29 -0
- package/dist/delegate-VJCJLYEK.js.map +1 -0
- package/dist/emotional-state-VQVRA6ED.js +206 -0
- package/dist/emotional-state-VQVRA6ED.js.map +1 -0
- package/dist/env-discovery-2BLVMAIM.js +251 -0
- package/dist/env-discovery-2BLVMAIM.js.map +1 -0
- package/dist/export-6GCYHEHQ.js +165 -0
- package/dist/export-6GCYHEHQ.js.map +1 -0
- package/dist/graph-YUIPOSOO.js +14 -0
- package/dist/graph-YUIPOSOO.js.map +1 -0
- package/dist/harness-LCHA3DWP.js +10 -0
- package/dist/harness-LCHA3DWP.js.map +1 -0
- package/dist/harness-WE4SLCML.js +26 -0
- package/dist/harness-WE4SLCML.js.map +1 -0
- package/dist/health-NZ6WNIMV.js +23 -0
- package/dist/health-NZ6WNIMV.js.map +1 -0
- package/dist/index.d.ts +3612 -0
- package/dist/index.js +13501 -0
- package/dist/index.js.map +1 -0
- package/dist/indexer-LONANRRM.js +16 -0
- package/dist/indexer-LONANRRM.js.map +1 -0
- package/dist/instinct-learner-SRM72DHF.js +20 -0
- package/dist/instinct-learner-SRM72DHF.js.map +1 -0
- package/dist/intake-4M3HNU43.js +21 -0
- package/dist/intake-4M3HNU43.js.map +1 -0
- package/dist/intelligence-HJOCA4SJ.js +1081 -0
- package/dist/intelligence-HJOCA4SJ.js.map +1 -0
- package/dist/journal-WANJL3MI.js +24 -0
- package/dist/journal-WANJL3MI.js.map +1 -0
- package/dist/loader-C3TKIKZR.js +23 -0
- package/dist/loader-C3TKIKZR.js.map +1 -0
- package/dist/mcp-WTQJJZAO.js +15 -0
- package/dist/mcp-WTQJJZAO.js.map +1 -0
- package/dist/mcp-discovery-WPAQFL6S.js +377 -0
- package/dist/mcp-discovery-WPAQFL6S.js.map +1 -0
- package/dist/mcp-installer-6O2XXD3V.js +394 -0
- package/dist/mcp-installer-6O2XXD3V.js.map +1 -0
- package/dist/metrics-KXGNFAAB.js +20 -0
- package/dist/metrics-KXGNFAAB.js.map +1 -0
- package/dist/primitive-registry-I6VTIR4W.js +512 -0
- package/dist/primitive-registry-I6VTIR4W.js.map +1 -0
- package/dist/project-discovery-C4UMD7JI.js +246 -0
- package/dist/project-discovery-C4UMD7JI.js.map +1 -0
- package/dist/provider-LQHQX7Z7.js +26 -0
- package/dist/provider-LQHQX7Z7.js.map +1 -0
- package/dist/provider-SXPQZ74H.js +28 -0
- package/dist/provider-SXPQZ74H.js.map +1 -0
- package/dist/rate-limiter-RLRVM325.js +22 -0
- package/dist/rate-limiter-RLRVM325.js.map +1 -0
- package/dist/rule-engine-YGQ3RYZM.js +182 -0
- package/dist/rule-engine-YGQ3RYZM.js.map +1 -0
- package/dist/scaffold-A3VRRCBV.js +347 -0
- package/dist/scaffold-A3VRRCBV.js.map +1 -0
- package/dist/scheduler-XHHIVHRI.js +397 -0
- package/dist/scheduler-XHHIVHRI.js.map +1 -0
- package/dist/search-V3W5JMJG.js +75 -0
- package/dist/search-V3W5JMJG.js.map +1 -0
- package/dist/semantic-search-2DTOO5UX.js +241 -0
- package/dist/semantic-search-2DTOO5UX.js.map +1 -0
- package/dist/serve-DTQ3HENY.js +291 -0
- package/dist/serve-DTQ3HENY.js.map +1 -0
- package/dist/sessions-CZGVXKQE.js +21 -0
- package/dist/sessions-CZGVXKQE.js.map +1 -0
- package/dist/sources-RW5DT56F.js +32 -0
- package/dist/sources-RW5DT56F.js.map +1 -0
- package/dist/starter-packs-76YUVHEU.js +893 -0
- package/dist/starter-packs-76YUVHEU.js.map +1 -0
- package/dist/state-GMXILIHW.js +13 -0
- package/dist/state-GMXILIHW.js.map +1 -0
- package/dist/state-merge-NKO5FRBA.js +174 -0
- package/dist/state-merge-NKO5FRBA.js.map +1 -0
- package/dist/telemetry-UC6PBXC7.js +22 -0
- package/dist/telemetry-UC6PBXC7.js.map +1 -0
- package/dist/tool-executor-MJ7IG7PQ.js +28 -0
- package/dist/tool-executor-MJ7IG7PQ.js.map +1 -0
- package/dist/tools-DZ4KETET.js +20 -0
- package/dist/tools-DZ4KETET.js.map +1 -0
- package/dist/types-EW7AIB3R.js +18 -0
- package/dist/types-EW7AIB3R.js.map +1 -0
- package/dist/types-WGDLSPO6.js +16 -0
- package/dist/types-WGDLSPO6.js.map +1 -0
- package/dist/universal-installer-QGS4SJGX.js +578 -0
- package/dist/universal-installer-QGS4SJGX.js.map +1 -0
- package/dist/validator-7WXMDIHH.js +22 -0
- package/dist/validator-7WXMDIHH.js.map +1 -0
- package/dist/verification-gate-FYXUX6LH.js +246 -0
- package/dist/verification-gate-FYXUX6LH.js.map +1 -0
- package/dist/versioning-Z3XNE2Q2.js +271 -0
- package/dist/versioning-Z3XNE2Q2.js.map +1 -0
- package/dist/watcher-ISJC7YKL.js +109 -0
- package/dist/watcher-ISJC7YKL.js.map +1 -0
- package/dist/web-server-DD7ZOP46.js +28 -0
- package/dist/web-server-DD7ZOP46.js.map +1 -0
- package/package.json +76 -0
- package/sources.yaml +121 -0
- package/templates/assistant/CORE.md +24 -0
- package/templates/assistant/SYSTEM.md +24 -0
- package/templates/assistant/config.yaml +51 -0
- package/templates/base/CORE.md +17 -0
- package/templates/base/SYSTEM.md +24 -0
- package/templates/base/config.yaml +51 -0
- package/templates/claude-opus/config.yaml +51 -0
- package/templates/code-reviewer/CORE.md +25 -0
- package/templates/code-reviewer/SYSTEM.md +30 -0
- package/templates/code-reviewer/config.yaml +51 -0
- package/templates/gpt4/config.yaml +51 -0
- package/templates/local/config.yaml +51 -0

package/dist/chunk-CSL3ERUI.js
@@ -0,0 +1,307 @@
#!/usr/bin/env node

import {
  parseHarnessDocument
} from "./chunk-UPLBF4RZ.js";
import {
  generate,
  getModel
} from "./chunk-IZ6UZ3ZL.js";
import {
  loadConfig
} from "./chunk-CHJ5GNZC.js";

// src/runtime/journal.ts
import { readFileSync, writeFileSync, readdirSync, existsSync, mkdirSync } from "fs";
import { join } from "path";
async function synthesizeJournal(harnessDir, date, apiKey) {
  const targetDate = date || (/* @__PURE__ */ new Date()).toISOString().split("T")[0];
  const sessionsDir = join(harnessDir, "memory", "sessions");
  const journalDir = join(harnessDir, "memory", "journal");
  if (!existsSync(journalDir)) {
    mkdirSync(journalDir, { recursive: true });
  }
  const sessions = [];
  if (existsSync(sessionsDir)) {
    const files = readdirSync(sessionsDir).filter(
      (f) => f.endsWith(".md") && f.startsWith(targetDate)
    );
    for (const file of files) {
      try {
        const doc = parseHarnessDocument(join(sessionsDir, file));
        sessions.push(doc);
      } catch {
      }
    }
  }
  if (sessions.length === 0) {
    return {
      date: targetDate,
      sessions: [],
      synthesis: "No sessions recorded today.",
      structured: {
        summary: "No sessions recorded today.",
        insights: [],
        instinct_candidates: [],
        knowledge_updates: []
      },
      instinct_candidates: [],
      tokens_used: 0
    };
  }
  const sessionSummaries = sessions.map((s, i) => {
    const prompt = s.body.match(/## Prompt\n([\s\S]*?)(?=\n## |$)/)?.[1]?.trim() || "";
    const summary = s.body.match(/## Summary\n([\s\S]*?)(?=\n## |$)/)?.[1]?.trim() || "";
    return `Session ${i + 1}:
  Prompt: ${prompt}
  Summary: ${summary}`;
  }).join("\n\n");
  const synthesisPrompt = `You are synthesizing today's agent sessions into a structured journal entry.

Sessions from ${targetDate}:

${sessionSummaries}

Write a journal entry with these EXACT sections (use the exact headers shown):

## Summary
2-3 sentence synthesis of what happened today.

## Insights
Bullet points (starting with "- ") of patterns, recurring themes, or notable observations.

## Instinct Candidates
Bullet points (starting with "- INSTINCT: ") of behaviors that should become reflexive rules.

## Knowledge Updates
Bullet points (starting with "- ") of new facts, corrections, or learnings that should be remembered.`;
  const config = loadConfig(harnessDir);
  const model = getModel(config, apiKey);
  const result = await generate({
    model,
    system: "You are a reflective journal synthesizer. Be concise and insightful. Follow the output format exactly.",
    prompt: synthesisPrompt
  });
  const structured = parseJournalSynthesis(result.text);
  const journalPath = join(journalDir, `${targetDate}.md`);
  const journalContent = `---
id: journal-${targetDate}
tags: [journal, daily]
created: ${targetDate}
updated: ${(/* @__PURE__ */ new Date()).toISOString().split("T")[0]}
author: infrastructure
status: active
---

<!-- L0: Journal for ${targetDate} \u2014 ${sessions.length} sessions synthesized. -->
<!-- L1: ${structured.summary.slice(0, 200)} -->

# Journal: ${targetDate}

**Sessions:** ${sessions.length}
**Tokens used:** ${result.usage.totalTokens}

${result.text}
`;
  writeFileSync(journalPath, journalContent, "utf-8");
  return {
    date: targetDate,
    sessions,
    synthesis: result.text,
    structured,
    instinct_candidates: structured.instinct_candidates,
    tokens_used: result.usage.totalTokens
  };
}
function parseJournalSynthesis(text) {
  const sectionRegex = /## (Summary|Insights|Instinct Candidates|Knowledge Updates|Patterns)\n([\s\S]*?)(?=\n## |\n*$)/g;
  const sections = /* @__PURE__ */ new Map();
  for (const match of text.matchAll(sectionRegex)) {
    sections.set(match[1].toLowerCase(), match[2].trim());
  }
  const extractBullets = (content) => {
    if (!content) return [];
    return content.split("\n").filter((line) => line.startsWith("- ")).map((line) => line.slice(2).trim()).filter(Boolean);
  };
  const summary = sections.get("summary") ?? "";
  const insightsRaw = sections.get("insights") ?? sections.get("patterns") ?? "";
  const insights = extractBullets(insightsRaw);
  const instinctRaw = sections.get("instinct candidates") ?? "";
  const instinct_candidates = extractBullets(instinctRaw).map(
    (line) => line.replace(/^INSTINCT:\s*/i, "")
  );
  const knowledgeRaw = sections.get("knowledge updates") ?? "";
  const knowledge_updates = extractBullets(knowledgeRaw);
  return { summary, insights, instinct_candidates, knowledge_updates };
}
async function synthesizeJournalRange(harnessDir, options) {
  const sessionsDir = join(harnessDir, "memory", "sessions");
  if (!existsSync(sessionsDir)) return [];
  const files = readdirSync(sessionsDir).filter(
    (f) => f.endsWith(".md") && !f.startsWith(".") && !f.startsWith("_")
  );
  const dateSet = /* @__PURE__ */ new Set();
  for (const file of files) {
    const match = file.match(/^(\d{4}-\d{2}-\d{2})/);
    if (match) dateSet.add(match[1]);
  }
  let dates = [...dateSet].sort();
  if (!options.all) {
    const from = options.from;
    const to = options.to || (/* @__PURE__ */ new Date()).toISOString().split("T")[0];
    if (from) {
      dates = dates.filter((d) => d >= from && d <= to);
    }
  }
  if (dates.length === 0) return [];
  const journalDir = join(harnessDir, "memory", "journal");
  const existingJournals = /* @__PURE__ */ new Set();
  if (existsSync(journalDir)) {
    for (const jf of readdirSync(journalDir)) {
      const match = jf.match(/^(\d{4}-\d{2}-\d{2})/);
      if (match) existingJournals.add(match[1]);
    }
  }
  const entries = [];
  for (const date of dates) {
    if (!options.force && existingJournals.has(date)) continue;
    const entry = await synthesizeJournal(harnessDir, date, options.apiKey);
    entries.push(entry);
  }
  return entries;
}
function listUnjournaled(harnessDir) {
  const sessionsDir = join(harnessDir, "memory", "sessions");
  if (!existsSync(sessionsDir)) return [];
  const sessionDates = /* @__PURE__ */ new Set();
  for (const file of readdirSync(sessionsDir)) {
    if (!file.endsWith(".md") || file.startsWith(".")) continue;
    const match = file.match(/^(\d{4}-\d{2}-\d{2})/);
    if (match) sessionDates.add(match[1]);
  }
  const journalDir = join(harnessDir, "memory", "journal");
  const journalDates = /* @__PURE__ */ new Set();
  if (existsSync(journalDir)) {
    for (const file of readdirSync(journalDir)) {
      const match = file.match(/^(\d{4}-\d{2}-\d{2})/);
      if (match) journalDates.add(match[1]);
    }
  }
  return [...sessionDates].filter((d) => !journalDates.has(d)).sort();
}
function listJournals(harnessDir) {
  const journalDir = join(harnessDir, "memory", "journal");
  if (!existsSync(journalDir)) return [];
  return readdirSync(journalDir).filter((f) => f.endsWith(".md") && !f.startsWith(".")).sort().reverse();
}
function getWeekStart(dateStr) {
  const date = /* @__PURE__ */ new Date(dateStr + "T12:00:00Z");
  const day = date.getUTCDay();
  const diff = day === 0 ? 6 : day - 1;
  date.setUTCDate(date.getUTCDate() - diff);
  return date.toISOString().split("T")[0];
}
function getWeekEnd(dateStr) {
  const start = /* @__PURE__ */ new Date(getWeekStart(dateStr) + "T12:00:00Z");
  start.setUTCDate(start.getUTCDate() + 6);
  return start.toISOString().split("T")[0];
}
function compressJournals(harnessDir, options) {
  const journalDir = join(harnessDir, "memory", "journal");
  if (!existsSync(journalDir)) return [];
  const weeklyDir = join(journalDir, "weekly");
  if (!existsSync(weeklyDir)) {
    mkdirSync(weeklyDir, { recursive: true });
  }
  const files = readdirSync(journalDir).filter((f) => f.endsWith(".md") && !f.startsWith(".") && !f.startsWith("_")).sort();
  const weeks = /* @__PURE__ */ new Map();
  for (const file of files) {
    const dateMatch = file.match(/^(\d{4}-\d{2}-\d{2})/);
    if (!dateMatch) continue;
    const weekStart = getWeekStart(dateMatch[1]);
    if (!weeks.has(weekStart)) weeks.set(weekStart, []);
    weeks.get(weekStart).push(file);
  }
  const results = [];
  for (const [weekStart, journalFiles] of weeks) {
    const weekEnd = getWeekEnd(weekStart);
    const weeklyFile = join(weeklyDir, `${weekStart}.md`);
    if (!options?.force && existsSync(weeklyFile)) continue;
    const today = (/* @__PURE__ */ new Date()).toISOString().split("T")[0];
    const currentWeekStart = getWeekStart(today);
    if (weekStart === currentWeekStart) continue;
    const allSummaries = [];
    const allInsights = [];
    const allInstinctCandidates = [];
    const allKnowledgeUpdates = [];
    const journalDates = [];
    for (const file of journalFiles) {
      const content = readFileSync(join(journalDir, file), "utf-8");
      const dateMatch = file.match(/^(\d{4}-\d{2}-\d{2})/);
      if (dateMatch) journalDates.push(dateMatch[1]);
      const structured = parseJournalSynthesis(content);
      if (structured.summary) allSummaries.push(`**${dateMatch?.[1]}:** ${structured.summary}`);
      allInsights.push(...structured.insights);
      allInstinctCandidates.push(...structured.instinct_candidates);
      allKnowledgeUpdates.push(...structured.knowledge_updates);
    }
    const uniqueInsights = [...new Set(allInsights)];
    const uniqueInstincts = [...new Set(allInstinctCandidates)];
    const uniqueKnowledge = [...new Set(allKnowledgeUpdates)];
    const weekSummary = allSummaries.join("\n\n");
    const insightsBullets = uniqueInsights.map((i) => `- ${i}`).join("\n");
    const instinctBullets = uniqueInstincts.map((i) => `- INSTINCT: ${i}`).join("\n");
    const knowledgeBullets = uniqueKnowledge.map((k) => `- ${k}`).join("\n");
    const weeklyContent = `---
id: weekly-${weekStart}
tags: [journal, weekly]
created: ${weekStart}
updated: ${(/* @__PURE__ */ new Date()).toISOString().split("T")[0]}
author: infrastructure
status: active
---

<!-- L0: Weekly journal roll-up ${weekStart} to ${weekEnd} (${journalDates.length} days) -->
<!-- L1: ${allSummaries[0]?.slice(0, 200) || "No summaries available"} -->

# Weekly Journal: ${weekStart} to ${weekEnd}

**Days journaled:** ${journalDates.length}
**Dates:** ${journalDates.join(", ")}

## Summary
${weekSummary || "No daily summaries available."}

## Insights
${insightsBullets || "(none)"}

## Instinct Candidates
${instinctBullets || "(none)"}

## Knowledge Updates
${knowledgeBullets || "(none)"}
`;
    writeFileSync(weeklyFile, weeklyContent, "utf-8");
    results.push({
      weekStart,
      weekEnd,
      journalDates,
      summary: weekSummary,
      allInsights: uniqueInsights,
      allInstinctCandidates: uniqueInstincts,
      allKnowledgeUpdates: uniqueKnowledge,
      filePath: weeklyFile
    });
  }
  return results;
}

export {
  synthesizeJournal,
  parseJournalSynthesis,
  synthesizeJournalRange,
  listUnjournaled,
  listJournals,
  compressJournals
};
//# sourceMappingURL=chunk-CSL3ERUI.js.map
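
A minimal usage sketch of this journal module, based only on the signatures and return shapes visible in the chunk above. The import path assumes the package root re-exports these functions (this diff shows only the internal chunk boundary); `./.harness` and the `API_KEY` variable are hypothetical placeholders.

import { synthesizeJournal, listUnjournaled, compressJournals } from "@agntk/agent-harness";

const harnessDir = "./.harness"; // hypothetical harness directory

// Synthesize a journal entry for every session date that lacks one.
for (const date of listUnjournaled(harnessDir)) {
  const entry = await synthesizeJournal(harnessDir, date, process.env.API_KEY);
  console.log(`${entry.date}: ${entry.structured.summary} (${entry.tokens_used} tokens)`);
}

// Roll completed past weeks up into memory/journal/weekly/ (file-based, no LLM calls).
compressJournals(harnessDir, { force: false });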

package/dist/chunk-CSL3ERUI.js.map
@@ -0,0 +1 @@
(single-line source map: sources ["../src/runtime/journal.ts"]; sourcesContent embeds the original TypeScript for the chunk above, including the JournalSynthesis, JournalEntry, and WeekSummary interfaces and per-function JSDoc, followed by the base64-VLQ mappings)
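
Back in the journal chunk: `parseJournalSynthesis` tolerates missing sections and a legacy `## Patterns` header. A worked example of its contract, traced directly from the section regex and bullet extraction above:

const sample = `## Summary
Shipped the tool executor.

## Insights
- HTTP errors need truncation

## Instinct Candidates
- INSTINCT: Read files before editing`;

parseJournalSynthesis(sample);
// => {
//   summary: "Shipped the tool executor.",
//   insights: ["HTTP errors need truncation"],
//   instinct_candidates: ["Read files before editing"], // "INSTINCT:" prefix stripped
//   knowledge_updates: []                               // missing section -> empty array
// }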

package/dist/chunk-DA7IKHC4.js
@@ -0,0 +1,229 @@
#!/usr/bin/env node

import {
  loadTools
} from "./chunk-XTBKL5BI.js";
import {
  log
} from "./chunk-BSKDOFRT.js";

// src/runtime/tool-executor.ts
import { tool as aiTool, jsonSchema } from "ai";
function resolveEndpoint(endpoint, input) {
  return endpoint.replace(/\{(\w+)\}/g, (_match, key) => {
    const value = input[key];
    if (value === void 0 || value === null) {
      return `{${key}}`;
    }
    return encodeURIComponent(String(value));
  });
}
function buildOperationSchema(operation) {
  const params = [];
  const paramRegex = /\{(\w+)\}/g;
  let match;
  while ((match = paramRegex.exec(operation.endpoint)) !== null) {
    params.push(match[1]);
  }
  const properties = {};
  for (const param of params) {
    properties[param] = { type: "string", description: `Value for ${param}` };
  }
  if (["POST", "PUT", "PATCH"].includes(operation.method)) {
    properties["body"] = { type: "string", description: "Request body (JSON string)" };
  }
  properties["query"] = { type: "string", description: "Query parameters (key=value&key2=value2)" };
  return {
    type: "object",
    properties,
    required: params
  };
}
async function executeHttpOperation(operation, baseUrl, authHeaders, input, timeoutMs) {
  const resolvedPath = resolveEndpoint(operation.endpoint, input);
  let url = resolvedPath.startsWith("http") ? resolvedPath : `${baseUrl}${resolvedPath}`;
  const query = input["query"];
  if (typeof query === "string" && query.length > 0) {
    const separator = url.includes("?") ? "&" : "?";
    url = `${url}${separator}${query}`;
  }
  const headers = {
    "Content-Type": "application/json",
    "Accept": "application/json",
    ...authHeaders
  };
  const fetchOptions = {
    method: operation.method,
    headers,
    signal: AbortSignal.timeout(timeoutMs)
  };
  if (["POST", "PUT", "PATCH"].includes(operation.method)) {
    const body = input["body"];
    if (typeof body === "string") {
      fetchOptions.body = body;
    } else if (body !== void 0 && body !== null) {
      fetchOptions.body = JSON.stringify(body);
    }
  }
  const response = await fetch(url, fetchOptions);
  if (!response.ok) {
    const errorText = await response.text().catch(() => "Unknown error");
    throw new Error(`HTTP ${response.status} ${response.statusText}: ${errorText.slice(0, 500)}`);
  }
  const contentType = response.headers.get("content-type") ?? "";
  if (contentType.includes("application/json")) {
    return response.json();
  }
  return response.text();
}
function sanitizeToolName(name) {
  return name.replace(/[^a-zA-Z0-9_-]/g, "_").replace(/_+/g, "_").replace(/^_|_$/g, "").slice(0, 64);
}
function extractBaseUrl(toolDef) {
  for (const op of toolDef.operations) {
    if (op.endpoint.startsWith("http")) {
      try {
        const url = new URL(op.endpoint);
        return `${url.protocol}//${url.host}`;
      } catch {
      }
    }
  }
  const urlMatch = toolDef.doc.body.match(/(?:base[_ ]?url|api[_ ]?url|endpoint)\s*[:=]\s*`?(https?:\/\/[^\s`"']+)/i);
  if (urlMatch) {
    try {
      const url = new URL(urlMatch[1]);
      return `${url.protocol}//${url.host}`;
    } catch {
    }
  }
  return "";
}
function buildAuthHeaders(toolDef) {
  const headers = {};
  for (const auth of toolDef.auth) {
    const value = process.env[auth.envVar];
    if (!value) continue;
    const envLower = auth.envVar.toLowerCase();
    if (envLower.includes("bot_token")) {
      headers["Authorization"] = `Bot ${value}`;
    } else if (envLower.includes("token") || envLower.includes("api_key") || envLower.includes("apikey")) {
      headers["Authorization"] = `Bearer ${value}`;
    } else {
      headers["Authorization"] = `Bearer ${value}`;
    }
  }
  return headers;
}
function convertToolDefinition(toolDef, config) {
  const tools = {};
  const baseUrl = extractBaseUrl(toolDef);
  const allowHttp = config.allowHttpExecution !== false;
  const timeoutMs = config.toolTimeoutMs ?? 3e4;
  for (const operation of toolDef.operations) {
    const toolName = sanitizeToolName(`${toolDef.id}_${operation.name}`);
    const opSchema = buildOperationSchema(operation);
    tools[toolName] = aiTool({
      description: `[${toolDef.id}] ${operation.method} ${operation.endpoint} \u2014 ${toolDef.doc.l0}`,
      inputSchema: jsonSchema(opSchema),
      execute: async (input) => {
        const typedInput = input;
        if (!allowHttp) {
          return { error: "HTTP tool execution is disabled" };
        }
        const missingAuth = toolDef.auth.filter((a) => !process.env[a.envVar]);
        if (missingAuth.length > 0) {
          return {
            error: `Missing required auth: ${missingAuth.map((a) => a.envVar).join(", ")}`
          };
        }
        const authHeaders = buildAuthHeaders(toolDef);
        try {
          const result = await executeHttpOperation(
            operation,
            baseUrl,
            authHeaders,
            typedInput,
            timeoutMs
          );
          return result;
        } catch (err) {
          const message = err instanceof Error ? err.message : String(err);
          log.error(`Tool ${toolName} execution failed: ${message}`);
          return { error: message };
        }
      }
    });
  }
  return tools;
}
function convertProgrammaticTool(pt) {
  const toolName = sanitizeToolName(pt.name);
  return {
    [toolName]: aiTool({
      description: pt.description,
      inputSchema: pt.inputSchema,
      execute: async (input) => {
        try {
          return await pt.execute(input);
        } catch (err) {
          const message = err instanceof Error ? err.message : String(err);
          log.error(`Tool ${toolName} execution failed: ${message}`);
          return { error: message };
        }
      }
    })
  };
}
function buildToolSet(harnessDir, config, mcpTools) {
  const executorConfig = config ?? {};
  const tools = {};
  const toolDefs = loadTools(harnessDir);
  for (const toolDef of toolDefs) {
    if (toolDef.status !== "active") continue;
    if (toolDef.operations.length === 0) continue;
    const converted = convertToolDefinition(toolDef, executorConfig);
    Object.assign(tools, converted);
  }
  if (executorConfig.tools) {
    for (const pt of executorConfig.tools) {
      const converted = convertProgrammaticTool(pt);
      Object.assign(tools, converted);
    }
  }
  if (mcpTools) {
    Object.assign(tools, mcpTools);
  }
  return tools;
}
function createToolCallTracker() {
  const calls = [];
  let totalDurationMs = 0;
  return {
    record(result) {
      calls.push(result);
      totalDurationMs += result.durationMs;
    },
    getRecord() {
      return { calls: [...calls], totalDurationMs };
    }
  };
}
function getToolSetSummary(tools) {
  return Object.entries(tools).map(([name, t]) => {
    const desc = t.description ?? "";
    return `${name}: ${desc}`;
  });
}

export {
  resolveEndpoint,
  buildOperationSchema,
  executeHttpOperation,
  buildAuthHeaders,
  convertToolDefinition,
  buildToolSet,
  createToolCallTracker,
  getToolSetSummary
};
//# sourceMappingURL=chunk-DA7IKHC4.js.map
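
To make the pure helpers in this chunk concrete, a worked example with a GitHub-style operation. The operation object shape follows how `method` and `endpoint` are used above; the values are hypothetical.

const op = { name: "get_pull", method: "GET", endpoint: "/repos/{owner}/{repo}/pulls/{number}" };

resolveEndpoint(op.endpoint, { owner: "octocat", repo: "hello", number: 7 });
// => "/repos/octocat/hello/pulls/7" (missing params are left as "{param}" literals)

buildOperationSchema(op);
// => {
//   type: "object",
//   properties: {
//     owner:  { type: "string", description: "Value for owner" },
//     repo:   { type: "string", description: "Value for repo" },
//     number: { type: "string", description: "Value for number" },
//     query:  { type: "string", description: "Query parameters (key=value&key2=value2)" }
//   },
//   required: ["owner", "repo", "number"] // GET, so no "body" property is added
// }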

package/dist/chunk-DA7IKHC4.js.map
@@ -0,0 +1 @@
(single-line source map: sources ["../src/runtime/tool-executor.ts"]; sourcesContent embeds the original TypeScript for the chunk above, including the ToolCallResult, ToolCallRecord, ProgrammaticTool, and ToolExecutorConfig interfaces and per-function JSDoc; the map is cut off mid-mappings in this diff)
QAAQ;AAC/B,eAAO,GAAG,IAAI,QAAQ,KAAK,IAAI,IAAI;AAAA,MACrC,QAAQ;AAAA,MAER;AAAA,IACF;AAAA,EACF;AAGA,QAAM,WAAW,QAAQ,IAAI,KAAK,MAAM,0EAA0E;AAClH,MAAI,UAAU;AACZ,QAAI;AACF,YAAM,MAAM,IAAI,IAAI,SAAS,CAAC,CAAC;AAC/B,aAAO,GAAG,IAAI,QAAQ,KAAK,IAAI,IAAI;AAAA,IACrC,QAAQ;AAAA,IAER;AAAA,EACF;AAEA,SAAO;AACT;AAMO,SAAS,iBAAiB,SAAiD;AAChF,QAAM,UAAkC,CAAC;AAEzC,aAAW,QAAQ,QAAQ,MAAM;AAC/B,UAAM,QAAQ,QAAQ,IAAI,KAAK,MAAM;AACrC,QAAI,CAAC,MAAO;AAGZ,UAAM,WAAW,KAAK,OAAO,YAAY;AACzC,QAAI,SAAS,SAAS,WAAW,GAAG;AAClC,cAAQ,eAAe,IAAI,OAAO,KAAK;AAAA,IACzC,WAAW,SAAS,SAAS,OAAO,KAAK,SAAS,SAAS,SAAS,KAAK,SAAS,SAAS,QAAQ,GAAG;AACpG,cAAQ,eAAe,IAAI,UAAU,KAAK;AAAA,IAC5C,OAAO;AAEL,cAAQ,eAAe,IAAI,UAAU,KAAK;AAAA,IAC5C;AAAA,EACF;AAEA,SAAO;AACT;AAMO,SAAS,sBACd,SACA,QACW;AACX,QAAM,QAAmB,CAAC;AAC1B,QAAM,UAAU,eAAe,OAAO;AACtC,QAAM,YAAY,OAAO,uBAAuB;AAChD,QAAM,YAAY,OAAO,iBAAiB;AAE1C,aAAW,aAAa,QAAQ,YAAY;AAC1C,UAAM,WAAW,iBAAiB,GAAG,QAAQ,EAAE,IAAI,UAAU,IAAI,EAAE;AACnE,UAAM,WAAW,qBAAqB,SAAS;AAE/C,UAAM,QAAQ,IAAI,OAAO;AAAA,MACvB,aAAa,IAAI,QAAQ,EAAE,KAAK,UAAU,MAAM,IAAI,UAAU,QAAQ,WAAM,QAAQ,IAAI,EAAE;AAAA,MAC1F,aAAa,WAAoC,QAAQ;AAAA,MACzD,SAAS,OAAO,UAAU;AACxB,cAAM,aAAa;AAEnB,YAAI,CAAC,WAAW;AACd,iBAAO,EAAE,OAAO,kCAAkC;AAAA,QACpD;AAGA,cAAM,cAAc,QAAQ,KAAK,OAAO,CAAC,MAAM,CAAC,QAAQ,IAAI,EAAE,MAAM,CAAC;AACrE,YAAI,YAAY,SAAS,GAAG;AAC1B,iBAAO;AAAA,YACL,OAAO,0BAA0B,YAAY,IAAI,CAAC,MAAM,EAAE,MAAM,EAAE,KAAK,IAAI,CAAC;AAAA,UAC9E;AAAA,QACF;AAEA,cAAM,cAAc,iBAAiB,OAAO;AAE5C,YAAI;AACF,gBAAM,SAAS,MAAM;AAAA,YACnB;AAAA,YACA;AAAA,YACA;AAAA,YACA;AAAA,YACA;AAAA,UACF;AACA,iBAAO;AAAA,QACT,SAAS,KAAK;AACZ,gBAAM,UAAU,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG;AAC/D,cAAI,MAAM,QAAQ,QAAQ,sBAAsB,OAAO,EAAE;AACzD,iBAAO,EAAE,OAAO,QAAQ;AAAA,QAC1B;AAAA,MACF;AAAA,IACF,CAAC;AAAA,EACH;AAEA,SAAO;AACT;AAKA,SAAS,wBAAwB,IAAiC;AAChE,QAAM,WAAW,iBAAiB,GAAG,IAAI;AAEzC,SAAO;AAAA,IACL,CAAC,QAAQ,GAAG,OAAO;AAAA,MACjB,aAAa,GAAG;AAAA,MAChB,aAAa,GAAG;AAAA,MAChB,SAAS,OAAO,UAAmB;AACjC,YAAI;AACF,iBAAO,MAAM,GAAG,QAAQ,KAAgC;AAAA,QAC1D,SAAS,KAAK;AACZ,gBAAM,UAAU,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG;AAC/D,cAAI,MAAM,QAAQ,QAAQ,sBAAsB,OAAO,EAAE;AACzD,iBAAO,EAAE,OAAO,QAAQ;AAAA,QAC1B;AAAA,MACF;AAAA,IACF,CAAC;AAAA,EACH;AACF;AAcO,SAAS,aACd,YACA,QACA,UACW;AACX,QAAM,iBAAiB,UAAU,CAAC;AAClC,QAAM,QAAmB,CAAC;AAG1B,QAAM,WAAW,UAAU,UAAU;AACrC,aAAW,WAAW,UAAU;AAE9B,QAAI,QAAQ,WAAW,SAAU;AAGjC,QAAI,QAAQ,WAAW,WAAW,EAAG;AAErC,UAAM,YAAY,sBAAsB,SAAS,cAAc;AAC/D,WAAO,OAAO,OAAO,SAAS;AAAA,EAChC;AAGA,MAAI,eAAe,OAAO;AACxB,eAAW,MAAM,eAAe,OAAO;AACrC,YAAM,YAAY,wBAAwB,EAAE;AAC5C,aAAO,OAAO,OAAO,SAAS;AAAA,IAChC;AAAA,EACF;AAGA,MAAI,UAAU;AACZ,WAAO,OAAO,OAAO,QAAQ;AAAA,EAC/B;AAEA,SAAO;AACT;AAKO,SAAS,wBAGd;AACA,QAAM,QAA0B,CAAC;AACjC,MAAI,kBAAkB;AAEtB,SAAO;AAAA,IACL,OAAO,QAAwB;AAC7B,YAAM,KAAK,MAAM;AACjB,yBAAmB,OAAO;AAAA,IAC5B;AAAA,IACA,YAA4B;AAC1B,aAAO,EAAE,OAAO,CAAC,GAAG,KAAK,GAAG,gBAAgB;AAAA,IAC9C;AAAA,EACF;AACF;AAKO,SAAS,kBAAkB,OAA4B;AAC5D,SAAO,OAAO,QAAQ,KAAK,EAAE,IAAI,CAAC,CAAC,MAAM,CAAC,MAAM;AAC9C,UAAM,OAAQ,EAA+B,eAAe;AAC5D,WAAO,GAAG,IAAI,KAAK,IAAI;AAAA,EACzB,CAAC;AACH;","names":[]}
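For orientation, the `tool-executor.ts` source embedded in the map above exposes two pure URL helpers, resolveEndpoint and buildOperationSchema. A minimal sketch of their behavior follows; the import path, the GitHub-style endpoint, and the shape of the operation literal are illustrative assumptions, not confirmed by this diff:

// Sketch only: import path and endpoint are assumptions for illustration.
import { resolveEndpoint, buildOperationSchema } from '@agntk/agent-harness';

// Minimal ToolOperation-shaped literal, limited to the fields the helpers read.
const op = { name: 'list_pulls', method: 'GET', endpoint: '/repos/{owner}/{repo}/pulls' };

// {param} placeholders are filled from input and URL-encoded; missing keys stay literal.
resolveEndpoint(op.endpoint, { owner: 'octo cat', repo: 'demo' });
// → '/repos/octo%20cat/demo/pulls'

// Every placeholder becomes a required string property; a free-form 'query'
// property is always added, and POST/PUT/PATCH operations also get 'body'.
buildOperationSchema(op);
// → { type: 'object', properties: { owner, repo, query }, required: ['owner', 'repo'] }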
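The public entry point, buildToolSet, merges three sources in order: markdown-defined tools loaded from the harness directory, programmatic tools from config, and pre-loaded MCP tools, with later sources winning on a name clash. A hedged wiring sketch, again assuming these symbols are re-exported from the package root; the harness path and the echo tool are hypothetical:

import { z } from 'zod';
import { buildToolSet, type ProgrammaticTool } from '@agntk/agent-harness';

// A hypothetical programmatic tool; inputSchema is a Zod type per ProgrammaticTool.
const echo: ProgrammaticTool = {
  name: 'echo',
  description: 'Echo a message back to the model',
  inputSchema: z.object({ message: z.string() }),
  execute: async (input) => ({ echoed: input['message'] }),
};

// Markdown tools that are inactive or define no operations are skipped;
// programmatic and MCP tools are then merged on top via Object.assign.
const tools = buildToolSet('./my-harness', {
  tools: [echo],
  toolTimeoutMs: 10_000,     // per-call HTTP timeout; the source defaults to 30000
  allowHttpExecution: true,  // false makes every HTTP tool return an error object
});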
@@ -0,0 +1,11 @@
+var __require = /* @__PURE__ */ ((x) => typeof require !== "undefined" ? require : typeof Proxy !== "undefined" ? new Proxy(x, {
+  get: (a, b) => (typeof require !== "undefined" ? require : a)[b]
+}) : x)(function(x) {
+  if (typeof require !== "undefined") return require.apply(this, arguments);
+  throw Error('Dynamic require of "' + x + '" is not supported');
+});
+
+export {
+  __require
+};
+//# sourceMappingURL=chunk-DGUM43GV.js.map
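This hunk is chunk-DGUM43GV.js, esbuild's standard shim for dynamic require in ESM output: bundled code calls __require, which delegates to a real CommonJS require when one is in scope and otherwise throws at call time. A behavior sketch, assuming pure ESM where no require is defined; the deep import path is illustrative and may not be permitted by the package's exports map:

// Assumption: this runs as ESM, so no CommonJS `require` is in scope.
import { __require } from '@agntk/agent-harness/dist/chunk-DGUM43GV.js';

try {
  __require('node:fs'); // under CJS interop this would return require('node:fs')
} catch (err) {
  // Pure ESM path: the Proxy-wrapped fallback throws rather than failing silently.
  console.error((err as Error).message); // Dynamic require of "node:fs" is not supported
}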
@@ -0,0 +1 @@
+{"version":3,"sources":[],"sourcesContent":[],"mappings":"","names":[]}