@agntk/agent-harness 0.1.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/NOTICE +41 -0
- package/README.md +445 -0
- package/defaults/agents/summarizer.md +49 -0
- package/defaults/instincts/lead-with-answer.md +24 -0
- package/defaults/instincts/qualify-before-recommending.md +40 -0
- package/defaults/instincts/read-before-edit.md +23 -0
- package/defaults/instincts/search-before-create.md +23 -0
- package/defaults/playbooks/ship-feature.md +31 -0
- package/defaults/rules/ask-before-assuming.md +35 -0
- package/defaults/rules/operations.md +35 -0
- package/defaults/rules/respect-the-user.md +39 -0
- package/defaults/skills/business-analyst.md +181 -0
- package/defaults/skills/content-marketer.md +184 -0
- package/defaults/skills/research.md +34 -0
- package/defaults/tools/example-web-search.md +60 -0
- package/defaults/workflows/daily-reflection.md +54 -0
- package/dist/agent-framework-K4GUIICH.js +344 -0
- package/dist/agent-framework-K4GUIICH.js.map +1 -0
- package/dist/analytics-RPT73WNM.js +12 -0
- package/dist/analytics-RPT73WNM.js.map +1 -0
- package/dist/auto-processor-OLE45UI3.js +13 -0
- package/dist/auto-processor-OLE45UI3.js.map +1 -0
- package/dist/chunk-274RV3YO.js +162 -0
- package/dist/chunk-274RV3YO.js.map +1 -0
- package/dist/chunk-4CWAGBNS.js +168 -0
- package/dist/chunk-4CWAGBNS.js.map +1 -0
- package/dist/chunk-4FDUOGSZ.js +69 -0
- package/dist/chunk-4FDUOGSZ.js.map +1 -0
- package/dist/chunk-5H34JPMB.js +199 -0
- package/dist/chunk-5H34JPMB.js.map +1 -0
- package/dist/chunk-6EMOEYGU.js +102 -0
- package/dist/chunk-6EMOEYGU.js.map +1 -0
- package/dist/chunk-A7BJPQQ6.js +236 -0
- package/dist/chunk-A7BJPQQ6.js.map +1 -0
- package/dist/chunk-AGAAFJEO.js +76 -0
- package/dist/chunk-AGAAFJEO.js.map +1 -0
- package/dist/chunk-BSKDOFRT.js +65 -0
- package/dist/chunk-BSKDOFRT.js.map +1 -0
- package/dist/chunk-CHJ5GNZC.js +100 -0
- package/dist/chunk-CHJ5GNZC.js.map +1 -0
- package/dist/chunk-CSL3ERUI.js +307 -0
- package/dist/chunk-CSL3ERUI.js.map +1 -0
- package/dist/chunk-DA7IKHC4.js +229 -0
- package/dist/chunk-DA7IKHC4.js.map +1 -0
- package/dist/chunk-DGUM43GV.js +11 -0
- package/dist/chunk-DGUM43GV.js.map +1 -0
- package/dist/chunk-DTTXPHFW.js +211 -0
- package/dist/chunk-DTTXPHFW.js.map +1 -0
- package/dist/chunk-FD55B3IO.js +204 -0
- package/dist/chunk-FD55B3IO.js.map +1 -0
- package/dist/chunk-FLZU44SV.js +230 -0
- package/dist/chunk-FLZU44SV.js.map +1 -0
- package/dist/chunk-GJNNR2RA.js +200 -0
- package/dist/chunk-GJNNR2RA.js.map +1 -0
- package/dist/chunk-GNUSHD2Y.js +111 -0
- package/dist/chunk-GNUSHD2Y.js.map +1 -0
- package/dist/chunk-GUJTBGVS.js +2212 -0
- package/dist/chunk-GUJTBGVS.js.map +1 -0
- package/dist/chunk-IZ6UZ3ZL.js +207 -0
- package/dist/chunk-IZ6UZ3ZL.js.map +1 -0
- package/dist/chunk-JKMGYWXB.js +197 -0
- package/dist/chunk-JKMGYWXB.js.map +1 -0
- package/dist/chunk-KFX54TQM.js +165 -0
- package/dist/chunk-KFX54TQM.js.map +1 -0
- package/dist/chunk-M7NXUK55.js +199 -0
- package/dist/chunk-M7NXUK55.js.map +1 -0
- package/dist/chunk-MPZ3BPUI.js +374 -0
- package/dist/chunk-MPZ3BPUI.js.map +1 -0
- package/dist/chunk-OC6YSTDX.js +119 -0
- package/dist/chunk-OC6YSTDX.js.map +1 -0
- package/dist/chunk-RC6MEZB6.js +469 -0
- package/dist/chunk-RC6MEZB6.js.map +1 -0
- package/dist/chunk-RY3ZFII7.js +3440 -0
- package/dist/chunk-RY3ZFII7.js.map +1 -0
- package/dist/chunk-TAT6JU3X.js +167 -0
- package/dist/chunk-TAT6JU3X.js.map +1 -0
- package/dist/chunk-UDZIS2AQ.js +79 -0
- package/dist/chunk-UDZIS2AQ.js.map +1 -0
- package/dist/chunk-UPLBF4RZ.js +115 -0
- package/dist/chunk-UPLBF4RZ.js.map +1 -0
- package/dist/chunk-UWQTZMNI.js +154 -0
- package/dist/chunk-UWQTZMNI.js.map +1 -0
- package/dist/chunk-W4T7PGI2.js +346 -0
- package/dist/chunk-W4T7PGI2.js.map +1 -0
- package/dist/chunk-XTBKL5BI.js +111 -0
- package/dist/chunk-XTBKL5BI.js.map +1 -0
- package/dist/chunk-YIJY5DBV.js +399 -0
- package/dist/chunk-YIJY5DBV.js.map +1 -0
- package/dist/chunk-YUFNYN2H.js +242 -0
- package/dist/chunk-YUFNYN2H.js.map +1 -0
- package/dist/chunk-Z2PUCXTZ.js +94 -0
- package/dist/chunk-Z2PUCXTZ.js.map +1 -0
- package/dist/chunk-ZZJOFKAT.js +13 -0
- package/dist/chunk-ZZJOFKAT.js.map +1 -0
- package/dist/cli/index.js +3661 -0
- package/dist/cli/index.js.map +1 -0
- package/dist/config-WVMRUOCA.js +13 -0
- package/dist/config-WVMRUOCA.js.map +1 -0
- package/dist/context-loader-3ORBPMHJ.js +13 -0
- package/dist/context-loader-3ORBPMHJ.js.map +1 -0
- package/dist/conversation-QDEIDQPH.js +22 -0
- package/dist/conversation-QDEIDQPH.js.map +1 -0
- package/dist/cost-tracker-RS3W7SVY.js +24 -0
- package/dist/cost-tracker-RS3W7SVY.js.map +1 -0
- package/dist/delegate-VJCJLYEK.js +29 -0
- package/dist/delegate-VJCJLYEK.js.map +1 -0
- package/dist/emotional-state-VQVRA6ED.js +206 -0
- package/dist/emotional-state-VQVRA6ED.js.map +1 -0
- package/dist/env-discovery-2BLVMAIM.js +251 -0
- package/dist/env-discovery-2BLVMAIM.js.map +1 -0
- package/dist/export-6GCYHEHQ.js +165 -0
- package/dist/export-6GCYHEHQ.js.map +1 -0
- package/dist/graph-YUIPOSOO.js +14 -0
- package/dist/graph-YUIPOSOO.js.map +1 -0
- package/dist/harness-LCHA3DWP.js +10 -0
- package/dist/harness-LCHA3DWP.js.map +1 -0
- package/dist/harness-WE4SLCML.js +26 -0
- package/dist/harness-WE4SLCML.js.map +1 -0
- package/dist/health-NZ6WNIMV.js +23 -0
- package/dist/health-NZ6WNIMV.js.map +1 -0
- package/dist/index.d.ts +3612 -0
- package/dist/index.js +13501 -0
- package/dist/index.js.map +1 -0
- package/dist/indexer-LONANRRM.js +16 -0
- package/dist/indexer-LONANRRM.js.map +1 -0
- package/dist/instinct-learner-SRM72DHF.js +20 -0
- package/dist/instinct-learner-SRM72DHF.js.map +1 -0
- package/dist/intake-4M3HNU43.js +21 -0
- package/dist/intake-4M3HNU43.js.map +1 -0
- package/dist/intelligence-HJOCA4SJ.js +1081 -0
- package/dist/intelligence-HJOCA4SJ.js.map +1 -0
- package/dist/journal-WANJL3MI.js +24 -0
- package/dist/journal-WANJL3MI.js.map +1 -0
- package/dist/loader-C3TKIKZR.js +23 -0
- package/dist/loader-C3TKIKZR.js.map +1 -0
- package/dist/mcp-WTQJJZAO.js +15 -0
- package/dist/mcp-WTQJJZAO.js.map +1 -0
- package/dist/mcp-discovery-WPAQFL6S.js +377 -0
- package/dist/mcp-discovery-WPAQFL6S.js.map +1 -0
- package/dist/mcp-installer-6O2XXD3V.js +394 -0
- package/dist/mcp-installer-6O2XXD3V.js.map +1 -0
- package/dist/metrics-KXGNFAAB.js +20 -0
- package/dist/metrics-KXGNFAAB.js.map +1 -0
- package/dist/primitive-registry-I6VTIR4W.js +512 -0
- package/dist/primitive-registry-I6VTIR4W.js.map +1 -0
- package/dist/project-discovery-C4UMD7JI.js +246 -0
- package/dist/project-discovery-C4UMD7JI.js.map +1 -0
- package/dist/provider-LQHQX7Z7.js +26 -0
- package/dist/provider-LQHQX7Z7.js.map +1 -0
- package/dist/provider-SXPQZ74H.js +28 -0
- package/dist/provider-SXPQZ74H.js.map +1 -0
- package/dist/rate-limiter-RLRVM325.js +22 -0
- package/dist/rate-limiter-RLRVM325.js.map +1 -0
- package/dist/rule-engine-YGQ3RYZM.js +182 -0
- package/dist/rule-engine-YGQ3RYZM.js.map +1 -0
- package/dist/scaffold-A3VRRCBV.js +347 -0
- package/dist/scaffold-A3VRRCBV.js.map +1 -0
- package/dist/scheduler-XHHIVHRI.js +397 -0
- package/dist/scheduler-XHHIVHRI.js.map +1 -0
- package/dist/search-V3W5JMJG.js +75 -0
- package/dist/search-V3W5JMJG.js.map +1 -0
- package/dist/semantic-search-2DTOO5UX.js +241 -0
- package/dist/semantic-search-2DTOO5UX.js.map +1 -0
- package/dist/serve-DTQ3HENY.js +291 -0
- package/dist/serve-DTQ3HENY.js.map +1 -0
- package/dist/sessions-CZGVXKQE.js +21 -0
- package/dist/sessions-CZGVXKQE.js.map +1 -0
- package/dist/sources-RW5DT56F.js +32 -0
- package/dist/sources-RW5DT56F.js.map +1 -0
- package/dist/starter-packs-76YUVHEU.js +893 -0
- package/dist/starter-packs-76YUVHEU.js.map +1 -0
- package/dist/state-GMXILIHW.js +13 -0
- package/dist/state-GMXILIHW.js.map +1 -0
- package/dist/state-merge-NKO5FRBA.js +174 -0
- package/dist/state-merge-NKO5FRBA.js.map +1 -0
- package/dist/telemetry-UC6PBXC7.js +22 -0
- package/dist/telemetry-UC6PBXC7.js.map +1 -0
- package/dist/tool-executor-MJ7IG7PQ.js +28 -0
- package/dist/tool-executor-MJ7IG7PQ.js.map +1 -0
- package/dist/tools-DZ4KETET.js +20 -0
- package/dist/tools-DZ4KETET.js.map +1 -0
- package/dist/types-EW7AIB3R.js +18 -0
- package/dist/types-EW7AIB3R.js.map +1 -0
- package/dist/types-WGDLSPO6.js +16 -0
- package/dist/types-WGDLSPO6.js.map +1 -0
- package/dist/universal-installer-QGS4SJGX.js +578 -0
- package/dist/universal-installer-QGS4SJGX.js.map +1 -0
- package/dist/validator-7WXMDIHH.js +22 -0
- package/dist/validator-7WXMDIHH.js.map +1 -0
- package/dist/verification-gate-FYXUX6LH.js +246 -0
- package/dist/verification-gate-FYXUX6LH.js.map +1 -0
- package/dist/versioning-Z3XNE2Q2.js +271 -0
- package/dist/versioning-Z3XNE2Q2.js.map +1 -0
- package/dist/watcher-ISJC7YKL.js +109 -0
- package/dist/watcher-ISJC7YKL.js.map +1 -0
- package/dist/web-server-DD7ZOP46.js +28 -0
- package/dist/web-server-DD7ZOP46.js.map +1 -0
- package/package.json +76 -0
- package/sources.yaml +121 -0
- package/templates/assistant/CORE.md +24 -0
- package/templates/assistant/SYSTEM.md +24 -0
- package/templates/assistant/config.yaml +51 -0
- package/templates/base/CORE.md +17 -0
- package/templates/base/SYSTEM.md +24 -0
- package/templates/base/config.yaml +51 -0
- package/templates/claude-opus/config.yaml +51 -0
- package/templates/code-reviewer/CORE.md +25 -0
- package/templates/code-reviewer/SYSTEM.md +30 -0
- package/templates/code-reviewer/config.yaml +51 -0
- package/templates/gpt4/config.yaml +51 -0
- package/templates/local/config.yaml +51 -0
|
@@ -0,0 +1,469 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
|
|
3
|
+
import {
|
|
4
|
+
withFileLockSync
|
|
5
|
+
} from "./chunk-Z2PUCXTZ.js";
|
|
6
|
+
import {
|
|
7
|
+
log
|
|
8
|
+
} from "./chunk-BSKDOFRT.js";
|
|
9
|
+
|
|
10
|
+
// src/runtime/sources.ts
|
|
11
|
+
import { existsSync, readFileSync, writeFileSync, mkdirSync } from "fs";
|
|
12
|
+
import { join, dirname } from "path";
|
|
13
|
+
import { fileURLToPath } from "url";
|
|
14
|
+
import { parse as parseYaml, stringify as stringifyYaml } from "yaml";
|
|
15
|
+
// File name of the user-managed sources list (also the name of the shipped catalog).
var USER_SOURCES_FILE = "sources.yaml";
// Subdirectory of the harness dir where the user sources file lives.
var SOURCES_DIR = "memory";
|
|
17
|
+
/**
 * Locate the sources.yaml shipped with the package by walking up from this
 * module's directory, at most five levels. If nothing is found, fall back to
 * the directory one level above this module.
 */
function getShippedSourcesPath() {
  const moduleDir = dirname(fileURLToPath(import.meta.url));
  let current = moduleDir;
  for (let hop = 0; hop < 5; hop++) {
    const candidate = join(current, "sources.yaml");
    if (existsSync(candidate)) return candidate;
    current = dirname(current);
  }
  // Fallback: assume the catalog sits next to the package root.
  return join(dirname(moduleDir), "sources.yaml");
}
|
|
26
|
+
/**
 * Load the sources.yaml bundled with the package.
 * Returns [] when the file is missing, empty, or unparsable; parse failures
 * are logged as warnings rather than thrown.
 */
function loadShippedSources() {
  const path = getShippedSourcesPath();
  if (!existsSync(path)) return [];
  try {
    const raw = readFileSync(path, "utf-8");
    const parsed = parseYaml(raw);
    // parseYaml returns null for an empty document; guard so an empty file
    // yields [] instead of throwing and logging a spurious warning.
    return normalizeSources(parsed?.sources ?? []);
  } catch (err) {
    log.warn(`Failed to load shipped sources: ${err instanceof Error ? err.message : String(err)}`);
    return [];
  }
}
|
|
38
|
+
/**
 * Load the user-managed sources list from <harnessDir>/memory/sources.yaml.
 * Returns [] when the file is missing, empty, or unparsable; parse failures
 * are logged as warnings rather than thrown.
 */
function loadUserSources(harnessDir) {
  const userPath = join(harnessDir, SOURCES_DIR, USER_SOURCES_FILE);
  if (!existsSync(userPath)) return [];
  try {
    const raw = readFileSync(userPath, "utf-8");
    const parsed = parseYaml(raw);
    // parseYaml returns null for an empty document; guard so an empty file
    // yields [] instead of throwing and logging a spurious warning.
    return normalizeSources(parsed?.sources ?? []);
  } catch (err) {
    log.warn(`Failed to load user sources: ${err instanceof Error ? err.message : String(err)}`);
    return [];
  }
}
|
|
50
|
+
/**
 * Persist the user-managed source list to <harnessDir>/memory/sources.yaml,
 * creating the directory if needed and serializing the write through the
 * harness file lock.
 */
function saveUserSources(harnessDir, sources) {
  const memDir = join(harnessDir, SOURCES_DIR);
  if (!existsSync(memDir)) {
    mkdirSync(memDir, { recursive: true });
  }
  const userPath = join(memDir, USER_SOURCES_FILE);
  const payload = { version: "1.0", sources };
  withFileLockSync(harnessDir, userPath, () => {
    writeFileSync(userPath, stringifyYaml(payload), "utf-8");
  });
}
|
|
59
|
+
/**
 * Merge shipped and user sources into one list. User entries override shipped
 * entries that share the same name (case-insensitive), because they are
 * inserted last into the map.
 */
function loadAllSources(harnessDir) {
  const merged = new Map();
  for (const source of [...loadShippedSources(), ...loadUserSources(harnessDir)]) {
    merged.set(source.name.toLowerCase(), source);
  }
  return [...merged.values()];
}
|
|
71
|
+
/**
 * Add a source to the user list and persist it. Returns the stored record, or
 * null when a user source with the same name (case-insensitive) already exists.
 */
function addSource(harnessDir, source) {
  const userSources = loadUserSources(harnessDir);
  const nameKey = source.name.toLowerCase();
  const duplicate = userSources.some((s) => s.name.toLowerCase() === nameKey);
  if (duplicate) return null;
  // Copy only the known fields so stray properties never reach disk.
  const record = {
    name: source.name,
    url: source.url,
    type: source.type,
    content: source.content,
    tags: source.tags ?? [],
    description: source.description,
    stats: source.stats
  };
  saveUserSources(harnessDir, [...userSources, record]);
  return record;
}
|
|
90
|
+
/**
 * Remove the first user source whose name matches (case-insensitive) and
 * persist the list. Returns true when something was removed, false otherwise.
 */
function removeSource(harnessDir, name) {
  const userSources = loadUserSources(harnessDir);
  const target = name.toLowerCase();
  const index = userSources.findIndex((s) => s.name.toLowerCase() === target);
  if (index < 0) return false;
  // Only the first match is dropped, mirroring the add-side duplicate guard.
  const remaining = [...userSources.slice(0, index), ...userSources.slice(index + 1)];
  saveUserSources(harnessDir, remaining);
  return true;
}
|
|
100
|
+
/**
 * Search the merged source catalog for entries matching a free-text query.
 * Emits one result per content type of each matching source (or only the
 * requested type when options.type is set), sorted by score descending, then
 * name, and capped at options.maxResults (default 20).
 */
function discoverSources(harnessDir, query, options) {
  const maxResults = options?.maxResults ?? 20;
  const queryLower = query.toLowerCase();
  // Words shorter than two characters are too noisy to match on.
  const queryWords = queryLower.split(/\s+/).filter((w) => w.length > 1);
  const nameFilters = options?.sourceNames ?? [];
  const results = [];
  for (const source of loadAllSources(harnessDir)) {
    if (nameFilters.length > 0) {
      const nameMatches = nameFilters.some(
        (n) => source.name.toLowerCase().includes(n.toLowerCase())
      );
      if (!nameMatches) continue;
    }
    if (options?.type && !source.content.includes(options.type)) continue;
    const score = computeSourceScore(source, queryLower, queryWords);
    if (score <= 0) continue;
    const types = options?.type ? [options.type] : source.content;
    for (const type of types) {
      results.push({
        source,
        name: source.name,
        description: source.description ?? "",
        type,
        url: source.url,
        score
      });
    }
  }
  results.sort((a, b) => b.score - a.score || a.name.localeCompare(b.name));
  return results.slice(0, maxResults);
}
|
|
137
|
+
/** Return every known source that declares the given content type. */
function getSourcesForType(harnessDir, type) {
  const matching = [];
  for (const source of loadAllSources(harnessDir)) {
    if (source.content.includes(type)) matching.push(source);
  }
  return matching;
}
|
|
141
|
+
/**
 * Group all known sources by content type. Every supported type is present as
 * a key, mapped to the (possibly empty) list of sources that declare it.
 */
function getSourcesSummary(harnessDir) {
  const sources = loadAllSources(harnessDir);
  const allTypes = ["skills", "agents", "rules", "playbooks", "hooks", "templates", "mcp", "plugins"];
  return Object.fromEntries(
    allTypes.map((type) => [type, sources.filter((s) => s.content.includes(type))])
  );
}
|
|
150
|
+
// Maps each content type to the GitHub repo directories that may contain it.
var CONTENT_TYPE_TO_DIRS = {
  skills: ["skills"],
  agents: ["agents"],
  rules: ["rules"],
  playbooks: ["playbooks"],
  hooks: ["hooks"],
  templates: ["templates"],
  mcp: [],
  // MCP servers come from registries, not GitHub repo dirs
  plugins: ["plugins"]
};
// Cap on GitHub API requests a single discovery run may issue.
var MAX_API_CALLS_PER_DISCOVERY = 50;
// Per-request timeout for GitHub API calls, in milliseconds (10 s).
var GITHUB_CONTENTS_TIMEOUT_MS = 1e4;
|
|
163
|
+
/**
 * Fetch a path via the GitHub Contents API and return the listing as an array
 * (single-file responses are wrapped in one). Returns null on 404, any other
 * HTTP error, timeout, or network failure; everything but 404 is logged.
 * Uses GITHUB_TOKEN for authentication when present.
 */
async function fetchGitHubContents(owner, repo, path, sourceName) {
  const apiUrl = `https://api.github.com/repos/${owner}/${repo}/contents/${path}`;
  const headers = {
    Accept: "application/vnd.github.v3+json",
    "User-Agent": "agent-harness"
  };
  const token = process.env.GITHUB_TOKEN;
  if (token) {
    headers.Authorization = `Bearer ${token}`;
  }
  const controller = new AbortController();
  const timer = setTimeout(() => controller.abort(), GITHUB_CONTENTS_TIMEOUT_MS);
  try {
    const response = await fetch(apiUrl, { headers, signal: controller.signal });
    if (!response.ok) {
      // A 404 just means the directory does not exist in this repo — silent.
      if (response.status !== 404) {
        log.warn(`GitHub Contents API ${response.status} for ${sourceName}/${path}`);
      }
      return null;
    }
    const data = await response.json();
    return Array.isArray(data) ? data : [data];
  } catch (err) {
    if (err instanceof Error && err.name === "AbortError") {
      log.warn(`GitHub Contents API timeout for ${sourceName}/${path}`);
    } else {
      log.warn(
        `GitHub Contents API error for ${sourceName}/${path}: ${err instanceof Error ? err.message : String(err)}`
      );
    }
    return null;
  } finally {
    clearTimeout(timer);
  }
}
|
|
197
|
+
/**
 * Relevance of a repo item against the query: 0.9 when the file name matches,
 * 0.6 when only the path matches, 0 otherwise. Matching is case-insensitive
 * substring containment.
 */
function scoreContentMatch(itemName, itemPath, queryLower) {
  if (itemName.toLowerCase().includes(queryLower)) return 0.9;
  if (itemPath.toLowerCase().includes(queryLower)) return 0.6;
  return 0;
}
|
|
204
|
+
/**
 * Convert one GitHub Contents item into a discovery result. Returns null when
 * the item is not a downloadable markdown file, does not match the query, or
 * fails the optional content-type filter.
 */
function buildResultFromContent(source, item, queryLower, typeFilter) {
  const isDownloadableMarkdown = item.type === "file" && item.name.endsWith(".md") && Boolean(item.download_url);
  if (!isDownloadableMarkdown) return null;
  const score = scoreContentMatch(item.name, item.path, queryLower);
  if (score === 0) return null;
  const inferredType = inferContentType(item.path, source.content);
  if (typeFilter && inferredType !== typeFilter) return null;
  return {
    source,
    name: item.name,
    description: `${item.path} in ${source.name}`,
    type: inferredType,
    url: item.download_url,
    score
  };
}
|
|
221
|
+
/**
 * Discover items in a GitHub-backed source. Dispatches to the Code Search API
 * when GITHUB_TOKEN is set AND the user opted in via
 * HARNESS_DISCOVER_USE_CODE_SEARCH=1; otherwise walks the repo through the
 * Contents API under the shared call budget. Non-GitHub URLs yield [].
 */
async function fetchGitHubSource(source, query, options, budget) {
  const match = source.url.match(/github\.com\/([^/]+)\/([^/]+)/);
  if (!match) return [];
  const owner = match[1];
  // Strip a trailing ".git" and anything after a further slash from the repo segment.
  const repo = match[2].replace(/\.git$/, "").replace(/\/.*$/, "");
  const useCodeSearch = Boolean(process.env.GITHUB_TOKEN) && process.env.HARNESS_DISCOVER_USE_CODE_SEARCH === "1";
  if (useCodeSearch) {
    log.debug(`[sources] using code search api for ${source.name} (GITHUB_TOKEN + opt-in detected)`);
    return fetchGitHubSourceViaCodeSearch(source, query, owner, repo, options);
  }
  log.debug(`[sources] using contents api for ${source.name}`);
  return fetchGitHubSourceViaContents(source, query, owner, repo, options, budget);
}
|
|
233
|
+
/**
 * Repo directories worth scanning for this source, derived from its declared
 * content types via CONTENT_TYPE_TO_DIRS. When nothing maps to a directory
 * (e.g. an mcp-only source), default to scanning skills and agents.
 */
function dirsForSource(source) {
  const dirs = new Set(source.content.flatMap((ct) => CONTENT_TYPE_TO_DIRS[ct] ?? []));
  if (dirs.size === 0) {
    dirs.add("skills");
    dirs.add("agents");
  }
  return [...dirs];
}
|
|
246
|
+
/**
 * Walk candidate top-level directories of the repo via the Contents API and
 * collect markdown matches. A "plugins" directory additionally gets one level
 * of topic-subdirectory scanning. All requests draw from a shared API-call
 * budget so one discovery run cannot exhaust the rate limit.
 */
async function fetchGitHubSourceViaContents(source, query, owner, repo, options, budget) {
  const queryLower = query.toLowerCase();
  const typeFilter = options?.type;
  const maxResults = options?.maxResults ?? 10;
  // Use the caller's shared budget when provided, else a fresh per-call one.
  const callBudget = budget ?? { remaining: MAX_API_CALLS_PER_DISCOVERY };
  const results = [];
  const dirsToScan = dirsForSource(source);
  // Budget is decremented synchronously inside .map (before any await), so
  // the reservation happens deterministically even though fetches run in parallel.
  const topLevelTasks = dirsToScan.map((dir) => {
    if (callBudget.remaining <= 0) return Promise.resolve(null);
    callBudget.remaining--;
    return fetchGitHubContents(owner, repo, dir, source.name);
  });
  const topLevelLists = await Promise.all(topLevelTasks);
  for (let i = 0; i < dirsToScan.length; i++) {
    if (results.length >= maxResults) break;
    const dirName = dirsToScan[i];
    const items = topLevelLists[i];
    if (!items) continue;
    // First pass: direct markdown files in the top-level directory.
    for (const item of items) {
      if (results.length >= maxResults) break;
      const result = buildResultFromContent(source, item, queryLower, typeFilter);
      if (result) results.push(result);
    }
    // Second pass (plugins only): descend into topic subdirectories whose
    // names match the query, sequentially, while budget and capacity remain.
    if (dirName === "plugins" && results.length < maxResults) {
      const matchingTopics = items.filter(
        (item) => item.type === "dir" && item.name.toLowerCase().includes(queryLower)
      );
      for (const topic of matchingTopics) {
        if (results.length >= maxResults) break;
        if (callBudget.remaining <= 0) break;
        await scanPluginTopic(
          source,
          owner,
          repo,
          topic.path,
          queryLower,
          typeFilter,
          results,
          maxResults,
          callBudget
        );
      }
    }
  }
  results.sort((a, b) => b.score - a.score);
  return results.slice(0, maxResults);
}
|
|
293
|
+
/**
 * Scan one plugin topic directory: list it, then for each recognized content
 * subdirectory (skills/agents/rules/playbooks/hooks) fetch its files and push
 * matches into `results` (mutated in place). Each listing consumes one unit
 * of `callBudget`; scanning stops as soon as the budget or maxResults is hit.
 */
async function scanPluginTopic(source, owner, repo, topicPath, queryLower, typeFilter, results, maxResults, callBudget) {
  if (callBudget.remaining <= 0) return;
  callBudget.remaining--;
  const entries = await fetchGitHubContents(owner, repo, topicPath, source.name);
  if (!entries) return;
  for (const entry of entries) {
    if (results.length >= maxResults) return;
    if (callBudget.remaining <= 0) return;
    if (entry.type !== "dir") continue;
    const recognized =
      entry.name === "skills" ||
      entry.name === "agents" ||
      entry.name === "rules" ||
      entry.name === "playbooks" ||
      entry.name === "hooks";
    if (!recognized) continue;
    callBudget.remaining--;
    const files = await fetchGitHubContents(owner, repo, entry.path, source.name);
    if (!files) continue;
    for (const file of files) {
      if (results.length >= maxResults) return;
      const candidate = buildResultFromContent(source, file, queryLower, typeFilter);
      if (candidate) results.push(candidate);
    }
  }
}
|
|
314
|
+
/**
 * Discover items in a repo via the GitHub Code Search API (requires
 * GITHUB_TOKEN). Returns up to options.maxResults (default 10) scored
 * results; HTTP errors, timeouts, and network failures are logged and yield
 * whatever was collected so far.
 */
async function fetchGitHubSourceViaCodeSearch(source, query, owner, repo, options) {
  const results = [];
  const apiUrl = `https://api.github.com/search/code?q=${encodeURIComponent(query)}+repo:${owner}/${repo}`;
  const controller = new AbortController();
  const timer = setTimeout(() => controller.abort(), GITHUB_CONTENTS_TIMEOUT_MS);
  try {
    const response = await fetch(apiUrl, {
      headers: {
        Accept: "application/vnd.github.v3+json",
        "User-Agent": "agent-harness",
        Authorization: `Bearer ${process.env.GITHUB_TOKEN}`
      },
      signal: controller.signal
    });
    if (!response.ok) {
      log.warn(`GitHub Code Search API ${response.status} for ${source.name}`);
      return results;
    }
    const data = await response.json();
    if (!data.items) return results;
    const maxResults = options?.maxResults ?? 10;
    const queryLower = query.toLowerCase();
    // Iterate the full item list and stop once maxResults survive the type
    // filter. (Previously the list was sliced to maxResults BEFORE filtering,
    // which undercounted results whenever a type filter was set.)
    for (const item of data.items) {
      if (results.length >= maxResults) break;
      const type = inferContentType(item.path, source.content);
      if (options?.type && type !== options.type) continue;
      // File-name hits rank above path-only hits.
      const nameScore = item.name.toLowerCase().includes(queryLower) ? 0.9 : 0.5;
      results.push({
        source,
        name: item.name,
        description: `${item.path} in ${source.name}`,
        type,
        url: item.html_url,
        score: nameScore
      });
    }
  } catch (err) {
    log.warn(
      `Failed Code Search for ${source.name}: ${err instanceof Error ? err.message : String(err)}`
    );
  } finally {
    clearTimeout(timer);
  }
  return results;
}
|
|
358
|
+
/**
 * Run remote discovery across every configured source (optionally filtered by
 * content type and source names), merge all fulfilled results, dedupe by URL
 * keeping the highest-scored entry, and return the top maxResults (default 20).
 */
async function discoverRemote(harnessDir, query, options) {
  const maxResults = options?.maxResults ?? 20;
  let candidates = loadAllSources(harnessDir);
  if (options?.type) {
    candidates = candidates.filter((s) => s.content.includes(options.type));
  }
  const nameFilters = options?.sourceNames ?? [];
  if (nameFilters.length > 0) {
    candidates = candidates.filter(
      (s) => nameFilters.some((n) => s.name.toLowerCase().includes(n.toLowerCase()))
    );
  }
  // One API-call budget shared by every source in this discovery run.
  const sharedBudget = { remaining: MAX_API_CALLS_PER_DISCOVERY };
  const settled = await Promise.allSettled(
    candidates.map((source) =>
      source.type === "github"
        ? fetchGitHubSource(source, query, options, sharedBudget)
        : Promise.resolve([])
    )
  );
  const allResults = settled.flatMap((r) => (r.status === "fulfilled" ? r.value : []));
  allResults.sort((a, b) => b.score - a.score);
  // Dedupe after sorting so the best-scored entry for each URL survives.
  const seen = new Set();
  const deduped = [];
  for (const result of allResults) {
    if (seen.has(result.url)) continue;
    seen.add(result.url);
    deduped.push(result);
  }
  return deduped.slice(0, maxResults);
}
|
|
397
|
+
/**
 * Coerce raw YAML entries into well-typed source records. Entries missing any
 * of name, url, or type are dropped; array fields default to [], and optional
 * fields are normalized to undefined when absent or malformed.
 */
function normalizeSources(raw) {
  const sources = [];
  for (const entry of raw) {
    if (!entry.name || !entry.url || !entry.type) continue;
    sources.push({
      name: String(entry.name),
      url: String(entry.url),
      type: String(entry.type),
      content: Array.isArray(entry.content) ? entry.content.map((c) => String(c)) : [],
      tags: Array.isArray(entry.tags) ? entry.tags.map(String) : [],
      description: entry.description ? String(entry.description) : void 0,
      stats: entry.stats && typeof entry.stats === "object" ? entry.stats : void 0
    });
  }
  return sources;
}
|
|
408
|
+
/**
 * Score a source against the query, clamped to [0, 1]. Exact/partial name
 * hits weigh the most, then content-type and tag matches, then description.
 * Only when no field matched directly does a per-word fallback scan all
 * searchable text.
 */
function computeSourceScore(source, queryLower, queryWords) {
  let score = 0;
  const nameLower = source.name.toLowerCase();
  if (nameLower === queryLower) {
    score += 1;
  } else if (nameLower.includes(queryLower)) {
    score += 0.8;
  }
  const descLower = (source.description ?? "").toLowerCase();
  if (descLower.includes(queryLower)) {
    score += 0.5;
  }
  for (const tag of source.tags) {
    const tagLower = tag.toLowerCase();
    if (tagLower === queryLower) {
      score += 0.7;
    } else if (tagLower.includes(queryLower)) {
      score += 0.3;
    }
  }
  for (const contentType of source.content) {
    if (contentType === queryLower) {
      score += 0.8;
    }
  }
  // Word-level fallback: only fires when nothing above matched at all.
  if (score === 0 && queryWords.length > 0) {
    const haystack = `${source.name} ${source.description ?? ""} ${source.tags.join(" ")} ${source.content.join(" ")}`.toLowerCase();
    const hits = queryWords.filter((word) => haystack.includes(word)).length;
    if (hits > 0) {
      score += hits / queryWords.length * 0.5;
    }
  }
  return Math.min(score, 1);
}
|
|
443
|
+
/**
 * Infer a content type from a repo file path by keyword, falling back to the
 * source's first declared content type (or "skills" when it declares none).
 */
function inferContentType(filePath, sourceContentTypes) {
  const pathLower = filePath.toLowerCase();
  // The path is lowercased, so the single "skill" check also covers SKILL.md;
  // the previous extra `pathLower.includes("SKILL.md")` test could never match.
  if (pathLower.includes("skill")) return "skills";
  if (pathLower.includes("agent")) return "agents";
  if (pathLower.includes("rule")) return "rules";
  if (pathLower.includes("playbook")) return "playbooks";
  if (pathLower.includes("hook")) return "hooks";
  if (pathLower.includes("template")) return "templates";
  if (pathLower.includes("mcp") || pathLower.includes("server")) return "mcp";
  if (pathLower.includes("plugin")) return "plugins";
  return sourceContentTypes[0] ?? "skills";
}
|
|
455
|
+
|
|
456
|
+
export {
|
|
457
|
+
loadShippedSources,
|
|
458
|
+
loadUserSources,
|
|
459
|
+
saveUserSources,
|
|
460
|
+
loadAllSources,
|
|
461
|
+
addSource,
|
|
462
|
+
removeSource,
|
|
463
|
+
discoverSources,
|
|
464
|
+
getSourcesForType,
|
|
465
|
+
getSourcesSummary,
|
|
466
|
+
fetchGitHubSource,
|
|
467
|
+
discoverRemote
|
|
468
|
+
};
|
|
469
|
+
//# sourceMappingURL=chunk-RC6MEZB6.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/runtime/sources.ts"],"sourcesContent":["import { existsSync, readFileSync, writeFileSync, mkdirSync } from 'fs';\nimport { join, dirname } from 'path';\nimport { fileURLToPath } from 'url';\nimport { parse as parseYaml, stringify as stringifyYaml } from 'yaml';\nimport { withFileLockSync } from './file-lock.js';\nimport { log } from '../core/logger.js';\n\n// ─── Types ───────────────────────────────────────────────────────────────────\n\nexport type SourceType = 'github' | 'registry' | 'api';\n\nexport type ContentType = 'skills' | 'agents' | 'rules' | 'playbooks' | 'hooks' | 'templates' | 'mcp' | 'plugins';\n\nexport interface Source {\n /** Display name */\n name: string;\n /** URL — GitHub repo, registry API, or endpoint */\n url: string;\n /** Source type */\n type: SourceType;\n /** Content types provided */\n content: ContentType[];\n /** Searchable tags */\n tags: string[];\n /** Description of the source */\n description?: string;\n /** Optional stats (e.g., { skills: 31, agents: 19 }) */\n stats?: Record<string, number>;\n}\n\nexport interface SourcesFile {\n version: string;\n sources: Source[];\n}\n\nexport interface SourceDiscoveryResult {\n /** Source the item came from */\n source: Source;\n /** Item name/title */\n name: string;\n /** Item description */\n description: string;\n /** Item type (skill, agent, rule, etc.) 
*/\n type: ContentType;\n /** URL to the item (file or page) */\n url: string;\n /** Match relevance score (0-1) */\n score: number;\n}\n\nexport interface SourceDiscoveryOptions {\n /** Filter by content type */\n type?: ContentType;\n /** Maximum results */\n maxResults?: number;\n /** Only search these sources (by name) */\n sourceNames?: string[];\n}\n\n// ─── Constants ───────────────────────────────────────────────────────────────\n\nconst USER_SOURCES_FILE = 'sources.yaml';\nconst SOURCES_DIR = 'memory';\n\n// ─── Source Loading ──────────────────────────────────────────────────────────\n\n/**\n * Get the path to the shipped sources.yaml bundled with the package.\n */\nfunction getShippedSourcesPath(): string {\n let dir = dirname(fileURLToPath(import.meta.url));\n for (let i = 0; i < 5; i++) {\n const candidate = join(dir, 'sources.yaml');\n if (existsSync(candidate)) return candidate;\n dir = dirname(dir);\n }\n // Fallback\n return join(dirname(dirname(fileURLToPath(import.meta.url))), 'sources.yaml');\n}\n\n/**\n * Load the shipped sources.yaml from the package root.\n */\nexport function loadShippedSources(): Source[] {\n const path = getShippedSourcesPath();\n if (!existsSync(path)) return [];\n\n try {\n const raw = readFileSync(path, 'utf-8');\n const parsed = parseYaml(raw) as { version?: string; sources?: Array<Record<string, unknown>> };\n return normalizeSources(parsed.sources ?? []);\n } catch (err) {\n log.warn(`Failed to load shipped sources: ${err instanceof Error ? 
err.message : String(err)}`);\n return [];\n }\n}\n\n/**\n * Load user-added sources from the harness memory directory.\n */\nexport function loadUserSources(harnessDir: string): Source[] {\n const userPath = join(harnessDir, SOURCES_DIR, USER_SOURCES_FILE);\n if (!existsSync(userPath)) return [];\n\n try {\n const raw = readFileSync(userPath, 'utf-8');\n const parsed = parseYaml(raw) as { version?: string; sources?: Array<Record<string, unknown>> };\n return normalizeSources(parsed.sources ?? []);\n } catch (err) {\n log.warn(`Failed to load user sources: ${err instanceof Error ? err.message : String(err)}`);\n return [];\n }\n}\n\n/**\n * Save user sources to the harness memory directory.\n */\nexport function saveUserSources(harnessDir: string, sources: Source[]): void {\n const memDir = join(harnessDir, SOURCES_DIR);\n if (!existsSync(memDir)) mkdirSync(memDir, { recursive: true });\n\n const userPath = join(memDir, USER_SOURCES_FILE);\n const data: SourcesFile = { version: '1.0', sources };\n\n withFileLockSync(harnessDir, userPath, () => {\n writeFileSync(userPath, stringifyYaml(data), 'utf-8');\n });\n}\n\n/**\n * Load all sources: shipped + user-added, deduplicated by name.\n */\nexport function loadAllSources(harnessDir: string): Source[] {\n const shipped = loadShippedSources();\n const user = loadUserSources(harnessDir);\n\n // User sources override shipped sources with the same name\n const byName = new Map<string, Source>();\n for (const s of shipped) {\n byName.set(s.name.toLowerCase(), s);\n }\n for (const s of user) {\n byName.set(s.name.toLowerCase(), s);\n }\n\n return Array.from(byName.values());\n}\n\n// ─── Source Management ───────────────────────────────────────────────────────\n\n/**\n * Add a new source to the user's sources list.\n * Returns the added source, or null if it already exists.\n */\nexport function addSource(\n harnessDir: string,\n source: Omit<Source, 'tags'> & { tags?: string[] },\n): Source | null {\n const userSources = 
loadUserSources(harnessDir);\n\n // Check for duplicate by name\n const exists = userSources.find(\n (s) => s.name.toLowerCase() === source.name.toLowerCase(),\n );\n if (exists) return null;\n\n const normalized: Source = {\n name: source.name,\n url: source.url,\n type: source.type,\n content: source.content,\n tags: source.tags ?? [],\n description: source.description,\n stats: source.stats,\n };\n\n userSources.push(normalized);\n saveUserSources(harnessDir, userSources);\n return normalized;\n}\n\n/**\n * Remove a source by name from the user's sources list.\n * Returns true if removed, false if not found.\n */\nexport function removeSource(harnessDir: string, name: string): boolean {\n const userSources = loadUserSources(harnessDir);\n const index = userSources.findIndex(\n (s) => s.name.toLowerCase() === name.toLowerCase(),\n );\n\n if (index === -1) return false;\n\n userSources.splice(index, 1);\n saveUserSources(harnessDir, userSources);\n return true;\n}\n\n// ─── Discovery ───────────────────────────────────────────────────────────────\n\n/**\n * Search all sources for content matching a query.\n *\n * This performs local matching against source metadata (name, description,\n * tags, content types). For deeper search, each source type has its own\n * fetcher (GitHub API, registry API, etc.).\n *\n * @param harnessDir - Harness directory\n * @param query - Search query (text or content type)\n * @param options - Discovery options\n * @returns Ranked results from all matching sources\n */\nexport function discoverSources(\n harnessDir: string,\n query: string,\n options?: SourceDiscoveryOptions,\n): SourceDiscoveryResult[] {\n const sources = loadAllSources(harnessDir);\n const results: SourceDiscoveryResult[] = [];\n const maxResults = options?.maxResults ?? 
20;\n const queryLower = query.toLowerCase();\n const queryWords = queryLower.split(/\\s+/).filter((w) => w.length > 1);\n\n for (const source of sources) {\n // Filter by source names if specified\n if (options?.sourceNames && options.sourceNames.length > 0) {\n const matches = options.sourceNames.some(\n (n) => source.name.toLowerCase().includes(n.toLowerCase()),\n );\n if (!matches) continue;\n }\n\n // Filter by content type\n if (options?.type && !source.content.includes(options.type)) {\n continue;\n }\n\n // Score the source against the query\n const score = computeSourceScore(source, queryLower, queryWords);\n\n if (score > 0) {\n // For each content type this source provides, create a result\n const types = options?.type\n ? [options.type]\n : source.content;\n\n for (const type of types) {\n results.push({\n source,\n name: source.name,\n description: source.description ?? '',\n type,\n url: source.url,\n score,\n });\n }\n }\n }\n\n // Sort by score descending, then by name\n results.sort((a, b) => {\n if (b.score !== a.score) return b.score - a.score;\n return a.name.localeCompare(b.name);\n });\n\n return results.slice(0, maxResults);\n}\n\n/**\n * Get all sources that provide a specific content type.\n */\nexport function getSourcesForType(\n harnessDir: string,\n type: ContentType,\n): Source[] {\n const sources = loadAllSources(harnessDir);\n return sources.filter((s) => s.content.includes(type));\n}\n\n/**\n * Get a summary of all known sources grouped by content type.\n */\nexport function getSourcesSummary(harnessDir: string): Record<ContentType, Source[]> {\n const sources = loadAllSources(harnessDir);\n const summary: Record<string, Source[]> = {};\n\n const allTypes: ContentType[] = ['skills', 'agents', 'rules', 'playbooks', 'hooks', 'templates', 'mcp', 'plugins'];\n for (const type of allTypes) {\n summary[type] = sources.filter((s) => s.content.includes(type));\n }\n\n return summary as Record<ContentType, Source[]>;\n}\n\n// ─── Remote 
Discovery (GitHub) ───────────────────────────────────────────────\n\n/**\n * GitHub Contents API entry shape.\n * https://docs.github.com/en/rest/repos/contents\n */\ninterface GitHubContentItem {\n name: string;\n path: string;\n type: 'file' | 'dir' | 'symlink' | 'submodule';\n download_url: string | null;\n html_url: string;\n}\n\n/**\n * Maps a Source's declared `content:` types to the GitHub directories we\n * should scan for them. Multiple content types may map to the same dir.\n * `plugins` and `templates` are always allowed because layouts vary.\n */\nconst CONTENT_TYPE_TO_DIRS: Record<ContentType, string[]> = {\n skills: ['skills'],\n agents: ['agents'],\n rules: ['rules'],\n playbooks: ['playbooks'],\n hooks: ['hooks'],\n templates: ['templates'],\n mcp: [], // MCP servers come from registries, not GitHub repo dirs\n plugins: ['plugins'],\n};\n\n/** Hard cap on Contents API calls per discoverRemote invocation, across all sources. */\nconst MAX_API_CALLS_PER_DISCOVERY = 50;\n\nconst GITHUB_CONTENTS_TIMEOUT_MS = 10000;\n\n/**\n * Fetch a single Contents API endpoint with timeout and standard headers.\n * Returns parsed JSON or null on any error (logged at warn level).\n */\nasync function fetchGitHubContents(\n owner: string,\n repo: string,\n path: string,\n sourceName: string,\n): Promise<GitHubContentItem[] | null> {\n const apiUrl = `https://api.github.com/repos/${owner}/${repo}/contents/${path}`;\n const headers: Record<string, string> = {\n Accept: 'application/vnd.github.v3+json',\n 'User-Agent': 'agent-harness',\n };\n // Authenticated requests get a higher rate limit (5000/hr vs 60/hr).\n // Optional — Contents API works fine without it for new users.\n if (process.env.GITHUB_TOKEN) {\n headers.Authorization = `Bearer ${process.env.GITHUB_TOKEN}`;\n }\n\n const controller = new AbortController();\n const timer = setTimeout(() => controller.abort(), GITHUB_CONTENTS_TIMEOUT_MS);\n\n try {\n const response = await fetch(apiUrl, { headers, signal: 
controller.signal });\n if (!response.ok) {\n // 404 on a missing directory is expected (not all repos have all dirs).\n // Other statuses get logged so they're visible during discovery debugging.\n if (response.status !== 404) {\n log.warn(`GitHub Contents API ${response.status} for ${sourceName}/${path}`);\n }\n return null;\n }\n const data = (await response.json()) as GitHubContentItem[] | GitHubContentItem;\n // Endpoint returns an object for files, an array for directories.\n return Array.isArray(data) ? data : [data];\n } catch (err) {\n if (err instanceof Error && err.name === 'AbortError') {\n log.warn(`GitHub Contents API timeout for ${sourceName}/${path}`);\n } else {\n log.warn(\n `GitHub Contents API error for ${sourceName}/${path}: ${err instanceof Error ? err.message : String(err)}`,\n );\n }\n return null;\n } finally {\n clearTimeout(timer);\n }\n}\n\n/**\n * Score a candidate file against a search query.\n * Higher scores = better match. 0 means no match (skipped).\n */\nfunction scoreContentMatch(itemName: string, itemPath: string, queryLower: string): number {\n const nameLower = itemName.toLowerCase();\n const pathLower = itemPath.toLowerCase();\n if (nameLower.includes(queryLower)) return 0.9;\n if (pathLower.includes(queryLower)) return 0.6;\n return 0;\n}\n\n/**\n * Convert a single matched Contents API entry into a SourceDiscoveryResult.\n * Returns null if the entry doesn't match the query or content-type filter.\n */\nfunction buildResultFromContent(\n source: Source,\n item: GitHubContentItem,\n queryLower: string,\n typeFilter: ContentType | undefined,\n): SourceDiscoveryResult | null {\n if (item.type !== 'file') return null;\n if (!item.name.endsWith('.md')) return null;\n if (!item.download_url) return null;\n\n const score = scoreContentMatch(item.name, item.path, queryLower);\n if (score === 0) return null;\n\n const inferredType = inferContentType(item.path, source.content);\n if (typeFilter && inferredType !== typeFilter) return 
null;\n\n return {\n source,\n name: item.name,\n description: `${item.path} in ${source.name}`,\n type: inferredType,\n url: item.download_url,\n score,\n };\n}\n\n/**\n * Fetch content listing from a GitHub source via the Contents API.\n *\n * Uses `GET /repos/{owner}/{repo}/contents/{path}` which works without\n * authentication (60 req/hr limit). Recurses one level into `plugins/*`\n * to discover wshobson-style nested layouts.\n *\n * Falls back to the legacy Code Search API only if `GITHUB_TOKEN` is set\n * AND `HARNESS_DISCOVER_USE_CODE_SEARCH=1` is opted in via env. Code Search\n * always requires auth (returns 401 unauthenticated as of 2023).\n *\n * @param source - GitHub source definition\n * @param query - Search query (case-insensitive substring match)\n * @param options - Discovery options\n * @returns Discovery results from the GitHub repo\n */\nexport async function fetchGitHubSource(\n source: Source,\n query: string,\n options?: SourceDiscoveryOptions,\n budget?: CallBudget,\n): Promise<SourceDiscoveryResult[]> {\n // Parse GitHub URL to extract owner/repo\n const match = source.url.match(/github\\.com\\/([^/]+)\\/([^/]+)/);\n if (!match) return [];\n\n const [, owner, repoRaw] = match;\n // Strip trailing .git or path fragments\n const repo = repoRaw.replace(/\\.git$/, '').replace(/\\/.*$/, '');\n\n // Optional opt-in to legacy Code Search path. Default is Contents API.\n if (process.env.GITHUB_TOKEN && process.env.HARNESS_DISCOVER_USE_CODE_SEARCH === '1') {\n log.debug(`[sources] using code search api for ${source.name} (GITHUB_TOKEN + opt-in detected)`);\n return fetchGitHubSourceViaCodeSearch(source, query, owner, repo, options);\n }\n\n log.debug(`[sources] using contents api for ${source.name}`);\n return fetchGitHubSourceViaContents(source, query, owner, repo, options, budget);\n}\n\n/**\n * A budget for Contents API calls. 
Shared across a single discoverRemote\n * invocation so multiple sources can't collectively exhaust the rate limit.\n */\ninterface CallBudget {\n remaining: number;\n}\n\n/**\n * Determine which top-level directories to scan for a given source.\n * Scopes by the source's declared `content:` types — if a source only\n * provides skills, we don't scan agents/, rules/, etc.\n */\nfunction dirsForSource(source: Source): string[] {\n const dirs = new Set<string>();\n for (const ct of source.content) {\n for (const d of CONTENT_TYPE_TO_DIRS[ct] ?? []) {\n dirs.add(d);\n }\n }\n // If a source has no listed content types (shouldn't happen), fall back\n // to a safe default of skills+agents — the most common content types.\n if (dirs.size === 0) {\n dirs.add('skills');\n dirs.add('agents');\n }\n return Array.from(dirs);\n}\n\n/**\n * Discovery via the unauthenticated GitHub Contents API.\n *\n * Strategy (designed to fit within 60 req/hr unauth):\n * 1. Scan only the dirs the source advertises in its `content:` field.\n * 2. For plugins/, list topic names ONCE, filter topics by query match,\n * only recurse into matching topics. Never enumerate all topics.\n * 3. Share a call budget across the whole discovery so a single source\n * can't exhaust the rate limit for siblings.\n */\nasync function fetchGitHubSourceViaContents(\n source: Source,\n query: string,\n owner: string,\n repo: string,\n options?: SourceDiscoveryOptions,\n budget?: CallBudget,\n): Promise<SourceDiscoveryResult[]> {\n const queryLower = query.toLowerCase();\n const typeFilter = options?.type;\n const maxResults = options?.maxResults ?? 10;\n const callBudget: CallBudget = budget ?? 
{ remaining: MAX_API_CALLS_PER_DISCOVERY };\n const results: SourceDiscoveryResult[] = [];\n\n const dirsToScan = dirsForSource(source);\n\n // Scan top-level dirs in parallel — but only the ones this source declares.\n const topLevelTasks = dirsToScan.map((dir) => {\n if (callBudget.remaining <= 0) return Promise.resolve(null);\n callBudget.remaining--;\n return fetchGitHubContents(owner, repo, dir, source.name);\n });\n const topLevelLists = await Promise.all(topLevelTasks);\n\n for (let i = 0; i < dirsToScan.length; i++) {\n if (results.length >= maxResults) break;\n const dirName = dirsToScan[i];\n const items = topLevelLists[i];\n if (!items) continue;\n\n // Direct file matches first.\n for (const item of items) {\n if (results.length >= maxResults) break;\n const result = buildResultFromContent(source, item, queryLower, typeFilter);\n if (result) results.push(result);\n }\n\n // For plugins/, recurse selectively: only into topics whose name matches the query.\n if (dirName === 'plugins' && results.length < maxResults) {\n const matchingTopics = items.filter(\n (item) => item.type === 'dir' && item.name.toLowerCase().includes(queryLower),\n );\n for (const topic of matchingTopics) {\n if (results.length >= maxResults) break;\n if (callBudget.remaining <= 0) break;\n await scanPluginTopic(\n source,\n owner,\n repo,\n topic.path,\n queryLower,\n typeFilter,\n results,\n maxResults,\n callBudget,\n );\n }\n }\n }\n\n // Sort by score descending so the best matches come first.\n results.sort((a, b) => b.score - a.score);\n return results.slice(0, maxResults);\n}\n\n/**\n * Recurse one level into a single matching plugins/<topic>/ directory,\n * inspecting the harness primitive subdirs inside it (agents/, skills/, etc.).\n * Decrements the shared call budget. 
Stops on budget exhaustion or maxResults.\n */\nasync function scanPluginTopic(\n source: Source,\n owner: string,\n repo: string,\n topicPath: string,\n queryLower: string,\n typeFilter: ContentType | undefined,\n results: SourceDiscoveryResult[],\n maxResults: number,\n callBudget: CallBudget,\n): Promise<void> {\n if (callBudget.remaining <= 0) return;\n callBudget.remaining--;\n const subDirs = await fetchGitHubContents(owner, repo, topicPath, source.name);\n if (!subDirs) return;\n\n for (const sub of subDirs) {\n if (results.length >= maxResults) return;\n if (callBudget.remaining <= 0) return;\n if (sub.type !== 'dir') continue;\n // Only descend into harness primitive subdirs.\n const validSubdir =\n sub.name === 'skills' ||\n sub.name === 'agents' ||\n sub.name === 'rules' ||\n sub.name === 'playbooks' ||\n sub.name === 'hooks';\n if (!validSubdir) continue;\n\n callBudget.remaining--;\n const files = await fetchGitHubContents(owner, repo, sub.path, source.name);\n if (!files) continue;\n\n for (const file of files) {\n if (results.length >= maxResults) return;\n const result = buildResultFromContent(source, file, queryLower, typeFilter);\n if (result) results.push(result);\n }\n }\n}\n\n/**\n * Legacy Code Search API path. Requires authentication (returns 401 unauthenticated\n * as of 2023). Kept as an opt-in fallback for power users with GITHUB_TOKEN who\n * want richer query semantics. 
Never the default.\n */\nasync function fetchGitHubSourceViaCodeSearch(\n source: Source,\n query: string,\n owner: string,\n repo: string,\n options?: SourceDiscoveryOptions,\n): Promise<SourceDiscoveryResult[]> {\n const results: SourceDiscoveryResult[] = [];\n const apiUrl = `https://api.github.com/search/code?q=${encodeURIComponent(query)}+repo:${owner}/${repo}`;\n\n const controller = new AbortController();\n const timer = setTimeout(() => controller.abort(), GITHUB_CONTENTS_TIMEOUT_MS);\n\n try {\n const response = await fetch(apiUrl, {\n headers: {\n Accept: 'application/vnd.github.v3+json',\n 'User-Agent': 'agent-harness',\n Authorization: `Bearer ${process.env.GITHUB_TOKEN}`,\n },\n signal: controller.signal,\n });\n\n if (!response.ok) {\n log.warn(`GitHub Code Search API ${response.status} for ${source.name}`);\n return results;\n }\n\n const data = (await response.json()) as {\n items?: Array<{ name: string; path: string; html_url: string }>;\n };\n if (!data.items) return results;\n\n const maxResults = options?.maxResults ?? 10;\n const queryLower = query.toLowerCase();\n\n for (const item of data.items.slice(0, maxResults)) {\n const type = inferContentType(item.path, source.content);\n if (options?.type && type !== options.type) continue;\n const nameScore = item.name.toLowerCase().includes(queryLower) ? 0.9 : 0.5;\n results.push({\n source,\n name: item.name,\n description: `${item.path} in ${source.name}`,\n type,\n url: item.html_url,\n score: nameScore,\n });\n }\n } catch (err) {\n log.warn(\n `Failed Code Search for ${source.name}: ${err instanceof Error ? 
err.message : String(err)}`,\n );\n } finally {\n clearTimeout(timer);\n }\n\n return results;\n}\n\n/**\n * Perform a full remote discovery across all sources.\n * Searches GitHub repos and registries in parallel.\n *\n * @param harnessDir - Harness directory\n * @param query - Search query\n * @param options - Discovery options\n * @returns All discovery results, merged and ranked\n */\nexport async function discoverRemote(\n harnessDir: string,\n query: string,\n options?: SourceDiscoveryOptions,\n): Promise<SourceDiscoveryResult[]> {\n const sources = loadAllSources(harnessDir);\n const maxResults = options?.maxResults ?? 20;\n\n // Filter sources\n let filtered = sources;\n if (options?.type) {\n filtered = sources.filter((s) => s.content.includes(options.type!));\n }\n if (options?.sourceNames && options.sourceNames.length > 0) {\n filtered = filtered.filter((s) =>\n options.sourceNames!.some((n) =>\n s.name.toLowerCase().includes(n.toLowerCase()),\n ),\n );\n }\n\n // Shared call budget across ALL sources for this discovery, so a single\n // big repo can't exhaust the GitHub rate limit for its siblings.\n const sharedBudget: CallBudget = { remaining: MAX_API_CALLS_PER_DISCOVERY };\n\n // Search each source in parallel\n const promises = filtered.map(async (source) => {\n if (source.type === 'github') {\n return fetchGitHubSource(source, query, options, sharedBudget);\n }\n // Registry sources use the existing MCP registry search\n // API sources would need custom handlers\n return [] as SourceDiscoveryResult[];\n });\n\n const results = await Promise.allSettled(promises);\n const allResults: SourceDiscoveryResult[] = [];\n\n for (const result of results) {\n if (result.status === 'fulfilled') {\n allResults.push(...result.value);\n }\n }\n\n // Sort by score, deduplicate by URL\n allResults.sort((a, b) => b.score - a.score);\n const seen = new Set<string>();\n const deduped: SourceDiscoveryResult[] = [];\n for (const r of allResults) {\n if 
(!seen.has(r.url)) {\n seen.add(r.url);\n deduped.push(r);\n }\n }\n\n return deduped.slice(0, maxResults);\n}\n\n// ─── Helpers ─────────────────────────────────────────────────────────────────\n\nfunction normalizeSources(raw: Array<Record<string, unknown>>): Source[] {\n return raw\n .filter((s) => s.name && s.url && s.type)\n .map((s) => ({\n name: String(s.name),\n url: String(s.url),\n type: String(s.type) as SourceType,\n content: Array.isArray(s.content)\n ? (s.content as string[]).map((c) => String(c) as ContentType)\n : [],\n tags: Array.isArray(s.tags) ? (s.tags as string[]).map(String) : [],\n description: s.description ? String(s.description) : undefined,\n stats: s.stats && typeof s.stats === 'object'\n ? s.stats as Record<string, number>\n : undefined,\n }));\n}\n\nfunction computeSourceScore(\n source: Source,\n queryLower: string,\n queryWords: string[],\n): number {\n let score = 0;\n\n // Exact name match\n if (source.name.toLowerCase() === queryLower) {\n score += 1.0;\n } else if (source.name.toLowerCase().includes(queryLower)) {\n score += 0.8;\n }\n\n // Description match\n const desc = (source.description ?? '').toLowerCase();\n if (desc.includes(queryLower)) {\n score += 0.5;\n }\n\n // Tag match\n for (const tag of source.tags) {\n if (tag.toLowerCase() === queryLower) {\n score += 0.7;\n } else if (tag.toLowerCase().includes(queryLower)) {\n score += 0.3;\n }\n }\n\n // Content type match\n for (const ct of source.content) {\n if (ct === queryLower) {\n score += 0.8;\n }\n }\n\n // Word overlap\n if (queryWords.length > 0 && score === 0) {\n let wordHits = 0;\n const allText = `${source.name} ${source.description ?? 
''} ${source.tags.join(' ')} ${source.content.join(' ')}`.toLowerCase();\n for (const word of queryWords) {\n if (allText.includes(word)) wordHits++;\n }\n if (wordHits > 0) {\n score += (wordHits / queryWords.length) * 0.5;\n }\n }\n\n return Math.min(score, 1.0);\n}\n\nfunction inferContentType(filePath: string, sourceContentTypes: ContentType[]): ContentType {\n const pathLower = filePath.toLowerCase();\n\n if (pathLower.includes('skill') || pathLower.includes('SKILL.md')) return 'skills';\n if (pathLower.includes('agent')) return 'agents';\n if (pathLower.includes('rule')) return 'rules';\n if (pathLower.includes('playbook')) return 'playbooks';\n if (pathLower.includes('hook')) return 'hooks';\n if (pathLower.includes('template')) return 'templates';\n if (pathLower.includes('mcp') || pathLower.includes('server')) return 'mcp';\n if (pathLower.includes('plugin')) return 'plugins';\n\n // Default to the first content type of the source\n return sourceContentTypes[0] ?? 'skills';\n}\n"],"mappings":";;;;;;;;;;AAAA,SAAS,YAAY,cAAc,eAAe,iBAAiB;AACnE,SAAS,MAAM,eAAe;AAC9B,SAAS,qBAAqB;AAC9B,SAAS,SAAS,WAAW,aAAa,qBAAqB;AA0D/D,IAAM,oBAAoB;AAC1B,IAAM,cAAc;AAOpB,SAAS,wBAAgC;AACvC,MAAI,MAAM,QAAQ,cAAc,YAAY,GAAG,CAAC;AAChD,WAAS,IAAI,GAAG,IAAI,GAAG,KAAK;AAC1B,UAAM,YAAY,KAAK,KAAK,cAAc;AAC1C,QAAI,WAAW,SAAS,EAAG,QAAO;AAClC,UAAM,QAAQ,GAAG;AAAA,EACnB;AAEA,SAAO,KAAK,QAAQ,QAAQ,cAAc,YAAY,GAAG,CAAC,CAAC,GAAG,cAAc;AAC9E;AAKO,SAAS,qBAA+B;AAC7C,QAAM,OAAO,sBAAsB;AACnC,MAAI,CAAC,WAAW,IAAI,EAAG,QAAO,CAAC;AAE/B,MAAI;AACF,UAAM,MAAM,aAAa,MAAM,OAAO;AACtC,UAAM,SAAS,UAAU,GAAG;AAC5B,WAAO,iBAAiB,OAAO,WAAW,CAAC,CAAC;AAAA,EAC9C,SAAS,KAAK;AACZ,QAAI,KAAK,mCAAmC,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC,EAAE;AAC9F,WAAO,CAAC;AAAA,EACV;AACF;AAKO,SAAS,gBAAgB,YAA8B;AAC5D,QAAM,WAAW,KAAK,YAAY,aAAa,iBAAiB;AAChE,MAAI,CAAC,WAAW,QAAQ,EAAG,QAAO,CAAC;AAEnC,MAAI;AACF,UAAM,MAAM,aAAa,UAAU,OAAO;AAC1C,UAAM,SAAS,UAAU,GAAG;AAC5B,WAAO,iBAAiB,OAAO,WAAW,CAAC,CAAC;AAAA,EAC9C,SAAS,KAAK;AACZ,QAAI,KAAK,gCAAgC,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG
,CAAC,EAAE;AAC3F,WAAO,CAAC;AAAA,EACV;AACF;AAKO,SAAS,gBAAgB,YAAoB,SAAyB;AAC3E,QAAM,SAAS,KAAK,YAAY,WAAW;AAC3C,MAAI,CAAC,WAAW,MAAM,EAAG,WAAU,QAAQ,EAAE,WAAW,KAAK,CAAC;AAE9D,QAAM,WAAW,KAAK,QAAQ,iBAAiB;AAC/C,QAAM,OAAoB,EAAE,SAAS,OAAO,QAAQ;AAEpD,mBAAiB,YAAY,UAAU,MAAM;AAC3C,kBAAc,UAAU,cAAc,IAAI,GAAG,OAAO;AAAA,EACtD,CAAC;AACH;AAKO,SAAS,eAAe,YAA8B;AAC3D,QAAM,UAAU,mBAAmB;AACnC,QAAM,OAAO,gBAAgB,UAAU;AAGvC,QAAM,SAAS,oBAAI,IAAoB;AACvC,aAAW,KAAK,SAAS;AACvB,WAAO,IAAI,EAAE,KAAK,YAAY,GAAG,CAAC;AAAA,EACpC;AACA,aAAW,KAAK,MAAM;AACpB,WAAO,IAAI,EAAE,KAAK,YAAY,GAAG,CAAC;AAAA,EACpC;AAEA,SAAO,MAAM,KAAK,OAAO,OAAO,CAAC;AACnC;AAQO,SAAS,UACd,YACA,QACe;AACf,QAAM,cAAc,gBAAgB,UAAU;AAG9C,QAAM,SAAS,YAAY;AAAA,IACzB,CAAC,MAAM,EAAE,KAAK,YAAY,MAAM,OAAO,KAAK,YAAY;AAAA,EAC1D;AACA,MAAI,OAAQ,QAAO;AAEnB,QAAM,aAAqB;AAAA,IACzB,MAAM,OAAO;AAAA,IACb,KAAK,OAAO;AAAA,IACZ,MAAM,OAAO;AAAA,IACb,SAAS,OAAO;AAAA,IAChB,MAAM,OAAO,QAAQ,CAAC;AAAA,IACtB,aAAa,OAAO;AAAA,IACpB,OAAO,OAAO;AAAA,EAChB;AAEA,cAAY,KAAK,UAAU;AAC3B,kBAAgB,YAAY,WAAW;AACvC,SAAO;AACT;AAMO,SAAS,aAAa,YAAoB,MAAuB;AACtE,QAAM,cAAc,gBAAgB,UAAU;AAC9C,QAAM,QAAQ,YAAY;AAAA,IACxB,CAAC,MAAM,EAAE,KAAK,YAAY,MAAM,KAAK,YAAY;AAAA,EACnD;AAEA,MAAI,UAAU,GAAI,QAAO;AAEzB,cAAY,OAAO,OAAO,CAAC;AAC3B,kBAAgB,YAAY,WAAW;AACvC,SAAO;AACT;AAgBO,SAAS,gBACd,YACA,OACA,SACyB;AACzB,QAAM,UAAU,eAAe,UAAU;AACzC,QAAM,UAAmC,CAAC;AAC1C,QAAM,aAAa,SAAS,cAAc;AAC1C,QAAM,aAAa,MAAM,YAAY;AACrC,QAAM,aAAa,WAAW,MAAM,KAAK,EAAE,OAAO,CAAC,MAAM,EAAE,SAAS,CAAC;AAErE,aAAW,UAAU,SAAS;AAE5B,QAAI,SAAS,eAAe,QAAQ,YAAY,SAAS,GAAG;AAC1D,YAAM,UAAU,QAAQ,YAAY;AAAA,QAClC,CAAC,MAAM,OAAO,KAAK,YAAY,EAAE,SAAS,EAAE,YAAY,CAAC;AAAA,MAC3D;AACA,UAAI,CAAC,QAAS;AAAA,IAChB;AAGA,QAAI,SAAS,QAAQ,CAAC,OAAO,QAAQ,SAAS,QAAQ,IAAI,GAAG;AAC3D;AAAA,IACF;AAGA,UAAM,QAAQ,mBAAmB,QAAQ,YAAY,UAAU;AAE/D,QAAI,QAAQ,GAAG;AAEb,YAAM,QAAQ,SAAS,OACnB,CAAC,QAAQ,IAAI,IACb,OAAO;AAEX,iBAAW,QAAQ,OAAO;AACxB,gBAAQ,KAAK;AAAA,UACX;AAAA,UACA,MAAM,OAAO;AAAA,UACb,aAAa,OAAO,eAAe;AAAA,UACnC;AAAA,UACA,KAAK,OAAO;AAAA,UACZ;AAAA,QACF,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AAGA,UAAQ,KAAK,CA
AC,GAAG,MAAM;AACrB,QAAI,EAAE,UAAU,EAAE,MAAO,QAAO,EAAE,QAAQ,EAAE;AAC5C,WAAO,EAAE,KAAK,cAAc,EAAE,IAAI;AAAA,EACpC,CAAC;AAED,SAAO,QAAQ,MAAM,GAAG,UAAU;AACpC;AAKO,SAAS,kBACd,YACA,MACU;AACV,QAAM,UAAU,eAAe,UAAU;AACzC,SAAO,QAAQ,OAAO,CAAC,MAAM,EAAE,QAAQ,SAAS,IAAI,CAAC;AACvD;AAKO,SAAS,kBAAkB,YAAmD;AACnF,QAAM,UAAU,eAAe,UAAU;AACzC,QAAM,UAAoC,CAAC;AAE3C,QAAM,WAA0B,CAAC,UAAU,UAAU,SAAS,aAAa,SAAS,aAAa,OAAO,SAAS;AACjH,aAAW,QAAQ,UAAU;AAC3B,YAAQ,IAAI,IAAI,QAAQ,OAAO,CAAC,MAAM,EAAE,QAAQ,SAAS,IAAI,CAAC;AAAA,EAChE;AAEA,SAAO;AACT;AAqBA,IAAM,uBAAsD;AAAA,EAC1D,QAAQ,CAAC,QAAQ;AAAA,EACjB,QAAQ,CAAC,QAAQ;AAAA,EACjB,OAAO,CAAC,OAAO;AAAA,EACf,WAAW,CAAC,WAAW;AAAA,EACvB,OAAO,CAAC,OAAO;AAAA,EACf,WAAW,CAAC,WAAW;AAAA,EACvB,KAAK,CAAC;AAAA;AAAA,EACN,SAAS,CAAC,SAAS;AACrB;AAGA,IAAM,8BAA8B;AAEpC,IAAM,6BAA6B;AAMnC,eAAe,oBACb,OACA,MACA,MACA,YACqC;AACrC,QAAM,SAAS,gCAAgC,KAAK,IAAI,IAAI,aAAa,IAAI;AAC7E,QAAM,UAAkC;AAAA,IACtC,QAAQ;AAAA,IACR,cAAc;AAAA,EAChB;AAGA,MAAI,QAAQ,IAAI,cAAc;AAC5B,YAAQ,gBAAgB,UAAU,QAAQ,IAAI,YAAY;AAAA,EAC5D;AAEA,QAAM,aAAa,IAAI,gBAAgB;AACvC,QAAM,QAAQ,WAAW,MAAM,WAAW,MAAM,GAAG,0BAA0B;AAE7E,MAAI;AACF,UAAM,WAAW,MAAM,MAAM,QAAQ,EAAE,SAAS,QAAQ,WAAW,OAAO,CAAC;AAC3E,QAAI,CAAC,SAAS,IAAI;AAGhB,UAAI,SAAS,WAAW,KAAK;AAC3B,YAAI,KAAK,uBAAuB,SAAS,MAAM,QAAQ,UAAU,IAAI,IAAI,EAAE;AAAA,MAC7E;AACA,aAAO;AAAA,IACT;AACA,UAAM,OAAQ,MAAM,SAAS,KAAK;AAElC,WAAO,MAAM,QAAQ,IAAI,IAAI,OAAO,CAAC,IAAI;AAAA,EAC3C,SAAS,KAAK;AACZ,QAAI,eAAe,SAAS,IAAI,SAAS,cAAc;AACrD,UAAI,KAAK,mCAAmC,UAAU,IAAI,IAAI,EAAE;AAAA,IAClE,OAAO;AACL,UAAI;AAAA,QACF,iCAAiC,UAAU,IAAI,IAAI,KAAK,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC;AAAA,MAC1G;AAAA,IACF;AACA,WAAO;AAAA,EACT,UAAE;AACA,iBAAa,KAAK;AAAA,EACpB;AACF;AAMA,SAAS,kBAAkB,UAAkB,UAAkB,YAA4B;AACzF,QAAM,YAAY,SAAS,YAAY;AACvC,QAAM,YAAY,SAAS,YAAY;AACvC,MAAI,UAAU,SAAS,UAAU,EAAG,QAAO;AAC3C,MAAI,UAAU,SAAS,UAAU,EAAG,QAAO;AAC3C,SAAO;AACT;AAMA,SAAS,uBACP,QACA,MACA,YACA,YAC8B;AAC9B,MAAI,KAAK,SAAS,OAAQ,QAAO;AACjC,MAAI,CAAC,KAAK,KAAK,SAAS,KAAK,EAAG,QAAO;AACvC,MAAI,CAAC,KAAK,aAAc,QAAO;AAE/B,QAAM,QAAQ,kBAAkB,KAAK,MAAM,KAAK,MAAM,UAAU;
AAChE,MAAI,UAAU,EAAG,QAAO;AAExB,QAAM,eAAe,iBAAiB,KAAK,MAAM,OAAO,OAAO;AAC/D,MAAI,cAAc,iBAAiB,WAAY,QAAO;AAEtD,SAAO;AAAA,IACL;AAAA,IACA,MAAM,KAAK;AAAA,IACX,aAAa,GAAG,KAAK,IAAI,OAAO,OAAO,IAAI;AAAA,IAC3C,MAAM;AAAA,IACN,KAAK,KAAK;AAAA,IACV;AAAA,EACF;AACF;AAkBA,eAAsB,kBACpB,QACA,OACA,SACA,QACkC;AAElC,QAAM,QAAQ,OAAO,IAAI,MAAM,+BAA+B;AAC9D,MAAI,CAAC,MAAO,QAAO,CAAC;AAEpB,QAAM,CAAC,EAAE,OAAO,OAAO,IAAI;AAE3B,QAAM,OAAO,QAAQ,QAAQ,UAAU,EAAE,EAAE,QAAQ,SAAS,EAAE;AAG9D,MAAI,QAAQ,IAAI,gBAAgB,QAAQ,IAAI,qCAAqC,KAAK;AACpF,QAAI,MAAM,uCAAuC,OAAO,IAAI,mCAAmC;AAC/F,WAAO,+BAA+B,QAAQ,OAAO,OAAO,MAAM,OAAO;AAAA,EAC3E;AAEA,MAAI,MAAM,oCAAoC,OAAO,IAAI,EAAE;AAC3D,SAAO,6BAA6B,QAAQ,OAAO,OAAO,MAAM,SAAS,MAAM;AACjF;AAeA,SAAS,cAAc,QAA0B;AAC/C,QAAM,OAAO,oBAAI,IAAY;AAC7B,aAAW,MAAM,OAAO,SAAS;AAC/B,eAAW,KAAK,qBAAqB,EAAE,KAAK,CAAC,GAAG;AAC9C,WAAK,IAAI,CAAC;AAAA,IACZ;AAAA,EACF;AAGA,MAAI,KAAK,SAAS,GAAG;AACnB,SAAK,IAAI,QAAQ;AACjB,SAAK,IAAI,QAAQ;AAAA,EACnB;AACA,SAAO,MAAM,KAAK,IAAI;AACxB;AAYA,eAAe,6BACb,QACA,OACA,OACA,MACA,SACA,QACkC;AAClC,QAAM,aAAa,MAAM,YAAY;AACrC,QAAM,aAAa,SAAS;AAC5B,QAAM,aAAa,SAAS,cAAc;AAC1C,QAAM,aAAyB,UAAU,EAAE,WAAW,4BAA4B;AAClF,QAAM,UAAmC,CAAC;AAE1C,QAAM,aAAa,cAAc,MAAM;AAGvC,QAAM,gBAAgB,WAAW,IAAI,CAAC,QAAQ;AAC5C,QAAI,WAAW,aAAa,EAAG,QAAO,QAAQ,QAAQ,IAAI;AAC1D,eAAW;AACX,WAAO,oBAAoB,OAAO,MAAM,KAAK,OAAO,IAAI;AAAA,EAC1D,CAAC;AACD,QAAM,gBAAgB,MAAM,QAAQ,IAAI,aAAa;AAErD,WAAS,IAAI,GAAG,IAAI,WAAW,QAAQ,KAAK;AAC1C,QAAI,QAAQ,UAAU,WAAY;AAClC,UAAM,UAAU,WAAW,CAAC;AAC5B,UAAM,QAAQ,cAAc,CAAC;AAC7B,QAAI,CAAC,MAAO;AAGZ,eAAW,QAAQ,OAAO;AACxB,UAAI,QAAQ,UAAU,WAAY;AAClC,YAAM,SAAS,uBAAuB,QAAQ,MAAM,YAAY,UAAU;AAC1E,UAAI,OAAQ,SAAQ,KAAK,MAAM;AAAA,IACjC;AAGA,QAAI,YAAY,aAAa,QAAQ,SAAS,YAAY;AACxD,YAAM,iBAAiB,MAAM;AAAA,QAC3B,CAAC,SAAS,KAAK,SAAS,SAAS,KAAK,KAAK,YAAY,EAAE,SAAS,UAAU;AAAA,MAC9E;AACA,iBAAW,SAAS,gBAAgB;AAClC,YAAI,QAAQ,UAAU,WAAY;AAClC,YAAI,WAAW,aAAa,EAAG;AAC/B,cAAM;AAAA,UACJ;AAAA,UACA;AAAA,UACA;AAAA,UACA,MAAM;AAAA,UACN;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAGA,UAAQ,KAAK,CAAC,GAAG
,MAAM,EAAE,QAAQ,EAAE,KAAK;AACxC,SAAO,QAAQ,MAAM,GAAG,UAAU;AACpC;AAOA,eAAe,gBACb,QACA,OACA,MACA,WACA,YACA,YACA,SACA,YACA,YACe;AACf,MAAI,WAAW,aAAa,EAAG;AAC/B,aAAW;AACX,QAAM,UAAU,MAAM,oBAAoB,OAAO,MAAM,WAAW,OAAO,IAAI;AAC7E,MAAI,CAAC,QAAS;AAEd,aAAW,OAAO,SAAS;AACzB,QAAI,QAAQ,UAAU,WAAY;AAClC,QAAI,WAAW,aAAa,EAAG;AAC/B,QAAI,IAAI,SAAS,MAAO;AAExB,UAAM,cACJ,IAAI,SAAS,YACb,IAAI,SAAS,YACb,IAAI,SAAS,WACb,IAAI,SAAS,eACb,IAAI,SAAS;AACf,QAAI,CAAC,YAAa;AAElB,eAAW;AACX,UAAM,QAAQ,MAAM,oBAAoB,OAAO,MAAM,IAAI,MAAM,OAAO,IAAI;AAC1E,QAAI,CAAC,MAAO;AAEZ,eAAW,QAAQ,OAAO;AACxB,UAAI,QAAQ,UAAU,WAAY;AAClC,YAAM,SAAS,uBAAuB,QAAQ,MAAM,YAAY,UAAU;AAC1E,UAAI,OAAQ,SAAQ,KAAK,MAAM;AAAA,IACjC;AAAA,EACF;AACF;AAOA,eAAe,+BACb,QACA,OACA,OACA,MACA,SACkC;AAClC,QAAM,UAAmC,CAAC;AAC1C,QAAM,SAAS,wCAAwC,mBAAmB,KAAK,CAAC,SAAS,KAAK,IAAI,IAAI;AAEtG,QAAM,aAAa,IAAI,gBAAgB;AACvC,QAAM,QAAQ,WAAW,MAAM,WAAW,MAAM,GAAG,0BAA0B;AAE7E,MAAI;AACF,UAAM,WAAW,MAAM,MAAM,QAAQ;AAAA,MACnC,SAAS;AAAA,QACP,QAAQ;AAAA,QACR,cAAc;AAAA,QACd,eAAe,UAAU,QAAQ,IAAI,YAAY;AAAA,MACnD;AAAA,MACA,QAAQ,WAAW;AAAA,IACrB,CAAC;AAED,QAAI,CAAC,SAAS,IAAI;AAChB,UAAI,KAAK,0BAA0B,SAAS,MAAM,QAAQ,OAAO,IAAI,EAAE;AACvE,aAAO;AAAA,IACT;AAEA,UAAM,OAAQ,MAAM,SAAS,KAAK;AAGlC,QAAI,CAAC,KAAK,MAAO,QAAO;AAExB,UAAM,aAAa,SAAS,cAAc;AAC1C,UAAM,aAAa,MAAM,YAAY;AAErC,eAAW,QAAQ,KAAK,MAAM,MAAM,GAAG,UAAU,GAAG;AAClD,YAAM,OAAO,iBAAiB,KAAK,MAAM,OAAO,OAAO;AACvD,UAAI,SAAS,QAAQ,SAAS,QAAQ,KAAM;AAC5C,YAAM,YAAY,KAAK,KAAK,YAAY,EAAE,SAAS,UAAU,IAAI,MAAM;AACvE,cAAQ,KAAK;AAAA,QACX;AAAA,QACA,MAAM,KAAK;AAAA,QACX,aAAa,GAAG,KAAK,IAAI,OAAO,OAAO,IAAI;AAAA,QAC3C;AAAA,QACA,KAAK,KAAK;AAAA,QACV,OAAO;AAAA,MACT,CAAC;AAAA,IACH;AAAA,EACF,SAAS,KAAK;AACZ,QAAI;AAAA,MACF,0BAA0B,OAAO,IAAI,KAAK,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC;AAAA,IAC5F;AAAA,EACF,UAAE;AACA,iBAAa,KAAK;AAAA,EACpB;AAEA,SAAO;AACT;AAWA,eAAsB,eACpB,YACA,OACA,SACkC;AAClC,QAAM,UAAU,eAAe,UAAU;AACzC,QAAM,aAAa,SAAS,cAAc;AAG1C,MAAI,WAAW;AACf,MAAI,SAAS,MAAM;AACjB,eAAW,QAAQ,OAAO,CAAC,MAAM,EAAE,QAAQ,SAAS,QAAQ,IAAK,CAAC;AAAA,EACpE;AACA,MAAI,SAAS,eAAe,QAAQ,YAAY,SAAS,GAAG;AAC1D,
eAAW,SAAS;AAAA,MAAO,CAAC,MAC1B,QAAQ,YAAa;AAAA,QAAK,CAAC,MACzB,EAAE,KAAK,YAAY,EAAE,SAAS,EAAE,YAAY,CAAC;AAAA,MAC/C;AAAA,IACF;AAAA,EACF;AAIA,QAAM,eAA2B,EAAE,WAAW,4BAA4B;AAG1E,QAAM,WAAW,SAAS,IAAI,OAAO,WAAW;AAC9C,QAAI,OAAO,SAAS,UAAU;AAC5B,aAAO,kBAAkB,QAAQ,OAAO,SAAS,YAAY;AAAA,IAC/D;AAGA,WAAO,CAAC;AAAA,EACV,CAAC;AAED,QAAM,UAAU,MAAM,QAAQ,WAAW,QAAQ;AACjD,QAAM,aAAsC,CAAC;AAE7C,aAAW,UAAU,SAAS;AAC5B,QAAI,OAAO,WAAW,aAAa;AACjC,iBAAW,KAAK,GAAG,OAAO,KAAK;AAAA,IACjC;AAAA,EACF;AAGA,aAAW,KAAK,CAAC,GAAG,MAAM,EAAE,QAAQ,EAAE,KAAK;AAC3C,QAAM,OAAO,oBAAI,IAAY;AAC7B,QAAM,UAAmC,CAAC;AAC1C,aAAW,KAAK,YAAY;AAC1B,QAAI,CAAC,KAAK,IAAI,EAAE,GAAG,GAAG;AACpB,WAAK,IAAI,EAAE,GAAG;AACd,cAAQ,KAAK,CAAC;AAAA,IAChB;AAAA,EACF;AAEA,SAAO,QAAQ,MAAM,GAAG,UAAU;AACpC;AAIA,SAAS,iBAAiB,KAA+C;AACvE,SAAO,IACJ,OAAO,CAAC,MAAM,EAAE,QAAQ,EAAE,OAAO,EAAE,IAAI,EACvC,IAAI,CAAC,OAAO;AAAA,IACX,MAAM,OAAO,EAAE,IAAI;AAAA,IACnB,KAAK,OAAO,EAAE,GAAG;AAAA,IACjB,MAAM,OAAO,EAAE,IAAI;AAAA,IACnB,SAAS,MAAM,QAAQ,EAAE,OAAO,IAC3B,EAAE,QAAqB,IAAI,CAAC,MAAM,OAAO,CAAC,CAAgB,IAC3D,CAAC;AAAA,IACL,MAAM,MAAM,QAAQ,EAAE,IAAI,IAAK,EAAE,KAAkB,IAAI,MAAM,IAAI,CAAC;AAAA,IAClE,aAAa,EAAE,cAAc,OAAO,EAAE,WAAW,IAAI;AAAA,IACrD,OAAO,EAAE,SAAS,OAAO,EAAE,UAAU,WACjC,EAAE,QACF;AAAA,EACN,EAAE;AACN;AAEA,SAAS,mBACP,QACA,YACA,YACQ;AACR,MAAI,QAAQ;AAGZ,MAAI,OAAO,KAAK,YAAY,MAAM,YAAY;AAC5C,aAAS;AAAA,EACX,WAAW,OAAO,KAAK,YAAY,EAAE,SAAS,UAAU,GAAG;AACzD,aAAS;AAAA,EACX;AAGA,QAAM,QAAQ,OAAO,eAAe,IAAI,YAAY;AACpD,MAAI,KAAK,SAAS,UAAU,GAAG;AAC7B,aAAS;AAAA,EACX;AAGA,aAAW,OAAO,OAAO,MAAM;AAC7B,QAAI,IAAI,YAAY,MAAM,YAAY;AACpC,eAAS;AAAA,IACX,WAAW,IAAI,YAAY,EAAE,SAAS,UAAU,GAAG;AACjD,eAAS;AAAA,IACX;AAAA,EACF;AAGA,aAAW,MAAM,OAAO,SAAS;AAC/B,QAAI,OAAO,YAAY;AACrB,eAAS;AAAA,IACX;AAAA,EACF;AAGA,MAAI,WAAW,SAAS,KAAK,UAAU,GAAG;AACxC,QAAI,WAAW;AACf,UAAM,UAAU,GAAG,OAAO,IAAI,IAAI,OAAO,eAAe,EAAE,IAAI,OAAO,KAAK,KAAK,GAAG,CAAC,IAAI,OAAO,QAAQ,KAAK,GAAG,CAAC,GAAG,YAAY;AAC9H,eAAW,QAAQ,YAAY;AAC7B,UAAI,QAAQ,SAAS,IAAI,EAAG;AAAA,IAC9B;AACA,QAAI,WAAW,GAAG;AAChB,eAAU,WAAW,WAAW,SAAU;AAAA,IAC5C;AAAA,EACF;AAEA,SAAO,KAAK,
IAAI,OAAO,CAAG;AAC5B;AAEA,SAAS,iBAAiB,UAAkB,oBAAgD;AAC1F,QAAM,YAAY,SAAS,YAAY;AAEvC,MAAI,UAAU,SAAS,OAAO,KAAK,UAAU,SAAS,UAAU,EAAG,QAAO;AAC1E,MAAI,UAAU,SAAS,OAAO,EAAG,QAAO;AACxC,MAAI,UAAU,SAAS,MAAM,EAAG,QAAO;AACvC,MAAI,UAAU,SAAS,UAAU,EAAG,QAAO;AAC3C,MAAI,UAAU,SAAS,MAAM,EAAG,QAAO;AACvC,MAAI,UAAU,SAAS,UAAU,EAAG,QAAO;AAC3C,MAAI,UAAU,SAAS,KAAK,KAAK,UAAU,SAAS,QAAQ,EAAG,QAAO;AACtE,MAAI,UAAU,SAAS,QAAQ,EAAG,QAAO;AAGzC,SAAO,mBAAmB,CAAC,KAAK;AAClC;","names":[]}
|