luckerr 0.41.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +267 -0
- package/README.zh-CN.md +237 -0
- package/dashboard/app.css +3022 -0
- package/dashboard/dist/app.js +30137 -0
- package/dashboard/dist/app.js.map +1 -0
- package/dashboard/dist/vendor-hljs.css +10 -0
- package/dashboard/dist/vendor-uplot.css +1 -0
- package/dashboard/index.html +19 -0
- package/data/deepseek-tokenizer.json.gz +0 -0
- package/dist/cli/acp-EOOAI4F5.js +712 -0
- package/dist/cli/acp-EOOAI4F5.js.map +1 -0
- package/dist/cli/chat-7J6GJXL2.js +51 -0
- package/dist/cli/chat-7J6GJXL2.js.map +1 -0
- package/dist/cli/chunk-2425HK6U.js +54 -0
- package/dist/cli/chunk-2425HK6U.js.map +1 -0
- package/dist/cli/chunk-25T6CVUP.js +172 -0
- package/dist/cli/chunk-25T6CVUP.js.map +1 -0
- package/dist/cli/chunk-2UQP6H6T.js +31 -0
- package/dist/cli/chunk-2UQP6H6T.js.map +1 -0
- package/dist/cli/chunk-56OAJILV.js +47 -0
- package/dist/cli/chunk-56OAJILV.js.map +1 -0
- package/dist/cli/chunk-5FTI4KXH.js +150 -0
- package/dist/cli/chunk-5FTI4KXH.js.map +1 -0
- package/dist/cli/chunk-5TWQD73O.js +2846 -0
- package/dist/cli/chunk-5TWQD73O.js.map +1 -0
- package/dist/cli/chunk-653BOCMK.js +40 -0
- package/dist/cli/chunk-653BOCMK.js.map +1 -0
- package/dist/cli/chunk-6ALJTWWQ.js +2663 -0
- package/dist/cli/chunk-6ALJTWWQ.js.map +1 -0
- package/dist/cli/chunk-6DRKA2IL.js +341 -0
- package/dist/cli/chunk-6DRKA2IL.js.map +1 -0
- package/dist/cli/chunk-6LV63NJV.js +634 -0
- package/dist/cli/chunk-6LV63NJV.js.map +1 -0
- package/dist/cli/chunk-74EX7SUH.js +25293 -0
- package/dist/cli/chunk-74EX7SUH.js.map +1 -0
- package/dist/cli/chunk-74U5RKTX.js +60611 -0
- package/dist/cli/chunk-74U5RKTX.js.map +1 -0
- package/dist/cli/chunk-ANJSUESV.js +143 -0
- package/dist/cli/chunk-ANJSUESV.js.map +1 -0
- package/dist/cli/chunk-DB2Z3DKZ.js +54 -0
- package/dist/cli/chunk-DB2Z3DKZ.js.map +1 -0
- package/dist/cli/chunk-DDIH3ZAA.js +400 -0
- package/dist/cli/chunk-DDIH3ZAA.js.map +1 -0
- package/dist/cli/chunk-ELN3Z3B2.js +621 -0
- package/dist/cli/chunk-ELN3Z3B2.js.map +1 -0
- package/dist/cli/chunk-F6BSQJGV.js +200 -0
- package/dist/cli/chunk-F6BSQJGV.js.map +1 -0
- package/dist/cli/chunk-FET2UAG5.js +246 -0
- package/dist/cli/chunk-FET2UAG5.js.map +1 -0
- package/dist/cli/chunk-FFJ342IJ.js +190 -0
- package/dist/cli/chunk-FFJ342IJ.js.map +1 -0
- package/dist/cli/chunk-GB3247B6.js +130 -0
- package/dist/cli/chunk-GB3247B6.js.map +1 -0
- package/dist/cli/chunk-HC2J4U3G.js +373 -0
- package/dist/cli/chunk-HC2J4U3G.js.map +1 -0
- package/dist/cli/chunk-HRUZAIHQ.js +42 -0
- package/dist/cli/chunk-HRUZAIHQ.js.map +1 -0
- package/dist/cli/chunk-J3ZJFUDL.js +308 -0
- package/dist/cli/chunk-J3ZJFUDL.js.map +1 -0
- package/dist/cli/chunk-J5XJHLWM.js +55 -0
- package/dist/cli/chunk-J5XJHLWM.js.map +1 -0
- package/dist/cli/chunk-JFGLMRZ6.js +160 -0
- package/dist/cli/chunk-JFGLMRZ6.js.map +1 -0
- package/dist/cli/chunk-JMBMLOBP.js +26 -0
- package/dist/cli/chunk-JMBMLOBP.js.map +1 -0
- package/dist/cli/chunk-JMWHXZEL.js +551 -0
- package/dist/cli/chunk-JMWHXZEL.js.map +1 -0
- package/dist/cli/chunk-KEQGPJBO.js +209 -0
- package/dist/cli/chunk-KEQGPJBO.js.map +1 -0
- package/dist/cli/chunk-M4K6U37F.js +232 -0
- package/dist/cli/chunk-M4K6U37F.js.map +1 -0
- package/dist/cli/chunk-MIJI2WMN.js +95 -0
- package/dist/cli/chunk-MIJI2WMN.js.map +1 -0
- package/dist/cli/chunk-MPAO3JNR.js +128 -0
- package/dist/cli/chunk-MPAO3JNR.js.map +1 -0
- package/dist/cli/chunk-PZOFBEDC.js +873 -0
- package/dist/cli/chunk-PZOFBEDC.js.map +1 -0
- package/dist/cli/chunk-RAILYQLN.js +46 -0
- package/dist/cli/chunk-RAILYQLN.js.map +1 -0
- package/dist/cli/chunk-RR35VQVT.js +90 -0
- package/dist/cli/chunk-RR35VQVT.js.map +1 -0
- package/dist/cli/chunk-RRA7VPW4.js +417 -0
- package/dist/cli/chunk-RRA7VPW4.js.map +1 -0
- package/dist/cli/chunk-RU36QVN3.js +452 -0
- package/dist/cli/chunk-RU36QVN3.js.map +1 -0
- package/dist/cli/chunk-RUBIINXR.js +1819 -0
- package/dist/cli/chunk-RUBIINXR.js.map +1 -0
- package/dist/cli/chunk-S4XVGLRW.js +499 -0
- package/dist/cli/chunk-S4XVGLRW.js.map +1 -0
- package/dist/cli/chunk-TUK7OWJA.js +51 -0
- package/dist/cli/chunk-TUK7OWJA.js.map +1 -0
- package/dist/cli/chunk-VALDDV76.js +580 -0
- package/dist/cli/chunk-VALDDV76.js.map +1 -0
- package/dist/cli/chunk-WQOGPYGN.js +11390 -0
- package/dist/cli/chunk-WQOGPYGN.js.map +1 -0
- package/dist/cli/chunk-WREKDFXT.js +34320 -0
- package/dist/cli/chunk-WREKDFXT.js.map +1 -0
- package/dist/cli/chunk-Y7XQU2EL.js +270 -0
- package/dist/cli/chunk-Y7XQU2EL.js.map +1 -0
- package/dist/cli/chunk-YBVCZJU4.js +54 -0
- package/dist/cli/chunk-YBVCZJU4.js.map +1 -0
- package/dist/cli/chunk-YLIHDXUQ.js +749 -0
- package/dist/cli/chunk-YLIHDXUQ.js.map +1 -0
- package/dist/cli/chunk-YV5XXFD7.js +767 -0
- package/dist/cli/chunk-YV5XXFD7.js.map +1 -0
- package/dist/cli/chunk-ZRCNIYRQ.js +101 -0
- package/dist/cli/chunk-ZRCNIYRQ.js.map +1 -0
- package/dist/cli/code-CRKVCMFZ.js +155 -0
- package/dist/cli/code-CRKVCMFZ.js.map +1 -0
- package/dist/cli/commands-QLMD3T7B.js +356 -0
- package/dist/cli/commands-QLMD3T7B.js.map +1 -0
- package/dist/cli/commit-53PP32NC.js +293 -0
- package/dist/cli/commit-53PP32NC.js.map +1 -0
- package/dist/cli/desktop-R6W5CLJ5.js +1046 -0
- package/dist/cli/desktop-R6W5CLJ5.js.map +1 -0
- package/dist/cli/devtools-YECO25QO.js +3719 -0
- package/dist/cli/devtools-YECO25QO.js.map +1 -0
- package/dist/cli/diff-LYNRCJZE.js +166 -0
- package/dist/cli/diff-LYNRCJZE.js.map +1 -0
- package/dist/cli/doctor-5IBP4R5J.js +28 -0
- package/dist/cli/doctor-5IBP4R5J.js.map +1 -0
- package/dist/cli/events-QN6KLN2V.js +340 -0
- package/dist/cli/events-QN6KLN2V.js.map +1 -0
- package/dist/cli/index.js +3500 -0
- package/dist/cli/index.js.map +1 -0
- package/dist/cli/mcp-FGKEH7RG.js +277 -0
- package/dist/cli/mcp-FGKEH7RG.js.map +1 -0
- package/dist/cli/mcp-browse-YCND4NWT.js +178 -0
- package/dist/cli/mcp-browse-YCND4NWT.js.map +1 -0
- package/dist/cli/mcp-inspect-V34J3VX5.js +143 -0
- package/dist/cli/mcp-inspect-V34J3VX5.js.map +1 -0
- package/dist/cli/package.json +3 -0
- package/dist/cli/prompt-I775PNKT.js +16 -0
- package/dist/cli/prompt-I775PNKT.js.map +1 -0
- package/dist/cli/prune-sessions-KGIIYD3P.js +44 -0
- package/dist/cli/prune-sessions-KGIIYD3P.js.map +1 -0
- package/dist/cli/replay-RDXLUAOE.js +292 -0
- package/dist/cli/replay-RDXLUAOE.js.map +1 -0
- package/dist/cli/run-RCAC2RYW.js +223 -0
- package/dist/cli/run-RCAC2RYW.js.map +1 -0
- package/dist/cli/server-FFU6TLYJ.js +3658 -0
- package/dist/cli/server-FFU6TLYJ.js.map +1 -0
- package/dist/cli/sessions-QT26MQAE.js +107 -0
- package/dist/cli/sessions-QT26MQAE.js.map +1 -0
- package/dist/cli/setup-VV4WKXHV.js +767 -0
- package/dist/cli/setup-VV4WKXHV.js.map +1 -0
- package/dist/cli/stats-JVZPQWAN.js +15 -0
- package/dist/cli/stats-JVZPQWAN.js.map +1 -0
- package/dist/cli/update-KYI3OVJP.js +15 -0
- package/dist/cli/update-KYI3OVJP.js.map +1 -0
- package/dist/cli/version-ANYORXTI.js +34 -0
- package/dist/cli/version-ANYORXTI.js.map +1 -0
- package/dist/index.d.ts +2557 -0
- package/dist/index.js +15000 -0
- package/dist/index.js.map +1 -0
- package/package.json +106 -0
|
@@ -0,0 +1,209 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
import { createRequire as __cr } from 'node:module'; if (typeof globalThis.require === 'undefined') { globalThis.require = __cr(import.meta.url); }
|
|
3
|
+
import {
|
|
4
|
+
t
|
|
5
|
+
} from "./chunk-5TWQD73O.js";
|
|
6
|
+
|
|
7
|
+
// src/hooks.ts
|
|
8
|
+
import { spawn } from "child_process";
|
|
9
|
+
import { existsSync, readFileSync } from "fs";
|
|
10
|
+
import { homedir } from "os";
|
|
11
|
+
import { join } from "path";
|
|
12
|
+
// All four hook events; drives slash listing + validation.
var HOOK_EVENTS = [
  "PreToolUse",
  "PostToolUse",
  "UserPromptSubmit",
  "Stop"
];
// Only the gating events may block the agent loop.
var BLOCKING_EVENTS = /* @__PURE__ */ new Set(["PreToolUse", "UserPromptSubmit"]);
// Per-event default timeout (ms). Gating hooks (PreToolUse/UserPromptSubmit)
// hold up progress, so they are tight; observational hooks get 30s.
var DEFAULT_TIMEOUTS_MS = {
  PreToolUse: 5e3,
  UserPromptSubmit: 5e3,
  PostToolUse: 3e4,
  Stop: 3e4
};
// Settings live at `<scope>/.luckerr/settings.json` (project root or home dir).
var HOOK_SETTINGS_FILENAME = "settings.json";
var HOOK_SETTINGS_DIRNAME = ".luckerr";
|
|
27
|
+
/**
 * Absolute path of the global settings file (`~/.luckerr/settings.json`).
 * @param {string} [homeDirOverride] - Substitute home directory (used by tests).
 * @returns {string} Joined settings-file path.
 */
function globalSettingsPath(homeDirOverride) {
  const base = homeDirOverride ?? homedir();
  return join(base, HOOK_SETTINGS_DIRNAME, HOOK_SETTINGS_FILENAME);
}
|
|
30
|
+
/**
 * Absolute path of a project's settings file (`<root>/.luckerr/settings.json`).
 * @param {string} projectRoot - Absolute project root directory.
 * @returns {string} Joined settings-file path.
 */
function projectSettingsPath(projectRoot) {
  const parts = [projectRoot, HOOK_SETTINGS_DIRNAME, HOOK_SETTINGS_FILENAME];
  return join(...parts);
}
|
|
33
|
+
/**
 * Reads and parses one settings.json. Returns null when the file is missing,
 * unparseable, or does not parse to an object — a typo in the user's settings
 * must never take down the whole CLI.
 * @param {string} path - Absolute path to a settings.json.
 * @returns {object|null} Parsed settings object, or null.
 */
function readSettingsFile(path) {
  if (!existsSync(path)) return null;
  let parsed;
  try {
    parsed = JSON.parse(readFileSync(path, "utf8"));
  } catch {
    // Malformed JSON → behave as if no hooks are configured.
    return null;
  }
  return parsed && typeof parsed === "object" ? parsed : null;
}
|
|
43
|
+
/**
 * Loads resolved hooks: project settings first (when a projectRoot is given),
 * then global settings, preserving array order within each file.
 * @param {{projectRoot?: string, homeDir?: string}} [opts]
 * @returns {Array} Resolved hooks with scope + source path baked in.
 */
function loadHooks(opts = {}) {
  const resolved = [];
  // Read one settings file and append its hooks, tagged with scope/source.
  const ingest = (scope, path) => {
    const settings = readSettingsFile(path);
    if (settings) appendResolved(resolved, settings, scope, path);
  };
  // Project hooks fire before global ones.
  if (opts.projectRoot) {
    ingest("project", projectSettingsPath(opts.projectRoot));
  }
  ingest("global", globalSettingsPath(opts.homeDir));
  return resolved;
}
|
|
55
|
+
/**
 * Appends every valid hook config from `settings` onto `out`, stamping each
 * with its event name, scope, and originating file path.
 * Entries without a non-empty string `command` are skipped.
 */
function appendResolved(out, settings, scope, source) {
  const byEvent = settings.hooks;
  if (!byEvent) return;
  for (const event of HOOK_EVENTS) {
    const configs = byEvent[event];
    if (!Array.isArray(configs)) continue;
    for (const cfg of configs) {
      const usable = cfg && typeof cfg.command === "string" && cfg.command.trim() !== "";
      if (usable) {
        out.push({ ...cfg, event, scope, source });
      }
    }
  }
}
|
|
66
|
+
/**
 * Decides whether a hook applies to `toolName`. Only Pre/PostToolUse hooks are
 * filtered by tool; every other event always matches. The `match` field is an
 * ANCHORED regex (wrapped in `^(?:…)$`), so "file" will not fire on
 * "read_file". A malformed pattern never fires (safer than firing everywhere).
 */
function matchesTool(hook, toolName) {
  const toolScoped = hook.event === "PreToolUse" || hook.event === "PostToolUse";
  if (!toolScoped) return true;
  const pattern = hook.match;
  if (!pattern || pattern === "*") return true;
  try {
    return new RegExp(`^(?:${pattern})$`).test(toolName);
  } catch {
    return false;
  }
}
|
|
77
|
+
// Per-stream output cap — bounds heap exposure to a runaway child process
// between spawn and timeout.
var HOOK_OUTPUT_CAP_BYTES = 256 * 1024;
// Runs one hook command through the platform shell (`shell: true` is the
// contract — pipes, `&&`, and env expansion must work), feeding the JSON
// payload on stdin and capturing capped stdout/stderr. Always resolves,
// never rejects: spawn failures come back as `spawnError`, timeouts as
// `timedOut` (SIGTERM, then SIGKILL 500ms later if the child lingers).
function defaultSpawner(input) {
  return new Promise((resolve) => {
    const child = spawn(input.command, {
      cwd: input.cwd,
      shell: true,
      stdio: ["pipe", "pipe", "pipe"]
    });
    // Collect raw bytes per stream and decode once at close so a multi-byte
    // UTF-8 sequence split across data chunks doesn't corrupt.
    const stdoutChunks = [];
    const stderrChunks = [];
    let stdoutBytes = 0;
    let stderrBytes = 0;
    let truncated = false;
    let timedOut = false;
    const timer = setTimeout(() => {
      timedOut = true;
      child.kill("SIGTERM");
      // SIGTERM may not land (e.g. Windows shell children) — follow up with a
      // hard kill shortly after if the process is still around.
      setTimeout(() => {
        try {
          child.kill("SIGKILL");
        } catch {
          // already gone
        }
      }, 500);
    }, input.timeoutMs);
    // Appends a data chunk to its stream's buffer, clipping at the byte cap
    // and flagging `truncated` when anything is dropped.
    const onChunk = (kind, chunk) => {
      const target = kind === "stdout" ? stdoutChunks : stderrChunks;
      const seen = kind === "stdout" ? stdoutBytes : stderrBytes;
      if (seen >= HOOK_OUTPUT_CAP_BYTES) {
        truncated = true;
        return;
      }
      const remaining = HOOK_OUTPUT_CAP_BYTES - seen;
      if (chunk.length > remaining) {
        target.push(chunk.subarray(0, remaining));
        if (kind === "stdout") stdoutBytes = HOOK_OUTPUT_CAP_BYTES;
        else stderrBytes = HOOK_OUTPUT_CAP_BYTES;
        truncated = true;
      } else {
        target.push(chunk);
        if (kind === "stdout") stdoutBytes += chunk.length;
        else stderrBytes += chunk.length;
      }
    };
    child.stdout.on("data", (chunk) => onChunk("stdout", chunk));
    child.stderr.on("data", (chunk) => onChunk("stderr", chunk));
    // Spawn-level failure (ENOENT, EACCES, …): resolve with exitCode null and
    // the error attached. NOTE: output is NOT trimmed on this path, unlike
    // the close path below.
    child.once("error", (err) => {
      clearTimeout(timer);
      resolve({
        exitCode: null,
        stdout: Buffer.concat(stdoutChunks).toString("utf8"),
        stderr: Buffer.concat(stderrChunks).toString("utf8"),
        timedOut: false,
        spawnError: err,
        truncated: truncated || void 0
      });
    });
    // Normal exit (including after a timeout kill): decode, trim, resolve.
    child.once("close", (code) => {
      clearTimeout(timer);
      resolve({
        exitCode: code,
        stdout: Buffer.concat(stdoutChunks).toString("utf8").trim(),
        stderr: Buffer.concat(stderrChunks).toString("utf8").trim(),
        timedOut,
        truncated: truncated || void 0
      });
    });
    try {
      child.stdin.write(input.stdin);
      child.stdin.end();
    } catch {
      // stdin writes can race with spawn errors; the error/close handlers
      // above still fire and resolve the promise.
    }
  });
}
|
|
150
|
+
/**
 * Renders one hook outcome as a localized, user-facing line. Passing hooks
 * produce an empty string. The detail text prefers stderr over stdout;
 * commands longer than 60 chars are elided with an ellipsis.
 */
function formatHookOutcomeMessage(outcome) {
  if (outcome.decision === "pass") return "";
  const { hook } = outcome;
  const detail = (outcome.stderr || outcome.stdout || "").trim();
  const tag = `${hook.scope}/${hook.event}`;
  const cmd = hook.command.length > 60 ? `${hook.command.slice(0, 60)}\u2026` : hook.command;
  const truncTag = outcome.truncated ? t("hooks.truncated") : "";
  const decision = t(`hooks.decision${capitalize(outcome.decision)}`);
  if (detail) {
    return t("hooks.headWithDetail", { tag, cmd, decision, truncTag, detail });
  }
  return t("hooks.head", { tag, cmd, decision, truncTag });
}
|
|
159
|
+
/** Uppercases the first character of `s`; empty input stays empty. */
function capitalize(s) {
  return s === "" ? "" : s[0].toUpperCase() + s.substring(1);
}
|
|
162
|
+
/**
 * Maps a raw spawn result to a decision: spawn failure → "error"; timeout or
 * exit code 2 → "block" on gating events, otherwise "warn"; exit 0 → "pass";
 * any other non-zero exit → "warn".
 */
function decideOutcome(event, raw) {
  if (raw.spawnError) return "error";
  const gating = BLOCKING_EVENTS.has(event);
  if (raw.timedOut) return gating ? "block" : "warn";
  switch (raw.exitCode) {
    case 0:
      return "pass";
    case 2:
      return gating ? "block" : "warn";
    default:
      return "warn";
  }
}
|
|
169
|
+
/**
 * Runs every hook matching the payload's event, sequentially and in order,
 * stopping at the first "block" so a gating hook can prevent later hooks from
 * running against a phantom success. The payload is serialized once and fed
 * to each hook's stdin as a single JSON line.
 * @returns {Promise<{event: string, outcomes: Array, blocked: boolean}>}
 */
async function runHooks(opts) {
  const { payload, hooks } = opts;
  const spawner = opts.spawner ?? defaultSpawner;
  const event = payload.event;
  const toolName = payload.toolName ?? "";
  const stdin = `${JSON.stringify(payload)}\n`;
  const outcomes = [];
  let blocked = false;
  for (const hook of hooks) {
    if (hook.event !== event) continue;
    if (!matchesTool(hook, toolName)) continue;
    const start = Date.now();
    const timeoutMs = hook.timeout ?? DEFAULT_TIMEOUTS_MS[event];
    const cwd = hook.cwd ?? payload.cwd;
    const raw = await spawner({ command: hook.command, cwd, stdin, timeoutMs });
    const decision = decideOutcome(event, raw);
    // Synthesize a stderr message for spawn failures and timeouts so the
    // outcome always carries something displayable.
    const stderr =
      raw.stderr ||
      (raw.spawnError ? raw.spawnError.message : "") ||
      (raw.timedOut ? `hook timed out after ${timeoutMs}ms` : "");
    outcomes.push({
      hook,
      decision,
      exitCode: raw.exitCode,
      stdout: raw.stdout,
      stderr,
      durationMs: Date.now() - start,
      truncated: raw.truncated
    });
    if (decision === "block") {
      blocked = true;
      break;
    }
  }
  return { event, outcomes, blocked };
}
|
|
200
|
+
|
|
201
|
+
// Public surface of this chunk, re-exported by the CLI entry points.
export {
  HOOK_EVENTS,
  globalSettingsPath,
  projectSettingsPath,
  loadHooks,
  formatHookOutcomeMessage,
  runHooks
};
//# sourceMappingURL=chunk-KEQGPJBO.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../../src/hooks.ts"],"sourcesContent":["/** Shell-command hooks; project scope first, then global. Exit 0=pass, 2=block on Pre*, other=warn. */\n\nimport { spawn } from \"node:child_process\";\nimport { existsSync, readFileSync } from \"node:fs\";\nimport { homedir } from \"node:os\";\nimport { join } from \"node:path\";\nimport { t } from \"./i18n/index.js\";\n\nexport type HookEvent = \"PreToolUse\" | \"PostToolUse\" | \"UserPromptSubmit\" | \"Stop\";\n\n/** All four events as a const array — drives slash listing + validation. */\nexport const HOOK_EVENTS: readonly HookEvent[] = [\n \"PreToolUse\",\n \"PostToolUse\",\n \"UserPromptSubmit\",\n \"Stop\",\n] as const;\n\n/** Only the gating events can block the loop. */\nconst BLOCKING_EVENTS: ReadonlySet<HookEvent> = new Set([\"PreToolUse\", \"UserPromptSubmit\"]);\n\n/** Per-event default timeout. Tool/prompt hooks gate progress, so they're tight. */\nconst DEFAULT_TIMEOUTS_MS: Record<HookEvent, number> = {\n PreToolUse: 5_000,\n UserPromptSubmit: 5_000,\n PostToolUse: 30_000,\n Stop: 30_000,\n};\n\nexport type HookScope = \"project\" | \"global\";\n\nexport interface HookConfig {\n /** Anchored regex; `\"*\"` / omitted = every tool. Pre/PostToolUse only. */\n match?: string;\n /** Shell command to run. Spawned through the platform shell. */\n command: string;\n /** Optional human description — surfaced in `/hooks`. */\n description?: string;\n /** Per-hook timeout override in ms. */\n timeout?: number;\n /** Defaults: project scope → project root; global scope → process.cwd(). */\n cwd?: string;\n}\n\n/** Shape of `<scope>/.luckerr/settings.json` — only `hooks` for now. */\nexport interface HookSettings {\n hooks?: Partial<Record<HookEvent, HookConfig[]>>;\n}\n\n/** A loaded hook with its origin scope baked in (used for ordering and `/hooks`). 
*/\nexport interface ResolvedHook extends HookConfig {\n event: HookEvent;\n scope: HookScope;\n /** Absolute path to the settings.json the hook came from. */\n source: string;\n}\n\n/** Outcome of a single hook invocation. */\nexport interface HookOutcome {\n /** Which hook fired. */\n hook: ResolvedHook;\n /** pass=exit 0; block=exit 2 on blocking event; warn=other non-zero; timeout=killed; error=spawn failed. */\n decision: \"pass\" | \"block\" | \"warn\" | \"timeout\" | \"error\";\n exitCode: number | null;\n /** Captured stdout (trimmed). May be empty. */\n stdout: string;\n /** Captured stderr (trimmed). The block / warn message comes from here. */\n stderr: string;\n durationMs: number;\n /** Output crossed the per-stream byte cap; surfaced so user knows we kept less than the script wrote. */\n truncated?: boolean;\n}\n\n/** Aggregate report for `runHooks`. */\nexport interface HookReport {\n event: HookEvent;\n outcomes: HookOutcome[];\n /** True iff at least one outcome was a `block` — only meaningful for blocking events. */\n blocked: boolean;\n}\n\nexport const HOOK_SETTINGS_FILENAME = \"settings.json\";\nexport const HOOK_SETTINGS_DIRNAME = \".luckerr\";\n\n/** Where the global settings.json lives. Equivalent to `~/.luckerr/settings.json`. */\nexport function globalSettingsPath(homeDirOverride?: string): string {\n return join(homeDirOverride ?? homedir(), HOOK_SETTINGS_DIRNAME, HOOK_SETTINGS_FILENAME);\n}\n\n/** Where the project settings.json lives for a given root. 
*/\nexport function projectSettingsPath(projectRoot: string): string {\n return join(projectRoot, HOOK_SETTINGS_DIRNAME, HOOK_SETTINGS_FILENAME);\n}\n\nfunction readSettingsFile(path: string): HookSettings | null {\n if (!existsSync(path)) return null;\n try {\n const raw = readFileSync(path, \"utf8\");\n const parsed = JSON.parse(raw);\n if (parsed && typeof parsed === \"object\") return parsed as HookSettings;\n } catch {\n /* malformed JSON → treat as no hooks; do NOT throw, the user\n * shouldn't lose the whole CLI to a typo in their settings */\n }\n return null;\n}\n\n/** Project hooks fire before global; within a scope, array order. */\nexport interface LoadHookSettingsOptions {\n /** Absolute project root, if any. Without it, only global hooks load. */\n projectRoot?: string;\n /** Override `~` for tests. */\n homeDir?: string;\n}\n\nexport function loadHooks(opts: LoadHookSettingsOptions = {}): ResolvedHook[] {\n const out: ResolvedHook[] = [];\n if (opts.projectRoot) {\n const projPath = projectSettingsPath(opts.projectRoot);\n const settings = readSettingsFile(projPath);\n if (settings) appendResolved(out, settings, \"project\", projPath);\n }\n const globalPath = globalSettingsPath(opts.homeDir);\n const settings = readSettingsFile(globalPath);\n if (settings) appendResolved(out, settings, \"global\", globalPath);\n return out;\n}\n\nfunction appendResolved(\n out: ResolvedHook[],\n settings: HookSettings,\n scope: HookScope,\n source: string,\n): void {\n if (!settings.hooks) return;\n for (const event of HOOK_EVENTS) {\n const list = settings.hooks[event];\n if (!Array.isArray(list)) continue;\n for (const cfg of list) {\n if (!cfg || typeof cfg.command !== \"string\" || cfg.command.trim() === \"\") continue;\n out.push({ ...cfg, event, scope, source });\n }\n }\n}\n\n/** Match field is an ANCHORED regex — `\"file\"` won't trigger on `read_file`; use `\".*file\"`. 
*/\nexport function matchesTool(hook: ResolvedHook, toolName: string): boolean {\n if (hook.event !== \"PreToolUse\" && hook.event !== \"PostToolUse\") return true;\n const m = hook.match;\n if (!m || m === \"*\") return true;\n try {\n const re = new RegExp(`^(?:${m})$`);\n return re.test(toolName);\n } catch {\n /* malformed regex → don't fire (safer than firing on every tool) */\n return false;\n }\n}\n\n/** Payload envelope passed to hook stdin. */\nexport interface HookPayload {\n event: HookEvent;\n cwd: string;\n toolName?: string;\n toolArgs?: unknown;\n toolResult?: string;\n prompt?: string;\n lastAssistantText?: string;\n turn?: number;\n}\n\n/** Test seam — same shape as Node's spawn but returns a Promise of the raw outcome bits. */\nexport interface HookSpawnInput {\n command: string;\n cwd: string;\n stdin: string;\n timeoutMs: number;\n}\n\nexport interface HookSpawnResult {\n exitCode: number | null;\n stdout: string;\n stderr: string;\n timedOut: boolean;\n /** True iff spawn() itself failed (ENOENT, EACCES, …). */\n spawnError?: Error;\n /** Output capped at byte limit — hook ran to completion but consumers see clipped view. */\n truncated?: boolean;\n}\n\n/** Per-stream cap — bounds heap exposure to a runaway child between spawn and timeout. */\nconst HOOK_OUTPUT_CAP_BYTES = 256 * 1024;\n\nexport type HookSpawner = (input: HookSpawnInput) => Promise<HookSpawnResult>;\n\n/** `shell: true` — hook is a shell command by contract; pipes / `&&` / env expansion must work. 
*/\nfunction defaultSpawner(input: HookSpawnInput): Promise<HookSpawnResult> {\n return new Promise<HookSpawnResult>((resolve) => {\n const child = spawn(input.command, {\n cwd: input.cwd,\n shell: true,\n stdio: [\"pipe\", \"pipe\", \"pipe\"],\n });\n // Collect raw bytes per stream and decode once at close so a\n // multi-byte UTF-8 sequence split across data chunks doesn't\n // corrupt — same approach shell.ts uses for run_command output.\n const stdoutChunks: Buffer[] = [];\n const stderrChunks: Buffer[] = [];\n let stdoutBytes = 0;\n let stderrBytes = 0;\n let truncated = false;\n let timedOut = false;\n const timer = setTimeout(() => {\n timedOut = true;\n child.kill(\"SIGTERM\");\n // SIGTERM may not land on Windows for shell children — followed\n // by a hard kill a moment later if the process is still around.\n setTimeout(() => {\n try {\n child.kill(\"SIGKILL\");\n } catch {\n /* already gone */\n }\n }, 500);\n }, input.timeoutMs);\n\n const onChunk = (kind: \"stdout\" | \"stderr\", chunk: Buffer) => {\n const target = kind === \"stdout\" ? stdoutChunks : stderrChunks;\n const seen = kind === \"stdout\" ? 
stdoutBytes : stderrBytes;\n if (seen >= HOOK_OUTPUT_CAP_BYTES) {\n truncated = true;\n return;\n }\n const remaining = HOOK_OUTPUT_CAP_BYTES - seen;\n if (chunk.length > remaining) {\n target.push(chunk.subarray(0, remaining));\n if (kind === \"stdout\") stdoutBytes = HOOK_OUTPUT_CAP_BYTES;\n else stderrBytes = HOOK_OUTPUT_CAP_BYTES;\n truncated = true;\n } else {\n target.push(chunk);\n if (kind === \"stdout\") stdoutBytes += chunk.length;\n else stderrBytes += chunk.length;\n }\n };\n child.stdout.on(\"data\", (chunk: Buffer) => onChunk(\"stdout\", chunk));\n child.stderr.on(\"data\", (chunk: Buffer) => onChunk(\"stderr\", chunk));\n child.once(\"error\", (err) => {\n clearTimeout(timer);\n resolve({\n exitCode: null,\n stdout: Buffer.concat(stdoutChunks).toString(\"utf8\"),\n stderr: Buffer.concat(stderrChunks).toString(\"utf8\"),\n timedOut: false,\n spawnError: err,\n truncated: truncated || undefined,\n });\n });\n child.once(\"close\", (code) => {\n clearTimeout(timer);\n resolve({\n exitCode: code,\n stdout: Buffer.concat(stdoutChunks).toString(\"utf8\").trim(),\n stderr: Buffer.concat(stderrChunks).toString(\"utf8\").trim(),\n timedOut,\n truncated: truncated || undefined,\n });\n });\n\n try {\n child.stdin.write(input.stdin);\n child.stdin.end();\n } catch {\n /* stdin write can race with spawn errors; the close handler\n * still fires with exit 0/null */\n }\n });\n}\n\nexport function formatHookOutcomeMessage(outcome: HookOutcome): string {\n if (outcome.decision === \"pass\") return \"\";\n const detail = (outcome.stderr || outcome.stdout || \"\").trim();\n const tag = `${outcome.hook.scope}/${outcome.hook.event}`;\n const cmd =\n outcome.hook.command.length > 60\n ? `${outcome.hook.command.slice(0, 60)}…`\n : outcome.hook.command;\n const truncTag = outcome.truncated ? t(\"hooks.truncated\") : \"\";\n const decision = t(`hooks.decision${capitalize(outcome.decision)}`);\n return detail\n ? 
t(\"hooks.headWithDetail\", { tag, cmd, decision, truncTag, detail })\n : t(\"hooks.head\", { tag, cmd, decision, truncTag });\n}\n\nfunction capitalize(s: string): string {\n return s.charAt(0).toUpperCase() + s.slice(1);\n}\n\nexport function decideOutcome(\n event: HookEvent,\n raw: HookSpawnResult,\n): \"pass\" | \"block\" | \"warn\" | \"timeout\" | \"error\" {\n if (raw.spawnError) return \"error\";\n if (raw.timedOut) return BLOCKING_EVENTS.has(event) ? \"block\" : \"warn\";\n if (raw.exitCode === 0) return \"pass\";\n if (raw.exitCode === 2 && BLOCKING_EVENTS.has(event)) return \"block\";\n return \"warn\";\n}\n\nexport interface RunHooksOptions {\n payload: HookPayload;\n hooks: ResolvedHook[];\n /** Test seam — defaults to a real `spawn`. */\n spawner?: HookSpawner;\n}\n\n/** Stops at first `block` so a gating hook can prevent later hooks running against a phantom success. */\nexport async function runHooks(opts: RunHooksOptions): Promise<HookReport> {\n const spawner = opts.spawner ?? defaultSpawner;\n const event = opts.payload.event;\n const toolName = opts.payload.toolName ?? \"\";\n const matching = opts.hooks.filter((h) => h.event === event && matchesTool(h, toolName));\n\n const outcomes: HookOutcome[] = [];\n let blocked = false;\n const stdin = `${JSON.stringify(opts.payload)}\\n`;\n\n for (const hook of matching) {\n const start = Date.now();\n const timeoutMs = hook.timeout ?? DEFAULT_TIMEOUTS_MS[event];\n const cwd = hook.cwd ?? opts.payload.cwd;\n const raw = await spawner({ command: hook.command, cwd, stdin, timeoutMs });\n const decision = decideOutcome(event, raw);\n outcomes.push({\n hook,\n decision,\n exitCode: raw.exitCode,\n stdout: raw.stdout,\n stderr:\n raw.stderr ||\n (raw.spawnError ? raw.spawnError.message : \"\") ||\n (raw.timedOut ? 
`hook timed out after ${timeoutMs}ms` : \"\"),\n durationMs: Date.now() - start,\n truncated: raw.truncated,\n });\n if (decision === \"block\") {\n blocked = true;\n break;\n }\n }\n\n return { event, outcomes, blocked };\n}\n"],"mappings":";;;;;;;AAEA,SAAS,aAAa;AACtB,SAAS,YAAY,oBAAoB;AACzC,SAAS,eAAe;AACxB,SAAS,YAAY;AAMd,IAAM,cAAoC;AAAA,EAC/C;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAGA,IAAM,kBAA0C,oBAAI,IAAI,CAAC,cAAc,kBAAkB,CAAC;AAG1F,IAAM,sBAAiD;AAAA,EACrD,YAAY;AAAA,EACZ,kBAAkB;AAAA,EAClB,aAAa;AAAA,EACb,MAAM;AACR;AAsDO,IAAM,yBAAyB;AAC/B,IAAM,wBAAwB;AAG9B,SAAS,mBAAmB,iBAAkC;AACnE,SAAO,KAAK,mBAAmB,QAAQ,GAAG,uBAAuB,sBAAsB;AACzF;AAGO,SAAS,oBAAoB,aAA6B;AAC/D,SAAO,KAAK,aAAa,uBAAuB,sBAAsB;AACxE;AAEA,SAAS,iBAAiB,MAAmC;AAC3D,MAAI,CAAC,WAAW,IAAI,EAAG,QAAO;AAC9B,MAAI;AACF,UAAM,MAAM,aAAa,MAAM,MAAM;AACrC,UAAM,SAAS,KAAK,MAAM,GAAG;AAC7B,QAAI,UAAU,OAAO,WAAW,SAAU,QAAO;AAAA,EACnD,QAAQ;AAAA,EAGR;AACA,SAAO;AACT;AAUO,SAAS,UAAU,OAAgC,CAAC,GAAmB;AAC5E,QAAM,MAAsB,CAAC;AAC7B,MAAI,KAAK,aAAa;AACpB,UAAM,WAAW,oBAAoB,KAAK,WAAW;AACrD,UAAMA,YAAW,iBAAiB,QAAQ;AAC1C,QAAIA,UAAU,gBAAe,KAAKA,WAAU,WAAW,QAAQ;AAAA,EACjE;AACA,QAAM,aAAa,mBAAmB,KAAK,OAAO;AAClD,QAAM,WAAW,iBAAiB,UAAU;AAC5C,MAAI,SAAU,gBAAe,KAAK,UAAU,UAAU,UAAU;AAChE,SAAO;AACT;AAEA,SAAS,eACP,KACA,UACA,OACA,QACM;AACN,MAAI,CAAC,SAAS,MAAO;AACrB,aAAW,SAAS,aAAa;AAC/B,UAAM,OAAO,SAAS,MAAM,KAAK;AACjC,QAAI,CAAC,MAAM,QAAQ,IAAI,EAAG;AAC1B,eAAW,OAAO,MAAM;AACtB,UAAI,CAAC,OAAO,OAAO,IAAI,YAAY,YAAY,IAAI,QAAQ,KAAK,MAAM,GAAI;AAC1E,UAAI,KAAK,EAAE,GAAG,KAAK,OAAO,OAAO,OAAO,CAAC;AAAA,IAC3C;AAAA,EACF;AACF;AAGO,SAAS,YAAY,MAAoB,UAA2B;AACzE,MAAI,KAAK,UAAU,gBAAgB,KAAK,UAAU,cAAe,QAAO;AACxE,QAAM,IAAI,KAAK;AACf,MAAI,CAAC,KAAK,MAAM,IAAK,QAAO;AAC5B,MAAI;AACF,UAAM,KAAK,IAAI,OAAO,OAAO,CAAC,IAAI;AAClC,WAAO,GAAG,KAAK,QAAQ;AAAA,EACzB,QAAQ;AAEN,WAAO;AAAA,EACT;AACF;AAkCA,IAAM,wBAAwB,MAAM;AAKpC,SAAS,eAAe,OAAiD;AACvE,SAAO,IAAI,QAAyB,CAAC,YAAY;AAC/C,UAAM,QAAQ,MAAM,MAAM,SAAS;AAAA,MACjC,KAAK,MAAM;AAAA,MACX,OAAO;AAAA,MACP,OAAO,CAAC,QAAQ,QAAQ,MAAM;AAAA,IAChC,CAAC;AAID,UAAM,eAAyB,CAAC;AAChC,UAA
M,eAAyB,CAAC;AAChC,QAAI,cAAc;AAClB,QAAI,cAAc;AAClB,QAAI,YAAY;AAChB,QAAI,WAAW;AACf,UAAM,QAAQ,WAAW,MAAM;AAC7B,iBAAW;AACX,YAAM,KAAK,SAAS;AAGpB,iBAAW,MAAM;AACf,YAAI;AACF,gBAAM,KAAK,SAAS;AAAA,QACtB,QAAQ;AAAA,QAER;AAAA,MACF,GAAG,GAAG;AAAA,IACR,GAAG,MAAM,SAAS;AAElB,UAAM,UAAU,CAAC,MAA2B,UAAkB;AAC5D,YAAM,SAAS,SAAS,WAAW,eAAe;AAClD,YAAM,OAAO,SAAS,WAAW,cAAc;AAC/C,UAAI,QAAQ,uBAAuB;AACjC,oBAAY;AACZ;AAAA,MACF;AACA,YAAM,YAAY,wBAAwB;AAC1C,UAAI,MAAM,SAAS,WAAW;AAC5B,eAAO,KAAK,MAAM,SAAS,GAAG,SAAS,CAAC;AACxC,YAAI,SAAS,SAAU,eAAc;AAAA,YAChC,eAAc;AACnB,oBAAY;AAAA,MACd,OAAO;AACL,eAAO,KAAK,KAAK;AACjB,YAAI,SAAS,SAAU,gBAAe,MAAM;AAAA,YACvC,gBAAe,MAAM;AAAA,MAC5B;AAAA,IACF;AACA,UAAM,OAAO,GAAG,QAAQ,CAAC,UAAkB,QAAQ,UAAU,KAAK,CAAC;AACnE,UAAM,OAAO,GAAG,QAAQ,CAAC,UAAkB,QAAQ,UAAU,KAAK,CAAC;AACnE,UAAM,KAAK,SAAS,CAAC,QAAQ;AAC3B,mBAAa,KAAK;AAClB,cAAQ;AAAA,QACN,UAAU;AAAA,QACV,QAAQ,OAAO,OAAO,YAAY,EAAE,SAAS,MAAM;AAAA,QACnD,QAAQ,OAAO,OAAO,YAAY,EAAE,SAAS,MAAM;AAAA,QACnD,UAAU;AAAA,QACV,YAAY;AAAA,QACZ,WAAW,aAAa;AAAA,MAC1B,CAAC;AAAA,IACH,CAAC;AACD,UAAM,KAAK,SAAS,CAAC,SAAS;AAC5B,mBAAa,KAAK;AAClB,cAAQ;AAAA,QACN,UAAU;AAAA,QACV,QAAQ,OAAO,OAAO,YAAY,EAAE,SAAS,MAAM,EAAE,KAAK;AAAA,QAC1D,QAAQ,OAAO,OAAO,YAAY,EAAE,SAAS,MAAM,EAAE,KAAK;AAAA,QAC1D;AAAA,QACA,WAAW,aAAa;AAAA,MAC1B,CAAC;AAAA,IACH,CAAC;AAED,QAAI;AACF,YAAM,MAAM,MAAM,MAAM,KAAK;AAC7B,YAAM,MAAM,IAAI;AAAA,IAClB,QAAQ;AAAA,IAGR;AAAA,EACF,CAAC;AACH;AAEO,SAAS,yBAAyB,SAA8B;AACrE,MAAI,QAAQ,aAAa,OAAQ,QAAO;AACxC,QAAM,UAAU,QAAQ,UAAU,QAAQ,UAAU,IAAI,KAAK;AAC7D,QAAM,MAAM,GAAG,QAAQ,KAAK,KAAK,IAAI,QAAQ,KAAK,KAAK;AACvD,QAAM,MACJ,QAAQ,KAAK,QAAQ,SAAS,KAC1B,GAAG,QAAQ,KAAK,QAAQ,MAAM,GAAG,EAAE,CAAC,WACpC,QAAQ,KAAK;AACnB,QAAM,WAAW,QAAQ,YAAY,EAAE,iBAAiB,IAAI;AAC5D,QAAM,WAAW,EAAE,iBAAiB,WAAW,QAAQ,QAAQ,CAAC,EAAE;AAClE,SAAO,SACH,EAAE,wBAAwB,EAAE,KAAK,KAAK,UAAU,UAAU,OAAO,CAAC,IAClE,EAAE,cAAc,EAAE,KAAK,KAAK,UAAU,SAAS,CAAC;AACtD;AAEA,SAAS,WAAW,GAAmB;AACrC,SAAO,EAAE,OAAO,CAAC,EAAE,YAAY,IAAI,EAAE,MAAM,CAAC;AAC9C;AAEO,SAAS,cACd,OACA,KACiD;AACjD,MAAI,IAAI,WAAY,QAAO;AAC3B,MAAI,IAAI,SAAU,QAAO,gBAAgB,IAA
I,KAAK,IAAI,UAAU;AAChE,MAAI,IAAI,aAAa,EAAG,QAAO;AAC/B,MAAI,IAAI,aAAa,KAAK,gBAAgB,IAAI,KAAK,EAAG,QAAO;AAC7D,SAAO;AACT;AAUA,eAAsB,SAAS,MAA4C;AACzE,QAAM,UAAU,KAAK,WAAW;AAChC,QAAM,QAAQ,KAAK,QAAQ;AAC3B,QAAM,WAAW,KAAK,QAAQ,YAAY;AAC1C,QAAM,WAAW,KAAK,MAAM,OAAO,CAAC,MAAM,EAAE,UAAU,SAAS,YAAY,GAAG,QAAQ,CAAC;AAEvF,QAAM,WAA0B,CAAC;AACjC,MAAI,UAAU;AACd,QAAM,QAAQ,GAAG,KAAK,UAAU,KAAK,OAAO,CAAC;AAAA;AAE7C,aAAW,QAAQ,UAAU;AAC3B,UAAM,QAAQ,KAAK,IAAI;AACvB,UAAM,YAAY,KAAK,WAAW,oBAAoB,KAAK;AAC3D,UAAM,MAAM,KAAK,OAAO,KAAK,QAAQ;AACrC,UAAM,MAAM,MAAM,QAAQ,EAAE,SAAS,KAAK,SAAS,KAAK,OAAO,UAAU,CAAC;AAC1E,UAAM,WAAW,cAAc,OAAO,GAAG;AACzC,aAAS,KAAK;AAAA,MACZ;AAAA,MACA;AAAA,MACA,UAAU,IAAI;AAAA,MACd,QAAQ,IAAI;AAAA,MACZ,QACE,IAAI,WACH,IAAI,aAAa,IAAI,WAAW,UAAU,QAC1C,IAAI,WAAW,wBAAwB,SAAS,OAAO;AAAA,MAC1D,YAAY,KAAK,IAAI,IAAI;AAAA,MACzB,WAAW,IAAI;AAAA,IACjB,CAAC;AACD,QAAI,aAAa,SAAS;AACxB,gBAAU;AACV;AAAA,IACF;AAAA,EACF;AAEA,SAAO,EAAE,OAAO,UAAU,QAAQ;AACpC;","names":["settings"]}
|
|
@@ -0,0 +1,232 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
import { createRequire as __cr } from 'node:module'; if (typeof globalThis.require === 'undefined') { globalThis.require = __cr(import.meta.url); }
|
|
3
|
+
import {
|
|
4
|
+
cacheSavingsUsd,
|
|
5
|
+
claudeEquivalentCost,
|
|
6
|
+
costUsd
|
|
7
|
+
} from "./chunk-ANJSUESV.js";
|
|
8
|
+
|
|
9
|
+
// src/telemetry/usage.ts
|
|
10
|
+
import {
|
|
11
|
+
appendFileSync,
|
|
12
|
+
closeSync,
|
|
13
|
+
existsSync,
|
|
14
|
+
fstatSync,
|
|
15
|
+
mkdirSync,
|
|
16
|
+
openSync,
|
|
17
|
+
readFileSync,
|
|
18
|
+
readSync,
|
|
19
|
+
renameSync,
|
|
20
|
+
statSync,
|
|
21
|
+
unlinkSync,
|
|
22
|
+
writeFileSync
|
|
23
|
+
} from "fs";
|
|
24
|
+
import { homedir } from "os";
|
|
25
|
+
import { dirname, join } from "path";
|
|
26
|
+
function defaultUsageLogPath(homeDirOverride) {
|
|
27
|
+
return join(homeDirOverride ?? homedir(), ".luckerr", "usage.jsonl");
|
|
28
|
+
}
|
|
29
|
+
var USAGE_COMPACTION_THRESHOLD_BYTES = 5 * 1024 * 1024;
|
|
30
|
+
var USAGE_RETENTION_DAYS = 365;
|
|
31
|
+
function compactUsageLogIfLarge(path, now) {
|
|
32
|
+
let raw;
|
|
33
|
+
try {
|
|
34
|
+
const fd = openSync(path, "r");
|
|
35
|
+
try {
|
|
36
|
+
const stat = fstatSync(fd);
|
|
37
|
+
if (stat.size < USAGE_COMPACTION_THRESHOLD_BYTES) return;
|
|
38
|
+
const buf = Buffer.alloc(stat.size);
|
|
39
|
+
let read = 0;
|
|
40
|
+
while (read < stat.size) {
|
|
41
|
+
const n = readSync(fd, buf, read, stat.size - read, read);
|
|
42
|
+
if (n <= 0) break;
|
|
43
|
+
read += n;
|
|
44
|
+
}
|
|
45
|
+
raw = buf.toString("utf8", 0, read);
|
|
46
|
+
} finally {
|
|
47
|
+
closeSync(fd);
|
|
48
|
+
}
|
|
49
|
+
} catch {
|
|
50
|
+
return;
|
|
51
|
+
}
|
|
52
|
+
const cutoff = now - USAGE_RETENTION_DAYS * 24 * 60 * 60 * 1e3;
|
|
53
|
+
const lines = raw.split(/\r?\n/);
|
|
54
|
+
const kept = [];
|
|
55
|
+
for (const line of lines) {
|
|
56
|
+
if (!line.trim()) continue;
|
|
57
|
+
try {
|
|
58
|
+
const rec = JSON.parse(line);
|
|
59
|
+
if (isValidRecord(rec) && rec.ts >= cutoff) kept.push(line);
|
|
60
|
+
} catch {
|
|
61
|
+
}
|
|
62
|
+
}
|
|
63
|
+
if (kept.length === lines.filter((l) => l.trim()).length) return;
|
|
64
|
+
const tmp = `${path}.compacting`;
|
|
65
|
+
try {
|
|
66
|
+
writeFileSync(tmp, kept.length > 0 ? `${kept.join("\n")}
|
|
67
|
+
` : "", "utf8");
|
|
68
|
+
renameSync(tmp, path);
|
|
69
|
+
} catch {
|
|
70
|
+
try {
|
|
71
|
+
unlinkSync(tmp);
|
|
72
|
+
} catch {
|
|
73
|
+
}
|
|
74
|
+
}
|
|
75
|
+
}
|
|
76
|
+
function appendUsage(input) {
|
|
77
|
+
const record = {
|
|
78
|
+
ts: input.now ?? Date.now(),
|
|
79
|
+
session: input.session,
|
|
80
|
+
model: input.model,
|
|
81
|
+
promptTokens: input.usage.promptTokens,
|
|
82
|
+
completionTokens: input.usage.completionTokens,
|
|
83
|
+
cacheHitTokens: input.usage.promptCacheHitTokens,
|
|
84
|
+
cacheMissTokens: input.usage.promptCacheMissTokens,
|
|
85
|
+
costUsd: costUsd(input.model, input.usage),
|
|
86
|
+
claudeEquivUsd: claudeEquivalentCost(input.usage)
|
|
87
|
+
};
|
|
88
|
+
if (input.kind === "subagent") record.kind = "subagent";
|
|
89
|
+
if (input.subagent) record.subagent = input.subagent;
|
|
90
|
+
const path = input.path ?? defaultUsageLogPath();
|
|
91
|
+
try {
|
|
92
|
+
mkdirSync(dirname(path), { recursive: true });
|
|
93
|
+
appendFileSync(path, `${JSON.stringify(record)}
|
|
94
|
+
`, "utf8");
|
|
95
|
+
compactUsageLogIfLarge(path, record.ts);
|
|
96
|
+
} catch {
|
|
97
|
+
}
|
|
98
|
+
return record;
|
|
99
|
+
}
|
|
100
|
+
function readUsageLog(path = defaultUsageLogPath()) {
|
|
101
|
+
if (!existsSync(path)) return [];
|
|
102
|
+
let raw;
|
|
103
|
+
try {
|
|
104
|
+
raw = readFileSync(path, "utf8");
|
|
105
|
+
} catch {
|
|
106
|
+
return [];
|
|
107
|
+
}
|
|
108
|
+
const out = [];
|
|
109
|
+
for (const line of raw.split(/\r?\n/)) {
|
|
110
|
+
if (!line.trim()) continue;
|
|
111
|
+
try {
|
|
112
|
+
const rec = JSON.parse(line);
|
|
113
|
+
if (isValidRecord(rec)) out.push(rec);
|
|
114
|
+
} catch {
|
|
115
|
+
}
|
|
116
|
+
}
|
|
117
|
+
return out;
|
|
118
|
+
}
|
|
119
|
+
function isValidRecord(rec) {
|
|
120
|
+
if (!rec || typeof rec !== "object") return false;
|
|
121
|
+
const r = rec;
|
|
122
|
+
return typeof r.ts === "number" && typeof r.model === "string" && typeof r.promptTokens === "number" && typeof r.completionTokens === "number" && typeof r.cacheHitTokens === "number" && typeof r.cacheMissTokens === "number" && typeof r.costUsd === "number" && typeof r.claudeEquivUsd === "number";
|
|
123
|
+
}
|
|
124
|
+
function bucketCacheHitRatio(b) {
|
|
125
|
+
const denom = b.cacheHitTokens + b.cacheMissTokens;
|
|
126
|
+
return denom > 0 ? b.cacheHitTokens / denom : 0;
|
|
127
|
+
}
|
|
128
|
+
function bucketSavingsFraction(b) {
|
|
129
|
+
return b.claudeEquivUsd > 0 ? 1 - b.costUsd / b.claudeEquivUsd : 0;
|
|
130
|
+
}
|
|
131
|
+
function emptyBucket(label, since) {
|
|
132
|
+
return {
|
|
133
|
+
label,
|
|
134
|
+
since,
|
|
135
|
+
turns: 0,
|
|
136
|
+
promptTokens: 0,
|
|
137
|
+
completionTokens: 0,
|
|
138
|
+
cacheHitTokens: 0,
|
|
139
|
+
cacheMissTokens: 0,
|
|
140
|
+
costUsd: 0,
|
|
141
|
+
claudeEquivUsd: 0,
|
|
142
|
+
cacheSavingsUsd: 0
|
|
143
|
+
};
|
|
144
|
+
}
|
|
145
|
+
function addToBucket(b, r) {
|
|
146
|
+
b.turns += 1;
|
|
147
|
+
b.promptTokens += r.promptTokens;
|
|
148
|
+
b.completionTokens += r.completionTokens;
|
|
149
|
+
b.cacheHitTokens += r.cacheHitTokens;
|
|
150
|
+
b.cacheMissTokens += r.cacheMissTokens;
|
|
151
|
+
b.costUsd += r.costUsd;
|
|
152
|
+
b.claudeEquivUsd += r.claudeEquivUsd;
|
|
153
|
+
b.cacheSavingsUsd += cacheSavingsUsd(r.model, r.cacheHitTokens);
|
|
154
|
+
}
|
|
155
|
+
function aggregateUsage(records, opts = {}) {
|
|
156
|
+
const now = opts.now ?? Date.now();
|
|
157
|
+
const day = 24 * 60 * 60 * 1e3;
|
|
158
|
+
const today = emptyBucket("today", now - day);
|
|
159
|
+
const week = emptyBucket("week", now - 7 * day);
|
|
160
|
+
const month = emptyBucket("month", now - 30 * day);
|
|
161
|
+
const all = emptyBucket("all-time", 0);
|
|
162
|
+
const modelCounts = /* @__PURE__ */ new Map();
|
|
163
|
+
const sessionCounts = /* @__PURE__ */ new Map();
|
|
164
|
+
let firstSeen = null;
|
|
165
|
+
let lastSeen = null;
|
|
166
|
+
const skillCounts = /* @__PURE__ */ new Map();
|
|
167
|
+
let subagentTotal = 0;
|
|
168
|
+
let subagentCost = 0;
|
|
169
|
+
let subagentDuration = 0;
|
|
170
|
+
for (const r of records) {
|
|
171
|
+
addToBucket(all, r);
|
|
172
|
+
if (r.ts >= today.since) addToBucket(today, r);
|
|
173
|
+
if (r.ts >= week.since) addToBucket(week, r);
|
|
174
|
+
if (r.ts >= month.since) addToBucket(month, r);
|
|
175
|
+
modelCounts.set(r.model, (modelCounts.get(r.model) ?? 0) + 1);
|
|
176
|
+
const sessKey = r.session ?? "(ephemeral)";
|
|
177
|
+
sessionCounts.set(sessKey, (sessionCounts.get(sessKey) ?? 0) + 1);
|
|
178
|
+
if (firstSeen === null || r.ts < firstSeen) firstSeen = r.ts;
|
|
179
|
+
if (lastSeen === null || r.ts > lastSeen) lastSeen = r.ts;
|
|
180
|
+
if (r.kind === "subagent") {
|
|
181
|
+
subagentTotal += 1;
|
|
182
|
+
subagentCost += r.costUsd;
|
|
183
|
+
const dur = r.subagent?.durationMs ?? 0;
|
|
184
|
+
subagentDuration += dur;
|
|
185
|
+
const key = r.subagent?.skillName?.trim() || "(adhoc)";
|
|
186
|
+
const prev = skillCounts.get(key) ?? { count: 0, costUsd: 0, durationMs: 0 };
|
|
187
|
+
prev.count += 1;
|
|
188
|
+
prev.costUsd += r.costUsd;
|
|
189
|
+
prev.durationMs += dur;
|
|
190
|
+
skillCounts.set(key, prev);
|
|
191
|
+
}
|
|
192
|
+
}
|
|
193
|
+
const byModel = Array.from(modelCounts.entries()).map(([model, turns]) => ({ model, turns })).sort((a, b) => b.turns - a.turns);
|
|
194
|
+
const bySession = Array.from(sessionCounts.entries()).map(([session, turns]) => ({ session, turns })).sort((a, b) => b.turns - a.turns);
|
|
195
|
+
const subagents = subagentTotal > 0 ? {
|
|
196
|
+
total: subagentTotal,
|
|
197
|
+
costUsd: subagentCost,
|
|
198
|
+
totalDurationMs: subagentDuration,
|
|
199
|
+
bySkill: Array.from(skillCounts.entries()).map(([skillName, v]) => ({ skillName, ...v })).sort((a, b) => b.count - a.count)
|
|
200
|
+
} : void 0;
|
|
201
|
+
return {
|
|
202
|
+
buckets: [today, week, month, all],
|
|
203
|
+
byModel,
|
|
204
|
+
bySession,
|
|
205
|
+
firstSeen,
|
|
206
|
+
lastSeen,
|
|
207
|
+
subagents
|
|
208
|
+
};
|
|
209
|
+
}
|
|
210
|
+
function formatLogSize(path = defaultUsageLogPath()) {
|
|
211
|
+
if (!existsSync(path)) return "";
|
|
212
|
+
try {
|
|
213
|
+
const s = statSync(path);
|
|
214
|
+
const bytes = s.size;
|
|
215
|
+
if (bytes < 1024) return `${bytes} B`;
|
|
216
|
+
if (bytes < 1024 * 1024) return `${(bytes / 1024).toFixed(1)} KB`;
|
|
217
|
+
return `${(bytes / (1024 * 1024)).toFixed(1)} MB`;
|
|
218
|
+
} catch {
|
|
219
|
+
return "";
|
|
220
|
+
}
|
|
221
|
+
}
|
|
222
|
+
|
|
223
|
+
export {
|
|
224
|
+
defaultUsageLogPath,
|
|
225
|
+
appendUsage,
|
|
226
|
+
readUsageLog,
|
|
227
|
+
bucketCacheHitRatio,
|
|
228
|
+
bucketSavingsFraction,
|
|
229
|
+
aggregateUsage,
|
|
230
|
+
formatLogSize
|
|
231
|
+
};
|
|
232
|
+
//# sourceMappingURL=chunk-M4K6U37F.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../../src/telemetry/usage.ts"],"sourcesContent":["/** Append-only JSONL of per-turn tokens + cost; best-effort writes, never blocks the turn. No prompts/completions logged. */\n\nimport {\n appendFileSync,\n closeSync,\n existsSync,\n fstatSync,\n mkdirSync,\n openSync,\n readFileSync,\n readSync,\n renameSync,\n statSync,\n unlinkSync,\n writeFileSync,\n} from \"node:fs\";\nimport { homedir } from \"node:os\";\nimport { dirname, join } from \"node:path\";\nimport type { Usage } from \"../client.js\";\nimport {\n CLAUDE_SONNET_PRICING,\n DEEPSEEK_PRICING,\n cacheSavingsUsd,\n claudeEquivalentCost,\n costUsd,\n} from \"./stats.js\";\n\n/** One turn's snapshot — serialized verbatim as a JSONL line. */\nexport interface UsageRecord {\n /** Epoch millis when the record was written. */\n ts: number;\n /** Session name if the turn ran inside a persisted session, `null` for ephemeral. */\n session: string | null;\n /** Model id the turn ran against (drives the pricing lookup). */\n model: string;\n promptTokens: number;\n completionTokens: number;\n cacheHitTokens: number;\n cacheMissTokens: number;\n /** Total cost of the turn in USD. */\n costUsd: number;\n /** What the same turn would have cost at Claude Sonnet 4.6 rates. */\n claudeEquivUsd: number;\n /** Absent on legacy records — treat as \"turn\" when missing. */\n kind?: \"turn\" | \"subagent\";\n /** Present when `kind === \"subagent\"`. Attribution metadata for the /stats roll-up. */\n subagent?: {\n /** Skill that spawned it, when the spawn came from a `runAs: subagent` skill. */\n skillName?: string;\n /** First ~60 chars of the task prompt — enough context to recognize a run, never the full text. */\n taskPreview: string;\n /** Tool calls the child loop dispatched before returning. */\n toolIters: number;\n /** Wall-clock ms. */\n durationMs: number;\n };\n}\n\n/** Where the log lives. Tests override via `opts.path`. 
*/\nexport function defaultUsageLogPath(homeDirOverride?: string): string {\n return join(homeDirOverride ?? homedir(), \".luckerr\", \"usage.jsonl\");\n}\n\nexport interface AppendUsageInput {\n session: string | null;\n model: string;\n usage: Usage;\n /** Override the timestamp (tests). */\n now?: number;\n /** Override the log path (tests). */\n path?: string;\n /** When appending a subagent summary row, set `kind: \"subagent\"` and populate `subagent`. */\n kind?: \"turn\" | \"subagent\";\n subagent?: UsageRecord[\"subagent\"];\n}\n\nconst USAGE_COMPACTION_THRESHOLD_BYTES = 5 * 1024 * 1024;\nconst USAGE_RETENTION_DAYS = 365;\n\nfunction compactUsageLogIfLarge(path: string, now: number): void {\n // Open once for the size check + read so they bind to the same fd\n // (CodeQL js/file-system-race). Concurrent appenders that grow the\n // log between check and read can no longer cause us to act on a\n // stale size and rewrite based on partial content.\n let raw: string;\n try {\n const fd = openSync(path, \"r\");\n try {\n const stat = fstatSync(fd);\n if (stat.size < USAGE_COMPACTION_THRESHOLD_BYTES) return;\n const buf = Buffer.alloc(stat.size);\n let read = 0;\n while (read < stat.size) {\n const n = readSync(fd, buf, read, stat.size - read, read);\n if (n <= 0) break;\n read += n;\n }\n raw = buf.toString(\"utf8\", 0, read);\n } finally {\n closeSync(fd);\n }\n } catch {\n return;\n }\n const cutoff = now - USAGE_RETENTION_DAYS * 24 * 60 * 60 * 1000;\n const lines = raw.split(/\\r?\\n/);\n const kept: string[] = [];\n for (const line of lines) {\n if (!line.trim()) continue;\n try {\n const rec = JSON.parse(line);\n if (isValidRecord(rec) && rec.ts >= cutoff) kept.push(line);\n } catch {\n /* skip malformed */\n }\n }\n // No-op when nothing aged out — avoids rewrite storms on fresh logs.\n if (kept.length === lines.filter((l) => l.trim()).length) return;\n // Write to a sibling tmp path then rename — atomic from a reader's\n // POV and severs CodeQL's 
stat→write taint chain. Concurrent\n // appenders during the compaction window lose their entries; we\n // accept that for a best-effort usage log.\n const tmp = `${path}.compacting`;\n try {\n writeFileSync(tmp, kept.length > 0 ? `${kept.join(\"\\n\")}\\n` : \"\", \"utf8\");\n renameSync(tmp, path);\n } catch {\n try {\n unlinkSync(tmp);\n } catch {\n /* tmp may not exist — ignore */\n }\n }\n}\n\n/** Returns the record so tests can assert cost fields without re-reading the log. */\nexport function appendUsage(input: AppendUsageInput): UsageRecord {\n const record: UsageRecord = {\n ts: input.now ?? Date.now(),\n session: input.session,\n model: input.model,\n promptTokens: input.usage.promptTokens,\n completionTokens: input.usage.completionTokens,\n cacheHitTokens: input.usage.promptCacheHitTokens,\n cacheMissTokens: input.usage.promptCacheMissTokens,\n costUsd: costUsd(input.model, input.usage),\n claudeEquivUsd: claudeEquivalentCost(input.usage),\n };\n if (input.kind === \"subagent\") record.kind = \"subagent\";\n if (input.subagent) record.subagent = input.subagent;\n\n const path = input.path ?? 
defaultUsageLogPath();\n try {\n mkdirSync(dirname(path), { recursive: true });\n appendFileSync(path, `${JSON.stringify(record)}\\n`, \"utf8\");\n compactUsageLogIfLarge(path, record.ts);\n } catch {\n /* best-effort — disk failure shouldn't break the chat */\n }\n return record;\n}\n\nexport function readUsageLog(path: string = defaultUsageLogPath()): UsageRecord[] {\n if (!existsSync(path)) return [];\n let raw: string;\n try {\n raw = readFileSync(path, \"utf8\");\n } catch {\n return [];\n }\n const out: UsageRecord[] = [];\n for (const line of raw.split(/\\r?\\n/)) {\n if (!line.trim()) continue;\n try {\n const rec = JSON.parse(line);\n if (isValidRecord(rec)) out.push(rec);\n } catch {\n /* skip malformed */\n }\n }\n return out;\n}\n\nfunction isValidRecord(rec: unknown): rec is UsageRecord {\n if (!rec || typeof rec !== \"object\") return false;\n const r = rec as Partial<UsageRecord>;\n return (\n typeof r.ts === \"number\" &&\n typeof r.model === \"string\" &&\n typeof r.promptTokens === \"number\" &&\n typeof r.completionTokens === \"number\" &&\n typeof r.cacheHitTokens === \"number\" &&\n typeof r.cacheMissTokens === \"number\" &&\n typeof r.costUsd === \"number\" &&\n typeof r.claudeEquivUsd === \"number\"\n );\n}\n\n/** One row of the `luckerr stats` dashboard — a rolled-up window. */\nexport interface UsageBucket {\n label: string;\n /** Start of the window as epoch millis. `0` = unbounded (all-time). */\n since: number;\n turns: number;\n promptTokens: number;\n completionTokens: number;\n cacheHitTokens: number;\n cacheMissTokens: number;\n costUsd: number;\n claudeEquivUsd: number;\n /** Recomputed from current pricing each aggregate — intentionally NOT frozen with `costUsd`. */\n cacheSavingsUsd: number;\n}\n\n/** Cache hit ratio for a bucket — zero denominator returns 0. */\nexport function bucketCacheHitRatio(b: UsageBucket): number {\n const denom = b.cacheHitTokens + b.cacheMissTokens;\n return denom > 0 ? 
b.cacheHitTokens / denom : 0;\n}\n\n/** Savings vs Claude as a fraction (0.94 = 94% savings). 0 if Claude cost is 0. */\nexport function bucketSavingsFraction(b: UsageBucket): number {\n return b.claudeEquivUsd > 0 ? 1 - b.costUsd / b.claudeEquivUsd : 0;\n}\n\nfunction emptyBucket(label: string, since: number): UsageBucket {\n return {\n label,\n since,\n turns: 0,\n promptTokens: 0,\n completionTokens: 0,\n cacheHitTokens: 0,\n cacheMissTokens: 0,\n costUsd: 0,\n claudeEquivUsd: 0,\n cacheSavingsUsd: 0,\n };\n}\n\nfunction addToBucket(b: UsageBucket, r: UsageRecord): void {\n b.turns += 1;\n b.promptTokens += r.promptTokens;\n b.completionTokens += r.completionTokens;\n b.cacheHitTokens += r.cacheHitTokens;\n b.cacheMissTokens += r.cacheMissTokens;\n b.costUsd += r.costUsd;\n b.claudeEquivUsd += r.claudeEquivUsd;\n b.cacheSavingsUsd += cacheSavingsUsd(r.model, r.cacheHitTokens);\n}\n\nexport interface AggregateOptions {\n /** Override `Date.now()` for deterministic tests. */\n now?: number;\n}\n\nexport interface UsageAggregate {\n /** Fixed-order rolling windows: today, week, month, all-time. */\n buckets: UsageBucket[];\n /** Model id → turn count. Sorted descending; top entry is the \"most used.\" */\n byModel: Array<{ model: string; turns: number }>;\n /** Session name → turn count. Sorted descending. Null sessions are grouped under `\"(ephemeral)\"`. */\n bySession: Array<{ session: string; turns: number }>;\n /** Earliest record's ts, or `null` when the log is empty. Drives \"saved $X since <date>\". */\n firstSeen: number | null;\n /** Latest record's ts, or `null` when the log is empty. */\n lastSeen: number | null;\n /** Undefined when no subagent records exist; counts spawns, not internal child-loop turns. */\n subagents?: SubagentAggregate;\n}\n\n/** Rolled-up view of all `kind: \"subagent\"` records. */\nexport interface SubagentAggregate {\n total: number;\n costUsd: number;\n totalDurationMs: number;\n /** Per-skill breakdown. 
Records without `skillName` (raw spawn_subagent calls) group under `\"(adhoc)\"`. */\n bySkill: Array<{ skillName: string; count: number; costUsd: number; durationMs: number }>;\n}\n\n/** Rolling 24h/7d/30d windows — avoids \"it's 00:03, 'today' is empty\" surprises. */\nexport function aggregateUsage(\n records: UsageRecord[],\n opts: AggregateOptions = {},\n): UsageAggregate {\n const now = opts.now ?? Date.now();\n const day = 24 * 60 * 60 * 1000;\n const today = emptyBucket(\"today\", now - day);\n const week = emptyBucket(\"week\", now - 7 * day);\n const month = emptyBucket(\"month\", now - 30 * day);\n const all = emptyBucket(\"all-time\", 0);\n\n const modelCounts = new Map<string, number>();\n const sessionCounts = new Map<string, number>();\n let firstSeen: number | null = null;\n let lastSeen: number | null = null;\n const skillCounts = new Map<string, { count: number; costUsd: number; durationMs: number }>();\n let subagentTotal = 0;\n let subagentCost = 0;\n let subagentDuration = 0;\n\n for (const r of records) {\n addToBucket(all, r);\n if (r.ts >= today.since) addToBucket(today, r);\n if (r.ts >= week.since) addToBucket(week, r);\n if (r.ts >= month.since) addToBucket(month, r);\n\n modelCounts.set(r.model, (modelCounts.get(r.model) ?? 0) + 1);\n const sessKey = r.session ?? \"(ephemeral)\";\n sessionCounts.set(sessKey, (sessionCounts.get(sessKey) ?? 0) + 1);\n\n if (firstSeen === null || r.ts < firstSeen) firstSeen = r.ts;\n if (lastSeen === null || r.ts > lastSeen) lastSeen = r.ts;\n\n if (r.kind === \"subagent\") {\n subagentTotal += 1;\n subagentCost += r.costUsd;\n const dur = r.subagent?.durationMs ?? 0;\n subagentDuration += dur;\n const key = r.subagent?.skillName?.trim() || \"(adhoc)\";\n const prev = skillCounts.get(key) ?? 
{ count: 0, costUsd: 0, durationMs: 0 };\n prev.count += 1;\n prev.costUsd += r.costUsd;\n prev.durationMs += dur;\n skillCounts.set(key, prev);\n }\n }\n\n const byModel = Array.from(modelCounts.entries())\n .map(([model, turns]) => ({ model, turns }))\n .sort((a, b) => b.turns - a.turns);\n const bySession = Array.from(sessionCounts.entries())\n .map(([session, turns]) => ({ session, turns }))\n .sort((a, b) => b.turns - a.turns);\n\n const subagents: SubagentAggregate | undefined =\n subagentTotal > 0\n ? {\n total: subagentTotal,\n costUsd: subagentCost,\n totalDurationMs: subagentDuration,\n bySkill: Array.from(skillCounts.entries())\n .map(([skillName, v]) => ({ skillName, ...v }))\n .sort((a, b) => b.count - a.count),\n }\n : undefined;\n\n return {\n buckets: [today, week, month, all],\n byModel,\n bySession,\n firstSeen,\n lastSeen,\n subagents,\n };\n}\n\n/** File-size helper for the stats header — \"1.2 MB\" etc. Returns \"\" if missing. */\nexport function formatLogSize(path: string = defaultUsageLogPath()): string {\n if (!existsSync(path)) return \"\";\n try {\n const s = statSync(path);\n const bytes = s.size;\n if (bytes < 1024) return `${bytes} B`;\n if (bytes < 1024 * 1024) return `${(bytes / 1024).toFixed(1)} KB`;\n return `${(bytes / (1024 * 1024)).toFixed(1)} MB`;\n } catch {\n return \"\";\n }\n}\n\n/** Re-exports for downstream consumers that also want the pricing constants. 
*/\nexport { CLAUDE_SONNET_PRICING, DEEPSEEK_PRICING };\n"],"mappings":";;;;;;;;;AAEA;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,OACK;AACP,SAAS,eAAe;AACxB,SAAS,SAAS,YAAY;AA0CvB,SAAS,oBAAoB,iBAAkC;AACpE,SAAO,KAAK,mBAAmB,QAAQ,GAAG,YAAY,aAAa;AACrE;AAeA,IAAM,mCAAmC,IAAI,OAAO;AACpD,IAAM,uBAAuB;AAE7B,SAAS,uBAAuB,MAAc,KAAmB;AAK/D,MAAI;AACJ,MAAI;AACF,UAAM,KAAK,SAAS,MAAM,GAAG;AAC7B,QAAI;AACF,YAAM,OAAO,UAAU,EAAE;AACzB,UAAI,KAAK,OAAO,iCAAkC;AAClD,YAAM,MAAM,OAAO,MAAM,KAAK,IAAI;AAClC,UAAI,OAAO;AACX,aAAO,OAAO,KAAK,MAAM;AACvB,cAAM,IAAI,SAAS,IAAI,KAAK,MAAM,KAAK,OAAO,MAAM,IAAI;AACxD,YAAI,KAAK,EAAG;AACZ,gBAAQ;AAAA,MACV;AACA,YAAM,IAAI,SAAS,QAAQ,GAAG,IAAI;AAAA,IACpC,UAAE;AACA,gBAAU,EAAE;AAAA,IACd;AAAA,EACF,QAAQ;AACN;AAAA,EACF;AACA,QAAM,SAAS,MAAM,uBAAuB,KAAK,KAAK,KAAK;AAC3D,QAAM,QAAQ,IAAI,MAAM,OAAO;AAC/B,QAAM,OAAiB,CAAC;AACxB,aAAW,QAAQ,OAAO;AACxB,QAAI,CAAC,KAAK,KAAK,EAAG;AAClB,QAAI;AACF,YAAM,MAAM,KAAK,MAAM,IAAI;AAC3B,UAAI,cAAc,GAAG,KAAK,IAAI,MAAM,OAAQ,MAAK,KAAK,IAAI;AAAA,IAC5D,QAAQ;AAAA,IAER;AAAA,EACF;AAEA,MAAI,KAAK,WAAW,MAAM,OAAO,CAAC,MAAM,EAAE,KAAK,CAAC,EAAE,OAAQ;AAK1D,QAAM,MAAM,GAAG,IAAI;AACnB,MAAI;AACF,kBAAc,KAAK,KAAK,SAAS,IAAI,GAAG,KAAK,KAAK,IAAI,CAAC;AAAA,IAAO,IAAI,MAAM;AACxE,eAAW,KAAK,IAAI;AAAA,EACtB,QAAQ;AACN,QAAI;AACF,iBAAW,GAAG;AAAA,IAChB,QAAQ;AAAA,IAER;AAAA,EACF;AACF;AAGO,SAAS,YAAY,OAAsC;AAChE,QAAM,SAAsB;AAAA,IAC1B,IAAI,MAAM,OAAO,KAAK,IAAI;AAAA,IAC1B,SAAS,MAAM;AAAA,IACf,OAAO,MAAM;AAAA,IACb,cAAc,MAAM,MAAM;AAAA,IAC1B,kBAAkB,MAAM,MAAM;AAAA,IAC9B,gBAAgB,MAAM,MAAM;AAAA,IAC5B,iBAAiB,MAAM,MAAM;AAAA,IAC7B,SAAS,QAAQ,MAAM,OAAO,MAAM,KAAK;AAAA,IACzC,gBAAgB,qBAAqB,MAAM,KAAK;AAAA,EAClD;AACA,MAAI,MAAM,SAAS,WAAY,QAAO,OAAO;AAC7C,MAAI,MAAM,SAAU,QAAO,WAAW,MAAM;AAE5C,QAAM,OAAO,MAAM,QAAQ,oBAAoB;AAC/C,MAAI;AACF,cAAU,QAAQ,IAAI,GAAG,EAAE,WAAW,KAAK,CAAC;AAC5C,mBAAe,MAAM,GAAG,KAAK,UAAU,MAAM,CAAC;AAAA,GAAM,MAAM;AAC1D,2BAAuB,MAAM,OAAO,EAAE;AAAA,EACxC,QAAQ;AAAA,EAER;AACA,SAAO;AACT;AAEO,SAAS,aAAa,OAAe,oBAAoB,GAAkB;AAChF,MAAI
,CAAC,WAAW,IAAI,EAAG,QAAO,CAAC;AAC/B,MAAI;AACJ,MAAI;AACF,UAAM,aAAa,MAAM,MAAM;AAAA,EACjC,QAAQ;AACN,WAAO,CAAC;AAAA,EACV;AACA,QAAM,MAAqB,CAAC;AAC5B,aAAW,QAAQ,IAAI,MAAM,OAAO,GAAG;AACrC,QAAI,CAAC,KAAK,KAAK,EAAG;AAClB,QAAI;AACF,YAAM,MAAM,KAAK,MAAM,IAAI;AAC3B,UAAI,cAAc,GAAG,EAAG,KAAI,KAAK,GAAG;AAAA,IACtC,QAAQ;AAAA,IAER;AAAA,EACF;AACA,SAAO;AACT;AAEA,SAAS,cAAc,KAAkC;AACvD,MAAI,CAAC,OAAO,OAAO,QAAQ,SAAU,QAAO;AAC5C,QAAM,IAAI;AACV,SACE,OAAO,EAAE,OAAO,YAChB,OAAO,EAAE,UAAU,YACnB,OAAO,EAAE,iBAAiB,YAC1B,OAAO,EAAE,qBAAqB,YAC9B,OAAO,EAAE,mBAAmB,YAC5B,OAAO,EAAE,oBAAoB,YAC7B,OAAO,EAAE,YAAY,YACrB,OAAO,EAAE,mBAAmB;AAEhC;AAmBO,SAAS,oBAAoB,GAAwB;AAC1D,QAAM,QAAQ,EAAE,iBAAiB,EAAE;AACnC,SAAO,QAAQ,IAAI,EAAE,iBAAiB,QAAQ;AAChD;AAGO,SAAS,sBAAsB,GAAwB;AAC5D,SAAO,EAAE,iBAAiB,IAAI,IAAI,EAAE,UAAU,EAAE,iBAAiB;AACnE;AAEA,SAAS,YAAY,OAAe,OAA4B;AAC9D,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA,OAAO;AAAA,IACP,cAAc;AAAA,IACd,kBAAkB;AAAA,IAClB,gBAAgB;AAAA,IAChB,iBAAiB;AAAA,IACjB,SAAS;AAAA,IACT,gBAAgB;AAAA,IAChB,iBAAiB;AAAA,EACnB;AACF;AAEA,SAAS,YAAY,GAAgB,GAAsB;AACzD,IAAE,SAAS;AACX,IAAE,gBAAgB,EAAE;AACpB,IAAE,oBAAoB,EAAE;AACxB,IAAE,kBAAkB,EAAE;AACtB,IAAE,mBAAmB,EAAE;AACvB,IAAE,WAAW,EAAE;AACf,IAAE,kBAAkB,EAAE;AACtB,IAAE,mBAAmB,gBAAgB,EAAE,OAAO,EAAE,cAAc;AAChE;AAgCO,SAAS,eACd,SACA,OAAyB,CAAC,GACV;AAChB,QAAM,MAAM,KAAK,OAAO,KAAK,IAAI;AACjC,QAAM,MAAM,KAAK,KAAK,KAAK;AAC3B,QAAM,QAAQ,YAAY,SAAS,MAAM,GAAG;AAC5C,QAAM,OAAO,YAAY,QAAQ,MAAM,IAAI,GAAG;AAC9C,QAAM,QAAQ,YAAY,SAAS,MAAM,KAAK,GAAG;AACjD,QAAM,MAAM,YAAY,YAAY,CAAC;AAErC,QAAM,cAAc,oBAAI,IAAoB;AAC5C,QAAM,gBAAgB,oBAAI,IAAoB;AAC9C,MAAI,YAA2B;AAC/B,MAAI,WAA0B;AAC9B,QAAM,cAAc,oBAAI,IAAoE;AAC5F,MAAI,gBAAgB;AACpB,MAAI,eAAe;AACnB,MAAI,mBAAmB;AAEvB,aAAW,KAAK,SAAS;AACvB,gBAAY,KAAK,CAAC;AAClB,QAAI,EAAE,MAAM,MAAM,MAAO,aAAY,OAAO,CAAC;AAC7C,QAAI,EAAE,MAAM,KAAK,MAAO,aAAY,MAAM,CAAC;AAC3C,QAAI,EAAE,MAAM,MAAM,MAAO,aAAY,OAAO,CAAC;AAE7C,gBAAY,IAAI,EAAE,QAAQ,YAAY,IAAI,EAAE,KAAK,KAAK,KAAK,CAAC;AAC5D,UAAM,UAAU,EAAE,WAAW;AAC7B,kBAAc,IAAI,UAAU,cAAc,IAAI,OAAO,KAAK,KAAK,CAAC;AAEhE,QAAI,cAAc,QAAQ,EAAE,KAA
K,UAAW,aAAY,EAAE;AAC1D,QAAI,aAAa,QAAQ,EAAE,KAAK,SAAU,YAAW,EAAE;AAEvD,QAAI,EAAE,SAAS,YAAY;AACzB,uBAAiB;AACjB,sBAAgB,EAAE;AAClB,YAAM,MAAM,EAAE,UAAU,cAAc;AACtC,0BAAoB;AACpB,YAAM,MAAM,EAAE,UAAU,WAAW,KAAK,KAAK;AAC7C,YAAM,OAAO,YAAY,IAAI,GAAG,KAAK,EAAE,OAAO,GAAG,SAAS,GAAG,YAAY,EAAE;AAC3E,WAAK,SAAS;AACd,WAAK,WAAW,EAAE;AAClB,WAAK,cAAc;AACnB,kBAAY,IAAI,KAAK,IAAI;AAAA,IAC3B;AAAA,EACF;AAEA,QAAM,UAAU,MAAM,KAAK,YAAY,QAAQ,CAAC,EAC7C,IAAI,CAAC,CAAC,OAAO,KAAK,OAAO,EAAE,OAAO,MAAM,EAAE,EAC1C,KAAK,CAAC,GAAG,MAAM,EAAE,QAAQ,EAAE,KAAK;AACnC,QAAM,YAAY,MAAM,KAAK,cAAc,QAAQ,CAAC,EACjD,IAAI,CAAC,CAAC,SAAS,KAAK,OAAO,EAAE,SAAS,MAAM,EAAE,EAC9C,KAAK,CAAC,GAAG,MAAM,EAAE,QAAQ,EAAE,KAAK;AAEnC,QAAM,YACJ,gBAAgB,IACZ;AAAA,IACE,OAAO;AAAA,IACP,SAAS;AAAA,IACT,iBAAiB;AAAA,IACjB,SAAS,MAAM,KAAK,YAAY,QAAQ,CAAC,EACtC,IAAI,CAAC,CAAC,WAAW,CAAC,OAAO,EAAE,WAAW,GAAG,EAAE,EAAE,EAC7C,KAAK,CAAC,GAAG,MAAM,EAAE,QAAQ,EAAE,KAAK;AAAA,EACrC,IACA;AAEN,SAAO;AAAA,IACL,SAAS,CAAC,OAAO,MAAM,OAAO,GAAG;AAAA,IACjC;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;AAGO,SAAS,cAAc,OAAe,oBAAoB,GAAW;AAC1E,MAAI,CAAC,WAAW,IAAI,EAAG,QAAO;AAC9B,MAAI;AACF,UAAM,IAAI,SAAS,IAAI;AACvB,UAAM,QAAQ,EAAE;AAChB,QAAI,QAAQ,KAAM,QAAO,GAAG,KAAK;AACjC,QAAI,QAAQ,OAAO,KAAM,QAAO,IAAI,QAAQ,MAAM,QAAQ,CAAC,CAAC;AAC5D,WAAO,IAAI,SAAS,OAAO,OAAO,QAAQ,CAAC,CAAC;AAAA,EAC9C,QAAQ;AACN,WAAO;AAAA,EACT;AACF;","names":[]}
|
|
@@ -0,0 +1,95 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
import { createRequire as __cr } from 'node:module'; if (typeof globalThis.require === 'undefined') { globalThis.require = __cr(import.meta.url); }
|
|
3
|
+
import {
|
|
4
|
+
indexCompatible,
|
|
5
|
+
querySemantic
|
|
6
|
+
} from "./chunk-PZOFBEDC.js";
|
|
7
|
+
|
|
8
|
+
// src/index/semantic/tool.ts
|
|
9
|
+
async function registerSemanticSearchTool(registry, opts) {
|
|
10
|
+
if (!await indexCompatible(opts.root, { provider: opts.provider, model: opts.model }))
|
|
11
|
+
return false;
|
|
12
|
+
const defaultTopK = opts.defaultTopK ?? 8;
|
|
13
|
+
const defaultMinScore = opts.defaultMinScore ?? 0.3;
|
|
14
|
+
registry.register({
|
|
15
|
+
name: "semantic_search",
|
|
16
|
+
description: "FIRST CHOICE for descriptive queries. Use this BEFORE search_content (grep) when the user describes WHAT code does ('where do we handle X', 'which file owns Y', 'how does Z work', 'find the logic that \u2026'). Returns ranked snippets ordered by semantic relevance \u2014 finds the right file even when your description shares no words with the code. Falls back to search_content / search_files only for: exact identifiers, regex patterns, or counting occurrences of a known token. If your first instinct is grep on a paraphrased question, you are wrong \u2014 try semantic_search first.",
|
|
17
|
+
readOnly: true,
|
|
18
|
+
parallelSafe: true,
|
|
19
|
+
parameters: {
|
|
20
|
+
type: "object",
|
|
21
|
+
properties: {
|
|
22
|
+
query: {
|
|
23
|
+
type: "string",
|
|
24
|
+
description: "Natural-language description, phrased as a question or noun phrase: 'where do we validate the session cookie?' / 'retry backoff logic' / 'code that prevents user changes from immediately landing on disk'. Do NOT pass exact identifiers \u2014 those are search_content's job."
|
|
25
|
+
},
|
|
26
|
+
topK: {
|
|
27
|
+
type: "integer",
|
|
28
|
+
description: `Number of snippets to return (1..16). Default ${defaultTopK}.`
|
|
29
|
+
},
|
|
30
|
+
minScore: {
|
|
31
|
+
type: "number",
|
|
32
|
+
description: `Drop snippets with cosine score below this (0..1). Default ${defaultMinScore}. Raise for stricter matches; lower if the index is small.`
|
|
33
|
+
}
|
|
34
|
+
},
|
|
35
|
+
required: ["query"]
|
|
36
|
+
},
|
|
37
|
+
fn: async (args, ctx) => {
|
|
38
|
+
const hits = await querySemantic(opts.root, args.query, {
|
|
39
|
+
topK: args.topK ?? defaultTopK,
|
|
40
|
+
minScore: args.minScore ?? defaultMinScore,
|
|
41
|
+
provider: opts.provider,
|
|
42
|
+
baseUrl: opts.baseUrl,
|
|
43
|
+
apiKey: opts.apiKey,
|
|
44
|
+
model: opts.model,
|
|
45
|
+
extraBody: opts.extraBody,
|
|
46
|
+
signal: ctx?.signal
|
|
47
|
+
});
|
|
48
|
+
if (hits === null) {
|
|
49
|
+
return "No semantic index found for this project. Run `luckerr index` to build one.";
|
|
50
|
+
}
|
|
51
|
+
if (hits.length === 0) {
|
|
52
|
+
return `query: ${args.query}
|
|
53
|
+
|
|
54
|
+
no matches above the score threshold (${args.minScore ?? defaultMinScore}).`;
|
|
55
|
+
}
|
|
56
|
+
return formatHits(args.query, hits);
|
|
57
|
+
}
|
|
58
|
+
});
|
|
59
|
+
return true;
|
|
60
|
+
}
|
|
61
|
+
function formatHits(query, hits) {
|
|
62
|
+
const lines = [`query: ${query}`, `
|
|
63
|
+
results (${hits.length}):`];
|
|
64
|
+
hits.forEach((h, i) => {
|
|
65
|
+
const { entry, score } = h;
|
|
66
|
+
lines.push(
|
|
67
|
+
`
|
|
68
|
+
${i + 1}. ${entry.path}:${entry.startLine}-${entry.endLine} (score ${score.toFixed(3)})`
|
|
69
|
+
);
|
|
70
|
+
const preview = entry.text.split("\n").slice(0, 8).join("\n");
|
|
71
|
+
lines.push(indentBlock(preview, " "));
|
|
72
|
+
if (entry.text.split("\n").length > 8) {
|
|
73
|
+
lines.push(
|
|
74
|
+
` \u2026(${entry.text.split("\n").length - 8} more lines \u2014 read_file ${entry.path}:${entry.startLine} for the full chunk)`
|
|
75
|
+
);
|
|
76
|
+
}
|
|
77
|
+
});
|
|
78
|
+
return lines.join("\n");
|
|
79
|
+
}
|
|
80
|
+
function indentBlock(text, prefix) {
|
|
81
|
+
return text.split("\n").map((l) => prefix + l).join("\n");
|
|
82
|
+
}
|
|
83
|
+
async function bootstrapSemanticSearchInCodeMode(registry, rootDir, opts = {}) {
|
|
84
|
+
if (await indexCompatible(rootDir, { provider: opts.provider, model: opts.model })) {
|
|
85
|
+
await registerSemanticSearchTool(registry, { ...opts, root: rootDir });
|
|
86
|
+
return { enabled: true };
|
|
87
|
+
}
|
|
88
|
+
return { enabled: false };
|
|
89
|
+
}
|
|
90
|
+
|
|
91
|
+
export {
|
|
92
|
+
registerSemanticSearchTool,
|
|
93
|
+
bootstrapSemanticSearchInCodeMode
|
|
94
|
+
};
|
|
95
|
+
//# sourceMappingURL=chunk-MIJI2WMN.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../../src/index/semantic/tool.ts"],"sourcesContent":["import type { ToolRegistry } from \"../../tools.js\";\nimport { indexCompatible, indexExists, querySemantic } from \"./builder.js\";\nimport type { SearchHit } from \"./store.js\";\n\ntype SemanticToolOptions = {\n provider?: \"ollama\" | \"openai-compat\";\n baseUrl?: string;\n apiKey?: string;\n model?: string;\n extraBody?: Record<string, unknown>;\n timeoutMs?: number;\n root: string;\n defaultTopK?: number;\n defaultMinScore?: number;\n};\n\nexport async function registerSemanticSearchTool(\n registry: ToolRegistry,\n opts: SemanticToolOptions,\n): Promise<boolean> {\n if (!(await indexCompatible(opts.root, { provider: opts.provider, model: opts.model })))\n return false;\n const defaultTopK = opts.defaultTopK ?? 8;\n const defaultMinScore = opts.defaultMinScore ?? 0.3;\n\n registry.register({\n name: \"semantic_search\",\n description:\n \"FIRST CHOICE for descriptive queries. Use this BEFORE search_content (grep) when the user describes WHAT code does ('where do we handle X', 'which file owns Y', 'how does Z work', 'find the logic that …'). Returns ranked snippets ordered by semantic relevance — finds the right file even when your description shares no words with the code. Falls back to search_content / search_files only for: exact identifiers, regex patterns, or counting occurrences of a known token. If your first instinct is grep on a paraphrased question, you are wrong — try semantic_search first.\",\n readOnly: true,\n parallelSafe: true,\n parameters: {\n type: \"object\",\n properties: {\n query: {\n type: \"string\",\n description:\n \"Natural-language description, phrased as a question or noun phrase: 'where do we validate the session cookie?' / 'retry backoff logic' / 'code that prevents user changes from immediately landing on disk'. 
Do NOT pass exact identifiers — those are search_content's job.\",\n },\n topK: {\n type: \"integer\",\n description: `Number of snippets to return (1..16). Default ${defaultTopK}.`,\n },\n minScore: {\n type: \"number\",\n description: `Drop snippets with cosine score below this (0..1). Default ${defaultMinScore}. Raise for stricter matches; lower if the index is small.`,\n },\n },\n required: [\"query\"],\n },\n fn: async (args: { query: string; topK?: number; minScore?: number }, ctx) => {\n const hits = await querySemantic(opts.root, args.query, {\n topK: args.topK ?? defaultTopK,\n minScore: args.minScore ?? defaultMinScore,\n provider: opts.provider,\n baseUrl: opts.baseUrl,\n apiKey: opts.apiKey,\n model: opts.model,\n extraBody: opts.extraBody,\n signal: ctx?.signal,\n });\n if (hits === null) {\n return \"No semantic index found for this project. Run `luckerr index` to build one.\";\n }\n if (hits.length === 0) {\n return `query: ${args.query}\\n\\nno matches above the score threshold (${args.minScore ?? defaultMinScore}).`;\n }\n return formatHits(args.query, hits);\n },\n });\n return true;\n}\n\nexport function formatHits(query: string, hits: readonly SearchHit[]): string {\n const lines: string[] = [`query: ${query}`, `\\nresults (${hits.length}):`];\n hits.forEach((h, i) => {\n const { entry, score } = h;\n lines.push(\n `\\n${i + 1}. ${entry.path}:${entry.startLine}-${entry.endLine} (score ${score.toFixed(3)})`,\n );\n // Cap each snippet so a 60-line chunk doesn't dominate the\n // model's context. 
The full chunk is still discoverable via\n // read_file once the model picks the most relevant hit.\n const preview = entry.text.split(\"\\n\").slice(0, 8).join(\"\\n\");\n lines.push(indentBlock(preview, \" \"));\n if (entry.text.split(\"\\n\").length > 8) {\n lines.push(\n ` …(${entry.text.split(\"\\n\").length - 8} more lines — read_file ${entry.path}:${entry.startLine} for the full chunk)`,\n );\n }\n });\n return lines.join(\"\\n\");\n}\n\nfunction indentBlock(text: string, prefix: string): string {\n return text\n .split(\"\\n\")\n .map((l) => prefix + l)\n .join(\"\\n\");\n}\n\n/** Silent: register if index exists, else skip — no Ollama probe, no setup prompt. */\nexport async function bootstrapSemanticSearchInCodeMode(\n registry: ToolRegistry,\n rootDir: string,\n opts: Omit<SemanticToolOptions, \"root\" | \"defaultTopK\" | \"defaultMinScore\"> = {},\n): Promise<{ enabled: boolean }> {\n if (await indexCompatible(rootDir, { provider: opts.provider, model: opts.model })) {\n await registerSemanticSearchTool(registry, { ...opts, root: rootDir });\n return { enabled: true };\n }\n return { enabled: false 
};\n}\n"],"mappings":";;;;;;;;AAgBA,eAAsB,2BACpB,UACA,MACkB;AAClB,MAAI,CAAE,MAAM,gBAAgB,KAAK,MAAM,EAAE,UAAU,KAAK,UAAU,OAAO,KAAK,MAAM,CAAC;AACnF,WAAO;AACT,QAAM,cAAc,KAAK,eAAe;AACxC,QAAM,kBAAkB,KAAK,mBAAmB;AAEhD,WAAS,SAAS;AAAA,IAChB,MAAM;AAAA,IACN,aACE;AAAA,IACF,UAAU;AAAA,IACV,cAAc;AAAA,IACd,YAAY;AAAA,MACV,MAAM;AAAA,MACN,YAAY;AAAA,QACV,OAAO;AAAA,UACL,MAAM;AAAA,UACN,aACE;AAAA,QACJ;AAAA,QACA,MAAM;AAAA,UACJ,MAAM;AAAA,UACN,aAAa,iDAAiD,WAAW;AAAA,QAC3E;AAAA,QACA,UAAU;AAAA,UACR,MAAM;AAAA,UACN,aAAa,8DAA8D,eAAe;AAAA,QAC5F;AAAA,MACF;AAAA,MACA,UAAU,CAAC,OAAO;AAAA,IACpB;AAAA,IACA,IAAI,OAAO,MAA2D,QAAQ;AAC5E,YAAM,OAAO,MAAM,cAAc,KAAK,MAAM,KAAK,OAAO;AAAA,QACtD,MAAM,KAAK,QAAQ;AAAA,QACnB,UAAU,KAAK,YAAY;AAAA,QAC3B,UAAU,KAAK;AAAA,QACf,SAAS,KAAK;AAAA,QACd,QAAQ,KAAK;AAAA,QACb,OAAO,KAAK;AAAA,QACZ,WAAW,KAAK;AAAA,QAChB,QAAQ,KAAK;AAAA,MACf,CAAC;AACD,UAAI,SAAS,MAAM;AACjB,eAAO;AAAA,MACT;AACA,UAAI,KAAK,WAAW,GAAG;AACrB,eAAO,UAAU,KAAK,KAAK;AAAA;AAAA,wCAA6C,KAAK,YAAY,eAAe;AAAA,MAC1G;AACA,aAAO,WAAW,KAAK,OAAO,IAAI;AAAA,IACpC;AAAA,EACF,CAAC;AACD,SAAO;AACT;AAEO,SAAS,WAAW,OAAe,MAAoC;AAC5E,QAAM,QAAkB,CAAC,UAAU,KAAK,IAAI;AAAA,WAAc,KAAK,MAAM,IAAI;AACzE,OAAK,QAAQ,CAAC,GAAG,MAAM;AACrB,UAAM,EAAE,OAAO,MAAM,IAAI;AACzB,UAAM;AAAA,MACJ;AAAA,EAAK,IAAI,CAAC,KAAK,MAAM,IAAI,IAAI,MAAM,SAAS,IAAI,MAAM,OAAO,YAAY,MAAM,QAAQ,CAAC,CAAC;AAAA,IAC3F;AAIA,UAAM,UAAU,MAAM,KAAK,MAAM,IAAI,EAAE,MAAM,GAAG,CAAC,EAAE,KAAK,IAAI;AAC5D,UAAM,KAAK,YAAY,SAAS,KAAK,CAAC;AACtC,QAAI,MAAM,KAAK,MAAM,IAAI,EAAE,SAAS,GAAG;AACrC,YAAM;AAAA,QACJ,aAAQ,MAAM,KAAK,MAAM,IAAI,EAAE,SAAS,CAAC,gCAA2B,MAAM,IAAI,IAAI,MAAM,SAAS;AAAA,MACnG;AAAA,IACF;AAAA,EACF,CAAC;AACD,SAAO,MAAM,KAAK,IAAI;AACxB;AAEA,SAAS,YAAY,MAAc,QAAwB;AACzD,SAAO,KACJ,MAAM,IAAI,EACV,IAAI,CAAC,MAAM,SAAS,CAAC,EACrB,KAAK,IAAI;AACd;AAGA,eAAsB,kCACpB,UACA,SACA,OAA8E,CAAC,GAChD;AAC/B,MAAI,MAAM,gBAAgB,SAAS,EAAE,UAAU,KAAK,UAAU,OAAO,KAAK,MAAM,CAAC,GAAG;AAClF,UAAM,2BAA2B,UAAU,EAAE,GAAG,MAAM,MAAM,QAAQ,CAAC;AACrE,WAAO,EAAE,SAAS,KAAK;AAAA,EACzB;AACA,SAAO,EAAE,SAAS,MAAM;AAC1B;","names":[]}
|