@goobits/sherpa 1.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/chunk-3CILH2TO.js +387 -0
- package/dist/chunk-3CILH2TO.js.map +7 -0
- package/dist/chunk-5NF3BSD6.js +512 -0
- package/dist/chunk-5NF3BSD6.js.map +7 -0
- package/dist/chunk-IIU6U7TE.js +307 -0
- package/dist/chunk-IIU6U7TE.js.map +7 -0
- package/dist/chunk-LQZTKH3U.js +307 -0
- package/dist/chunk-LQZTKH3U.js.map +7 -0
- package/dist/cli.d.ts +11 -0
- package/dist/cli.d.ts.map +1 -0
- package/dist/cli.js +84 -0
- package/dist/cli.js.map +7 -0
- package/dist/commands/init.d.ts +7 -0
- package/dist/commands/init.d.ts.map +1 -0
- package/dist/commands/init.js +333 -0
- package/dist/commands/init.js.map +1 -0
- package/dist/commands/post.d.ts +20 -0
- package/dist/commands/post.d.ts.map +1 -0
- package/dist/commands/post.js +183 -0
- package/dist/commands/post.js.map +1 -0
- package/dist/commands/pre.d.ts +18 -0
- package/dist/commands/pre.d.ts.map +1 -0
- package/dist/commands/pre.js +102 -0
- package/dist/commands/pre.js.map +1 -0
- package/dist/commands/status.d.ts +5 -0
- package/dist/commands/status.d.ts.map +1 -0
- package/dist/commands/status.js +48 -0
- package/dist/commands/status.js.map +1 -0
- package/dist/daemon-V2QDZTUB.js +89 -0
- package/dist/daemon-V2QDZTUB.js.map +7 -0
- package/dist/daemon.d.ts +9 -0
- package/dist/daemon.d.ts.map +1 -0
- package/dist/daemon.js +112 -0
- package/dist/daemon.js.map +1 -0
- package/dist/index.d.ts +15 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +36 -0
- package/dist/index.js.map +7 -0
- package/dist/parser.d.ts +21 -0
- package/dist/parser.d.ts.map +1 -0
- package/dist/parser.js +152 -0
- package/dist/parser.js.map +1 -0
- package/dist/reviewer/index.js +544 -0
- package/dist/reviewer/index.js.map +7 -0
- package/dist/rules.d.ts +21 -0
- package/dist/rules.d.ts.map +1 -0
- package/dist/rules.js +165 -0
- package/dist/rules.js.map +1 -0
- package/dist/status-Q6Z4TFJZ.js +52 -0
- package/dist/status-Q6Z4TFJZ.js.map +7 -0
- package/dist/types.d.ts +69 -0
- package/dist/types.d.ts.map +1 -0
- package/dist/types.js +13 -0
- package/dist/types.js.map +1 -0
- package/package.json +52 -0
|
@@ -0,0 +1,387 @@
|
|
|
1
|
+
// ../core/dist/hooks.js
|
|
2
|
+
import { existsSync, readFileSync } from "fs";
|
|
3
|
+
import { join } from "path";
|
|
4
|
+
/**
 * Read and parse the JSON hook payload that Claude Code pipes to stdin.
 * Blocks until stdin (fd 0) reaches EOF.
 */
function readHookInput() {
  // fd 0 === stdin; readFileSync drains it fully before parsing.
  return JSON.parse(readFileSync(0, "utf-8"));
}
|
|
8
|
+
/**
 * Emit a hook result as a single JSON line on stdout, which is the
 * channel the Claude Code hook runner reads.
 */
function writeHookOutput(data) {
  const serialized = JSON.stringify(data);
  console.log(serialized);
}
|
|
11
|
+
// Process exit codes understood by the Claude Code hook runner:
// ALLOW (0) lets the tool call proceed, BLOCK (2) rejects it.
var EXIT = {
  ALLOW: 0,
  BLOCK: 2
};
|
|
15
|
+
/**
 * Load a JSON config file named `configName` from the first search path
 * that has a readable, valid copy, shallow-merged over `defaults`.
 * Falls back to `defaults` when no path yields usable JSON.
 *
 * @param {string} configName - File name to look for in each search path.
 * @param {object} defaults   - Base values; loaded keys override them.
 * @param {string[]} [searchPaths] - Directories to probe, in order.
 * @returns {object} Merged config (never throws).
 */
function loadConfig(configName, defaults, searchPaths = [process.cwd(), join(process.cwd(), "..")]) {
  for (const basePath of searchPaths) {
    const candidate = join(basePath, configName);
    if (!existsSync(candidate)) {
      continue;
    }
    try {
      const parsed = JSON.parse(readFileSync(candidate, "utf-8"));
      return { ...defaults, ...parsed };
    } catch {
      // Invalid JSON: deliberately fall through to the next search path.
    }
  }
  return defaults;
}
|
|
29
|
+
|
|
30
|
+
// ../core/dist/llm.js
|
|
31
|
+
import { existsSync as existsSync2, readFileSync as readFileSync2 } from "fs";
|
|
32
|
+
import { join as join2 } from "path";
|
|
33
|
+
import OpenAI from "openai";
|
|
34
|
+
/**
 * Best-effort .env loader: probes a fixed list of candidate locations and
 * copies KEY=VALUE pairs into process.env. Existing environment variables
 * are never overwritten, so earlier files (and the real environment) win.
 * Runs once at module load (call at the bottom).
 */
function loadEnvFiles() {
  const envPaths = [
    join2(process.cwd(), ".env"),
    join2(process.cwd(), "..", ".env"),
    join2(process.cwd(), "..", "..", ".env"),
    join2(process.cwd(), "packages", "backend", ".env")
  ];
  for (const envPath of envPaths) {
    if (!existsSync2(envPath)) {
      continue;
    }
    const content = readFileSync2(envPath, "utf-8");
    for (const line of content.split("\n")) {
      const trimmed = line.trim();
      // Skip blank lines and comments.
      if (!trimmed || trimmed.startsWith("#")) {
        continue;
      }
      const eqIdx = trimmed.indexOf("=");
      if (eqIdx <= 0) {
        continue; // no '=' separator, or empty key
      }
      const key = trimmed.slice(0, eqIdx).trim();
      let value = trimmed.slice(eqIdx + 1).trim();
      // BUGFIX: strip matching surrounding quotes, per dotenv convention —
      // KEY="some value" should yield `some value`, not `"some value"`.
      if (value.length >= 2 && (value[0] === '"' || value[0] === "'") && value.endsWith(value[0])) {
        value = value.slice(1, -1);
      }
      if (!process.env[key]) {
        process.env[key] = value;
      }
    }
  }
}
loadEnvFiles();
|
|
61
|
+
// LLM provider registry. Declaration order matters: getAvailableProviders()
// preserves it and getDefaultProvider() takes the first configured entry.
// Each `models` list is ordered best-first; rotateModel() walks it on 429s.
var PROVIDERS = {
  cerebras: {
    baseURL: "https://api.cerebras.ai/v1",
    envKey: "CEREBRAS_API_KEY",
    models: [
      "zai-glm-4.7",
      // Best quality, 100 req/day
      "qwen-3-235b-a22b-instruct-2507",
      // 235B, 1,440 req/day
      "gpt-oss-120b"
      // 120B, 14,400 req/day
    ]
  },
  groq: {
    baseURL: "https://api.groq.com/openai/v1",
    envKey: "GROQ_API_KEY",
    models: [
      "openai/gpt-oss-120b"
      // Same 120B model
    ]
  },
  openai: {
    baseURL: "https://api.openai.com/v1",
    envKey: "OPENAI_API_KEY",
    models: [
      "gpt-4o-mini"
    ]
  }
};
|
|
90
|
+
// Per-provider index into PROVIDERS[p].models — advanced by rotateModel()
// when a model hits its rate limit. A missing key means index 0.
var modelIndex = /* @__PURE__ */ new Map();
// Lazily-constructed OpenAI SDK clients, one per provider (see getClient).
var clients = /* @__PURE__ */ new Map();
|
|
92
|
+
/**
 * Names of providers whose API-key environment variable is set,
 * in PROVIDERS declaration order (which is preference order).
 */
function getAvailableProviders() {
  const configured = [];
  for (const name of Object.keys(PROVIDERS)) {
    if (process.env[PROVIDERS[name].envKey]) {
      configured.push(name);
    }
  }
  return configured;
}
|
|
95
|
+
/**
 * Return a cached OpenAI-compatible client for `provider`, creating
 * (and caching) it on first use.
 *
 * @throws {Error} when the provider's API-key env var is unset.
 */
function getClient(provider) {
  const cached = clients.get(provider);
  if (cached) {
    return cached;
  }
  const { envKey, baseURL } = PROVIDERS[provider];
  const apiKey = process.env[envKey];
  if (!apiKey) {
    throw new Error(`${envKey} environment variable not set`);
  }
  const client = new OpenAI({
    apiKey,
    baseURL
  });
  clients.set(provider, client);
  return client;
}
|
|
111
|
+
/**
 * First configured provider in preference order.
 * @throws {Error} when no provider API key is present in the environment.
 */
function getDefaultProvider() {
  const [first] = getAvailableProviders();
  if (first === undefined) {
    throw new Error("No LLM provider configured. Set CEREBRAS_API_KEY, GROQ_API_KEY, or OPENAI_API_KEY");
  }
  return first;
}
|
|
118
|
+
/**
 * Model currently selected for `provider` (after any rate-limit rotations).
 * The index is clamped so a stale value can never run past the model list.
 */
function getCurrentModel(provider) {
  const { models } = PROVIDERS[provider];
  const idx = modelIndex.get(provider) || 0;
  const clamped = Math.min(idx, models.length - 1);
  return models[clamped];
}
|
|
123
|
+
/**
 * Advance `provider` to its next fallback model and return its id,
 * or null when the model list is exhausted (rotation is sticky:
 * the new index persists in modelIndex for later calls).
 */
function rotateModel(provider) {
  const { models } = PROVIDERS[provider];
  const next = (modelIndex.get(provider) || 0) + 1;
  if (next >= models.length) {
    return null;
  }
  modelIndex.set(provider, next);
  return models[next];
}
|
|
133
|
+
/**
 * Probe each configured provider with a minimal 1-token request and read
 * the rate-limit headers off the raw HTTP response (.asResponse() exposes
 * them via the OpenAI SDK).
 *
 * @returns {Promise<Array<{provider: string, available: boolean,
 *   limits?: object, error?: string}>>} one entry per configured provider.
 */
async function checkProviderStatus() {
  const results = [];
  for (const provider of getAvailableProviders()) {
    try {
      const client = getClient(provider);
      const config = PROVIDERS[provider];
      const response = await client.chat.completions.create({
        model: config.models[0],
        messages: [{ role: "user", content: "hi" }],
        max_tokens: 1
      }).asResponse();
      const h = response.headers;
      // BUGFIX: pass an explicit radix to parseInt. The `|| void 0` maps a
      // missing header (and a literal 0) to undefined — intentional: both
      // mean "limit unknown" for display purposes.
      const getInt = (name) => parseInt(h.get(name) || "0", 10) || void 0;
      results.push({
        provider,
        available: true,
        limits: {
          // Groq style (per minute)
          requestsPerMinute: getInt("x-ratelimit-limit-requests"),
          requestsRemainingMinute: getInt("x-ratelimit-remaining-requests"),
          tokensPerMinute: getInt("x-ratelimit-limit-tokens"),
          tokensRemainingMinute: getInt("x-ratelimit-remaining-tokens"),
          // Cerebras style (per day)
          requestsPerDay: getInt("x-ratelimit-limit-requests-day"),
          requestsRemainingDay: getInt("x-ratelimit-remaining-requests-day"),
          tokensPerDay: getInt("x-ratelimit-limit-tokens-day"),
          tokensRemainingDay: getInt("x-ratelimit-remaining-tokens-day")
        }
      });
    } catch (error) {
      results.push({
        provider,
        available: false,
        // Robustness: non-Error throwables have no .message.
        error: error instanceof Error ? error.message : String(error)
      });
    }
  }
  return results;
}
|
|
172
|
+
/**
 * True when a thrown value is an object carrying HTTP status 429
 * (Too Many Requests). Strings, numbers, null and undefined never match.
 */
function isRateLimitError(error) {
  const isObject = Boolean(error) && typeof error === "object";
  return isObject ? error.status === 429 : false;
}
|
|
179
|
+
/**
 * Send a single-turn chat request, transparently rotating to the
 * provider's next model on rate limits, and falling back to other
 * configured providers on any failure.
 *
 * @param {string} prompt - User message content.
 * @param {object} [options]
 * @param {string} [options.system]   - Optional system prompt.
 * @param {string} [options.provider] - Provider key; defaults to the first configured one.
 * @param {string} [options.model]    - Model id; defaults to the provider's current model.
 * @param {boolean} [options.fallback=true] - Whether to try other providers on failure.
 * @returns {Promise<string>} Assistant reply text ("" when the API returns none).
 * @throws {Error} when all models and providers are exhausted.
 */
async function chat(prompt, options = {}) {
  const { system, fallback = true } = options;
  let { provider, model } = options;
  if (!provider) {
    provider = getDefaultProvider();
  }
  if (!model) {
    model = getCurrentModel(provider);
  }
  const messages = [];
  if (system) {
    messages.push({ role: "system", content: system });
  }
  messages.push({ role: "user", content: prompt });
  let currentProvider = provider;
  let currentModel = model;
  // BUGFIX: remember every provider already tried so the fallback path
  // cannot ping-pong between two failing providers forever — the previous
  // filter only excluded the *current* provider, allowing an infinite loop.
  const triedProviders = new Set([currentProvider]);
  while (true) {
    try {
      const response = await getClient(currentProvider).chat.completions.create({
        model: currentModel,
        messages
      });
      return response.choices[0]?.message?.content || "";
    } catch (error) {
      if (isRateLimitError(error)) {
        // Rate limited: prefer the same provider's next fallback model.
        const nextModel = rotateModel(currentProvider);
        if (nextModel) {
          console.error(`Rate limited on ${currentModel}, rotating to ${nextModel}`);
          currentModel = nextModel;
          continue;
        }
      }
      if (fallback) {
        const available = getAvailableProviders().filter((p) => !triedProviders.has(p));
        if (available.length > 0) {
          currentProvider = available[0];
          triedProviders.add(currentProvider);
          currentModel = getCurrentModel(currentProvider);
          console.error(`Falling back to ${currentProvider}/${currentModel}`);
          continue;
        }
      }
      // Preserve the original message format; attach the cause for debugging.
      throw new Error(`LLM API error (${currentProvider}/${currentModel}): ${error}`, { cause: error });
    }
  }
}
|
|
224
|
+
|
|
225
|
+
// ../core/dist/files.js
|
|
226
|
+
import { readFileSync as readFileSync4, statSync } from "fs";
|
|
227
|
+
import { glob } from "glob";
|
|
228
|
+
|
|
229
|
+
// ../core/dist/git.js
|
|
230
|
+
import { exec, execFile, execSync } from "child_process";
|
|
231
|
+
import { existsSync as existsSync3, readFileSync as readFileSync3 } from "fs";
|
|
232
|
+
import ignore from "ignore";
|
|
233
|
+
import { dirname, join as join3, resolve } from "path";
|
|
234
|
+
import { promisify } from "util";
|
|
235
|
+
var execAsync = promisify(exec);
|
|
236
|
+
function loadGitignore(dir = ".") {
|
|
237
|
+
const ig = ignore();
|
|
238
|
+
const absoluteDir = resolve(dir);
|
|
239
|
+
let current = absoluteDir;
|
|
240
|
+
const gitignoreFiles = [];
|
|
241
|
+
while (current) {
|
|
242
|
+
const gitignorePath = join3(current, ".gitignore");
|
|
243
|
+
if (existsSync3(gitignorePath)) {
|
|
244
|
+
gitignoreFiles.unshift(gitignorePath);
|
|
245
|
+
}
|
|
246
|
+
if (existsSync3(join3(current, ".git"))) {
|
|
247
|
+
break;
|
|
248
|
+
}
|
|
249
|
+
const parent = dirname(current);
|
|
250
|
+
if (parent === current) {
|
|
251
|
+
break;
|
|
252
|
+
}
|
|
253
|
+
current = parent;
|
|
254
|
+
}
|
|
255
|
+
for (const path of gitignoreFiles) {
|
|
256
|
+
ig.add(readFileSync3(path, "utf8"));
|
|
257
|
+
}
|
|
258
|
+
ig.add([
|
|
259
|
+
"node_modules/",
|
|
260
|
+
"__pycache__/",
|
|
261
|
+
".venv/",
|
|
262
|
+
"venv/",
|
|
263
|
+
"dist/",
|
|
264
|
+
"build/",
|
|
265
|
+
".git/",
|
|
266
|
+
".eggs/",
|
|
267
|
+
"*.egg-info/"
|
|
268
|
+
]);
|
|
269
|
+
return ig;
|
|
270
|
+
}
|
|
271
|
+
function filterIgnored(paths, dir = ".") {
|
|
272
|
+
const ig = loadGitignore(dir);
|
|
273
|
+
return paths.filter((p) => !ig.ignores(p));
|
|
274
|
+
}
|
|
275
|
+
/**
 * Return `git diff <base> [-- <path>]` output for the current repository.
 *
 * SECURITY FIX: the original built a shell command string
 * (`execAsync(args.join(" "))`), so a `base` or `path` containing spaces
 * or shell metacharacters was interpreted by the shell (command injection
 * and broken paths). execFile passes arguments as an argv array with no
 * shell involved, so arbitrary file names are safe.
 *
 * @param {string} [base="HEAD~1"] - Git revision to diff against.
 * @param {string} [path] - Optional path filter for the diff.
 * @returns {Promise<string>} Raw unified-diff text.
 * @throws {Error} when git fails (not a repo, unknown revision, ...).
 */
async function getDiff(base = "HEAD~1", path) {
  const execFileAsync = promisify(execFile);
  const args = ["diff", base];
  if (path) {
    args.push("--", path);
  }
  try {
    // Raise maxBuffer above the 1 MiB default so large diffs don't fail.
    const { stdout } = await execFileAsync("git", args, { maxBuffer: 64 * 1024 * 1024 });
    return stdout;
  } catch (error) {
    throw new Error(`Git diff failed: ${error}`, { cause: error });
  }
}
|
|
287
|
+
|
|
288
|
+
// ../core/dist/files.js
|
|
289
|
+
// File extensions treated as reviewable "code" when findFiles() is called
// with { codeOnly: true }. Extensionless files never match (see findFiles).
var CODE_EXTENSIONS = /* @__PURE__ */ new Set([
  ".py",
  ".rs",
  ".ts",
  ".js",
  ".svelte",
  ".tsx",
  ".jsx",
  ".go",
  ".java",
  ".rb",
  ".sh",
  ".json",
  ".yaml",
  ".yml",
  ".toml",
  ".md"
]);
|
|
307
|
+
/**
 * Glob for files matching `pattern`, excluding common build/vendor dirs
 * and anything the surrounding .gitignore rules ignore.
 *
 * @param {string} pattern - Glob pattern relative to the cwd.
 * @param {object} [options]
 * @param {boolean} [options.codeOnly] - Keep only CODE_EXTENSIONS files.
 * @returns {Promise<string[]>} Sorted list of matching file paths.
 */
async function findFiles(pattern, options = {}) {
  const alwaysIgnore = [
    "**/node_modules/**",
    "**/.venv/**",
    "**/venv/**",
    "**/dist/**",
    "**/build/**",
    "**/__pycache__/**",
    "**/.git/**"
  ];
  const matches = await glob(pattern, {
    ignore: alwaysIgnore,
    nodir: true,
    dot: false
  });
  let results = filterIgnored(matches);
  if (options.codeOnly) {
    // Note: substring(lastIndexOf(".")) on a dotless name yields the whole
    // name, which never appears in CODE_EXTENSIONS — so it is excluded.
    results = results.filter((f) => CODE_EXTENSIONS.has(f.substring(f.lastIndexOf("."))));
  }
  return results.sort();
}
|
|
330
|
+
/**
 * Read a file and render it with 4-column right-aligned line numbers,
 * wrapped in BEGIN/END sentinel lines for inclusion in LLM prompts.
 *
 * @param {string} path - File to read (throws if unreadable).
 * @returns {string} The framed, line-numbered file body.
 */
function formatWithLineNumbers(path) {
  const lines = readFileSync4(path, "utf8").split("\n");
  const body = lines
    .map((line, i) => `${String(i + 1).padStart(4)} | ${line}`)
    .join("\n");
  return `
--- BEGIN FILE: ${path} (lines 1-${lines.length}) ---
${body}
--- END FILE: ${path} ---
`;
}
|
|
341
|
+
/**
 * Format files for prompting until a byte budget is reached.
 * Unreadable paths are skipped silently (best-effort contract).
 *
 * @param {string[]} paths - Files to read, in priority order.
 * @param {number} [maxBytes=120000] - Budget on total formatted length.
 * @returns {{files: string[], truncated: number}} Formatted blocks plus the
 *   count of paths dropped once the budget was hit (0 if all fit).
 */
function readFilesWithLimit(paths, maxBytes = 12e4) {
  const files = [];
  let used = 0;
  let truncated = 0;
  for (const path of paths) {
    try {
      statSync(path); // existence probe; missing files land in the catch
      const formatted = formatWithLineNumbers(path);
      if (used + formatted.length > maxBytes) {
        // Budget exceeded: everything not yet pushed counts as truncated.
        truncated = paths.length - files.length;
        break;
      }
      files.push(formatted);
      used += formatted.length;
    } catch {
      // Unreadable path: skip, matching the original best-effort behavior.
    }
  }
  return { files, truncated };
}
|
|
360
|
+
|
|
361
|
+
// ../core/dist/tokens.js
|
|
362
|
+
import { get_encoding } from "tiktoken";
|
|
363
|
+
// The tiktoken encoder is expensive to construct, so it is built once on
// first use and cached for the life of the process.
var encoder = null;
// Return the shared cl100k_base encoder, creating it lazily.
function getEncoder() {
  if (!encoder) {
    encoder = get_encoding("cl100k_base");
  }
  return encoder;
}
// Count tokens in `text` under the cl100k_base encoding.
function countTokens(text) {
  return getEncoder().encode(text).length;
}
|
|
373
|
+
|
|
374
|
+
// Public surface of this shared chunk, re-exported by the entry modules.
export {
  getDiff,
  findFiles,
  readFilesWithLimit,
  readHookInput,
  writeHookOutput,
  EXIT,
  loadConfig,
  getAvailableProviders,
  checkProviderStatus,
  chat,
  countTokens
};
|
|
387
|
+
//# sourceMappingURL=chunk-3CILH2TO.js.map
|
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
{
|
|
2
|
+
"version": 3,
|
|
3
|
+
"sources": ["../../core/src/hooks.ts", "../../core/src/llm.ts", "../../core/src/files.ts", "../../core/src/git.ts", "../../core/src/tokens.ts"],
|
|
4
|
+
"sourcesContent": ["/**\n * Shared utilities for Claude Code hooks\n */\n\nimport { existsSync,readFileSync } from 'fs'\nimport { join } from 'path'\n\n/**\n * PreToolUse hook input structure\n */\nexport interface PreToolInput {\n\ttool_name?: string;\n\ttool_input?: {\n\t\tcommand?: string;\n\t\t[key: string]: unknown;\n\t};\n}\n\n/**\n * PostToolUse hook input structure\n */\nexport interface PostToolOutput {\n\ttool_name?: string;\n\ttool_input?: {\n\t\tcommand?: string;\n\t\t[key: string]: unknown;\n\t};\n\ttool_result?: {\n\t\tstdout?: string;\n\t\tstderr?: string;\n\t\texit_code?: number;\n\t\t[key: string]: unknown;\n\t};\n}\n\n/**\n * Read hook input from stdin\n */\nexport function readHookInput<T>(): T {\n\tconst input = readFileSync(0, 'utf-8')\n\treturn JSON.parse(input) as T\n}\n\n/**\n * Write hook output to stdout\n */\nexport function writeHookOutput(data: unknown): void {\n\tconsole.log(JSON.stringify(data))\n}\n\n/**\n * Exit codes for hooks\n */\nexport const EXIT = {\n\tALLOW: 0,\n\tBLOCK: 2\n} as const\n\n/**\n * Load JSON config with defaults\n */\nexport function loadConfig<T extends object>(\n\tconfigName: string,\n\tdefaults: T,\n\tsearchPaths: string[] = [ process.cwd(), join(process.cwd(), '..') ]\n): T {\n\tfor (const basePath of searchPaths) {\n\t\tconst configPath = join(basePath, configName)\n\t\tif (existsSync(configPath)) {\n\t\t\ttry {\n\t\t\t\tconst content = readFileSync(configPath, 'utf-8')\n\t\t\t\tconst loaded = JSON.parse(content)\n\t\t\t\treturn { ...defaults, ...loaded }\n\t\t\t} catch {\n\t\t\t\t// Ignore parse errors, use defaults\n\t\t\t}\n\t\t}\n\t}\n\treturn defaults\n}\n\n/**\n * Load config from environment variables with prefix\n */\nexport function loadEnvConfig<T extends object>(\n\tprefix: string,\n\tdefaults: T\n): T {\n\tconst result = { ...defaults }\n\n\tfor (const key of Object.keys(defaults)) {\n\t\tconst envKey = `${ prefix }_${ key.toUpperCase() }`\n\t\tconst envValue = process.env[envKey]\n\n\t\tif 
(envValue !== undefined) {\n\t\t\tconst defaultValue = defaults[key as keyof T]\n\n\t\t\t// Type coercion based on default value type\n\t\t\tif (typeof defaultValue === 'number') {\n\t\t\t\t(result as Record<string, unknown>)[key] = parseInt(envValue, 10)\n\t\t\t} else if (typeof defaultValue === 'boolean') {\n\t\t\t\t(result as Record<string, unknown>)[key] = envValue === 'true'\n\t\t\t} else {\n\t\t\t\t(result as Record<string, unknown>)[key] = envValue\n\t\t\t}\n\t\t}\n\t}\n\n\treturn result\n}\n", "/**\n * LLM utilities: Multi-provider support (Cerebras, Groq, OpenAI-compatible)\n */\n\nimport { existsSync, readFileSync } from 'fs'\nimport OpenAI from 'openai'\nimport { join } from 'path'\n\n// Auto-load .env files (simple implementation, no dotenv dependency)\nfunction loadEnvFiles(): void {\n const envPaths = [\n join(process.cwd(), '.env'),\n join(process.cwd(), '..', '.env'),\n join(process.cwd(), '..', '..', '.env'),\n join(process.cwd(), 'packages', 'backend', '.env')\n ]\n\n for (const envPath of envPaths) {\n if (existsSync(envPath)) {\n const content = readFileSync(envPath, 'utf-8')\n for (const line of content.split('\\n')) {\n const trimmed = line.trim()\n if (trimmed && !trimmed.startsWith('#')) {\n const eqIdx = trimmed.indexOf('=')\n if (eqIdx > 0) {\n const key = trimmed.slice(0, eqIdx).trim()\n const value = trimmed.slice(eqIdx + 1).trim()\n // Only set if not already defined\n if (!process.env[key]) {\n process.env[key] = value\n }\n }\n }\n }\n }\n }\n}\n\n// Load env on module init\nloadEnvFiles()\n\n/** Supported LLM providers */\nexport type Provider = 'cerebras' | 'groq' | 'openai'\n\n/** Provider configuration */\ninterface ProviderConfig {\n baseURL: string\n envKey: string\n models: string[] // Ordered best \u2192 worst, will rotate on rate limit\n}\n\nconst PROVIDERS: Record<Provider, ProviderConfig> = {\n cerebras: {\n baseURL: 'https://api.cerebras.ai/v1',\n envKey: 'CEREBRAS_API_KEY',\n models: [\n 'zai-glm-4.7', // Best quality, 
100 req/day\n 'qwen-3-235b-a22b-instruct-2507', // 235B, 1,440 req/day\n 'gpt-oss-120b' // 120B, 14,400 req/day\n ]\n },\n groq: {\n baseURL: 'https://api.groq.com/openai/v1',\n envKey: 'GROQ_API_KEY',\n models: [\n 'openai/gpt-oss-120b' // Same 120B model\n ]\n },\n openai: {\n baseURL: 'https://api.openai.com/v1',\n envKey: 'OPENAI_API_KEY',\n models: ['gpt-4o-mini']\n }\n}\n\n// Track which model index to use per provider (rotates on rate limit)\nconst modelIndex = new Map<Provider, number>()\n\n// Client cache per provider\nconst clients = new Map<Provider, OpenAI>()\n\n/**\n * Get available providers (those with API keys set)\n */\nexport function getAvailableProviders(): Provider[] {\n return (Object.keys(PROVIDERS) as Provider[]).filter(\n (provider) => !!process.env[PROVIDERS[provider].envKey]\n )\n}\n\n/**\n * Get or create client for a specific provider\n */\nfunction getClient(provider: Provider): OpenAI {\n let client = clients.get(provider)\n if (!client) {\n const config = PROVIDERS[provider]\n const apiKey = process.env[config.envKey]\n if (!apiKey) {\n throw new Error(`${config.envKey} environment variable not set`)\n }\n client = new OpenAI({\n apiKey,\n baseURL: config.baseURL\n })\n clients.set(provider, client)\n }\n return client\n}\n\n/**\n * Get the default provider (first available)\n */\nfunction getDefaultProvider(): Provider {\n const available = getAvailableProviders()\n if (available.length === 0) {\n throw new Error(\n 'No LLM provider configured. 
Set CEREBRAS_API_KEY, GROQ_API_KEY, or OPENAI_API_KEY'\n )\n }\n return available[0]\n}\n\n/**\n * Get current model for provider (with rotation support)\n */\nfunction getCurrentModel(provider: Provider): string {\n const config = PROVIDERS[provider]\n const idx = modelIndex.get(provider) || 0\n return config.models[Math.min(idx, config.models.length - 1)]\n}\n\n/**\n * Rotate to next model for provider (called on rate limit)\n */\nfunction rotateModel(provider: Provider): string | null {\n const config = PROVIDERS[provider]\n const currentIdx = modelIndex.get(provider) || 0\n const nextIdx = currentIdx + 1\n\n if (nextIdx >= config.models.length) {\n return null // No more models\n }\n\n modelIndex.set(provider, nextIdx)\n return config.models[nextIdx]\n}\n\n/** Chat options */\nexport interface ChatOptions {\n system?: string\n model?: string\n provider?: Provider\n fallback?: boolean // Try next provider on failure\n}\n\n/** Rate limit info from provider */\nexport interface RateLimitInfo {\n provider: Provider\n available: boolean\n error?: string\n limits?: {\n requestsPerMinute?: number\n requestsRemainingMinute?: number\n requestsPerDay?: number\n requestsRemainingDay?: number\n tokensPerMinute?: number\n tokensRemainingMinute?: number\n tokensPerDay?: number\n tokensRemainingDay?: number\n }\n}\n\n/**\n * Check rate limits for all available providers\n */\nexport async function checkProviderStatus(): Promise<RateLimitInfo[]> {\n const results: RateLimitInfo[] = []\n\n for (const provider of getAvailableProviders()) {\n try {\n const client = getClient(provider)\n const config = PROVIDERS[provider]\n\n // Make a minimal request to get headers (use first model)\n const response = await client.chat.completions\n .create({\n model: config.models[0],\n messages: [{ role: 'user', content: 'hi' }],\n max_tokens: 1\n })\n .asResponse()\n\n const h = response.headers\n const getInt = (name: string) => parseInt(h.get(name) || '0') || undefined\n\n results.push({\n 
provider,\n available: true,\n limits: {\n // Groq style (per minute)\n requestsPerMinute: getInt('x-ratelimit-limit-requests'),\n requestsRemainingMinute: getInt('x-ratelimit-remaining-requests'),\n tokensPerMinute: getInt('x-ratelimit-limit-tokens'),\n tokensRemainingMinute: getInt('x-ratelimit-remaining-tokens'),\n // Cerebras style (per day)\n requestsPerDay: getInt('x-ratelimit-limit-requests-day'),\n requestsRemainingDay: getInt('x-ratelimit-remaining-requests-day'),\n tokensPerDay: getInt('x-ratelimit-limit-tokens-day'),\n tokensRemainingDay: getInt('x-ratelimit-remaining-tokens-day')\n }\n })\n } catch (error) {\n results.push({\n provider,\n available: false,\n error: (error as Error).message\n })\n }\n }\n\n return results\n}\n\n/**\n * Check if error is a rate limit (429)\n */\nfunction isRateLimitError(error: unknown): boolean {\n if (error && typeof error === 'object') {\n const status = (error as { status?: number }).status\n return status === 429\n }\n return false\n}\n\n/**\n * Chat with LLM (supports model rotation + provider fallback)\n *\n * Order: cerebras(zai-glm-4.7 \u2192 qwen-235b \u2192 gpt-oss-120b) \u2192 groq \u2192 openai\n */\nexport async function chat(prompt: string, options: ChatOptions = {}): Promise<string> {\n const { system, fallback = true } = options\n let { provider, model } = options\n\n // Get provider\n if (!provider) {\n provider = getDefaultProvider()\n }\n\n // Use specified model or current rotation model\n if (!model) {\n model = getCurrentModel(provider)\n }\n\n const messages: OpenAI.ChatCompletionMessageParam[] = []\n if (system) {\n messages.push({ role: 'system', content: system })\n }\n messages.push({ role: 'user', content: prompt })\n\n // Try current provider with model rotation\n let currentProvider = provider\n let currentModel = model\n\n while (true) {\n try {\n const response = await getClient(currentProvider).chat.completions.create({\n model: currentModel,\n messages\n })\n\n return 
response.choices[0]?.message?.content || ''\n } catch (error) {\n // On rate limit, try next model in same provider\n if (isRateLimitError(error)) {\n const nextModel = rotateModel(currentProvider)\n if (nextModel) {\n console.error(`Rate limited on ${currentModel}, rotating to ${nextModel}`)\n currentModel = nextModel\n continue\n }\n }\n\n // No more models in this provider, try next provider\n if (fallback) {\n const available = getAvailableProviders().filter((p) => p !== currentProvider)\n if (available.length > 0) {\n currentProvider = available[0]\n currentModel = getCurrentModel(currentProvider)\n console.error(`Falling back to ${currentProvider}/${currentModel}`)\n continue\n }\n }\n\n throw new Error(`LLM API error (${currentProvider}/${currentModel}): ${error}`)\n }\n }\n}\n", "/**\n * File utilities: glob patterns, line-numbered formatting\n */\n\nimport { readFileSync, statSync } from 'fs'\nimport { glob } from 'glob'\n\nimport { filterIgnored } from './git.js'\n\n/** Code file extensions to include */\nexport const CODE_EXTENSIONS = new Set([\n\t'.py',\n\t'.rs',\n\t'.ts',\n\t'.js',\n\t'.svelte',\n\t'.tsx',\n\t'.jsx',\n\t'.go',\n\t'.java',\n\t'.rb',\n\t'.sh',\n\t'.json',\n\t'.yaml',\n\t'.yml',\n\t'.toml',\n\t'.md'\n])\n\n/**\n * Find files matching glob pattern, respecting .gitignore\n */\nexport async function findFiles(\n\tpattern: string,\n\toptions: { codeOnly?: boolean } = {}\n): Promise<string[]> {\n\tconst files = await glob(pattern, {\n\t\tignore: [\n\t\t\t'**/node_modules/**',\n\t\t\t'**/.venv/**',\n\t\t\t'**/venv/**',\n\t\t\t'**/dist/**',\n\t\t\t'**/build/**',\n\t\t\t'**/__pycache__/**',\n\t\t\t'**/.git/**'\n\t\t],\n\t\tnodir: true,\n\t\tdot: false\n\t})\n\n\t// Filter by .gitignore\n\tlet filtered = filterIgnored(files)\n\n\t// Filter by code extensions if requested\n\tif (options.codeOnly) {\n\t\tfiltered = filtered.filter(f => {\n\t\t\tconst ext = f.substring(f.lastIndexOf('.'))\n\t\t\treturn 
CODE_EXTENSIONS.has(ext)\n\t\t})\n\t}\n\n\treturn filtered.sort()\n}\n\n/**\n * Format file content with line numbers\n */\nexport function formatWithLineNumbers(path: string): string {\n\tconst content = readFileSync(path, 'utf8')\n\tconst lines = content.split('\\n')\n\tconst totalLines = lines.length\n\n\tconst numbered = lines.map(\n\t\t(line, i) => `${ String(i + 1).padStart(4) } | ${ line }`\n\t)\n\n\treturn `\n--- BEGIN FILE: ${ path } (lines 1-${ totalLines }) ---\n${ numbered.join('\\n') }\n--- END FILE: ${ path } ---\n`\n}\n\n/**\n * Read multiple files with size limit\n */\nexport function readFilesWithLimit(\n\tpaths: string[],\n\tmaxBytes: number = 120_000\n): { files: string[]; truncated: number } {\n\tconst files: string[] = []\n\tlet totalSize = 0\n\tlet truncated = 0\n\n\tfor (const path of paths) {\n\t\ttry {\n\t\t\tstatSync(path) // Verify file exists and is readable\n\t\t\tconst formatted = formatWithLineNumbers(path)\n\n\t\t\tif (totalSize + formatted.length > maxBytes) {\n\t\t\t\ttruncated = paths.length - files.length\n\t\t\t\tbreak\n\t\t\t}\n\n\t\t\tfiles.push(formatted)\n\t\t\ttotalSize += formatted.length\n\t\t} catch {\n\t\t\t// Skip unreadable files\n\t\t}\n\t}\n\n\treturn { files, truncated }\n}\n\n/**\n * Check if path is a file\n */\nexport function isFile(path: string): boolean {\n\ttry {\n\t\treturn statSync(path).isFile()\n\t} catch {\n\t\treturn false\n\t}\n}\n\n/**\n * Check if path is a directory\n */\nexport function isDirectory(path: string): boolean {\n\ttry {\n\t\treturn statSync(path).isDirectory()\n\t} catch {\n\t\treturn false\n\t}\n}\n", "/**\n * Git utilities: .gitignore parsing, diff operations\n */\n\nimport { exec,execSync } from 'child_process'\nimport { existsSync,readFileSync } from 'fs'\nimport ignore, { type Ignore } from 'ignore'\nimport { dirname,join, resolve } from 'path'\nimport { promisify } from 'util'\n\nconst execAsync = promisify(exec)\n\n/**\n * Load .gitignore patterns from directory and parents\n 
*/\nexport function loadGitignore(dir: string = '.'): Ignore {\n\tconst ig = ignore()\n\tconst absoluteDir = resolve(dir)\n\n\t// Walk up the tree loading .gitignore files\n\tlet current = absoluteDir\n\tconst gitignoreFiles: string[] = []\n\n\twhile (current) {\n\t\tconst gitignorePath = join(current, '.gitignore')\n\t\tif (existsSync(gitignorePath)) {\n\t\t\tgitignoreFiles.unshift(gitignorePath) // Add to front (parent patterns first)\n\t\t}\n\n\t\t// Check if we've hit the git root\n\t\tif (existsSync(join(current, '.git'))) {\n\t\t\tbreak\n\t\t}\n\n\t\tconst parent = dirname(current)\n\t\tif (parent === current) {break}\n\t\tcurrent = parent\n\t}\n\n\t// Load patterns (parent first, then child - child overrides)\n\tfor (const path of gitignoreFiles) {\n\t\tig.add(readFileSync(path, 'utf8'))\n\t}\n\n\t// Always ignore common junk\n\tig.add([\n\t\t'node_modules/',\n\t\t'__pycache__/',\n\t\t'.venv/',\n\t\t'venv/',\n\t\t'dist/',\n\t\t'build/',\n\t\t'.git/',\n\t\t'.eggs/',\n\t\t'*.egg-info/'\n\t])\n\n\treturn ig\n}\n\n/**\n * Filter paths, removing those matched by .gitignore\n */\nexport function filterIgnored(paths: string[], dir: string = '.'): string[] {\n\tconst ig = loadGitignore(dir)\n\treturn paths.filter(p => !ig.ignores(p))\n}\n\n/**\n * Check if paths are ignored using git check-ignore (batch)\n */\nexport function getIgnoredPaths(paths: string[]): Set<string> {\n\tif (paths.length === 0) {return new Set()}\n\n\ttry {\n\t\tconst result = execSync('git check-ignore --stdin', {\n\t\t\tinput: paths.join('\\n'),\n\t\t\tencoding: 'utf8',\n\t\t\tstdio: [ 'pipe', 'pipe', 'pipe' ]\n\t\t})\n\t\treturn new Set(result.trim().split('\\n').filter(Boolean))\n\t} catch {\n\t\treturn new Set()\n\t}\n}\n\n/**\n * Get git diff\n */\nexport async function getDiff(\n\tbase: string = 'HEAD~1',\n\tpath?: string\n): Promise<string> {\n\tconst args = [ 'git', 'diff', base ]\n\tif (path) {args.push('--', path)}\n\n\ttry {\n\t\tconst { stdout } = await execAsync(args.join(' 
'))\n\t\treturn stdout\n\t} catch(error) {\n\t\tthrow new Error(`Git diff failed: ${ error }`)\n\t}\n}\n\n/**\n * Check if we're in a git repository\n */\nexport function isGitRepo(dir: string = '.'): boolean {\n\ttry {\n\t\texecSync('git rev-parse --git-dir', {\n\t\t\tcwd: dir,\n\t\t\tstdio: [ 'pipe', 'pipe', 'pipe' ]\n\t\t})\n\t\treturn true\n\t} catch {\n\t\treturn false\n\t}\n}\n", "/**\n * Token counting utilities using tiktoken (Rust/WASM)\n */\n\nimport { get_encoding, type Tiktoken } from 'tiktoken'\n\nlet encoder: Tiktoken | null = null\n\n/**\n * Get or create the tokenizer instance\n */\nfunction getEncoder(): Tiktoken {\n\tif (!encoder) {\n\t\t// cl100k_base is used by GPT-4, Claude uses similar tokenization\n\t\tencoder = get_encoding('cl100k_base')\n\t}\n\treturn encoder\n}\n\n/**\n * Count tokens accurately using tiktoken\n */\nexport function countTokens(text: string): number {\n\treturn getEncoder().encode(text).length\n}\n\n/**\n * Estimate tokens quickly without full tokenization\n * ~4 chars per token for code, useful for quick checks\n */\nexport function estimateTokens(text: string): number {\n\treturn Math.ceil(text.length / 4)\n}\n\n/**\n * Check if text exceeds token limit\n */\nexport function exceedsTokenLimit(text: string, limit: number): boolean {\n\t// Quick estimate first\n\tif (estimateTokens(text) < limit * 0.8) {\n\t\treturn false\n\t}\n\t// Accurate count if close to limit\n\treturn countTokens(text) > limit\n}\n\n/**\n * Truncate text to fit within token limit\n */\nexport function truncateToTokens(text: string, maxTokens: number): string {\n\tconst tokens = getEncoder().encode(text)\n\tif (tokens.length <= maxTokens) {\n\t\treturn text\n\t}\n\tconst truncated = tokens.slice(0, maxTokens)\n\tconst decoded = getEncoder().decode(truncated)\n\treturn new TextDecoder().decode(decoded)\n}\n\n/**\n * Free the encoder when done (optional, for memory cleanup)\n */\nexport function freeEncoder(): void {\n\tif (encoder) 
{\n\t\tencoder.free()\n\t\tencoder = null\n\t}\n}\n"],
|
|
5
|
+
"mappings": ";AAIA,SAAS,YAAW,oBAAoB;AACxC,SAAS,YAAY;AAiCf,SAAU,gBAAa;AAC5B,QAAM,QAAQ,aAAa,GAAG,OAAO;AACrC,SAAO,KAAK,MAAM,KAAK;AACxB;AAKM,SAAU,gBAAgB,MAAa;AAC5C,UAAQ,IAAI,KAAK,UAAU,IAAI,CAAC;AACjC;AAKO,IAAM,OAAO;EACnB,OAAO;EACP,OAAO;;AAMF,SAAU,WACf,YACA,UACA,cAAwB,CAAE,QAAQ,IAAG,GAAI,KAAK,QAAQ,IAAG,GAAI,IAAI,CAAC,GAAE;AAEpE,aAAW,YAAY,aAAa;AACnC,UAAM,aAAa,KAAK,UAAU,UAAU;AAC5C,QAAI,WAAW,UAAU,GAAG;AAC3B,UAAI;AACH,cAAM,UAAU,aAAa,YAAY,OAAO;AAChD,cAAM,SAAS,KAAK,MAAM,OAAO;AACjC,eAAO,EAAE,GAAG,UAAU,GAAG,OAAM;MAChC,QAAQ;MAER;IACD;EACD;AACA,SAAO;AACR;;;AC3EA,SAAS,cAAAA,aAAY,gBAAAC,qBAAoB;AACzC,SAAS,QAAAC,aAAY;AAErB,OAAO,YAAY;AAGnB,SAAS,eAAY;AACpB,QAAM,WAAW;IAChBA,MAAK,QAAQ,IAAG,GAAI,MAAM;IAC1BA,MAAK,QAAQ,IAAG,GAAI,MAAM,MAAM;IAChCA,MAAK,QAAQ,IAAG,GAAI,MAAM,MAAM,MAAM;IACtCA,MAAK,QAAQ,IAAG,GAAI,YAAY,WAAW,MAAM;;AAGlD,aAAW,WAAW,UAAU;AAC/B,QAAIF,YAAW,OAAO,GAAG;AACxB,YAAM,UAAUC,cAAa,SAAS,OAAO;AAC7C,iBAAW,QAAQ,QAAQ,MAAM,IAAI,GAAG;AACvC,cAAM,UAAU,KAAK,KAAI;AACzB,YAAI,WAAW,CAAC,QAAQ,WAAW,GAAG,GAAG;AACxC,gBAAM,QAAQ,QAAQ,QAAQ,GAAG;AACjC,cAAI,QAAQ,GAAG;AACd,kBAAM,MAAM,QAAQ,MAAM,GAAG,KAAK,EAAE,KAAI;AACxC,kBAAM,QAAQ,QAAQ,MAAM,QAAQ,CAAC,EAAE,KAAI;AAE3C,gBAAI,CAAC,QAAQ,IAAI,GAAG,GAAG;AACtB,sBAAQ,IAAI,GAAG,IAAI;YACpB;UACD;QACD;MACD;IACD;EACD;AACD;AAGA,aAAY;AAYZ,IAAM,YAA8C;EACnD,UAAU;IACT,SAAS;IACT,QAAQ;IACR,QAAQ;MACP;;MACA;;MACA;;;;EAGF,MAAM;IACL,SAAS;IACT,QAAQ;IACR,QAAQ;MACP;;;;EAGF,QAAQ;IACP,SAAS;IACT,QAAQ;IACR,QAAQ;MACP;;;;AAMH,IAAM,aAAa,oBAAI,IAAG;AAG1B,IAAM,UAAU,oBAAI,IAAG;AAKjB,SAAU,wBAAqB;AACpC,SAAQ,OAAO,KAAK,SAAS,EAAiB,OAC7C,cAAY,CAAC,CAAC,QAAQ,IAAI,UAAU,QAAQ,EAAE,MAAM,CAAC;AAEvD;AAKA,SAAS,UAAU,UAAkB;AACpC,MAAI,SAAS,QAAQ,IAAI,QAAQ;AACjC,MAAI,CAAC,QAAQ;AACZ,UAAM,SAAS,UAAU,QAAQ;AACjC,UAAM,SAAS,QAAQ,IAAI,OAAO,MAAM;AACxC,QAAI,CAAC,QAAQ;AACZ,YAAM,IAAI,MAAM,GAAI,OAAO,MAAO,+BAA+B;IAClE;AACA,aAAS,IAAI,OAAO;MACnB;MACA,SAAS,OAAO;KAChB;AACD,YAAQ,IAAI,UAAU,MAAM;EAC7B;AACA,SAAO;AACR;AAKA,SAAS,qBAAkB;AAC1B,QAAM,YAAY,sBAAqB;AACvC,MAAI,UAAU,WAAW,GAAG;AAC3B,UAAM,IAAI,MAAM,mFAAmF;EACpG;AACA,SAAO,UAAU,CAA
C;AACnB;AAKA,SAAS,gBAAgB,UAAkB;AAC1C,QAAM,SAAS,UAAU,QAAQ;AACjC,QAAM,MAAM,WAAW,IAAI,QAAQ,KAAK;AACxC,SAAO,OAAO,OAAO,KAAK,IAAI,KAAK,OAAO,OAAO,SAAS,CAAC,CAAC;AAC7D;AAKA,SAAS,YAAY,UAAkB;AACtC,QAAM,SAAS,UAAU,QAAQ;AACjC,QAAM,aAAa,WAAW,IAAI,QAAQ,KAAK;AAC/C,QAAM,UAAU,aAAa;AAE7B,MAAI,WAAW,OAAO,OAAO,QAAQ;AACpC,WAAO;EACR;AAEA,aAAW,IAAI,UAAU,OAAO;AAChC,SAAO,OAAO,OAAO,OAAO;AAC7B;AA8BA,eAAsB,sBAAmB;AACxC,QAAM,UAA2B,CAAA;AAEjC,aAAW,YAAY,sBAAqB,GAAI;AAC/C,QAAI;AACH,YAAM,SAAS,UAAU,QAAQ;AACjC,YAAM,SAAS,UAAU,QAAQ;AAGjC,YAAM,WAAW,MAAM,OAAO,KAAK,YAAY,OAAO;QACrD,OAAO,OAAO,OAAO,CAAC;QACtB,UAAU,CAAE,EAAE,MAAM,QAAQ,SAAS,KAAI,CAAE;QAC3C,YAAY;OACZ,EAAE,WAAU;AAEb,YAAM,IAAI,SAAS;AACnB,YAAM,SAAS,CAAC,SAAiB,SAAS,EAAE,IAAI,IAAI,KAAK,GAAG,KAAK;AAEjE,cAAQ,KAAK;QACZ;QACA,WAAW;QACX,QAAQ;;UAEP,mBAAmB,OAAO,4BAA4B;UACtD,yBAAyB,OAAO,gCAAgC;UAChE,iBAAiB,OAAO,0BAA0B;UAClD,uBAAuB,OAAO,8BAA8B;;UAE5D,gBAAgB,OAAO,gCAAgC;UACvD,sBAAsB,OAAO,oCAAoC;UACjE,cAAc,OAAO,8BAA8B;UACnD,oBAAoB,OAAO,kCAAkC;;OAE9D;IACF,SAAQ,OAAO;AACd,cAAQ,KAAK;QACZ;QACA,WAAW;QACX,OAAQ,MAAgB;OACxB;IACF;EACD;AAEA,SAAO;AACR;AAKA,SAAS,iBAAiB,OAAc;AACvC,MAAI,SAAS,OAAO,UAAU,UAAU;AACvC,UAAM,SAAU,MAA8B;AAC9C,WAAO,WAAW;EACnB;AACA,SAAO;AACR;AAOA,eAAsB,KACrB,QACA,UAAuB,CAAA,GAAE;AAEzB,QAAM,EAAE,QAAQ,WAAW,KAAI,IAAK;AACpC,MAAI,EAAE,UAAU,MAAK,IAAK;AAG1B,MAAI,CAAC,UAAU;AACd,eAAW,mBAAkB;EAC9B;AAGA,MAAI,CAAC,OAAO;AACX,YAAQ,gBAAgB,QAAQ;EACjC;AAEA,QAAM,WAAgD,CAAA;AACtD,MAAI,QAAQ;AACX,aAAS,KAAK,EAAE,MAAM,UAAU,SAAS,OAAM,CAAE;EAClD;AACA,WAAS,KAAK,EAAE,MAAM,QAAQ,SAAS,OAAM,CAAE;AAG/C,MAAI,kBAAkB;AACtB,MAAI,eAAe;AAEnB,SAAO,MAAM;AACZ,QAAI;AACH,YAAM,WAAW,MAAM,UAAU,eAAe,EAAE,KAAK,YAAY,OAAO;QACzE,OAAO;QACP;OACA;AAED,aAAO,SAAS,QAAQ,CAAC,GAAG,SAAS,WAAW;IACjD,SAAS,OAAO;AAEf,UAAI,iBAAiB,KAAK,GAAG;AAC5B,cAAM,YAAY,YAAY,eAAe;AAC7C,YAAI,WAAW;AACd,kBAAQ,MAAM,mBAAoB,YAAa,iBAAkB,SAAU,EAAE;AAC7E,yBAAe;AACf;QACD;MACD;AAGA,UAAI,UAAU;AACb,cAAM,YAAY,sBAAqB,EAAG,OAAO,OAAK,MAAM,eAAe;AAC3E,YAAI,UAAU,SAAS,GAAG;AACzB,4BAAkB,UAAU,CAAC;AAC7B,yBAAe,gBAAgB,eAAe;AAC9C,kBAAQ,MAAM,mBAAoB,eAAgB,IAAK,Y
AAa,EAAE;AACtE;QACD;MACD;AAEA,YAAM,IAAI,MAAM,kBAAmB,eAAgB,IAAK,YAAa,MAAO,KAAM,EAAE;IACrF;EACD;AACD;;;ACvSA,SAAS,gBAAAE,eAAc,gBAAgB;AACvC,SAAS,YAAY;;;ACDrB,SAAS,MAAK,gBAAgB;AAC9B,SAAS,cAAAC,aAAW,gBAAAC,qBAAoB;AACxC,OAAO,YAA6B;AACpC,SAAS,SAAQ,QAAAC,OAAM,eAAe;AACtC,SAAS,iBAAiB;AAE1B,IAAM,YAAY,UAAU,IAAI;AAK1B,SAAU,cAAc,MAAc,KAAG;AAC9C,QAAM,KAAK,OAAM;AACjB,QAAM,cAAc,QAAQ,GAAG;AAG/B,MAAI,UAAU;AACd,QAAM,iBAA2B,CAAA;AAEjC,SAAO,SAAS;AACf,UAAM,gBAAgBA,MAAK,SAAS,YAAY;AAChD,QAAIF,YAAW,aAAa,GAAG;AAC9B,qBAAe,QAAQ,aAAa;IACrC;AAGA,QAAIA,YAAWE,MAAK,SAAS,MAAM,CAAC,GAAG;AACtC;IACD;AAEA,UAAM,SAAS,QAAQ,OAAO;AAC9B,QAAI,WAAW,SAAS;AAAC;IAAK;AAC9B,cAAU;EACX;AAGA,aAAW,QAAQ,gBAAgB;AAClC,OAAG,IAAID,cAAa,MAAM,MAAM,CAAC;EAClC;AAGA,KAAG,IAAI;IACN;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;GACA;AAED,SAAO;AACR;AAKM,SAAU,cAAc,OAAiB,MAAc,KAAG;AAC/D,QAAM,KAAK,cAAc,GAAG;AAC5B,SAAO,MAAM,OAAO,OAAK,CAAC,GAAG,QAAQ,CAAC,CAAC;AACxC;AAuBA,eAAsB,QACrB,OAAe,UACf,MAAa;AAEb,QAAM,OAAO,CAAE,OAAO,QAAQ,IAAI;AAClC,MAAI,MAAM;AAAC,SAAK,KAAK,MAAM,IAAI;EAAC;AAEhC,MAAI;AACH,UAAM,EAAE,OAAM,IAAK,MAAM,UAAU,KAAK,KAAK,GAAG,CAAC;AACjD,WAAO;EACR,SAAQ,OAAO;AACd,UAAM,IAAI,MAAM,oBAAqB,KAAM,EAAE;EAC9C;AACD;;;AD5FO,IAAM,kBAAkB,oBAAI,IAAI;EACtC;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;CACA;AAKD,eAAsB,UACrB,SACA,UAAkC,CAAA,GAAE;AAEpC,QAAM,QAAQ,MAAM,KAAK,SAAS;IACjC,QAAQ;MACP;MACA;MACA;MACA;MACA;MACA;MACA;;IAED,OAAO;IACP,KAAK;GACL;AAGD,MAAI,WAAW,cAAc,KAAK;AAGlC,MAAI,QAAQ,UAAU;AACrB,eAAW,SAAS,OAAO,OAAI;AAC9B,YAAM,MAAM,EAAE,UAAU,EAAE,YAAY,GAAG,CAAC;AAC1C,aAAO,gBAAgB,IAAI,GAAG;IAC/B,CAAC;EACF;AAEA,SAAO,SAAS,KAAI;AACrB;AAKM,SAAU,sBAAsB,MAAY;AACjD,QAAM,UAAUE,cAAa,MAAM,MAAM;AACzC,QAAM,QAAQ,QAAQ,MAAM,IAAI;AAChC,QAAM,aAAa,MAAM;AAEzB,QAAM,WAAW,MAAM,IACtB,CAAC,MAAM,MAAM,GAAI,OAAO,IAAI,CAAC,EAAE,SAAS,CAAC,CAAE,MAAO,IAAK,EAAE;AAG1D,SAAO;kBACW,IAAK,aAAc,UAAW;EAC9C,SAAS,KAAK,IAAI,CAAE;gBACN,IAAK;;AAEtB;AAKM,SAAU,mBACf,OACA,WAAmB,MAAO;AAE1B,QAAM,QAAkB,CAAA;AACxB,MAAI,YAAY;AAChB,MAAI,YAAY;AAEhB,aAAW,QAAQ,OAAO;AACzB,QAAI;AAC
H,eAAS,IAAI;AACb,YAAM,YAAY,sBAAsB,IAAI;AAE5C,UAAI,YAAY,UAAU,SAAS,UAAU;AAC5C,oBAAY,MAAM,SAAS,MAAM;AACjC;MACD;AAEA,YAAM,KAAK,SAAS;AACpB,mBAAa,UAAU;IACxB,QAAQ;IAER;EACD;AAEA,SAAO,EAAE,OAAO,UAAS;AAC1B;;;AE5GA,SAAS,oBAAmC;AAE5C,IAAI,UAA2B;AAK/B,SAAS,aAAU;AAClB,MAAI,CAAC,SAAS;AAEb,cAAU,aAAa,aAAa;EACrC;AACA,SAAO;AACR;AAKM,SAAU,YAAY,MAAY;AACvC,SAAO,WAAU,EAAG,OAAO,IAAI,EAAE;AAClC;",
|
|
6
|
+
"names": ["existsSync", "readFileSync", "join", "readFileSync", "existsSync", "readFileSync", "join", "readFileSync"]
|
|
7
|
+
}
|