akm-cli 0.0.0 → 0.0.17
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +385 -0
- package/README.md +249 -6
- package/dist/asset-spec.js +70 -0
- package/dist/cli.js +934 -0
- package/dist/common.js +192 -0
- package/dist/config-cli.js +233 -0
- package/dist/config.js +338 -0
- package/dist/db.js +371 -0
- package/dist/embedder.js +150 -0
- package/dist/errors.js +28 -0
- package/dist/file-context.js +162 -0
- package/dist/frontmatter.js +86 -0
- package/dist/github.js +17 -0
- package/dist/indexer.js +311 -0
- package/dist/init.js +43 -0
- package/dist/llm.js +87 -0
- package/dist/lockfile.js +60 -0
- package/dist/markdown.js +77 -0
- package/dist/matchers.js +159 -0
- package/dist/metadata.js +408 -0
- package/dist/origin-resolve.js +54 -0
- package/dist/paths.js +92 -0
- package/dist/registry-install.js +459 -0
- package/dist/registry-resolve.js +486 -0
- package/dist/registry-search.js +365 -0
- package/dist/registry-types.js +1 -0
- package/dist/renderers.js +386 -0
- package/dist/ripgrep-install.js +155 -0
- package/dist/ripgrep-resolve.js +78 -0
- package/dist/ripgrep.js +2 -0
- package/dist/self-update.js +226 -0
- package/dist/stash-add.js +71 -0
- package/dist/stash-clone.js +115 -0
- package/dist/stash-ref.js +73 -0
- package/dist/stash-registry.js +206 -0
- package/dist/stash-resolve.js +55 -0
- package/dist/stash-search.js +490 -0
- package/dist/stash-show.js +58 -0
- package/dist/stash-source.js +130 -0
- package/dist/stash-types.js +1 -0
- package/dist/walker.js +163 -0
- package/dist/warn.js +20 -0
- package/package.json +53 -7
- package/index.js +0 -4
|
@@ -0,0 +1,386 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Built-in asset renderers.
|
|
3
|
+
*
|
|
4
|
+
* Each renderer implements the show/search/metadata behavior for its asset
|
|
5
|
+
* type via the AssetRenderer interface from ./file-context. Renderers are
|
|
6
|
+
* registered at module-load time so that importing this module is sufficient
|
|
7
|
+
* to make them available.
|
|
8
|
+
*/
|
|
9
|
+
import fs from "node:fs";
|
|
10
|
+
import path from "node:path";
|
|
11
|
+
import { hasErrnoCode } from "./common";
|
|
12
|
+
import { registerRenderer } from "./file-context";
|
|
13
|
+
import { parseFrontmatter, toStringOrUndefined } from "./frontmatter";
|
|
14
|
+
import { extractFrontmatterOnly, extractLineRange, extractSection, formatToc, parseMarkdownToc } from "./markdown";
|
|
15
|
+
import { extractDescriptionFromComments, loadStashFile } from "./metadata";
|
|
16
|
+
// ── Interpreter auto-detection map ───────────────────────────────────────────
// Lowercased file extension → interpreter command used to build a default
// `run` hint (the script's path is appended to the command by detectExecHints).
const INTERPRETER_MAP = {
    ".sh": "bash",
    // JS/TS scripts default to the bun runtime.
    ".ts": "bun",
    ".js": "bun",
    ".py": "python",
    ".rb": "ruby",
    ".go": "go run",
    // Windows shells: commands include the flag needed to run a file.
    ".ps1": "powershell -File",
    ".cmd": "cmd /c",
    ".bat": "cmd /c",
    ".pl": "perl",
    ".php": "php",
    ".lua": "lua",
    ".r": "Rscript",
    ".swift": "swift",
    ".kt": "kotlin",
    ".kts": "kotlin",
};
|
|
35
|
+
// ── Setup signal map ─────────────────────────────────────────────────────────
// Dependency manifest filename → setup command suggested when that file is
// found next to a script (first match wins in detectExecHints).
const SETUP_SIGNALS = {
    "package.json": "bun install",
    "requirements.txt": "pip install -r requirements.txt",
    Gemfile: "bundle install",
    "go.mod": "go mod download",
};
|
|
42
|
+
// ── Comment tag extraction ───────────────────────────────────────────────────
|
|
43
|
+
/**
 * Extract `@run`, `@setup`, `@cwd` tags from script file header comments.
 *
 * Scans the first 50 lines of the file for comment lines containing
 * `@run <value>`, `@setup <value>`, or `@cwd <value>`. Returns an empty
 * object when the file cannot be read; a later occurrence of a tag
 * overwrites an earlier one.
 */
export function extractCommentTags(filePath) {
    let content;
    try {
        content = fs.readFileSync(filePath, "utf8");
    }
    catch {
        // Unreadable/missing file: return no hints rather than throwing.
        return {};
    }
    const lines = content.split(/\r?\n/).slice(0, 50);
    const hints = {};
    for (const line of lines) {
        const trimmed = line.trim();
        // Match lines starting with comment markers: //, #, /*, *, ;, --, '
        if (!/^(?:\/\/|#|\/?\*|;|--)/.test(trimmed) && !trimmed.startsWith("'"))
            continue;
        // Strip the comment prefix. FIX: the `'` marker (VB-style comments) was
        // accepted by the filter above but never stripped, so tags in such
        // comments could never match; include it in the strip pattern.
        const cleaned = trimmed
            .replace(/^(?:\/\/|##?|\/?\*\*?\/?|;|--|')\s*/, "")
            .replace(/\*\/\s*$/, "")
            .trim();
        const runMatch = cleaned.match(/^@run\s+(.+)/);
        if (runMatch)
            hints.run = runMatch[1].trim();
        const setupMatch = cleaned.match(/^@setup\s+(.+)/);
        if (setupMatch)
            hints.setup = setupMatch[1].trim();
        const cwdMatch = cleaned.match(/^@cwd\s+(.+)/);
        if (cwdMatch)
            hints.cwd = cwdMatch[1].trim();
    }
    return hints;
}
|
|
81
|
+
// ── Auto-detection ───────────────────────────────────────────────────────────
|
|
82
|
+
/**
 * Auto-detect execution hints from the file extension and nearby files.
 *
 * 1. Maps the file extension to an interpreter via INTERPRETER_MAP.
 * 2. Scans the file's directory for dependency signal files (package.json,
 *    requirements.txt, etc.) to suggest a setup command.
 */
export function detectExecHints(filePath) {
    const hints = {};
    // Interpreter derived from the (lowercased) extension, if known.
    const interpreter = INTERPRETER_MAP[path.extname(filePath).toLowerCase()];
    if (interpreter) {
        hints.run = `${interpreter} ${filePath}`;
    }
    // First dependency manifest found next to the script wins.
    const dir = path.dirname(filePath);
    try {
        const signal = Object.entries(SETUP_SIGNALS).find(([file]) => fs.existsSync(path.join(dir, file)));
        if (signal) {
            hints.setup = signal[1];
            hints.cwd = dir;
        }
    }
    catch {
        // Non-fatal: skip setup detection on FS errors
    }
    return hints;
}
|
|
113
|
+
// ── Resolution ───────────────────────────────────────────────────────────────
|
|
114
|
+
/**
 * Resolve execution hints for a script asset.
 *
 * Resolution order (first non-empty value wins for each field):
 * 1. `.stash.json` fields (`run`/`setup`/`cwd`) take priority
 * 2. Script file header comments (`@run`/`@setup`/`@cwd`) second
 * 3. Auto-detection from extension + dependency files last
 */
export function resolveExecHints(stashEntry, filePath) {
    const fromComments = extractCommentTags(filePath);
    const fromAuto = detectExecHints(filePath);
    // Layered fallback: stash entry, then header comments, then auto-detect.
    const pick = (field) => stashEntry?.[field] || fromComments[field] || fromAuto[field];
    return {
        run: pick("run"),
        setup: pick("setup"),
        cwd: pick("cwd"),
    };
}
|
|
136
|
+
// ── Helpers ──────────────────────────────────────────────────────────────────
|
|
137
|
+
/**
 * Derive a display name from the RenderContext.
 *
 * Prefers `matchResult.meta.name` when present; otherwise falls back to the
 * POSIX-style relative path stripped of its extension.
 */
function deriveName(ctx) {
    const fromMeta = ctx.matchResult.meta?.name;
    if (typeof fromMeta === "string" && fromMeta !== "") {
        return fromMeta;
    }
    // No usable meta name: use relPath minus its extension (if any).
    const extension = path.extname(ctx.relPath);
    if (extension === "") {
        return ctx.relPath;
    }
    return ctx.relPath.slice(0, ctx.relPath.length - extension.length);
}
|
|
151
|
+
/**
 * Load the matching StashEntry for a file path from the directory's .stash.json.
 * Returns undefined when no .stash.json exists or no entry matches the basename.
 */
function findStashEntryForFile(filePath) {
    const stashFile = loadStashFile(path.dirname(filePath));
    if (!stashFile) {
        return undefined;
    }
    const wanted = path.basename(filePath);
    return stashFile.entries.find((candidate) => candidate.filename === wanted);
}
|
|
162
|
+
/**
 * Collect template parameter names in first-seen order:
 * `$ARGUMENTS` (recorded without the `$`), positional `$1`–`$9`
 * (recorded with the `$`), and `{{identifier}}` placeholders.
 * Returns undefined when the template uses no parameters.
 */
function extractParameters(template) {
    const seen = [];
    const record = (parameter) => {
        if (!seen.includes(parameter)) {
            seen.push(parameter);
        }
    };
    if (/\$ARGUMENTS\b/i.test(template)) {
        record("ARGUMENTS");
    }
    for (const positional of template.matchAll(/\$([1-9])/g)) {
        record(`$${positional[1]}`);
    }
    for (const named of template.matchAll(/\{\{([a-zA-Z_][a-zA-Z0-9_]*)\}\}/g)) {
        record(named[1]);
    }
    return seen.length === 0 ? undefined : seen;
}
|
|
181
|
+
// ── 1. skill-md ──────────────────────────────────────────────────────────────
/** Renderer for skill markdown assets: returns the full file content. */
const skillMdRenderer = {
    name: "skill-md",
    buildShowResponse(ctx) {
        const response = {
            type: "skill",
            name: deriveName(ctx),
            path: ctx.absPath,
            action: "Read and follow the instructions below",
            content: ctx.content(),
        };
        return response;
    },
};
|
|
195
|
+
// ── 2. command-md ────────────────────────────────────────────────────────────
/**
 * Renderer for command markdown assets: splits frontmatter from the template
 * body and surfaces description/model/agent hints plus detected parameters.
 */
const commandMdRenderer = {
    name: "command-md",
    buildShowResponse(ctx) {
        const parsed = parseFrontmatter(ctx.content());
        const template = parsed.content;
        const meta = parsed.data;
        return {
            type: "command",
            name: deriveName(ctx),
            path: ctx.absPath,
            action: "Fill $ARGUMENTS placeholders in the template, then dispatch",
            description: toStringOrUndefined(meta.description),
            template,
            modelHint: meta.model,
            agent: toStringOrUndefined(meta.agent),
            parameters: extractParameters(template),
        };
    },
};
|
|
215
|
+
// ── 3. agent-md ──────────────────────────────────────────────────────────────
/**
 * Renderer for agent markdown assets: the body becomes the dispatch prompt,
 * with optional tool policy and model hint taken from frontmatter.
 */
const agentMdRenderer = {
    name: "agent-md",
    buildShowResponse(ctx) {
        const parsed = parseFrontmatter(ctx.content());
        const meta = parsed.data;
        return {
            type: "agent",
            name: deriveName(ctx),
            path: ctx.absPath,
            action: "Dispatch using the prompt below verbatim. Use modelHint and toolPolicy if present.",
            description: toStringOrUndefined(meta.description),
            prompt: parsed.content,
            toolPolicy: meta.tools,
            modelHint: meta.model,
        };
    },
};
|
|
233
|
+
// ── 4. knowledge-md ──────────────────────────────────────────────────────────
/**
 * Renderer for knowledge markdown assets.
 *
 * Supports view modes on `matchResult.meta.view`:
 * - "toc": formatted table of contents
 * - "frontmatter": frontmatter block only (or a placeholder)
 * - "section": a single heading's section (with a not-found message)
 * - "lines": a line range
 * - anything else: the full document
 */
const knowledgeMdRenderer = {
    name: "knowledge-md",
    buildShowResponse(ctx) {
        const name = deriveName(ctx);
        const v = ctx.matchResult.meta?.view ?? { mode: "full" };
        const content = ctx.content();
        // Every view mode returns the same envelope with only `content`
        // varying; build it in one place instead of five duplicated literals.
        const respond = (body) => ({
            type: "knowledge",
            name,
            path: ctx.absPath,
            action: "Reference material - read the content below. Use 'toc' view for large documents.",
            content: body,
        });
        switch (v.mode) {
            case "toc":
                return respond(formatToc(parseMarkdownToc(content)));
            case "frontmatter":
                return respond(extractFrontmatterOnly(content) ?? "(no frontmatter)");
            case "section": {
                const section = extractSection(content, v.heading);
                if (!section) {
                    return respond(`Section "${v.heading}" not found in ${name}. Try \`akm show <ref> toc\` to discover available headings.`);
                }
                return respond(section.content);
            }
            case "lines":
                return respond(extractLineRange(content, v.start, v.end));
            default:
                return respond(content);
        }
    },
    extractMetadata(entry, ctx) {
        try {
            const toc = parseMarkdownToc(ctx.content());
            if (toc.headings.length > 0)
                entry.toc = toc.headings;
        }
        catch {
            // Non-fatal: skip TOC if file can't be read
        }
    },
};
|
|
311
|
+
// ── 5. script-source ─────────────────────────────────────────────────────────
/**
 * Renderer for script assets. When the extension has a known interpreter and
 * a run command can be resolved, shows exec hints; otherwise shows the source.
 */
const scriptSourceRenderer = {
    name: "script-source",
    buildShowResponse(ctx) {
        const name = deriveName(ctx);
        const extension = path.extname(ctx.absPath).toLowerCase();
        // Only attempt exec-hint resolution for known interpreters.
        if (INTERPRETER_MAP[extension]) {
            const entry = findStashEntryForFile(ctx.absPath);
            const hints = resolveExecHints(entry, ctx.absPath);
            if (hints.run) {
                return {
                    type: "script",
                    name,
                    path: ctx.absPath,
                    action: "Execute the run command below",
                    run: hints.run,
                    setup: hints.setup,
                    cwd: hints.cwd,
                };
            }
        }
        // Fallback: no interpreter or no run hint — show the source itself.
        return {
            type: "script",
            name,
            path: ctx.absPath,
            action: "Review the script source below",
            content: ctx.content(),
        };
    },
    enrichSearchHit(hit, _stashDir) {
        const extension = path.extname(hit.path).toLowerCase();
        if (!INTERPRETER_MAP[extension]) {
            return;
        }
        try {
            const entry = findStashEntryForFile(hit.path);
            hit.run = resolveExecHints(entry, hit.path).run;
        }
        catch (error) {
            // A hit whose file vanished is fine to skip; anything else is real.
            if (!hasErrnoCode(error, "ENOENT")) {
                throw error;
            }
        }
    },
    extractMetadata(entry, ctx) {
        if (ctx.ext === ".md") {
            return;
        }
        const commentDesc = extractDescriptionFromComments(ctx.absPath);
        if (commentDesc && !entry.description) {
            entry.description = commentDesc;
            entry.source = "comments";
            entry.confidence = 0.7;
        }
    },
};
|
|
367
|
+
// ── Registration ─────────────────────────────────────────────────────────────
/** All built-in renderers. */
// Listed in registration order; each element is passed to registerRenderer()
// by registerBuiltinRenderers() below.
const builtinRenderers = [
    skillMdRenderer,
    commandMdRenderer,
    agentMdRenderer,
    knowledgeMdRenderer,
    scriptSourceRenderer,
];
|
|
376
|
+
/**
|
|
377
|
+
* Register all built-in renderers with the file-context registry.
|
|
378
|
+
* Called once from the CLI entry point (or ensureBuiltinsRegistered).
|
|
379
|
+
*/
|
|
380
|
+
export function registerBuiltinRenderers() {
|
|
381
|
+
for (const renderer of builtinRenderers) {
|
|
382
|
+
registerRenderer(renderer);
|
|
383
|
+
}
|
|
384
|
+
}
|
|
385
|
+
// ── Named exports for testing ────────────────────────────────────────────────
|
|
386
|
+
export { skillMdRenderer, commandMdRenderer, agentMdRenderer, knowledgeMdRenderer, scriptSourceRenderer, INTERPRETER_MAP, SETUP_SIGNALS, };
|
|
@@ -0,0 +1,155 @@
|
|
|
1
|
+
import { spawnSync } from "node:child_process";
|
|
2
|
+
import fs from "node:fs";
|
|
3
|
+
import path from "node:path";
|
|
4
|
+
import { IS_WINDOWS } from "./common";
|
|
5
|
+
import { RG_BINARY, resolveRg } from "./ripgrep-resolve";
|
|
6
|
+
/**
 * Platform and architecture detection for ripgrep binary downloads.
 * Returns the release-asset target triple and archive extension for the
 * current process, or null when no prebuilt binary is supported.
 */
function getRgPlatformTarget() {
    switch (`${process.platform}/${process.arch}`) {
        case "linux/x64":
            return { platform: "x86_64-unknown-linux-musl", arch: "x64", ext: ".tar.gz" };
        case "linux/arm64":
            return { platform: "aarch64-unknown-linux-gnu", arch: "arm64", ext: ".tar.gz" };
        case "darwin/x64":
            return { platform: "x86_64-apple-darwin", arch: "x64", ext: ".tar.gz" };
        case "darwin/arm64":
            return { platform: "aarch64-apple-darwin", arch: "arm64", ext: ".tar.gz" };
        case "win32/x64":
            return { platform: "x86_64-pc-windows-msvc", arch: "x64", ext: ".zip" };
        default:
            return null;
    }
}
|
|
29
|
+
// Pinned ripgrep release used when auto-installing (also the download tag).
const RG_VERSION = "14.1.1";
|
|
30
|
+
/**
 * Ensure ripgrep is available. If not found on PATH or in the given binDir,
 * download and install it to binDir.
 *
 * @param binDir - Directory to install ripgrep into (e.g. cache/bin from paths.ts)
 * Returns the path to the ripgrep binary and whether it was newly installed.
 */
export function ensureRg(binDir) {
    // Reuse any binary already present (binDir, cache, or system PATH).
    const existing = resolveRg(binDir);
    if (existing) {
        return { rgPath: existing, installed: false, version: getRgVersion(existing) };
    }
    const target = getRgPlatformTarget();
    if (!target) {
        throw new Error(`Unsupported platform for ripgrep auto-install: ${process.platform}/${process.arch}. ` +
            `Install ripgrep manually: https://github.com/BurntSushi/ripgrep#installation`);
    }
    // recursive:true makes this a no-op when the directory already exists.
    fs.mkdirSync(binDir, { recursive: true });
    const archiveName = `ripgrep-${RG_VERSION}-${target.platform}`;
    const url = `https://github.com/BurntSushi/ripgrep/releases/download/${RG_VERSION}/${archiveName}${target.ext}`;
    const destBinary = path.join(binDir, RG_BINARY);
    // Archive format decides the extraction strategy.
    const install = target.ext === ".tar.gz" ? downloadAndExtractTarGz : downloadAndExtractZip;
    install(url, archiveName, destBinary);
    // Make executable
    if (!IS_WINDOWS) {
        fs.chmodSync(destBinary, 0o755);
    }
    return { rgPath: destBinary, installed: true, version: RG_VERSION };
}
|
|
67
|
+
/**
 * Download a ripgrep .tar.gz release from `url` and extract only the `rg`
 * binary into the directory of `destBinary`. The temporary archive is
 * removed on both success and failure.
 */
function downloadAndExtractTarGz(url, archiveName, destBinary) {
    const destDir = path.dirname(destBinary);
    const tmpTarGz = path.join(destDir, "rg-download.tar.gz");
    // Run one external tool without a shell; throw with its stderr on failure.
    const run = (command, args, failurePrefix) => {
        const result = spawnSync(command, args, {
            encoding: "utf8",
            timeout: 60_000,
            env: process.env,
        });
        if (result.status !== 0) {
            const detail = result.stderr?.trim() || result.error?.message || "unknown error";
            throw new Error(`${failurePrefix} ${url}: ${detail}`);
        }
    };
    try {
        // Download archive to a temporary file without using a shell
        run("curl", ["-fsSL", "-o", tmpTarGz, url], "Failed to download ripgrep from");
        // Extract the specific binary from the archive into destDir
        run("tar", ["xzf", tmpTarGz, "--strip-components=1", "-C", destDir, `${archiveName}/rg`], "Failed to extract ripgrep from");
        if (!fs.existsSync(destBinary)) {
            throw new Error(`ripgrep binary not found at ${destBinary} after extraction`);
        }
    }
    finally {
        // Best-effort cleanup of temporary archive
        try {
            if (fs.existsSync(tmpTarGz)) {
                fs.unlinkSync(tmpTarGz);
            }
        }
        catch {
            // ignore cleanup errors
        }
    }
}
|
|
107
|
+
/**
 * Download a ripgrep .zip release (Windows) from `url`, extract it with
 * PowerShell, and move rg.exe to `destBinary`. Temporary files are cleaned
 * up on both success and failure.
 */
function downloadAndExtractZip(url, archiveName, destBinary) {
    const destDir = path.dirname(destBinary);
    const tmpZip = path.join(destDir, "rg-download.zip");
    const expandedDir = path.join(destDir, archiveName);
    // Quote a value as a PowerShell single-quoted string literal. PowerShell
    // escapes an embedded single quote by doubling it.
    const psQuote = (value) => `'${String(value).replace(/'/g, "''")}'`;
    try {
        // Download
        const dlResult = spawnSync("curl", ["-fsSL", "-o", tmpZip, url], {
            encoding: "utf8",
            timeout: 60_000,
            env: process.env,
        });
        if (dlResult.status !== 0) {
            throw new Error(dlResult.stderr?.trim() || "download failed");
        }
        // BUG FIX: PowerShell joins everything after -Command into a single
        // script string, so passing paths as bare argv elements left them
        // unquoted — any install path containing spaces (e.g. under
        // "C:\Users\First Last\") broke or was re-parsed by PowerShell.
        // Quote each path as a single-quoted literal and use -LiteralPath.
        const expandResult = spawnSync("powershell", [
            "-NoProfile",
            "-Command",
            `Expand-Archive -LiteralPath ${psQuote(tmpZip)} -DestinationPath ${psQuote(destDir)} -Force`,
        ], {
            encoding: "utf8",
            timeout: 60_000,
            env: process.env,
        });
        if (expandResult.status !== 0) {
            throw new Error(expandResult.stderr?.trim() || "extraction failed");
        }
        const srcRgExe = path.join(destDir, archiveName, "rg.exe");
        const moveResult = spawnSync("powershell", [
            "-NoProfile",
            "-Command",
            `Move-Item -Force -LiteralPath ${psQuote(srcRgExe)} -Destination ${psQuote(destBinary)}`,
        ], {
            encoding: "utf8",
            timeout: 60_000,
            env: process.env,
        });
        if (moveResult.status !== 0) {
            throw new Error(moveResult.stderr?.trim() || "move failed");
        }
    }
    finally {
        // Best-effort cleanup of the archive and the expanded directory.
        if (fs.existsSync(tmpZip))
            fs.unlinkSync(tmpZip);
        if (fs.existsSync(expandedDir))
            fs.rmSync(expandedDir, { recursive: true, force: true });
    }
}
|
|
148
|
+
/**
 * Probe an rg binary for its version by parsing `rg --version` output.
 * Returns "unknown" when the binary cannot be run or the output is
 * unrecognized.
 */
function getRgVersion(rgPath) {
    const probe = spawnSync(rgPath, ["--version"], { encoding: "utf8", timeout: 5_000, env: process.env });
    if (probe.status !== 0 || !probe.stdout) {
        return "unknown";
    }
    const versionMatch = probe.stdout.match(/ripgrep\s+([\d.]+)/);
    return versionMatch?.[1] ?? "unknown";
}
|
|
@@ -0,0 +1,78 @@
|
|
|
1
|
+
import fs from "node:fs";
|
|
2
|
+
import path from "node:path";
|
|
3
|
+
import { IS_WINDOWS } from "./common";
|
|
4
|
+
import { getBinDir } from "./paths";
|
|
5
|
+
// Platform-specific ripgrep executable filename ("rg.exe" on Windows).
export const RG_BINARY = IS_WINDOWS ? "rg.exe" : "rg";
|
|
6
|
+
/**
 * True when `filePath` exists and is executable by the current process.
 * On Windows existence alone is accepted (X_OK has no meaning there).
 */
function canExecute(filePath) {
    if (!fs.existsSync(filePath)) {
        return false;
    }
    if (IS_WINDOWS) {
        return true;
    }
    try {
        fs.accessSync(filePath, fs.constants.X_OK);
    }
    catch {
        return false;
    }
    return true;
}
|
|
19
|
+
/**
 * Scan the PATH environment variable for an rg executable.
 * On Windows, each PATH entry is checked for a bare `rg` first, then
 * `rg` with every PATHEXT extension. Returns null when not found.
 */
function resolveFromPath() {
    const rawPath = process.env.PATH;
    if (!rawPath) {
        return null;
    }
    const pathEntries = rawPath.split(path.delimiter).filter(Boolean);
    if (!IS_WINDOWS) {
        for (const entry of pathEntries) {
            const candidate = path.join(entry, "rg");
            if (canExecute(candidate)) {
                return candidate;
            }
        }
        return null;
    }
    const extensions = (process.env.PATHEXT || ".EXE;.CMD;.BAT;.COM")
        .split(";")
        .filter(Boolean)
        .map((ext) => ext.toLowerCase());
    for (const entry of pathEntries) {
        // "" checks the extensionless name before the PATHEXT variants.
        for (const suffix of ["", ...extensions]) {
            const candidate = path.join(entry, `rg${suffix}`);
            if (canExecute(candidate)) {
                return candidate;
            }
        }
    }
    return null;
}
|
|
48
|
+
/**
 * Resolve the path to a usable ripgrep binary.
 * Checks in order:
 * 1. Provided binDir (or default cache bin dir) for rg
 * 2. System PATH (rg)
 * Returns null if ripgrep is not available.
 */
export function resolveRg(binDir) {
    const candidates = [];
    if (binDir) {
        candidates.push(path.join(binDir, RG_BINARY));
    }
    try {
        candidates.push(path.join(getBinDir(), RG_BINARY));
    }
    catch {
        // getBinDir may fail if HOME is not set — fall through
    }
    for (const candidate of candidates) {
        if (canExecute(candidate)) {
            return candidate;
        }
    }
    return resolveFromPath();
}
|
|
73
|
+
/**
 * Check if ripgrep is available (in cache/bin or system PATH).
 */
export function isRgAvailable(binDir) {
    const resolved = resolveRg(binDir);
    return resolved !== null;
}
|
package/dist/ripgrep.js
ADDED