agentikit 0.0.3 → 0.0.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.ts +2 -0
- package/dist/index.js +1 -0
- package/dist/src/ripgrep.d.ts +36 -0
- package/dist/src/ripgrep.js +262 -0
- package/dist/src/similarity.js +24 -9
- package/dist/src/stash.d.ts +5 -0
- package/dist/src/stash.js +58 -5
- package/package.json +1 -1
- package/skills/stash/SKILL.md +10 -5
- package/src/ripgrep.ts +315 -0
- package/src/similarity.ts +20 -8
- package/src/stash.ts +70 -5
package/dist/index.d.ts
CHANGED
|
@@ -4,3 +4,5 @@ export type { AgentikitAssetType, AgentikitSearchType, SearchHit, SearchResponse
|
|
|
4
4
|
export { agentikitIndex } from "./src/indexer";
|
|
5
5
|
export type { IndexResponse } from "./src/indexer";
|
|
6
6
|
export type { StashEntry, StashFile, StashIntent } from "./src/metadata";
|
|
7
|
+
export { resolveRg, isRgAvailable, ensureRg } from "./src/ripgrep";
|
|
8
|
+
export type { EnsureRgResult } from "./src/ripgrep";
|
package/dist/index.js
CHANGED
|
@@ -0,0 +1,36 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Resolve the path to a usable ripgrep binary.
|
|
3
|
+
* Checks in order:
|
|
4
|
+
* 1. stashDir/bin/rg
|
|
5
|
+
* 2. system PATH (rg)
|
|
6
|
+
* Returns null if ripgrep is not available.
|
|
7
|
+
*/
|
|
8
|
+
export declare function resolveRg(stashDir?: string): string | null;
|
|
9
|
+
/**
|
|
10
|
+
* Check if ripgrep is available (either in stash/bin or system PATH).
|
|
11
|
+
*/
|
|
12
|
+
export declare function isRgAvailable(stashDir?: string): boolean;
|
|
13
|
+
export interface RgCandidateResult {
|
|
14
|
+
matchedFiles: string[];
|
|
15
|
+
usedRg: boolean;
|
|
16
|
+
}
|
|
17
|
+
/**
|
|
18
|
+
* Use ripgrep to find .stash.json files that match query tokens.
|
|
19
|
+
* Returns paths to matching .stash.json files.
|
|
20
|
+
*
|
|
21
|
+
* If ripgrep is not available or the query is empty, returns null
|
|
22
|
+
* to signal that the caller should skip pre-filtering.
|
|
23
|
+
*/
|
|
24
|
+
export declare function rgFilterCandidates(query: string, searchDir: string, stashDir?: string): RgCandidateResult | null;
|
|
25
|
+
export interface EnsureRgResult {
|
|
26
|
+
rgPath: string;
|
|
27
|
+
installed: boolean;
|
|
28
|
+
version: string;
|
|
29
|
+
}
|
|
30
|
+
/**
|
|
31
|
+
* Ensure ripgrep is available. If not found on PATH or in stash/bin,
|
|
32
|
+
* download and install it to stash/bin.
|
|
33
|
+
*
|
|
34
|
+
* Returns the path to the ripgrep binary and whether it was newly installed.
|
|
35
|
+
*/
|
|
36
|
+
export declare function ensureRg(stashDir: string): EnsureRgResult;
|
|
@@ -0,0 +1,262 @@
|
|
|
1
|
+
import { spawnSync } from "node:child_process";
|
|
2
|
+
import fs from "node:fs";
|
|
3
|
+
import path from "node:path";
|
|
4
|
+
// ── ripgrep Resolution ──────────────────────────────────────────────────────
|
|
5
|
+
const IS_WINDOWS = process.platform === "win32";
|
|
6
|
+
const RG_BINARY = IS_WINDOWS ? "rg.exe" : "rg";
|
|
7
|
+
function canExecute(filePath) {
|
|
8
|
+
if (!fs.existsSync(filePath))
|
|
9
|
+
return false;
|
|
10
|
+
if (IS_WINDOWS)
|
|
11
|
+
return true;
|
|
12
|
+
try {
|
|
13
|
+
fs.accessSync(filePath, fs.constants.X_OK);
|
|
14
|
+
return true;
|
|
15
|
+
}
|
|
16
|
+
catch {
|
|
17
|
+
return false;
|
|
18
|
+
}
|
|
19
|
+
}
|
|
20
|
+
function resolveFromPath() {
|
|
21
|
+
const rawPath = process.env.PATH;
|
|
22
|
+
if (!rawPath)
|
|
23
|
+
return null;
|
|
24
|
+
const pathEntries = rawPath.split(path.delimiter).filter(Boolean);
|
|
25
|
+
if (IS_WINDOWS) {
|
|
26
|
+
const pathext = (process.env.PATHEXT || ".EXE;.CMD;.BAT;.COM")
|
|
27
|
+
.split(";")
|
|
28
|
+
.filter(Boolean)
|
|
29
|
+
.map((ext) => ext.toLowerCase());
|
|
30
|
+
for (const entry of pathEntries) {
|
|
31
|
+
const directCandidate = path.join(entry, "rg");
|
|
32
|
+
if (canExecute(directCandidate))
|
|
33
|
+
return directCandidate;
|
|
34
|
+
for (const ext of pathext) {
|
|
35
|
+
const candidate = path.join(entry, `rg${ext}`);
|
|
36
|
+
if (canExecute(candidate))
|
|
37
|
+
return candidate;
|
|
38
|
+
}
|
|
39
|
+
}
|
|
40
|
+
return null;
|
|
41
|
+
}
|
|
42
|
+
for (const entry of pathEntries) {
|
|
43
|
+
const candidate = path.join(entry, "rg");
|
|
44
|
+
if (canExecute(candidate))
|
|
45
|
+
return candidate;
|
|
46
|
+
}
|
|
47
|
+
return null;
|
|
48
|
+
}
|
|
49
|
+
/**
|
|
50
|
+
* Resolve the path to a usable ripgrep binary.
|
|
51
|
+
* Checks in order:
|
|
52
|
+
* 1. stashDir/bin/rg
|
|
53
|
+
* 2. system PATH (rg)
|
|
54
|
+
* Returns null if ripgrep is not available.
|
|
55
|
+
*/
|
|
56
|
+
export function resolveRg(stashDir) {
|
|
57
|
+
// Check stash bin directory first
|
|
58
|
+
if (stashDir) {
|
|
59
|
+
const stashRg = path.join(stashDir, "bin", RG_BINARY);
|
|
60
|
+
if (canExecute(stashRg))
|
|
61
|
+
return stashRg;
|
|
62
|
+
}
|
|
63
|
+
return resolveFromPath();
|
|
64
|
+
}
|
|
65
|
+
/**
|
|
66
|
+
* Check if ripgrep is available (either in stash/bin or system PATH).
|
|
67
|
+
*/
|
|
68
|
+
export function isRgAvailable(stashDir) {
|
|
69
|
+
return resolveRg(stashDir) !== null;
|
|
70
|
+
}
|
|
71
|
+
/**
|
|
72
|
+
* Use ripgrep to find .stash.json files that match query tokens.
|
|
73
|
+
* Returns paths to matching .stash.json files.
|
|
74
|
+
*
|
|
75
|
+
* If ripgrep is not available or the query is empty, returns null
|
|
76
|
+
* to signal that the caller should skip pre-filtering.
|
|
77
|
+
*/
|
|
78
|
+
export function rgFilterCandidates(query, searchDir, stashDir) {
|
|
79
|
+
if (!query.trim())
|
|
80
|
+
return null;
|
|
81
|
+
const rgPath = resolveRg(stashDir);
|
|
82
|
+
if (!rgPath)
|
|
83
|
+
return null;
|
|
84
|
+
// Tokenize the query into an OR pattern for ripgrep
|
|
85
|
+
const tokens = query
|
|
86
|
+
.toLowerCase()
|
|
87
|
+
.replace(/[^a-z0-9\s]/g, " ")
|
|
88
|
+
.split(/\s+/)
|
|
89
|
+
.filter((t) => t.length > 1);
|
|
90
|
+
if (tokens.length === 0)
|
|
91
|
+
return null;
|
|
92
|
+
const pattern = tokens.join("|");
|
|
93
|
+
const result = spawnSync(rgPath, [
|
|
94
|
+
"-i", // case insensitive
|
|
95
|
+
"-l", // files-with-matches only
|
|
96
|
+
"--hidden", // include hidden files such as .stash.json
|
|
97
|
+
"--no-ignore", // include ignored files to ensure metadata is searchable
|
|
98
|
+
"--glob", ".stash.json", // only search .stash.json files
|
|
99
|
+
pattern,
|
|
100
|
+
searchDir,
|
|
101
|
+
], {
|
|
102
|
+
encoding: "utf8",
|
|
103
|
+
timeout: 10_000,
|
|
104
|
+
});
|
|
105
|
+
if (result.status !== 0 && result.status !== 1) {
|
|
106
|
+
// rg exit code 1 = no matches (normal), anything else = error
|
|
107
|
+
return null;
|
|
108
|
+
}
|
|
109
|
+
const files = (result.stdout || "")
|
|
110
|
+
.trim()
|
|
111
|
+
.split(/\r?\n/)
|
|
112
|
+
.filter((f) => f.length > 0);
|
|
113
|
+
return { matchedFiles: files, usedRg: true };
|
|
114
|
+
}
|
|
115
|
+
// ── ripgrep Installation ────────────────────────────────────────────────────
|
|
116
|
+
/**
|
|
117
|
+
* Platform and architecture detection for ripgrep binary downloads.
|
|
118
|
+
*/
|
|
119
|
+
function getRgPlatformTarget() {
|
|
120
|
+
const platform = process.platform;
|
|
121
|
+
const arch = process.arch;
|
|
122
|
+
if (platform === "linux" && arch === "x64") {
|
|
123
|
+
return { platform: "x86_64-unknown-linux-musl", arch: "x64", ext: ".tar.gz" };
|
|
124
|
+
}
|
|
125
|
+
if (platform === "linux" && arch === "arm64") {
|
|
126
|
+
return { platform: "aarch64-unknown-linux-gnu", arch: "arm64", ext: ".tar.gz" };
|
|
127
|
+
}
|
|
128
|
+
if (platform === "darwin" && arch === "x64") {
|
|
129
|
+
return { platform: "x86_64-apple-darwin", arch: "x64", ext: ".tar.gz" };
|
|
130
|
+
}
|
|
131
|
+
if (platform === "darwin" && arch === "arm64") {
|
|
132
|
+
return { platform: "aarch64-apple-darwin", arch: "arm64", ext: ".tar.gz" };
|
|
133
|
+
}
|
|
134
|
+
if (platform === "win32" && arch === "x64") {
|
|
135
|
+
return { platform: "x86_64-pc-windows-msvc", arch: "x64", ext: ".zip" };
|
|
136
|
+
}
|
|
137
|
+
return null;
|
|
138
|
+
}
|
|
139
|
+
const RG_VERSION = "14.1.1";
|
|
140
|
+
/**
|
|
141
|
+
* Ensure ripgrep is available. If not found on PATH or in stash/bin,
|
|
142
|
+
* download and install it to stash/bin.
|
|
143
|
+
*
|
|
144
|
+
* Returns the path to the ripgrep binary and whether it was newly installed.
|
|
145
|
+
*/
|
|
146
|
+
export function ensureRg(stashDir) {
|
|
147
|
+
// Already available?
|
|
148
|
+
const existing = resolveRg(stashDir);
|
|
149
|
+
if (existing) {
|
|
150
|
+
return { rgPath: existing, installed: false, version: getRgVersion(existing) };
|
|
151
|
+
}
|
|
152
|
+
// Determine platform
|
|
153
|
+
const target = getRgPlatformTarget();
|
|
154
|
+
if (!target) {
|
|
155
|
+
throw new Error(`Unsupported platform for ripgrep auto-install: ${process.platform}/${process.arch}. ` +
|
|
156
|
+
`Install ripgrep manually: https://github.com/BurntSushi/ripgrep#installation`);
|
|
157
|
+
}
|
|
158
|
+
const binDir = path.join(stashDir, "bin");
|
|
159
|
+
if (!fs.existsSync(binDir)) {
|
|
160
|
+
fs.mkdirSync(binDir, { recursive: true });
|
|
161
|
+
}
|
|
162
|
+
const archiveName = `ripgrep-${RG_VERSION}-${target.platform}`;
|
|
163
|
+
const url = `https://github.com/BurntSushi/ripgrep/releases/download/${RG_VERSION}/${archiveName}${target.ext}`;
|
|
164
|
+
const destBinary = path.join(binDir, RG_BINARY);
|
|
165
|
+
if (target.ext === ".tar.gz") {
|
|
166
|
+
downloadAndExtractTarGz(url, archiveName, destBinary);
|
|
167
|
+
}
|
|
168
|
+
else {
|
|
169
|
+
downloadAndExtractZip(url, archiveName, destBinary);
|
|
170
|
+
}
|
|
171
|
+
// Make executable
|
|
172
|
+
if (!IS_WINDOWS) {
|
|
173
|
+
fs.chmodSync(destBinary, 0o755);
|
|
174
|
+
}
|
|
175
|
+
return { rgPath: destBinary, installed: true, version: RG_VERSION };
|
|
176
|
+
}
|
|
177
|
+
function downloadAndExtractTarGz(url, archiveName, destBinary) {
|
|
178
|
+
const destDir = path.dirname(destBinary);
|
|
179
|
+
const tmpTarGz = path.join(destDir, "rg-download.tar.gz");
|
|
180
|
+
try {
|
|
181
|
+
// Download archive to a temporary file without using a shell
|
|
182
|
+
const curlResult = spawnSync("curl", ["-fsSL", "-o", tmpTarGz, url], {
|
|
183
|
+
encoding: "utf8",
|
|
184
|
+
timeout: 60_000,
|
|
185
|
+
});
|
|
186
|
+
if (curlResult.status !== 0) {
|
|
187
|
+
const err = curlResult.stderr?.trim() || curlResult.error?.message || "unknown error";
|
|
188
|
+
throw new Error(`Failed to download ripgrep from ${url}: ${err}`);
|
|
189
|
+
}
|
|
190
|
+
// Extract the specific binary from the archive into destDir
|
|
191
|
+
const tarResult = spawnSync("tar", [
|
|
192
|
+
"xzf",
|
|
193
|
+
tmpTarGz,
|
|
194
|
+
"--strip-components=1",
|
|
195
|
+
"-C",
|
|
196
|
+
destDir,
|
|
197
|
+
`${archiveName}/rg`,
|
|
198
|
+
], {
|
|
199
|
+
encoding: "utf8",
|
|
200
|
+
timeout: 60_000,
|
|
201
|
+
});
|
|
202
|
+
if (tarResult.status !== 0) {
|
|
203
|
+
const err = tarResult.stderr?.trim() || tarResult.error?.message || "unknown error";
|
|
204
|
+
throw new Error(`Failed to extract ripgrep from ${url}: ${err}`);
|
|
205
|
+
}
|
|
206
|
+
if (!fs.existsSync(destBinary)) {
|
|
207
|
+
throw new Error(`ripgrep binary not found at ${destBinary} after extraction`);
|
|
208
|
+
}
|
|
209
|
+
}
|
|
210
|
+
finally {
|
|
211
|
+
// Best-effort cleanup of temporary archive
|
|
212
|
+
try {
|
|
213
|
+
if (fs.existsSync(tmpTarGz)) {
|
|
214
|
+
fs.unlinkSync(tmpTarGz);
|
|
215
|
+
}
|
|
216
|
+
}
|
|
217
|
+
catch {
|
|
218
|
+
// ignore cleanup errors
|
|
219
|
+
}
|
|
220
|
+
}
|
|
221
|
+
}
|
|
222
|
+
function downloadAndExtractZip(url, archiveName, destBinary) {
|
|
223
|
+
const destDir = path.dirname(destBinary);
|
|
224
|
+
const tmpZip = path.join(destDir, "rg-download.zip");
|
|
225
|
+
const expandedDir = path.join(destDir, archiveName);
|
|
226
|
+
try {
|
|
227
|
+
// Download
|
|
228
|
+
const dlResult = spawnSync("curl", ["-fsSL", "-o", tmpZip, url], {
|
|
229
|
+
encoding: "utf8",
|
|
230
|
+
timeout: 60_000,
|
|
231
|
+
});
|
|
232
|
+
if (dlResult.status !== 0) {
|
|
233
|
+
throw new Error(dlResult.stderr?.trim() || "download failed");
|
|
234
|
+
}
|
|
235
|
+
// Extract just the rg.exe
|
|
236
|
+
const extractResult = spawnSync("powershell", [
|
|
237
|
+
"-Command",
|
|
238
|
+
`Expand-Archive -Path "${tmpZip}" -DestinationPath "${destDir}" -Force; ` +
|
|
239
|
+
`Move-Item -Force "${path.join(destDir, archiveName, "rg.exe")}" "${destBinary}"`,
|
|
240
|
+
], {
|
|
241
|
+
encoding: "utf8",
|
|
242
|
+
timeout: 60_000,
|
|
243
|
+
});
|
|
244
|
+
if (extractResult.status !== 0) {
|
|
245
|
+
throw new Error(extractResult.stderr?.trim() || "extraction failed");
|
|
246
|
+
}
|
|
247
|
+
}
|
|
248
|
+
finally {
|
|
249
|
+
if (fs.existsSync(tmpZip))
|
|
250
|
+
fs.unlinkSync(tmpZip);
|
|
251
|
+
if (fs.existsSync(expandedDir))
|
|
252
|
+
fs.rmSync(expandedDir, { recursive: true, force: true });
|
|
253
|
+
}
|
|
254
|
+
}
|
|
255
|
+
function getRgVersion(rgPath) {
|
|
256
|
+
const result = spawnSync(rgPath, ["--version"], { encoding: "utf8", timeout: 5_000 });
|
|
257
|
+
if (result.status === 0 && result.stdout) {
|
|
258
|
+
const match = result.stdout.match(/ripgrep\s+([\d.]+)/);
|
|
259
|
+
return match ? match[1] : "unknown";
|
|
260
|
+
}
|
|
261
|
+
return "unknown";
|
|
262
|
+
}
|
package/dist/src/similarity.js
CHANGED
|
@@ -148,18 +148,33 @@ export class TfIdfAdapter {
|
|
|
148
148
|
}
|
|
149
149
|
substringFallback(query, limit, typeFilter) {
|
|
150
150
|
const q = query.toLowerCase();
|
|
151
|
+
const tokens = tokenize(q);
|
|
151
152
|
return this.documents
|
|
152
|
-
.
|
|
153
|
+
.map((d) => {
|
|
153
154
|
if (typeFilter && typeFilter !== "any" && d.entry.entry.type !== typeFilter)
|
|
154
|
-
return
|
|
155
|
-
|
|
155
|
+
return null;
|
|
156
|
+
// Check if any query token matches the document text or name
|
|
157
|
+
const text = d.entry.text;
|
|
158
|
+
const name = d.entry.entry.name.toLowerCase();
|
|
159
|
+
let matchCount = 0;
|
|
160
|
+
for (const token of tokens) {
|
|
161
|
+
if (text.includes(token) || name.includes(token))
|
|
162
|
+
matchCount++;
|
|
163
|
+
}
|
|
164
|
+
// Also check full substring match
|
|
165
|
+
if (text.includes(q) || name.includes(q))
|
|
166
|
+
matchCount = Math.max(matchCount, tokens.length);
|
|
167
|
+
if (matchCount === 0)
|
|
168
|
+
return null;
|
|
169
|
+
return {
|
|
170
|
+
entry: d.entry.entry,
|
|
171
|
+
path: d.entry.path,
|
|
172
|
+
score: Math.round((matchCount / Math.max(tokens.length, 1)) * 500) / 1000,
|
|
173
|
+
};
|
|
156
174
|
})
|
|
157
|
-
.
|
|
158
|
-
.
|
|
159
|
-
|
|
160
|
-
path: d.entry.path,
|
|
161
|
-
score: 0.5,
|
|
162
|
-
}));
|
|
175
|
+
.filter((d) => d !== null)
|
|
176
|
+
.sort((a, b) => b.score - a.score)
|
|
177
|
+
.slice(0, limit);
|
|
163
178
|
}
|
|
164
179
|
}
|
|
165
180
|
// ── Tokenization ────────────────────────────────────────────────────────────
|
package/dist/src/stash.d.ts
CHANGED
package/dist/src/stash.js
CHANGED
|
@@ -3,6 +3,7 @@ import fs from "node:fs";
|
|
|
3
3
|
import path from "node:path";
|
|
4
4
|
import { loadSearchIndex, buildSearchText } from "./indexer";
|
|
5
5
|
import { TfIdfAdapter } from "./similarity";
|
|
6
|
+
import { rgFilterCandidates, ensureRg } from "./ripgrep";
|
|
6
7
|
const IS_WINDOWS = process.platform === "win32";
|
|
7
8
|
const TOOL_EXTENSIONS = new Set([".sh", ".ts", ".js", ".ps1", ".cmd", ".bat"]);
|
|
8
9
|
const DEFAULT_LIMIT = 20;
|
|
@@ -57,28 +58,56 @@ function trySemanticSearch(query, searchType, limit, stashDir) {
|
|
|
57
58
|
return null;
|
|
58
59
|
if (index.stashDir !== stashDir)
|
|
59
60
|
return null;
|
|
60
|
-
|
|
61
|
+
// Stage 1: ripgrep candidate filtering
|
|
62
|
+
// Use rg to pre-filter .stash.json files that contain query tokens,
|
|
63
|
+
// then only run TF-IDF ranking on those candidates.
|
|
64
|
+
let candidateEntries = index.entries;
|
|
65
|
+
if (query) {
|
|
66
|
+
const rgResult = rgFilterCandidates(query, stashDir, stashDir);
|
|
67
|
+
if (rgResult && rgResult.usedRg) {
|
|
68
|
+
const matchedDirs = new Set(rgResult.matchedFiles.map((f) => path.dirname(f)));
|
|
69
|
+
candidateEntries = index.entries.filter((ie) => matchedDirs.has(ie.dirPath));
|
|
70
|
+
// If rg found nothing but we have a query, still fall through to TF-IDF
|
|
71
|
+
// on all entries — rg is a fast pre-filter, not the final authority
|
|
72
|
+
if (candidateEntries.length === 0) {
|
|
73
|
+
candidateEntries = index.entries;
|
|
74
|
+
}
|
|
75
|
+
}
|
|
76
|
+
}
|
|
77
|
+
// Stage 2: TF-IDF semantic ranking
|
|
78
|
+
const scoredEntries = candidateEntries.map((ie) => ({
|
|
61
79
|
id: `${ie.entry.type}:${ie.entry.name}`,
|
|
62
80
|
text: buildSearchText(ie.entry),
|
|
63
81
|
entry: ie.entry,
|
|
64
82
|
path: ie.path,
|
|
65
83
|
}));
|
|
66
84
|
let adapter;
|
|
67
|
-
if (index.tfidf) {
|
|
68
|
-
|
|
85
|
+
if (index.tfidf && !query) {
|
|
86
|
+
// Use cached TF-IDF state for empty queries (listing all)
|
|
87
|
+
const allScored = index.entries.map((ie) => ({
|
|
88
|
+
id: `${ie.entry.type}:${ie.entry.name}`,
|
|
89
|
+
text: buildSearchText(ie.entry),
|
|
90
|
+
entry: ie.entry,
|
|
91
|
+
path: ie.path,
|
|
92
|
+
}));
|
|
93
|
+
adapter = TfIdfAdapter.deserialize(index.tfidf, allScored);
|
|
69
94
|
}
|
|
70
95
|
else {
|
|
96
|
+
// Rebuild adapter from candidate subset
|
|
71
97
|
adapter = new TfIdfAdapter();
|
|
72
98
|
adapter.buildIndex(scoredEntries);
|
|
73
99
|
}
|
|
74
100
|
const typeFilter = searchType === "any" ? undefined : searchType;
|
|
75
101
|
const results = adapter.search(query, limit, typeFilter);
|
|
76
102
|
return results.map((r) => {
|
|
103
|
+
// Derive the openRef name from the filesystem path, not the stash entry name,
|
|
104
|
+
// because agentikitOpen resolves assets by their relative path under the type root.
|
|
105
|
+
const openRefName = deriveOpenRefName(r.entry.type, r.path, stashDir);
|
|
77
106
|
const hit = {
|
|
78
107
|
type: r.entry.type,
|
|
79
108
|
name: r.entry.name,
|
|
80
109
|
path: r.path,
|
|
81
|
-
openRef: makeOpenRef(r.entry.type,
|
|
110
|
+
openRef: makeOpenRef(r.entry.type, openRefName),
|
|
82
111
|
description: r.entry.description,
|
|
83
112
|
tags: r.entry.tags,
|
|
84
113
|
score: r.score,
|
|
@@ -96,6 +125,21 @@ function trySemanticSearch(query, searchType, limit, stashDir) {
|
|
|
96
125
|
return hit;
|
|
97
126
|
});
|
|
98
127
|
}
|
|
128
|
+
/**
|
|
129
|
+
* Derive the correct openRef name for a semantic search result.
|
|
130
|
+
* Tools use their relative file path (e.g., "deploy/deploy-k8s.sh"),
|
|
131
|
+
* skills use directory name, commands/agents use relative .md path.
|
|
132
|
+
*/
|
|
133
|
+
function deriveOpenRefName(type, filePath, stashDir) {
|
|
134
|
+
const indexer = ASSET_INDEXERS[type];
|
|
135
|
+
const root = path.join(stashDir, indexer.dir);
|
|
136
|
+
if (type === "skill") {
|
|
137
|
+
// Skills resolve by directory name relative to skills/
|
|
138
|
+
const rel = toPosix(path.dirname(path.relative(root, filePath)));
|
|
139
|
+
return rel === "." ? path.basename(path.dirname(filePath)) : rel;
|
|
140
|
+
}
|
|
141
|
+
return toPosix(path.relative(root, filePath));
|
|
142
|
+
}
|
|
99
143
|
export function agentikitOpen(input) {
|
|
100
144
|
const parsed = parseOpenRef(input.ref);
|
|
101
145
|
const stashDir = resolveStashDir();
|
|
@@ -571,7 +615,16 @@ export function agentikitInit() {
|
|
|
571
615
|
}
|
|
572
616
|
}
|
|
573
617
|
process.env.AGENTIKIT_STASH_DIR = stashDir;
|
|
574
|
-
|
|
618
|
+
// Ensure ripgrep is available (install to stash/bin if needed)
|
|
619
|
+
let ripgrep;
|
|
620
|
+
try {
|
|
621
|
+
const rgResult = ensureRg(stashDir);
|
|
622
|
+
ripgrep = rgResult;
|
|
623
|
+
}
|
|
624
|
+
catch {
|
|
625
|
+
// Non-fatal: ripgrep is optional, search works without it
|
|
626
|
+
}
|
|
627
|
+
return { stashDir, created, envSet, profileUpdated, ripgrep };
|
|
575
628
|
}
|
|
576
629
|
function hasErrnoCode(error, code) {
|
|
577
630
|
if (typeof error !== "object" || error === null || !("code" in error))
|
package/package.json
CHANGED
package/skills/stash/SKILL.md
CHANGED
|
@@ -28,7 +28,7 @@ Run this after adding new extensions to enable semantic search ranking.
|
|
|
28
28
|
|
|
29
29
|
### Search the stash
|
|
30
30
|
|
|
31
|
-
Find assets by semantic
|
|
31
|
+
Find assets using a two-stage search pipeline: ripgrep pre-filters `.stash.json` metadata files for fast candidate discovery, then TF-IDF ranks results by semantic relevance. Falls back to name substring matching when no index exists.
|
|
32
32
|
|
|
33
33
|
```bash
|
|
34
34
|
agentikit search [query] [--type tool|skill|command|agent|any] [--limit N]
|
|
@@ -58,11 +58,16 @@ agentikit run <openRef>
|
|
|
58
58
|
|
|
59
59
|
Returns the tool's stdout/stderr output and exit code.
|
|
60
60
|
|
|
61
|
+
## Dependencies
|
|
62
|
+
|
|
63
|
+
`agentikit init` will auto-install [ripgrep](https://github.com/BurntSushi/ripgrep) to `stash/bin/` if not already on PATH. Ripgrep is used for fast candidate filtering during search.
|
|
64
|
+
|
|
61
65
|
## Workflow
|
|
62
66
|
|
|
63
|
-
1.
|
|
64
|
-
2.
|
|
65
|
-
3.
|
|
66
|
-
4.
|
|
67
|
+
1. Initialize: `agentikit init` (creates stash dirs, installs ripgrep)
|
|
68
|
+
2. Build the index: `agentikit index`
|
|
69
|
+
3. Search for assets: `agentikit search "deploy" --type tool`
|
|
70
|
+
4. Inspect a result: `agentikit open <openRef>`
|
|
71
|
+
5. Run a tool: `agentikit run <openRef>`
|
|
67
72
|
|
|
68
73
|
All output is JSON for easy parsing.
|
package/src/ripgrep.ts
ADDED
|
@@ -0,0 +1,315 @@
|
|
|
1
|
+
import { spawnSync } from "node:child_process"
|
|
2
|
+
import fs from "node:fs"
|
|
3
|
+
import path from "node:path"
|
|
4
|
+
|
|
5
|
+
// ── ripgrep Resolution ──────────────────────────────────────────────────────
|
|
6
|
+
|
|
7
|
+
const IS_WINDOWS = process.platform === "win32"
|
|
8
|
+
const RG_BINARY = IS_WINDOWS ? "rg.exe" : "rg"
|
|
9
|
+
|
|
10
|
+
function canExecute(filePath: string): boolean {
|
|
11
|
+
if (!fs.existsSync(filePath)) return false
|
|
12
|
+
if (IS_WINDOWS) return true
|
|
13
|
+
try {
|
|
14
|
+
fs.accessSync(filePath, fs.constants.X_OK)
|
|
15
|
+
return true
|
|
16
|
+
} catch {
|
|
17
|
+
return false
|
|
18
|
+
}
|
|
19
|
+
}
|
|
20
|
+
|
|
21
|
+
function resolveFromPath(): string | null {
|
|
22
|
+
const rawPath = process.env.PATH
|
|
23
|
+
if (!rawPath) return null
|
|
24
|
+
|
|
25
|
+
const pathEntries = rawPath.split(path.delimiter).filter(Boolean)
|
|
26
|
+
|
|
27
|
+
if (IS_WINDOWS) {
|
|
28
|
+
const pathext = (process.env.PATHEXT || ".EXE;.CMD;.BAT;.COM")
|
|
29
|
+
.split(";")
|
|
30
|
+
.filter(Boolean)
|
|
31
|
+
.map((ext) => ext.toLowerCase())
|
|
32
|
+
|
|
33
|
+
for (const entry of pathEntries) {
|
|
34
|
+
const directCandidate = path.join(entry, "rg")
|
|
35
|
+
if (canExecute(directCandidate)) return directCandidate
|
|
36
|
+
|
|
37
|
+
for (const ext of pathext) {
|
|
38
|
+
const candidate = path.join(entry, `rg${ext}`)
|
|
39
|
+
if (canExecute(candidate)) return candidate
|
|
40
|
+
}
|
|
41
|
+
}
|
|
42
|
+
return null
|
|
43
|
+
}
|
|
44
|
+
|
|
45
|
+
for (const entry of pathEntries) {
|
|
46
|
+
const candidate = path.join(entry, "rg")
|
|
47
|
+
if (canExecute(candidate)) return candidate
|
|
48
|
+
}
|
|
49
|
+
|
|
50
|
+
return null
|
|
51
|
+
}
|
|
52
|
+
|
|
53
|
+
/**
|
|
54
|
+
* Resolve the path to a usable ripgrep binary.
|
|
55
|
+
* Checks in order:
|
|
56
|
+
* 1. stashDir/bin/rg
|
|
57
|
+
* 2. system PATH (rg)
|
|
58
|
+
* Returns null if ripgrep is not available.
|
|
59
|
+
*/
|
|
60
|
+
export function resolveRg(stashDir?: string): string | null {
|
|
61
|
+
// Check stash bin directory first
|
|
62
|
+
if (stashDir) {
|
|
63
|
+
const stashRg = path.join(stashDir, "bin", RG_BINARY)
|
|
64
|
+
if (canExecute(stashRg)) return stashRg
|
|
65
|
+
}
|
|
66
|
+
|
|
67
|
+
return resolveFromPath()
|
|
68
|
+
}
|
|
69
|
+
|
|
70
|
+
/**
|
|
71
|
+
* Check if ripgrep is available (either in stash/bin or system PATH).
|
|
72
|
+
*/
|
|
73
|
+
export function isRgAvailable(stashDir?: string): boolean {
|
|
74
|
+
return resolveRg(stashDir) !== null
|
|
75
|
+
}
|
|
76
|
+
|
|
77
|
+
// ── ripgrep Candidate Filtering ─────────────────────────────────────────────
|
|
78
|
+
|
|
79
|
+
export interface RgCandidateResult {
|
|
80
|
+
matchedFiles: string[]
|
|
81
|
+
usedRg: boolean
|
|
82
|
+
}
|
|
83
|
+
|
|
84
|
+
/**
|
|
85
|
+
* Use ripgrep to find .stash.json files that match query tokens.
|
|
86
|
+
* Returns paths to matching .stash.json files.
|
|
87
|
+
*
|
|
88
|
+
* If ripgrep is not available or the query is empty, returns null
|
|
89
|
+
* to signal that the caller should skip pre-filtering.
|
|
90
|
+
*/
|
|
91
|
+
export function rgFilterCandidates(
|
|
92
|
+
query: string,
|
|
93
|
+
searchDir: string,
|
|
94
|
+
stashDir?: string,
|
|
95
|
+
): RgCandidateResult | null {
|
|
96
|
+
if (!query.trim()) return null
|
|
97
|
+
|
|
98
|
+
const rgPath = resolveRg(stashDir)
|
|
99
|
+
if (!rgPath) return null
|
|
100
|
+
|
|
101
|
+
// Tokenize the query into an OR pattern for ripgrep
|
|
102
|
+
const tokens = query
|
|
103
|
+
.toLowerCase()
|
|
104
|
+
.replace(/[^a-z0-9\s]/g, " ")
|
|
105
|
+
.split(/\s+/)
|
|
106
|
+
.filter((t) => t.length > 1)
|
|
107
|
+
|
|
108
|
+
if (tokens.length === 0) return null
|
|
109
|
+
|
|
110
|
+
const pattern = tokens.join("|")
|
|
111
|
+
|
|
112
|
+
const result = spawnSync(rgPath, [
|
|
113
|
+
"-i", // case insensitive
|
|
114
|
+
"-l", // files-with-matches only
|
|
115
|
+
"--hidden", // include hidden files such as .stash.json
|
|
116
|
+
"--no-ignore", // include ignored files to ensure metadata is searchable
|
|
117
|
+
"--glob", ".stash.json", // only search .stash.json files
|
|
118
|
+
pattern,
|
|
119
|
+
searchDir,
|
|
120
|
+
], {
|
|
121
|
+
encoding: "utf8",
|
|
122
|
+
timeout: 10_000,
|
|
123
|
+
})
|
|
124
|
+
|
|
125
|
+
if (result.status !== 0 && result.status !== 1) {
|
|
126
|
+
// rg exit code 1 = no matches (normal), anything else = error
|
|
127
|
+
return null
|
|
128
|
+
}
|
|
129
|
+
|
|
130
|
+
const files = (result.stdout || "")
|
|
131
|
+
.trim()
|
|
132
|
+
.split(/\r?\n/)
|
|
133
|
+
.filter((f) => f.length > 0)
|
|
134
|
+
|
|
135
|
+
return { matchedFiles: files, usedRg: true }
|
|
136
|
+
}
|
|
137
|
+
|
|
138
|
+
// ── ripgrep Installation ────────────────────────────────────────────────────
|
|
139
|
+
|
|
140
|
+
/**
|
|
141
|
+
* Platform and architecture detection for ripgrep binary downloads.
|
|
142
|
+
*/
|
|
143
|
+
function getRgPlatformTarget(): { platform: string; arch: string; ext: string } | null {
|
|
144
|
+
const platform = process.platform
|
|
145
|
+
const arch = process.arch
|
|
146
|
+
|
|
147
|
+
if (platform === "linux" && arch === "x64") {
|
|
148
|
+
return { platform: "x86_64-unknown-linux-musl", arch: "x64", ext: ".tar.gz" }
|
|
149
|
+
}
|
|
150
|
+
if (platform === "linux" && arch === "arm64") {
|
|
151
|
+
return { platform: "aarch64-unknown-linux-gnu", arch: "arm64", ext: ".tar.gz" }
|
|
152
|
+
}
|
|
153
|
+
if (platform === "darwin" && arch === "x64") {
|
|
154
|
+
return { platform: "x86_64-apple-darwin", arch: "x64", ext: ".tar.gz" }
|
|
155
|
+
}
|
|
156
|
+
if (platform === "darwin" && arch === "arm64") {
|
|
157
|
+
return { platform: "aarch64-apple-darwin", arch: "arm64", ext: ".tar.gz" }
|
|
158
|
+
}
|
|
159
|
+
if (platform === "win32" && arch === "x64") {
|
|
160
|
+
return { platform: "x86_64-pc-windows-msvc", arch: "x64", ext: ".zip" }
|
|
161
|
+
}
|
|
162
|
+
|
|
163
|
+
return null
|
|
164
|
+
}
|
|
165
|
+
|
|
166
|
+
const RG_VERSION = "14.1.1"
|
|
167
|
+
|
|
168
|
+
export interface EnsureRgResult {
|
|
169
|
+
rgPath: string
|
|
170
|
+
installed: boolean
|
|
171
|
+
version: string
|
|
172
|
+
}
|
|
173
|
+
|
|
174
|
+
/**
|
|
175
|
+
* Ensure ripgrep is available. If not found on PATH or in stash/bin,
|
|
176
|
+
* download and install it to stash/bin.
|
|
177
|
+
*
|
|
178
|
+
* Returns the path to the ripgrep binary and whether it was newly installed.
|
|
179
|
+
*/
|
|
180
|
+
export function ensureRg(stashDir: string): EnsureRgResult {
|
|
181
|
+
// Already available?
|
|
182
|
+
const existing = resolveRg(stashDir)
|
|
183
|
+
if (existing) {
|
|
184
|
+
return { rgPath: existing, installed: false, version: getRgVersion(existing) }
|
|
185
|
+
}
|
|
186
|
+
|
|
187
|
+
// Determine platform
|
|
188
|
+
const target = getRgPlatformTarget()
|
|
189
|
+
if (!target) {
|
|
190
|
+
throw new Error(
|
|
191
|
+
`Unsupported platform for ripgrep auto-install: ${process.platform}/${process.arch}. ` +
|
|
192
|
+
`Install ripgrep manually: https://github.com/BurntSushi/ripgrep#installation`
|
|
193
|
+
)
|
|
194
|
+
}
|
|
195
|
+
|
|
196
|
+
const binDir = path.join(stashDir, "bin")
|
|
197
|
+
if (!fs.existsSync(binDir)) {
|
|
198
|
+
fs.mkdirSync(binDir, { recursive: true })
|
|
199
|
+
}
|
|
200
|
+
|
|
201
|
+
const archiveName = `ripgrep-${RG_VERSION}-${target.platform}`
|
|
202
|
+
const url = `https://github.com/BurntSushi/ripgrep/releases/download/${RG_VERSION}/${archiveName}${target.ext}`
|
|
203
|
+
const destBinary = path.join(binDir, RG_BINARY)
|
|
204
|
+
|
|
205
|
+
if (target.ext === ".tar.gz") {
|
|
206
|
+
downloadAndExtractTarGz(url, archiveName, destBinary)
|
|
207
|
+
} else {
|
|
208
|
+
downloadAndExtractZip(url, archiveName, destBinary)
|
|
209
|
+
}
|
|
210
|
+
|
|
211
|
+
// Make executable
|
|
212
|
+
if (!IS_WINDOWS) {
|
|
213
|
+
fs.chmodSync(destBinary, 0o755)
|
|
214
|
+
}
|
|
215
|
+
|
|
216
|
+
return { rgPath: destBinary, installed: true, version: RG_VERSION }
|
|
217
|
+
}
|
|
218
|
+
|
|
219
|
+
function downloadAndExtractTarGz(url: string, archiveName: string, destBinary: string): void {
|
|
220
|
+
const destDir = path.dirname(destBinary)
|
|
221
|
+
const tmpTarGz = path.join(destDir, "rg-download.tar.gz")
|
|
222
|
+
|
|
223
|
+
try {
|
|
224
|
+
// Download archive to a temporary file without using a shell
|
|
225
|
+
const curlResult = spawnSync(
|
|
226
|
+
"curl",
|
|
227
|
+
["-fsSL", "-o", tmpTarGz, url],
|
|
228
|
+
{
|
|
229
|
+
encoding: "utf8",
|
|
230
|
+
timeout: 60_000,
|
|
231
|
+
}
|
|
232
|
+
)
|
|
233
|
+
|
|
234
|
+
if (curlResult.status !== 0) {
|
|
235
|
+
const err = curlResult.stderr?.trim() || curlResult.error?.message || "unknown error"
|
|
236
|
+
throw new Error(`Failed to download ripgrep from ${url}: ${err}`)
|
|
237
|
+
}
|
|
238
|
+
|
|
239
|
+
// Extract the specific binary from the archive into destDir
|
|
240
|
+
const tarResult = spawnSync(
|
|
241
|
+
"tar",
|
|
242
|
+
[
|
|
243
|
+
"xzf",
|
|
244
|
+
tmpTarGz,
|
|
245
|
+
"--strip-components=1",
|
|
246
|
+
"-C",
|
|
247
|
+
destDir,
|
|
248
|
+
`${archiveName}/rg`,
|
|
249
|
+
],
|
|
250
|
+
{
|
|
251
|
+
encoding: "utf8",
|
|
252
|
+
timeout: 60_000,
|
|
253
|
+
}
|
|
254
|
+
)
|
|
255
|
+
|
|
256
|
+
if (tarResult.status !== 0) {
|
|
257
|
+
const err = tarResult.stderr?.trim() || tarResult.error?.message || "unknown error"
|
|
258
|
+
throw new Error(`Failed to extract ripgrep from ${url}: ${err}`)
|
|
259
|
+
}
|
|
260
|
+
|
|
261
|
+
if (!fs.existsSync(destBinary)) {
|
|
262
|
+
throw new Error(`ripgrep binary not found at ${destBinary} after extraction`)
|
|
263
|
+
}
|
|
264
|
+
} finally {
|
|
265
|
+
// Best-effort cleanup of temporary archive
|
|
266
|
+
try {
|
|
267
|
+
if (fs.existsSync(tmpTarGz)) {
|
|
268
|
+
fs.unlinkSync(tmpTarGz)
|
|
269
|
+
}
|
|
270
|
+
} catch {
|
|
271
|
+
// ignore cleanup errors
|
|
272
|
+
}
|
|
273
|
+
}
|
|
274
|
+
}
|
|
275
|
+
|
|
276
|
+
function downloadAndExtractZip(url: string, archiveName: string, destBinary: string): void {
|
|
277
|
+
const destDir = path.dirname(destBinary)
|
|
278
|
+
const tmpZip = path.join(destDir, "rg-download.zip")
|
|
279
|
+
const expandedDir = path.join(destDir, archiveName)
|
|
280
|
+
try {
|
|
281
|
+
// Download
|
|
282
|
+
const dlResult = spawnSync("curl", ["-fsSL", "-o", tmpZip, url], {
|
|
283
|
+
encoding: "utf8",
|
|
284
|
+
timeout: 60_000,
|
|
285
|
+
})
|
|
286
|
+
if (dlResult.status !== 0) {
|
|
287
|
+
throw new Error(dlResult.stderr?.trim() || "download failed")
|
|
288
|
+
}
|
|
289
|
+
|
|
290
|
+
// Extract just the rg.exe
|
|
291
|
+
const extractResult = spawnSync("powershell", [
|
|
292
|
+
"-Command",
|
|
293
|
+
`Expand-Archive -Path "${tmpZip}" -DestinationPath "${destDir}" -Force; ` +
|
|
294
|
+
`Move-Item -Force "${path.join(destDir, archiveName, "rg.exe")}" "${destBinary}"`,
|
|
295
|
+
], {
|
|
296
|
+
encoding: "utf8",
|
|
297
|
+
timeout: 60_000,
|
|
298
|
+
})
|
|
299
|
+
if (extractResult.status !== 0) {
|
|
300
|
+
throw new Error(extractResult.stderr?.trim() || "extraction failed")
|
|
301
|
+
}
|
|
302
|
+
} finally {
|
|
303
|
+
if (fs.existsSync(tmpZip)) fs.unlinkSync(tmpZip)
|
|
304
|
+
if (fs.existsSync(expandedDir)) fs.rmSync(expandedDir, { recursive: true, force: true })
|
|
305
|
+
}
|
|
306
|
+
}
|
|
307
|
+
|
|
308
|
+
function getRgVersion(rgPath: string): string {
|
|
309
|
+
const result = spawnSync(rgPath, ["--version"], { encoding: "utf8", timeout: 5_000 })
|
|
310
|
+
if (result.status === 0 && result.stdout) {
|
|
311
|
+
const match = result.stdout.match(/ripgrep\s+([\d.]+)/)
|
|
312
|
+
return match ? match[1] : "unknown"
|
|
313
|
+
}
|
|
314
|
+
return "unknown"
|
|
315
|
+
}
|
package/src/similarity.ts
CHANGED
|
@@ -208,17 +208,29 @@ export class TfIdfAdapter implements SearchAdapter {
|
|
|
208
208
|
|
|
209
209
|
private substringFallback(query: string, limit: number, typeFilter?: string): ScoredResult[] {
  // Last-resort lexical matching when TF-IDF scoring is not applicable:
  // count how many query tokens appear in each document's text or name.
  const lowered = query.toLowerCase()
  const tokens = tokenize(lowered)
  const scored: ScoredResult[] = []
  for (const doc of this.documents) {
    if (typeFilter && typeFilter !== "any" && doc.entry.entry.type !== typeFilter) continue
    const text = doc.entry.text
    const name = doc.entry.entry.name.toLowerCase()
    let hits = 0
    for (const token of tokens) {
      if (text.includes(token) || name.includes(token)) hits++
    }
    // A whole-query substring match counts as matching every token.
    if (text.includes(lowered) || name.includes(lowered)) hits = Math.max(hits, tokens.length)
    if (hits === 0) continue
    scored.push({
      entry: doc.entry.entry,
      path: doc.entry.path,
      // Scale to at most 0.5 (in 0.001 steps) so fallback hits always rank
      // below genuine TF-IDF scores.
      score: Math.round((hits / Math.max(tokens.length, 1)) * 500) / 1000,
    })
  }
  return scored.sort((a, b) => b.score - a.score).slice(0, limit)
}
|
|
223
235
|
}
|
|
224
236
|
|
package/src/stash.ts
CHANGED
|
@@ -3,6 +3,7 @@ import fs from "node:fs"
|
|
|
3
3
|
import path from "node:path"
|
|
4
4
|
import { loadSearchIndex, buildSearchText } from "./indexer"
|
|
5
5
|
import { TfIdfAdapter, type ScoredEntry } from "./similarity"
|
|
6
|
+
import { rgFilterCandidates, ensureRg } from "./ripgrep"
|
|
6
7
|
|
|
7
8
|
export type AgentikitAssetType = "tool" | "skill" | "command" | "agent"
|
|
8
9
|
export type AgentikitSearchType = AgentikitAssetType | "any"
|
|
@@ -134,7 +135,25 @@ function trySemanticSearch(
|
|
|
134
135
|
if (!index || !index.entries || index.entries.length === 0) return null
|
|
135
136
|
if (index.stashDir !== stashDir) return null
|
|
136
137
|
|
|
137
|
-
|
|
138
|
+
// Stage 1: ripgrep candidate filtering
|
|
139
|
+
// Use rg to pre-filter .stash.json files that contain query tokens,
|
|
140
|
+
// then only run TF-IDF ranking on those candidates.
|
|
141
|
+
let candidateEntries = index.entries
|
|
142
|
+
if (query) {
|
|
143
|
+
const rgResult = rgFilterCandidates(query, stashDir, stashDir)
|
|
144
|
+
if (rgResult && rgResult.usedRg) {
|
|
145
|
+
const matchedDirs = new Set(rgResult.matchedFiles.map((f) => path.dirname(f)))
|
|
146
|
+
candidateEntries = index.entries.filter((ie) => matchedDirs.has(ie.dirPath))
|
|
147
|
+
// If rg found nothing but we have a query, still fall through to TF-IDF
|
|
148
|
+
// on all entries — rg is a fast pre-filter, not the final authority
|
|
149
|
+
if (candidateEntries.length === 0) {
|
|
150
|
+
candidateEntries = index.entries
|
|
151
|
+
}
|
|
152
|
+
}
|
|
153
|
+
}
|
|
154
|
+
|
|
155
|
+
// Stage 2: TF-IDF semantic ranking
|
|
156
|
+
const scoredEntries: ScoredEntry[] = candidateEntries.map((ie) => ({
|
|
138
157
|
id: `${ie.entry.type}:${ie.entry.name}`,
|
|
139
158
|
text: buildSearchText(ie.entry),
|
|
140
159
|
entry: ie.entry,
|
|
@@ -142,9 +161,17 @@ function trySemanticSearch(
|
|
|
142
161
|
}))
|
|
143
162
|
|
|
144
163
|
let adapter: TfIdfAdapter
|
|
145
|
-
if (index.tfidf) {
|
|
146
|
-
|
|
164
|
+
if (index.tfidf && !query) {
|
|
165
|
+
// Use cached TF-IDF state for empty queries (listing all)
|
|
166
|
+
const allScored: ScoredEntry[] = index.entries.map((ie) => ({
|
|
167
|
+
id: `${ie.entry.type}:${ie.entry.name}`,
|
|
168
|
+
text: buildSearchText(ie.entry),
|
|
169
|
+
entry: ie.entry,
|
|
170
|
+
path: ie.path,
|
|
171
|
+
}))
|
|
172
|
+
adapter = TfIdfAdapter.deserialize(index.tfidf as any, allScored)
|
|
147
173
|
} else {
|
|
174
|
+
// Rebuild adapter from candidate subset
|
|
148
175
|
adapter = new TfIdfAdapter()
|
|
149
176
|
adapter.buildIndex(scoredEntries)
|
|
150
177
|
}
|
|
@@ -153,11 +180,15 @@ function trySemanticSearch(
|
|
|
153
180
|
const results = adapter.search(query, limit, typeFilter)
|
|
154
181
|
|
|
155
182
|
return results.map((r): SearchHit => {
|
|
183
|
+
// Derive the openRef name from the filesystem path, not the stash entry name,
|
|
184
|
+
// because agentikitOpen resolves assets by their relative path under the type root.
|
|
185
|
+
const openRefName = deriveOpenRefName(r.entry.type, r.path, stashDir)
|
|
186
|
+
|
|
156
187
|
const hit: SearchHit = {
|
|
157
188
|
type: r.entry.type,
|
|
158
189
|
name: r.entry.name,
|
|
159
190
|
path: r.path,
|
|
160
|
-
openRef: makeOpenRef(r.entry.type,
|
|
191
|
+
openRef: makeOpenRef(r.entry.type, openRefName),
|
|
161
192
|
description: r.entry.description,
|
|
162
193
|
tags: r.entry.tags,
|
|
163
194
|
score: r.score,
|
|
@@ -177,6 +208,26 @@ function trySemanticSearch(
|
|
|
177
208
|
})
|
|
178
209
|
}
|
|
179
210
|
|
|
211
|
+
/**
|
|
212
|
+
* Derive the correct openRef name for a semantic search result.
|
|
213
|
+
* Tools use their relative file path (e.g., "deploy/deploy-k8s.sh"),
|
|
214
|
+
* skills use directory name, commands/agents use relative .md path.
|
|
215
|
+
*/
|
|
216
|
+
function deriveOpenRefName(
|
|
217
|
+
type: AgentikitAssetType,
|
|
218
|
+
filePath: string,
|
|
219
|
+
stashDir: string,
|
|
220
|
+
): string {
|
|
221
|
+
const indexer = ASSET_INDEXERS[type]
|
|
222
|
+
const root = path.join(stashDir, indexer.dir)
|
|
223
|
+
if (type === "skill") {
|
|
224
|
+
// Skills resolve by directory name relative to skills/
|
|
225
|
+
const rel = toPosix(path.dirname(path.relative(root, filePath)))
|
|
226
|
+
return rel === "." ? path.basename(path.dirname(filePath)) : rel
|
|
227
|
+
}
|
|
228
|
+
return toPosix(path.relative(root, filePath))
|
|
229
|
+
}
|
|
230
|
+
|
|
180
231
|
export function agentikitOpen(input: { ref: string }): OpenResponse {
|
|
181
232
|
const parsed = parseOpenRef(input.ref)
|
|
182
233
|
const stashDir = resolveStashDir()
|
|
@@ -622,6 +673,11 @@ export interface InitResponse {
|
|
|
622
673
|
created: boolean
|
|
623
674
|
envSet: boolean
|
|
624
675
|
profileUpdated?: string
|
|
676
|
+
ripgrep?: {
|
|
677
|
+
rgPath: string
|
|
678
|
+
installed: boolean
|
|
679
|
+
version: string
|
|
680
|
+
}
|
|
625
681
|
}
|
|
626
682
|
|
|
627
683
|
export function agentikitInit(): InitResponse {
|
|
@@ -686,7 +742,16 @@ export function agentikitInit(): InitResponse {
|
|
|
686
742
|
|
|
687
743
|
process.env.AGENTIKIT_STASH_DIR = stashDir
|
|
688
744
|
|
|
689
|
-
|
|
745
|
+
// Ensure ripgrep is available (install to stash/bin if needed)
|
|
746
|
+
let ripgrep: InitResponse["ripgrep"]
|
|
747
|
+
try {
|
|
748
|
+
const rgResult = ensureRg(stashDir)
|
|
749
|
+
ripgrep = rgResult
|
|
750
|
+
} catch {
|
|
751
|
+
// Non-fatal: ripgrep is optional, search works without it
|
|
752
|
+
}
|
|
753
|
+
|
|
754
|
+
return { stashDir, created, envSet, profileUpdated, ripgrep }
|
|
690
755
|
}
|
|
691
756
|
|
|
692
757
|
function hasErrnoCode(error: unknown, code: string): boolean {
|