akm-cli 0.0.0 → 0.0.17

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,162 @@
1
+ /**
2
+ * Flexible asset resolution system.
3
+ *
4
+ * Provides a rich FileContext built once per file during walking, plus a
5
+ * matcher/renderer registry that decouples asset classification from rendering.
6
+ */
7
+ import fs from "node:fs";
8
+ import path from "node:path";
9
+ import { toPosix } from "./common";
10
+ import { parseFrontmatter } from "./frontmatter";
11
/**
 * Build a FileContext from a stash root and an absolute file path.
 *
 * Path-derived fields are computed eagerly. The content, frontmatter, and
 * stat getters use lazy caching so the file is only read from disk when
 * (and if) a matcher or renderer actually needs it.
 *
 * Fix: frontmatter() previously called `this.content()`, which broke when
 * the method was destructured off the returned object (lost `this`). The
 * cached reader is now a closure, so all accessors are safe to detach.
 */
export function buildFileContext(stashRoot, absPath) {
    const relPath = toPosix(path.relative(stashRoot, absPath));
    const ext = path.extname(absPath).toLowerCase();
    const fileName = path.basename(absPath);
    const parentDirAbs = path.dirname(absPath);
    const parentDir = path.basename(parentDirAbs);
    // Compute ancestor directory segments from the POSIX relPath's directory portion.
    // For "scripts/azure/deploy/run.sh" the dir portion is "scripts/azure/deploy"
    // which splits into ["scripts", "azure", "deploy"].
    const relDir = toPosix(path.dirname(relPath));
    const ancestorDirs = relDir === "." ? [] : relDir.split("/").filter((seg) => seg.length > 0);
    // Lazy caches — populated on first access only.
    let cachedContent;
    let cachedFrontmatter;
    let frontmatterComputed = false;
    let cachedStat;
    // Closure-based reader: does not rely on `this`, so both content() and
    // frontmatter() keep working even if detached from the context object.
    const readContent = () => {
        if (cachedContent === undefined) {
            cachedContent = fs.readFileSync(absPath, "utf8");
        }
        return cachedContent;
    };
    return {
        absPath,
        relPath,
        ext,
        fileName,
        parentDir,
        parentDirAbs,
        ancestorDirs,
        stashRoot,
        content: readContent,
        frontmatter() {
            if (!frontmatterComputed) {
                const parsed = parseFrontmatter(readContent());
                // Normalize "no keys parsed" to null so callers can truthy-check.
                cachedFrontmatter = Object.keys(parsed.data).length > 0 ? parsed.data : null;
                frontmatterComputed = true;
            }
            return cachedFrontmatter ?? null;
        },
        stat() {
            if (cachedStat === undefined) {
                cachedStat = fs.statSync(absPath);
            }
            return cachedStat;
        },
    };
}
66
// ── Registry ─────────────────────────────────────────────────────────────────
/** Ordered list of registered matchers. Later registrations win ties. */
const matchers = [];
/** Renderer lookup by name. */
const renderers = new Map();
/** Set once ensureBuiltinsRegistered has run; guards double registration. */
let builtinsInitialized = false;
72
/**
 * Ensure that built-in matchers and renderers are registered.
 * Called lazily on first use of runMatchers/getRenderer.
 */
function ensureBuiltinsRegistered() {
    // Flag is flipped BEFORE requiring, so a re-entrant call triggered while
    // the modules below evaluate cannot recurse into this function.
    if (builtinsInitialized)
        return;
    builtinsInitialized = true;
    // Lazy inline require avoids a top-level static import cycle.
    // These are only evaluated once.
    // NOTE(review): `require` only exists in CommonJS output; this file uses
    // ESM `export` syntax, so if it ships as a pure ES module this call would
    // throw ReferenceError — confirm the build target (or use
    // module.createRequire) before relying on it.
    // eslint-disable-next-line @typescript-eslint/no-require-imports
    const { registerBuiltinMatchers } = require("./matchers");
    // eslint-disable-next-line @typescript-eslint/no-require-imports
    const { registerBuiltinRenderers } = require("./renderers");
    registerBuiltinMatchers();
    registerBuiltinRenderers();
}
89
/**
 * Register an AssetMatcher.
 *
 * Matchers run in registration order; when two matchers produce the same
 * specificity, the most recently registered one wins (see runMatchers).
 */
export function registerMatcher(matcher) {
    // Append-only: the array index doubles as the tie-break order.
    matchers.push(matcher);
}
98
/**
 * Register an AssetRenderer under its own `name`.
 *
 * Registering a second renderer with an existing name silently replaces the
 * first one (Map.set overwrites).
 */
export function registerRenderer(renderer) {
    renderers.set(renderer.name, renderer);
}
106
/**
 * Look up a renderer by name, lazily registering built-ins first.
 * Returns undefined when no renderer with that name exists.
 */
export function getRenderer(name) {
    ensureBuiltinsRegistered();
    const renderer = renderers.get(name);
    return renderer;
}
113
/**
 * Return a snapshot array of every registered renderer; callers may iterate
 * or mutate the array without affecting the registry.
 */
export function getAllRenderers() {
    ensureBuiltinsRegistered();
    return [...renderers.values()];
}
120
/**
 * Run every registered matcher against a FileContext and return the
 * highest-specificity result.
 *
 * Resolution rules:
 * 1. Every matcher is invoked; null returns are discarded.
 * 2. Results are ranked by specificity (descending).
 * 3. Ties are broken by registration order: the matcher registered later wins
 *    (this lets user-registered matchers override built-in ones).
 * 4. Returns null when no matcher claims the file.
 */
export function runMatchers(ctx) {
    ensureBuiltinsRegistered();
    // Single pass instead of collect-then-sort: iterating in registration
    // order with `>=` means a later matcher of EQUAL specificity replaces the
    // current best, implementing "later registration wins ties" in O(n)
    // rather than O(n log n).
    let best = null;
    for (const matcher of matchers) {
        const result = matcher(ctx);
        if (result === null)
            continue;
        if (best === null || result.specificity >= best.specificity) {
            best = result;
        }
    }
    return best;
}
152
/**
 * Build a RenderContext by merging a FileContext with its winning MatchResult
 * and the list of stash search paths.
 *
 * Shallow merge: the FileContext's own enumerable properties (including its
 * accessor methods) are copied onto a fresh object, then the match result
 * and stash dirs are attached.
 */
export function buildRenderContext(ctx, match, stashDirs) {
    return Object.assign({}, ctx, { matchResult: match, stashDirs });
}
@@ -0,0 +1,86 @@
1
+ /**
2
+ * Shared frontmatter parsing utilities.
3
+ *
4
+ * Provides a single, canonical YAML-subset frontmatter parser used by both
5
+ * the stash open logic and the metadata generator.
6
+ */
7
/**
 * Parse YAML-subset frontmatter from a Markdown (or similar) string.
 *
 * Supports flat `key: value` pairs plus one level of nesting: a bare `key:`
 * line opens an object that collects subsequent two-space-indented children.
 * Returns the parsed data, the remaining body, the raw frontmatter text, and
 * the 1-based line on which the body starts.
 */
export function parseFrontmatter(raw) {
    const block = parseFrontmatterBlock(raw);
    if (block === null) {
        // No --- fenced block at the top: the entire input is body content.
        return { data: {}, content: raw, frontmatter: null, bodyStartLine: 1 };
    }
    const data = {};
    let currentKey = null;
    let nested = null;
    const lines = block.frontmatter.split(/\r?\n/);
    for (const line of lines) {
        // Two-space-indented "child: value" lines attach to the open nested map.
        const indented = line.match(/^ {2}(\w[\w-]*):\s*(.+)$/);
        if (indented && currentKey && nested) {
            nested[indented[1]] = parseYamlScalar(indented[2].trim());
            continue;
        }
        // Top-level "key:" / "key: value" lines; anything else is ignored.
        const top = line.match(/^(\w[\w-]*):\s*(.*)$/);
        if (!top)
            continue;
        currentKey = top[1];
        const value = top[2].trim();
        if (value === "") {
            // Bare "key:" opens a nested object for the indented lines below it.
            nested = {};
            data[currentKey] = nested;
        }
        else {
            nested = null;
            data[currentKey] = parseYamlScalar(value);
        }
    }
    return {
        data,
        content: block.content,
        frontmatter: block.frontmatter,
        bodyStartLine: block.bodyStartLine,
    };
}
48
/**
 * Split `raw` into its frontmatter block and body.
 *
 * Returns null unless the input begins with a `---` fence, a frontmatter
 * section, and a closing `---` fence. bodyStartLine is 1-based.
 */
export function parseFrontmatterBlock(raw) {
    const match = raw.match(/^---\r?\n([\s\S]*?)\r?\n---\r?\n?([\s\S]*)$/);
    if (match === null) {
        return null;
    }
    const [whole, frontmatter, content] = match;
    // Everything before the body (both fences plus the frontmatter text)
    // determines how many lines precede the body.
    const prefix = raw.slice(0, whole.length - content.length);
    return {
        frontmatter,
        content,
        bodyStartLine: countLines(prefix) + 1,
    };
}
58
/**
 * Count the number of line breaks (LF or CRLF) in `text`.
 * The empty string contains zero.
 */
function countLines(text) {
    if (text.length === 0) {
        return 0;
    }
    const segments = text.split(/\r?\n/);
    return segments.length - 1;
}
63
/**
 * Parse a simple YAML scalar value (string, boolean, or number).
 *
 * Order matters: booleans and numbers are recognized first, so a quoted
 * number like "42" survives as the string 42 after quote stripping.
 */
export function parseYamlScalar(value) {
    if (value === "")
        return "";
    if (value === "true")
        return true;
    if (value === "false")
        return false;
    const asNumber = Number(value);
    if (!Number.isNaN(asNumber))
        return asNumber;
    // Strip matching surrounding quotes. Require length >= 2 so a lone quote
    // character ('"' or "'") — which satisfies both startsWith and endsWith —
    // is returned as-is instead of being collapsed to "" by slice(1, -1).
    if (value.length >= 2 &&
        ((value.startsWith('"') && value.endsWith('"')) || (value.startsWith("'") && value.endsWith("'")))) {
        return value.slice(1, -1);
    }
    return value;
}
81
/**
 * Coerce an unknown value to a string, or return undefined.
 * Non-strings and whitespace-only strings map to undefined; the original
 * (untrimmed) string is returned otherwise.
 */
export function toStringOrUndefined(value) {
    if (typeof value !== "string") {
        return undefined;
    }
    return value.trim() ? value : undefined;
}
package/dist/github.js ADDED
@@ -0,0 +1,17 @@
1
/** Base URL for the GitHub REST API (no trailing slash). */
export const GITHUB_API_BASE = "https://api.github.com";
2
/**
 * Build the HTTP headers for GitHub API requests.
 * Adds a Bearer Authorization header only when GITHUB_TOKEN is set to a
 * non-blank value in the environment.
 */
export function githubHeaders() {
    const headers = {
        Accept: "application/vnd.github+json",
        "User-Agent": "akm-registry",
    };
    const token = process.env.GITHUB_TOKEN?.trim();
    if (token) {
        headers.Authorization = `Bearer ${token}`;
    }
    return headers;
}
12
/**
 * Narrow an unknown value to a plain record.
 * Non-objects, null, and arrays all map to a fresh empty object.
 */
export function asRecord(value) {
    if (typeof value !== "object" || value === null || Array.isArray(value)) {
        return {};
    }
    return value;
}
15
/**
 * Narrow an unknown value to a non-empty string, else undefined.
 */
export function asString(value) {
    if (typeof value !== "string" || value.length === 0) {
        return undefined;
    }
    return value;
}
@@ -0,0 +1,311 @@
1
+ import fs from "node:fs";
2
+ import path from "node:path";
3
+ import { resolveStashDir } from "./common";
4
+ import { closeDatabase, DB_VERSION, deleteEntriesByDir, getEntriesByDir, getEntryCount, getMeta, isVecAvailable, openDatabase, rebuildFts, setMeta, upsertEmbedding, upsertEntry, warnIfVecMissing, } from "./db";
5
+ import { generateMetadataFlat, loadStashFile } from "./metadata";
6
+ import { getDbPath } from "./paths";
7
+ import { walkStashFlat } from "./walker";
8
+ import { warn } from "./warn";
9
// ── Indexer ──────────────────────────────────────────────────────────────────
/**
 * Build (or incrementally refresh) the search index database.
 *
 * @param {{stashDir?: string, full?: boolean}} [options] - optional stash root
 *   override and a flag forcing a full rebuild.
 * @returns stats about the run: entry counts, index path, mode, directory
 *   scan/skip counts, and per-phase timings (walk, FTS rebuild, embeddings).
 */
export async function agentikitIndex(options) {
    const stashDir = options?.stashDir || resolveStashDir();
    // Load config and resolve all stash sources
    // NOTE(review): dynamic import here presumably breaks an import cycle or
    // defers load cost — confirm before converting to static imports.
    const { loadConfig } = await import("./config.js");
    const config = loadConfig();
    const { resolveAllStashDirs } = await import("./stash-source.js");
    const allStashDirs = resolveAllStashDirs(stashDir);
    const t0 = Date.now();
    // Open database — pass embedding dimension from config if available
    const dbPath = getDbPath();
    const embeddingDim = config.embedding?.dimension;
    const db = openDatabase(dbPath, embeddingDim ? { embeddingDim } : undefined);
    try {
        // Check if we should do incremental: only when not forced full, the
        // stash root is unchanged, and a previous build timestamp exists.
        const prevStashDir = getMeta(db, "stashDir");
        const prevBuiltAt = getMeta(db, "builtAt");
        const isIncremental = !options?.full && prevStashDir === stashDir && !!prevBuiltAt;
        // builtAtMs is the staleness watermark compared against file mtimes.
        const builtAtMs = isIncremental && prevBuiltAt ? new Date(prevBuiltAt).getTime() : 0;
        if (options?.full || !isIncremental) {
            // Wipe all entries for full rebuild or stashDir change
            // Delete from child tables first to respect foreign key constraints
            try {
                db.exec("DELETE FROM embeddings");
            }
            catch {
                /* ignore */
            }
            if (isVecAvailable(db)) {
                try {
                    db.exec("DELETE FROM entries_vec");
                }
                catch {
                    /* ignore */
                }
            }
            db.exec("DELETE FROM entries_fts");
            db.exec("DELETE FROM entries");
        }
        const tWalkStart = Date.now();
        // Walk stash dirs and index entries
        const { scannedDirs, skippedDirs, generatedCount, dirsNeedingLlm } = indexEntries(db, allStashDirs, stashDir, isIncremental, builtAtMs);
        // Enhance entries with LLM if configured
        await enhanceDirsWithLlm(db, config, dirsNeedingLlm);
        const tWalkEnd = Date.now();
        // Rebuild FTS after all inserts
        rebuildFts(db);
        const tFtsEnd = Date.now();
        // Generate embeddings if semantic search is enabled
        const hasEmbeddings = await generateEmbeddingsForDb(db, config);
        const tEmbedEnd = Date.now();
        // Update metadata (builtAt becomes the next run's staleness watermark).
        setMeta(db, "version", String(DB_VERSION));
        setMeta(db, "builtAt", new Date().toISOString());
        setMeta(db, "stashDir", stashDir);
        setMeta(db, "stashDirs", JSON.stringify(allStashDirs));
        setMeta(db, "hasEmbeddings", hasEmbeddings ? "1" : "0");
        const totalEntries = getEntryCount(db);
        // Warn on every index run if using JS fallback with many entries
        warnIfVecMissing(db);
        const tEnd = Date.now();
        return {
            stashDir,
            totalEntries,
            generatedMetadata: generatedCount,
            indexPath: dbPath,
            mode: isIncremental ? "incremental" : "full",
            directoriesScanned: scannedDirs,
            directoriesSkipped: skippedDirs,
            timing: {
                totalMs: tEnd - t0,
                walkMs: tWalkEnd - tWalkStart,
                embedMs: tEmbedEnd - tFtsEnd,
                ftsMs: tFtsEnd - tWalkEnd,
            },
        };
    }
    finally {
        // Always release the DB handle, even when indexing throws.
        closeDatabase(db);
    }
}
90
// ── Extracted helpers for indexing ────────────────────────────────────────────
/**
 * Walk every stash dir and (re-)index changed directories inside a single
 * DB transaction.
 *
 * @param {*} db - open database handle (must support .transaction()).
 * @param {string[]} allStashDirs - stash roots to walk, in priority order.
 * @param {string} _stashDir - unused here; kept for call-site symmetry.
 * @param {boolean} isIncremental - when true, unchanged dirs are skipped.
 * @param {number} builtAtMs - staleness watermark (ms epoch) for mtime checks.
 * @returns counters plus the list of dirs whose entries need LLM enhancement.
 */
function indexEntries(db, allStashDirs, _stashDir, isIncremental, builtAtMs) {
    let scannedDirs = 0;
    let skippedDirs = 0;
    let generatedCount = 0;
    // Dirs already processed (first stash dir in the list wins duplicates).
    const seenPaths = new Set();
    const dirsNeedingLlm = [];
    const insertTransaction = db.transaction(() => {
        for (const currentStashDir of allStashDirs) {
            // Walk the entire stash directory — matchers classify each file
            const fileContexts = walkStashFlat(currentStashDir);
            // Group files by parent directory
            const dirGroups = new Map();
            for (const ctx of fileContexts) {
                const dir = ctx.parentDirAbs;
                const group = dirGroups.get(dir);
                if (group)
                    group.push(ctx.absPath);
                else
                    dirGroups.set(dir, [ctx.absPath]);
            }
            for (const [dirPath, files] of dirGroups) {
                if (seenPaths.has(path.resolve(dirPath)))
                    continue;
                seenPaths.add(path.resolve(dirPath));
                // Incremental: skip directories that haven't changed
                if (isIncremental) {
                    const prevEntries = getEntriesByDir(db, dirPath);
                    if (prevEntries.length > 0 && !isDirStale(dirPath, files, prevEntries, builtAtMs)) {
                        skippedDirs++;
                        continue;
                    }
                }
                scannedDirs++;
                // Delete old entries for this dir (will be re-inserted)
                deleteEntriesByDir(db, dirPath);
                // Try loading existing .stash.json (user metadata overrides)
                let stash = loadStashFile(dirPath);
                if (stash) {
                    // Check for files on disk that aren't covered by existing .stash.json entries.
                    const coveredFiles = new Set(stash.entries.map((e) => (e.filename ? path.basename(e.filename) : "")).filter((e) => !!e));
                    const uncoveredFiles = files.filter((f) => !coveredFiles.has(path.basename(f)));
                    if (uncoveredFiles.length > 0) {
                        // Auto-generate metadata only for the uncovered files and
                        // append it after the user-authored entries.
                        const generated = generateMetadataFlat(currentStashDir, uncoveredFiles);
                        if (generated.entries.length > 0) {
                            stash = { entries: [...stash.entries, ...generated.entries] };
                            generatedCount += generated.entries.length;
                        }
                    }
                }
                if (!stash) {
                    // No .stash.json at all: generate metadata for every file.
                    const generated = generateMetadataFlat(currentStashDir, files);
                    if (generated.entries.length > 0) {
                        stash = { entries: generated.entries };
                        generatedCount += generated.entries.length;
                    }
                }
                if (stash) {
                    for (const entry of stash.entries) {
                        const entryPath = entry.filename ? path.join(dirPath, entry.filename) : files[0] || dirPath;
                        const entryKey = `${currentStashDir}:${entry.type}:${entry.name}`;
                        const searchText = buildSearchText(entry);
                        const entryWithSize = attachFileSize(entry, entryPath);
                        upsertEntry(db, entryKey, dirPath, entryPath, currentStashDir, entryWithSize, searchText);
                    }
                    // Collect dirs needing LLM enhancement during the first walk
                    if (stash.entries.some((e) => e.quality === "generated")) {
                        dirsNeedingLlm.push({ dirPath, files, currentStashDir, stash });
                    }
                }
            }
        }
    });
    // Run the whole walk+insert as one transaction for atomicity and speed.
    insertTransaction();
    return { scannedDirs, skippedDirs, generatedCount, dirsNeedingLlm };
}
166
/**
 * Re-run LLM enhancement over directories whose entries were auto-generated,
 * then upsert the improved entries. No-op when no LLM is configured or
 * nothing was flagged. User-authored entries are never modified.
 *
 * @param {*} db - open database handle.
 * @param {*} config - loaded config; config.llm gates the whole pass.
 * @param {Array} dirsNeedingLlm - dirs flagged during indexEntries.
 */
async function enhanceDirsWithLlm(db, config, dirsNeedingLlm) {
    if (!config.llm || dirsNeedingLlm.length === 0)
        return;
    // NOTE(review): dirs are enhanced sequentially — presumably deliberate
    // LLM rate limiting; confirm before parallelizing with Promise.all.
    for (const { dirPath, files, currentStashDir, stash: originalStash } of dirsNeedingLlm) {
        // Only enhance generated entries; user-provided overrides should not be overwritten
        const generatedEntries = originalStash.entries.filter((e) => e.quality === "generated");
        if (generatedEntries.length === 0)
            continue;
        const generatedStash = { entries: generatedEntries };
        const enhanced = await enhanceStashWithLlm(config.llm, generatedStash, dirPath, files);
        // Re-upsert only the enhanced (generated) entries
        for (const entry of enhanced.entries) {
            const entryPath = entry.filename ? path.join(dirPath, entry.filename) : files[0] || dirPath;
            const entryKey = `${currentStashDir}:${entry.type}:${entry.name}`;
            const searchText = buildSearchText(entry);
            upsertEntry(db, entryKey, dirPath, entryPath, currentStashDir, attachFileSize(entry, entryPath), searchText);
        }
    }
}
185
/**
 * Generate and store embeddings for entries that do not have one yet.
 *
 * @returns true when embeddings are up to date (including the trivial
 *   zero-missing-entries case); false when semantic search is disabled or
 *   generation failed. Failures are logged via warn() and never abort the
 *   index run.
 */
async function generateEmbeddingsForDb(db, config) {
    if (!config.semanticSearch)
        return false;
    try {
        // Embedder is imported lazily so the dependency is only loaded when
        // semantic search is actually enabled.
        const { embedBatch } = await import("./embedder.js");
        const allEntries = getAllEntriesForEmbedding(db);
        if (allEntries.length === 0)
            return true;
        const texts = allEntries.map((e) => e.searchText);
        // embeddings[i] corresponds to allEntries[i] — order is preserved.
        const embeddings = await embedBatch(texts, config.embedding);
        for (let i = 0; i < allEntries.length; i++) {
            upsertEmbedding(db, allEntries[i].id, embeddings[i]);
        }
        return true;
    }
    catch (error) {
        warn("Embedding generation failed, continuing without:", error instanceof Error ? error.message : String(error));
        return false;
    }
}
205
// ── Helpers ─────────────────────────────────────────────────────────────────
/**
 * Fetch id + search text for every entry that has no stored embedding yet.
 */
function getAllEntriesForEmbedding(db) {
    const sql = `
    SELECT e.id, e.search_text AS searchText FROM entries e
    WHERE NOT EXISTS (SELECT 1 FROM embeddings b WHERE b.id = e.id)
    `;
    return db.prepare(sql).all();
}
214
/**
 * Return a copy of `entry` with fileSize attached from disk.
 * Best-effort: on any stat failure (missing file, permissions) the original
 * entry is returned unchanged rather than failing the index run.
 */
function attachFileSize(entry, entryPath) {
    let size;
    try {
        size = fs.statSync(entryPath).size;
    }
    catch {
        return entry;
    }
    return { ...entry, fileSize: size };
}
222
/**
 * Decide whether a directory needs re-indexing.
 *
 * Stale when: the set of file basenames differs from the previous index,
 * any current file was modified after the last build (or cannot be stat'ed),
 * or the directory's .stash.json is newer than the last build. A missing
 * .stash.json does not make the directory stale.
 */
function isDirStale(dirPath, currentFiles, previousEntries, builtAtMs) {
    // Compare file sets by basename: any addition or deletion is stale.
    const previousNames = new Set(previousEntries.map((ie) => ie.entry.filename).filter((e) => !!e));
    const currentNames = new Set(currentFiles.map((f) => path.basename(f)));
    if (previousNames.size !== currentNames.size)
        return true;
    for (const name of currentNames) {
        if (!previousNames.has(name))
            return true;
    }
    // Any file changed after the watermark — or unreadable — is stale.
    for (const file of currentFiles) {
        try {
            if (fs.statSync(file).mtimeMs > builtAtMs)
                return true;
        }
        catch {
            return true;
        }
    }
    // User metadata overrides: a newer .stash.json also invalidates the dir.
    const stashPath = path.join(dirPath, ".stash.json");
    try {
        if (fs.statSync(stashPath).mtimeMs > builtAtMs)
            return true;
    }
    catch {
        // file doesn't exist, not stale
    }
    return false;
}
254
/**
 * Ask the configured LLM to improve metadata for each entry in `stash`.
 *
 * Per-entry failures fall back to the original entry, so a flaky LLM can
 * never lose data. Only description, searchHints, and tags are replaced,
 * and only when the LLM returned a non-empty value for them.
 *
 * @returns a new stash object; input entries are not mutated.
 */
async function enhanceStashWithLlm(llmConfig, stash, _dirPath, files) {
    const { enhanceMetadata } = await import("./llm.js");
    const enhanced = [];
    for (const entry of stash.entries) {
        try {
            // Prefer the file matching entry.filename; fall back to the first file.
            const entryFile = entry.filename
                ? (files.find((f) => path.basename(f) === entry.filename) ?? files[0])
                : files[0];
            let fileContent;
            if (entryFile) {
                try {
                    fileContent = fs.readFileSync(entryFile, "utf8");
                }
                catch {
                    /* ignore unreadable files */
                }
            }
            const improvements = await enhanceMetadata(llmConfig, entry, fileContent);
            // Copy-then-patch: apply only the fields the LLM actually filled in.
            const updated = { ...entry };
            if (improvements.description)
                updated.description = improvements.description;
            if (improvements.searchHints?.length)
                updated.searchHints = improvements.searchHints;
            if (improvements.tags?.length)
                updated.tags = improvements.tags;
            enhanced.push(updated);
        }
        catch {
            // LLM call failed for this entry — keep the original untouched.
            enhanced.push(entry);
        }
    }
    return { entries: enhanced };
}
287
/**
 * Flatten an entry's searchable fields into one lowercase string.
 * The name always comes first, with '-' and '_' normalized to spaces so
 * "my-tool" also matches the query "my tool".
 */
export function buildSearchText(entry) {
    const parts = [entry.name.replace(/[-_]/g, " ")];
    if (entry.description)
        parts.push(entry.description);
    // List-valued fields, in fixed order; present lists are space-joined.
    for (const list of [entry.tags, entry.examples, entry.aliases, entry.searchHints]) {
        if (list)
            parts.push(list.join(" "));
    }
    const intent = entry.intent;
    if (intent) {
        if (intent.when)
            parts.push(intent.when);
        if (intent.input)
            parts.push(intent.input);
        if (intent.output)
            parts.push(intent.output);
    }
    if (entry.toc) {
        parts.push(entry.toc.map((h) => h.text).join(" "));
    }
    return parts.join(" ").toLowerCase();
}
package/dist/init.js ADDED
@@ -0,0 +1,43 @@
1
+ /**
2
+ * akm initialization logic.
3
+ *
4
+ * Creates the working stash directory structure, persists the stashDir
5
+ * in config.json, and ensures ripgrep is available.
6
+ */
7
+ import fs from "node:fs";
8
+ import path from "node:path";
9
+ import { TYPE_DIRS } from "./asset-spec";
10
+ import { getConfigPath, loadConfig, saveConfig } from "./config";
11
+ import { getBinDir, getDefaultStashDir } from "./paths";
12
+ import { ensureRg } from "./ripgrep-install";
13
/**
 * Initialize the working stash: create the root and per-type subdirectories,
 * persist stashDir in config.json, and best-effort install ripgrep.
 *
 * @param {{dir?: string}} [options] - optional stash root override.
 * @returns the resolved stashDir, whether it was created, the config path,
 *   and the ripgrep install result (undefined when unavailable).
 */
export async function agentikitInit(options) {
    const stashDir = options?.dir ? path.resolve(options.dir) : getDefaultStashDir();
    // Create the stash root, remembering whether we had to make it.
    let created = false;
    if (!fs.existsSync(stashDir)) {
        fs.mkdirSync(stashDir, { recursive: true });
        created = true;
    }
    // Ensure every known asset-type subdirectory exists.
    for (const sub of Object.values(TYPE_DIRS)) {
        const subDir = path.join(stashDir, sub);
        if (!fs.existsSync(subDir)) {
            fs.mkdirSync(subDir, { recursive: true });
        }
    }
    // Persist stashDir in config.json whenever it differs from the saved
    // value (a missing saved value also differs, so it is covered too).
    const configPath = getConfigPath();
    const existing = loadConfig();
    if (existing.stashDir !== stashDir) {
        saveConfig({ ...existing, stashDir });
    }
    // Best-effort ripgrep install into the cache bin dir.
    let ripgrep;
    try {
        ripgrep = ensureRg(getBinDir());
    }
    catch {
        // Non-fatal: ripgrep is optional, search works without it
    }
    return { stashDir, created, configPath, ripgrep };
}