akm-cli 0.0.21 → 0.0.22

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (45)
  1. package/README.md +8 -5
  2. package/dist/asset-spec.js +91 -10
  3. package/dist/cli.js +195 -55
  4. package/dist/common.js +15 -2
  5. package/dist/config-cli.js +65 -6
  6. package/dist/config.js +206 -22
  7. package/dist/create-provider-registry.js +18 -0
  8. package/dist/db.js +156 -53
  9. package/dist/embedder.js +36 -18
  10. package/dist/errors.js +6 -0
  11. package/dist/file-context.js +18 -19
  12. package/dist/frontmatter.js +19 -3
  13. package/dist/indexer.js +126 -89
  14. package/dist/{stash-registry.js → installed-kits.js} +16 -24
  15. package/dist/kit-include.js +108 -0
  16. package/dist/local-search.js +429 -0
  17. package/dist/lockfile.js +47 -5
  18. package/dist/matchers.js +6 -0
  19. package/dist/metadata.js +20 -10
  20. package/dist/paths.js +4 -0
  21. package/dist/providers/skills-sh.js +3 -2
  22. package/dist/providers/static-index.js +4 -9
  23. package/dist/registry-build-index.js +356 -0
  24. package/dist/registry-factory.js +19 -0
  25. package/dist/registry-install.js +114 -109
  26. package/dist/registry-resolve.js +44 -9
  27. package/dist/registry-search.js +14 -9
  28. package/dist/renderers.js +23 -7
  29. package/dist/ripgrep-install.js +9 -4
  30. package/dist/self-update.js +31 -4
  31. package/dist/stash-add.js +75 -6
  32. package/dist/stash-clone.js +1 -1
  33. package/dist/stash-provider-factory.js +52 -0
  34. package/dist/stash-provider.js +1 -0
  35. package/dist/stash-providers/filesystem.js +42 -0
  36. package/dist/stash-providers/index.js +9 -0
  37. package/dist/stash-providers/openviking.js +337 -0
  38. package/dist/stash-resolve.js +4 -4
  39. package/dist/stash-search.js +70 -401
  40. package/dist/stash-show.js +24 -5
  41. package/dist/stash-source.js +19 -11
  42. package/dist/walker.js +15 -10
  43. package/dist/warn.js +7 -0
  44. package/package.json +1 -1
  45. package/dist/provider-registry.js +0 -8
package/dist/indexer.js CHANGED
@@ -26,31 +26,28 @@ export async function agentikitIndex(options) {
26
26
  const isIncremental = !options?.full && prevStashDir === stashDir && !!prevBuiltAt;
27
27
  const builtAtMs = isIncremental && prevBuiltAt ? new Date(prevBuiltAt).getTime() : 0;
28
28
  if (options?.full || !isIncremental) {
29
- // Wipe all entries for full rebuild or stashDir change
30
- // Delete from child tables first to respect foreign key constraints
31
- try {
32
- db.exec("DELETE FROM embeddings");
33
- }
34
- catch {
35
- /* ignore */
36
- }
37
- if (isVecAvailable(db)) {
38
- try {
39
- db.exec("DELETE FROM entries_vec");
40
- }
41
- catch {
42
- /* ignore */
43
- }
44
- }
45
- db.exec("DELETE FROM entries_fts");
46
- db.exec("DELETE FROM entries");
29
+ // HI-5: the delete is now merged into the insert transaction inside
30
+ // indexEntries() so that a reader never sees an empty database between
31
+ // the wipe and the re-inserts. The doFullDelete flag signals this path.
47
32
  }
48
33
  else {
49
34
  // Incremental: purge entries from stash dirs that have been removed
50
35
  // (e.g. after `akm remove`) so orphaned entries don't linger.
51
36
  const prevStashDirsJson = getMeta(db, "stashDirs");
52
37
  if (prevStashDirsJson) {
53
- const prevStashDirs = JSON.parse(prevStashDirsJson);
38
+ let prevStashDirs = [];
39
+ try {
40
+ const parsed = JSON.parse(prevStashDirsJson);
41
+ if (Array.isArray(parsed)) {
42
+ prevStashDirs = parsed.filter((d) => typeof d === "string");
43
+ }
44
+ else {
45
+ warn("index_meta stashDirs value is not an array — treating as empty");
46
+ }
47
+ }
48
+ catch {
49
+ warn("index_meta stashDirs value is corrupt JSON — treating as empty");
50
+ }
54
51
  const currentSet = new Set(allStashDirs);
55
52
  for (const dir of prevStashDirs) {
56
53
  if (!currentSet.has(dir)) {
@@ -60,11 +57,15 @@ export async function agentikitIndex(options) {
60
57
  }
61
58
  }
62
59
  const tWalkStart = Date.now();
63
- // Walk stash dirs and index entries
64
- const { scannedDirs, skippedDirs, generatedCount, dirsNeedingLlm } = indexEntries(db, allStashDirs, stashDir, isIncremental, builtAtMs);
60
+ // Walk stash dirs and index entries.
61
+ // doFullDelete=true merges the wipe into the same transaction as the
62
+ // inserts (HI-5) so readers never see an empty database mid-rebuild.
63
+ const doFullDelete = options?.full || !isIncremental;
64
+ const { scannedDirs, skippedDirs, generatedCount, dirsNeedingLlm } = await indexEntries(db, allStashDirs, stashDir, isIncremental, builtAtMs, doFullDelete);
65
+ const tWalkEnd = Date.now();
65
66
  // Enhance entries with LLM if configured
66
67
  await enhanceDirsWithLlm(db, config, dirsNeedingLlm);
67
- const tWalkEnd = Date.now();
68
+ const tLlmEnd = Date.now();
68
69
  // Rebuild FTS after all inserts
69
70
  rebuildFts(db);
70
71
  const tFtsEnd = Date.now();
@@ -92,8 +93,9 @@ export async function agentikitIndex(options) {
92
93
  timing: {
93
94
  totalMs: tEnd - t0,
94
95
  walkMs: tWalkEnd - tWalkStart,
96
+ llmMs: tLlmEnd - tWalkEnd,
95
97
  embedMs: tEmbedEnd - tFtsEnd,
96
- ftsMs: tFtsEnd - tWalkEnd,
98
+ ftsMs: tFtsEnd - tLlmEnd,
97
99
  },
98
100
  };
99
101
  }
@@ -102,74 +104,102 @@ export async function agentikitIndex(options) {
102
104
  }
103
105
  }
104
106
  // ── Extracted helpers for indexing ────────────────────────────────────────────
105
- function indexEntries(db, allStashDirs, _stashDir, isIncremental, builtAtMs) {
107
+ async function indexEntries(db, allStashDirs, _stashDir, isIncremental, builtAtMs, doFullDelete = false) {
106
108
  let scannedDirs = 0;
107
109
  let skippedDirs = 0;
108
110
  let generatedCount = 0;
109
111
  const seenPaths = new Set();
110
112
  const dirsNeedingLlm = [];
111
- const insertTransaction = db.transaction(() => {
112
- for (const currentStashDir of allStashDirs) {
113
- // Walk the entire stash directory — matchers classify each file
114
- const fileContexts = walkStashFlat(currentStashDir);
115
- // Group files by parent directory
116
- const dirGroups = new Map();
117
- for (const ctx of fileContexts) {
118
- const dir = ctx.parentDirAbs;
119
- const group = dirGroups.get(dir);
120
- if (group)
121
- group.push(ctx.absPath);
122
- else
123
- dirGroups.set(dir, [ctx.absPath]);
113
+ const dirRecords = [];
114
+ for (const currentStashDir of allStashDirs) {
115
+ const fileContexts = walkStashFlat(currentStashDir);
116
+ const dirGroups = new Map();
117
+ for (const ctx of fileContexts) {
118
+ const dir = ctx.parentDirAbs;
119
+ const group = dirGroups.get(dir);
120
+ if (group)
121
+ group.push(ctx.absPath);
122
+ else
123
+ dirGroups.set(dir, [ctx.absPath]);
124
+ }
125
+ for (const [dirPath, files] of dirGroups) {
126
+ if (seenPaths.has(path.resolve(dirPath))) {
127
+ dirRecords.push({ dirPath, currentStashDir, files, stash: null, skip: true });
128
+ continue;
124
129
  }
125
- for (const [dirPath, files] of dirGroups) {
126
- if (seenPaths.has(path.resolve(dirPath)))
130
+ seenPaths.add(path.resolve(dirPath));
131
+ // Incremental: skip directories that haven't changed
132
+ if (isIncremental) {
133
+ const prevEntries = getEntriesByDir(db, dirPath);
134
+ if (prevEntries.length > 0 && !isDirStale(dirPath, files, prevEntries, builtAtMs)) {
135
+ skippedDirs++;
136
+ dirRecords.push({ dirPath, currentStashDir, files, stash: null, skip: true });
127
137
  continue;
128
- seenPaths.add(path.resolve(dirPath));
129
- // Incremental: skip directories that haven't changed
130
- if (isIncremental) {
131
- const prevEntries = getEntriesByDir(db, dirPath);
132
- if (prevEntries.length > 0 && !isDirStale(dirPath, files, prevEntries, builtAtMs)) {
133
- skippedDirs++;
134
- continue;
135
- }
136
138
  }
137
- scannedDirs++;
138
- // Delete old entries for this dir (will be re-inserted)
139
- deleteEntriesByDir(db, dirPath);
140
- // Try loading existing .stash.json (user metadata overrides)
141
- let stash = loadStashFile(dirPath);
142
- if (stash) {
143
- // Check for files on disk that aren't covered by existing .stash.json entries.
144
- const coveredFiles = new Set(stash.entries.map((e) => (e.filename ? path.basename(e.filename) : "")).filter((e) => !!e));
145
- const uncoveredFiles = files.filter((f) => !coveredFiles.has(path.basename(f)));
146
- if (uncoveredFiles.length > 0) {
147
- const generated = generateMetadataFlat(currentStashDir, uncoveredFiles);
148
- if (generated.entries.length > 0) {
149
- stash = { entries: [...stash.entries, ...generated.entries] };
150
- generatedCount += generated.entries.length;
151
- }
152
- }
153
- }
154
- if (!stash) {
155
- const generated = generateMetadataFlat(currentStashDir, files);
139
+ }
140
+ scannedDirs++;
141
+ // Try loading existing .stash.json (user metadata overrides)
142
+ let stash = loadStashFile(dirPath);
143
+ if (stash) {
144
+ const coveredFiles = new Set(stash.entries.map((e) => (e.filename ? path.basename(e.filename) : "")).filter((e) => !!e));
145
+ const uncoveredFiles = files.filter((f) => !coveredFiles.has(path.basename(f)));
146
+ if (uncoveredFiles.length > 0) {
147
+ const generated = await generateMetadataFlat(currentStashDir, uncoveredFiles);
156
148
  if (generated.entries.length > 0) {
157
- stash = { entries: generated.entries };
149
+ stash = { entries: [...stash.entries, ...generated.entries] };
158
150
  generatedCount += generated.entries.length;
159
151
  }
160
152
  }
161
- if (stash) {
162
- for (const entry of stash.entries) {
163
- const entryPath = entry.filename ? path.join(dirPath, entry.filename) : files[0] || dirPath;
164
- const entryKey = `${currentStashDir}:${entry.type}:${entry.name}`;
165
- const searchText = buildSearchText(entry);
166
- const entryWithSize = attachFileSize(entry, entryPath);
167
- upsertEntry(db, entryKey, dirPath, entryPath, currentStashDir, entryWithSize, searchText);
168
- }
169
- // Collect dirs needing LLM enhancement during the first walk
170
- if (stash.entries.some((e) => e.quality === "generated")) {
171
- dirsNeedingLlm.push({ dirPath, files, currentStashDir, stash });
172
- }
153
+ }
154
+ if (!stash) {
155
+ const generated = await generateMetadataFlat(currentStashDir, files);
156
+ if (generated.entries.length > 0) {
157
+ stash = { entries: generated.entries };
158
+ generatedCount += generated.entries.length;
159
+ }
160
+ }
161
+ dirRecords.push({ dirPath, currentStashDir, files, stash, skip: false });
162
+ }
163
+ }
164
+ // Phase 2 (sync): write all pre-generated metadata inside a single transaction.
165
+ const insertTransaction = db.transaction(() => {
166
+ // HI-5: Perform the full-rebuild wipe as the FIRST step of the insert
167
+ // transaction so delete and re-insert are atomic — a concurrent reader
168
+ // never observes an empty database between the two operations.
169
+ if (doFullDelete) {
170
+ try {
171
+ db.exec("DELETE FROM embeddings");
172
+ }
173
+ catch {
174
+ /* ignore */
175
+ }
176
+ if (isVecAvailable(db)) {
177
+ try {
178
+ db.exec("DELETE FROM entries_vec");
179
+ }
180
+ catch {
181
+ /* ignore */
182
+ }
183
+ }
184
+ db.exec("DELETE FROM entries_fts");
185
+ db.exec("DELETE FROM entries");
186
+ }
187
+ for (const { dirPath, currentStashDir, files, stash, skip } of dirRecords) {
188
+ if (skip)
189
+ continue;
190
+ // Delete old entries for this dir (will be re-inserted)
191
+ deleteEntriesByDir(db, dirPath);
192
+ if (stash) {
193
+ for (const entry of stash.entries) {
194
+ const entryPath = entry.filename ? path.join(dirPath, entry.filename) : files[0] || dirPath;
195
+ const entryKey = `${currentStashDir}:${entry.type}:${entry.name}`;
196
+ const searchText = buildSearchText(entry);
197
+ const entryWithSize = attachFileSize(entry, entryPath);
198
+ upsertEntry(db, entryKey, dirPath, entryPath, currentStashDir, entryWithSize, searchText);
199
+ }
200
+ // Collect dirs needing LLM enhancement during the first walk
201
+ if (stash.entries.some((e) => e.quality === "generated")) {
202
+ dirsNeedingLlm.push({ dirPath, files, currentStashDir, stash });
173
203
  }
174
204
  }
175
205
  }
@@ -187,13 +217,16 @@ async function enhanceDirsWithLlm(db, config, dirsNeedingLlm) {
187
217
  continue;
188
218
  const generatedStash = { entries: generatedEntries };
189
219
  const enhanced = await enhanceStashWithLlm(config.llm, generatedStash, dirPath, files);
190
- // Re-upsert only the enhanced (generated) entries
191
- for (const entry of enhanced.entries) {
192
- const entryPath = entry.filename ? path.join(dirPath, entry.filename) : files[0] || dirPath;
193
- const entryKey = `${currentStashDir}:${entry.type}:${entry.name}`;
194
- const searchText = buildSearchText(entry);
195
- upsertEntry(db, entryKey, dirPath, entryPath, currentStashDir, attachFileSize(entry, entryPath), searchText);
196
- }
220
+ // HI-2: Re-upsert the enhanced entries in a single transaction so a crash
221
+ // cannot leave half the entries updated and the rest stale.
222
+ db.transaction(() => {
223
+ for (const entry of enhanced.entries) {
224
+ const entryPath = entry.filename ? path.join(dirPath, entry.filename) : files[0] || dirPath;
225
+ const entryKey = `${currentStashDir}:${entry.type}:${entry.name}`;
226
+ const searchText = buildSearchText(entry);
227
+ upsertEntry(db, entryKey, dirPath, entryPath, currentStashDir, attachFileSize(entry, entryPath), searchText);
228
+ }
229
+ })();
197
230
  }
198
231
  }
199
232
  async function generateEmbeddingsForDb(db, config) {
@@ -206,9 +239,13 @@ async function generateEmbeddingsForDb(db, config) {
206
239
  return true;
207
240
  const texts = allEntries.map((e) => e.searchText);
208
241
  const embeddings = await embedBatch(texts, config.embedding);
209
- for (let i = 0; i < allEntries.length; i++) {
210
- upsertEmbedding(db, allEntries[i].id, embeddings[i]);
211
- }
242
+ // HI-3: Wrap all embedding upserts in a single transaction so partial
243
+ // state is rolled back on failure rather than leaving the table half-filled.
244
+ db.transaction(() => {
245
+ for (let i = 0; i < allEntries.length; i++) {
246
+ upsertEmbedding(db, allEntries[i].id, embeddings[i]);
247
+ }
248
+ })();
212
249
  return true;
213
250
  }
214
251
  catch (error) {
@@ -1,3 +1,14 @@
1
+ /**
2
+ * Installed-kit operations: list, remove, update.
3
+ *
4
+ * Manages the set of kits that have been added to the local stash via
5
+ * `akm add`. Each installed kit has a cache directory and a stash root that
6
+ * is added to the search path.
7
+ *
8
+ * Not to be confused with:
9
+ * - registry-factory.ts — factory map for kit-discovery registry providers
10
+ * - stash-provider-factory.ts — factory map for runtime stash data sources
11
+ */
1
12
  import fs from "node:fs";
2
13
  import { resolveStashDir } from "./common";
3
14
  import { loadConfig } from "./config";
@@ -76,7 +87,7 @@ export async function agentikitUpdate(input) {
76
87
  }
77
88
  const installed = await installRegistryRef(entry.ref);
78
89
  upsertInstalledRegistryEntry(toInstalledEntry(installed));
79
- upsertLockEntry({
90
+ await upsertLockEntry({
80
91
  id: installed.id,
81
92
  source: installed.source,
82
93
  ref: installed.ref,
@@ -159,31 +170,12 @@ function resolveInstalledTarget(installed, target) {
159
170
  throw new NotFoundError(`No installed kit matched target: ${target}`);
160
171
  }
161
172
  function toInstalledEntry(status) {
162
- return {
163
- id: status.id,
164
- source: status.source,
165
- ref: status.ref,
166
- artifactUrl: status.artifactUrl,
167
- resolvedVersion: status.resolvedVersion,
168
- resolvedRevision: status.resolvedRevision,
169
- stashRoot: status.stashRoot,
170
- cacheDir: status.cacheDir,
171
- installedAt: status.installedAt,
172
- };
173
+ // KitInstallStatus extends InstalledKitEntry; omit the extra extractedDir field.
174
+ const { extractedDir: _extractedDir, ...base } = status;
175
+ return base;
173
176
  }
174
177
  function toInstallStatus(status) {
175
- return {
176
- id: status.id,
177
- source: status.source,
178
- ref: status.ref,
179
- artifactUrl: status.artifactUrl,
180
- resolvedVersion: status.resolvedVersion,
181
- resolvedRevision: status.resolvedRevision,
182
- stashRoot: status.stashRoot,
183
- cacheDir: status.cacheDir,
184
- extractedDir: status.extractedDir,
185
- installedAt: status.installedAt,
186
- };
178
+ return { ...status };
187
179
  }
188
180
  function cleanupDirectoryBestEffort(target) {
189
181
  try {
@@ -0,0 +1,108 @@
1
+ import fs from "node:fs";
2
+ import path from "node:path";
3
+ import { isWithin } from "./common";
4
+ // ── Helpers ─────────────────────────────────────────────────────────────────
5
+ /** Keys to check in package.json for akm include configuration. */
6
+ const INCLUDE_CONFIG_KEYS = ["akm", "agentikit"];
7
+ function readPackageJsonAt(dirPath) {
8
+ try {
9
+ const raw = fs.readFileSync(path.join(dirPath, "package.json"), "utf8");
10
+ const parsed = JSON.parse(raw);
11
+ if (typeof parsed === "object" && parsed !== null && !Array.isArray(parsed)) {
12
+ return parsed;
13
+ }
14
+ return undefined;
15
+ }
16
+ catch {
17
+ return undefined;
18
+ }
19
+ }
20
+ function extractIncludeList(pkg) {
21
+ if (!pkg)
22
+ return undefined;
23
+ for (const key of INCLUDE_CONFIG_KEYS) {
24
+ const config = pkg[key];
25
+ if (typeof config !== "object" || config === null || Array.isArray(config))
26
+ continue;
27
+ const { include } = config;
28
+ if (!Array.isArray(include))
29
+ continue;
30
+ const list = include
31
+ .filter((v) => typeof v === "string")
32
+ .map((v) => v.trim())
33
+ .filter(Boolean);
34
+ if (list.length > 0)
35
+ return list;
36
+ }
37
+ return undefined;
38
+ }
39
+ // ── Public API ───────────────────────────────────────────────────────────────
40
+ /**
41
+ * Walk up the directory tree from `startDir` to `boundary` (inclusive) looking
42
+ * for a package.json that declares an `akm.include` or `agentikit.include` list.
43
+ * Returns the first config found, or `undefined` if none is found within the
44
+ * boundary.
45
+ */
46
+ export function findNearestIncludeConfig(startDir, boundary) {
47
+ let current = path.resolve(startDir);
48
+ const resolvedBoundary = path.resolve(boundary);
49
+ while (isWithin(current, resolvedBoundary)) {
50
+ const pkg = readPackageJsonAt(current);
51
+ const include = extractIncludeList(pkg);
52
+ if (include && include.length > 0) {
53
+ return { baseDir: current, include };
54
+ }
55
+ if (current === resolvedBoundary)
56
+ break;
57
+ const parent = path.dirname(current);
58
+ if (parent === current)
59
+ break;
60
+ current = parent;
61
+ }
62
+ return undefined;
63
+ }
64
+ /**
65
+ * Copy each glob/path in `includeGlobs` from `sourceDir` to `destDir`.
66
+ *
67
+ * Uses `isWithin()` to prevent path-traversal attacks: any entry that escapes
68
+ * `sourceDir` throws immediately rather than silently being skipped.
69
+ *
70
+ * @throws {Error} if an include path escapes `sourceDir` or does not exist on disk.
71
+ */
72
+ export function copyIncludedPaths(includeGlobs, sourceDir, destDir) {
73
+ for (const entry of includeGlobs) {
74
+ const resolvedSource = path.resolve(sourceDir, entry);
75
+ if (!isWithin(resolvedSource, sourceDir)) {
76
+ throw new Error(`Path in akm.include escapes the package root: ${entry}`);
77
+ }
78
+ if (!fs.existsSync(resolvedSource)) {
79
+ throw new Error(`Path in akm.include does not exist: ${entry}`);
80
+ }
81
+ if (path.basename(resolvedSource) === ".git") {
82
+ continue;
83
+ }
84
+ const relativePath = path.relative(sourceDir, resolvedSource);
85
+ if (!relativePath || relativePath === ".") {
86
+ copyDirectoryContents(sourceDir, destDir);
87
+ continue;
88
+ }
89
+ copyPath(resolvedSource, path.join(destDir, relativePath));
90
+ }
91
+ }
92
+ // ── Private helpers ─────────────────────────────────────────────────────────
93
+ function copyDirectoryContents(sourceDir, destinationDir) {
94
+ for (const entry of fs.readdirSync(sourceDir, { withFileTypes: true })) {
95
+ if (entry.name === ".git")
96
+ continue;
97
+ copyPath(path.join(sourceDir, entry.name), path.join(destinationDir, entry.name));
98
+ }
99
+ }
100
+ function copyPath(sourcePath, destinationPath) {
101
+ const stat = fs.statSync(sourcePath);
102
+ fs.mkdirSync(path.dirname(destinationPath), { recursive: true });
103
+ if (stat.isDirectory()) {
104
+ fs.cpSync(sourcePath, destinationPath, { recursive: true, force: true });
105
+ return;
106
+ }
107
+ fs.copyFileSync(sourcePath, destinationPath);
108
+ }