akm-cli 0.7.4 → 0.8.0-rc1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (158)
  1. package/{CHANGELOG.md → .github/CHANGELOG.md} +34 -1
  2. package/.github/LICENSE +374 -0
  3. package/dist/cli/parse-args.js +43 -0
  4. package/dist/cli.js +1007 -593
  5. package/dist/commands/agent-dispatch.js +102 -0
  6. package/dist/commands/agent-support.js +62 -0
  7. package/dist/commands/config-cli.js +68 -84
  8. package/dist/commands/consolidate.js +823 -0
  9. package/dist/commands/curate.js +1 -0
  10. package/dist/commands/distill-promotion-policy.js +658 -0
  11. package/dist/commands/distill.js +250 -48
  12. package/dist/commands/eval-cases.js +40 -0
  13. package/dist/commands/events.js +12 -24
  14. package/dist/commands/graph.js +222 -0
  15. package/dist/commands/health.js +376 -0
  16. package/dist/commands/help/help-accept.md +9 -0
  17. package/dist/commands/help/help-improve.md +53 -0
  18. package/dist/commands/help/help-proposals.md +15 -0
  19. package/dist/commands/help/help-propose.md +17 -0
  20. package/dist/commands/help/help-reject.md +8 -0
  21. package/dist/commands/history.js +3 -30
  22. package/dist/commands/improve.js +1170 -0
  23. package/dist/commands/info.js +2 -2
  24. package/dist/commands/init.js +2 -2
  25. package/dist/commands/install-audit.js +5 -1
  26. package/dist/commands/installed-stashes.js +118 -138
  27. package/dist/commands/knowledge.js +133 -0
  28. package/dist/commands/lint/agent-linter.js +46 -0
  29. package/dist/commands/lint/base-linter.js +251 -0
  30. package/dist/commands/lint/command-linter.js +46 -0
  31. package/dist/commands/lint/default-linter.js +13 -0
  32. package/dist/commands/lint/index.js +107 -0
  33. package/dist/commands/lint/knowledge-linter.js +13 -0
  34. package/dist/commands/lint/memory-linter.js +58 -0
  35. package/dist/commands/lint/registry.js +33 -0
  36. package/dist/commands/lint/skill-linter.js +42 -0
  37. package/dist/commands/lint/task-linter.js +47 -0
  38. package/dist/commands/lint/types.js +1 -0
  39. package/dist/commands/lint/workflow-linter.js +53 -0
  40. package/dist/commands/lint.js +1 -0
  41. package/dist/commands/migration-help.js +2 -2
  42. package/dist/commands/proposal.js +8 -7
  43. package/dist/commands/propose.js +113 -43
  44. package/dist/commands/reflect.js +175 -41
  45. package/dist/commands/registry-search.js +2 -2
  46. package/dist/commands/remember.js +55 -1
  47. package/dist/commands/schema-repair.js +130 -0
  48. package/dist/commands/search.js +21 -5
  49. package/dist/commands/show.js +131 -52
  50. package/dist/commands/source-add.js +10 -10
  51. package/dist/commands/source-manage.js +11 -19
  52. package/dist/commands/tasks.js +385 -0
  53. package/dist/commands/url-checker.js +39 -0
  54. package/dist/commands/vault.js +7 -33
  55. package/dist/core/action-contributors.js +25 -0
  56. package/dist/core/asset-registry.js +5 -17
  57. package/dist/core/asset-spec.js +11 -1
  58. package/dist/core/common.js +94 -0
  59. package/dist/core/concurrent.js +22 -0
  60. package/dist/core/config.js +229 -122
  61. package/dist/core/events.js +87 -123
  62. package/dist/core/frontmatter.js +3 -1
  63. package/dist/core/markdown.js +17 -0
  64. package/dist/core/memory-improve.js +678 -0
  65. package/dist/core/parse.js +155 -0
  66. package/dist/core/paths.js +101 -3
  67. package/dist/core/proposal-validators.js +61 -0
  68. package/dist/core/proposals.js +49 -38
  69. package/dist/core/state-db.js +775 -0
  70. package/dist/core/time.js +51 -0
  71. package/dist/core/warn.js +59 -1
  72. package/dist/indexer/db-search.js +86 -472
  73. package/dist/indexer/db.js +392 -6
  74. package/dist/indexer/ensure-index.js +133 -0
  75. package/dist/indexer/graph-boost.js +247 -94
  76. package/dist/indexer/graph-db.js +201 -0
  77. package/dist/indexer/graph-dedup.js +99 -0
  78. package/dist/indexer/graph-extraction.js +417 -74
  79. package/dist/indexer/index-context.js +10 -0
  80. package/dist/indexer/indexer.js +466 -298
  81. package/dist/indexer/llm-cache.js +47 -0
  82. package/dist/indexer/match-contributors.js +141 -0
  83. package/dist/indexer/matchers.js +24 -190
  84. package/dist/indexer/memory-inference.js +63 -29
  85. package/dist/indexer/metadata-contributors.js +26 -0
  86. package/dist/indexer/metadata.js +188 -175
  87. package/dist/indexer/path-resolver.js +89 -0
  88. package/dist/indexer/ranking-contributors.js +204 -0
  89. package/dist/indexer/ranking.js +74 -0
  90. package/dist/indexer/search-hit-enrichers.js +22 -0
  91. package/dist/indexer/search-source.js +24 -9
  92. package/dist/indexer/semantic-status.js +2 -16
  93. package/dist/indexer/walker.js +25 -0
  94. package/dist/integrations/agent/config.js +175 -3
  95. package/dist/integrations/agent/index.js +3 -1
  96. package/dist/integrations/agent/pipeline.js +39 -0
  97. package/dist/integrations/agent/profiles.js +67 -5
  98. package/dist/integrations/agent/prompts.js +114 -29
  99. package/dist/integrations/agent/runners.js +31 -0
  100. package/dist/integrations/agent/sdk-runner.js +120 -0
  101. package/dist/integrations/agent/spawn.js +136 -28
  102. package/dist/integrations/lockfile.js +10 -18
  103. package/dist/integrations/session-logs/index.js +65 -0
  104. package/dist/integrations/session-logs/providers/claude-code.js +56 -0
  105. package/dist/integrations/session-logs/providers/opencode.js +52 -0
  106. package/dist/integrations/session-logs/types.js +1 -0
  107. package/dist/llm/call-ai.js +74 -0
  108. package/dist/llm/client.js +63 -86
  109. package/dist/llm/feature-gate.js +27 -16
  110. package/dist/llm/graph-extract.js +297 -64
  111. package/dist/llm/memory-infer.js +52 -71
  112. package/dist/llm/metadata-enhance.js +39 -22
  113. package/dist/llm/prompts/graph-extract-user-prompt.md +12 -0
  114. package/dist/output/cli-hints-full.md +277 -0
  115. package/dist/output/cli-hints-short.md +65 -0
  116. package/dist/output/cli-hints.js +2 -309
  117. package/dist/output/renderers.js +196 -124
  118. package/dist/output/shapes.js +41 -3
  119. package/dist/output/text.js +257 -21
  120. package/dist/registry/providers/skills-sh.js +61 -49
  121. package/dist/registry/providers/static-index.js +44 -48
  122. package/dist/setup/setup.js +510 -11
  123. package/dist/sources/provider-factory.js +2 -1
  124. package/dist/sources/providers/git.js +44 -2
  125. package/dist/sources/website-ingest.js +4 -0
  126. package/dist/tasks/backends/cron.js +200 -0
  127. package/dist/tasks/backends/exec-utils.js +25 -0
  128. package/dist/tasks/backends/index.js +32 -0
  129. package/dist/tasks/backends/launchd-template.xml +19 -0
  130. package/dist/tasks/backends/launchd.js +184 -0
  131. package/dist/tasks/backends/schtasks-template.xml +29 -0
  132. package/dist/tasks/backends/schtasks.js +212 -0
  133. package/dist/tasks/parser.js +198 -0
  134. package/dist/tasks/resolveAkmBin.js +84 -0
  135. package/dist/tasks/runner.js +432 -0
  136. package/dist/tasks/schedule.js +208 -0
  137. package/dist/tasks/schema.js +13 -0
  138. package/dist/tasks/validator.js +59 -0
  139. package/dist/wiki/index-template.md +12 -0
  140. package/dist/wiki/ingest-workflow-template.md +54 -0
  141. package/dist/wiki/log-template.md +8 -0
  142. package/dist/wiki/schema-template.md +61 -0
  143. package/dist/wiki/wiki-templates.js +12 -0
  144. package/dist/wiki/wiki.js +10 -61
  145. package/dist/workflows/authoring.js +5 -25
  146. package/dist/workflows/db.js +9 -0
  147. package/dist/workflows/renderer.js +8 -3
  148. package/dist/workflows/runs.js +73 -88
  149. package/dist/workflows/scope-key.js +76 -0
  150. package/dist/workflows/validator.js +1 -1
  151. package/dist/workflows/workflow-template.md +24 -0
  152. package/docs/README.md +3 -0
  153. package/docs/migration/release-notes/0.7.0.md +1 -1
  154. package/docs/migration/release-notes/0.7.4.md +1 -1
  155. package/docs/migration/release-notes/0.7.5.md +20 -0
  156. package/docs/migration/release-notes/0.8.0.md +43 -0
  157. package/package.json +4 -3
  158. package/dist/templates/wiki-templates.js +0 -100
@@ -1,14 +1,13 @@
1
1
  import fs from "node:fs";
2
2
  import path from "node:path";
3
3
  import { isHttpUrl, resolveStashDir, toErrorMessage } from "../core/common";
4
+ import { concurrentMap } from "../core/concurrent";
4
5
  import { getDbPath } from "../core/paths";
5
6
  import { isVerbose, warn, warnVerbose } from "../core/warn";
6
7
  import { resolveIndexPassLLM } from "../llm/index-passes";
7
8
  import { takeWorkflowDocument } from "../workflows/document-cache";
8
- import { closeDatabase, deleteEntriesByDir, deleteEntriesByStashDir, deleteIndexDirStatesByStashDir, getEmbeddingCount, getEntriesByDir, getEntryCount, getIndexDirState, getMeta, isVecAvailable, openDatabase, openExistingDatabase, rebuildFts, setMeta, upsertEmbedding, upsertEntry, upsertIndexDirState, upsertUtilityScore, warnIfVecMissing, } from "./db";
9
- import { runGraphExtractionPass } from "./graph-extraction";
10
- import { runMemoryInferencePass } from "./memory-inference";
11
- import { applyCuratedFrontmatter, applyWikiFrontmatter, generateMetadataFlat, isWorkflowSkipWarning, loadStashFile, shouldIndexStashFile, } from "./metadata";
9
+ import { clearStaleCacheEntries, closeDatabase, deleteEntriesByDir, deleteEntriesByStashDir, deleteIndexDirStatesByStashDir, getAllEntriesForEmbedding, getEmbeddingCount, getEntriesByDir, getEntryCount, getIndexDirState, getMeta, isVecAvailable, openDatabase, openExistingDatabase, rebuildFts, relinkUsageEvents, setMeta, upsertEmbedding, upsertEntry, upsertIndexDirState, upsertUtilityScore, upsertWorkflowDocument, warnIfVecMissing, } from "./db";
10
+ import { applyCuratedFrontmatter, applyWikiFrontmatter, generateMetadataFlat, isEnrichmentComplete, isWorkflowSkipWarning, loadStashFile, shouldIndexStashFile, } from "./metadata";
12
11
  import { buildSearchText } from "./search-fields";
13
12
  import { classifySemanticFailure, clearSemanticStatus, deriveSemanticProviderFingerprint, writeSemanticStatus, } from "./semantic-status";
14
13
  import { ensureUsageEventsSchema, purgeOldUsageEvents } from "./usage-events";
@@ -18,19 +17,196 @@ function throwIfAborted(signal) {
18
17
  throw signal.reason instanceof Error ? signal.reason : new Error("index interrupted");
19
18
  }
20
19
  }
20
+ function getDefaultLlmConcurrency(llmConfig) {
21
+ if (typeof llmConfig?.concurrency === "number")
22
+ return llmConfig.concurrency;
23
+ if (!llmConfig?.endpoint)
24
+ return 1;
25
+ try {
26
+ const url = new URL(llmConfig.endpoint);
27
+ const host = url.hostname.toLowerCase();
28
+ if (host === "localhost" || host === "127.0.0.1" || host === "::1" || host.endsWith(".localhost"))
29
+ return 1;
30
+ }
31
+ catch {
32
+ return 1;
33
+ }
34
+ return 4;
35
+ }
36
+ // ── Phase functions ──────────────────────────────────────────────────────────
37
+ /**
38
+ * Source cache phase: ensure git stash caches are up to date and purge orphaned
39
+ * entries from removed sources (incremental only).
40
+ */
41
+ async function runSourceCachePhase(ctx) {
42
+ const { db, config, sourceDirs, isIncremental, full } = ctx;
43
+ if (isIncremental && !full) {
44
+ // Purge entries from stash dirs that have been removed since the last run
45
+ // (e.g. after `akm remove`) so orphaned entries don't linger.
46
+ const prevStashDirsJson = getMeta(db, "stashDirs");
47
+ if (prevStashDirsJson) {
48
+ let prevStashDirs = [];
49
+ try {
50
+ const parsed = JSON.parse(prevStashDirsJson);
51
+ if (Array.isArray(parsed)) {
52
+ prevStashDirs = parsed.filter((d) => typeof d === "string");
53
+ }
54
+ else {
55
+ warn("index_meta stashDirs value is not an array — treating as empty");
56
+ }
57
+ }
58
+ catch {
59
+ warn("index_meta stashDirs value is corrupt JSON — treating as empty");
60
+ }
61
+ const currentSet = new Set(sourceDirs);
62
+ for (const dir of prevStashDirs) {
63
+ if (!currentSet.has(dir)) {
64
+ ctx.hadRemovedSources = true;
65
+ deleteEntriesByStashDir(db, dir);
66
+ deleteIndexDirStatesByStashDir(db, dir);
67
+ }
68
+ }
69
+ }
70
+ }
71
+ // Source caches are hydrated before akmIndex() calls this phase; nothing
72
+ // further to do here. The flag is exposed on ctx for runWalkPhase().
73
+ void config;
74
+ }
75
+ /**
76
+ * Walk phase: scan the filesystem, generate metadata, and persist entries to
77
+ * the database. Also kicks off LLM enrichment for directories that need it.
78
+ *
79
+ * Writes `ctx.scannedDirs`, `ctx.skippedDirs`, `ctx.generatedCount`,
80
+ * `ctx.walkWarnings`, and `ctx.dirsNeedingLlm` for downstream phases.
81
+ */
82
+ async function runWalkPhase(ctx) {
83
+ const { db, sources, isIncremental, builtAtMs, hadRemovedSources, full, reEnrich, signal, onProgress, config } = ctx;
84
+ throwIfAborted(signal);
85
+ ctx.timing.tWalkStart = Date.now();
86
+ const doFullDelete = full || !isIncremental;
87
+ const { scannedDirs, skippedDirs, generatedCount, dirsNeedingLlm, warnings } = await indexEntries(db, sources, isIncremental, builtAtMs, hadRemovedSources, doFullDelete, onProgress);
88
+ ctx.scannedDirs = scannedDirs;
89
+ ctx.skippedDirs = skippedDirs;
90
+ ctx.generatedCount = generatedCount;
91
+ ctx.walkWarnings = warnings;
92
+ ctx.dirsNeedingLlm = dirsNeedingLlm;
93
+ onProgress({
94
+ phase: "scan",
95
+ message: `Scanned ${scannedDirs} ${scannedDirs === 1 ? "directory" : "directories"} and skipped ${skippedDirs}.`,
96
+ });
97
+ // Workflow validation noise gate (issue #273): suppress per-spec stderr lines
98
+ // at default verbosity and emit a single summary instead.
99
+ // In verbose mode the per-spec lines are already printed by
100
+ // buildMetadataSkipWarning at generation time — no second pass needed here.
101
+ if (!isVerbose()) {
102
+ const workflowSkipWarnings = warnings.filter(isWorkflowSkipWarning);
103
+ const skippedWorkflowCount = workflowSkipWarnings.length;
104
+ if (skippedWorkflowCount > 0) {
105
+ const noun = skippedWorkflowCount === 1 ? "workflow spec" : "workflow specs";
106
+ warn(`${skippedWorkflowCount} ${noun} skipped due to validation errors; ` +
107
+ "rerun with --verbose (or AKM_VERBOSE=1) to see details.");
108
+ }
109
+ }
110
+ ctx.timing.tWalkEnd = Date.now();
111
+ throwIfAborted(signal);
112
+ // LLM enrichment for directories that need it
113
+ await enhanceDirsWithLlm(db, config, dirsNeedingLlm, onProgress, signal, true, reEnrich);
114
+ onProgress({
115
+ phase: "llm",
116
+ message: resolveIndexPassLLM("enrichment", config)
117
+ ? `LLM enhancement reviewed ${dirsNeedingLlm.length} ${dirsNeedingLlm.length === 1 ? "directory" : "directories"}.`
118
+ : "LLM enhancement disabled.",
119
+ });
120
+ ctx.timing.tLlmEnd = Date.now();
121
+ }
122
+ /**
123
+ * Embedding phase: generate and store vector embeddings for all unembedded
124
+ * entries. Writes `ctx.embeddingResult` for the finalize phase.
125
+ */
126
+ async function runEmbeddingPhase(ctx) {
127
+ const { db, config, signal, onProgress } = ctx;
128
+ throwIfAborted(signal);
129
+ ctx.embeddingResult = await generateEmbeddingsForDb(db, config, onProgress);
130
+ ctx.timing.tEmbedEnd = Date.now();
131
+ }
132
+ /**
133
+ * Finalize phase: rebuild FTS, re-link usage events, recompute utility scores,
134
+ * regenerate wiki indexes, update index metadata, and emit the verify event.
135
+ */
136
+ async function runFinalizePhase(ctx) {
137
+ const { db, config, sources, sourceDirs, isIncremental, stashDir, signal, onProgress } = ctx;
138
+ // Rebuild FTS after all inserts. Use incremental mode when this whole
139
+ // index run is incremental — only entries touched by `upsertEntry`
140
+ // since the last rebuild are re-indexed.
141
+ rebuildFts(db, { incremental: isIncremental });
142
+ onProgress({
143
+ phase: "fts",
144
+ message: isIncremental ? "Rebuilt full-text search index (dirty rows only)." : "Rebuilt full-text search index.",
145
+ });
146
+ ctx.timing.tFtsEnd = Date.now();
147
+ // Re-link detached usage_events and recompute utility scores.
148
+ relinkUsageEvents(db);
149
+ recomputeUtilityScores(db);
150
+ // Purge LLM cache entries for assets that no longer exist in the index.
151
+ try {
152
+ clearStaleCacheEntries(db);
153
+ }
154
+ catch {
155
+ /* ignore */
156
+ }
157
+ // Regenerate each wiki's index.md from its pages' frontmatter. Best-effort.
158
+ try {
159
+ const { regenerateAllWikiIndexes } = await import("../wiki/wiki.js");
160
+ regenerateAllWikiIndexes(stashDir);
161
+ }
162
+ catch {
163
+ /* best-effort */
164
+ }
165
+ throwIfAborted(signal);
166
+ // Update index metadata
167
+ const embeddingResult = ctx.embeddingResult ?? { success: false };
168
+ setMeta(db, "builtAt", new Date().toISOString());
169
+ setMeta(db, "stashDir", stashDir);
170
+ setMeta(db, "stashDirs", JSON.stringify(sourceDirs));
171
+ setMeta(db, "hasEmbeddings", embeddingResult.success ? "1" : "0");
172
+ warnIfVecMissing(db);
173
+ const totalEntries = getEntryCount(db);
174
+ const verification = verifyIndexState(db, config, totalEntries, embeddingResult);
175
+ if (config.semanticSearchMode === "off") {
176
+ clearSemanticStatus();
177
+ }
178
+ else {
179
+ writeSemanticStatus({
180
+ status: verification.semanticStatus === "disabled" ? "pending" : verification.semanticStatus,
181
+ ...(embeddingResult.reason ? { reason: embeddingResult.reason } : {}),
182
+ ...(embeddingResult.message ? { message: embeddingResult.message } : {}),
183
+ providerFingerprint: deriveSemanticProviderFingerprint(config.embedding),
184
+ lastCheckedAt: new Date().toISOString(),
185
+ entryCount: verification.entryCount,
186
+ embeddingCount: verification.embeddingCount,
187
+ });
188
+ }
189
+ onProgress({ phase: "verify", message: verification.message });
190
+ // Store verification result and totalEntries on ctx for the caller to use
191
+ ctx._verification = verification;
192
+ ctx._totalEntries = totalEntries;
193
+ // suppress unused warning — sources was previously used inline
194
+ void sources;
195
+ }
21
196
  // ── Indexer ──────────────────────────────────────────────────────────────────
22
197
  export async function akmIndex(options) {
23
198
  const stashDir = options?.stashDir || resolveStashDir();
24
199
  const onProgress = options?.onProgress ?? (() => { });
25
200
  const signal = options?.signal;
26
- const enrich = options?.enrich === true;
201
+ const reEnrich = options?.reEnrich === true;
202
+ const full = options?.full === true;
27
203
  // Load config and resolve all stash sources
28
204
  const { loadConfig } = await import("../core/config.js");
29
205
  const config = loadConfig();
30
206
  // Ensure git stash caches are extracted before resolving stash dirs,
31
207
  // so their content directories exist on disk for the walker to discover.
32
208
  const { ensureSourceCaches, resolveSourceEntries } = await import("./search-source.js");
33
- await ensureSourceCaches(config, { force: options?.full === true });
209
+ await ensureSourceCaches(config, { force: full });
34
210
  const allSourceEntries = resolveSourceEntries(stashDir, config);
35
211
  const allSourceDirs = allSourceEntries.map((s) => s.path);
36
212
  const t0 = Date.now();
@@ -39,11 +215,41 @@ export async function akmIndex(options) {
39
215
  const embeddingDim = config.embedding?.dimension;
40
216
  const db = openDatabase(dbPath, embeddingDim ? { embeddingDim } : undefined);
41
217
  try {
42
- // Check if we should do incremental
218
+ // Determine incremental vs full mode
43
219
  const prevStashDir = getMeta(db, "stashDir");
44
220
  const prevBuiltAt = getMeta(db, "builtAt");
45
- const isIncremental = !options?.full && prevStashDir === stashDir && !!prevBuiltAt;
221
+ const isIncremental = !full && prevStashDir === stashDir && !!prevBuiltAt;
46
222
  const builtAtMs = isIncremental && prevBuiltAt ? new Date(prevBuiltAt).getTime() : 0;
223
+ // Assemble the run context
224
+ const ctx = {
225
+ db,
226
+ config,
227
+ sources: allSourceEntries,
228
+ sourceDirs: allSourceDirs,
229
+ full,
230
+ reEnrich,
231
+ stashDir,
232
+ onProgress,
233
+ signal,
234
+ timing: {
235
+ t0,
236
+ tWalkStart: t0,
237
+ tWalkEnd: t0,
238
+ tLlmEnd: t0,
239
+ tFtsEnd: t0,
240
+ tEmbedEnd: t0,
241
+ },
242
+ isIncremental,
243
+ builtAtMs,
244
+ hadRemovedSources: false,
245
+ scannedDirs: 0,
246
+ skippedDirs: 0,
247
+ generatedCount: 0,
248
+ walkWarnings: [],
249
+ dirsNeedingLlm: [],
250
+ embeddingResult: null,
251
+ graphExtractionResult: null,
252
+ };
47
253
  onProgress({
48
254
  phase: "summary",
49
255
  message: buildIndexSummaryMessage({
@@ -51,218 +257,34 @@ export async function akmIndex(options) {
51
257
  sourcesCount: allSourceDirs.length,
52
258
  semanticSearchMode: config.semanticSearchMode,
53
259
  embeddingProvider: getEmbeddingProvider(config.embedding),
54
- llmEnabled: enrich && !!resolveIndexPassLLM("enrichment", config),
260
+ llmEnabled: !!resolveIndexPassLLM("enrichment", config),
55
261
  vecAvailable: isVecAvailable(db),
56
262
  }),
57
263
  });
58
- let hadRemovedSources = false;
59
- if (options?.full || !isIncremental) {
60
- // The delete is now merged into the insert transaction inside
61
- // indexEntries() so that a reader never sees an empty database between
62
- // the wipe and the re-inserts. The doFullDelete flag signals this path.
63
- }
64
- else {
65
- // Incremental: purge entries from stash dirs that have been removed
66
- // (e.g. after `akm remove`) so orphaned entries don't linger.
67
- const prevStashDirsJson = getMeta(db, "stashDirs");
68
- if (prevStashDirsJson) {
69
- let prevStashDirs = [];
70
- try {
71
- const parsed = JSON.parse(prevStashDirsJson);
72
- if (Array.isArray(parsed)) {
73
- prevStashDirs = parsed.filter((d) => typeof d === "string");
74
- }
75
- else {
76
- warn("index_meta stashDirs value is not an array — treating as empty");
77
- }
78
- }
79
- catch {
80
- warn("index_meta stashDirs value is corrupt JSON — treating as empty");
81
- }
82
- const currentSet = new Set(allSourceDirs);
83
- for (const dir of prevStashDirs) {
84
- if (!currentSet.has(dir)) {
85
- hadRemovedSources = true;
86
- deleteEntriesByStashDir(db, dir);
87
- deleteIndexDirStatesByStashDir(db, dir);
88
- }
89
- }
90
- }
91
- }
92
- throwIfAborted(signal);
93
- // Memory inference pass (#201). Runs before the walk so any derived-memory
94
- // children that get written are picked up by the walker in this same run
95
- // and don't have to wait for the next `akm index`. Gated entirely by
96
- // `resolveIndexPassLLM("memory", config)` — when the user has no
97
- // `akm.llm` block or has set `index.memory.llm = false`, this is a no-op
98
- // and existing inferred children are left in place.
99
- if (enrich) {
100
- try {
101
- const inferenceResult = await runMemoryInferencePass(config, allSourceEntries, signal);
102
- if (inferenceResult.writtenFacts > 0) {
103
- onProgress({
104
- phase: "llm",
105
- message: `Memory inference wrote ${inferenceResult.writtenFacts} derived memor${inferenceResult.writtenFacts === 1 ? "y" : "ies"} from ${inferenceResult.splitParents} parent memor${inferenceResult.splitParents === 1 ? "y" : "ies"}.`,
106
- });
107
- }
108
- }
109
- catch (err) {
110
- warn(`Memory inference pass aborted: ${err instanceof Error ? err.message : String(err)}`);
111
- }
112
- }
113
- else {
114
- onProgress({
115
- phase: "llm",
116
- message: "LLM passes disabled; rerun with --enrich to enable inference and enrichment.",
117
- });
118
- }
119
- // Graph extraction pass (#207). Runs after memory inference so any
120
- // atomic-fact children that just got written are visible to the graph
121
- // walk. Persists `<stashRoot>/.akm/graph.json` — an indexer artifact,
122
- // NOT a user-visible asset, so it is not routed through
123
- // writeAssetToSource. The artifact feeds the existing FTS5+boosts
124
- // pipeline as a single boost component (see graph-boost.ts); there is
125
- // no parallel scoring track. Disabled when either gate (the locked
126
- // `llm.features.graph_extraction` feature flag or the per-pass
127
- // `index.graph.llm` toggle) is off; the existing graph file is
128
- // preserved on disk in that case.
129
- if (enrich) {
130
- try {
131
- const graphResult = await runGraphExtractionPass(config, allSourceEntries, signal);
132
- if (graphResult.written) {
133
- onProgress({
134
- phase: "llm",
135
- message: `Graph extraction wrote ${graphResult.totalEntities} entit${graphResult.totalEntities === 1 ? "y" : "ies"} and ${graphResult.totalRelations} relation${graphResult.totalRelations === 1 ? "" : "s"} from ${graphResult.extracted} file${graphResult.extracted === 1 ? "" : "s"}.`,
136
- });
137
- }
138
- }
139
- catch (err) {
140
- warn(`Graph extraction pass aborted: ${err instanceof Error ? err.message : String(err)}`);
141
- }
142
- }
143
- throwIfAborted(signal);
144
- const tWalkStart = Date.now();
145
- // Walk stash dirs and index entries.
146
- // doFullDelete=true merges the wipe into the same transaction as the
147
- // inserts so readers never see an empty database mid-rebuild.
148
- const doFullDelete = options?.full || !isIncremental;
149
- const { scannedDirs, skippedDirs, generatedCount, dirsNeedingLlm, warnings } = await indexEntries(db, allSourceEntries, isIncremental, builtAtMs, hadRemovedSources, doFullDelete, onProgress);
150
- onProgress({
151
- phase: "scan",
152
- message: `Scanned ${scannedDirs} ${scannedDirs === 1 ? "directory" : "directories"} and skipped ${skippedDirs}.`,
153
- });
154
- // Workflow validation noise gate (issue #273): per-spec stderr lines from
155
- // `buildMetadataSkipWarning` are suppressed at default verbosity in
156
- // `metadata.ts`. Replace them with a single summary line so operators
157
- // running a cold-start search against a fresh registry-cloned source
158
- // don't get the impression akm is broken. Verbose mode keeps the
159
- // per-spec output instead of (not in addition to) the summary.
160
- if (!isVerbose()) {
161
- const skippedWorkflowCount = warnings.filter(isWorkflowSkipWarning).length;
162
- if (skippedWorkflowCount > 0) {
163
- const noun = skippedWorkflowCount === 1 ? "workflow spec" : "workflow specs";
164
- warn(`${skippedWorkflowCount} ${noun} skipped due to validation errors; ` +
165
- "rerun with --verbose (or AKM_VERBOSE=1) to see details.");
166
- }
167
- }
168
- const tWalkEnd = Date.now();
169
- throwIfAborted(signal);
170
- // Enhance entries with LLM if configured
171
- await enhanceDirsWithLlm(db, config, dirsNeedingLlm, signal, enrich);
172
- onProgress({
173
- phase: "llm",
174
- message: enrich && resolveIndexPassLLM("enrichment", config)
175
- ? `LLM enhancement reviewed ${dirsNeedingLlm.length} ${dirsNeedingLlm.length === 1 ? "directory" : "directories"}.`
176
- : "LLM enhancement disabled.",
177
- });
178
- const tLlmEnd = Date.now();
179
- throwIfAborted(signal);
180
- // Rebuild FTS after all inserts. Use incremental mode when this whole
181
- // index run is incremental — only entries touched by `upsertEntry`
182
- // since the last rebuild are re-indexed, instead of re-scanning every
183
- // row on every `akm index` invocation.
184
- rebuildFts(db, { incremental: isIncremental });
185
- onProgress({
186
- phase: "fts",
187
- message: isIncremental ? "Rebuilt full-text search index (dirty rows only)." : "Rebuilt full-text search index.",
188
- });
189
- const tFtsEnd = Date.now();
190
- // Re-link detached usage_events to their new entry_ids via entry_ref.
191
- // entry_ref is "type:name" (e.g., "skill:code-review"), entry_key is "stashDir:type:name".
192
- // Use substr to extract the "type:name" suffix from entry_key for exact comparison
193
- // (avoids LIKE which would require escaping % and _ in user-facing names).
194
- try {
195
- db.exec(`
196
- UPDATE usage_events SET entry_id = (
197
- SELECT e.id FROM entries e
198
- WHERE substr(e.entry_key, length(e.entry_key) - length(usage_events.entry_ref)) = ':' || usage_events.entry_ref
199
- LIMIT 1
200
- )
201
- WHERE entry_id IS NULL AND entry_ref IS NOT NULL
202
- `);
203
- }
204
- catch {
205
- /* ignore if table doesn't exist yet */
206
- }
207
- // Recompute utility scores from usage_events after FTS rebuild
208
- recomputeUtilityScores(db);
209
- // Regenerate each wiki's index.md from its pages' frontmatter. Best-effort
210
- // — errors are caught inside regenerateAllWikiIndexes and never block the
211
- // index run. The primary stash is the only target: additional sources
212
- // are read-only caches, and regenerating their indexes would mutate
213
- // cache content.
214
- try {
215
- const { regenerateAllWikiIndexes } = await import("../wiki/wiki.js");
216
- regenerateAllWikiIndexes(stashDir);
217
- }
218
- catch {
219
- /* best-effort */
220
- }
221
- throwIfAborted(signal);
222
- // Generate embeddings if semantic search is enabled
223
- const embeddingResult = await generateEmbeddingsForDb(db, config, onProgress);
224
- const tEmbedEnd = Date.now();
225
- // Update metadata
226
- setMeta(db, "builtAt", new Date().toISOString());
227
- setMeta(db, "stashDir", stashDir);
228
- setMeta(db, "stashDirs", JSON.stringify(allSourceDirs));
229
- setMeta(db, "hasEmbeddings", embeddingResult.success ? "1" : "0");
230
- const totalEntries = getEntryCount(db);
231
- // Warn on every index run if using JS fallback with many entries
232
- warnIfVecMissing(db);
233
- const tEnd = Date.now();
234
- const verification = verifyIndexState(db, config, totalEntries, embeddingResult);
235
- if (config.semanticSearchMode === "off") {
236
- clearSemanticStatus();
237
- }
238
- else {
239
- writeSemanticStatus({
240
- status: verification.semanticStatus === "disabled" ? "pending" : verification.semanticStatus,
241
- ...(embeddingResult.reason ? { reason: embeddingResult.reason } : {}),
242
- ...(embeddingResult.message ? { message: embeddingResult.message } : {}),
243
- providerFingerprint: deriveSemanticProviderFingerprint(config.embedding),
244
- lastCheckedAt: new Date().toISOString(),
245
- entryCount: verification.entryCount,
246
- embeddingCount: verification.embeddingCount,
247
- });
248
- }
249
- onProgress({ phase: "verify", message: verification.message });
264
+ // ── Phase sequence ───────────────────────────────────────────────────────
265
+ await runSourceCachePhase(ctx);
266
+ await runWalkPhase(ctx);
267
+ await runEmbeddingPhase(ctx);
268
+ await runFinalizePhase(ctx);
269
+ // ────────────────────────────────────────────────────────────────────────
270
+ const { _verification: verification, _totalEntries: totalEntries } = ctx;
271
+ const { timing } = ctx;
250
272
  return {
251
273
  stashDir,
252
274
  totalEntries,
253
- generatedMetadata: generatedCount,
275
+ generatedMetadata: ctx.generatedCount,
254
276
  indexPath: dbPath,
255
277
  mode: isIncremental ? "incremental" : "full",
256
- directoriesScanned: scannedDirs,
257
- directoriesSkipped: skippedDirs,
258
- ...(warnings.length > 0 ? { warnings } : {}),
278
+ directoriesScanned: ctx.scannedDirs,
279
+ directoriesSkipped: ctx.skippedDirs,
280
+ ...(ctx.walkWarnings.length > 0 ? { warnings: ctx.walkWarnings } : {}),
259
281
  verification,
260
282
  timing: {
261
- totalMs: tEnd - t0,
262
- walkMs: tWalkEnd - tWalkStart,
263
- llmMs: tLlmEnd - tWalkEnd,
264
- embedMs: tEmbedEnd - tFtsEnd,
265
- ftsMs: tFtsEnd - tLlmEnd,
283
+ totalMs: Date.now() - timing.t0,
284
+ walkMs: timing.tWalkEnd - timing.tWalkStart,
285
+ llmMs: timing.tLlmEnd - timing.tWalkEnd,
286
+ embedMs: timing.tEmbedEnd - timing.tFtsEnd,
287
+ ftsMs: timing.tFtsEnd - timing.tLlmEnd,
266
288
  },
267
289
  };
268
290
  }
@@ -500,8 +522,10 @@ async function indexEntries(db, allSourceEntries, isIncremental, builtAtMs, hadR
500
522
  if (stash) {
501
523
  for (const entry of stash.entries) {
502
524
  const entryPath = entry.filename ? path.join(dirPath, entry.filename) : null;
503
- if (!entryPath)
504
- continue; // skip unresolvable entries
525
+ if (!entryPath) {
526
+ warn(`Skipping entry with no resolvable path in ${dirPath}`);
527
+ continue;
528
+ }
505
529
  if (!shouldIndexStashFile(currentStashDir, entryPath))
506
530
  continue;
507
531
  // Skip if a higher-priority stash root already indexed this asset
@@ -523,7 +547,9 @@ async function indexEntries(db, allSourceEntries, isIncremental, builtAtMs, hadR
523
547
  }
524
548
  }
525
549
  }
526
- // Collect dirs needing LLM enhancement during the first walk
550
+ // Collect dirs needing LLM enhancement during the first walk.
551
+ // Only dirs with "generated" entries need enrichment (unless reEnrich
552
+ // forces re-processing of already-enriched entries).
527
553
  if (stash.entries.some((e) => e.quality === "generated")) {
528
554
  dirsNeedingLlm.push({ dirPath, files, currentStashDir, stash });
529
555
  }
@@ -541,7 +567,7 @@ async function indexEntries(db, allSourceEntries, isIncremental, builtAtMs, hadR
541
567
  reason: persistedReason,
542
568
  });
543
569
  if (persistedRows === 0) {
544
- warnVerbose(`[index] zero-row ${dirPath}: ${persistedReason}`);
570
+ warn(`[index] zero-row ${dirPath}: ${persistedReason}`);
545
571
  }
546
572
  }
547
573
  });
@@ -640,9 +666,7 @@ function inferZeroRowReason(stash, priorReason, warnings, dirPath, dedupedRows)
640
666
  return "empty-generated-set";
641
667
  return `zero-row:${priorReason?.kind ?? "unknown"}`;
642
668
  }
643
- async function enhanceDirsWithLlm(db, config, dirsNeedingLlm, signal, enrich = false) {
644
- if (!enrich)
645
- return;
669
+ async function enhanceDirsWithLlm(db, config, dirsNeedingLlm, onProgress, signal, _enrich = false, reEnrich = false) {
646
670
  // Resolve per-pass LLM config via the unified shim. Returns undefined when
647
671
  // either no `akm.llm` is configured or the user opted this pass out via
648
672
  // `index.enrichment.llm = false`. (#208)
@@ -653,24 +677,142 @@ async function enhanceDirsWithLlm(db, config, dirsNeedingLlm, signal, enrich = f
653
677
  // as a single visible warning instead of silently degrading every entry
654
678
  // and leaving the user wondering why nothing got enhanced.
655
679
  const summary = { attempted: 0, succeeded: 0, failureSamples: [] };
656
- for (const { dirPath, files, currentStashDir, stash: originalStash } of dirsNeedingLlm) {
657
- throwIfAborted(signal);
658
- // Only enhance generated entries; user-provided overrides should not be overwritten
659
- const generatedEntries = originalStash.entries.filter((e) => e.quality === "generated");
660
- if (generatedEntries.length === 0)
661
- continue;
662
- const generatedStash = { entries: generatedEntries };
663
- const enhanced = await enhanceStashWithLlm(llmConfig, generatedStash, files, summary, signal);
664
- // Re-upsert the enhanced entries in a single transaction so a crash
665
- // cannot leave half the entries updated and the rest stale.
666
- db.transaction(() => {
667
- for (const entry of enhanced.entries) {
668
- const entryPath = entry.filename ? path.join(dirPath, entry.filename) : files[0] || dirPath;
669
- const entryKey = `${currentStashDir}:${entry.type}:${entry.name}`;
670
- const searchText = buildSearchText(entry);
671
- upsertEntry(db, entryKey, dirPath, entryPath, currentStashDir, attachFileSize(entry, entryPath), searchText);
672
- }
673
- })();
680
+ let completedDirs = 0;
681
+ let completedEntries = 0;
682
+ const totalDirs = dirsNeedingLlm.length;
683
+ const totalEntries = dirsNeedingLlm.reduce((sum, { stash }) => {
684
+ const entriesToEnhance = stash.entries.filter((e) => {
685
+ if (e.quality !== "generated" && !(reEnrich && e.quality === "enriched"))
686
+ return false;
687
+ if (!reEnrich && isEnrichmentComplete(e))
688
+ return false;
689
+ return true;
690
+ });
691
+ return sum + entriesToEnhance.length;
692
+ }, 0);
693
+ // P3 wall-clock budget for the enrichment pass. Defaults to llm.timeoutMs
694
+ // (or 10 minutes if not set). Users can extend this via llm.timeoutMs in
695
+ // config no separate knob needed.
696
+ const budgetMs = (llmConfig.timeoutMs ?? 10 * 60 * 1000) * Math.max(totalEntries, 1);
697
+ const enrichDeadline = AbortSignal.timeout(budgetMs);
698
+ let deadlineHit = false;
699
+ const enrichSignal = (() => {
700
+ if (!signal)
701
+ return enrichDeadline;
702
+ // Combine: abort when either fires.
703
+ const controller = new AbortController();
704
+ const onAbort = () => controller.abort();
705
+ signal.addEventListener("abort", onAbort, { once: true });
706
+ enrichDeadline.addEventListener("abort", () => {
707
+ deadlineHit = true;
708
+ controller.abort();
709
+ }, { once: true });
710
+ return controller.signal;
711
+ })();
712
+ if (totalEntries > 0) {
713
+ onProgress?.({
714
+ phase: "llm",
715
+ message: `LLM enhancement starting for ${totalEntries} entr${totalEntries === 1 ? "y" : "ies"} ` +
716
+ `across ${totalDirs} director${totalDirs === 1 ? "y" : "ies"} (concurrency ${getDefaultLlmConcurrency(llmConfig)}).`,
717
+ processed: 0,
718
+ total: totalEntries,
719
+ });
720
+ }
721
+ let currentDirLabel;
722
+ let lastProgressAt = Date.now();
723
+ let heartbeatTimer;
724
+ if (totalEntries > 0 && onProgress) {
725
+ heartbeatTimer = setInterval(() => {
726
+ if (Date.now() - lastProgressAt < 15000)
727
+ return;
728
+ onProgress({
729
+ phase: "llm",
730
+ message: `Still enriching ${completedEntries}/${totalEntries} entr${totalEntries === 1 ? "y" : "ies"}` +
731
+ (currentDirLabel ? `; waiting on ${currentDirLabel}` : "") +
732
+ ".",
733
+ processed: completedEntries,
734
+ total: totalEntries,
735
+ });
736
+ lastProgressAt = Date.now();
737
+ }, 15000);
738
+ }
739
+ try {
740
+ await concurrentMap(dirsNeedingLlm, async ({ dirPath, files, currentStashDir, stash: originalStash }) => {
741
+ if (enrichSignal.aborted)
742
+ return undefined;
743
+ // Only enhance generated entries (or all when reEnrich=true);
744
+ // user-provided overrides should not be overwritten.
745
+ // Skip entries that are already fully enriched (description + tags + searchHints)
746
+ // unless the caller explicitly requests re-enrichment via reEnrich=true.
747
+ const entriesToEnhance = originalStash.entries.filter((e) => {
748
+ if (e.quality !== "generated" && !(reEnrich && e.quality === "enriched"))
749
+ return false;
750
+ if (!reEnrich && isEnrichmentComplete(e)) {
751
+ warnVerbose(`[akm] skipping LLM enrichment for "${e.name}" — entry already complete`);
752
+ return false;
753
+ }
754
+ return true;
755
+ });
756
+ if (entriesToEnhance.length === 0)
757
+ return undefined;
758
+ currentDirLabel = path.relative(currentStashDir, dirPath) || ".";
759
+ onProgress?.({
760
+ phase: "llm",
761
+ message: `Enhancing ${currentDirLabel} ` +
762
+ `(${entriesToEnhance.length} entr${entriesToEnhance.length === 1 ? "y" : "ies"}).`,
763
+ processed: completedEntries,
764
+ total: totalEntries,
765
+ });
766
+ lastProgressAt = Date.now();
767
+ const targetStash = { entries: entriesToEnhance };
768
+ const entryKeys = entriesToEnhance.map((e) => `${currentStashDir}:${e.type}:${e.name}`);
769
+ const enhanced = await enhanceStashWithLlm(llmConfig, targetStash, files, summary, enrichSignal, db, entryKeys, reEnrich, config, (event) => {
770
+ completedEntries++;
771
+ lastProgressAt = Date.now();
772
+ onProgress?.({
773
+ phase: "llm",
774
+ message: `Enhanced ${completedEntries}/${totalEntries} entr${totalEntries === 1 ? "y" : "ies"}; ` +
775
+ `${completedDirs}/${totalDirs} director${totalDirs === 1 ? "y" : "ies"} complete` +
776
+ (event.entryName ? `; current ${event.entryName}` : "") +
777
+ (currentDirLabel ? ` in ${currentDirLabel}` : "") +
778
+ (event.outcome === "cache-hit" ? " (cache hit)" : ""),
779
+ processed: completedEntries,
780
+ total: totalEntries,
781
+ });
782
+ });
783
+ // Re-upsert the enhanced entries in a single transaction so a crash
784
+ // cannot leave half the entries updated and the rest stale.
785
+ db.transaction(() => {
786
+ for (const entry of enhanced.entries) {
787
+ const entryPath = entry.filename ? path.join(dirPath, entry.filename) : files[0] || dirPath;
788
+ const entryKey = `${currentStashDir}:${entry.type}:${entry.name}`;
789
+ const searchText = buildSearchText(entry);
790
+ upsertEntry(db, entryKey, dirPath, entryPath, currentStashDir, attachFileSize(entry, entryPath), searchText);
791
+ }
792
+ })();
793
+ completedDirs++;
794
+ lastProgressAt = Date.now();
795
+ onProgress?.({
796
+ phase: "llm",
797
+ message: `Completed ${completedDirs}/${totalDirs} director${totalDirs === 1 ? "y" : "ies"}; ` +
798
+ `${completedEntries}/${totalEntries} entr${totalEntries === 1 ? "y" : "ies"} processed.`,
799
+ processed: completedEntries,
800
+ total: totalEntries,
801
+ });
802
+ return undefined;
803
+ },
804
+ // Default concurrency of 4 works well for cloud LLM APIs. Local model
805
+ // servers (LM Studio, Ollama) run one inference at a time — set
806
+ // `llm.concurrency: 1` in config.json to avoid "Model reloaded" / 500
807
+ // errors from concurrent request overload.
808
+ getDefaultLlmConcurrency(llmConfig));
809
+ }
810
+ finally {
811
+ if (heartbeatTimer)
812
+ clearInterval(heartbeatTimer);
813
+ }
814
+ if (deadlineHit) {
815
+ warn("[akm] LLM enrichment budget exceeded. Re-run `akm index` to continue. Increase llm.timeoutMs for a larger budget.");
674
816
  }
675
817
  if (summary.attempted > 0 && summary.succeeded === 0) {
676
818
  const sample = summary.failureSamples.length ? ` Example: ${summary.failureSamples[0]}` : "";
@@ -769,14 +911,6 @@ async function generateEmbeddingsForDb(db, config, onProgress, signal) {
769
911
  }
770
912
  }
771
913
  // ── Helpers ─────────────────────────────────────────────────────────────────
772
- function getAllEntriesForEmbedding(db) {
773
- return db
774
- .prepare(`
775
- SELECT e.id, e.search_text AS searchText, e.entry_key AS entryKey, e.file_path AS filePath FROM entries e
776
- WHERE NOT EXISTS (SELECT 1 FROM embeddings b WHERE b.id = e.id)
777
- `)
778
- .all();
779
- }
780
914
  function attachFileSize(entry, entryPath) {
781
915
  try {
782
916
  return { ...entry, fileSize: fs.statSync(entryPath).size };
@@ -785,28 +919,6 @@ function attachFileSize(entry, entryPath) {
785
919
  return entry;
786
920
  }
787
921
  }
788
- function upsertWorkflowDocument(db, entryId, doc, content) {
789
- const sourceHash = computeSourceHash(content);
790
- db.prepare(`INSERT INTO workflow_documents (entry_id, schema_version, document_json, source_path, source_hash, updated_at)
791
- VALUES (?, ?, ?, ?, ?, ?)
792
- ON CONFLICT(entry_id) DO UPDATE SET
793
- schema_version = excluded.schema_version,
794
- document_json = excluded.document_json,
795
- source_path = excluded.source_path,
796
- source_hash = excluded.source_hash,
797
- updated_at = excluded.updated_at`).run(entryId, doc.schemaVersion, JSON.stringify(doc), doc.source.path, sourceHash, new Date().toISOString());
798
- }
799
- function computeSourceHash(content) {
800
- // Cheap, stable identity for the source markdown — used by future
801
- // incremental fast-paths that skip re-validation when content is unchanged.
802
- // Not security-sensitive; FNV-1a over the bytes is sufficient.
803
- let hash = 0x811c9dc5;
804
- for (let i = 0; i < content.length; i++) {
805
- hash ^= content[i];
806
- hash = Math.imul(hash, 0x01000193);
807
- }
808
- return (hash >>> 0).toString(16);
809
- }
810
922
  function buildIndexSummaryMessage(options) {
811
923
  const stashSourceLabel = options.sourcesCount === 1 ? "stash source" : "stash sources";
812
924
  const semanticDetail = getSemanticSearchLabel(options.semanticSearchMode, options.embeddingProvider, options.vecAvailable);
@@ -899,11 +1011,12 @@ function resolveIndexedFiles(dirPath, files, stash) {
899
1011
  }
900
1012
  return resolved.size > 0 ? [...resolved] : files;
901
1013
  }
902
- async function enhanceStashWithLlm(llmConfig, stash, files, summary, signal) {
1014
+ async function enhanceStashWithLlm(llmConfig, stash, files, summary, signal, db, entryKeys, reEnrich, akmConfig, onEntryDone) {
903
1015
  const { enhanceMetadata } = await import("../llm/metadata-enhance");
904
- const enhanced = [];
905
- for (const entry of stash.entries) {
906
- throwIfAborted(signal);
1016
+ const { computeBodyHash, getLlmCacheEntry, upsertLlmCacheEntry } = await import("./db.js");
1017
+ const results = await concurrentMap(stash.entries, async (entry, idx) => {
1018
+ if (signal?.aborted)
1019
+ return entry;
907
1020
  summary.attempted++;
908
1021
  try {
909
1022
  const entryFile = entry.filename
@@ -915,10 +1028,38 @@ async function enhanceStashWithLlm(llmConfig, stash, files, summary, signal) {
915
1028
  fileContent = fs.readFileSync(entryFile, "utf8");
916
1029
  }
917
1030
  catch {
918
- /* ignore unreadable files */
1031
+ warn(`Could not read file for LLM enrichment: ${entry.filename ?? entry.name}`);
919
1032
  }
920
1033
  }
921
- const improvements = await enhanceMetadata(llmConfig, entry, fileContent, signal);
1034
+ // Incremental cache: skip LLM call when file body is unchanged and
1035
+ // --re-enrich was not requested. The cache key is the entry_key
1036
+ // (stashDir:type:name) which is stable across index runs.
1037
+ const cacheBody = fileContent ?? `${entry.name}\n${entry.description ?? ""}`;
1038
+ const bodyHash = computeBodyHash(cacheBody);
1039
+ const cacheKey = entryKeys?.[idx] ?? `${entry.type}:${entry.name}`;
1040
+ if (db && !reEnrich) {
1041
+ const cached = getLlmCacheEntry(db, cacheKey, bodyHash);
1042
+ if (cached) {
1043
+ try {
1044
+ const parsed = JSON.parse(cached.resultJson);
1045
+ const updated = { ...entry };
1046
+ if (parsed.description)
1047
+ updated.description = parsed.description;
1048
+ if (parsed.searchHints?.length)
1049
+ updated.searchHints = parsed.searchHints;
1050
+ if (parsed.tags?.length)
1051
+ updated.tags = parsed.tags;
1052
+ updated.quality = "enriched";
1053
+ summary.succeeded++;
1054
+ onEntryDone?.({ entryName: entry.name, outcome: "cache-hit" });
1055
+ return updated;
1056
+ }
1057
+ catch {
1058
+ warn(`LLM enrichment cache entry corrupt for ${entry.name}; re-running enrichment`);
1059
+ }
1060
+ }
1061
+ }
1062
+ const improvements = await enhanceMetadata(llmConfig, entry, fileContent, signal, akmConfig);
922
1063
  const updated = { ...entry };
923
1064
  if (improvements.description)
924
1065
  updated.description = improvements.description;
@@ -926,19 +1067,39 @@ async function enhanceStashWithLlm(llmConfig, stash, files, summary, signal) {
926
1067
  updated.searchHints = improvements.searchHints;
927
1068
  if (improvements.tags?.length)
928
1069
  updated.tags = improvements.tags;
929
- enhanced.push(updated);
1070
+ // Mark as enriched so subsequent index runs skip re-enrichment (P2)
1071
+ updated.quality = "enriched";
1072
+ // Persist to cache so the next run can skip the LLM call when the
1073
+ // file body has not changed.
1074
+ if (db) {
1075
+ upsertLlmCacheEntry(db, cacheKey, bodyHash, JSON.stringify({
1076
+ description: improvements.description,
1077
+ searchHints: improvements.searchHints,
1078
+ tags: improvements.tags,
1079
+ }));
1080
+ }
930
1081
  summary.succeeded++;
1082
+ onEntryDone?.({ entryName: entry.name, outcome: "llm" });
1083
+ return updated;
931
1084
  }
932
1085
  catch (err) {
933
- enhanced.push(entry);
934
1086
  const msg = toErrorMessage(err);
935
1087
  // failureSamples is bounded to 3 items, so a linear scan is cheaper
936
1088
  // than maintaining a parallel Set for membership checks (#177 review).
937
1089
  if (summary.failureSamples.length < 3 && !summary.failureSamples.includes(msg)) {
938
1090
  summary.failureSamples.push(msg);
939
1091
  }
1092
+ onEntryDone?.({ entryName: entry.name, outcome: "failed" });
1093
+ return entry;
940
1094
  }
941
- }
1095
+ },
1096
+ // Default concurrency of 4 works well for cloud LLM APIs. Set
1097
+ // `llm.concurrency: 1` in config.json for local model servers.
1098
+ getDefaultLlmConcurrency(llmConfig));
1099
+ // concurrentMap returns Array<T | undefined>; filter out undefined slots
1100
+ // (which can only occur if the callback itself returned undefined, which
1101
+ // it never does above — but TypeScript needs the filter for type safety).
1102
+ const enhanced = results.map((r, i) => r ?? stash.entries[i]);
942
1103
  return { entries: enhanced };
943
1104
  }
944
1105
  /**
@@ -1018,13 +1179,13 @@ export async function lookup(ref) {
1018
1179
  const dbPath = getDbPath();
1019
1180
  const db = openExistingDatabase(dbPath);
1020
1181
  try {
1021
- // entry_key shape: `${stashDir}:${type}:${name}`. Suffix-match on
1022
- // `:type:name` so we can scope by source dir as a prefix when origin is
1023
- // supplied. Use parameterised queries throughout — names may include
1024
- // user-supplied glob characters.
1025
1182
  const escapeLike = (value) => value.replace(/\\/g, "\\\\").replace(/%/g, "\\%").replace(/_/g, "\\_");
1026
- const suffix = `:${ref.type}:${ref.name}`;
1027
- const escapedSuffix = escapeLike(suffix);
1183
+ // Canonical names strip .md for markdown assets, but users often pass
1184
+ // refs with .md (e.g. command:release.md). Normalize by trying both.
1185
+ const nameVariants = [ref.name];
1186
+ if (ref.name.endsWith(".md")) {
1187
+ nameVariants.push(ref.name.slice(0, -3));
1188
+ }
1028
1189
  const candidateDirs = (() => {
1029
1190
  if (!ref.origin)
1030
1191
  return sources.map((s) => s.path);
@@ -1035,20 +1196,24 @@ export async function lookup(ref) {
1035
1196
  })();
1036
1197
  if (candidateDirs.length === 0)
1037
1198
  return null;
1038
- for (const dir of candidateDirs) {
1039
- const escapedDir = escapeLike(dir);
1040
- const row = db
1041
- .prepare("SELECT entry_key AS entryKey, file_path AS filePath, stash_dir AS stashDir, entry_type AS type FROM entries " +
1042
- "WHERE entry_key LIKE ? ESCAPE '\\' AND entry_type = ? LIMIT 1")
1043
- .get(`${escapedDir}${escapedSuffix}`, ref.type);
1044
- if (row) {
1045
- return {
1046
- entryKey: row.entryKey,
1047
- filePath: row.filePath,
1048
- stashDir: row.stashDir,
1049
- type: row.type,
1050
- name: ref.name,
1051
- };
1199
+ for (const name of nameVariants) {
1200
+ const suffix = `:${ref.type}:${name}`;
1201
+ const escapedSuffix = escapeLike(suffix);
1202
+ for (const dir of candidateDirs) {
1203
+ const escapedDir = escapeLike(dir);
1204
+ const row = db
1205
+ .prepare("SELECT entry_key AS entryKey, file_path AS filePath, stash_dir AS stashDir, entry_type AS type FROM entries " +
1206
+ "WHERE entry_key LIKE ? ESCAPE '\\' AND entry_type = ? LIMIT 1")
1207
+ .get(`${escapedDir}${escapedSuffix}`, ref.type);
1208
+ if (row) {
1209
+ return {
1210
+ entryKey: row.entryKey,
1211
+ filePath: row.filePath,
1212
+ stashDir: row.stashDir,
1213
+ type: row.type,
1214
+ name: ref.name,
1215
+ };
1216
+ }
1052
1217
  }
1053
1218
  }
1054
1219
  return null;
@@ -1113,23 +1278,26 @@ export function recomputeUtilityScores(db) {
1113
1278
  }
1114
1279
  // Batch-load existing utility scores
1115
1280
  const existingScores = new Map();
1116
- const scoreRows = db.prepare("SELECT entry_id, utility FROM utility_scores").all();
1281
+ const scoreRows = db.prepare("SELECT entry_id, utility, last_used_at FROM utility_scores").all();
1117
1282
  for (const row of scoreRows) {
1118
- existingScores.set(row.entry_id, row.utility);
1283
+ existingScores.set(row.entry_id, { utility: row.utility, lastUsedAt: row.last_used_at ?? undefined });
1119
1284
  }
1285
+ const now = new Date().toISOString();
1120
1286
  for (const row of usageRows) {
1121
1287
  const selectRate = row.search_count > 0 ? Math.min(1, row.show_count / row.search_count) : 0;
1122
1288
  const feedbackTotal = row.positive_feedback_count + row.negative_feedback_count;
1123
1289
  const feedbackRate = feedbackTotal > 0 ? Math.max(0, row.positive_feedback_count - row.negative_feedback_count) / feedbackTotal : 0;
1124
1290
  const effectiveRate = Math.max(selectRate, feedbackRate);
1125
- const prevUtility = existingScores.get(row.entry_id) ?? 0;
1291
+ const existing = existingScores.get(row.entry_id);
1292
+ const prevUtility = existing?.utility ?? 0;
1126
1293
  const utility = prevUtility * emaDecay + effectiveRate * emaNew;
1294
+ const lastUsedAt = effectiveRate > 0.5 ? now : (existing?.lastUsedAt ?? undefined);
1127
1295
  upsertUtilityScore(db, row.entry_id, {
1128
1296
  utility,
1129
1297
  showCount: row.show_count,
1130
1298
  searchCount: row.search_count,
1131
1299
  selectRate,
1132
- lastUsedAt: row.last_used_at ?? undefined,
1300
+ lastUsedAt,
1133
1301
  });
1134
1302
  }
1135
1303
  setMeta(db, "last_utility_computed_at", new Date().toISOString());