akm-cli 0.7.5 → 0.8.0-rc.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (155):
  1. package/.github/CHANGELOG.md +1 -1
  2. package/dist/cli/parse-args.js +86 -0
  3. package/dist/cli.js +1023 -521
  4. package/dist/commands/agent-dispatch.js +107 -0
  5. package/dist/commands/agent-support.js +62 -0
  6. package/dist/commands/config-cli.js +68 -84
  7. package/dist/commands/consolidate.js +812 -0
  8. package/dist/commands/distill-promotion-policy.js +658 -0
  9. package/dist/commands/distill.js +218 -43
  10. package/dist/commands/eval-cases.js +40 -0
  11. package/dist/commands/events.js +2 -23
  12. package/dist/commands/graph.js +222 -0
  13. package/dist/commands/health.js +376 -0
  14. package/dist/commands/help/help-accept.md +9 -0
  15. package/dist/commands/help/help-improve.md +53 -0
  16. package/dist/commands/help/help-proposals.md +15 -0
  17. package/dist/commands/help/help-propose.md +17 -0
  18. package/dist/commands/help/help-reject.md +8 -0
  19. package/dist/commands/history.js +3 -30
  20. package/dist/commands/improve.js +1161 -0
  21. package/dist/commands/info.js +2 -2
  22. package/dist/commands/init.js +2 -2
  23. package/dist/commands/install-audit.js +5 -1
  24. package/dist/commands/installed-stashes.js +118 -138
  25. package/dist/commands/knowledge.js +133 -0
  26. package/dist/commands/lint/agent-linter.js +46 -0
  27. package/dist/commands/lint/base-linter.js +291 -0
  28. package/dist/commands/lint/command-linter.js +46 -0
  29. package/dist/commands/lint/default-linter.js +13 -0
  30. package/dist/commands/lint/index.js +145 -0
  31. package/dist/commands/lint/knowledge-linter.js +13 -0
  32. package/dist/commands/lint/memory-linter.js +58 -0
  33. package/dist/commands/lint/registry.js +33 -0
  34. package/dist/commands/lint/skill-linter.js +42 -0
  35. package/dist/commands/lint/task-linter.js +47 -0
  36. package/dist/commands/lint/types.js +1 -0
  37. package/dist/commands/lint/vault-key-rules.js +67 -0
  38. package/dist/commands/lint/workflow-linter.js +53 -0
  39. package/dist/commands/lint.js +1 -0
  40. package/dist/commands/proposal.js +8 -7
  41. package/dist/commands/propose.js +71 -28
  42. package/dist/commands/reflect.js +135 -35
  43. package/dist/commands/registry-search.js +2 -2
  44. package/dist/commands/remember.js +54 -0
  45. package/dist/commands/schema-repair.js +130 -0
  46. package/dist/commands/search.js +21 -5
  47. package/dist/commands/show.js +125 -20
  48. package/dist/commands/source-add.js +10 -10
  49. package/dist/commands/source-manage.js +11 -19
  50. package/dist/commands/tasks.js +385 -0
  51. package/dist/commands/url-checker.js +39 -0
  52. package/dist/commands/vault.js +168 -77
  53. package/dist/core/action-contributors.js +25 -0
  54. package/dist/core/asset-ref.js +4 -0
  55. package/dist/core/asset-registry.js +4 -16
  56. package/dist/core/asset-spec.js +10 -0
  57. package/dist/core/common.js +100 -0
  58. package/dist/core/concurrent.js +22 -0
  59. package/dist/core/config.js +233 -133
  60. package/dist/core/events.js +73 -126
  61. package/dist/core/frontmatter.js +0 -6
  62. package/dist/core/markdown.js +17 -0
  63. package/dist/core/memory-improve.js +678 -0
  64. package/dist/core/parse.js +155 -0
  65. package/dist/core/paths.js +101 -3
  66. package/dist/core/proposal-validators.js +61 -0
  67. package/dist/core/proposals.js +49 -38
  68. package/dist/core/state-db.js +731 -0
  69. package/dist/core/time.js +51 -0
  70. package/dist/core/warn.js +59 -1
  71. package/dist/indexer/db-search.js +52 -238
  72. package/dist/indexer/db.js +403 -54
  73. package/dist/indexer/ensure-index.js +61 -0
  74. package/dist/indexer/graph-boost.js +247 -94
  75. package/dist/indexer/graph-db.js +201 -0
  76. package/dist/indexer/graph-dedup.js +99 -0
  77. package/dist/indexer/graph-extraction.js +409 -76
  78. package/dist/indexer/index-context.js +10 -0
  79. package/dist/indexer/indexer.js +456 -290
  80. package/dist/indexer/llm-cache.js +47 -0
  81. package/dist/indexer/matchers.js +124 -160
  82. package/dist/indexer/memory-inference.js +63 -29
  83. package/dist/indexer/metadata-contributors.js +26 -0
  84. package/dist/indexer/metadata.js +196 -197
  85. package/dist/indexer/path-resolver.js +89 -0
  86. package/dist/indexer/ranking-contributors.js +204 -0
  87. package/dist/indexer/ranking.js +74 -0
  88. package/dist/indexer/search-hit-enrichers.js +22 -0
  89. package/dist/indexer/search-source.js +24 -9
  90. package/dist/indexer/semantic-status.js +2 -16
  91. package/dist/indexer/walker.js +25 -0
  92. package/dist/integrations/agent/builders.js +109 -0
  93. package/dist/integrations/agent/config.js +203 -3
  94. package/dist/integrations/agent/index.js +5 -2
  95. package/dist/integrations/agent/model-aliases.js +63 -0
  96. package/dist/integrations/agent/profiles.js +67 -5
  97. package/dist/integrations/agent/prompts.js +77 -72
  98. package/dist/integrations/agent/sdk-runner.js +120 -0
  99. package/dist/integrations/agent/spawn.js +93 -22
  100. package/dist/integrations/lockfile.js +10 -18
  101. package/dist/integrations/session-logs/index.js +65 -0
  102. package/dist/integrations/session-logs/providers/claude-code.js +56 -0
  103. package/dist/integrations/session-logs/providers/opencode.js +52 -0
  104. package/dist/integrations/session-logs/types.js +1 -0
  105. package/dist/llm/call-ai.js +74 -0
  106. package/dist/llm/client.js +61 -122
  107. package/dist/llm/feature-gate.js +27 -16
  108. package/dist/llm/graph-extract.js +297 -62
  109. package/dist/llm/memory-infer.js +49 -71
  110. package/dist/llm/metadata-enhance.js +39 -22
  111. package/dist/llm/prompts/graph-extract-user-prompt.md +12 -0
  112. package/dist/output/cli-hints-full.md +277 -0
  113. package/dist/output/cli-hints-short.md +65 -0
  114. package/dist/output/cli-hints.js +2 -318
  115. package/dist/output/renderers.js +220 -256
  116. package/dist/output/shapes.js +101 -93
  117. package/dist/output/text.js +256 -17
  118. package/dist/registry/providers/skills-sh.js +61 -49
  119. package/dist/registry/providers/static-index.js +44 -48
  120. package/dist/registry/resolve.js +8 -16
  121. package/dist/setup/setup.js +510 -11
  122. package/dist/sources/provider-factory.js +2 -1
  123. package/dist/sources/providers/filesystem.js +16 -23
  124. package/dist/sources/providers/git.js +4 -5
  125. package/dist/sources/providers/website.js +15 -22
  126. package/dist/sources/website-ingest.js +4 -0
  127. package/dist/tasks/backends/cron.js +200 -0
  128. package/dist/tasks/backends/exec-utils.js +25 -0
  129. package/dist/tasks/backends/index.js +32 -0
  130. package/dist/tasks/backends/launchd-template.xml +19 -0
  131. package/dist/tasks/backends/launchd.js +184 -0
  132. package/dist/tasks/backends/schtasks-template.xml +29 -0
  133. package/dist/tasks/backends/schtasks.js +212 -0
  134. package/dist/tasks/parser.js +198 -0
  135. package/dist/tasks/resolveAkmBin.js +84 -0
  136. package/dist/tasks/runner.js +432 -0
  137. package/dist/tasks/schedule.js +208 -0
  138. package/dist/tasks/schema.js +13 -0
  139. package/dist/tasks/validator.js +59 -0
  140. package/dist/wiki/index-template.md +12 -0
  141. package/dist/wiki/ingest-workflow-template.md +54 -0
  142. package/dist/wiki/log-template.md +8 -0
  143. package/dist/wiki/schema-template.md +61 -0
  144. package/dist/wiki/wiki-templates.js +12 -0
  145. package/dist/wiki/wiki.js +10 -61
  146. package/dist/workflows/authoring.js +5 -25
  147. package/dist/workflows/renderer.js +8 -3
  148. package/dist/workflows/runs.js +59 -91
  149. package/dist/workflows/validator.js +1 -1
  150. package/dist/workflows/workflow-template.md +24 -0
  151. package/docs/README.md +5 -2
  152. package/docs/migration/release-notes/0.7.0.md +1 -1
  153. package/docs/migration/release-notes/0.8.0.md +43 -0
  154. package/package.json +3 -2
  155. package/dist/templates/wiki-templates.js +0 -100
@@ -1,14 +1,14 @@
1
1
  import fs from "node:fs";
2
2
  import path from "node:path";
3
+ import { SCRIPT_EXTENSIONS } from "../core/asset-spec";
3
4
  import { isHttpUrl, resolveStashDir, toErrorMessage } from "../core/common";
5
+ import { concurrentMap } from "../core/concurrent";
4
6
  import { getDbPath } from "../core/paths";
5
7
  import { isVerbose, warn, warnVerbose } from "../core/warn";
6
8
  import { resolveIndexPassLLM } from "../llm/index-passes";
7
9
  import { takeWorkflowDocument } from "../workflows/document-cache";
8
- import { closeDatabase, deleteEntriesByDir, deleteEntriesByStashDir, deleteIndexDirStatesByStashDir, getEmbeddingCount, getEntriesByDir, getEntryCount, getIndexDirState, getMeta, isVecAvailable, openDatabase, openExistingDatabase, rebuildFts, setMeta, upsertEmbedding, upsertEntry, upsertIndexDirState, upsertUtilityScore, warnIfVecMissing, } from "./db";
9
- import { runGraphExtractionPass } from "./graph-extraction";
10
- import { runMemoryInferencePass } from "./memory-inference";
11
- import { applyCuratedFrontmatter, applyWikiFrontmatter, generateMetadataFlat, isWorkflowSkipWarning, loadStashFile, shouldIndexStashFile, } from "./metadata";
10
+ import { clearStaleCacheEntries, closeDatabase, deleteEntriesByDir, deleteEntriesByStashDir, deleteIndexDirStatesByStashDir, getAllEntriesForEmbedding, getEmbeddingCount, getEntriesByDir, getEntryCount, getIndexDirState, getMeta, isVecAvailable, openDatabase, openExistingDatabase, rebuildFts, relinkUsageEvents, setMeta, upsertEmbedding, upsertEntry, upsertIndexDirState, upsertUtilityScore, upsertWorkflowDocument, warnIfVecMissing, } from "./db";
11
+ import { applyCuratedFrontmatter, applyWikiFrontmatter, generateMetadataFlat, isEnrichmentComplete, isWorkflowSkipWarning, loadStashFile, shouldIndexStashFile, } from "./metadata";
12
12
  import { buildSearchText } from "./search-fields";
13
13
  import { classifySemanticFailure, clearSemanticStatus, deriveSemanticProviderFingerprint, writeSemanticStatus, } from "./semantic-status";
14
14
  import { ensureUsageEventsSchema, purgeOldUsageEvents } from "./usage-events";
@@ -18,19 +18,196 @@ function throwIfAborted(signal) {
18
18
  throw signal.reason instanceof Error ? signal.reason : new Error("index interrupted");
19
19
  }
20
20
  }
21
+ function getDefaultLlmConcurrency(llmConfig) {
22
+ if (typeof llmConfig?.concurrency === "number")
23
+ return llmConfig.concurrency;
24
+ if (!llmConfig?.endpoint)
25
+ return 1;
26
+ try {
27
+ const url = new URL(llmConfig.endpoint);
28
+ const host = url.hostname.toLowerCase();
29
+ if (host === "localhost" || host === "127.0.0.1" || host === "::1" || host.endsWith(".localhost"))
30
+ return 1;
31
+ }
32
+ catch {
33
+ return 1;
34
+ }
35
+ return 4;
36
+ }
37
+ // ── Phase functions ──────────────────────────────────────────────────────────
38
+ /**
39
+ * Source cache phase: ensure git stash caches are up to date and purge orphaned
40
+ * entries from removed sources (incremental only).
41
+ */
42
+ async function runSourceCachePhase(ctx) {
43
+ const { db, config, sourceDirs, isIncremental, full } = ctx;
44
+ if (isIncremental && !full) {
45
+ // Purge entries from stash dirs that have been removed since the last run
46
+ // (e.g. after `akm remove`) so orphaned entries don't linger.
47
+ const prevStashDirsJson = getMeta(db, "stashDirs");
48
+ if (prevStashDirsJson) {
49
+ let prevStashDirs = [];
50
+ try {
51
+ const parsed = JSON.parse(prevStashDirsJson);
52
+ if (Array.isArray(parsed)) {
53
+ prevStashDirs = parsed.filter((d) => typeof d === "string");
54
+ }
55
+ else {
56
+ warn("index_meta stashDirs value is not an array — treating as empty");
57
+ }
58
+ }
59
+ catch {
60
+ warn("index_meta stashDirs value is corrupt JSON — treating as empty");
61
+ }
62
+ const currentSet = new Set(sourceDirs);
63
+ for (const dir of prevStashDirs) {
64
+ if (!currentSet.has(dir)) {
65
+ ctx.hadRemovedSources = true;
66
+ deleteEntriesByStashDir(db, dir);
67
+ deleteIndexDirStatesByStashDir(db, dir);
68
+ }
69
+ }
70
+ }
71
+ }
72
+ // Source caches are hydrated before akmIndex() calls this phase; nothing
73
+ // further to do here. The flag is exposed on ctx for runWalkPhase().
74
+ void config;
75
+ }
76
+ /**
77
+ * Walk phase: scan the filesystem, generate metadata, and persist entries to
78
+ * the database. Also kicks off LLM enrichment for directories that need it.
79
+ *
80
+ * Writes `ctx.scannedDirs`, `ctx.skippedDirs`, `ctx.generatedCount`,
81
+ * `ctx.walkWarnings`, and `ctx.dirsNeedingLlm` for downstream phases.
82
+ */
83
+ async function runWalkPhase(ctx) {
84
+ const { db, sources, isIncremental, builtAtMs, hadRemovedSources, full, reEnrich, signal, onProgress, config } = ctx;
85
+ throwIfAborted(signal);
86
+ ctx.timing.tWalkStart = Date.now();
87
+ const doFullDelete = full || !isIncremental;
88
+ const { scannedDirs, skippedDirs, generatedCount, dirsNeedingLlm, warnings } = await indexEntries(db, sources, isIncremental, builtAtMs, hadRemovedSources, doFullDelete, onProgress);
89
+ ctx.scannedDirs = scannedDirs;
90
+ ctx.skippedDirs = skippedDirs;
91
+ ctx.generatedCount = generatedCount;
92
+ ctx.walkWarnings = warnings;
93
+ ctx.dirsNeedingLlm = dirsNeedingLlm;
94
+ onProgress({
95
+ phase: "scan",
96
+ message: `Scanned ${scannedDirs} ${scannedDirs === 1 ? "directory" : "directories"} and skipped ${skippedDirs}.`,
97
+ });
98
+ // Workflow validation noise gate (issue #273): suppress per-spec stderr lines
99
+ // at default verbosity and emit a single summary instead.
100
+ // In verbose mode the per-spec lines are already printed by
101
+ // buildMetadataSkipWarning at generation time — no second pass needed here.
102
+ if (!isVerbose()) {
103
+ const workflowSkipWarnings = warnings.filter(isWorkflowSkipWarning);
104
+ const skippedWorkflowCount = workflowSkipWarnings.length;
105
+ if (skippedWorkflowCount > 0) {
106
+ const noun = skippedWorkflowCount === 1 ? "workflow spec" : "workflow specs";
107
+ warn(`${skippedWorkflowCount} ${noun} skipped due to validation errors; ` +
108
+ "rerun with --verbose (or AKM_VERBOSE=1) to see details.");
109
+ }
110
+ }
111
+ ctx.timing.tWalkEnd = Date.now();
112
+ throwIfAborted(signal);
113
+ // LLM enrichment for directories that need it
114
+ await enhanceDirsWithLlm(db, config, dirsNeedingLlm, onProgress, signal, true, reEnrich);
115
+ onProgress({
116
+ phase: "llm",
117
+ message: resolveIndexPassLLM("enrichment", config)
118
+ ? `LLM enhancement reviewed ${dirsNeedingLlm.length} ${dirsNeedingLlm.length === 1 ? "directory" : "directories"}.`
119
+ : "LLM enhancement disabled.",
120
+ });
121
+ ctx.timing.tLlmEnd = Date.now();
122
+ }
123
+ /**
124
+ * Embedding phase: generate and store vector embeddings for all unembedded
125
+ * entries. Writes `ctx.embeddingResult` for the finalize phase.
126
+ */
127
+ async function runEmbeddingPhase(ctx) {
128
+ const { db, config, signal, onProgress } = ctx;
129
+ throwIfAborted(signal);
130
+ ctx.embeddingResult = await generateEmbeddingsForDb(db, config, onProgress);
131
+ ctx.timing.tEmbedEnd = Date.now();
132
+ }
133
+ /**
134
+ * Finalize phase: rebuild FTS, re-link usage events, recompute utility scores,
135
+ * regenerate wiki indexes, update index metadata, and emit the verify event.
136
+ */
137
+ async function runFinalizePhase(ctx) {
138
+ const { db, config, sources, sourceDirs, isIncremental, stashDir, signal, onProgress } = ctx;
139
+ // Rebuild FTS after all inserts. Use incremental mode when this whole
140
+ // index run is incremental — only entries touched by `upsertEntry`
141
+ // since the last rebuild are re-indexed.
142
+ rebuildFts(db, { incremental: isIncremental });
143
+ onProgress({
144
+ phase: "fts",
145
+ message: isIncremental ? "Rebuilt full-text search index (dirty rows only)." : "Rebuilt full-text search index.",
146
+ });
147
+ ctx.timing.tFtsEnd = Date.now();
148
+ // Re-link detached usage_events and recompute utility scores.
149
+ relinkUsageEvents(db);
150
+ recomputeUtilityScores(db);
151
+ // Purge LLM cache entries for assets that no longer exist in the index.
152
+ try {
153
+ clearStaleCacheEntries(db);
154
+ }
155
+ catch {
156
+ /* ignore */
157
+ }
158
+ // Regenerate each wiki's index.md from its pages' frontmatter. Best-effort.
159
+ try {
160
+ const { regenerateAllWikiIndexes } = await import("../wiki/wiki.js");
161
+ regenerateAllWikiIndexes(stashDir);
162
+ }
163
+ catch {
164
+ /* best-effort */
165
+ }
166
+ throwIfAborted(signal);
167
+ // Update index metadata
168
+ const embeddingResult = ctx.embeddingResult ?? { success: false };
169
+ setMeta(db, "builtAt", new Date().toISOString());
170
+ setMeta(db, "stashDir", stashDir);
171
+ setMeta(db, "stashDirs", JSON.stringify(sourceDirs));
172
+ setMeta(db, "hasEmbeddings", embeddingResult.success ? "1" : "0");
173
+ warnIfVecMissing(db);
174
+ const totalEntries = getEntryCount(db);
175
+ const verification = verifyIndexState(db, config, totalEntries, embeddingResult);
176
+ if (config.semanticSearchMode === "off") {
177
+ clearSemanticStatus();
178
+ }
179
+ else {
180
+ writeSemanticStatus({
181
+ status: verification.semanticStatus === "disabled" ? "pending" : verification.semanticStatus,
182
+ ...(embeddingResult.reason ? { reason: embeddingResult.reason } : {}),
183
+ ...(embeddingResult.message ? { message: embeddingResult.message } : {}),
184
+ providerFingerprint: deriveSemanticProviderFingerprint(config.embedding),
185
+ lastCheckedAt: new Date().toISOString(),
186
+ entryCount: verification.entryCount,
187
+ embeddingCount: verification.embeddingCount,
188
+ });
189
+ }
190
+ onProgress({ phase: "verify", message: verification.message });
191
+ // Store verification result and totalEntries on ctx for the caller to use
192
+ ctx._verification = verification;
193
+ ctx._totalEntries = totalEntries;
194
+ // suppress unused warning — sources was previously used inline
195
+ void sources;
196
+ }
21
197
  // ── Indexer ──────────────────────────────────────────────────────────────────
22
198
  export async function akmIndex(options) {
23
199
  const stashDir = options?.stashDir || resolveStashDir();
24
200
  const onProgress = options?.onProgress ?? (() => { });
25
201
  const signal = options?.signal;
26
- const enrich = options?.enrich === true;
202
+ const reEnrich = options?.reEnrich === true;
203
+ const full = options?.full === true;
27
204
  // Load config and resolve all stash sources
28
205
  const { loadConfig } = await import("../core/config.js");
29
206
  const config = loadConfig();
30
207
  // Ensure git stash caches are extracted before resolving stash dirs,
31
208
  // so their content directories exist on disk for the walker to discover.
32
209
  const { ensureSourceCaches, resolveSourceEntries } = await import("./search-source.js");
33
- await ensureSourceCaches(config, { force: options?.full === true });
210
+ await ensureSourceCaches(config, { force: full });
34
211
  const allSourceEntries = resolveSourceEntries(stashDir, config);
35
212
  const allSourceDirs = allSourceEntries.map((s) => s.path);
36
213
  const t0 = Date.now();
@@ -39,11 +216,41 @@ export async function akmIndex(options) {
39
216
  const embeddingDim = config.embedding?.dimension;
40
217
  const db = openDatabase(dbPath, embeddingDim ? { embeddingDim } : undefined);
41
218
  try {
42
- // Check if we should do incremental
219
+ // Determine incremental vs full mode
43
220
  const prevStashDir = getMeta(db, "stashDir");
44
221
  const prevBuiltAt = getMeta(db, "builtAt");
45
- const isIncremental = !options?.full && prevStashDir === stashDir && !!prevBuiltAt;
222
+ const isIncremental = !full && prevStashDir === stashDir && !!prevBuiltAt;
46
223
  const builtAtMs = isIncremental && prevBuiltAt ? new Date(prevBuiltAt).getTime() : 0;
224
+ // Assemble the run context
225
+ const ctx = {
226
+ db,
227
+ config,
228
+ sources: allSourceEntries,
229
+ sourceDirs: allSourceDirs,
230
+ full,
231
+ reEnrich,
232
+ stashDir,
233
+ onProgress,
234
+ signal,
235
+ timing: {
236
+ t0,
237
+ tWalkStart: t0,
238
+ tWalkEnd: t0,
239
+ tLlmEnd: t0,
240
+ tFtsEnd: t0,
241
+ tEmbedEnd: t0,
242
+ },
243
+ isIncremental,
244
+ builtAtMs,
245
+ hadRemovedSources: false,
246
+ scannedDirs: 0,
247
+ skippedDirs: 0,
248
+ generatedCount: 0,
249
+ walkWarnings: [],
250
+ dirsNeedingLlm: [],
251
+ embeddingResult: null,
252
+ graphExtractionResult: null,
253
+ };
47
254
  onProgress({
48
255
  phase: "summary",
49
256
  message: buildIndexSummaryMessage({
@@ -51,230 +258,34 @@ export async function akmIndex(options) {
51
258
  sourcesCount: allSourceDirs.length,
52
259
  semanticSearchMode: config.semanticSearchMode,
53
260
  embeddingProvider: getEmbeddingProvider(config.embedding),
54
- llmEnabled: enrich && !!resolveIndexPassLLM("enrichment", config),
261
+ llmEnabled: !!resolveIndexPassLLM("enrichment", config),
55
262
  vecAvailable: isVecAvailable(db),
56
263
  }),
57
264
  });
58
- let hadRemovedSources = false;
59
- if (options?.full || !isIncremental) {
60
- // The delete is now merged into the insert transaction inside
61
- // indexEntries() so that a reader never sees an empty database between
62
- // the wipe and the re-inserts. The doFullDelete flag signals this path.
63
- }
64
- else {
65
- // Incremental: purge entries from stash dirs that have been removed
66
- // (e.g. after `akm remove`) so orphaned entries don't linger.
67
- const prevStashDirsJson = getMeta(db, "stashDirs");
68
- if (prevStashDirsJson) {
69
- let prevStashDirs = [];
70
- try {
71
- const parsed = JSON.parse(prevStashDirsJson);
72
- if (Array.isArray(parsed)) {
73
- prevStashDirs = parsed.filter((d) => typeof d === "string");
74
- }
75
- else {
76
- warn("index_meta stashDirs value is not an array — treating as empty");
77
- }
78
- }
79
- catch {
80
- warn("index_meta stashDirs value is corrupt JSON — treating as empty");
81
- }
82
- const currentSet = new Set(allSourceDirs);
83
- for (const dir of prevStashDirs) {
84
- if (!currentSet.has(dir)) {
85
- hadRemovedSources = true;
86
- deleteEntriesByStashDir(db, dir);
87
- deleteIndexDirStatesByStashDir(db, dir);
88
- }
89
- }
90
- }
91
- }
92
- throwIfAborted(signal);
93
- // Memory inference pass (#201). Runs before the walk so any derived-memory
94
- // children that get written are picked up by the walker in this same run
95
- // and don't have to wait for the next `akm index`. Gated entirely by
96
- // `resolveIndexPassLLM("memory", config)` — when the user has no
97
- // `akm.llm` block or has set `index.memory.llm = false`, this is a no-op
98
- // and existing inferred children are left in place.
99
- if (enrich) {
100
- try {
101
- const inferenceResult = await runMemoryInferencePass(config, allSourceEntries, signal);
102
- if (inferenceResult.writtenFacts > 0 || inferenceResult.skippedNoFacts > 0) {
103
- onProgress({
104
- phase: "llm",
105
- message: `Memory inference reviewed ${inferenceResult.considered} ` +
106
- `${inferenceResult.considered === 1 ? "memory" : "memories"}; wrote ` +
107
- `${inferenceResult.writtenFacts} derived memor${inferenceResult.writtenFacts === 1 ? "y" : "ies"} ` +
108
- `from ${inferenceResult.splitParents} parent memor${inferenceResult.splitParents === 1 ? "y" : "ies"}` +
109
- (inferenceResult.skippedNoFacts > 0
110
- ? `; skipped ${inferenceResult.skippedNoFacts} ${inferenceResult.skippedNoFacts === 1 ? "memory" : "memories"} with unusable LLM responses`
111
- : "") +
112
- ".",
113
- });
114
- }
115
- if (inferenceResult.skippedNoFacts > 0) {
116
- warn(`Memory inference skipped ${inferenceResult.skippedNoFacts} ` +
117
- `${inferenceResult.skippedNoFacts === 1 ? "memory" : "memories"} because the LLM returned empty, invalid, or incomplete derived payloads. ` +
118
- "Check your model and token budget.");
119
- }
120
- }
121
- catch (err) {
122
- warn(`Memory inference pass aborted: ${err instanceof Error ? err.message : String(err)}`);
123
- }
124
- }
125
- else {
126
- onProgress({
127
- phase: "llm",
128
- message: "LLM passes disabled; rerun with --enrich to enable inference and enrichment.",
129
- });
130
- }
131
- // Graph extraction pass (#207). Runs after memory inference so any
132
- // atomic-fact children that just got written are visible to the graph
133
- // walk. Persists `<stashRoot>/.akm/graph.json` — an indexer artifact,
134
- // NOT a user-visible asset, so it is not routed through
135
- // writeAssetToSource. The artifact feeds the existing FTS5+boosts
136
- // pipeline as a single boost component (see graph-boost.ts); there is
137
- // no parallel scoring track. Disabled when either gate (the locked
138
- // `llm.features.graph_extraction` feature flag or the per-pass
139
- // `index.graph.llm` toggle) is off; the existing graph file is
140
- // preserved on disk in that case.
141
- if (enrich) {
142
- try {
143
- const graphResult = await runGraphExtractionPass(config, allSourceEntries, signal);
144
- if (graphResult.written) {
145
- onProgress({
146
- phase: "llm",
147
- message: `Graph extraction wrote ${graphResult.totalEntities} entit${graphResult.totalEntities === 1 ? "y" : "ies"} and ${graphResult.totalRelations} relation${graphResult.totalRelations === 1 ? "" : "s"} from ${graphResult.extracted} file${graphResult.extracted === 1 ? "" : "s"}.`,
148
- });
149
- }
150
- }
151
- catch (err) {
152
- warn(`Graph extraction pass aborted: ${err instanceof Error ? err.message : String(err)}`);
153
- }
154
- }
155
- throwIfAborted(signal);
156
- const tWalkStart = Date.now();
157
- // Walk stash dirs and index entries.
158
- // doFullDelete=true merges the wipe into the same transaction as the
159
- // inserts so readers never see an empty database mid-rebuild.
160
- const doFullDelete = options?.full || !isIncremental;
161
- const { scannedDirs, skippedDirs, generatedCount, dirsNeedingLlm, warnings } = await indexEntries(db, allSourceEntries, isIncremental, builtAtMs, hadRemovedSources, doFullDelete, onProgress);
162
- onProgress({
163
- phase: "scan",
164
- message: `Scanned ${scannedDirs} ${scannedDirs === 1 ? "directory" : "directories"} and skipped ${skippedDirs}.`,
165
- });
166
- // Workflow validation noise gate (issue #273): per-spec stderr lines from
167
- // `buildMetadataSkipWarning` are suppressed at default verbosity in
168
- // `metadata.ts`. Replace them with a single summary line so operators
169
- // running a cold-start search against a fresh registry-cloned source
170
- // don't get the impression akm is broken. Verbose mode keeps the
171
- // per-spec output instead of (not in addition to) the summary.
172
- if (!isVerbose()) {
173
- const skippedWorkflowCount = warnings.filter(isWorkflowSkipWarning).length;
174
- if (skippedWorkflowCount > 0) {
175
- const noun = skippedWorkflowCount === 1 ? "workflow spec" : "workflow specs";
176
- warn(`${skippedWorkflowCount} ${noun} skipped due to validation errors; ` +
177
- "rerun with --verbose (or AKM_VERBOSE=1) to see details.");
178
- }
179
- }
180
- const tWalkEnd = Date.now();
181
- throwIfAborted(signal);
182
- // Enhance entries with LLM if configured
183
- await enhanceDirsWithLlm(db, config, dirsNeedingLlm, signal, enrich);
184
- onProgress({
185
- phase: "llm",
186
- message: enrich && resolveIndexPassLLM("enrichment", config)
187
- ? `LLM enhancement reviewed ${dirsNeedingLlm.length} ${dirsNeedingLlm.length === 1 ? "directory" : "directories"}.`
188
- : "LLM enhancement disabled.",
189
- });
190
- const tLlmEnd = Date.now();
191
- throwIfAborted(signal);
192
- // Rebuild FTS after all inserts. Use incremental mode when this whole
193
- // index run is incremental — only entries touched by `upsertEntry`
194
- // since the last rebuild are re-indexed, instead of re-scanning every
195
- // row on every `akm index` invocation.
196
- rebuildFts(db, { incremental: isIncremental });
197
- onProgress({
198
- phase: "fts",
199
- message: isIncremental ? "Rebuilt full-text search index (dirty rows only)." : "Rebuilt full-text search index.",
200
- });
201
- const tFtsEnd = Date.now();
202
- // Re-link detached usage_events to their new entry_ids via entry_ref.
203
- // entry_ref is "type:name" (e.g., "skill:code-review"), entry_key is "stashDir:type:name".
204
- // Use substr to extract the "type:name" suffix from entry_key for exact comparison
205
- // (avoids LIKE which would require escaping % and _ in user-facing names).
206
- try {
207
- db.exec(`
208
- UPDATE usage_events SET entry_id = (
209
- SELECT e.id FROM entries e
210
- WHERE substr(e.entry_key, length(e.entry_key) - length(usage_events.entry_ref)) = ':' || usage_events.entry_ref
211
- LIMIT 1
212
- )
213
- WHERE entry_id IS NULL AND entry_ref IS NOT NULL
214
- `);
215
- }
216
- catch {
217
- /* ignore if table doesn't exist yet */
218
- }
219
- // Recompute utility scores from usage_events after FTS rebuild
220
- recomputeUtilityScores(db);
221
- // Regenerate each wiki's index.md from its pages' frontmatter. Best-effort
222
- // — errors are caught inside regenerateAllWikiIndexes and never block the
223
- // index run. The primary stash is the only target: additional sources
224
- // are read-only caches, and regenerating their indexes would mutate
225
- // cache content.
226
- try {
227
- const { regenerateAllWikiIndexes } = await import("../wiki/wiki.js");
228
- regenerateAllWikiIndexes(stashDir);
229
- }
230
- catch {
231
- /* best-effort */
232
- }
233
- throwIfAborted(signal);
234
- // Generate embeddings if semantic search is enabled
235
- const embeddingResult = await generateEmbeddingsForDb(db, config, onProgress);
236
- const tEmbedEnd = Date.now();
237
- // Update metadata
238
- setMeta(db, "builtAt", new Date().toISOString());
239
- setMeta(db, "stashDir", stashDir);
240
- setMeta(db, "stashDirs", JSON.stringify(allSourceDirs));
241
- setMeta(db, "hasEmbeddings", embeddingResult.success ? "1" : "0");
242
- const totalEntries = getEntryCount(db);
243
- // Warn on every index run if using JS fallback with many entries
244
- warnIfVecMissing(db);
245
- const tEnd = Date.now();
246
- const verification = verifyIndexState(db, config, totalEntries, embeddingResult);
247
- if (config.semanticSearchMode === "off") {
248
- clearSemanticStatus();
249
- }
250
- else {
251
- writeSemanticStatus({
252
- status: verification.semanticStatus === "disabled" ? "pending" : verification.semanticStatus,
253
- ...(embeddingResult.reason ? { reason: embeddingResult.reason } : {}),
254
- ...(embeddingResult.message ? { message: embeddingResult.message } : {}),
255
- providerFingerprint: deriveSemanticProviderFingerprint(config.embedding),
256
- lastCheckedAt: new Date().toISOString(),
257
- entryCount: verification.entryCount,
258
- embeddingCount: verification.embeddingCount,
259
- });
260
- }
261
- onProgress({ phase: "verify", message: verification.message });
265
+ // ── Phase sequence ───────────────────────────────────────────────────────
266
+ await runSourceCachePhase(ctx);
267
+ await runWalkPhase(ctx);
268
+ await runEmbeddingPhase(ctx);
269
+ await runFinalizePhase(ctx);
270
+ // ────────────────────────────────────────────────────────────────────────
271
+ const { _verification: verification, _totalEntries: totalEntries } = ctx;
272
+ const { timing } = ctx;
262
273
  return {
263
274
  stashDir,
264
275
  totalEntries,
265
- generatedMetadata: generatedCount,
276
+ generatedMetadata: ctx.generatedCount,
266
277
  indexPath: dbPath,
267
278
  mode: isIncremental ? "incremental" : "full",
268
- directoriesScanned: scannedDirs,
269
- directoriesSkipped: skippedDirs,
270
- ...(warnings.length > 0 ? { warnings } : {}),
279
+ directoriesScanned: ctx.scannedDirs,
280
+ directoriesSkipped: ctx.skippedDirs,
281
+ ...(ctx.walkWarnings.length > 0 ? { warnings: ctx.walkWarnings } : {}),
271
282
  verification,
272
283
  timing: {
273
- totalMs: tEnd - t0,
274
- walkMs: tWalkEnd - tWalkStart,
275
- llmMs: tLlmEnd - tWalkEnd,
276
- embedMs: tEmbedEnd - tFtsEnd,
277
- ftsMs: tFtsEnd - tLlmEnd,
284
+ totalMs: Date.now() - timing.t0,
285
+ walkMs: timing.tWalkEnd - timing.tWalkStart,
286
+ llmMs: timing.tLlmEnd - timing.tWalkEnd,
287
+ embedMs: timing.tEmbedEnd - timing.tFtsEnd,
288
+ ftsMs: timing.tFtsEnd - timing.tLlmEnd,
278
289
  },
279
290
  };
280
291
  }
@@ -512,8 +523,10 @@ async function indexEntries(db, allSourceEntries, isIncremental, builtAtMs, hadR
512
523
  if (stash) {
513
524
  for (const entry of stash.entries) {
514
525
  const entryPath = entry.filename ? path.join(dirPath, entry.filename) : null;
515
- if (!entryPath)
516
- continue; // skip unresolvable entries
526
+ if (!entryPath) {
527
+ warn(`Skipping entry with no resolvable path in ${dirPath}`);
528
+ continue;
529
+ }
517
530
  if (!shouldIndexStashFile(currentStashDir, entryPath))
518
531
  continue;
519
532
  // Skip if a higher-priority stash root already indexed this asset
@@ -535,7 +548,9 @@ async function indexEntries(db, allSourceEntries, isIncremental, builtAtMs, hadR
535
548
  }
536
549
  }
537
550
  }
538
- // Collect dirs needing LLM enhancement during the first walk
551
+ // Collect dirs needing LLM enhancement during the first walk.
552
+ // Only dirs with "generated" entries need enrichment (unless reEnrich
553
+ // forces re-processing of already-enriched entries).
539
554
  if (stash.entries.some((e) => e.quality === "generated")) {
540
555
  dirsNeedingLlm.push({ dirPath, files, currentStashDir, stash });
541
556
  }
@@ -553,7 +568,20 @@ async function indexEntries(db, allSourceEntries, isIncremental, builtAtMs, hadR
553
568
  reason: persistedReason,
554
569
  });
555
570
  if (persistedRows === 0) {
556
- warnVerbose(`[index] zero-row ${dirPath}: ${persistedReason}`);
571
+ // Warn only when the dir had files that *could* produce entries (.md or
572
+ // known script extensions). Dirs with only non-indexable types (.json,
573
+ // .yaml, .conf, .env, .gitkeep) or deduped-only rows are expected and
574
+ // not actionable at normal log level.
575
+ const hasIndexableExtension = files.some((f) => {
576
+ const ext = path.extname(f).toLowerCase();
577
+ return ext === ".md" || SCRIPT_EXTENSIONS.has(ext);
578
+ });
579
+ if (persistedReason !== "deduped-zero-row" && hasIndexableExtension) {
580
+ warn(`[index] zero-row ${dirPath}: ${persistedReason}`);
581
+ }
582
+ else {
583
+ warnVerbose(`[index] zero-row ${dirPath}: ${persistedReason}`);
584
+ }
557
585
  }
558
586
  }
559
587
  });
@@ -652,9 +680,7 @@ function inferZeroRowReason(stash, priorReason, warnings, dirPath, dedupedRows)
652
680
  return "empty-generated-set";
653
681
  return `zero-row:${priorReason?.kind ?? "unknown"}`;
654
682
  }
655
- async function enhanceDirsWithLlm(db, config, dirsNeedingLlm, signal, enrich = false) {
656
- if (!enrich)
657
- return;
683
+ async function enhanceDirsWithLlm(db, config, dirsNeedingLlm, onProgress, signal, _enrich = false, reEnrich = false) {
658
684
  // Resolve per-pass LLM config via the unified shim. Returns undefined when
659
685
  // either no `akm.llm` is configured or the user opted this pass out via
660
686
  // `index.enrichment.llm = false`. (#208)
@@ -665,24 +691,142 @@ async function enhanceDirsWithLlm(db, config, dirsNeedingLlm, signal, enrich = f
665
691
  // as a single visible warning instead of silently degrading every entry
666
692
  // and leaving the user wondering why nothing got enhanced.
667
693
  const summary = { attempted: 0, succeeded: 0, failureSamples: [] };
668
- for (const { dirPath, files, currentStashDir, stash: originalStash } of dirsNeedingLlm) {
669
- throwIfAborted(signal);
670
- // Only enhance generated entries; user-provided overrides should not be overwritten
671
- const generatedEntries = originalStash.entries.filter((e) => e.quality === "generated");
672
- if (generatedEntries.length === 0)
673
- continue;
674
- const generatedStash = { entries: generatedEntries };
675
- const enhanced = await enhanceStashWithLlm(llmConfig, generatedStash, files, summary, signal);
676
- // Re-upsert the enhanced entries in a single transaction so a crash
677
- // cannot leave half the entries updated and the rest stale.
678
- db.transaction(() => {
679
- for (const entry of enhanced.entries) {
680
- const entryPath = entry.filename ? path.join(dirPath, entry.filename) : files[0] || dirPath;
681
- const entryKey = `${currentStashDir}:${entry.type}:${entry.name}`;
682
- const searchText = buildSearchText(entry);
683
- upsertEntry(db, entryKey, dirPath, entryPath, currentStashDir, attachFileSize(entry, entryPath), searchText);
684
- }
685
- })();
694
+ let completedDirs = 0;
695
+ let completedEntries = 0;
696
+ const totalDirs = dirsNeedingLlm.length;
697
+ const totalEntries = dirsNeedingLlm.reduce((sum, { stash }) => {
698
+ const entriesToEnhance = stash.entries.filter((e) => {
699
+ if (e.quality !== "generated" && !(reEnrich && e.quality === "enriched"))
700
+ return false;
701
+ if (!reEnrich && isEnrichmentComplete(e))
702
+ return false;
703
+ return true;
704
+ });
705
+ return sum + entriesToEnhance.length;
706
+ }, 0);
707
+ // P3 wall-clock budget for the enrichment pass. Defaults to llm.timeoutMs
708
+ // (or 10 minutes if not set). Users can extend this via llm.timeoutMs in
709
+ // config no separate knob needed.
710
+ const budgetMs = (llmConfig.timeoutMs ?? 10 * 60 * 1000) * Math.max(totalEntries, 1);
711
+ const enrichDeadline = AbortSignal.timeout(budgetMs);
712
+ let deadlineHit = false;
713
+ const enrichSignal = (() => {
714
+ if (!signal)
715
+ return enrichDeadline;
716
+ // Combine: abort when either fires.
717
+ const controller = new AbortController();
718
+ const onAbort = () => controller.abort();
719
+ signal.addEventListener("abort", onAbort, { once: true });
720
+ enrichDeadline.addEventListener("abort", () => {
721
+ deadlineHit = true;
722
+ controller.abort();
723
+ }, { once: true });
724
+ return controller.signal;
725
+ })();
726
+ if (totalEntries > 0) {
727
+ onProgress?.({
728
+ phase: "llm",
729
+ message: `LLM enhancement starting for ${totalEntries} entr${totalEntries === 1 ? "y" : "ies"} ` +
730
+ `across ${totalDirs} director${totalDirs === 1 ? "y" : "ies"} (concurrency ${getDefaultLlmConcurrency(llmConfig)}).`,
731
+ processed: 0,
732
+ total: totalEntries,
733
+ });
734
+ }
735
+ let currentDirLabel;
736
+ let lastProgressAt = Date.now();
737
+ let heartbeatTimer;
738
+ if (totalEntries > 0 && onProgress) {
739
+ heartbeatTimer = setInterval(() => {
740
+ if (Date.now() - lastProgressAt < 15000)
741
+ return;
742
+ onProgress({
743
+ phase: "llm",
744
+ message: `Still enriching ${completedEntries}/${totalEntries} entr${totalEntries === 1 ? "y" : "ies"}` +
745
+ (currentDirLabel ? `; waiting on ${currentDirLabel}` : "") +
746
+ ".",
747
+ processed: completedEntries,
748
+ total: totalEntries,
749
+ });
750
+ lastProgressAt = Date.now();
751
+ }, 15000);
752
+ }
753
+ try {
754
+ await concurrentMap(dirsNeedingLlm, async ({ dirPath, files, currentStashDir, stash: originalStash }) => {
755
+ if (enrichSignal.aborted)
756
+ return undefined;
757
+ // Only enhance generated entries (or all when reEnrich=true);
758
+ // user-provided overrides should not be overwritten.
759
+ // Skip entries that are already fully enriched (description + tags + searchHints)
760
+ // unless the caller explicitly requests re-enrichment via reEnrich=true.
761
+ const entriesToEnhance = originalStash.entries.filter((e) => {
762
+ if (e.quality !== "generated" && !(reEnrich && e.quality === "enriched"))
763
+ return false;
764
+ if (!reEnrich && isEnrichmentComplete(e)) {
765
+ warnVerbose(`[akm] skipping LLM enrichment for "${e.name}" — entry already complete`);
766
+ return false;
767
+ }
768
+ return true;
769
+ });
770
+ if (entriesToEnhance.length === 0)
771
+ return undefined;
772
+ currentDirLabel = path.relative(currentStashDir, dirPath) || ".";
773
+ onProgress?.({
774
+ phase: "llm",
775
+ message: `Enhancing ${currentDirLabel} ` +
776
+ `(${entriesToEnhance.length} entr${entriesToEnhance.length === 1 ? "y" : "ies"}).`,
777
+ processed: completedEntries,
778
+ total: totalEntries,
779
+ });
780
+ lastProgressAt = Date.now();
781
+ const targetStash = { entries: entriesToEnhance };
782
+ const entryKeys = entriesToEnhance.map((e) => `${currentStashDir}:${e.type}:${e.name}`);
783
+ const enhanced = await enhanceStashWithLlm(llmConfig, targetStash, files, summary, enrichSignal, db, entryKeys, reEnrich, config, (event) => {
784
+ completedEntries++;
785
+ lastProgressAt = Date.now();
786
+ onProgress?.({
787
+ phase: "llm",
788
+ message: `Enhanced ${completedEntries}/${totalEntries} entr${totalEntries === 1 ? "y" : "ies"}; ` +
789
+ `${completedDirs}/${totalDirs} director${totalDirs === 1 ? "y" : "ies"} complete` +
790
+ (event.entryName ? `; current ${event.entryName}` : "") +
791
+ (currentDirLabel ? ` in ${currentDirLabel}` : "") +
792
+ (event.outcome === "cache-hit" ? " (cache hit)" : ""),
793
+ processed: completedEntries,
794
+ total: totalEntries,
795
+ });
796
+ });
797
+ // Re-upsert the enhanced entries in a single transaction so a crash
798
+ // cannot leave half the entries updated and the rest stale.
799
+ db.transaction(() => {
800
+ for (const entry of enhanced.entries) {
801
+ const entryPath = entry.filename ? path.join(dirPath, entry.filename) : files[0] || dirPath;
802
+ const entryKey = `${currentStashDir}:${entry.type}:${entry.name}`;
803
+ const searchText = buildSearchText(entry);
804
+ upsertEntry(db, entryKey, dirPath, entryPath, currentStashDir, attachFileSize(entry, entryPath), searchText);
805
+ }
806
+ })();
807
+ completedDirs++;
808
+ lastProgressAt = Date.now();
809
+ onProgress?.({
810
+ phase: "llm",
811
+ message: `Completed ${completedDirs}/${totalDirs} director${totalDirs === 1 ? "y" : "ies"}; ` +
812
+ `${completedEntries}/${totalEntries} entr${totalEntries === 1 ? "y" : "ies"} processed.`,
813
+ processed: completedEntries,
814
+ total: totalEntries,
815
+ });
816
+ return undefined;
817
+ },
818
+ // Default concurrency of 4 works well for cloud LLM APIs. Local model
819
+ // servers (LM Studio, Ollama) run one inference at a time — set
820
+ // `llm.concurrency: 1` in config.json to avoid "Model reloaded" / 500
821
+ // errors from concurrent request overload.
822
+ getDefaultLlmConcurrency(llmConfig));
823
+ }
824
+ finally {
825
+ if (heartbeatTimer)
826
+ clearInterval(heartbeatTimer);
827
+ }
828
+ if (deadlineHit) {
829
+ warn("[akm] LLM enrichment budget exceeded. Re-run `akm index` to continue. Increase llm.timeoutMs for a larger budget.");
686
830
  }
687
831
  if (summary.attempted > 0 && summary.succeeded === 0) {
688
832
  const sample = summary.failureSamples.length ? ` Example: ${summary.failureSamples[0]}` : "";
@@ -781,14 +925,6 @@ async function generateEmbeddingsForDb(db, config, onProgress, signal) {
781
925
  }
782
926
  }
783
927
  // ── Helpers ─────────────────────────────────────────────────────────────────
784
- function getAllEntriesForEmbedding(db) {
785
- return db
786
- .prepare(`
787
- SELECT e.id, e.search_text AS searchText, e.entry_key AS entryKey, e.file_path AS filePath FROM entries e
788
- WHERE NOT EXISTS (SELECT 1 FROM embeddings b WHERE b.id = e.id)
789
- `)
790
- .all();
791
- }
792
928
  function attachFileSize(entry, entryPath) {
793
929
  try {
794
930
  return { ...entry, fileSize: fs.statSync(entryPath).size };
@@ -797,28 +933,6 @@ function attachFileSize(entry, entryPath) {
797
933
  return entry;
798
934
  }
799
935
  }
800
- function upsertWorkflowDocument(db, entryId, doc, content) {
801
- const sourceHash = computeSourceHash(content);
802
- db.prepare(`INSERT INTO workflow_documents (entry_id, schema_version, document_json, source_path, source_hash, updated_at)
803
- VALUES (?, ?, ?, ?, ?, ?)
804
- ON CONFLICT(entry_id) DO UPDATE SET
805
- schema_version = excluded.schema_version,
806
- document_json = excluded.document_json,
807
- source_path = excluded.source_path,
808
- source_hash = excluded.source_hash,
809
- updated_at = excluded.updated_at`).run(entryId, doc.schemaVersion, JSON.stringify(doc), doc.source.path, sourceHash, new Date().toISOString());
810
- }
811
- function computeSourceHash(content) {
812
- // Cheap, stable identity for the source markdown — used by future
813
- // incremental fast-paths that skip re-validation when content is unchanged.
814
- // Not security-sensitive; FNV-1a over the bytes is sufficient.
815
- let hash = 0x811c9dc5;
816
- for (let i = 0; i < content.length; i++) {
817
- hash ^= content[i];
818
- hash = Math.imul(hash, 0x01000193);
819
- }
820
- return (hash >>> 0).toString(16);
821
- }
822
936
  function buildIndexSummaryMessage(options) {
823
937
  const stashSourceLabel = options.sourcesCount === 1 ? "stash source" : "stash sources";
824
938
  const semanticDetail = getSemanticSearchLabel(options.semanticSearchMode, options.embeddingProvider, options.vecAvailable);
@@ -911,11 +1025,12 @@ function resolveIndexedFiles(dirPath, files, stash) {
911
1025
  }
912
1026
  return resolved.size > 0 ? [...resolved] : files;
913
1027
  }
914
- async function enhanceStashWithLlm(llmConfig, stash, files, summary, signal) {
1028
+ async function enhanceStashWithLlm(llmConfig, stash, files, summary, signal, db, entryKeys, reEnrich, akmConfig, onEntryDone) {
915
1029
  const { enhanceMetadata } = await import("../llm/metadata-enhance");
916
- const enhanced = [];
917
- for (const entry of stash.entries) {
918
- throwIfAborted(signal);
1030
+ const { computeBodyHash, getLlmCacheEntry, upsertLlmCacheEntry } = await import("./db.js");
1031
+ const results = await concurrentMap(stash.entries, async (entry, idx) => {
1032
+ if (signal?.aborted)
1033
+ return entry;
919
1034
  summary.attempted++;
920
1035
  try {
921
1036
  const entryFile = entry.filename
@@ -927,10 +1042,38 @@ async function enhanceStashWithLlm(llmConfig, stash, files, summary, signal) {
927
1042
  fileContent = fs.readFileSync(entryFile, "utf8");
928
1043
  }
929
1044
  catch {
930
- /* ignore unreadable files */
1045
+ warn(`Could not read file for LLM enrichment: ${entry.filename ?? entry.name}`);
931
1046
  }
932
1047
  }
933
- const improvements = await enhanceMetadata(llmConfig, entry, fileContent, signal);
1048
+ // Incremental cache: skip LLM call when file body is unchanged and
1049
+ // --re-enrich was not requested. The cache key is the entry_key
1050
+ // (stashDir:type:name) which is stable across index runs.
1051
+ const cacheBody = fileContent ?? `${entry.name}\n${entry.description ?? ""}`;
1052
+ const bodyHash = computeBodyHash(cacheBody);
1053
+ const cacheKey = entryKeys?.[idx] ?? `${entry.type}:${entry.name}`;
1054
+ if (db && !reEnrich) {
1055
+ const cached = getLlmCacheEntry(db, cacheKey, bodyHash);
1056
+ if (cached) {
1057
+ try {
1058
+ const parsed = JSON.parse(cached.resultJson);
1059
+ const updated = { ...entry };
1060
+ if (parsed.description)
1061
+ updated.description = parsed.description;
1062
+ if (parsed.searchHints?.length)
1063
+ updated.searchHints = parsed.searchHints;
1064
+ if (parsed.tags?.length)
1065
+ updated.tags = parsed.tags;
1066
+ updated.quality = "enriched";
1067
+ summary.succeeded++;
1068
+ onEntryDone?.({ entryName: entry.name, outcome: "cache-hit" });
1069
+ return updated;
1070
+ }
1071
+ catch {
1072
+ warn(`LLM enrichment cache entry corrupt for ${entry.name}; re-running enrichment`);
1073
+ }
1074
+ }
1075
+ }
1076
+ const improvements = await enhanceMetadata(llmConfig, entry, fileContent, signal, akmConfig);
934
1077
  const updated = { ...entry };
935
1078
  if (improvements.description)
936
1079
  updated.description = improvements.description;
@@ -938,19 +1081,39 @@ async function enhanceStashWithLlm(llmConfig, stash, files, summary, signal) {
938
1081
  updated.searchHints = improvements.searchHints;
939
1082
  if (improvements.tags?.length)
940
1083
  updated.tags = improvements.tags;
941
- enhanced.push(updated);
1084
+ // Mark as enriched so subsequent index runs skip re-enrichment (P2)
1085
+ updated.quality = "enriched";
1086
+ // Persist to cache so the next run can skip the LLM call when the
1087
+ // file body has not changed.
1088
+ if (db) {
1089
+ upsertLlmCacheEntry(db, cacheKey, bodyHash, JSON.stringify({
1090
+ description: improvements.description,
1091
+ searchHints: improvements.searchHints,
1092
+ tags: improvements.tags,
1093
+ }));
1094
+ }
942
1095
  summary.succeeded++;
1096
+ onEntryDone?.({ entryName: entry.name, outcome: "llm" });
1097
+ return updated;
943
1098
  }
944
1099
  catch (err) {
945
- enhanced.push(entry);
946
1100
  const msg = toErrorMessage(err);
947
1101
  // failureSamples is bounded to 3 items, so a linear scan is cheaper
948
1102
  // than maintaining a parallel Set for membership checks (#177 review).
949
1103
  if (summary.failureSamples.length < 3 && !summary.failureSamples.includes(msg)) {
950
1104
  summary.failureSamples.push(msg);
951
1105
  }
1106
+ onEntryDone?.({ entryName: entry.name, outcome: "failed" });
1107
+ return entry;
952
1108
  }
953
- }
1109
+ },
1110
+ // Default concurrency of 4 works well for cloud LLM APIs. Set
1111
+ // `llm.concurrency: 1` in config.json for local model servers.
1112
+ getDefaultLlmConcurrency(llmConfig));
1113
+ // concurrentMap returns Array<T | undefined>; filter out undefined slots
1114
+ // (which can only occur if the callback itself returned undefined, which
1115
+ // it never does above — but TypeScript needs the filter for type safety).
1116
+ const enhanced = results.map((r, i) => r ?? stash.entries[i]);
954
1117
  return { entries: enhanced };
955
1118
  }
956
1119
  /**
@@ -1129,23 +1292,26 @@ export function recomputeUtilityScores(db) {
1129
1292
  }
1130
1293
  // Batch-load existing utility scores
1131
1294
  const existingScores = new Map();
1132
- const scoreRows = db.prepare("SELECT entry_id, utility FROM utility_scores").all();
1295
+ const scoreRows = db.prepare("SELECT entry_id, utility, last_used_at FROM utility_scores").all();
1133
1296
  for (const row of scoreRows) {
1134
- existingScores.set(row.entry_id, row.utility);
1297
+ existingScores.set(row.entry_id, { utility: row.utility, lastUsedAt: row.last_used_at ?? undefined });
1135
1298
  }
1299
+ const now = new Date().toISOString();
1136
1300
  for (const row of usageRows) {
1137
1301
  const selectRate = row.search_count > 0 ? Math.min(1, row.show_count / row.search_count) : 0;
1138
1302
  const feedbackTotal = row.positive_feedback_count + row.negative_feedback_count;
1139
1303
  const feedbackRate = feedbackTotal > 0 ? Math.max(0, row.positive_feedback_count - row.negative_feedback_count) / feedbackTotal : 0;
1140
1304
  const effectiveRate = Math.max(selectRate, feedbackRate);
1141
- const prevUtility = existingScores.get(row.entry_id) ?? 0;
1305
+ const existing = existingScores.get(row.entry_id);
1306
+ const prevUtility = existing?.utility ?? 0;
1142
1307
  const utility = prevUtility * emaDecay + effectiveRate * emaNew;
1308
+ const lastUsedAt = effectiveRate > 0.5 ? now : (existing?.lastUsedAt ?? undefined);
1143
1309
  upsertUtilityScore(db, row.entry_id, {
1144
1310
  utility,
1145
1311
  showCount: row.show_count,
1146
1312
  searchCount: row.search_count,
1147
1313
  selectRate,
1148
- lastUsedAt: row.last_used_at ?? undefined,
1314
+ lastUsedAt,
1149
1315
  });
1150
1316
  }
1151
1317
  setMeta(db, "last_utility_computed_at", new Date().toISOString());