akm-cli 0.6.0-rc1 → 0.6.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (108)
  1. package/CHANGELOG.md +33 -0
  2. package/README.md +9 -9
  3. package/dist/cli.js +199 -114
  4. package/dist/{completions.js → commands/completions.js} +1 -1
  5. package/dist/{config-cli.js → commands/config-cli.js} +109 -11
  6. package/dist/{curate.js → commands/curate.js} +8 -3
  7. package/dist/{info.js → commands/info.js} +15 -9
  8. package/dist/{init.js → commands/init.js} +4 -4
  9. package/dist/{install-audit.js → commands/install-audit.js} +4 -7
  10. package/dist/{installed-stashes.js → commands/installed-stashes.js} +77 -31
  11. package/dist/{migration-help.js → commands/migration-help.js} +2 -2
  12. package/dist/{registry-search.js → commands/registry-search.js} +8 -6
  13. package/dist/{remember.js → commands/remember.js} +55 -49
  14. package/dist/{stash-search.js → commands/search.js} +28 -69
  15. package/dist/{self-update.js → commands/self-update.js} +69 -3
  16. package/dist/{stash-show.js → commands/show.js} +104 -84
  17. package/dist/{stash-add.js → commands/source-add.js} +42 -32
  18. package/dist/{stash-clone.js → commands/source-clone.js} +12 -10
  19. package/dist/{stash-source-manage.js → commands/source-manage.js} +24 -24
  20. package/dist/{vault.js → commands/vault.js} +43 -0
  21. package/dist/{stash-ref.js → core/asset-ref.js} +4 -4
  22. package/dist/{asset-registry.js → core/asset-registry.js} +1 -1
  23. package/dist/{asset-spec.js → core/asset-spec.js} +1 -1
  24. package/dist/{config.js → core/config.js} +133 -56
  25. package/dist/core/errors.js +90 -0
  26. package/dist/{frontmatter.js → core/frontmatter.js} +5 -3
  27. package/dist/core/write-source.js +280 -0
  28. package/dist/{db-search.js → indexer/db-search.js} +25 -19
  29. package/dist/{db.js → indexer/db.js} +79 -47
  30. package/dist/{file-context.js → indexer/file-context.js} +3 -3
  31. package/dist/{indexer.js → indexer/indexer.js} +132 -33
  32. package/dist/{manifest.js → indexer/manifest.js} +10 -10
  33. package/dist/{matchers.js → indexer/matchers.js} +3 -6
  34. package/dist/{metadata.js → indexer/metadata.js} +9 -5
  35. package/dist/{search-source.js → indexer/search-source.js} +52 -41
  36. package/dist/{semantic-status.js → indexer/semantic-status.js} +2 -2
  37. package/dist/{walker.js → indexer/walker.js} +1 -1
  38. package/dist/{lockfile.js → integrations/lockfile.js} +1 -1
  39. package/dist/{llm-client.js → llm/client.js} +1 -1
  40. package/dist/{embedders → llm/embedders}/local.js +2 -2
  41. package/dist/{embedders → llm/embedders}/remote.js +1 -1
  42. package/dist/{embedders → llm/embedders}/types.js +1 -1
  43. package/dist/{metadata-enhance.js → llm/metadata-enhance.js} +2 -2
  44. package/dist/{cli-hints.js → output/cli-hints.js} +3 -0
  45. package/dist/{output-context.js → output/context.js} +21 -3
  46. package/dist/{renderers.js → output/renderers.js} +9 -65
  47. package/dist/{output-shapes.js → output/shapes.js} +18 -4
  48. package/dist/{output-text.js → output/text.js} +2 -2
  49. package/dist/{registry-build-index.js → registry/build-index.js} +16 -7
  50. package/dist/{create-provider-registry.js → registry/create-provider-registry.js} +6 -2
  51. package/dist/registry/factory.js +33 -0
  52. package/dist/{origin-resolve.js → registry/origin-resolve.js} +1 -1
  53. package/dist/{providers → registry/providers}/index.js +1 -1
  54. package/dist/{providers → registry/providers}/skills-sh.js +59 -3
  55. package/dist/{providers → registry/providers}/static-index.js +80 -12
  56. package/dist/registry/providers/types.js +25 -0
  57. package/dist/{registry-resolve.js → registry/resolve.js} +3 -3
  58. package/dist/{detect.js → setup/detect.js} +0 -27
  59. package/dist/{ripgrep-install.js → setup/ripgrep-install.js} +1 -1
  60. package/dist/{ripgrep-resolve.js → setup/ripgrep-resolve.js} +2 -2
  61. package/dist/{setup.js → setup/setup.js} +16 -56
  62. package/dist/{stash-include.js → sources/include.js} +1 -1
  63. package/dist/sources/provider-factory.js +36 -0
  64. package/dist/sources/provider.js +21 -0
  65. package/dist/sources/providers/filesystem.js +35 -0
  66. package/dist/{stash-providers → sources/providers}/git.js +53 -64
  67. package/dist/{stash-providers → sources/providers}/index.js +3 -4
  68. package/dist/sources/providers/install-types.js +14 -0
  69. package/dist/{stash-providers → sources/providers}/npm.js +42 -41
  70. package/dist/{stash-providers → sources/providers}/provider-utils.js +3 -3
  71. package/dist/{stash-providers → sources/providers}/sync-from-ref.js +2 -2
  72. package/dist/{stash-providers → sources/providers}/tar-utils.js +11 -8
  73. package/dist/{stash-providers → sources/providers}/website.js +29 -65
  74. package/dist/{stash-resolve.js → sources/resolve.js} +8 -7
  75. package/dist/{wiki.js → wiki/wiki.js} +34 -18
  76. package/dist/{workflow-authoring.js → workflows/authoring.js} +37 -14
  77. package/dist/{workflow-cli.js → workflows/cli.js} +2 -1
  78. package/dist/{workflow-db.js → workflows/db.js} +1 -1
  79. package/dist/workflows/document-cache.js +20 -0
  80. package/dist/workflows/parser.js +379 -0
  81. package/dist/workflows/renderer.js +78 -0
  82. package/dist/{workflow-runs.js → workflows/runs.js} +72 -28
  83. package/dist/workflows/schema.js +11 -0
  84. package/dist/workflows/validator.js +48 -0
  85. package/docs/migration/release-notes/0.6.0.md +91 -23
  86. package/package.json +1 -1
  87. package/dist/errors.js +0 -45
  88. package/dist/llm.js +0 -16
  89. package/dist/registry-factory.js +0 -19
  90. package/dist/ripgrep.js +0 -2
  91. package/dist/stash-provider-factory.js +0 -35
  92. package/dist/stash-provider.js +0 -3
  93. package/dist/stash-providers/filesystem.js +0 -71
  94. package/dist/stash-providers/openviking.js +0 -348
  95. package/dist/stash-types.js +0 -1
  96. package/dist/workflow-markdown.js +0 -260
  97. /package/dist/{common.js → core/common.js} +0 -0
  98. /package/dist/{markdown.js → core/markdown.js} +0 -0
  99. /package/dist/{paths.js → core/paths.js} +0 -0
  100. /package/dist/{warn.js → core/warn.js} +0 -0
  101. /package/dist/{search-fields.js → indexer/search-fields.js} +0 -0
  102. /package/dist/{usage-events.js → indexer/usage-events.js} +0 -0
  103. /package/dist/{github.js → integrations/github.js} +0 -0
  104. /package/dist/{embedder.js → llm/embedder.js} +0 -0
  105. /package/dist/{embedders → llm/embedders}/cache.js +0 -0
  106. /package/dist/{registry-provider.js → registry/types.js} +0 -0
  107. /package/dist/{setup-steps.js → setup/steps.js} +0 -0
  108. /package/dist/{registry-types.js → sources/types.js} +0 -0
@@ -6,10 +6,10 @@
6
6
  * CLI entry point stays focused on argument parsing + output routing.
7
7
  */
8
8
  import { stringify as yamlStringify } from "yaml";
9
- import { tryReadStdinText } from "./common";
10
- import { loadConfig } from "./config";
11
- import { UsageError } from "./errors";
12
- import { warn } from "./warn";
9
+ import { toErrorMessage, tryReadStdinText } from "../core/common";
10
+ import { loadConfig } from "../core/config";
11
+ import { UsageError } from "../core/errors";
12
+ import { warn } from "../core/warn";
13
13
  /**
14
14
  * Parse a shorthand duration string to a number of milliseconds.
15
15
  * Supports: `30d` (days), `12h` (hours), `6m` (months, approximated as 30d).
@@ -17,7 +17,7 @@ import { warn } from "./warn";
17
17
  export function parseDuration(s) {
18
18
  const match = s.trim().match(/^(\d+)([dhm])$/i);
19
19
  if (!match)
20
- throw new UsageError(`Invalid --expires format "${s}". Use shorthand like 30d, 12h, or 6m.`);
20
+ throw new UsageError(`Invalid --expires format "${s}". Use shorthand like 30d, 12h, or 6m.`, "INVALID_FLAG_VALUE");
21
21
  const n = Number(match[1]);
22
22
  const unit = match[2].toLowerCase();
23
23
  if (unit === "d")
@@ -40,15 +40,15 @@ export function parseDuration(s) {
40
40
  */
41
41
  export function buildMemoryFrontmatter(fields) {
42
42
  const obj = {};
43
- if (fields.description && fields.description.trim())
43
+ if (fields.description?.trim())
44
44
  obj.description = fields.description;
45
45
  if (fields.tags && fields.tags.length > 0)
46
46
  obj.tags = fields.tags;
47
- if (fields.source && fields.source.trim())
47
+ if (fields.source?.trim())
48
48
  obj.source = fields.source;
49
- if (fields.observed_at && fields.observed_at.trim())
49
+ if (fields.observed_at?.trim())
50
50
  obj.observed_at = fields.observed_at;
51
- if (fields.expires && fields.expires.trim())
51
+ if (fields.expires?.trim())
52
52
  obj.expires = fields.expires;
53
53
  if (fields.subjective)
54
54
  obj.subjective = true;
@@ -86,38 +86,32 @@ export function runAutoHeuristics(body) {
86
86
  const urlMatch = body.match(/https?:\/\/[^\s)>'"]+/);
87
87
  const source = urlMatch ? urlMatch[0] : undefined;
88
88
  // ISO date token or obvious relative date phrase → observed_at
89
- let observed_at;
90
- const isoMatch = body.match(/\b(\d{4}-\d{2}-\d{2})\b/);
91
- if (isoMatch) {
92
- observed_at = isoMatch[1];
93
- }
94
- else {
95
- const relMatch = body.match(/\b(today|yesterday|last\s+week|last\s+month)\b/i);
96
- if (relMatch) {
97
- const phrase = relMatch[1].toLowerCase();
98
- const now = new Date();
99
- if (phrase === "today") {
100
- observed_at = now.toISOString().slice(0, 10);
101
- }
102
- else if (phrase === "yesterday") {
103
- const d = new Date(now);
104
- d.setDate(d.getDate() - 1);
105
- observed_at = d.toISOString().slice(0, 10);
106
- }
107
- else if (phrase.startsWith("last week")) {
108
- const d = new Date(now);
109
- d.setDate(d.getDate() - 7);
110
- observed_at = d.toISOString().slice(0, 10);
111
- }
112
- else if (phrase.startsWith("last month")) {
113
- const d = new Date(now);
114
- d.setMonth(d.getMonth() - 1);
115
- observed_at = d.toISOString().slice(0, 10);
116
- }
117
- }
118
- }
89
+ const observed_at = detectObservedAt(body);
119
90
  return { tags, source, observed_at, subjective };
120
91
  }
92
+ const RELATIVE_DATE_OFFSETS = {
93
+ today: () => { },
94
+ yesterday: (d) => d.setDate(d.getDate() - 1),
95
+ "last week": (d) => d.setDate(d.getDate() - 7),
96
+ "last month": (d) => d.setMonth(d.getMonth() - 1),
97
+ };
98
+ function detectObservedAt(body) {
99
+ const isoMatch = body.match(/\b(\d{4}-\d{2}-\d{2})\b/);
100
+ if (isoMatch)
101
+ return isoMatch[1];
102
+ const relMatch = body.match(/\b(today|yesterday|last\s+week|last\s+month)\b/i);
103
+ if (!relMatch)
104
+ return undefined;
105
+ // Normalise the matched phrase: lowercase, collapse internal whitespace,
106
+ // so "last week" matches the lookup table key.
107
+ const phrase = relMatch[1].toLowerCase().replace(/\s+/g, " ");
108
+ const offset = RELATIVE_DATE_OFFSETS[phrase];
109
+ if (!offset)
110
+ return undefined;
111
+ const d = new Date();
112
+ offset(d);
113
+ return d.toISOString().slice(0, 10);
114
+ }
121
115
  /** Hard timeout for the `--enrich` LLM call. Write-path must not block on a misbehaving endpoint. */
122
116
  const LLM_ENRICH_TIMEOUT_MS = 10_000;
123
117
  /**
@@ -131,7 +125,8 @@ export async function runLlmEnrich(body) {
131
125
  warn("Warning: --enrich requires an LLM to be configured. Run `akm config set llm` to configure one.");
132
126
  return { tags: [] };
133
127
  }
134
- const { chatCompletion, parseJsonResponse } = await import("./llm.js");
128
+ const llmConfig = config.llm;
129
+ const { chatCompletion, parseJsonResponse } = await import("../llm/client");
135
130
  const prompt = `You are a memory tagger for a developer knowledge base.
136
131
  Given the memory text below, return ONLY a JSON object with these fields:
137
132
  - "tags": array of 1-5 short lowercase keyword tags
@@ -143,13 +138,25 @@ ${body.slice(0, 2000)}
143
138
 
144
139
  Return ONLY the JSON object, no prose, no markdown fences.`;
145
140
  try {
146
- const result = await Promise.race([
147
- chatCompletion(config.llm, [
148
- { role: "system", content: "Return only valid JSON. No prose." },
149
- { role: "user", content: prompt },
150
- ], { maxTokens: 256, temperature: 0.1 }),
151
- new Promise((_, reject) => setTimeout(() => reject(new Error("LLM enrichment timed out")), LLM_ENRICH_TIMEOUT_MS)),
152
- ]);
141
+ let timeoutHandle;
142
+ const result = await (async () => {
143
+ try {
144
+ return await Promise.race([
145
+ chatCompletion(llmConfig, [
146
+ { role: "system", content: "Return only valid JSON. No prose." },
147
+ { role: "user", content: prompt },
148
+ ], { maxTokens: 256, temperature: 0.1 }),
149
+ new Promise((_, reject) => {
150
+ timeoutHandle = setTimeout(() => reject(new Error("LLM enrichment timed out")), LLM_ENRICH_TIMEOUT_MS);
151
+ }),
152
+ ]);
153
+ }
154
+ finally {
155
+ if (timeoutHandle !== undefined) {
156
+ clearTimeout(timeoutHandle);
157
+ }
158
+ }
159
+ })();
153
160
  const parsed = parseJsonResponse(result);
154
161
  if (!parsed) {
155
162
  warn("Warning: --enrich received invalid JSON from the LLM. Writing memory without enrichment.");
@@ -165,8 +172,7 @@ Return ONLY the JSON object, no prose, no markdown fences.`;
165
172
  return { tags, description, observed_at };
166
173
  }
167
174
  catch (err) {
168
- const msg = err instanceof Error ? err.message : String(err);
169
- warn(`Warning: --enrich failed (${msg}). Writing memory without enrichment.`);
175
+ warn(`Warning: --enrich failed (${toErrorMessage(err)}). Writing memory without enrichment.`);
170
176
  return { tags: [] };
171
177
  }
172
178
  }
@@ -1,13 +1,23 @@
1
- import { loadConfig } from "./config";
2
- import { closeDatabase, openDatabase } from "./db";
3
- import { searchLocal } from "./db-search";
4
- import { resolveStashProviders } from "./stash-provider-factory";
5
- // Eagerly import stash providers to trigger self-registration
6
- import "./stash-providers/index";
7
- import { UsageError } from "./errors";
1
+ /**
2
+ * `akm search` entry point.
3
+ *
4
+ * Spec §6.1: search consults the local FTS5 index. There is one query path
5
+ * because there is one data store. Provider fan-out is gone.
6
+ *
7
+ * The orchestration here is thin: build the FTS query, optionally interleave
8
+ * a registry search behind `--source registry|both`, and log a usage event.
9
+ * Provider `search()` methods do not exist.
10
+ */
11
+ import { loadConfig } from "../core/config";
12
+ import { UsageError } from "../core/errors";
13
+ import { closeDatabase, openDatabase } from "../indexer/db";
14
+ import { searchLocal } from "../indexer/db-search";
15
+ import { resolveSourceEntries } from "../indexer/search-source";
16
+ // Eagerly import source providers to trigger self-registration before the
17
+ // indexer or path-resolution code runs.
18
+ import "../sources/providers/index";
19
+ import { insertUsageEvent } from "../indexer/usage-events";
8
20
  import { searchRegistry } from "./registry-search";
9
- import { resolveStashSources } from "./search-source";
10
- import { insertUsageEvent } from "./usage-events";
11
21
  const DEFAULT_LIMIT = 20;
12
22
  export async function akmSearch(input) {
13
23
  const t0 = Date.now();
@@ -17,7 +27,7 @@ export async function akmSearch(input) {
17
27
  const limit = normalizeLimit(input.limit);
18
28
  const source = parseSearchSource(input.source ?? "stash");
19
29
  const config = loadConfig();
20
- const sources = resolveStashSources(undefined, config);
30
+ const sources = resolveSourceEntries(undefined, config);
21
31
  if (sources.length === 0) {
22
32
  // stashDir: "" is a safe sentinel here — the response carries zero hits
23
33
  // and a warning, so no downstream code will try to use the empty path.
@@ -35,10 +45,6 @@ export async function akmSearch(input) {
35
45
  // Primary stash directory — used for DB path lookups and as the default
36
46
  // stash root. Safe because the empty-sources case is handled above.
37
47
  const stashDir = sources[0].path;
38
- // Resolve additional stash providers (e.g. OpenViking) from config.
39
- // Exclude filesystem (handled by resolveStashSources) and git (content
40
- // now indexed through the unified FTS5 pipeline).
41
- const additionalStashProviders = resolveStashProviders(config).filter((p) => p.type !== "filesystem" && p.type !== "git");
42
48
  const localResult = source === "registry"
43
49
  ? undefined
44
50
  : await searchLocal({
@@ -49,35 +55,17 @@ export async function akmSearch(input) {
49
55
  sources,
50
56
  config,
51
57
  });
52
- // Pass original case to providers — FTS5 requires lowercase but remote providers handle case themselves
53
- const additionalStashResults = source === "registry" || additionalStashProviders.length === 0
54
- ? []
55
- : await Promise.all(additionalStashProviders.map(async (provider) => {
56
- try {
57
- return await provider.search({ query, type: searchType === "any" ? undefined : searchType, limit });
58
- }
59
- catch (err) {
60
- return {
61
- hits: [],
62
- warnings: [`Stash ${provider.name}: ${err instanceof Error ? err.message : String(err)}`],
63
- };
64
- }
65
- }));
66
- // Merge stash hits from all providers
67
- const additionalHits = additionalStashResults.flatMap((r) => r.hits);
68
- const additionalWarnings = additionalStashResults.flatMap((r) => r.warnings ?? []);
69
58
  const registryResult = source === "stash" ? undefined : await searchRegistry(query, { limit, registries: config.registries });
70
59
  if (source === "stash") {
71
- const allStashHits = mergeStashHits(localResult?.hits ?? [], additionalHits, limit);
72
- const localWarnings = [...(localResult?.warnings ?? []), ...additionalWarnings];
73
- const hasResults = allStashHits.length > 0;
60
+ const localHits = localResult?.hits ?? [];
61
+ const hasResults = localHits.length > 0;
74
62
  const response = {
75
63
  schemaVersion: 1,
76
64
  stashDir,
77
65
  source,
78
- hits: allStashHits,
66
+ hits: localHits,
79
67
  tip: hasResults ? undefined : localResult?.tip,
80
- warnings: localWarnings.length > 0 ? localWarnings : undefined,
68
+ warnings: localResult?.warnings?.length ? localResult.warnings : undefined,
81
69
  timing: { totalMs: Date.now() - t0, rankMs: localResult?.rankMs, embedMs: localResult?.embedMs },
82
70
  };
83
71
  logSearchEvent(query, response);
@@ -116,14 +104,14 @@ export async function akmSearch(input) {
116
104
  return response;
117
105
  }
118
106
  // source === "both"
119
- const allStashHits = mergeStashHits(localResult?.hits ?? [], additionalHits, limit * 2);
120
- const warnings = [...(localResult?.warnings ?? []), ...additionalWarnings, ...(registryResult?.warnings ?? [])];
107
+ const allStashHits = (localResult?.hits ?? []).slice(0, limit);
108
+ const warnings = [...(localResult?.warnings ?? []), ...(registryResult?.warnings ?? [])];
121
109
  const hasResults = allStashHits.length > 0 || registryHits.length > 0;
122
110
  const response = {
123
111
  schemaVersion: 1,
124
112
  stashDir,
125
113
  source,
126
- hits: allStashHits.slice(0, limit),
114
+ hits: allStashHits,
127
115
  registryHits,
128
116
  tip: hasResults ? undefined : "No matching stash assets or registry entries were found.",
129
117
  warnings: warnings.length ? warnings : undefined,
@@ -184,35 +172,6 @@ function logSearchEvent(query, response, existingDb) {
184
172
  }
185
173
  }
186
174
  // ── Helpers ──────────────────────────────────────────────────────────────────
187
- /**
188
- * Merge local and additional stash hits into a single ranked list.
189
- *
190
- * Provider hits (e.g. OpenViking) keep their original scores and compete
191
- * fairly alongside local hits. Duplicates are resolved in favour of the
192
- * local version.
193
- *
194
- * 1. Build set of local hit keys for dedup.
195
- * 2. Filter provider hits that aren't duplicates.
196
- * 3. Combine local + non-duplicate provider hits.
197
- * 4. Sort by score descending.
198
- * 5. Slice to limit.
199
- */
200
- export function mergeStashHits(localHits, additionalHits, limit) {
201
- if (additionalHits.length === 0)
202
- return localHits.slice(0, limit);
203
- // Track local hits by a dedup key (path > ref > name)
204
- const localKeys = new Set();
205
- for (const h of localHits) {
206
- localKeys.add(h.path ?? h.ref ?? h.name);
207
- }
208
- // Keep non-duplicate provider hits with their original scores
209
- const providerOnly = additionalHits.filter((h) => {
210
- const key = h.path ?? h.ref ?? h.name;
211
- return !localKeys.has(key);
212
- });
213
- // Combine and sort by score descending
214
- return [...localHits, ...providerOnly].sort((a, b) => (b.score ?? 0) - (a.score ?? 0)).slice(0, limit);
215
- }
216
175
  function normalizeLimit(limit) {
217
176
  if (typeof limit !== "number" || Number.isNaN(limit) || limit <= 0) {
218
177
  return DEFAULT_LIMIT;
@@ -227,7 +186,7 @@ export function parseSearchSource(source) {
227
186
  return "stash";
228
187
  if (typeof source === "undefined")
229
188
  return "stash";
230
- throw new UsageError(`Invalid value for --source: ${String(source)}. Expected one of: stash|registry|both`);
189
+ throw new UsageError(`Invalid value for --source: ${String(source)}. Expected one of: stash|registry|both`, "INVALID_SOURCE_VALUE");
231
190
  }
232
191
  /**
233
192
  * Merge stash hits and registry hits via simple concatenation.
@@ -2,8 +2,8 @@ import * as childProcess from "node:child_process";
2
2
  import { createHash } from "node:crypto";
3
3
  import fs from "node:fs";
4
4
  import path from "node:path";
5
- import { fetchWithRetry, IS_WINDOWS } from "./common";
6
- import { githubHeaders } from "./github";
5
+ import { fetchWithRetry, IS_WINDOWS } from "../core/common";
6
+ import { githubHeaders } from "../integrations/github";
7
7
  const REPO = "itlackey/akm";
8
8
  const DEFAULT_PACKAGE_NAME = "akm-cli";
9
9
  const NODE_MODULES_SEGMENT = "/node_modules/";
@@ -80,6 +80,7 @@ export async function checkForUpdate(currentVersion) {
80
80
  export async function performUpgrade(check, opts) {
81
81
  const { currentVersion, latestVersion, installMethod } = check;
82
82
  const force = opts?.force === true;
83
+ const skipPostUpgrade = opts?.skipPostUpgrade === true;
83
84
  // All install methods can short-circuit here unless the user explicitly forces an upgrade.
84
85
  if (!check.updateAvailable && !force) {
85
86
  return {
@@ -113,6 +114,7 @@ export async function performUpgrade(check, opts) {
113
114
  upgraded: true,
114
115
  installMethod,
115
116
  message: `akm upgraded via ${installMethod}`,
117
+ postUpgrade: runPostUpgradeTasks("akm", { skip: skipPostUpgrade }),
116
118
  };
117
119
  }
118
120
  if (installMethod === "unknown") {
@@ -283,8 +285,72 @@ export async function performUpgrade(check, opts) {
283
285
  installMethod,
284
286
  binaryPath: execPath,
285
287
  checksumVerified,
288
+ // For binary installs, the new binary now lives at execPath; spawn it
289
+ // directly so the post-upgrade work runs against the new code.
290
+ postUpgrade: runPostUpgradeTasks(execPath, { skip: skipPostUpgrade }),
286
291
  };
287
292
  }
293
+ /**
294
+ * Run the post-upgrade tasks against the *new* binary as a child process.
295
+ *
296
+ * Why a child process: the running akm process still has the old code in
297
+ * memory. Calling loadConfig()/akmIndex() in-process would use the old
298
+ * implementations and miss any DB_VERSION / config-key changes the new
299
+ * release introduces.
300
+ *
301
+ * The new binary's `akm index` does the work for us:
302
+ * 1. loadConfig() runs at startup — auto-migrates legacy `stashes` →
303
+ * `sources` if the on-disk config still uses the old key.
304
+ * 2. ensureSchema() detects DB_VERSION mismatch and rebuilds index.db
305
+ * tables (preserving usage_events).
306
+ * 3. The full reindex repopulates entries + workflow_documents + FTS.
307
+ */
308
+ function runPostUpgradeTasks(akmBin, opts) {
309
+ if (opts.skip) {
310
+ return {
311
+ ok: true,
312
+ skipped: true,
313
+ message: "Skipped post-upgrade tasks. Run `akm index` manually to migrate config and rebuild the index.",
314
+ };
315
+ }
316
+ try {
317
+ const result = childProcess.spawnSync(akmBin, ["index"], {
318
+ encoding: "utf8",
319
+ env: process.env,
320
+ stdio: "pipe",
321
+ });
322
+ if (result.error) {
323
+ return {
324
+ ok: false,
325
+ skipped: false,
326
+ message: `Post-upgrade tasks could not start: ${result.error.message}. Run \`akm index\` manually.`,
327
+ };
328
+ }
329
+ if (result.status !== 0) {
330
+ const detail = (result.stderr ?? "").trim() || (result.stdout ?? "").trim() || `exit code ${result.status}`;
331
+ return {
332
+ ok: false,
333
+ skipped: false,
334
+ exitCode: result.status,
335
+ message: `Post-upgrade \`akm index\` failed (${detail}). Run \`akm index\` manually.`,
336
+ };
337
+ }
338
+ return {
339
+ ok: true,
340
+ skipped: false,
341
+ exitCode: 0,
342
+ message: "Config migrated (if needed) and index rebuilt against the new binary.",
343
+ };
344
+ }
345
+ catch (err) {
346
+ const detail = err instanceof Error ? err.message : String(err);
347
+ return {
348
+ ok: false,
349
+ skipped: false,
350
+ message: `Post-upgrade tasks failed: ${detail}. Run \`akm index\` manually.`,
351
+ };
352
+ }
353
+ }
288
354
  function parseChecksumForFile(checksumsText, filename) {
289
355
  for (const line of checksumsText.split("\n")) {
290
356
  const trimmed = line.trim();
@@ -303,7 +369,7 @@ function normalizePathSeparators(value) {
303
369
  }
304
370
  function getInstalledPackageName() {
305
371
  try {
306
- const pkgPath = path.resolve(import.meta.dir ?? __dirname, "../package.json");
372
+ const pkgPath = path.resolve(import.meta.dir ?? __dirname, "../../package.json");
307
373
  if (fs.existsSync(pkgPath)) {
308
374
  const pkg = JSON.parse(fs.readFileSync(pkgPath, "utf-8"));
309
375
  if (typeof pkg.name === "string" && pkg.name.trim()) {