akm-cli 0.7.1 → 0.7.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +35 -0
- package/dist/cli.js +62 -16
- package/dist/commands/history.js +2 -7
- package/dist/commands/info.js +2 -2
- package/dist/commands/installed-stashes.js +45 -1
- package/dist/commands/search.js +2 -2
- package/dist/commands/show.js +4 -19
- package/dist/commands/source-add.js +1 -1
- package/dist/core/common.js +16 -1
- package/dist/core/config.js +18 -3
- package/dist/indexer/db-search.js +33 -39
- package/dist/indexer/db.js +51 -1
- package/dist/indexer/graph-extraction.js +5 -3
- package/dist/indexer/indexer.js +334 -121
- package/dist/indexer/manifest.js +18 -23
- package/dist/indexer/memory-inference.js +47 -58
- package/dist/indexer/metadata.js +253 -21
- package/dist/indexer/search-source.js +11 -5
- package/dist/llm/client.js +61 -1
- package/dist/llm/embedder.js +8 -5
- package/dist/llm/embedders/local.js +8 -2
- package/dist/llm/embedders/remote.js +4 -2
- package/dist/llm/graph-extract.js +4 -4
- package/dist/llm/memory-infer.js +61 -33
- package/dist/llm/metadata-enhance.js +2 -2
- package/dist/output/cli-hints.js +5 -2
- package/dist/output/renderers.js +22 -49
- package/dist/registry/build-index.js +13 -18
- package/dist/setup/setup.js +238 -96
- package/dist/sources/providers/git.js +14 -2
- package/dist/sources/providers/website.js +4 -460
- package/dist/sources/website-ingest.js +470 -0
- package/dist/wiki/wiki.js +11 -1
- package/dist/workflows/parser.js +19 -4
- package/dist/workflows/runs.js +3 -3
- package/docs/README.md +10 -3
- package/docs/migration/release-notes/0.7.0.md +22 -0
- package/package.json +5 -2
package/CHANGELOG.md
CHANGED

@@ -6,6 +6,41 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/).
 
 ## [Unreleased]
 
+## [0.7.3] - 2026-05-05
+
+### Added
+
+- **`akm index --enrich` opt-in for LLM passes** — index-time enrichment work such as metadata enhancement, memory inference, and graph extraction now runs only when explicitly requested with `--enrich`. Default indexing is faster and no longer surprises operators with LLM-backed work during normal maintenance runs.
+- **Config backup snapshots before writes** — config writes now create AKM cache backups so setup/config flows have a recovery path if a config is overwritten or corrupted during development or testing.
+
+### Changed
+
+- **Setup wizard UX refresh** — `akm setup` now better reflects the real configured state: source prompts are ordered more sensibly, configured and preserved stash information is surfaced, agent defaults can be selected explicitly (including disabled), and post-setup indexing does not implicitly enable enrichment.
+- **CI workflows updated for current GitHub Actions runtimes** — CI, release, and publishing workflows now use current action majors (`checkout@v5`, `cache@v5`, `setup-node@v5`, `upload-artifact@v5`, `download-artifact@v6`) to stay off deprecated Node 20 action runtimes.
+- **Technical investigation notes updated** — the index investigation note now reflects the latest `.stash.json` migration status, current green CI runs, and the narrowed remaining compatibility surface ahead of `v0.8.0`.
+
+### Fixed
+
+- **Embedding-dimension drift on read-only DB opens** — read/telemetry paths no longer mutate the live index schema with the default embedding dimension. `akm info`, search/show parity paths, and related readers now preserve the configured embedding shape instead of downgrading vector tables.
+- **Incremental index churn across multiple source layouts** — incremental indexing is now significantly more stable for filename-less legacy metadata, wiki-root sources, repo-root git stash layouts, non-indexed companion files, and cross-source dedupe cases.
+- **Git source indexing for repo-root stashes** — git-backed sources no longer assume a `<repo>/content` subtree; repo-root stash layouts are indexed correctly and cached mirrors are treated as fresh instead of being needlessly refreshed.
+- **`show` metadata no longer depends on `.stash.json`** — command and skill summary/show metadata now comes from file-local frontmatter and renderer parsing rather than the deprecated disk fallback sidecar.
+- **`.stash.json` no longer drives incremental stale detection** — editing `.stash.json` alone no longer forces directories to rescan during incremental indexing.
+
+### Internal
+
+- **Ranking and scoring fixtures migrated toward file-local metadata** — routine benchmark and regression fixtures now prefer markdown frontmatter or inline script metadata, with `.stash.json` retained only for intentional legacy-compatibility coverage that still exercises explicit-file override behavior.
+- **Production-path ranking regression coverage** — ranking regression tests now build their fixture index through the production indexer rather than a custom `.stash.json` crawler, reducing fixture drift and improving confidence in the real indexing/search path.
+
+### Added
+
+- **One-shot URL ingest for `akm import` and `akm wiki stash`** — both commands now accept a single HTTP/HTTPS URL in addition to file paths and stdin. `akm import <url>` fetches the exact page, converts it to markdown, and writes it into `knowledge/` using a URL-path-derived default name. `akm wiki stash <wiki> <url>` fetches the exact page, converts it to markdown, and writes it into `wikis/<wiki>/raw/`. Neither command registers a persistent website source or crawls linked pages.
+
+### Changed
+
+- **Shared website ingest boundary** — website URL validation, single-page fetch/convert, and website mirror generation now live in a dedicated shared ingest module. The website source provider is a thin adapter, and `akm add`, `akm import`, and `akm wiki stash` all reuse the same core website-ingest path.
+- **`.stash.json` docs deprecation timeline** — the docs now explicitly state that `.stash.json` is deprecated, remains only as a 0.7.x compatibility bridge, and will be removed in v0.8.0 to match the current aggressive pre-release phase-out posture.
+
 ## [0.7.0]
 
 ### Added
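The `--enrich` split is also visible at the programmatic entry point the CLI wires up (see the cli.js diff below). A minimal sketch, assuming the `akmIndex` options shown in that diff; the deep import path and the call sites here are illustrative, not part of the package's documented API:

```js
import { akmIndex } from "akm-cli/dist/indexer/indexer";

// Default maintenance run: scan and index only, no LLM-backed passes.
const plain = await akmIndex({ full: false, enrich: false });

// Opt-in enrichment: metadata enhancement, memory inference, graph extraction.
const enriched = await akmIndex({ full: true, enrich: true });

console.log(plain.totalEntries, enriched.totalEntries);
```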
package/dist/cli.js
CHANGED

@@ -1,6 +1,7 @@
 #!/usr/bin/env bun
 import fs from "node:fs";
 import path from "node:path";
+import * as p from "@clack/prompts";
 import { defineCommand, runMain } from "citty";
 import { generateBashCompletions, installBashCompletions } from "./commands/completions";
 import { getConfigValue, listConfig, setConfigValue, unsetConfigValue } from "./commands/config-cli";
@@ -25,14 +26,14 @@ import { akmClone } from "./commands/source-clone";
 import { addStash } from "./commands/source-manage";
 import { parseAssetRef } from "./core/asset-ref";
 import { deriveCanonicalAssetName, resolveAssetPathFromName } from "./core/asset-spec";
-import { isWithin, resolveStashDir, tryReadStdinText } from "./core/common";
+import { isHttpUrl, isWithin, resolveStashDir, tryReadStdinText } from "./core/common";
 import { DEFAULT_CONFIG, getConfigPath, loadConfig, loadUserConfig, saveConfig } from "./core/config";
 import { ConfigError, NotFoundError, UsageError } from "./core/errors";
 import { appendEvent } from "./core/events";
 import { getCacheDir, getDbPath, getDefaultStashDir } from "./core/paths";
 import { setQuiet, setVerbose, warn } from "./core/warn";
 import { resolveWriteTarget, writeAssetToSource } from "./core/write-source";
-import { closeDatabase, findEntryIdByRef,
+import { closeDatabase, findEntryIdByRef, openExistingDatabase } from "./indexer/db";
 import { akmIndex } from "./indexer/indexer";
 import { resolveSourceEntries } from "./indexer/search-source";
 import { insertUsageEvent } from "./indexer/usage-events";
@@ -44,6 +45,7 @@ import { buildRegistryIndex, writeRegistryIndex } from "./registry/build-index";
 import { resolveSourcesForOrigin } from "./registry/origin-resolve";
 import { saveGitStash } from "./sources/providers/git";
 import { resolveAssetPath } from "./sources/resolve";
+import { fetchWebsiteMarkdownSnapshot } from "./sources/website-ingest";
 import { pkgVersion } from "./version";
 import { createWorkflowAsset, formatWorkflowErrors, getWorkflowTemplate, validateWorkflowSource, } from "./workflows/authoring";
 import { hasWorkflowSubcommand, parseWorkflowJsonObject, parseWorkflowStepState, WORKFLOW_STEP_STATES, } from "./workflows/cli";
@@ -138,15 +140,53 @@ const indexCommand = defineCommand({
 meta: { name: "index", description: "Build search index (incremental by default; --full forces full reindex)" },
 args: {
 full: { type: "boolean", description: "Force full reindex", default: false },
-
+enrich: { type: "boolean", description: "Enable LLM inference and enrichment passes", default: false },
+verbose: { type: "boolean", description: "Print phase-by-phase indexing progress to stderr", default: false },
 },
 async run({ args }) {
 await runWithJsonErrors(async () => {
-const
-
-
-
-
+const outputMode = getOutputMode();
+const controller = new AbortController();
+const abort = () => controller.abort(new Error("index interrupted"));
+process.once("SIGINT", abort);
+process.once("SIGTERM", abort);
+const spin = !args.verbose && outputMode.format === "text" ? p.spinner() : null;
+if (spin) {
+spin.start(`Building search index${args.full ? " (full rebuild)" : ""}...`);
+}
+let latestMessage = "";
+try {
+const result = await akmIndex({
+full: args.full,
+enrich: args.enrich,
+onProgress: ({ message, processed, total }) => {
+latestMessage = message;
+const progressPrefix = processed !== undefined && total !== undefined ? `[${processed}/${total}] ` : "";
+if (args.verbose) {
+console.error(`[index] ${progressPrefix}${message}`);
+}
+else if (spin) {
+spin.stop(`${progressPrefix}${message}`);
+spin.start(`${progressPrefix}${message}`);
+}
+},
+signal: controller.signal,
+});
+if (spin) {
+spin.stop(`Indexed ${result.totalEntries} assets.`);
+}
+output("index", result);
+}
+catch (error) {
+if (spin) {
+spin.stop(latestMessage ? `Indexing failed after: ${latestMessage}` : "Indexing failed.");
+}
+throw error;
+}
+finally {
+process.off("SIGINT", abort);
+process.off("SIGTERM", abort);
+}
 });
 },
 });
@@ -939,7 +979,7 @@ const feedbackCommand = defineCommand({
 }
 const signal = args.positive ? "positive" : "negative";
 const metadata = args.note ? JSON.stringify({ note: args.note }) : undefined;
-const db =
+const db = openExistingDatabase();
 try {
 const entryId = findEntryIdByRef(db, ref);
 if (entryId === undefined) {
@@ -1057,6 +1097,12 @@ function readKnowledgeContent(source) {
 preferredName: path.basename(resolvedSource, path.extname(resolvedSource)),
 };
 }
+async function readKnowledgeInput(source) {
+if (!isHttpUrl(source))
+return readKnowledgeContent(source);
+const snapshot = await fetchWebsiteMarkdownSnapshot(source);
+return { content: snapshot.content, preferredName: snapshot.preferredName };
+}
 async function writeMarkdownAsset(options) {
 // Resolve write target via the v1 precedence chain (`--target` →
 // `defaultWriteTarget` → working stash). Per spec §10 step 5, this is the
@@ -1609,12 +1655,12 @@ function wasRememberFlagValueConsumedAsContent(content, flagValue, flagName) {
 const importKnowledgeCommand = defineCommand({
 meta: {
 name: "import",
-description: "Import a knowledge document into the default stash",
+description: "Import a knowledge document or URL into the default stash",
 },
 args: {
 source: {
 type: "positional",
-description: 'Source file path, or "-" to read from stdin',
+description: 'Source file path, URL, or "-" to read from stdin',
 required: true,
 },
 name: {
@@ -1633,11 +1679,11 @@ const importKnowledgeCommand = defineCommand({
 },
 async run({ args }) {
 return runWithJsonErrors(async () => {
-const { content, preferredName } =
+const { content, preferredName } = await readKnowledgeInput(args.source);
 const result = await writeMarkdownAsset({
 type: "knowledge",
 content,
-name: args.name,
+name: args.name ?? (isHttpUrl(args.source) ? preferredName : undefined),
 fallbackPrefix: "knowledge",
 preferredName,
 force: args.force,
@@ -2227,17 +2273,17 @@ const wikiSearchCommand = defineCommand({
 const wikiStashCommand = defineCommand({
 meta: {
 name: "stash",
-description: "Copy a source into wikis/<name>/raw/<slug>.md with frontmatter. Source may be a file path or '-' for stdin.",
+description: "Copy a source into wikis/<name>/raw/<slug>.md with frontmatter. Source may be a file path, URL, or '-' for stdin.",
 },
 args: {
 name: { type: "positional", description: "Wiki name", required: true },
-source: { type: "positional", description: "Source file path, or '-' to read from stdin", required: true },
+source: { type: "positional", description: "Source file path, URL, or '-' to read from stdin", required: true },
 as: { type: "string", description: "Preferred slug base (defaults to source filename or first-line slug)" },
 },
 run({ args }) {
 return runWithJsonErrors(async () => {
 const { stashRaw } = await import("./wiki/wiki.js");
-const { content, preferredName } =
+const { content, preferredName } = await readKnowledgeInput(args.source);
 const stashDir = resolveStashDir();
 const result = stashRaw({
 stashDir,
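The new index command installs one-shot SIGINT/SIGTERM handlers that abort an `AbortController` whose signal is threaded into `akmIndex`. A minimal standalone sketch of that interrupt pattern (the `doWork` callback is hypothetical; the real command also drives a spinner and progress output):

```js
// Run a long task that accepts an AbortSignal, aborting it on Ctrl-C or SIGTERM.
async function runInterruptible(doWork) {
  const controller = new AbortController();
  const abort = () => controller.abort(new Error("index interrupted"));
  process.once("SIGINT", abort);
  process.once("SIGTERM", abort);
  try {
    return await doWork(controller.signal);
  } finally {
    // Remove the one-shot handlers so later work in the same process is unaffected.
    process.off("SIGINT", abort);
    process.off("SIGTERM", abort);
  }
}

// Usage: the worker checks or propagates the signal (e.g. via fetch(..., { signal })).
await runInterruptible(async (signal) => {
  if (signal.aborted) throw signal.reason;
  /* ... long-running indexing ... */
});
```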
package/dist/commands/history.js
CHANGED

@@ -16,8 +16,7 @@
 import { parseAssetRef } from "../core/asset-ref";
 import { UsageError } from "../core/errors";
 import { readEvents } from "../core/events";
-import { closeDatabase,
-import { ensureUsageEventsSchema } from "../indexer/usage-events";
+import { closeDatabase, openExistingDatabase } from "../indexer/db";
 // Proposal lifecycle event types emitted by the proposal substrate (#225).
 const PROPOSAL_EVENT_TYPES = new Set(["promoted", "rejected"]);
 // ── Helpers ──────────────────────────────────────────────────────────────────
@@ -106,13 +105,9 @@ export async function akmHistory(options = {}) {
 normalizedRef = trimmed;
 }
 const sinceNormalized = options.since !== undefined ? normalizeSince(options.since) : undefined;
-const db = options.db ??
+const db = options.db ?? openExistingDatabase();
 const ownsDb = options.db === undefined;
 try {
-// The schema is normally created during `akm index`; ensure it exists so
-// `akm history` works on a freshly-initialised stash that has never been
-// indexed (and just returns an empty list rather than an error).
-ensureUsageEventsSchema(db);
 const conditions = [];
 const params = [];
 if (normalizedRef !== undefined) {
package/dist/commands/info.js
CHANGED

@@ -2,7 +2,7 @@ import fs from "node:fs";
 import { getAssetTypes } from "../core/asset-spec";
 import { loadConfig } from "../core/config";
 import { getDbPath } from "../core/paths";
-import { closeDatabase, getEntryCount, getMeta, isVecAvailable,
+import { closeDatabase, getEntryCount, getMeta, isVecAvailable, openExistingDatabase } from "../indexer/db";
 import { getEffectiveSemanticStatus, readSemanticStatus } from "../indexer/semantic-status";
 import { pkgVersion } from "../version";
 /**
@@ -74,7 +74,7 @@ function readIndexStats(dbPath) {
 }
 let db;
 try {
-db =
+db = openExistingDatabase(resolvedPath);
 const entryCount = getEntryCount(db);
 const lastBuiltAt = getMeta(db, "builtAt") ?? null;
 const vecAvailable = isVecAvailable(db);
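The `openExistingDatabase` swap in readers like this one is what fixes the embedding-dimension drift noted in the changelog: read paths no longer open the index with a default embedding dimension that could rewrite vector tables. A sketch of the corrected read path, assuming the helper names shown in this diff and an illustrative deep import path:

```js
import { closeDatabase, getEntryCount, openExistingDatabase } from "akm-cli/dist/indexer/db";

function readIndexStats(dbPath) {
  // Before (0.7.1): the read path opened with a write-capable helper and a default
  // embedding dimension, which could downgrade vector tables as a side effect.
  // After (0.7.3): open the existing schema as-is and only read from it.
  const db = openExistingDatabase(dbPath);
  try {
    return { entryCount: getEntryCount(db) };
  } finally {
    closeDatabase(db);
  }
}
```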
package/dist/commands/installed-stashes.js
CHANGED

@@ -12,8 +12,9 @@ import { NotFoundError, UsageError } from "../core/errors";
 import { akmIndex } from "../indexer/indexer";
 import { removeLockEntry, upsertLockEntry } from "../integrations/lockfile";
 import { parseRegistryRef } from "../registry/resolve";
+import { parseGitRepoUrl, syncMirroredRepo } from "../sources/providers/git";
 import { syncFromRef } from "../sources/providers/sync-from-ref";
-import { ensureWebsiteMirror } from "../sources/
+import { ensureWebsiteMirror } from "../sources/website-ingest";
 import { listWikis, resolveWikisRoot } from "../wiki/wiki";
 import { auditInstallCandidate, deriveRegistryLabels, enforceRegistryInstallPolicy, formatInstallAuditFailure, } from "./install-audit";
 import { removeInstalledRegistryEntry, upsertInstalledRegistryEntry } from "./source-add";
@@ -173,6 +174,49 @@ export async function akmUpdate(input) {
 const stashes = config.sources ?? config.stashes ?? [];
 const isUrl = target.startsWith("http://") || target.startsWith("https://");
 const resolvedPath = !isUrl ? path.resolve(target) : undefined;
+const gitMatch = stashes.find((s) => {
+if (s.type !== "git")
+return false;
+if (isUrl && s.url === target)
+return true;
+if (resolvedPath && s.path && path.resolve(s.path) === resolvedPath)
+return true;
+if (s.name === target)
+return true;
+if (s.url) {
+try {
+const repo = parseGitRepoUrl(s.url);
+if (repo.canonicalUrl === target)
+return true;
+}
+catch {
+// Ignore malformed config here; later provider sync will surface it.
+}
+}
+return false;
+});
+if (gitMatch) {
+await syncMirroredRepo(gitMatch, { force: true, writable: gitMatch.writable === true });
+const index = await akmIndex({ stashDir, full: true });
+const updatedConfig = loadConfig();
+return {
+schemaVersion: 1,
+stashDir,
+target,
+all,
+processed: [],
+config: {
+sourceCount: (updatedConfig.sources ?? updatedConfig.stashes ?? []).length,
+installedKitCount: updatedConfig.installed?.length ?? 0,
+},
+index: {
+mode: index.mode,
+totalEntries: index.totalEntries,
+directoriesScanned: index.directoriesScanned,
+directoriesSkipped: index.directoriesSkipped,
+},
+};
+}
 const websiteMatch = stashes.find((s) => {
 if (s.type !== "website")
 return false;
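The new update branch resolves a git source by exact configured URL, resolved local path, configured name, or canonicalized repo URL before forcing a mirror sync and a full reindex. A condensed sketch of that matching order (the stash shape and `parseGitRepoUrl().canonicalUrl` come from the hunk above; the standalone function and sample wiring are illustrative):

```js
import path from "node:path";

// parseGitRepoUrl is the package's own helper; passing it in keeps this sketch self-contained.
function findGitSource(stashes, target, parseGitRepoUrl) {
  const isUrl = target.startsWith("http://") || target.startsWith("https://");
  const resolvedPath = !isUrl ? path.resolve(target) : undefined;
  return stashes.find((s) => {
    if (s.type !== "git") return false;
    if (isUrl && s.url === target) return true;                                       // exact configured URL
    if (resolvedPath && s.path && path.resolve(s.path) === resolvedPath) return true; // local mirror path
    if (s.name === target) return true;                                               // configured source name
    try {
      return Boolean(s.url) && parseGitRepoUrl(s.url).canonicalUrl === target;        // canonicalized URL
    } catch {
      return false; // malformed config is reported later by the provider sync
    }
  });
}
```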
package/dist/commands/search.js
CHANGED

@@ -11,7 +11,7 @@
 import { loadConfig } from "../core/config";
 import { UsageError } from "../core/errors";
 import { appendEvent } from "../core/events";
-import { closeDatabase,
+import { closeDatabase, openExistingDatabase } from "../indexer/db";
 import { searchLocal } from "../indexer/db-search";
 import { resolveSourceEntries } from "../indexer/search-source";
 // Eagerly import source providers to trigger self-registration before the
@@ -169,7 +169,7 @@ function logSearchEvent(query, response, existingDb) {
 metadata: { query, hitCount: stashHits.length, resultRefs: stashHits.map((h) => h.ref) },
 });
 try {
-const db = existingDb ??
+const db = existingDb ?? openExistingDatabase();
 try {
 const resolved = resolveEntryIds(db, stashHits.slice(0, 50));
 for (const { entryId, ref } of resolved) {
package/dist/commands/show.js
CHANGED

@@ -26,10 +26,9 @@ import { loadConfig } from "../core/config";
 import { NotFoundError, UsageError } from "../core/errors";
 import { appendEvent, readEvents } from "../core/events";
 import { parseFrontmatter, toStringOrUndefined } from "../core/frontmatter";
-import { closeDatabase, findEntryIdByRef,
+import { closeDatabase, findEntryIdByRef, openExistingDatabase } from "../indexer/db";
 import { buildFileContext, buildRenderContext, getRenderer, runMatchers } from "../indexer/file-context";
 import { lookup } from "../indexer/indexer";
-import { loadStashFile } from "../indexer/metadata";
 import { buildEditHint, findSourceForPath, isEditable, resolveSourceEntries } from "../indexer/search-source";
 import { insertUsageEvent } from "../indexer/usage-events";
 import { resolveSourcesForOrigin } from "../registry/origin-resolve";
@@ -203,7 +202,7 @@ function logShowEvent(ref, existingDb) {
 const parsed = parseAssetRef(ref);
 appendEvent({ eventType: "show", ref, metadata: { type: parsed.type, name: parsed.name } });
 try {
-const db = existingDb ??
+const db = existingDb ?? openExistingDatabase();
 try {
 insertUsageEvent(db, {
 event_type: "show",
@@ -369,33 +368,19 @@ function buildBriefResponse(full, assetPath) {
 *
 * Strips content/template/prompt and returns only metadata fields:
 * type, name, path, description, tags, parameters, action.
-* Enriches description and tags from
+* Enriches description and tags from rendered content when available.
 *
 * The resulting JSON should be under 200 tokens.
 */
 function buildSummaryResponse(full, assetPath) {
 let description = full.description;
-
+const tags = full.tags;
 if (assetPath) {
 const textContent = full.content ?? full.template ?? full.prompt;
 if (textContent && !description) {
 const parsed = parseFrontmatter(textContent);
 description = toStringOrUndefined(parsed.data.description);
 }
-const dir = path.dirname(assetPath);
-const stashFile = loadStashFile(dir);
-if (stashFile) {
-const fileName = path.basename(assetPath);
-const entry = stashFile.entries.find((e) => e.filename === fileName);
-if (entry) {
-if (!description && entry.description) {
-description = entry.description;
-}
-if (!tags && entry.tags) {
-tags = entry.tags;
-}
-}
-}
 }
 const summary = {
 type: full.type,
package/dist/commands/source-add.js
CHANGED

@@ -9,7 +9,7 @@ import { upsertLockEntry } from "../integrations/lockfile";
 import { parseRegistryRef } from "../registry/resolve";
 import { detectStashRoot } from "../sources/providers/provider-utils";
 import { syncFromRef } from "../sources/providers/sync-from-ref";
-import { ensureWebsiteMirror, validateWebsiteInputUrl } from "../sources/
+import { ensureWebsiteMirror, validateWebsiteInputUrl } from "../sources/website-ingest";
 import { ensureWikiNameAvailable, validateWikiName } from "../wiki/wiki";
 import { auditInstallCandidate, deriveRegistryLabels, enforceRegistryInstallPolicy, formatInstallAuditFailure, } from "./install-audit";
 const VALID_OVERRIDE_TYPES = new Set(["wiki"]);
package/dist/core/common.js
CHANGED

@@ -146,19 +146,34 @@ function normalizeFsPathForComparison(value) {
 * Fetch with an AbortController timeout.
 * Defaults to 30 seconds if no timeout is specified.
 */
-export async function fetchWithTimeout(url, opts, timeoutMs = 30_000) {
+export async function fetchWithTimeout(url, opts, timeoutMs = 30_000, signal) {
 const controller = new AbortController();
 const timer = setTimeout(() => controller.abort(), timeoutMs);
+const abortExternal = () => controller.abort(signal?.reason);
+if (signal) {
+if (signal.aborted) {
+clearTimeout(timer);
+controller.abort(signal.reason);
+}
+else {
+signal.addEventListener("abort", abortExternal, { once: true });
+}
+}
 try {
 return await fetch(url, { ...opts, signal: controller.signal });
 }
 catch (err) {
 if (err instanceof DOMException && err.name === "AbortError") {
+if (signal?.aborted) {
+throw new Error(`Request aborted: ${url}`);
+}
 throw new Error(`Request timed out after ${timeoutMs}ms: ${url}`);
 }
 throw err;
 }
 finally {
+if (signal)
+signal.removeEventListener("abort", abortExternal);
 clearTimeout(timer);
 }
 }
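A usage sketch for the widened signature above: the caller's AbortSignal is linked to the internal timeout controller, so either a timeout or an external abort cancels the request. The deep import path is an assumption about the published layout, and the URL and timeout values are illustrative:

```js
import { fetchWithTimeout } from "akm-cli/dist/core/common";

const controller = new AbortController();
// Simulate a user cancelling after five seconds.
setTimeout(() => controller.abort(new Error("user cancelled")), 5_000);

try {
  const res = await fetchWithTimeout("https://example.com/page", {}, 30_000, controller.signal);
  console.log(res.status);
} catch (err) {
  // "Request aborted: <url>" when the external signal fired,
  // "Request timed out after <ms>: <url>" when the internal timer fired.
  console.error(String(err));
}
```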
package/dist/core/config.js
CHANGED

@@ -4,14 +4,14 @@ import path from "node:path";
 import { parseAgentConfig } from "../integrations/agent/config";
 import { filterNonEmptyStrings } from "./common";
 import { ConfigError } from "./errors";
-import { getConfigDir as _getConfigDir, getConfigPath as _getConfigPath } from "./paths";
+import { getConfigDir as _getConfigDir, getConfigPath as _getConfigPath, getCacheDir } from "./paths";
 import { warn } from "./warn";
 // ── Defaults ────────────────────────────────────────────────────────────────
 export const DEFAULT_CONFIG = {
 semanticSearchMode: "auto",
 registries: [
-{ url: "https://raw.githubusercontent.com/itlackey/akm-registry/main/index.json", name: "
-{ url: "https://skills.sh", name: "skills.sh", provider: "skills-sh" },
+{ url: "https://raw.githubusercontent.com/itlackey/akm-registry/main/index.json", name: "akm-registry" },
+{ url: "https://skills.sh", name: "skills.sh", provider: "skills-sh", enabled: false },
 ],
 output: {
 format: "json",
@@ -108,9 +108,21 @@ export function saveConfig(config) {
 const configPath = getConfigPath();
 const dir = path.dirname(configPath);
 fs.mkdirSync(dir, { recursive: true });
+backupExistingConfig(configPath);
 const sanitized = sanitizeConfigForWrite(config);
 writeConfigObject(configPath, sanitized);
 }
+function backupExistingConfig(configPath) {
+if (!fs.existsSync(configPath))
+return;
+const backupDir = path.join(getCacheDir(), "config-backups");
+fs.mkdirSync(backupDir, { recursive: true });
+const timestamp = new Date().toISOString().replace(/[.:]/g, "-");
+const backupPath = path.join(backupDir, `config-${timestamp}.json`);
+fs.copyFileSync(configPath, backupPath);
+const latestPath = path.join(backupDir, "config.latest.json");
+fs.copyFileSync(configPath, latestPath);
+}
 /**
 * Strip apiKey fields before writing config to disk.
 * API keys should be provided via environment variables
@@ -509,6 +521,9 @@ function parseLlmConfig(value) {
 if (Object.keys(features).length > 0)
 result.features = features;
 }
+if (typeof obj.extraParams === "object" && obj.extraParams !== null && !Array.isArray(obj.extraParams)) {
+result.extraParams = obj.extraParams;
+}
 return result;
 }
 /**
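The backup naming used by `backupExistingConfig` above is an ISO timestamp with ":" and "." replaced so it is a portable filename, plus a rolling "latest" copy. A small sketch of the resulting paths (the cache directory here is illustrative; the real code resolves it from `getCacheDir()`):

```js
import path from "node:path";

const cacheDir = "/tmp/akm-cache"; // illustrative stand-in for getCacheDir()
const backupDir = path.join(cacheDir, "config-backups");
const timestamp = new Date().toISOString().replace(/[.:]/g, "-");

console.log(path.join(backupDir, `config-${timestamp}.json`)); // e.g. config-2026-05-05T12-00-00-000Z.json
console.log(path.join(backupDir, "config.latest.json"));       // always the most recent pre-write snapshot
```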
package/dist/indexer/db-search.js
CHANGED

@@ -17,7 +17,7 @@ import { defaultRendererRegistry } from "../core/asset-registry";
 import { deriveCanonicalAssetNameFromStashRoot } from "../core/asset-spec";
 import { getDbPath } from "../core/paths";
 import { warn } from "../core/warn";
-import { closeDatabase, getAllEntries, getEntryById, getEntryCount, getMeta, getUtilityScoresByIds,
+import { closeDatabase, getAllEntries, getEntryById, getEntryCount, getMeta, getUtilityScoresByIds, openExistingDatabase, sanitizeFtsQuery, searchFts, searchVec, } from "./db";
 import { getRenderer } from "./file-context";
 import { computeGraphBoost, loadGraphBoostContext } from "./graph-boost";
 import { generateMetadataFlat, isProposedQuality, loadStashFile, shouldIndexStashFile, } from "./metadata";
@@ -69,8 +69,7 @@ export async function searchLocal(input) {
 const dbPath = getDbPath();
 try {
 if (fs.existsSync(dbPath)) {
-const
-const db = openDatabase(dbPath, embeddingDim ? { embeddingDim } : undefined);
+const db = openExistingDatabase(dbPath);
 try {
 const entryCount = getEntryCount(db);
 const storedStashDir = getMeta(db, "stashDir");
@@ -269,7 +268,10 @@ async function searchDatabase(db, query, searchType, limit, stashDir, allSourceD
 // If the query IS the asset name (or very close), this is almost certainly
 // what the user wants. This is the single most important ranking signal.
 const nameLower = entry.name.toLowerCase();
-const
+const rawNameBase = nameLower.split("/").pop() ?? nameLower; // last segment for path-based names
+const nameBase = entry.type === "memory" && rawNameBase.endsWith(".derived")
+? rawNameBase.slice(0, -".derived".length)
+: rawNameBase;
 if (nameBase === queryLower || nameLower === queryLower) {
 // Exact match: massive boost
 boostSum += 2.0;
@@ -301,6 +303,18 @@ async function searchDatabase(db, query, searchType, limit, stashDir, allSourceD
 knowledge: 0,
 };
 boostSum += TYPE_BOOST[entry.type] ?? 0;
+// ── 2.5. Derived-vs-raw memory preference ──
+// Raw memories are user notes and may be incomplete or unvetted. Compressed
+// `.derived` memories are the higher-signal retrieval target, but the
+// preference should stay modest so stronger relevance signals still dominate.
+if (entry.type === "memory") {
+if (entry.name.toLowerCase().endsWith(".derived")) {
+boostSum += 0.18;
+}
+else {
+boostSum -= 0.08;
+}
+}
 // ── 3. Tag exact match ──
 // Exact tag equality is a strong signal — the author explicitly tagged
 // this asset with the user's search term.
@@ -691,44 +705,20 @@ async function indexAssets(stashDir, type, sources) {
 dirGroups.set(ctx.parentDirAbs, [ctx.absPath]);
 }
 for (const [dirPath, files] of dirGroups) {
-
-
-
-
-
-
-
-
-}
-}
-}
-else {
-const generated = await generateMetadataFlat(stashDir, files);
-if (generated.entries.length === 0)
-continue;
-stash = generated;
-}
-// Build a lookup for matching filename-less entries to actual files
-const fileBasenameMap = new Map();
-for (const file of files) {
-const base = path.basename(file, path.extname(file));
-if (!fileBasenameMap.has(base))
-fileBasenameMap.set(base, file);
-}
+const generated = await generateMetadataFlat(stashDir, files);
+const legacyOverrides = loadStashFile(dirPath, { requireFilename: true });
+const mergedEntries = legacyOverrides
+? generated.entries.map((entry) => mergeLegacyEntry(entry, legacyOverrides.entries))
+: generated.entries;
+const stash = mergedEntries.length > 0 ? { entries: mergedEntries } : legacyOverrides;
+if (!stash || stash.entries.length === 0)
+continue;
 for (const entry of stash.entries) {
 if (filterType && entry.type !== filterType)
 continue;
-
-
-
-}
-else {
-// Try matching entry name to a file by basename
-entryPath =
-fileBasenameMap.get(entry.name) ??
-fileBasenameMap.get(entry.name.split("/").pop() ?? "") ??
-(files[0] || dirPath);
-}
+if (!entry.filename)
+continue;
+const entryPath = path.join(dirPath, entry.filename);
 if (!shouldIndexStashFile(stashDir, entryPath))
 continue;
 assets.push({ entry, path: entryPath });
@@ -736,6 +726,10 @@ async function indexAssets(stashDir, type, sources) {
 }
 return assets;
 }
+function mergeLegacyEntry(entry, legacyEntries) {
+const legacy = legacyEntries.find((candidate) => candidate.filename === entry.filename);
+return legacy ? { ...entry, ...legacy, filename: entry.filename } : entry;
+}
 async function indexWikiRootAssets(wikiRoot, wikiName, type) {
 if (type !== "any" && type !== "wiki")
 return [];