wicked-brain 0.11.0 → 0.12.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/install.mjs CHANGED
@@ -11,8 +11,44 @@ const __dirname = fileURLToPath(new URL(".", import.meta.url));
11
11
  const skillsSource = join(__dirname, "skills");
12
12
  const home = homedir();
13
13
 
14
+ // Claude-root candidate builder. Claude Code's config root is redirectable
15
+ // via $CLAUDE_CONFIG_DIR (multi-tenant setups, alt-config layouts, corporate
16
+ // home-dir overrides). Mirrors the 0.3.3 wicked-testing fix: env var is
17
+ // authoritative when set; otherwise we probe ~/.claude + common alt-config
18
+ // paths and install into each that carries Claude identity markers.
19
+ function buildClaudeTarget(rootDir, source, { trusted = false } = {}) {
20
+ return {
21
+ name: "claude",
22
+ rootDir,
23
+ dir: join(rootDir, "skills"),
24
+ agentDir: join(rootDir, "agents"),
25
+ agentSubdir: "agents",
26
+ platform: "claude",
27
+ identityMarkers: ["settings.json", "plugins", "projects"],
28
+ source,
29
+ trusted,
30
+ };
31
+ }
32
+
33
+ function resolveClaudeCandidates() {
34
+ const envDir = process.env.CLAUDE_CONFIG_DIR;
35
+ if (envDir && typeof envDir === "string" && envDir.trim()) {
36
+ // Function replacement avoids `$&` etc. being interpreted as special
37
+ // replacement patterns if $HOME contains those literals (pathological but
38
+ // cheap to defend against — flagged by gemini on the sibling PRs).
39
+ const root = resolve(envDir.trim().replace(/^~/, () => home));
40
+ return [buildClaudeTarget(root, "env:CLAUDE_CONFIG_DIR", { trusted: true })];
41
+ }
42
+ return [
43
+ buildClaudeTarget(join(home, ".claude"), "default"),
44
+ buildClaudeTarget(join(home, "alt-configs", ".claude"), "alt-configs"),
45
+ buildClaudeTarget(join(home, ".config", "claude"), "xdg"),
46
+ ];
47
+ }
48
+
49
+ // Canonical non-claude targets. Claude is expanded dynamically via
50
+ // resolveClaudeCandidates() below so CLI_TARGETS stays a flat spec.
14
51
  const CLI_TARGETS = [
15
- { name: "claude", dir: join(home, ".claude", "skills"), agentDir: join(home, ".claude", "agents"), agentSubdir: "agents", platform: "claude" },
16
52
  { name: "gemini", dir: join(home, ".gemini", "skills"), agentDir: join(home, ".gemini", "agents"), agentSubdir: "agents", platform: "gemini" },
17
53
  { name: "copilot", dir: join(home, ".github", "skills"), agentDir: join(home, ".github", "agents"), agentSubdir: "agents", platform: "copilot" },
18
54
  { name: "codex", dir: join(home, ".codex", "skills"), agentDir: join(home, ".codex", "agents"), agentSubdir: "agents", platform: "codex" },
@@ -21,22 +57,61 @@ const CLI_TARGETS = [
21
57
  { name: "antigravity", dir: join(home, ".antigravity", "skills"), agentDir: join(home, ".antigravity", "rules"), agentSubdir: "rules", platform: "antigravity" },
22
58
  ];
23
59
 
60
+ // Identity-marker gate for claude candidates. Without this, probing
61
+ // ~/.claude, ~/alt-configs/.claude, and ~/.config/claude would install
62
+ // into every path that happens to exist — risky if one was created by a
63
+ // different tool. Env-var / --path targets are `trusted` and skip this.
64
+ function claudeHasIdentityMarker(target) {
65
+ if (target.trusted) return true;
66
+ if (!existsSync(target.rootDir)) return false;
67
+ return (target.identityMarkers || []).some(m => existsSync(join(target.rootDir, m)));
68
+ }
69
+
24
70
  console.log("wicked-brain installer\n");
25
71
 
26
72
  const args = argv.slice(2);
27
- const argValue = (a) => a.split("=")[1];
28
- const cliArg = args.find((a) => a.startsWith("--cli="));
29
- const pathArg = args.find((a) => a.startsWith("--path="));
73
+
74
+ // Flag parser supporting both forms:
75
+ // --flag=value (canonical)
76
+ // --flag value (common shell muscle-memory; previously silently
77
+ // dropped the value and fell through to default
78
+ // detection — same bug that hit wicked-testing 0.3.2).
79
+ // Narrow string-boolean coercion: literal "true" / "false" become
80
+ // booleans so `--hooks=false` doesn't install hooks.
81
+ const flagValue = (name) => {
82
+ const f = args.find(a => a === `--${name}` || a.startsWith(`--${name}=`));
83
+ if (!f) return null;
84
+ let val;
85
+ if (f.includes("=")) {
86
+ // slice from the first '=' forward — split("=")[1] would truncate at
87
+ // the second '=' (e.g. --path=/volumes/build=artifacts would silently
88
+ // drop "=artifacts").
89
+ val = f.slice(f.indexOf("=") + 1);
90
+ } else {
91
+ const idx = args.indexOf(f);
92
+ const next = args[idx + 1];
93
+ val = (next && !next.startsWith("-")) ? next : true;
94
+ }
95
+ if (val === "false") return false;
96
+ if (val === "true") return true;
97
+ return val;
98
+ };
99
+
100
+ const cliArg = flagValue("cli");
101
+ const pathArg = flagValue("path");
102
+
103
+ // Validate --cli upfront — if the user passed a bare --cli or --cli=,
104
+ // they misspoke and we should not silently fall through to "install
105
+ // everywhere". Applies regardless of whether --path is also set.
106
+ if (cliArg === true || cliArg === "") {
107
+ console.error("Error: --cli requires a value (e.g. --cli=claude or --cli claude)");
108
+ process.exit(1);
109
+ }
30
110
 
31
111
  let targets;
32
112
 
33
- if (pathArg) {
34
- const rawPath = argValue(pathArg);
35
- if (!rawPath) {
36
- console.error("Error: --path requires a value (e.g. --path=~/.claude)");
37
- process.exit(1);
38
- }
39
- const customPath = resolve(rawPath.replace(/^~/, home));
113
+ if (pathArg && typeof pathArg === "string" && pathArg !== "") {
114
+ const customPath = resolve(pathArg.replace(/^~/, () => home));
40
115
  // Strip leading dot to match CLI_TARGETS names (e.g. ".claude" → "claude")
41
116
  const dirName = basename(customPath).replace(/^\./, "");
42
117
  const knownPlatform = CLI_TARGETS.find((t) => t.name === dirName);
@@ -48,19 +123,33 @@ if (pathArg) {
48
123
  platform: knownPlatform?.platform ?? dirName,
49
124
  }];
50
125
  console.log(`Custom path: ${customPath}\n`);
126
+ } else if (pathArg === true || pathArg === "") {
127
+ console.error("Error: --path requires a value (e.g. --path=~/.claude or --path ~/.claude)");
128
+ process.exit(1);
51
129
  } else {
52
- // Detect which CLIs are installed by checking if parent dir exists
53
- const detected = CLI_TARGETS.filter((t) => existsSync(resolve(t.dir, "..")));
130
+ // Build the detection set: expanded claude candidates (env var OR alt-config
131
+ // probes) + all non-claude targets. Claude candidates pass an identity-marker
132
+ // check so we don't install into a bare ~/.claude that belongs to some other
133
+ // tool. Non-claude targets keep the original parent-dir-exists heuristic.
134
+ const claudeDetected = resolveClaudeCandidates().filter(claudeHasIdentityMarker);
135
+ const otherDetected = CLI_TARGETS.filter((t) => existsSync(resolve(t.dir, "..")));
136
+ const detected = [...claudeDetected, ...otherDetected];
54
137
 
55
138
  if (detected.length === 0) {
56
139
  console.log("No supported AI CLIs detected. Supported: claude, gemini, copilot, codex, cursor, kiro, antigravity");
57
- console.log("Install skills manually by copying the skills/ directory.");
140
+ console.log("Install skills manually by copying the skills/ directory, or set CLAUDE_CONFIG_DIR.");
58
141
  process.exit(1);
59
142
  }
60
143
 
61
- console.log(`Detected CLIs: ${detected.map((d) => d.name).join(", ")}\n`);
144
+ // Annotate claude candidates with their source when more than one was
145
+ // detected, so the log is not ambiguous.
146
+ const claudeCount = claudeDetected.length;
147
+ const label = (d) => d.name === "claude" && claudeCount > 1 && d.source
148
+ ? `${d.name}[${d.source}]`
149
+ : d.name;
150
+ console.log(`Detected CLIs: ${detected.map(label).join(", ")}\n`);
62
151
 
63
- const cliFilter = cliArg ? argValue(cliArg).split(",") : null;
152
+ const cliFilter = (typeof cliArg === "string" && cliArg !== "") ? cliArg.split(",") : null;
64
153
  targets = cliFilter ? detected.filter((d) => cliFilter.includes(d.name)) : detected;
65
154
  }
66
155
 
@@ -106,8 +195,10 @@ for (const target of targets) {
106
195
  console.log(` Installed ${agentCount} agents to ${target.agentDir}`);
107
196
  }
108
197
 
109
- // Optional hook installation (--hooks flag)
110
- const installHooks = args.includes("--hooks");
198
+ // Optional hook installation (--hooks flag). Goes through flagValue so
199
+ // `--hooks=false` correctly disables; bare `--hooks` and `--hooks=true`
200
+ // both enable (flagValue coerces "true"/"false" literals to booleans).
201
+ const installHooks = flagValue("hooks") === true;
111
202
 
112
203
  if (installHooks) {
113
204
  console.log("\nInstalling hooks...");
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "wicked-brain",
3
- "version": "0.11.0",
3
+ "version": "0.12.1",
4
4
  "type": "module",
5
5
  "description": "Digital brain as skills for AI coding CLIs — no vector DB, no embeddings, no infrastructure",
6
6
  "keywords": [
@@ -207,6 +207,17 @@ const actions = {
207
207
  tag_frequency: () => ({ tags: db.tagFrequency() }),
208
208
  search_misses: (p) => ({ misses: db.searchMisses(p) }),
209
209
  wiki_list: (p) => db.wikiList(p),
210
+ verify_wiki: (p = {}) => {
211
+ const result = db.verifyWiki(p);
212
+ emitEvent("wicked.wiki.verified", "brain.wiki", {
213
+ brain_id: brainId,
214
+ total: result.summary.total,
215
+ stale: result.summary.stale,
216
+ orphaned: result.summary.orphaned,
217
+ unverifiable: result.summary.unverifiable,
218
+ });
219
+ return result;
220
+ },
210
221
  // LSP actions
211
222
  "lsp-health": () => lsp.health(),
212
223
  "lsp-symbols": (p) => lsp.symbols(p),
@@ -1,7 +1,32 @@
1
1
  import Database from "better-sqlite3";
2
2
  import { parseWikilinks } from "./wikilinks.mjs";
3
- import { parseFrontmatterBlock } from "./frontmatter.mjs";
3
+ import { parseFrontmatterBlock, extractFrontmatter } from "./frontmatter.mjs";
4
4
  import { statSync } from "node:fs";
5
+ import { createHash } from "node:crypto";
6
+
7
+ /**
8
+ * Parse a source_hashes entry of the form "{chunk_path}: {hash}". Returns
9
+ * null if the shape doesn't match — malformed entries are skipped rather
10
+ * than blocking the whole verify call.
11
+ */
12
+ function parseHashEntry(raw) {
13
+ if (typeof raw !== "string") return null;
14
+ const idx = raw.indexOf(":");
15
+ if (idx < 0) return null;
16
+ const chunkPath = raw.slice(0, idx).trim();
17
+ const hash = raw.slice(idx + 1).trim();
18
+ if (!chunkPath || !hash) return null;
19
+ return { chunkPath, hash };
20
+ }
21
+
22
+ /**
23
+ * First 8 hex chars of SHA-256 over the chunk body (frontmatter stripped).
24
+ * Matches the convention wicked-brain:compile uses when writing source_hashes.
25
+ */
26
+ function chunkBodyHash(content) {
27
+ const { body } = extractFrontmatter(content ?? "");
28
+ return createHash("sha256").update(body).digest("hex").slice(0, 8);
29
+ }
5
30
 
6
31
  /**
7
32
  * Extracts body text from a document, stripping YAML frontmatter.
@@ -258,7 +283,8 @@ export class SqliteSearch {
258
283
  canonical_for TEXT,
259
284
  refs TEXT,
260
285
  translation_of TEXT,
261
- version_of TEXT
286
+ version_of TEXT,
287
+ last_verified_at INTEGER
262
288
  );
263
289
 
264
290
  CREATE TABLE IF NOT EXISTS canonical_ownership (
@@ -402,6 +428,14 @@ export class SqliteSearch {
402
428
  currentVersion = 5;
403
429
  }
404
430
 
431
+ // Migration 6: add last_verified_at for wiki staleness detection
432
+ if (currentVersion < 6) {
433
+ try { this.#db.prepare(`SELECT last_verified_at FROM documents LIMIT 0`).get(); } catch {
434
+ this.#db.exec(`ALTER TABLE documents ADD COLUMN last_verified_at INTEGER`);
435
+ }
436
+ currentVersion = 6;
437
+ }
438
+
405
439
  // Persist the current version
406
440
  this.#db.exec(`DELETE FROM _schema_version`);
407
441
  this.#db.prepare(`INSERT INTO _schema_version (version) VALUES (?)`).run(currentVersion);
@@ -540,7 +574,7 @@ export class SqliteSearch {
540
574
  */
541
575
  getDocument(id) {
542
576
  const row = this.#db
543
- .prepare(`SELECT id, path, content, frontmatter, brain_id, indexed_at, content_hash, canonical_for, refs FROM documents WHERE id = ?`)
577
+ .prepare(`SELECT id, path, content, frontmatter, brain_id, indexed_at, content_hash, canonical_for, refs, last_verified_at FROM documents WHERE id = ?`)
544
578
  .get(id);
545
579
  return this.#hydrateDocumentRow(row);
546
580
  }
@@ -551,7 +585,7 @@ export class SqliteSearch {
551
585
  */
552
586
  getDocumentByPath(path) {
553
587
  const row = this.#db
554
- .prepare(`SELECT id, path, content, frontmatter, brain_id, indexed_at, content_hash, canonical_for, refs FROM documents WHERE path = ? LIMIT 1`)
588
+ .prepare(`SELECT id, path, content, frontmatter, brain_id, indexed_at, content_hash, canonical_for, refs, last_verified_at FROM documents WHERE path = ? LIMIT 1`)
555
589
  .get(path);
556
590
  return this.#hydrateDocumentRow(row);
557
591
  }
@@ -568,6 +602,7 @@ export class SqliteSearch {
568
602
  content_hash: row.content_hash,
569
603
  canonical_for: row.canonical_for ? JSON.parse(row.canonical_for) : [],
570
604
  references: row.refs ? JSON.parse(row.refs) : [],
605
+ last_verified_at: row.last_verified_at ?? null,
571
606
  };
572
607
  }
573
608
 
@@ -1259,6 +1294,102 @@ export class SqliteSearch {
1259
1294
  return { articles };
1260
1295
  }
1261
1296
 
1297
+ /**
1298
+ * Verify wiki articles against the current hashes of the chunks they claim
1299
+ * to synthesize. Returns a per-article classification plus a summary.
1300
+ *
1301
+ * See docs/specs/2026-04-17-wiki-staleness-detection.md for the contract.
1302
+ * Read-only w.r.t. content: the only mutation is stamping
1303
+ * documents.last_verified_at for each article scanned.
1304
+ */
1305
+ verifyWiki({ path = null } = {}) {
1306
+ let rows;
1307
+ if (path) {
1308
+ rows = this.#db.prepare(`
1309
+ SELECT id, path, content, frontmatter
1310
+ FROM documents
1311
+ WHERE path = ? AND path LIKE 'wiki/%'
1312
+ `).all(path);
1313
+ } else {
1314
+ rows = this.#db.prepare(`
1315
+ SELECT id, path, content, frontmatter
1316
+ FROM documents
1317
+ WHERE path LIKE 'wiki/%'
1318
+ ORDER BY path
1319
+ `).all();
1320
+ }
1321
+
1322
+ const summary = { total: 0, fresh: 0, stale: 0, orphaned: 0, unverifiable: 0 };
1323
+ const articles = [];
1324
+ const now = Date.now();
1325
+ const stampVerified = this.#db.prepare(
1326
+ `UPDATE documents SET last_verified_at = ? WHERE id = ?`,
1327
+ );
1328
+
1329
+ for (const row of rows) {
1330
+ summary.total++;
1331
+ const classification = this.#classifyWikiArticle(row);
1332
+ articles.push({
1333
+ path: row.path,
1334
+ status: classification.status,
1335
+ source_count: classification.source_count,
1336
+ matched: classification.matched,
1337
+ mismatched: classification.mismatched,
1338
+ missing: classification.missing,
1339
+ last_verified_at: now,
1340
+ });
1341
+ summary[classification.status]++;
1342
+ stampVerified.run(now, row.id);
1343
+ }
1344
+
1345
+ return { articles, summary };
1346
+ }
1347
+
1348
+ #classifyWikiArticle(row) {
1349
+ const fmBlock = row.frontmatter || SqliteSearch.#extractFrontmatter(row.content) || "";
1350
+ let fmData = {};
1351
+ try { fmData = parseFrontmatterBlock(fmBlock); } catch { fmData = {}; }
1352
+ const hashEntries = Array.isArray(fmData.source_hashes) ? fmData.source_hashes : [];
1353
+ if (hashEntries.length === 0) {
1354
+ return { status: "unverifiable", source_count: 0, matched: 0, mismatched: [], missing: [] };
1355
+ }
1356
+
1357
+ const mismatched = [];
1358
+ const missing = [];
1359
+ let matched = 0;
1360
+
1361
+ for (const entry of hashEntries) {
1362
+ const parsed = parseHashEntry(entry);
1363
+ if (!parsed) continue; // malformed line — skip silently
1364
+ const chunk = this.#db.prepare(
1365
+ `SELECT content FROM documents WHERE path = ?`,
1366
+ ).get(parsed.chunkPath);
1367
+ if (!chunk) {
1368
+ missing.push(parsed.chunkPath);
1369
+ continue;
1370
+ }
1371
+ const currentHash = chunkBodyHash(chunk.content);
1372
+ if (currentHash === parsed.hash) {
1373
+ matched++;
1374
+ } else {
1375
+ mismatched.push(parsed.chunkPath);
1376
+ }
1377
+ }
1378
+
1379
+ const source_count = hashEntries.length;
1380
+ let status;
1381
+ if (mismatched.length > 0) {
1382
+ status = "stale";
1383
+ } else if (missing.length === source_count) {
1384
+ status = "orphaned";
1385
+ } else if (missing.length > 0) {
1386
+ status = "stale";
1387
+ } else {
1388
+ status = "fresh";
1389
+ }
1390
+ return { status, source_count, matched, mismatched, missing };
1391
+ }
1392
+
1262
1393
  /**
1263
1394
  * Parse tags from frontmatter string.
1264
1395
  * Supports space-separated inline, JSON array, and YAML block list formats.
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "wicked-brain-server",
3
- "version": "0.11.0",
3
+ "version": "0.12.1",
4
4
  "type": "module",
5
5
  "description": "SQLite FTS5 search server for wicked-brain digital knowledge bases",
6
6
  "keywords": [
@@ -83,7 +83,21 @@ Depth 0 plus:
83
83
  ```
84
84
  Group results by path prefix (e.g., `chunks/extracted/`, `wiki/`) to show recent activity distribution.
85
85
  - List the top 10 most common tags
86
- - Flag any staleness warnings (sources modified after last ingest)
86
+ - Flag wiki staleness warnings by calling `verify_wiki`:
87
+ ```bash
88
+ curl -s -X POST http://localhost:{port}/api \
89
+ -H "Content-Type: application/json" \
90
+ -d '{"action":"verify_wiki","params":{}}'
91
+ ```
92
+ Report a single staleness summary line, but only when at least one non-fresh count is > 0:
93
+ ```
94
+ ⚠ Wiki staleness: {stale} stale / {orphaned} orphaned / {unverifiable} unverifiable (of {total} articles)
95
+ ```
96
+ `stale` = at least one referenced chunk changed or is missing. `orphaned` =
97
+ every referenced chunk is missing from the index. `unverifiable` = article
98
+ predates `source_hashes` frontmatter. Suggest running `wicked-brain:compile`
99
+ to refresh stale articles.
100
+ If `total == 0` (brain has no wiki yet), skip the line entirely.
87
101
 
88
102
  **Convergence Debt:**
89
103
  Detect chunks that are frequently accessed but have never been compiled into wiki articles: