wicked-brain 0.11.0 → 0.12.0
package/package.json
CHANGED
```diff
@@ -207,6 +207,17 @@ const actions = {
   tag_frequency: () => ({ tags: db.tagFrequency() }),
   search_misses: (p) => ({ misses: db.searchMisses(p) }),
   wiki_list: (p) => db.wikiList(p),
+  verify_wiki: (p = {}) => {
+    const result = db.verifyWiki(p);
+    emitEvent("wicked.wiki.verified", "brain.wiki", {
+      brain_id: brainId,
+      total: result.summary.total,
+      stale: result.summary.stale,
+      orphaned: result.summary.orphaned,
+      unverifiable: result.summary.unverifiable,
+    });
+    return result;
+  },
   // LSP actions
   "lsp-health": () => lsp.health(),
   "lsp-symbols": (p) => lsp.symbols(p),
```
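For orientation, here is a minimal client-side sketch of invoking the new action, assuming the server's `/api` endpoint dispatches on `action` the way the `verify_wiki` docs later in this diff show. The port and the assumption that the endpoint returns the action's result object unwrapped are illustrative, not confirmed by the diff:

```js
// Hypothetical client sketch. Assumes: server on port 3000 (the docs use
// {port} as a placeholder) and /api returning verifyWiki's result directly.
const res = await fetch("http://localhost:3000/api", {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({ action: "verify_wiki", params: {} }),
});
const { summary } = await res.json();
// summary carries the same counters the wicked.wiki.verified event does,
// plus fresh: { total, fresh, stale, orphaned, unverifiable }
```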
```diff
@@ -1,7 +1,32 @@
 import Database from "better-sqlite3";
 import { parseWikilinks } from "./wikilinks.mjs";
-import { parseFrontmatterBlock } from "./frontmatter.mjs";
+import { parseFrontmatterBlock, extractFrontmatter } from "./frontmatter.mjs";
 import { statSync } from "node:fs";
+import { createHash } from "node:crypto";
+
+/**
+ * Parse a source_hashes entry of the form "{chunk_path}: {hash}". Returns
+ * null if the shape doesn't match — malformed entries are skipped rather
+ * than blocking the whole verify call.
+ */
+function parseHashEntry(raw) {
+  if (typeof raw !== "string") return null;
+  const idx = raw.indexOf(":");
+  if (idx < 0) return null;
+  const chunkPath = raw.slice(0, idx).trim();
+  const hash = raw.slice(idx + 1).trim();
+  if (!chunkPath || !hash) return null;
+  return { chunkPath, hash };
+}
+
+/**
+ * First 8 hex chars of SHA-256 over the chunk body (frontmatter stripped).
+ * Matches the convention wicked-brain:compile uses when writing source_hashes.
+ */
+function chunkBodyHash(content) {
+  const { body } = extractFrontmatter(content ?? "");
+  return createHash("sha256").update(body).digest("hex").slice(0, 8);
+}
 
 /**
  * Extracts body text from a document, stripping YAML frontmatter.
```
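To make the hashing convention concrete, a sketch of how a `source_hashes` entry lines up with `chunkBodyHash`. The chunk path and content are hypothetical, and it assumes `extractFrontmatter` returns the input unchanged when no frontmatter block is present:

```js
import { createHash } from "node:crypto";

// Hypothetical chunk with no frontmatter, so body === content here.
const chunkContent = "# Example chunk\n\nSome extracted text.\n";
const hash = createHash("sha256").update(chunkContent).digest("hex").slice(0, 8);

// A compiled wiki article's frontmatter would then carry an entry shaped like:
//   source_hashes:
//     - "chunks/extracted/example.md: <8-char hash>"
const entry = `chunks/extracted/example.md: ${hash}`;
// parseHashEntry(entry) → { chunkPath: "chunks/extracted/example.md", hash }
// verify_wiki marks the article stale as soon as the stored hash and the
// recomputed hash disagree.
```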
```diff
@@ -258,7 +283,8 @@ export class SqliteSearch {
       canonical_for TEXT,
       refs TEXT,
       translation_of TEXT,
-      version_of TEXT
+      version_of TEXT,
+      last_verified_at INTEGER
     );
 
     CREATE TABLE IF NOT EXISTS canonical_ownership (
```
```diff
@@ -402,6 +428,14 @@ export class SqliteSearch {
       currentVersion = 5;
     }
 
+    // Migration 6: add last_verified_at for wiki staleness detection
+    if (currentVersion < 6) {
+      try { this.#db.prepare(`SELECT last_verified_at FROM documents LIMIT 0`).get(); } catch {
+        this.#db.exec(`ALTER TABLE documents ADD COLUMN last_verified_at INTEGER`);
+      }
+      currentVersion = 6;
+    }
+
     // Persist the current version
     this.#db.exec(`DELETE FROM _schema_version`);
     this.#db.prepare(`INSERT INTO _schema_version (version) VALUES (?)`).run(currentVersion);
```
```diff
@@ -540,7 +574,7 @@ export class SqliteSearch {
    */
   getDocument(id) {
     const row = this.#db
-      .prepare(`SELECT id, path, content, frontmatter, brain_id, indexed_at, content_hash, canonical_for, refs FROM documents WHERE id = ?`)
+      .prepare(`SELECT id, path, content, frontmatter, brain_id, indexed_at, content_hash, canonical_for, refs, last_verified_at FROM documents WHERE id = ?`)
       .get(id);
     return this.#hydrateDocumentRow(row);
   }
```
```diff
@@ -551,7 +585,7 @@ export class SqliteSearch {
    */
   getDocumentByPath(path) {
     const row = this.#db
-      .prepare(`SELECT id, path, content, frontmatter, brain_id, indexed_at, content_hash, canonical_for, refs FROM documents WHERE path = ? LIMIT 1`)
+      .prepare(`SELECT id, path, content, frontmatter, brain_id, indexed_at, content_hash, canonical_for, refs, last_verified_at FROM documents WHERE path = ? LIMIT 1`)
       .get(path);
     return this.#hydrateDocumentRow(row);
   }
```
```diff
@@ -568,6 +602,7 @@ export class SqliteSearch {
       content_hash: row.content_hash,
       canonical_for: row.canonical_for ? JSON.parse(row.canonical_for) : [],
       references: row.refs ? JSON.parse(row.refs) : [],
+      last_verified_at: row.last_verified_at ?? null,
     };
   }
 
```
```diff
@@ -1259,6 +1294,102 @@ export class SqliteSearch {
     return { articles };
   }
 
+  /**
+   * Verify wiki articles against the current hashes of the chunks they claim
+   * to synthesize. Returns a per-article classification plus a summary.
+   *
+   * See docs/specs/2026-04-17-wiki-staleness-detection.md for the contract.
+   * Read-only w.r.t. content: the only mutation is stamping
+   * documents.last_verified_at for each article scanned.
+   */
+  verifyWiki({ path = null } = {}) {
+    let rows;
+    if (path) {
+      rows = this.#db.prepare(`
+        SELECT id, path, content, frontmatter
+        FROM documents
+        WHERE path = ? AND path LIKE 'wiki/%'
+      `).all(path);
+    } else {
+      rows = this.#db.prepare(`
+        SELECT id, path, content, frontmatter
+        FROM documents
+        WHERE path LIKE 'wiki/%'
+        ORDER BY path
+      `).all();
+    }
+
+    const summary = { total: 0, fresh: 0, stale: 0, orphaned: 0, unverifiable: 0 };
+    const articles = [];
+    const now = Date.now();
+    const stampVerified = this.#db.prepare(
+      `UPDATE documents SET last_verified_at = ? WHERE id = ?`,
+    );
+
+    for (const row of rows) {
+      summary.total++;
+      const classification = this.#classifyWikiArticle(row);
+      articles.push({
+        path: row.path,
+        status: classification.status,
+        source_count: classification.source_count,
+        matched: classification.matched,
+        mismatched: classification.mismatched,
+        missing: classification.missing,
+        last_verified_at: now,
+      });
+      summary[classification.status]++;
+      stampVerified.run(now, row.id);
+    }
+
+    return { articles, summary };
+  }
+
+  #classifyWikiArticle(row) {
+    const fmBlock = row.frontmatter || SqliteSearch.#extractFrontmatter(row.content) || "";
+    let fmData = {};
+    try { fmData = parseFrontmatterBlock(fmBlock); } catch { fmData = {}; }
+    const hashEntries = Array.isArray(fmData.source_hashes) ? fmData.source_hashes : [];
+    if (hashEntries.length === 0) {
+      return { status: "unverifiable", source_count: 0, matched: 0, mismatched: [], missing: [] };
+    }
+
+    const mismatched = [];
+    const missing = [];
+    let matched = 0;
+
+    for (const entry of hashEntries) {
+      const parsed = parseHashEntry(entry);
+      if (!parsed) continue; // malformed line — skip silently
+      const chunk = this.#db.prepare(
+        `SELECT content FROM documents WHERE path = ?`,
+      ).get(parsed.chunkPath);
+      if (!chunk) {
+        missing.push(parsed.chunkPath);
+        continue;
+      }
+      const currentHash = chunkBodyHash(chunk.content);
+      if (currentHash === parsed.hash) {
+        matched++;
+      } else {
+        mismatched.push(parsed.chunkPath);
+      }
+    }
+
+    const source_count = hashEntries.length;
+    let status;
+    if (mismatched.length > 0) {
+      status = "stale";
+    } else if (missing.length === source_count) {
+      status = "orphaned";
+    } else if (missing.length > 0) {
+      status = "stale";
+    } else {
+      status = "fresh";
+    }
+    return { status, source_count, matched, mismatched, missing };
+  }
+
   /**
    * Parse tags from frontmatter string.
    * Supports space-separated inline, JSON array, and YAML block list formats.
```
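For reference, the shape `verifyWiki()` returns, with hypothetical values (the field list is taken from the code above):

```js
const exampleResult = {
  articles: [
    {
      path: "wiki/example-topic.md",         // hypothetical article
      status: "stale",                       // fresh | stale | orphaned | unverifiable
      source_count: 3,                       // entries in source_hashes
      matched: 2,                            // chunks whose hash still matches
      mismatched: ["chunks/extracted/a.md"], // chunks changed since compile
      missing: [],                           // chunks gone from the index
      last_verified_at: 1713312000000,       // Date.now() at scan time
    },
  ],
  summary: { total: 1, fresh: 0, stale: 1, orphaned: 0, unverifiable: 0 },
};
```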
package/server/package.json
CHANGED
````diff
@@ -83,7 +83,21 @@ Depth 0 plus:
 ```
 Group results by path prefix (e.g., `chunks/extracted/`, `wiki/`) to show recent activity distribution.
 - List the top 10 most common tags
-- Flag
+- Flag wiki staleness warnings by calling `verify_wiki`:
+  ```bash
+  curl -s -X POST http://localhost:{port}/api \
+    -H "Content-Type: application/json" \
+    -d '{"action":"verify_wiki","params":{}}'
+  ```
+  Report one line per non-fresh bucket — only emit the lines where the count is > 0:
+  ```
+  ⚠ Wiki staleness: {stale} stale / {orphaned} orphaned / {unverifiable} unverifiable (of {total} articles)
+  ```
+  `stale` = at least one referenced chunk changed or is missing. `orphaned` =
+  every referenced chunk is missing from the index. `unverifiable` = article
+  predates `source_hashes` frontmatter. Suggest running `wicked-brain:compile`
+  to refresh stale articles.
+  If `total == 0` (brain has no wiki yet), skip the line entirely.
 
 **Convergence Debt:**
 Detect chunks that are frequently accessed but have never been compiled into wiki articles:
````
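A sketch of that reporting step in code, assuming the `/api` call returns `verifyWiki`'s result unwrapped (`res` here stands for a fetch equivalent of the curl call above):

```js
const { summary } = await res.json();
if (summary.total > 0) {
  const parts = [];
  if (summary.stale > 0) parts.push(`${summary.stale} stale`);
  if (summary.orphaned > 0) parts.push(`${summary.orphaned} orphaned`);
  if (summary.unverifiable > 0) parts.push(`${summary.unverifiable} unverifiable`);
  // Skip the warning entirely when every article is fresh.
  if (parts.length > 0) {
    console.log(`⚠ Wiki staleness: ${parts.join(" / ")} (of ${summary.total} articles)`);
  }
}
```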