@rarusoft/dendrite-wiki 0.1.0-alpha.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (74)
  1. package/README.md +79 -0
  2. package/dist/api-extractor/extract.js +269 -0
  3. package/dist/api-extractor/language-extractor.js +15 -0
  4. package/dist/api-extractor/python-extractor.js +358 -0
  5. package/dist/api-extractor/render.js +195 -0
  6. package/dist/api-extractor/tree-sitter-extractor.js +1079 -0
  7. package/dist/api-extractor/types.js +11 -0
  8. package/dist/api-extractor/typescript-extractor.js +50 -0
  9. package/dist/api-extractor/walk.js +178 -0
  10. package/dist/api-reference.js +438 -0
  11. package/dist/benchmark-events.js +129 -0
  12. package/dist/benchmark.js +270 -0
  13. package/dist/binder-export.js +381 -0
  14. package/dist/canonical-target.js +168 -0
  15. package/dist/chart-insert.js +377 -0
  16. package/dist/chart-prompts.js +414 -0
  17. package/dist/context-cache.js +98 -0
  18. package/dist/contradicts-shipped-memory.js +232 -0
  19. package/dist/diff-context.js +142 -0
  20. package/dist/doctor.js +220 -0
  21. package/dist/generated-docs.js +219 -0
  22. package/dist/i18n.js +71 -0
  23. package/dist/index.js +49 -0
  24. package/dist/librarian.js +255 -0
  25. package/dist/maintenance-actions.js +244 -0
  26. package/dist/maintenance-inbox.js +842 -0
  27. package/dist/maintenance-runner.js +62 -0
  28. package/dist/page-drift.js +225 -0
  29. package/dist/page-inbox.js +168 -0
  30. package/dist/report-export.js +339 -0
  31. package/dist/review-bridge.js +1386 -0
  32. package/dist/search-index.js +199 -0
  33. package/dist/store.js +1617 -0
  34. package/dist/telemetry-defaults.js +44 -0
  35. package/dist/telemetry-report.js +263 -0
  36. package/dist/telemetry.js +544 -0
  37. package/dist/wiki-synthesis.js +901 -0
  38. package/package.json +35 -0
  39. package/src/api-extractor/extract.ts +333 -0
  40. package/src/api-extractor/language-extractor.ts +37 -0
  41. package/src/api-extractor/python-extractor.ts +380 -0
  42. package/src/api-extractor/render.ts +267 -0
  43. package/src/api-extractor/tree-sitter-extractor.ts +1210 -0
  44. package/src/api-extractor/types.ts +41 -0
  45. package/src/api-extractor/typescript-extractor.ts +56 -0
  46. package/src/api-extractor/walk.ts +209 -0
  47. package/src/api-reference.ts +552 -0
  48. package/src/benchmark-events.ts +216 -0
  49. package/src/benchmark.ts +376 -0
  50. package/src/binder-export.ts +437 -0
  51. package/src/canonical-target.ts +192 -0
  52. package/src/chart-insert.ts +478 -0
  53. package/src/chart-prompts.ts +417 -0
  54. package/src/context-cache.ts +129 -0
  55. package/src/contradicts-shipped-memory.ts +311 -0
  56. package/src/diff-context.ts +187 -0
  57. package/src/doctor.ts +260 -0
  58. package/src/generated-docs.ts +316 -0
  59. package/src/i18n.ts +106 -0
  60. package/src/index.ts +59 -0
  61. package/src/librarian.ts +331 -0
  62. package/src/maintenance-actions.ts +314 -0
  63. package/src/maintenance-inbox.ts +1132 -0
  64. package/src/maintenance-runner.ts +85 -0
  65. package/src/page-drift.ts +292 -0
  66. package/src/page-inbox.ts +254 -0
  67. package/src/report-export.ts +392 -0
  68. package/src/review-bridge.ts +1729 -0
  69. package/src/search-index.ts +266 -0
  70. package/src/store.ts +2171 -0
  71. package/src/telemetry-defaults.ts +50 -0
  72. package/src/telemetry-report.ts +365 -0
  73. package/src/telemetry.ts +757 -0
  74. package/src/wiki-synthesis.ts +1307 -0
package/src/store.ts ADDED
@@ -0,0 +1,2171 @@
1
+ /**
2
+ * The wiki page store and the heart of the read/write/search/lint surface.
3
+ *
4
+ * Owns everything about wiki pages as filesystem markdown: listing pages under
5
+ * `docs/wiki/**`, parsing frontmatter into `WikiPageMetadata`, reading and writing page
6
+ * content, extracting source-backed claims, building the graph of inbound/outbound links,
7
+ * surfacing lint findings (`missing-h1`, `orphan-page`, `stale-claim`, `page-drift`, etc.),
8
+ * and assembling the task-scoped briefing returned by `wiki_context`. The lint pass exempts
9
+ * `lifecycle: generated` pages so auto-managed surfaces (the API reference tree) don't
10
+ * surface findings humans can't act on.
11
+ *
12
+ * Most other modules in `src/wiki/` consume this module rather than the filesystem directly.
13
+ * `memory-store.ts` joins memories to pages here, `synthesis.ts` reads pages for claim
14
+ * synthesis prompts, `generated-docs.ts` rebuilds derived artifacts from the page list.
15
+ */
16
+ import { promises as fs, statSync } from 'node:fs';
17
+ import path from 'node:path';
18
+ import { createPatch } from 'diff';
19
+ import {
20
+ recallProjectHandoffs,
21
+ recallProjectMemories,
22
+ summarizeMemoryBacklog,
23
+ type MemoryBacklogSummary,
24
+ type RecalledProjectMemory
25
+ } from '@rarusoft/dendrite-memory';
26
+ import { recallProjectSkills, type RecalledProjectSkill } from '@rarusoft/dendrite-memory';
27
+ import { getCachedWikiContext, invalidateWikiContextCache, setCachedWikiContext } from './context-cache.js';
28
+ import { buildContradictsShippedMemoryMessage, detectContradictsShippedMemory } from './contradicts-shipped-memory.js';
29
+ import { listProjectMemories } from '@rarusoft/dendrite-memory';
30
+ import { buildPageDriftMessage, detectPageDrift } from './page-drift.js';
31
+ import {
32
+ buildMemoryTrailReason,
33
+ loadMemoryTrailBonusLookup,
34
+ reinforceQueryEdges
35
+ } from '@rarusoft/dendrite-memory';
36
+ import { loadActivePageDriftSnoozes } from '@rarusoft/dendrite-memory';
37
+ import {
38
+ buildWikiSearchIndex,
39
+ fallbackSearchResults,
40
+ searchResultToContextPage,
41
+ searchWikiIndex,
42
+ tokenizeSearchQuery,
43
+ type WikiSearchIndex,
44
+ type WikiSearchGraphNode,
45
+ type WikiSearchResult
46
+ } from './search-index.js';
47
+
48
/** Listing entry for one wiki page as discovered on disk. */
export interface WikiPageSummary {
  /** Page identifier relative to the wiki root, without the `.md` extension. */
  slug: string;
  title: string;
  /** Repo-relative path to the markdown file. */
  path: string;
  /** Parsed frontmatter; optional — presumably absent when the page declares none. TODO confirm against the parser. */
  metadata?: WikiPageMetadata;
}

/**
 * Lifecycle states a page can declare in frontmatter. Per the module header,
 * `generated` pages are exempt from the lint pass.
 */
export type WikiPageLifecycle = 'active' | 'dormant' | 'superseded' | 'pending-review' | 'generated';

/** Frontmatter fields recognized on a wiki page. */
export interface WikiPageMetadata {
  lifecycle: WikiPageLifecycle;
  owner: string;
  // NOTE(review): presumably an ISO date string — confirm against the frontmatter writer.
  lastReviewed: string;
  sourceCoverage: 'none' | 'partial' | 'complete' | 'unknown';
}

/** Every rule identifier the wiki lint pass can emit (see module header for examples). */
export type WikiLintRule =
  | 'missing-h1'
  | 'missing-summary'
  | 'orphan-page'
  | 'stale-claim'
  | 'unsupported-claim'
  | 'dormant-skill'
  | 'oversized-guidance'
  | 'duplicate-guidance'
  | 'stale-guidance-reference'
  | 'conflicting-guidance'
  | 'unrouted-guidance'
  | 'page-drift'
  | 'contradicts-shipped-memory';

/** One lint finding, attached to a single page. */
export interface WikiLintFinding {
  rule: WikiLintRule;
  slug: string;
  path: string;
  message: string;
}
85
+
86
/** Options controlling the task-scoped briefing assembled by `wiki_context`. */
export interface WikiContextOptions {
  /** Cap on included pages; defaults to `defaultContextPageLimit` — TODO confirm where defaults apply. */
  maxPages?: number;
  includeLint?: boolean;
  maxLogEntries?: number;
  maxSkills?: number;
  /** Hints used for page relevance scoring — presumably; confirm against the scorer. */
  relatedFiles?: string[];
  languages?: string[];
  frameworks?: string[];
  /**
   * Cap on how many omittedPageReasons entries to include. Defaults to 12.
   * The full omitted count is still reported via `omittedPages`; this just
   * bounds the size of the per-page reason array so the briefing payload
   * stays under MCP token limits on large wikis. Use Infinity to disable.
   */
  maxOmittedPageReasons?: number;
  /** Per-omitted-page reason text cap. Defaults to 80 chars. */
  maxOmittedReasonChars?: number;
  /** Per-handoff body cap in the briefing. Defaults to 1200 chars. Full body via memory_recall. */
  maxHandoffTextChars?: number;
  /** Per-memory body cap in the briefing. Defaults to 600 chars. Full body via memory_recall. */
  maxMemoryTextChars?: number;
  /** Per-skill body cap in the briefing. Defaults to 800 chars. Full body via wiki_skill_load. */
  maxSkillTextChars?: number;
}
110
+
111
/** A page selected into the briefing, with its relevance score and why it was chosen. */
export interface WikiContextPage extends WikiPageSummary {
  score: number;
  summary: string;
  /** Human-readable explanation of why this page was included. */
  reason: string;
  evidence: WikiContextEvidence;
}

/** Supporting signals behind a `WikiContextPage`'s score. */
export interface WikiContextEvidence {
  matchedTerms: string[];
  inboundLinks: number;
  relatedPages: string[];
}

/** Review state of a source-backed claim extracted from a page. */
export type WikiClaimStatus = 'current' | 'needs-review' | 'superseded' | 'unknown';

// Phase 3 of the Library Extraction Roadmap: the source-kind enum moved to
// `./memory-store.ts` as `MemorySourceKind` because the brain owns the source-kind
// vocabulary (memory records cite sources too), not the wiki. The legacy name
// `WikiClaimSourceKind` is kept as a type alias for one release so existing imports
// and external consumers don't break.
import type { MemorySourceKind } from '@rarusoft/dendrite-memory';
export type WikiClaimSourceKind = MemorySourceKind;

/** One citation backing a claim. */
export interface WikiClaimSource {
  kind: WikiClaimSourceKind;
  label: string;
  slug: string;
}

/** A source-backed claim extracted from a wiki page. */
export interface WikiClaim {
  pageSlug: string;
  text: string;
  status: WikiClaimStatus;
  sources: WikiClaimSource[];
}

/** Categories of agent-guidance files discovered in the repo (AGENTS.md, prompts, skills, …). */
export type WikiGuidanceKind = 'agents' | 'copilot-instructions' | 'instruction' | 'prompt' | 'agent' | 'skill';
149
/** A guidance file found in the repository. */
export interface WikiGuidanceFile {
  path: string;
  kind: WikiGuidanceKind;
  summary: string;
}

/** Lifecycle states for guidance files (note: no `generated` state, unlike pages). */
export type WikiGuidanceLifecycleStatus = 'active' | 'dormant' | 'superseded' | 'pending-review';

/** A guidance file annotated with lifecycle assessment. */
export interface WikiGuidanceLifecycleItem extends WikiGuidanceFile {
  status: WikiGuidanceLifecycleStatus;
  linkedFrom: string[];
  archiveTarget?: string;
  reviewStatus?: 'none' | 'pending-review';
  reason: string;
}

/**
 * Proposal to collapse duplicate guidance files into one canonical entry file.
 * Duplicates become short pointers; the canonical file stays unchanged.
 */
export interface WikiMergeGuidanceProposal {
  kind: 'merge-guidance';
  summary: string;
  currentStateSummary: string;
  afterApplySummary: string;
  /** Slug/path of the generated pending-review page describing this proposal. */
  reviewSlug: string;
  reviewPath: string;
  canonicalPath: string;
  duplicatePaths: string[];
  archiveTargets: Array<{ sourcePath: string; suggestedPath: string; reviewStatus: 'pending-review'; reason: string }>;
  rationale: string;
}

/**
 * Proposal to trim an oversized guidance file down to a short entry file that
 * routes readers to canonical docs pages it already links to.
 */
export interface WikiRouteGuidanceProposal {
  kind: 'route-guidance';
  summary: string;
  currentStateSummary: string;
  afterApplySummary: string;
  reviewSlug: string;
  reviewPath: string;
  guidancePath: string;
  targetPaths: string[];
  rationale: string;
}

/** Discriminated union of all maintenance proposal kinds (discriminant: `kind`). */
export type WikiProposal = WikiMergeGuidanceProposal | WikiRouteGuidanceProposal;
// Drafts are proposals before a review slug/path has been attached by attachProposalReviewPages.
type WikiMergeGuidanceProposalDraft = Omit<WikiMergeGuidanceProposal, 'reviewSlug' | 'reviewPath'>;
type WikiRouteGuidanceProposalDraft = Omit<WikiRouteGuidanceProposal, 'reviewSlug' | 'reviewPath'>;
type WikiProposalDraft = WikiMergeGuidanceProposalDraft | WikiRouteGuidanceProposalDraft;
194
+
195
/** Full payload returned by `wiki_context`: briefing text plus its structured inputs. */
export interface WikiContextResult {
  query: string;
  briefing: string;
  readFirst: string[];
  handoffs: RecalledProjectMemory[];
  pages: WikiContextPage[];
  memories: RecalledProjectMemory[];
  skills: RecalledProjectSkill[];
  claims: WikiClaim[];
  guidanceFiles: WikiGuidanceFile[];
  /** Total count of pages omitted; the reasons array below is capped separately. */
  omittedPages: number;
  omittedPageReasons: Array<{ slug: string; score: number; reason: string }>;
  recentLogEntries: string[];
  findings: WikiLintFinding[];
  openQuestions: string[];
  memoryBacklog: MemoryBacklogSummary;
}

/** Graph node for one page, enriched with claim counts for health overlays. */
export interface WikiGraphNode extends WikiSearchGraphNode {
  title: string;
  path: string;
  staleClaimCount: number;
  claimCount: number;
}

/** Snapshot of the page link graph. */
export interface WikiGraphSnapshot {
  pages: number;
  nodes: WikiGraphNode[];
}

/** A generated pending-review page that describes one proposal. */
export interface WikiProposalPage extends WikiPageSummary {
  proposalKind: WikiProposal['kind'];
}

/** Result of applying a proposal: what changed and which review pages remain. */
export interface WikiAppliedProposalResult {
  reviewSlug: string;
  proposalKind: WikiProposal['kind'];
  updatedPaths: string[];
  removedReviewSlugs: string[];
  activeReviewSlugs: string[];
}

/** One file's before/after state in a proposal preview. */
export interface WikiProposalFileChange {
  path: string;
  currentContent: string;
  proposedContent: string;
  unifiedDiff: string;
  /** True when current content already equals the proposed content. */
  skippedBecauseUnchanged: boolean;
}

/** Dry-run view of a proposal: diffs and warnings, no writes. */
export interface WikiProposalPreview {
  mode: 'preview';
  reviewSlug: string;
  proposalKind: WikiProposal['kind'];
  summary: string;
  rationale: string;
  warnings: string[];
  fileChanges: WikiProposalFileChange[];
}

/** Outcome of syncing generated proposal pages with the active proposal set. */
interface WikiProposalSyncResult {
  pages: WikiProposalPage[];
  removedSlugs: string[];
}

// Sentinel line embedded in every generated proposal page; used to recognize
// (and safely delete) stale generated pages in listGeneratedProposalPageSlugs.
const proposalPageMarker = 'Reviewable deterministic maintenance proposal.';
261
+
262
/**
 * Compute the current set of deterministic maintenance proposals.
 *
 * Two sources: (1) merge proposals for each group of guidance files whose
 * normalized content duplicates a canonical file, and (2) route proposals for
 * non-skill guidance files that exceed `maxGuidanceLineCount` and already link
 * to local docs pages. Results are sorted by summary and get review slugs/paths
 * attached before being returned.
 *
 * @returns Active proposals, sorted by `summary`, with review pages attached.
 */
export async function listWikiProposals(): Promise<WikiProposal[]> {
  const duplicateGroups = await findDuplicateGuidanceGroups();
  const guidanceFiles = await listProjectGuidanceFiles();

  // First group member is treated as canonical; the rest become pointers.
  const mergeProposals: WikiMergeGuidanceProposalDraft[] = duplicateGroups.map((group) => {
    const [canonical, ...duplicates] = group;
    return {
      kind: 'merge-guidance',
      summary: `Merge duplicate guidance into ${canonical.path}`,
      currentStateSummary: `${duplicates.map((guidance) => guidance.path).join(', ')} currently duplicate ${canonical.path}.`,
      afterApplySummary: `${duplicates.map((guidance) => guidance.path).join(', ')} become short pointers to ${canonical.path} while the canonical file stays unchanged.`,
      canonicalPath: canonical.path,
      duplicatePaths: duplicates.map((guidance) => guidance.path),
      archiveTargets: duplicates.map((guidance) => ({
        sourcePath: guidance.path,
        suggestedPath: buildGuidanceArchivePath(guidance.path),
        reviewStatus: 'pending-review',
        reason: 'Archive only after the duplicate guidance has been reviewed and the pointer rewrite has been accepted.'
      })),
      rationale: `These guidance files share the same normalized content and should route through one canonical entry file before the redundant copies are archived.`
    };
  });

  const routeProposals: WikiRouteGuidanceProposalDraft[] = [];
  // Skills are exempt from the length rule; only prose guidance gets routed.
  for (const guidance of guidanceFiles.filter((candidate) => candidate.kind !== 'skill')) {
    // Missing/unreadable files read as '' and fall below the line threshold.
    const content = await fs.readFile(path.join(repoRoot, guidance.path), 'utf8').catch(() => '');
    if (countLines(content) <= maxGuidanceLineCount) {
      continue;
    }

    // No route proposal unless the file already links somewhere canonical.
    const targetPaths = listGuidanceRouteTargets(content, guidance.path);
    if (targetPaths.length === 0) {
      continue;
    }

    routeProposals.push({
      kind: 'route-guidance',
      summary: `Trim ${guidance.path} and route to ${targetPaths[0]}`,
      currentStateSummary: `${guidance.path} is longer than the preferred guidance length.`,
      afterApplySummary: `${guidance.path} becomes a short entry file that routes to ${targetPaths[0]}.`,
      guidancePath: guidance.path,
      targetPaths,
      rationale: 'This guidance file exceeds the preferred length and already links to canonical local docs pages that can carry the detailed workflow.'
    });
  }

  // Stable order keeps review slugs deterministic across runs.
  return attachProposalReviewPages([...mergeProposals, ...routeProposals].sort((left, right) => left.summary.localeCompare(right.summary)));
}
310
+
311
+ export async function writeWikiProposalPages(): Promise<WikiProposalPage[]> {
312
+ const result = await syncGeneratedProposalPages();
313
+ return result.pages;
314
+ }
315
+
316
+ export async function applyWikiProposal(reviewSlug: string): Promise<WikiAppliedProposalResult> {
317
+ const proposals = await listWikiProposals();
318
+ const proposal = proposals.find((candidate) => candidate.reviewSlug === reviewSlug);
319
+ if (!proposal) {
320
+ throw new Error(`Unknown active proposal: ${reviewSlug}`);
321
+ }
322
+
323
+ if (proposal.kind === 'route-guidance') {
324
+ const absolutePath = path.join(repoRoot, proposal.guidancePath);
325
+ const existingContent = await fs.readFile(absolutePath, 'utf8').catch(() => '');
326
+ const nextContent = await renderRouteGuidanceApplyContent(proposal, existingContent);
327
+ await fs.writeFile(absolutePath, nextContent.endsWith('\n') ? nextContent : `${nextContent}\n`, 'utf8');
328
+ const syncResult = await syncGeneratedProposalPages();
329
+
330
+ return {
331
+ reviewSlug: proposal.reviewSlug,
332
+ proposalKind: proposal.kind,
333
+ updatedPaths: [proposal.guidancePath],
334
+ removedReviewSlugs: syncResult.removedSlugs,
335
+ activeReviewSlugs: syncResult.pages.map((page) => page.slug)
336
+ };
337
+ }
338
+
339
+ if (proposal.kind === 'merge-guidance') {
340
+ const canonicalContent = await fs.readFile(path.join(repoRoot, proposal.canonicalPath), 'utf8').catch(() => '');
341
+ const updatedPaths: string[] = [];
342
+
343
+ for (const duplicatePath of proposal.duplicatePaths) {
344
+ const absolutePath = path.join(repoRoot, duplicatePath);
345
+ const existingContent = await fs.readFile(absolutePath, 'utf8').catch(() => '');
346
+ const nextContent = await renderMergeGuidanceApplyContent(proposal, duplicatePath, existingContent, canonicalContent);
347
+ await fs.writeFile(absolutePath, nextContent.endsWith('\n') ? nextContent : `${nextContent}\n`, 'utf8');
348
+ updatedPaths.push(duplicatePath);
349
+ }
350
+
351
+ const syncResult = await syncGeneratedProposalPages();
352
+
353
+ return {
354
+ reviewSlug: proposal.reviewSlug,
355
+ proposalKind: proposal.kind,
356
+ updatedPaths,
357
+ removedReviewSlugs: syncResult.removedSlugs,
358
+ activeReviewSlugs: syncResult.pages.map((page) => page.slug)
359
+ };
360
+ }
361
+
362
+ throw new Error(`Auto-apply is not supported for proposal kind: ${reviewSlug}`);
363
+ }
364
+
365
/**
 * Dry-run a proposal: compute the exact per-file before/after contents and
 * unified diffs without writing anything.
 *
 * Uses the same render functions as applyWikiProposal, so the preview matches
 * what apply would write byte-for-byte (including trailing-newline handling).
 *
 * @param reviewSlug Review slug of the proposal, as returned by listWikiProposals.
 * @throws Error when no active proposal matches, or the kind has no preview path.
 */
export async function previewWikiProposal(reviewSlug: string): Promise<WikiProposalPreview> {
  const proposals = await listWikiProposals();
  const proposal = proposals.find((candidate) => candidate.reviewSlug === reviewSlug);
  if (!proposal) {
    throw new Error(`Unknown active proposal: ${reviewSlug}`);
  }

  const fileChanges: WikiProposalFileChange[] = [];
  const warnings: string[] = [];

  if (proposal.kind === 'route-guidance') {
    const absolutePath = path.join(repoRoot, proposal.guidancePath);
    // Missing file reads as '' — warn, because apply would create it.
    const existingContent = await fs.readFile(absolutePath, 'utf8').catch(() => '');
    if (!existingContent) {
      warnings.push(`${proposal.guidancePath} does not currently exist; applying will create it.`);
    }
    const renderedContent = await renderRouteGuidanceApplyContent(proposal, existingContent);
    const proposedContent = ensureTrailingNewline(renderedContent);
    fileChanges.push(buildFileChange(proposal.guidancePath, existingContent, proposedContent));
  } else if (proposal.kind === 'merge-guidance') {
    const canonicalContent = await fs.readFile(path.join(repoRoot, proposal.canonicalPath), 'utf8').catch(() => '');
    if (!canonicalContent) {
      warnings.push(`Canonical guidance file ${proposal.canonicalPath} does not exist; the merge will run with empty canonical content.`);
    }
    for (const duplicatePath of proposal.duplicatePaths) {
      const absolutePath = path.join(repoRoot, duplicatePath);
      const existingContent = await fs.readFile(absolutePath, 'utf8').catch(() => '');
      const renderedContent = await renderMergeGuidanceApplyContent(proposal, duplicatePath, existingContent, canonicalContent);
      const proposedContent = ensureTrailingNewline(renderedContent);
      fileChanges.push(buildFileChange(duplicatePath, existingContent, proposedContent));
    }
  } else {
    // Unreachable for the current union; cast keeps the message useful if a kind is added.
    throw new Error(`Preview is not supported for proposal kind: ${(proposal as { kind: string }).kind}`);
  }

  // Surface a no-op prominently (unshift) so it reads before per-file warnings.
  if (fileChanges.every((change) => change.skippedBecauseUnchanged)) {
    warnings.unshift('Every affected file already matches the proposed content; applying will be a no-op.');
  }

  return {
    mode: 'preview',
    reviewSlug: proposal.reviewSlug,
    proposalKind: proposal.kind,
    summary: proposal.summary,
    rationale: proposal.rationale,
    warnings,
    fileChanges
  };
}
414
+
415
+ function ensureTrailingNewline(content: string): string {
416
+ return content.endsWith('\n') ? content : `${content}\n`;
417
+ }
418
+
419
+ function buildFileChange(filePath: string, currentContent: string, proposedContent: string): WikiProposalFileChange {
420
+ const skippedBecauseUnchanged = currentContent === proposedContent;
421
+ // Render the diff with the entire file as context (not the diff library's default 4-line window)
422
+ // so the operator sees the whole file surrounding the change. Same convention as memory-promotion.
423
+ const unifiedDiff = createPatch(filePath, currentContent, proposedContent, 'current', 'after apply', { context: 100_000 });
424
+ return { path: filePath, currentContent, proposedContent, unifiedDiff, skippedBecauseUnchanged };
425
+ }
426
+
427
/**
 * Reconcile generated pending-review pages with the active proposal set:
 * (re)write a page per active proposal, then delete generated pages whose
 * proposal no longer exists (recognized via the proposalPageMarker sentinel).
 *
 * @returns Active proposal pages and the slugs of removed stale pages, both sorted.
 */
async function syncGeneratedProposalPages(): Promise<WikiProposalSyncResult> {
  const proposals = await listWikiProposals();
  const pages: WikiProposalPage[] = [];
  const existingSlugs = await listGeneratedProposalPageSlugs();
  const currentSlugs = new Set<string>();
  const removedSlugs: string[] = [];

  for (const proposal of proposals) {
    const content = renderProposalPage(proposal);
    await writeWikiPage(proposal.reviewSlug, content);
    // Title comes from the rendered H1; fall back to the slug if absent.
    const title = content.match(/^#\s+(.+)$/m)?.[1]?.trim() ?? proposal.reviewSlug;
    pages.push({
      slug: proposal.reviewSlug,
      title,
      path: proposal.reviewPath,
      proposalKind: proposal.kind
    });
    currentSlugs.add(proposal.reviewSlug);
  }

  // Remove generated pages for proposals that are no longer active.
  for (const staleSlug of existingSlugs) {
    if (currentSlugs.has(staleSlug)) {
      continue;
    }
    await fs.rm(pagePathFromSlug(staleSlug), { force: true });
    removedSlugs.push(staleSlug);
  }

  return {
    pages: pages.sort((left, right) => left.slug.localeCompare(right.slug)),
    removedSlugs: removedSlugs.sort((left, right) => left.localeCompare(right))
  };
}
460
+
461
+ function attachProposalReviewPages(proposals: WikiProposalDraft[]): WikiProposal[] {
462
+ const usedSlugs = new Set<string>();
463
+ return proposals.map((proposal) => {
464
+ const reviewSlug = buildProposalPageSlug(proposal, usedSlugs);
465
+ return {
466
+ ...proposal,
467
+ reviewSlug,
468
+ reviewPath: `docs/wiki/${reviewSlug}.md`
469
+ };
470
+ });
471
+ }
472
+
473
/**
 * Find slugs of all generated proposal pages under `docs/wiki/pending-review/`.
 * A page counts as generated only if it contains the proposalPageMarker
 * sentinel, so hand-written pending-review pages are never deleted by sync.
 *
 * @returns Slugs (wiki-root-relative, no `.md`), sorted.
 */
async function listGeneratedProposalPageSlugs(): Promise<string[]> {
  const pendingReviewDirectory = path.join(wikiRoot, 'pending-review');
  const matches: string[] = [];

  // Recursive walk; a missing directory reads as an empty entry list.
  async function walk(directory: string): Promise<void> {
    const entries = await fs.readdir(directory, { withFileTypes: true }).catch(() => []);
    for (const entry of entries) {
      const fullPath = path.join(directory, entry.name);
      if (entry.isDirectory()) {
        await walk(fullPath);
        continue;
      }
      if (!entry.isFile() || !entry.name.endsWith('.md')) {
        continue;
      }

      const content = await fs.readFile(fullPath, 'utf8').catch(() => '');
      if (!content.includes(proposalPageMarker)) {
        continue;
      }

      // Normalize Windows separators so slugs are always '/'-delimited.
      const relative = path.relative(wikiRoot, fullPath).replace(/\\/g, '/');
      matches.push(relative.replace(/\.md$/i, ''));
    }
  }

  await walk(pendingReviewDirectory);
  return matches.sort((left, right) => left.localeCompare(right));
}
502
+
503
+ function buildGuidanceArchivePath(relativePath: string): string {
504
+ const safeName = relativePath.replace(/^[./]+/, '').replace(/[\\/]/g, '__');
505
+ return `docs/wiki/archive-guidance/${safeName}`;
506
+ }
507
+
508
+ function buildProposalPageSlug(proposal: WikiProposalDraft | WikiProposal, usedSlugs: Set<string>): string {
509
+ const key = proposal.kind === 'merge-guidance' ? proposal.canonicalPath : proposal.guidancePath;
510
+ const base = `pending-review/${proposal.kind}-${slugifyProposalKey(key)}`;
511
+ let slug = base;
512
+ let counter = 2;
513
+ while (usedSlugs.has(slug)) {
514
+ slug = `${base}-${counter}`;
515
+ counter += 1;
516
+ }
517
+ usedSlugs.add(slug);
518
+ return slug;
519
+ }
520
+
521
+ function slugifyProposalKey(value: string): string {
522
+ return value
523
+ .toLowerCase()
524
+ .replace(/[^a-z0-9]+/g, '-')
525
+ .replace(/^-+|-+$/g, '') || 'proposal';
526
+ }
527
+
528
/**
 * Render the markdown body of a pending-review page for one proposal.
 * Every page embeds proposalPageMarker so sync can later recognize and
 * remove it once the proposal is gone. Merge and route kinds get distinct
 * Summary / Current State / After Apply / Rationale sections.
 */
function renderProposalPage(proposal: WikiProposal): string {
  if (proposal.kind === 'merge-guidance') {
    return [
      `# Review merge guidance for ${proposal.canonicalPath}`,
      '',
      proposalPageMarker,
      '',
      '## Summary',
      proposal.summary,
      '',
      '## Current State',
      `- ${proposal.currentStateSummary}`,
      `- ${proposal.canonicalPath} is the canonical guidance entry.`,
      ...proposal.duplicatePaths.map((duplicatePath) => `- ${duplicatePath} currently repeats that guidance content.`),
      '',
      '## After Apply',
      `- ${proposal.afterApplySummary}`,
      `- ${proposal.canonicalPath} stays unchanged as the canonical guidance entry.`,
      ...proposal.duplicatePaths.map((duplicatePath) => `- ${duplicatePath} becomes a short pointer to the canonical guidance and wiki pages.`),
      ...proposal.archiveTargets.map(
        (target) => `- If you want to keep history, archive ${target.sourcePath} at ${target.suggestedPath} before deleting or moving it later. ${target.reason}`
      ),
      '',
      '## Rationale',
      proposal.rationale,
    ].join('\n');
  }

  // route-guidance (only other kind in the union).
  return [
    `# Review route guidance for ${proposal.guidancePath}`,
    '',
    proposalPageMarker,
    '',
    '## Summary',
    proposal.summary,
    '',
    '## Current State',
    `- ${proposal.currentStateSummary}`,
    `- ${proposal.guidancePath} is longer than the preferred guidance length.`,
    ...proposal.targetPaths.map((targetPath) => `- It already points readers toward ${targetPath}.`),
    '',
    '## After Apply',
    `- ${proposal.afterApplySummary}`,
    `- ${proposal.guidancePath} becomes a short entry file.`,
    ...proposal.targetPaths.map((targetPath) => `- Detailed workflow is routed to ${targetPath}.`),
    '',
    '## Rationale',
    proposal.rationale
  ].join('\n');
}
578
+
579
/**
 * Render the trimmed "entry file" content that replaces an oversized guidance
 * file when a route-guidance proposal is applied. The original H1 and first
 * summary paragraph are preserved when present; route targets become relative
 * markdown links titled after each target page's H1.
 */
async function renderRouteGuidanceApplyContent(
  proposal: WikiRouteGuidanceProposal,
  existingContent: string
): Promise<string> {
  // Keep the file's own heading/summary when present; fall back to defaults.
  const heading = extractHeading(existingContent) || defaultGuidanceHeading(proposal.guidancePath);
  const summary = extractSummaryParagraph(existingContent) || 'This entry file now routes to canonical local docs pages.';
  const routeLines = await Promise.all(
    proposal.targetPaths.map(async (targetPath) => {
      const label = await readMarkdownTitle(targetPath);
      const relativeLink = buildRelativeMarkdownLink(proposal.guidancePath, targetPath);
      return `- Read [${label}](${relativeLink}).`;
    })
  );

  return [
    `# ${heading}`,
    '',
    summary,
    '',
    'Detailed workflow lives in the wiki pages below.',
    '',
    ...routeLines
  ].join('\n');
}
603
+
604
/**
 * Render the "pointer" content that replaces one duplicate guidance file when
 * a merge-guidance proposal is applied: the duplicate's heading/summary are
 * preserved, a link to the canonical file is inserted, and route targets are
 * taken from the duplicate's own links, falling back to the canonical file's.
 */
async function renderMergeGuidanceApplyContent(
  proposal: WikiMergeGuidanceProposal,
  duplicatePath: string,
  duplicateContent: string,
  canonicalContent: string
): Promise<string> {
  const heading = extractHeading(duplicateContent) || defaultGuidanceHeading(duplicatePath);
  const summary = extractSummaryParagraph(duplicateContent) || 'This entry file now points to the canonical guidance file and wiki pages.';
  const canonicalTitle = await readMarkdownTitle(proposal.canonicalPath);
  const canonicalLink = buildRelativeMarkdownLink(duplicatePath, proposal.canonicalPath);
  const targetPaths = listGuidanceRouteTargets(duplicateContent, duplicatePath);
  // Prefer the duplicate's own route targets; borrow the canonical file's when it has none.
  const fallbackTargetPaths = targetPaths.length > 0 ? targetPaths : listGuidanceRouteTargets(canonicalContent, proposal.canonicalPath);
  const routeLines = await Promise.all(
    fallbackTargetPaths.map(async (targetPath) => {
      const label = await readMarkdownTitle(targetPath);
      const relativeLink = buildRelativeMarkdownLink(duplicatePath, targetPath);
      return `- Read [${label}](${relativeLink}).`;
    })
  );

  return [
    `# ${heading}`,
    '',
    summary,
    '',
    `Canonical guidance lives in [${canonicalTitle}](${canonicalLink}).`,
    '',
    'Detailed workflow lives in the wiki pages below.',
    '',
    ...routeLines
  ].join('\n');
}
636
+
637
+ function extractHeading(content: string): string {
638
+ return content.match(/^#\s+(.+)$/m)?.[1]?.trim() ?? '';
639
+ }
640
+
641
+ function defaultGuidanceHeading(guidancePath: string): string {
642
+ return path.basename(guidancePath, '.md').replace(/[-_]+/g, ' ').trim() || 'Guidance';
643
+ }
644
+
645
+ async function readMarkdownTitle(relativePath: string): Promise<string> {
646
+ const content = await fs.readFile(path.join(repoRoot, relativePath), 'utf8').catch(() => '');
647
+ return extractHeading(content) || path.basename(relativePath, '.md');
648
+ }
649
+
650
+ function buildRelativeMarkdownLink(sourcePath: string, targetPath: string): string {
651
+ const sourceDir = path.posix.dirname(sourcePath.replace(/\\/g, '/'));
652
+ return path.posix.relative(sourceDir, targetPath.replace(/\\/g, '/'));
653
+ }
654
+
655
// Filesystem anchors. The repo root is the process cwd at module load time,
// so the server must be started from the repository root.
const repoRoot = path.resolve(process.cwd());
const docsRoot = path.resolve(repoRoot, 'docs');
const wikiRoot = path.join(docsRoot, 'wiki');

// Tests reload this module per fixture (different cwd → different `repoRoot`), but
// `context-cache.js` is imported once and shared across all instances. Without this
// invalidation, a buildWikiContext call from a prior fixture's instance could serve
// a cached result whose `findings` were computed against a different `repoRoot`.
// On a fast Linux runner this surfaced as flaky lint assertions where guidance files
// from one fixture would silently bleed into another. Cheap to call at module init.
invalidateWikiContextCache();
// Default caps for the wiki_context briefing.
const defaultContextPageLimit = 4;
const defaultLogEntryLimit = 3;
// Guidance files longer than this become route-guidance candidates.
const maxGuidanceLineCount = 40;
// Query terms ignored during relevance scoring (too generic to discriminate).
const contextStopTerms = new Set(['current', 'latest', 'need', 'project', 'question', 'recent', 'task']);
// Query terms that hint the caller wants project-log history included.
const projectLogHintTerms = new Set(['change', 'changes', 'history', 'log', 'recent', 'ship', 'status', 'update', 'updates']);
671
+
672
+ export function pagePathFromSlug(slug: string): string {
673
+ const slashNormalized = slug.replace(/\\/g, '/').trim();
674
+ const normalized = slashNormalized.replace(/\.md$/i, '');
675
+ if (
676
+ !normalized ||
677
+ slashNormalized.startsWith('/') ||
678
+ normalized.endsWith('/') ||
679
+ normalized.split('/').some((segment) => !segment || segment === '.' || segment === '..') ||
680
+ !/^[a-z0-9][a-z0-9/_-]*$/i.test(normalized)
681
+ ) {
682
+ throw new Error(`Invalid wiki slug: ${slug}`);
683
+ }
684
+ return path.join(wikiRoot, `${normalized}.md`);
685
+ }
686
+
687
+ export async function readWikiPage(slug: string): Promise<string> {
688
+ return fs.readFile(pagePathFromSlug(slug), 'utf8');
689
+ }
690
+
691
+ export async function writeWikiPage(slug: string, content: string): Promise<void> {
692
+ const filePath = pagePathFromSlug(slug);
693
+ await fs.mkdir(path.dirname(filePath), { recursive: true });
694
+ await fs.writeFile(filePath, content.endsWith('\n') ? content : `${content}\n`, 'utf8');
695
+ invalidateWikiContextCache();
696
+ }
697
+
698
+ export async function appendProjectLog(entry: string, date = new Date()): Promise<void> {
699
+ const filePath = pagePathFromSlug('project-log');
700
+ const isoDate = date.toISOString().slice(0, 10);
701
+ // VitePress runs every page through the Vue compiler, so unescaped `<word>` tokens
702
+ // in log entries get parsed as custom Vue tags and break the docs build (the
703
+ // memory-promotion and maintenance-inbox writers already do this; project-log
704
+ // needs the same treatment). Escape inside backtick spans is preserved by markdown.
705
+ const line = `\n- ${escapeMarkdownAngleBrackets(entry.trim())}\n`;
706
+ let content = await fs.readFile(filePath, 'utf8').catch(() => '# Project Log\n');
707
+ const heading = `## ${isoDate}`;
708
+ if (!content.includes(heading)) {
709
+ content += `\n${heading}\n`;
710
+ }
711
+ content += line;
712
+ await fs.writeFile(filePath, content, 'utf8');
713
+ invalidateWikiContextCache();
714
+ }
715
+
716
+ function escapeMarkdownAngleBrackets(value: string): string {
717
+ return value.replace(/</g, '&lt;').replace(/>/g, '&gt;');
718
+ }
719
+
720
+ // Insert an H1 heading derived from the page slug. Used by the maintenance inbox's
721
+ // `missing-h1` lint action so the operator can resolve the finding with one click instead
722
+ // of editing the file by hand. The inserted heading lands AFTER the frontmatter block
723
+ // (if present) and BEFORE the first body line. Idempotent: if the page already has an
724
+ // H1, returns false and writes nothing.
725
+ export async function insertH1FromSlug(slug: string): Promise<boolean> {
726
+ const filePath = pagePathFromSlug(slug);
727
+ const content = await fs.readFile(filePath, 'utf8');
728
+ if (hasH1(content)) {
729
+ return false;
730
+ }
731
+
732
+ const title = titleCaseFromSlug(slug);
733
+ const frontmatterMatch = content.match(/^(---\r?\n[\s\S]*?\r?\n---\r?\n)/);
734
+ if (frontmatterMatch) {
735
+ const frontmatter = frontmatterMatch[1];
736
+ const rest = content.slice(frontmatter.length);
737
+ const restNoLeadingBlank = rest.replace(/^\r?\n+/, '');
738
+ const next = `${frontmatter}\n# ${title}\n\n${restNoLeadingBlank}`;
739
+ await fs.writeFile(filePath, next.endsWith('\n') ? next : `${next}\n`, 'utf8');
740
+ } else {
741
+ const restNoLeadingBlank = content.replace(/^\r?\n+/, '');
742
+ const next = `# ${title}\n\n${restNoLeadingBlank}`;
743
+ await fs.writeFile(filePath, next.endsWith('\n') ? next : `${next}\n`, 'utf8');
744
+ }
745
+ invalidateWikiContextCache();
746
+ return true;
747
+ }
748
+
749
+ function titleCaseFromSlug(slug: string): string {
750
+ return slug
751
+ .split(/[\/-]/)
752
+ .filter((part) => part.length > 0)
753
+ .map((part) => part.charAt(0).toUpperCase() + part.slice(1))
754
+ .join(' ');
755
+ }
756
+
757
// Replace the first paragraph of a wiki page with new operator-supplied text. Used by the
// maintenance inbox's `page-drift` resolve action when a finding is real (not snooze-worthy)
// — the page genuinely needs a rewritten summary that reflects what the page is now about.
//
// The "first paragraph" is the contiguous run of non-blank, non-heading lines after the
// optional frontmatter and the H1 heading. This matches what extractPageIntent considers
// the page intent and what the page-drift Jaccard signal scores against.
//
// Returns:
// - { changed: true, previousSummary } when the file was rewritten
// - { changed: false, previousSummary } when the new text already matches (idempotent)
//
// Throws if the page has no H1 (in which case `missing-h1` is the right finding to resolve
// first) or if the new summary text is empty after trimming.
export interface EditPageSummaryResult {
  slug: string; // page the edit applied to
  changed: boolean; // false when the replacement already matched (no write performed)
  previousSummary: string; // first paragraph before the edit ('' when the page had none)
  newSummary: string; // normalized (CRLF→LF, trimmed) replacement text
}
+ export async function editPageSummary(slug: string, newFirstParagraph: string): Promise<EditPageSummaryResult> {
779
+ const trimmedNew = newFirstParagraph.replace(/\r\n/g, '\n').trim();
780
+ if (!trimmedNew) {
781
+ throw new Error('editPageSummary requires a non-empty replacement paragraph.');
782
+ }
783
+
784
+ const filePath = pagePathFromSlug(slug);
785
+ const content = await fs.readFile(filePath, 'utf8');
786
+
787
+ const frontmatterMatch = content.match(/^(---\r?\n[\s\S]*?\r?\n---\r?\n)/);
788
+ const frontmatter = frontmatterMatch ? frontmatterMatch[1] : '';
789
+ const body = content.slice(frontmatter.length);
790
+
791
+ const h1Match = body.match(/^(\s*\r?\n)*(#\s+[^\n]+\r?\n)/);
792
+ if (!h1Match) {
793
+ throw new Error(`Page ${slug} has no H1 heading; resolve the missing-h1 finding before rewriting the summary.`);
794
+ }
795
+ const headerBlock = h1Match[0];
796
+ const afterHeader = body.slice(headerBlock.length);
797
+
798
+ // Identify the existing first paragraph: skip leading blank lines, then capture lines
799
+ // until the next blank line OR a heading. Everything after that is the rest of the page.
800
+ const lines = afterHeader.split(/\r?\n/);
801
+ let cursor = 0;
802
+ while (cursor < lines.length && lines[cursor].trim() === '') {
803
+ cursor += 1;
804
+ }
805
+ const paragraphStart = cursor;
806
+ while (cursor < lines.length) {
807
+ const line = lines[cursor];
808
+ if (line.trim() === '' || line.startsWith('#')) {
809
+ break;
810
+ }
811
+ cursor += 1;
812
+ }
813
+ const previousSummary = lines.slice(paragraphStart, cursor).join('\n').trim();
814
+ const remainder = lines.slice(cursor).join('\n');
815
+ const remainderNoLeadingBlank = remainder.replace(/^\r?\n+/, '');
816
+
817
+ if (previousSummary === trimmedNew) {
818
+ return { slug, changed: false, previousSummary, newSummary: trimmedNew };
819
+ }
820
+
821
+ const next = `${frontmatter}${headerBlock}\n${trimmedNew}\n\n${remainderNoLeadingBlank}`;
822
+ const finalText = next.endsWith('\n') ? next : `${next}\n`;
823
+ await fs.writeFile(filePath, finalText, 'utf8');
824
+ invalidateWikiContextCache();
825
+ return { slug, changed: true, previousSummary, newSummary: trimmedNew };
826
+ }
827
+
828
+ // Archive a dormant guidance file (e.g., a skill markdown that no other doc links to)
829
+ // by moving it into a sibling `archive/` directory. Idempotent: if the file is already
830
+ // under an `archive/` segment, returns the existing path with no work. The caller is
831
+ // expected to pass a relative-from-repo-root path (matching the lint finding's `path`).
832
+ export async function archiveGuidanceFile(relativePath: string): Promise<{ from: string; to: string; moved: boolean }> {
833
+ const trimmed = relativePath.trim().replace(/\\/g, '/');
834
+ if (!trimmed || trimmed.includes('..')) {
835
+ throw new Error(`Invalid guidance path for archive: ${relativePath}`);
836
+ }
837
+ const absoluteFrom = path.resolve(repoRoot, trimmed);
838
+ const stat = await fs.stat(absoluteFrom).catch(() => undefined);
839
+ if (!stat || !stat.isFile()) {
840
+ throw new Error(`Guidance file not found: ${trimmed}`);
841
+ }
842
+
843
+ const dir = path.posix.dirname(trimmed);
844
+ const fileName = path.posix.basename(trimmed);
845
+ if (dir.split('/').includes('archive')) {
846
+ return { from: trimmed, to: trimmed, moved: false };
847
+ }
848
+ const archiveDir = `${dir}/archive`;
849
+ const archiveRelative = `${archiveDir}/${fileName}`;
850
+ const absoluteTo = path.resolve(repoRoot, archiveRelative);
851
+ await fs.mkdir(path.dirname(absoluteTo), { recursive: true });
852
+ await fs.rename(absoluteFrom, absoluteTo);
853
+ invalidateWikiContextCache();
854
+ return { from: trimmed, to: archiveRelative, moved: true };
855
+ }
856
+
857
+ export function extractWikiPageMetadata(content: string): WikiPageMetadata {
858
+ const frontmatter = content.match(/^---\r?\n([\s\S]*?)\r?\n---\r?\n/)?.[1] ?? '';
859
+ const fields = new Map(
860
+ frontmatter
861
+ .split(/\r?\n/)
862
+ .map((line) => line.match(/^([A-Za-z][A-Za-z0-9_-]*):\s*(.*?)\s*$/))
863
+ .filter((match): match is RegExpMatchArray => Boolean(match))
864
+ .map((match) => [normalizeMetadataKey(match[1]), match[2].replace(/^['"]|['"]$/g, '')])
865
+ );
866
+
867
+ return {
868
+ lifecycle: parsePageLifecycle(fields.get('lifecycle')),
869
+ owner: fields.get('owner') || 'unassigned',
870
+ lastReviewed: fields.get('lastreviewed') || fields.get('last-reviewed') || '',
871
+ sourceCoverage: parseSourceCoverage(fields.get('sourcecoverage') || fields.get('source-coverage'))
872
+ };
873
+ }
874
+
875
+ function normalizeMetadataKey(value: string): string {
876
+ return value.toLowerCase().replace(/_/g, '-');
877
+ }
878
+
879
+ function parsePageLifecycle(value: string | undefined): WikiPageLifecycle {
880
+ switch (value?.trim()) {
881
+ case 'dormant':
882
+ case 'superseded':
883
+ case 'pending-review':
884
+ case 'generated':
885
+ return value.trim() as WikiPageLifecycle;
886
+ default:
887
+ return 'active';
888
+ }
889
+ }
890
+
891
+ function parseSourceCoverage(value: string | undefined): WikiPageMetadata['sourceCoverage'] {
892
+ switch (value?.trim()) {
893
+ case 'none':
894
+ case 'partial':
895
+ case 'complete':
896
+ return value.trim() as WikiPageMetadata['sourceCoverage'];
897
+ default:
898
+ return 'unknown';
899
+ }
900
+ }
901
+
902
+ export async function listWikiPages(): Promise<WikiPageSummary[]> {
903
+ const pages: WikiPageSummary[] = [];
904
+
905
+ async function walk(directory: string): Promise<void> {
906
+ const entries = await fs.readdir(directory, { withFileTypes: true });
907
+ for (const entry of entries) {
908
+ const fullPath = path.join(directory, entry.name);
909
+ if (entry.isDirectory()) {
910
+ await walk(fullPath);
911
+ continue;
912
+ }
913
+ if (!entry.isFile() || !entry.name.endsWith('.md')) {
914
+ continue;
915
+ }
916
+ const relative = path.relative(wikiRoot, fullPath).replace(/\\/g, '/');
917
+ const slug = relative.replace(/\.md$/i, '');
918
+ const content = await fs.readFile(fullPath, 'utf8');
919
+ const title = content.match(/^#\s+(.+)$/m)?.[1]?.trim() ?? slug;
920
+ pages.push({ slug, title, path: `docs/wiki/${relative}`, metadata: extractWikiPageMetadata(content) });
921
+ }
922
+ }
923
+
924
+ await walk(wikiRoot);
925
+ return pages.sort((a, b) => a.slug.localeCompare(b.slug));
926
+ }
927
+
928
+ export async function listGuidanceLifecycle(): Promise<WikiGuidanceLifecycleItem[]> {
929
+ const [guidanceFiles, pages, proposals, findings] = await Promise.all([
930
+ listProjectGuidanceFiles(),
931
+ listWikiPages(),
932
+ listWikiProposals(),
933
+ lintWikiPages()
934
+ ]);
935
+ const linkedFromByPath = await collectMarkdownInboundSources(guidanceFiles, pages);
936
+ const proposalByPath = new Map<string, WikiProposal>();
937
+ const archiveTargetByPath = new Map<string, string>();
938
+
939
+ for (const proposal of proposals) {
940
+ if (proposal.kind === 'merge-guidance') {
941
+ for (const duplicatePath of proposal.duplicatePaths) {
942
+ proposalByPath.set(duplicatePath, proposal);
943
+ }
944
+ for (const archiveTarget of proposal.archiveTargets) {
945
+ archiveTargetByPath.set(archiveTarget.sourcePath, archiveTarget.suggestedPath);
946
+ }
947
+ } else {
948
+ proposalByPath.set(proposal.guidancePath, proposal);
949
+ }
950
+ }
951
+
952
+ const dormantPaths = new Set(findings.filter((finding) => finding.rule === 'dormant-skill').map((finding) => finding.path));
953
+
954
+ return guidanceFiles.map<WikiGuidanceLifecycleItem>((guidance) => {
955
+ const linkedFrom = linkedFromByPath.get(guidance.path) ?? [];
956
+ const proposal = proposalByPath.get(guidance.path);
957
+ const archiveTarget = archiveTargetByPath.get(guidance.path);
958
+
959
+ if (proposal) {
960
+ return {
961
+ ...guidance,
962
+ status: 'pending-review',
963
+ linkedFrom,
964
+ archiveTarget,
965
+ reviewStatus: 'pending-review',
966
+ reason: `Active ${proposal.kind} proposal is waiting for operator review.`
967
+ };
968
+ }
969
+
970
+ if (archiveTarget) {
971
+ return {
972
+ ...guidance,
973
+ status: 'superseded',
974
+ linkedFrom,
975
+ archiveTarget,
976
+ reviewStatus: 'pending-review',
977
+ reason: 'Guidance has a concrete archive destination after review.'
978
+ };
979
+ }
980
+
981
+ if (dormantPaths.has(guidance.path)) {
982
+ return {
983
+ ...guidance,
984
+ status: 'dormant',
985
+ linkedFrom,
986
+ reviewStatus: 'none',
987
+ reason: 'Guidance is not linked from project docs or active guidance files.'
988
+ };
989
+ }
990
+
991
+ return {
992
+ ...guidance,
993
+ status: 'active',
994
+ linkedFrom,
995
+ reviewStatus: 'none',
996
+ reason: linkedFrom.length > 0 ? 'Guidance is linked from project docs or another active guidance file.' : 'Guidance is an active entry file.'
997
+ };
998
+ }).sort((left, right) => left.status.localeCompare(right.status) || left.path.localeCompare(right.path));
999
+ }
1000
+
1001
/**
 * Run every wiki/guidance lint rule and return the combined findings, sorted by
 * slug then rule name.
 *
 * Per-page rules: missing-h1, missing-summary, orphan-page, unsupported-claim,
 * stale-claim, page-drift, contradicts-shipped-memory.
 * Per-guidance-file rules: oversized-guidance, stale-guidance-reference,
 * unrouted-guidance, dormant-skill, duplicate-guidance, conflicting-guidance.
 *
 * Missing/unreadable inputs (project log, snoozes, memories, guidance files)
 * degrade to empty defaults rather than failing the whole lint pass.
 */
export async function lintWikiPages(): Promise<WikiLintFinding[]> {
  const pages = await listWikiPages();
  const findings: WikiLintFinding[] = [];
  const inboundLinks = await collectInboundWikiLinks(pages);
  const pageByPath = new Map(pages.map((page) => [page.path, page.slug]));
  const guidanceFiles = await listProjectGuidanceFiles();

  // Read project-log once so per-page drift detection doesn't re-read for each page.
  const projectLogContent = await fs.readFile(pagePathFromSlug('project-log'), 'utf8').catch(() => '');

  // Load active page-drift snoozes so we can suppress findings the operator has already
  // acknowledged as noise. Expired snoozes are pruned lazily inside the loader.
  const snoozedPageDrifts = await loadActivePageDriftSnoozes().catch(() => new Map());

  // Load active project-local memories once for the contradicts-shipped-memory rule.
  // Includes superseded records because being promoted-then-superseded still proves the
  // feature exists in the wiki — that's the strongest possible evidence against a
  // "this is missing" assertion.
  const activeMemoriesForContradictionCheck = await listProjectMemories({ includeArchived: true })
    .then((records) => records.filter((record) => record.status === 'active' || record.status === 'superseded'))
    .catch(() => []);

  for (const page of pages) {
    // Generated pages are managed by the API reference generator (or any future
    // generator that uses the same `lifecycle: generated` frontmatter convention).
    // Their source of truth lives outside the wiki — humans don't review them, and
    // surfacing lint findings on them in the maintenance inbox is noise. Skip every
    // per-page rule for these.
    if (page.metadata?.lifecycle === 'generated') {
      continue;
    }

    const content = await readWikiPage(page.slug);
    if (!hasH1(content)) {
      findings.push({
        rule: 'missing-h1',
        slug: page.slug,
        path: page.path,
        message: 'Page is missing a top-level H1 heading.'
      });
    }
    if (!hasSummaryParagraph(content)) {
      findings.push({
        rule: 'missing-summary',
        slug: page.slug,
        path: page.path,
        message: 'Page is missing a short summary paragraph after its H1.'
      });
    }
    if ((inboundLinks.get(page.slug) ?? 0) === 0) {
      findings.push({
        rule: 'orphan-page',
        slug: page.slug,
        path: page.path,
        message: 'Page is not linked from the project index or another wiki page.'
      });
    }

    // Claim checks: every claim needs at least one source; non-current claims are stale.
    for (const claim of extractWikiClaims(page.slug, content, pageByPath)) {
      if (claim.sources.length === 0) {
        findings.push({
          rule: 'unsupported-claim',
          slug: page.slug,
          path: page.path,
          message: `Claim is missing supporting sources: ${claim.text}`
        });
      }
      if (claim.status === 'current') {
        continue;
      }
      findings.push({
        rule: 'stale-claim',
        slug: page.slug,
        path: page.path,
        message: `Claim is marked ${claim.status}: ${claim.text}`
      });
    }

    // Page drift: only check pages that aren't the project-log itself (which trivially mentions every other page).
    // Skip pages the operator has snoozed — these are findings already acknowledged as noise.
    if (page.slug !== 'project-log' && projectLogContent && !snoozedPageDrifts.has(page.slug)) {
      const drift = detectPageDrift(content, page.slug, projectLogContent);
      if (drift) {
        findings.push({
          rule: 'page-drift',
          slug: page.slug,
          path: page.path,
          message: buildPageDriftMessage(drift)
        });
      }
    }

    // contradicts-shipped-memory: catch sections that claim X doesn't exist while shipped
    // memories say it does. Skipped on the project-log itself (chronological log of changes,
    // not a claims surface) — same exclusion as page-drift, for the same reason.
    if (page.slug !== 'project-log' && activeMemoriesForContradictionCheck.length > 0) {
      const contradictions = detectContradictsShippedMemory(
        content,
        activeMemoriesForContradictionCheck,
        projectLogContent
      );
      for (const signal of contradictions) {
        findings.push({
          rule: 'contradicts-shipped-memory',
          slug: page.slug,
          path: page.path,
          message: buildContradictsShippedMemoryMessage(signal)
        });
      }
    }
  }

  // Guidance-file rules. Note: guidance findings use the file path for both slug and path.
  for (const guidance of guidanceFiles) {
    const content = await fs.readFile(path.join(repoRoot, guidance.path), 'utf8').catch(() => '');
    const lineCount = countLines(content);
    if (lineCount > maxGuidanceLineCount) {
      findings.push({
        rule: 'oversized-guidance',
        slug: guidance.path,
        path: guidance.path,
        message: `Guidance file exceeds ${maxGuidanceLineCount} lines: ${guidance.path} (${lineCount} lines).`
      });
    }

    for (const brokenLink of findBrokenGuidanceLinks(content, guidance.path)) {
      findings.push({
        rule: 'stale-guidance-reference',
        slug: guidance.path,
        path: guidance.path,
        message: `Guidance file links to missing markdown: ${brokenLink}`
      });
    }

    if (guidance.kind !== 'skill' && !hasGuidanceRoute(content, guidance.path)) {
      findings.push({
        rule: 'unrouted-guidance',
        slug: guidance.path,
        path: guidance.path,
        message: 'Guidance file should link to at least one canonical local docs page.'
      });
    }
  }

  // Skills with zero inbound links (from docs or other guidance) are dormant.
  const guidanceInboundLinks = await collectMarkdownInboundLinks(guidanceFiles, pages);
  for (const guidance of guidanceFiles.filter((candidate) => candidate.kind === 'skill')) {
    if ((guidanceInboundLinks.get(guidance.path) ?? 0) > 0) {
      continue;
    }
    findings.push({
      rule: 'dormant-skill',
      slug: guidance.path,
      path: guidance.path,
      message: 'Skill file is not linked from project docs or active guidance files.'
    });
  }

  // Duplicate groups produce one finding per member, each listing the full group.
  for (const duplicateGroup of await findDuplicateGuidanceGroups()) {
    const joinedPaths = duplicateGroup.map((guidance) => guidance.path).sort().join(', ');
    for (const guidance of duplicateGroup) {
      findings.push({
        rule: 'duplicate-guidance',
        slug: guidance.path,
        path: guidance.path,
        message: `Guidance content duplicates: ${joinedPaths}`
      });
    }
  }

  // Conflicting rules likewise fan out: one finding per involved file.
  for (const conflict of await findConflictingGuidanceRules()) {
    const joinedPaths = conflict.paths.join(', ');
    for (const guidancePath of conflict.paths) {
      findings.push({
        rule: 'conflicting-guidance',
        slug: guidancePath,
        path: guidancePath,
        message: `Guidance conflicts on "${conflict.rule}": ${joinedPaths}`
      });
    }
  }

  return findings.sort((a, b) => a.slug.localeCompare(b.slug) || a.rule.localeCompare(b.rule));
}
+ export async function searchWikiPages(query: string): Promise<WikiSearchResult[]> {
1185
+ const index = await buildCurrentWikiSearchIndex();
1186
+ return searchWikiIndex(index, query);
1187
+ }
1188
+
1189
/**
 * Assemble the session-start context briefing for `query`: ranked wiki pages,
 * recent project-log entries, lint findings, recalled handoffs/memories/skills,
 * claims, guidance files, open questions, and the memory backlog summary.
 * Results are cached per (query, options) in the shared context cache; a cache
 * hit returns immediately. Side effect: reinforces page→query memory-trail
 * edges for the selected pages (best-effort, failures swallowed).
 */
export async function buildWikiContext(query: string, options: WikiContextOptions = {}): Promise<WikiContextResult> {
  const cached = getCachedWikiContext(query, options);
  if (cached) {
    return cached;
  }

  // Clamp every limit option to a sane range before use.
  const maxPages = Math.max(1, options.maxPages ?? defaultContextPageLimit);
  const maxLogEntries = Math.max(0, options.maxLogEntries ?? defaultLogEntryLimit);
  const maxSkills = Math.max(1, Math.min(options.maxSkills ?? 3, 20));
  const index = await buildCurrentWikiSearchIndex();
  const queryTerms = tokenizeSearchQuery(query);
  const searchResults = searchWikiIndex(index, query);
  const rankedResults = searchResults.length > 0 ? searchResults : fallbackSearchResults(index);
  const selectedResults = rankedResults.slice(0, maxPages);
  // Cap the omittedPageReasons payload: 96+ omitted entries with full reason strings
  // can dominate the wiki_context payload (single largest contributor at session start).
  // The full omitted count is still reported via `omittedPages`; the reasons here are
  // a triage hint, not a full audit trail. Operators who want more can wiki_search.
  const maxOmittedPageReasons = Math.max(0, options.maxOmittedPageReasons ?? 12);
  const maxOmittedReasonChars = Math.max(20, options.maxOmittedReasonChars ?? 80);
  const omittedPageReasons = rankedResults
    .slice(maxPages, maxPages + maxOmittedPageReasons)
    .map((result) => ({
      slug: result.slug,
      score: result.score,
      reason: truncateForBriefing(result.reasons.join('; '), maxOmittedReasonChars)
    }));
  const selectedPages = selectedResults.map((result) => searchResultToContextPage(result));

  // Memory Trails: page→query edges (shadow mode for the bonus, active for reinforcement).
  // Reinforcement runs unconditionally so edges accrue from real usage. Bonus is surfaced
  // as a `[shadow] page recall trail: ...` reason on each page that has accumulated edges,
  // but is NOT added to the score yet — same kill-switch principle as the bipartite
  // projection shadow mode. Watch the recall benchmark before promoting to active ranking.
  const pageTrailLookup = await loadMemoryTrailBonusLookup('page', query).catch(() => () => undefined);
  for (const page of selectedPages) {
    const bonus = pageTrailLookup(page.slug);
    if (bonus) {
      const trailReason = `[shadow] page recall trail: ${buildMemoryTrailReason(bonus)} (not yet applied to ranking)`;
      page.reason = page.reason ? `${page.reason}; ${trailReason}` : trailReason;
    }
  }
  if (selectedPages.length > 0) {
    await reinforceQueryEdges('page', selectedPages.map((page) => page.slug), query).catch(() => undefined);
  }

  const recentLogEntries = maxLogEntries > 0 ? await listRecentProjectLogEntries(maxLogEntries) : [];
  const findings = options.includeLint === false ? [] : await lintWikiPages();
  const handoffs = await recallProjectHandoffs({
    relatedPages: selectedPages.map((page) => page.slug),
    maxItems: Math.max(1, Math.min(maxPages, 2))
  });
  // Handoff-kind records are listed separately; drop them (and any id overlap) from memories.
  const memories = (await recallProjectMemories(query, {
    relatedPages: selectedPages.map((page) => page.slug),
    maxItems: Math.max(1, Math.min(maxPages, 5))
  })).filter((memory) => memory.kind !== 'handoff' && !handoffs.some((handoff) => handoff.id === memory.id));
  const skills = await recallProjectSkills({
    query,
    relatedFiles: options.relatedFiles,
    languages: options.languages,
    frameworks: options.frameworks,
    maxItems: maxSkills
  });
  const claims = rankContextClaims(
    selectedResults.flatMap((result) => index.pages.find((document) => document.page.slug === result.slug)?.claims ?? []),
    queryTerms
  ).slice(0, maxPages * 2);
  const guidanceFiles = await listProjectGuidanceFiles();
  const openQuestions = buildOpenQuestions(claims, findings);
  // Brain-faithfulness roadmap B5: surface the unprocessed memory backlog so the
  // briefing tells the operator/agent what is sitting in the inbox waiting for
  // triage. Lightweight — counts only, no findings list.
  const memoryBacklog = await summarizeMemoryBacklog().catch(() => ({
    promotionReady: 0,
    skillPromotionReady: 0,
    staleUnsupported: 0,
    total: 0
  }));

  // Cap handoff/memory/skill body text in the briefing payload. The recall functions
  // return full records; the briefing is meant to be a *briefing*, not a memory dump.
  // Full bodies remain available via memory_recall, wiki_skill_load, and (for handoffs)
  // a memory_recall call by id. Truncated records still carry every other field — ids,
  // tags, sources, relatedFiles, recallCount — so the agent knows exactly what to fetch
  // for a deeper read.
  const maxHandoffTextChars = Math.max(120, options.maxHandoffTextChars ?? 1200);
  const maxMemoryTextChars = Math.max(120, options.maxMemoryTextChars ?? 600);
  const maxSkillTextChars = Math.max(120, options.maxSkillTextChars ?? 800);
  const trimmedHandoffs = handoffs.map((handoff) => ({
    ...handoff,
    text: truncateForBriefing(handoff.text, maxHandoffTextChars)
  }));
  const trimmedMemories = memories.map((memory) => ({
    ...memory,
    text: truncateForBriefing(memory.text, maxMemoryTextChars)
  }));
  const trimmedSkills = skills.map((skill) => ({
    ...skill,
    text: truncateForBriefing(skill.text, maxSkillTextChars)
  }));

  const result: WikiContextResult = {
    query,
    briefing: buildContextBriefing(selectedPages, trimmedHandoffs, trimmedMemories, trimmedSkills, claims, guidanceFiles, recentLogEntries, findings, omittedPageReasons, memoryBacklog),
    readFirst: selectedPages.map((page) => page.slug),
    handoffs: trimmedHandoffs,
    pages: selectedPages,
    memories: trimmedMemories,
    skills: trimmedSkills,
    claims,
    guidanceFiles,
    omittedPages: Math.max(rankedResults.length - maxPages, 0),
    omittedPageReasons,
    recentLogEntries,
    findings,
    openQuestions,
    memoryBacklog
  };
  setCachedWikiContext(query, options, result);
  return result;
}
+ export async function buildWikiGraphSnapshot(): Promise<WikiGraphSnapshot> {
1312
+ const index = await buildCurrentWikiSearchIndex();
1313
+ const nodes = index.pages.map(({ page, claims }) => {
1314
+ const graph = index.graph.get(page.slug) ?? {
1315
+ slug: page.slug,
1316
+ inboundLinks: 0,
1317
+ outgoingLinks: [],
1318
+ relatedPages: []
1319
+ };
1320
+
1321
+ return {
1322
+ ...graph,
1323
+ title: page.title,
1324
+ path: page.path,
1325
+ claimCount: claims.length,
1326
+ staleClaimCount: claims.filter((claim) => claim.status !== 'current').length
1327
+ };
1328
+ });
1329
+
1330
+ return {
1331
+ pages: nodes.length,
1332
+ nodes: nodes.sort((left, right) => left.slug.localeCompare(right.slug))
1333
+ };
1334
+ }
1335
+
1336
+ async function buildCurrentWikiSearchIndex(): Promise<WikiSearchIndex> {
1337
+ const pages = await listWikiPages();
1338
+ const pageByPath = new Map(pages.map((page) => [page.path, page.slug]));
1339
+ const documents = await Promise.all(
1340
+ pages.map(async (page) => {
1341
+ const content = await readWikiPage(page.slug);
1342
+ return {
1343
+ page,
1344
+ content,
1345
+ claims: extractWikiClaims(page.slug, content, pageByPath)
1346
+ };
1347
+ })
1348
+ );
1349
+ const indexContent = await fs.readFile(path.join(docsRoot, 'index.md'), 'utf8').catch(() => '');
1350
+ return buildWikiSearchIndex({ pages: documents, indexContent });
1351
+ }
1352
+
1353
+ async function collectInboundWikiLinks(pages: WikiPageSummary[]): Promise<Map<string, number>> {
1354
+ const counts = new Map(pages.map((page) => [page.slug, 0]));
1355
+ const pageByPath = new Map(pages.map((page) => [page.path, page.slug]));
1356
+ const sources = [
1357
+ { path: 'docs/index.md', content: await fs.readFile(path.join(docsRoot, 'index.md'), 'utf8').catch(() => '') },
1358
+ ...(await Promise.all(
1359
+ pages.map(async (page) => ({ path: page.path, content: await readWikiPage(page.slug) }))
1360
+ ))
1361
+ ];
1362
+
1363
+ for (const source of sources) {
1364
+ const sourceDir = path.posix.dirname(source.path);
1365
+ for (const link of extractMarkdownLinks(source.content)) {
1366
+ const linkedSlug = resolveWikiLinkSlug(link, sourceDir, pageByPath);
1367
+ if (!linkedSlug) {
1368
+ continue;
1369
+ }
1370
+ counts.set(linkedSlug, (counts.get(linkedSlug) ?? 0) + 1);
1371
+ }
1372
+ }
1373
+
1374
+ return counts;
1375
+ }
1376
+
1377
+ function hasH1(content: string): boolean {
1378
+ return /^#\s+\S+/m.test(content);
1379
+ }
1380
+
1381
+ function hasSummaryParagraph(content: string): boolean {
1382
+ const lines = content.split(/\r?\n/);
1383
+ const h1Index = lines.findIndex((line) => /^#\s+\S+/.test(line));
1384
+ if (h1Index === -1) {
1385
+ return false;
1386
+ }
1387
+
1388
+ for (const line of lines.slice(h1Index + 1)) {
1389
+ const trimmed = line.trim();
1390
+ if (!trimmed) {
1391
+ continue;
1392
+ }
1393
+ if (trimmed.startsWith('#')) {
1394
+ return false;
1395
+ }
1396
+ return !trimmed.startsWith('|') && !trimmed.startsWith('- ') && !/^\d+\.\s/.test(trimmed);
1397
+ }
1398
+
1399
+ return false;
1400
+ }
1401
+
1402
+ function extractSummaryParagraph(content: string): string {
1403
+ const lines = content.split(/\r?\n/);
1404
+ const h1Index = lines.findIndex((line) => /^#\s+\S+/.test(line));
1405
+ const bodyLines = lines.slice(h1Index === -1 ? 0 : h1Index + 1);
1406
+
1407
+ for (const line of bodyLines) {
1408
+ const trimmed = line.trim();
1409
+ if (!trimmed) {
1410
+ continue;
1411
+ }
1412
+ if (trimmed.startsWith('#') || trimmed.startsWith('|') || trimmed.startsWith('- ') || /^\d+\.\s/.test(trimmed)) {
1413
+ continue;
1414
+ }
1415
+ return trimmed;
1416
+ }
1417
+
1418
+ return '';
1419
+ }
1420
+
1421
+ function tokenizeQuery(query: string): string[] {
1422
+ return Array.from(
1423
+ new Set(
1424
+ query
1425
+ .toLowerCase()
1426
+ .split(/[^a-z0-9]+/i)
1427
+ .map((part) => part.trim())
1428
+ .filter((part) => part.length >= 2 && !contextStopTerms.has(part))
1429
+ )
1430
+ );
1431
+ }
1432
+
1433
+ function scoreContextPage(
1434
+ page: WikiPageSummary,
1435
+ content: string,
1436
+ queryTerms: string[],
1437
+ inboundLinks: Map<string, number>,
1438
+ pageByPath: Map<string, string>
1439
+ ): WikiContextPage {
1440
+ const summary = extractSummaryParagraph(content) || page.title;
1441
+ const title = page.title.toLowerCase();
1442
+ const slug = page.slug.toLowerCase();
1443
+ const haystack = content.toLowerCase();
1444
+ const reasons = new Set<string>();
1445
+ const matchedTerms = new Set<string>();
1446
+ let score = 0;
1447
+
1448
+ for (const term of queryTerms) {
1449
+ if (title.includes(term)) {
1450
+ score += 6;
1451
+ reasons.add(`title matches "${term}"`);
1452
+ matchedTerms.add(term);
1453
+ } else if (slug.includes(term)) {
1454
+ score += 5;
1455
+ reasons.add(`slug matches "${term}"`);
1456
+ matchedTerms.add(term);
1457
+ }
1458
+
1459
+ if (haystack.includes(term)) {
1460
+ score += 2;
1461
+ reasons.add(`content mentions "${term}"`);
1462
+ matchedTerms.add(term);
1463
+ }
1464
+ }
1465
+
1466
+ if (page.slug === 'project-log' && queryTerms.some((term) => projectLogHintTerms.has(term))) {
1467
+ score += 4;
1468
+ reasons.add('project log helps with recent changes');
1469
+ }
1470
+
1471
+ if (score > 0 || queryTerms.length === 0) {
1472
+ const inboundCount = inboundLinks.get(page.slug) ?? 0;
1473
+ if (inboundCount > 0) {
1474
+ score += Math.min(inboundCount, 3);
1475
+ reasons.add(inboundCount > 1 ? `${inboundCount} inbound links` : 'linked from the wiki');
1476
+ }
1477
+ }
1478
+
1479
+ const inboundCount = inboundLinks.get(page.slug) ?? 0;
1480
+ const relatedPages = extractRelatedWikiSlugs(content, page.path, pageByPath).slice(0, 3);
1481
+
1482
+ return {
1483
+ ...page,
1484
+ score,
1485
+ summary,
1486
+ reason: Array.from(reasons).slice(0, 3).join('; ') || 'fallback page for broad project briefing',
1487
+ evidence: {
1488
+ matchedTerms: Array.from(matchedTerms),
1489
+ inboundLinks: inboundCount,
1490
+ relatedPages
1491
+ }
1492
+ };
1493
+ }
1494
+
1495
+ function fallbackContextPage(page: WikiContextPage, inboundLinks: Map<string, number>): WikiContextPage {
1496
+ const inboundCount = inboundLinks.get(page.slug) ?? 0;
1497
+ let score = Math.min(inboundCount, 3);
1498
+ let reason = inboundCount > 0 ? `fallback page with ${inboundCount} inbound links` : 'fallback page for broad project briefing';
1499
+
1500
+ if (page.slug === 'architecture') {
1501
+ score += 4;
1502
+ reason = 'default architecture briefing page';
1503
+ } else if (page.slug === 'project-log') {
1504
+ score += 3;
1505
+ reason = 'default recent changes briefing page';
1506
+ }
1507
+
1508
+ return {
1509
+ ...page,
1510
+ score,
1511
+ reason,
1512
+ evidence: {
1513
+ ...page.evidence,
1514
+ inboundLinks: inboundCount
1515
+ }
1516
+ };
1517
+ }
1518
+
1519
/**
 * Assemble the one-paragraph natural-language briefing that heads the
 * wiki_context payload: memory-backlog banner, reading order, counts of the
 * attached supporting material (log entries, handoffs, memories, skills,
 * claims, guidance files), what the page budget omitted, and whether lint
 * findings put the briefing at risk. Returns the sentences joined by spaces.
 */
function buildContextBriefing(
  pages: WikiContextPage[],
  handoffs: RecalledProjectMemory[],
  memories: RecalledProjectMemory[],
  skills: RecalledProjectSkill[],
  claims: WikiClaim[],
  guidanceFiles: WikiGuidanceFile[],
  recentLogEntries: string[],
  findings: WikiLintFinding[],
  omittedPageReasons: Array<{ slug: string; score: number; reason: string }>,
  memoryBacklog: MemoryBacklogSummary
): string {
  const lines: string[] = [];

  // Brain-faithfulness roadmap B5: surface unprocessed memory backlog so the agent
  // sees waiting triage work at every wiki_context call. Cache invalidates on any
  // memory mutation so the banner stays accurate; suppressed entirely when zero.
  if (memoryBacklog.total > 0) {
    const parts: string[] = [];
    if (memoryBacklog.promotionReady > 0) {
      parts.push(`${memoryBacklog.promotionReady} promotion-ready`);
    }
    if (memoryBacklog.skillPromotionReady > 0) {
      parts.push(`${memoryBacklog.skillPromotionReady} skill-promotion-ready`);
    }
    if (memoryBacklog.staleUnsupported > 0) {
      parts.push(`${memoryBacklog.staleUnsupported} stale-unsupported`);
    }
    lines.push(
      `Memory backlog: ${parts.join(', ')} memor${memoryBacklog.total === 1 ? 'y' : 'ies'} waiting in the inbox. Call wiki_maintenance_inbox to triage, or memory_review for the full findings list.`
    );
  }

  // Pages arrive pre-ranked; the first one is the headline recommendation.
  if (pages.length > 0) {
    const readFirst = pages.map((page) => page.slug).join(', ');
    lines.push(`Read first: ${readFirst}.`);
    lines.push(`Top page: ${pages[0]?.slug} because ${pages[0]?.reason}.`);
  }

  // Each attachment category gets a count sentence, suppressed when empty.
  if (recentLogEntries.length > 0) {
    lines.push(`${recentLogEntries.length} recent project log entr${recentLogEntries.length === 1 ? 'y is' : 'ies are'} included.`);
  }

  if (handoffs.length > 0) {
    lines.push(`${handoffs.length} recent session handoff${handoffs.length === 1 ? ' is' : 's are'} included.`);
  }

  if (memories.length > 0) {
    lines.push(`${memories.length} project-local memor${memories.length === 1 ? 'y is' : 'ies are'} included.`);
  }

  if (skills.length > 0) {
    lines.push(
      `${skills.length} matching skill${skills.length === 1 ? '' : 's'} included; call wiki_skill_load(id) for full content.`
    );
  }

  if (claims.length > 0) {
    lines.push(`${claims.length} source-backed claim${claims.length === 1 ? ' is' : 's are'} included.`);
  }

  if (guidanceFiles.length > 0) {
    lines.push(`${guidanceFiles.length} project guidance file${guidanceFiles.length === 1 ? ' is' : 's are'} included.`);
  }

  // Budget transparency: name (up to three of) the ranked pages we dropped and why.
  if (omittedPageReasons.length > 0) {
    const omittedSummary = omittedPageReasons
      .slice(0, 3)
      .map((page) => `${page.slug} (${page.reason})`)
      .join('; ');
    lines.push(`${omittedPageReasons.length} ranked page${omittedPageReasons.length === 1 ? ' was' : 's were'} omitted by the page budget: ${omittedSummary}.`);
  }

  // Lint status is always stated, even when clean.
  if (findings.length === 0) {
    lines.push('No current lint findings are blocking the briefing.');
  } else {
    lines.push(`${findings.length} lint finding${findings.length === 1 ? '' : 's'} should be treated as context risk.`);
  }

  return lines.join(' ');
}
1600
+
1601
+ // Trim a body field for inclusion in the wiki_context briefing payload. Appends an
1602
+ // ellipsis when truncated so the agent can tell at a glance that the full body lives
1603
+ // elsewhere (call wiki_skill_load / memory_recall by id to fetch it).
1604
+ function truncateForBriefing(text: string, maxChars: number): string {
1605
+ if (!text || text.length <= maxChars) return text;
1606
+ return `${text.slice(0, Math.max(1, maxChars - 1)).trimEnd()}…`;
1607
+ }
1608
+
1609
+ export async function listProjectGuidanceFiles(): Promise<WikiGuidanceFile[]> {
1610
+ const results = new Map<string, WikiGuidanceFile>();
1611
+ const candidateFiles: Array<{ relativePath: string; kind: WikiGuidanceKind }> = [
1612
+ { relativePath: 'AGENTS.md', kind: 'agents' },
1613
+ { relativePath: '.github/copilot-instructions.md', kind: 'copilot-instructions' }
1614
+ ];
1615
+ const candidateDirectories: Array<{ relativeDir: string; kind: WikiGuidanceKind; pattern: RegExp }> = [
1616
+ { relativeDir: '.github/instructions', kind: 'instruction', pattern: /\.instructions\.md$/i },
1617
+ { relativeDir: '.github/prompts', kind: 'prompt', pattern: /\.prompt\.md$/i },
1618
+ { relativeDir: '.github/agents', kind: 'agent', pattern: /\.agent\.md$/i },
1619
+ { relativeDir: 'skills', kind: 'skill', pattern: /SKILL\.md$/i }
1620
+ ];
1621
+
1622
+ for (const candidate of candidateFiles) {
1623
+ const guidance = await readGuidanceFile(repoRoot, candidate.relativePath, candidate.kind);
1624
+ if (guidance) {
1625
+ results.set(guidance.path, guidance);
1626
+ }
1627
+ }
1628
+
1629
+ for (const candidate of candidateDirectories) {
1630
+ for (const relativePath of await findGuidanceFiles(path.join(repoRoot, candidate.relativeDir), candidate.pattern, repoRoot)) {
1631
+ const guidance = await readGuidanceFile(repoRoot, relativePath, candidate.kind);
1632
+ if (guidance) {
1633
+ results.set(guidance.path, guidance);
1634
+ }
1635
+ }
1636
+ }
1637
+
1638
+ return Array.from(results.values()).sort((left, right) => left.path.localeCompare(right.path));
1639
+ }
1640
+
1641
/**
 * Read one guidance file and produce its listing descriptor.
 *
 * @param repoRoot - absolute repository root the relative path is joined to.
 * @param relativePath - repo-relative location of the guidance file (kept with
 *   forward slashes in the returned descriptor).
 * @param kind - guidance category recorded on the result.
 * @returns the guidance descriptor, or undefined when the file is missing
 *   (ENOENT) or still empty/unreadable after the retries described below.
 */
async function readGuidanceFile(
  repoRoot: string,
  relativePath: string,
  kind: WikiGuidanceKind
): Promise<WikiGuidanceFile | undefined> {
  const absolutePath = path.join(repoRoot, relativePath);

  // Retry transient empty/error reads. Observed on ubuntu-latest CI: the same
  // guidance file (e.g. AGENTS.md, 2222 bytes, untouched on disk) intermittently
  // returns empty content from fs.readFile when many sequential reads happen
  // through the test suite, which silently drops the file from the guidance
  // listing and cascades into surprising downstream test failures (lint
  // findings missing, route-guidance proposals not generated, etc.).
  //
  // We treat this as a transient I/O hiccup and retry up to 3 times with brief
  // backoff. ENOENT (file genuinely doesn't exist) is the common
  // "skip silently" path so it short-circuits the retry. Any other error code
  // is surfaced via console.warn on the final retry so a real bug doesn't hide
  // behind the silent-failure semantic.
  let lastErr: NodeJS.ErrnoException | undefined;
  for (let attempt = 0; attempt < 3; attempt++) {
    let content: string | undefined;
    try {
      content = await fs.readFile(absolutePath, 'utf8');
    } catch (err) {
      const e = err as NodeJS.ErrnoException;
      if (e?.code === 'ENOENT') {
        // File doesn't exist — caller's contract is "return undefined" silently.
        return undefined;
      }
      lastErr = e;
    }

    // NOTE: a successfully-read empty file ('') is falsy and therefore also
    // retried — per the CI observation above, empty content is treated as a
    // transient read failure rather than a legitimate empty guidance file.
    if (content) {
      return {
        path: relativePath.replace(/\\/g, '/'),
        kind,
        summary: extractSummaryParagraph(content) || path.basename(relativePath)
      };
    }

    // Brief backoff before the next attempt (skipped after the last one).
    if (attempt < 2) {
      await new Promise((resolve) => setTimeout(resolve, 25));
    }
  }

  // All retries exhausted: warn with as much detail as we have, then honor the
  // "return undefined" contract so callers simply omit this file.
  if (lastErr) {
    // eslint-disable-next-line no-console
    console.warn(
      `[dendrite] readGuidanceFile: persistent error reading ${relativePath} (${lastErr.code ?? 'unknown'}): ${lastErr.message}. Returning undefined; downstream lint/proposals will not see this guidance file.`
    );
  } else {
    // eslint-disable-next-line no-console
    console.warn(
      `[dendrite] readGuidanceFile: ${relativePath} read returned empty content on 3 attempts. Returning undefined; downstream lint/proposals will not see this guidance file. If you can reproduce this on CI, please open an issue with the runner OS and Node version.`
    );
  }
  return undefined;
}
1700
+
1701
+ async function findGuidanceFiles(directory: string, pattern: RegExp, repoRoot: string): Promise<string[]> {
1702
+ const entries = await fs.readdir(directory, { withFileTypes: true }).catch(() => []);
1703
+ const matches: string[] = [];
1704
+
1705
+ for (const entry of entries) {
1706
+ const fullPath = path.join(directory, entry.name);
1707
+ if (entry.isDirectory()) {
1708
+ matches.push(...(await findGuidanceFiles(fullPath, pattern, repoRoot)));
1709
+ continue;
1710
+ }
1711
+ if (entry.isFile() && pattern.test(entry.name)) {
1712
+ matches.push(path.relative(repoRoot, fullPath));
1713
+ }
1714
+ }
1715
+
1716
+ return matches;
1717
+ }
1718
+
1719
+ async function collectMarkdownInboundLinks(
1720
+ guidanceFiles: WikiGuidanceFile[],
1721
+ pages: WikiPageSummary[]
1722
+ ): Promise<Map<string, number>> {
1723
+ const sources = await collectMarkdownInboundSources(guidanceFiles, pages);
1724
+ return new Map([...sources.entries()].map(([targetPath, sourcePaths]) => [targetPath, sourcePaths.length]));
1725
+ }
1726
+
1727
+ async function collectMarkdownInboundSources(
1728
+ guidanceFiles: WikiGuidanceFile[],
1729
+ pages: WikiPageSummary[]
1730
+ ): Promise<Map<string, string[]>> {
1731
+ const inboundLinks = new Map(guidanceFiles.map((guidance) => [guidance.path, 0]));
1732
+ const inboundSources = new Map(guidanceFiles.map((guidance) => [guidance.path, [] as string[]]));
1733
+ const sourceFiles = [
1734
+ 'docs/index.md',
1735
+ 'docs/project-plan.md',
1736
+ ...pages.map((page) => page.path),
1737
+ ...guidanceFiles.map((guidance) => guidance.path)
1738
+ ];
1739
+
1740
+ for (const sourcePath of Array.from(new Set(sourceFiles)).sort()) {
1741
+ const content = await fs.readFile(path.join(repoRoot, sourcePath), 'utf8').catch(() => '');
1742
+ const sourceDir = path.posix.dirname(sourcePath);
1743
+
1744
+ for (const link of extractMarkdownLinks(content)) {
1745
+ const targetPath = resolveMarkdownLinkPath(link, sourceDir);
1746
+ if (!targetPath || targetPath === sourcePath || !inboundLinks.has(targetPath)) {
1747
+ continue;
1748
+ }
1749
+ inboundLinks.set(targetPath, (inboundLinks.get(targetPath) ?? 0) + 1);
1750
+ inboundSources.get(targetPath)?.push(sourcePath);
1751
+ }
1752
+ }
1753
+
1754
+ return new Map(
1755
+ [...inboundSources.entries()].map(([targetPath, sourcePaths]) => [
1756
+ targetPath,
1757
+ Array.from(new Set(sourcePaths)).sort((left, right) => left.localeCompare(right))
1758
+ ])
1759
+ );
1760
+ }
1761
+
1762
+ async function findDuplicateGuidanceGroups(): Promise<WikiGuidanceFile[][]> {
1763
+ const guidanceFiles = await listProjectGuidanceFiles();
1764
+ const fingerprintGroups = new Map<string, WikiGuidanceFile[]>();
1765
+
1766
+ for (const guidance of guidanceFiles) {
1767
+ const content = await fs.readFile(path.join(repoRoot, guidance.path), 'utf8').catch(() => '');
1768
+ const fingerprint = buildGuidanceFingerprint(content);
1769
+ if (!fingerprint) {
1770
+ continue;
1771
+ }
1772
+
1773
+ const group = fingerprintGroups.get(fingerprint) ?? [];
1774
+ group.push(guidance);
1775
+ fingerprintGroups.set(fingerprint, group);
1776
+ }
1777
+
1778
+ return Array.from(fingerprintGroups.values())
1779
+ .filter((group) => group.length > 1)
1780
+ .map((group) => group.sort((left, right) => left.path.localeCompare(right.path)));
1781
+ }
1782
+
1783
+ async function findConflictingGuidanceRules(): Promise<Array<{ rule: string; paths: string[] }>> {
1784
+ const guidanceFiles = await listProjectGuidanceFiles();
1785
+ const directiveMap = new Map<string, { positive: Set<string>; negative: Set<string> }>();
1786
+
1787
+ for (const guidance of guidanceFiles) {
1788
+ const content = await fs.readFile(path.join(repoRoot, guidance.path), 'utf8').catch(() => '');
1789
+ for (const directive of extractGuidanceDirectives(content)) {
1790
+ const current = directiveMap.get(directive.rule) ?? { positive: new Set<string>(), negative: new Set<string>() };
1791
+ current[directive.polarity].add(guidance.path);
1792
+ directiveMap.set(directive.rule, current);
1793
+ }
1794
+ }
1795
+
1796
+ return Array.from(directiveMap.entries())
1797
+ .filter(([, polarities]) => polarities.positive.size > 0 && polarities.negative.size > 0)
1798
+ .map(([rule, polarities]) => ({
1799
+ rule,
1800
+ paths: Array.from(new Set([...polarities.positive, ...polarities.negative])).sort((left, right) => left.localeCompare(right))
1801
+ }));
1802
+ }
1803
+
1804
+ function extractGuidanceDirectives(content: string): Array<{ polarity: 'positive' | 'negative'; rule: string }> {
1805
+ return Array.from(
1806
+ new Map(
1807
+ content
1808
+ .split(/\r?\n/)
1809
+ .map((line) => parseGuidanceDirective(line))
1810
+ .filter((directive): directive is { polarity: 'positive' | 'negative'; rule: string } => Boolean(directive))
1811
+ .map((directive) => [`${directive.polarity}:${directive.rule}`, directive])
1812
+ ).values()
1813
+ );
1814
+ }
1815
+
1816
+ function parseGuidanceDirective(line: string): { polarity: 'positive' | 'negative'; rule: string } | undefined {
1817
+ const trimmed = line.trim();
1818
+ if (!trimmed || /^#/.test(trimmed)) {
1819
+ return undefined;
1820
+ }
1821
+
1822
+ const normalized = trimmed
1823
+ .replace(/^[-*]\s+/, '')
1824
+ .replace(/^\d+\.\s+/, '')
1825
+ .replace(/[.?!]+$/, '')
1826
+ .replace(/\s+/g, ' ')
1827
+ .trim();
1828
+
1829
+ const negativeMatch = normalized.match(/^(do not|don't|never|avoid|must not|should not)\s+(.+)$/i);
1830
+ if (negativeMatch) {
1831
+ return { polarity: 'negative', rule: negativeMatch[2].trim().toLowerCase() };
1832
+ }
1833
+
1834
+ const positiveMatch = normalized.match(/^(always|must|should|prefer)\s+(.+)$/i);
1835
+ if (positiveMatch) {
1836
+ return { polarity: 'positive', rule: positiveMatch[2].trim().toLowerCase() };
1837
+ }
1838
+
1839
+ return undefined;
1840
+ }
1841
+
1842
+ function buildGuidanceFingerprint(content: string): string {
1843
+ const normalizedLines = content
1844
+ .split(/\r?\n/)
1845
+ .map((line) => line.trim())
1846
+ .filter((line, index) => line.length > 0 && !(index === 0 && /^#\s+/.test(line)))
1847
+ .map((line) => line.replace(/\[[^\]]+\]\(([^)]+)\)/g, (match, _target, offset, fullLine) => {
1848
+ const labelMatch = fullLine.slice(offset).match(/^\[([^\]]+)\]\([^)]+\)/);
1849
+ return labelMatch ? `[${labelMatch[1]}](link)` : match;
1850
+ }))
1851
+ .map((line) => line.replace(/\s+/g, ' '));
1852
+
1853
+ return normalizedLines.join('\n').toLowerCase();
1854
+ }
1855
+
1856
+ function findBrokenGuidanceLinks(content: string, guidancePath: string): string[] {
1857
+ const sourceDir = path.posix.dirname(guidancePath);
1858
+ return Array.from(
1859
+ new Set(
1860
+ extractMarkdownLinks(content).filter((link) => !guidanceLinkExists(link, sourceDir))
1861
+ )
1862
+ ).sort();
1863
+ }
1864
+
1865
+ function hasGuidanceRoute(content: string, guidancePath: string): boolean {
1866
+ const sourceDir = path.posix.dirname(guidancePath);
1867
+ return extractMarkdownLinks(content).some((link) => guidanceLinkExists(link, sourceDir) && isDocsRoute(link, sourceDir));
1868
+ }
1869
+
1870
+ function listGuidanceRouteTargets(content: string, guidancePath: string): string[] {
1871
+ const sourceDir = path.posix.dirname(guidancePath);
1872
+ return Array.from(
1873
+ new Set(
1874
+ extractMarkdownLinks(content)
1875
+ .map((link) => resolveMarkdownLinkPath(link, sourceDir))
1876
+ .filter((targetPath): targetPath is string => Boolean(targetPath))
1877
+ .filter((targetPath) => targetPath.startsWith('docs/') && requirePathExists(path.join(repoRoot, targetPath)))
1878
+ )
1879
+ ).sort((left, right) => left.localeCompare(right));
1880
+ }
1881
+
1882
+ function resolveMarkdownLinkPath(link: string, sourceDir: string): string | undefined {
1883
+ if (/^[a-z]+:/i.test(link) || path.isAbsolute(link)) {
1884
+ return undefined;
1885
+ }
1886
+
1887
+ return path.posix.normalize(path.posix.join(sourceDir, link.replace(/\\/g, '/')));
1888
+ }
1889
+
1890
+ function guidanceLinkExists(link: string, sourceDir: string): boolean {
1891
+ const normalized = resolveMarkdownLinkPath(link, sourceDir);
1892
+ if (!normalized) {
1893
+ return true;
1894
+ }
1895
+
1896
+ const absolutePath = path.join(repoRoot, normalized);
1897
+ return requirePathExists(absolutePath);
1898
+ }
1899
+
1900
+ function isDocsRoute(link: string, sourceDir: string): boolean {
1901
+ const normalized = resolveMarkdownLinkPath(link, sourceDir);
1902
+ if (!normalized) {
1903
+ return false;
1904
+ }
1905
+
1906
+ return normalized.startsWith('docs/');
1907
+ }
1908
+
1909
+ function requirePathExists(filePath: string): boolean {
1910
+ try {
1911
+ return statSync(filePath).isFile();
1912
+ } catch {
1913
+ return false;
1914
+ }
1915
+ }
1916
+
1917
+ function countLines(content: string): number {
1918
+ if (!content) {
1919
+ return 0;
1920
+ }
1921
+ return content.split(/\r?\n/).length;
1922
+ }
1923
+
1924
+ export function extractWikiClaims(pageSlug: string, content: string, pageByPath: Map<string, string>): WikiClaim[] {
1925
+ const claimSection = stripFencedCodeBlocks(extractMarkdownSection(content, 'Claims'));
1926
+ const pagePath = `docs/wiki/${pageSlug}.md`;
1927
+
1928
+ return claimSection
1929
+ .split(/\r?\n/)
1930
+ .map((line) => line.trim())
1931
+ .filter((line) => line.startsWith('- ['))
1932
+ .map((line) => parseClaimLine(line, pageSlug, pagePath, pageByPath))
1933
+ .filter((claim): claim is WikiClaim => claim !== undefined);
1934
+ }
1935
+
1936
/**
 * Return the body of the `## <heading>` section of a markdown document: the
 * lines between that heading and the next H2. Fenced code blocks (``` or ~~~)
 * are tracked so headings appearing inside a fence are ignored, both when
 * locating the target heading and when finding the section's end. Returns ''
 * when the heading is not found.
 */
function extractMarkdownSection(content: string, heading: string): string {
  const lines = content.split(/\r?\n/);
  let headingIndex = -1;
  let activeFence: '`' | '~' | undefined;

  // Phase 1: locate the heading, skipping anything inside a code fence.
  for (const [index, line] of lines.entries()) {
    const trimmed = line.trim();
    const fenceMarker = trimmed.startsWith('```') ? '`' : trimmed.startsWith('~~~') ? '~' : undefined;

    if (fenceMarker) {
      // A fence line opens a fence, or closes one only when markers match.
      if (!activeFence) {
        activeFence = fenceMarker;
      } else if (activeFence === fenceMarker) {
        activeFence = undefined;
      }
      continue;
    }

    if (!activeFence && trimmed === `## ${heading}`) {
      headingIndex = index;
      break;
    }
  }

  if (headingIndex === -1) {
    return '';
  }

  // Phase 2: collect lines until the next H2 that sits outside a fence.
  const sectionLines: string[] = [];
  activeFence = undefined;
  for (const line of lines.slice(headingIndex + 1)) {
    const trimmed = line.trim();
    const fenceMarker = trimmed.startsWith('```') ? '`' : trimmed.startsWith('~~~') ? '~' : undefined;

    if (!activeFence && /^##\s+/.test(trimmed)) {
      break;
    }

    // Unlike phase 1, fence lines themselves are kept as part of the section.
    sectionLines.push(line);

    if (fenceMarker) {
      if (!activeFence) {
        activeFence = fenceMarker;
      } else if (activeFence === fenceMarker) {
        activeFence = undefined;
      }
    }
  }

  return sectionLines.join('\n');
}
1987
+
1988
+ function stripFencedCodeBlocks(content: string): string {
1989
+ const lines = content.split(/\r?\n/);
1990
+ const keptLines: string[] = [];
1991
+ let activeFence: '`' | '~' | undefined;
1992
+
1993
+ for (const line of lines) {
1994
+ const trimmed = line.trim();
1995
+ const fenceMarker = trimmed.startsWith('```') ? '`' : trimmed.startsWith('~~~') ? '~' : undefined;
1996
+
1997
+ if (fenceMarker) {
1998
+ if (!activeFence) {
1999
+ activeFence = fenceMarker;
2000
+ } else if (activeFence === fenceMarker) {
2001
+ activeFence = undefined;
2002
+ }
2003
+ continue;
2004
+ }
2005
+
2006
+ if (!activeFence) {
2007
+ keptLines.push(line);
2008
+ }
2009
+ }
2010
+
2011
+ return keptLines.join('\n');
2012
+ }
2013
+
2014
+ function parseClaimLine(
2015
+ line: string,
2016
+ pageSlug: string,
2017
+ pagePath: string,
2018
+ pageByPath: Map<string, string>
2019
+ ): WikiClaim | undefined {
2020
+ const match = line.match(/^- \[(current|needs-review|superseded|unknown)\]\s+(.+)$/i);
2021
+ if (!match) {
2022
+ return undefined;
2023
+ }
2024
+
2025
+ const status = match[1].toLowerCase() as WikiClaimStatus;
2026
+ const body = match[2].trim();
2027
+ return {
2028
+ pageSlug,
2029
+ text: body.replace(/\s*Sources:\s*.+$/i, '').trim(),
2030
+ status,
2031
+ sources: extractClaimSources(body, pagePath, pageByPath)
2032
+ };
2033
+ }
2034
+
2035
+ function extractClaimSources(body: string, pagePath: string, pageByPath: Map<string, string>): WikiClaimSource[] {
2036
+ const sourceDir = path.posix.dirname(pagePath);
2037
+ const sourceText = body.match(/\sSources:\s*(.+)$/i)?.[1]?.trim() ?? '';
2038
+ const sources = new Map<string, WikiClaimSource>();
2039
+
2040
+ for (const match of sourceText.matchAll(/\[([^\]]+)\]\(([^)]+)\)/g)) {
2041
+ const label = match[1]?.trim() ?? '';
2042
+ const slug = resolveWikiLinkSlug(match[2]?.trim() ?? '', sourceDir, pageByPath);
2043
+ if (slug) {
2044
+ sources.set(`wiki:${slug}`, { kind: 'wiki', label, slug });
2045
+ }
2046
+ }
2047
+
2048
+ for (const rawSource of sourceText.replace(/\[[^\]]+\]\([^)]+\)/g, '').split(',')) {
2049
+ const typedSource = parseTypedClaimSource(rawSource);
2050
+ if (typedSource) {
2051
+ sources.set(`${typedSource.kind}:${typedSource.slug}`, typedSource);
2052
+ }
2053
+ }
2054
+
2055
+ return Array.from(sources.values());
2056
+ }
2057
+
2058
+ function parseTypedClaimSource(value: string): WikiClaimSource | undefined {
2059
+ const match = value.trim().match(/^(file|command|decision):\s*(.+)$/i);
2060
+ if (!match) {
2061
+ return undefined;
2062
+ }
2063
+
2064
+ const kind = match[1].toLowerCase() as WikiClaimSourceKind;
2065
+ const slug = match[2].trim();
2066
+ if (!slug) {
2067
+ return undefined;
2068
+ }
2069
+
2070
+ return {
2071
+ kind,
2072
+ label: slug,
2073
+ slug
2074
+ };
2075
+ }
2076
+
2077
+ function rankContextClaims(claims: WikiClaim[], queryTerms: string[]): WikiClaim[] {
2078
+ return [...claims].sort((left, right) => {
2079
+ const scoreDelta = scoreClaim(right, queryTerms) - scoreClaim(left, queryTerms);
2080
+ if (scoreDelta !== 0) {
2081
+ return scoreDelta;
2082
+ }
2083
+ return left.pageSlug.localeCompare(right.pageSlug) || left.text.localeCompare(right.text);
2084
+ });
2085
+ }
2086
+
2087
+ function scoreClaim(claim: WikiClaim, queryTerms: string[]): number {
2088
+ const haystack = `${claim.pageSlug} ${claim.text}`.toLowerCase();
2089
+ let score = claim.sources.length * 3;
2090
+ for (const term of queryTerms) {
2091
+ if (haystack.includes(term)) {
2092
+ score += 2;
2093
+ }
2094
+ }
2095
+ if (claim.status === 'current') {
2096
+ score += 1;
2097
+ }
2098
+ return score;
2099
+ }
2100
+
2101
+ function buildOpenQuestions(claims: WikiClaim[], findings: WikiLintFinding[]): string[] {
2102
+ const claimQuestions = claims
2103
+ .map((claim) => {
2104
+ if (claim.status !== 'current' && claim.sources.length === 0) {
2105
+ return `Verify ${claim.pageSlug}: ${claim.text} (status: ${claim.status}). Add at least one supporting source.`;
2106
+ }
2107
+
2108
+ if (claim.status !== 'current') {
2109
+ return `Verify ${claim.pageSlug}: ${claim.text} (status: ${claim.status}). Review ${claim.sources.map((source) => source.slug).join(', ')}.`;
2110
+ }
2111
+
2112
+ if (claim.sources.length === 0) {
2113
+ return `Add at least one supporting source for ${claim.pageSlug}: ${claim.text}.`;
2114
+ }
2115
+
2116
+ return undefined;
2117
+ })
2118
+ .filter((question): question is string => Boolean(question));
2119
+
2120
+ const guidanceQuestions = findings
2121
+ .filter((finding) => guidanceLintRules.has(finding.rule))
2122
+ .map((finding) => `Resolve ${finding.rule} in ${finding.path}: ${finding.message}`);
2123
+
2124
+ return [...claimQuestions, ...guidanceQuestions];
2125
+ }
2126
+
2127
// Lint rules that concern guidance files (as opposed to wiki page content).
// buildOpenQuestions surfaces findings for these rules as open questions in
// the context briefing.
const guidanceLintRules = new Set<WikiLintRule>([
  'dormant-skill',
  'oversized-guidance',
  'duplicate-guidance',
  'stale-guidance-reference',
  'conflicting-guidance',
  'unrouted-guidance'
]);
2135
+
2136
+ async function listRecentProjectLogEntries(maxEntries: number): Promise<string[]> {
2137
+ const content = await fs.readFile(pagePathFromSlug('project-log'), 'utf8').catch(() => '');
2138
+ return content
2139
+ .split(/\r?\n/)
2140
+ .map((line) => line.trim())
2141
+ .filter((line) => line.startsWith('- '))
2142
+ .slice(-maxEntries)
2143
+ .reverse();
2144
+ }
2145
+
2146
+ function extractRelatedWikiSlugs(content: string, sourcePath: string, pageByPath: Map<string, string>): string[] {
2147
+ const sourceDir = path.posix.dirname(sourcePath);
2148
+ return Array.from(
2149
+ new Set(
2150
+ extractMarkdownLinks(content)
2151
+ .map((link) => resolveWikiLinkSlug(link, sourceDir, pageByPath))
2152
+ .filter((slug): slug is string => Boolean(slug))
2153
+ )
2154
+ );
2155
+ }
2156
+
2157
+ function extractMarkdownLinks(content: string): string[] {
2158
+ return Array.from(content.matchAll(/\[[^\]]+\]\(([^)]+)\)/g), (match) => match[1]?.trim() ?? '')
2159
+ .filter(Boolean)
2160
+ .map((link) => link.split('#')[0]?.split('?')[0]?.trim() ?? '')
2161
+ .filter((link) => link.endsWith('.md'));
2162
+ }
2163
+
2164
+ function resolveWikiLinkSlug(link: string, sourceDir: string, pageByPath: Map<string, string>): string | undefined {
2165
+ if (/^[a-z]+:/i.test(link) || path.isAbsolute(link)) {
2166
+ return undefined;
2167
+ }
2168
+
2169
+ const normalized = path.posix.normalize(path.posix.join(sourceDir, link.replace(/\\/g, '/')));
2170
+ return pageByPath.get(normalized);
2171
+ }