@rarusoft/dendrite-wiki 0.1.0-alpha.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (74) hide show
  1. package/README.md +79 -0
  2. package/dist/api-extractor/extract.js +269 -0
  3. package/dist/api-extractor/language-extractor.js +15 -0
  4. package/dist/api-extractor/python-extractor.js +358 -0
  5. package/dist/api-extractor/render.js +195 -0
  6. package/dist/api-extractor/tree-sitter-extractor.js +1079 -0
  7. package/dist/api-extractor/types.js +11 -0
  8. package/dist/api-extractor/typescript-extractor.js +50 -0
  9. package/dist/api-extractor/walk.js +178 -0
  10. package/dist/api-reference.js +438 -0
  11. package/dist/benchmark-events.js +129 -0
  12. package/dist/benchmark.js +270 -0
  13. package/dist/binder-export.js +381 -0
  14. package/dist/canonical-target.js +168 -0
  15. package/dist/chart-insert.js +377 -0
  16. package/dist/chart-prompts.js +414 -0
  17. package/dist/context-cache.js +98 -0
  18. package/dist/contradicts-shipped-memory.js +232 -0
  19. package/dist/diff-context.js +142 -0
  20. package/dist/doctor.js +220 -0
  21. package/dist/generated-docs.js +219 -0
  22. package/dist/i18n.js +71 -0
  23. package/dist/index.js +49 -0
  24. package/dist/librarian.js +255 -0
  25. package/dist/maintenance-actions.js +244 -0
  26. package/dist/maintenance-inbox.js +842 -0
  27. package/dist/maintenance-runner.js +62 -0
  28. package/dist/page-drift.js +225 -0
  29. package/dist/page-inbox.js +168 -0
  30. package/dist/report-export.js +339 -0
  31. package/dist/review-bridge.js +1386 -0
  32. package/dist/search-index.js +199 -0
  33. package/dist/store.js +1617 -0
  34. package/dist/telemetry-defaults.js +44 -0
  35. package/dist/telemetry-report.js +263 -0
  36. package/dist/telemetry.js +544 -0
  37. package/dist/wiki-synthesis.js +901 -0
  38. package/package.json +35 -0
  39. package/src/api-extractor/extract.ts +333 -0
  40. package/src/api-extractor/language-extractor.ts +37 -0
  41. package/src/api-extractor/python-extractor.ts +380 -0
  42. package/src/api-extractor/render.ts +267 -0
  43. package/src/api-extractor/tree-sitter-extractor.ts +1210 -0
  44. package/src/api-extractor/types.ts +41 -0
  45. package/src/api-extractor/typescript-extractor.ts +56 -0
  46. package/src/api-extractor/walk.ts +209 -0
  47. package/src/api-reference.ts +552 -0
  48. package/src/benchmark-events.ts +216 -0
  49. package/src/benchmark.ts +376 -0
  50. package/src/binder-export.ts +437 -0
  51. package/src/canonical-target.ts +192 -0
  52. package/src/chart-insert.ts +478 -0
  53. package/src/chart-prompts.ts +417 -0
  54. package/src/context-cache.ts +129 -0
  55. package/src/contradicts-shipped-memory.ts +311 -0
  56. package/src/diff-context.ts +187 -0
  57. package/src/doctor.ts +260 -0
  58. package/src/generated-docs.ts +316 -0
  59. package/src/i18n.ts +106 -0
  60. package/src/index.ts +59 -0
  61. package/src/librarian.ts +331 -0
  62. package/src/maintenance-actions.ts +314 -0
  63. package/src/maintenance-inbox.ts +1132 -0
  64. package/src/maintenance-runner.ts +85 -0
  65. package/src/page-drift.ts +292 -0
  66. package/src/page-inbox.ts +254 -0
  67. package/src/report-export.ts +392 -0
  68. package/src/review-bridge.ts +1729 -0
  69. package/src/search-index.ts +266 -0
  70. package/src/store.ts +2171 -0
  71. package/src/telemetry-defaults.ts +50 -0
  72. package/src/telemetry-report.ts +365 -0
  73. package/src/telemetry.ts +757 -0
  74. package/src/wiki-synthesis.ts +1307 -0
@@ -0,0 +1,232 @@
1
// Each pattern targets a specific negation grammar. The first capturing group, when
// present, holds the "what's missing" noun phrase — that's what we score against memories.
// When a pattern has no capturing group (heading-style "No X"), the object is the words
// that follow the match up to the end of the line/sentence.
// NOTE: findFirstNegation walks this array in declaration order (the heading-anchored
// pattern — source starting with '^' — is matched against the heading line, everything
// else against the joined heading+body probe), so keep ordering stable when editing.
const NEGATION_PATTERNS = [
    // "does not (yet|currently) have <object>"
    { regex: /\bdoes\s+not\s+(?:yet\s+|currently\s+)?have\b([^.!?\n]{3,160})/i, objectFromCapture: true },
    // "does not (yet|currently) support|include|implement|provide|ship|offer <object>"
    { regex: /\bdoes\s+not\s+(?:yet\s+|currently\s+)?(?:support|include|implement|provide|ship|offer)\b([^.!?\n]{3,160})/i, objectFromCapture: true },
    // "is not (yet|currently) built|shipped|... [trailing text]" — capture may be empty.
    { regex: /\bis\s+not\s+(?:yet\s+|currently\s+)?(?:built|shipped|implemented|present|available|done)\b([^.!?\n]{0,120})/i, objectFromCapture: true },
    // "has not been (yet) built|shipped|implemented|delivered"
    { regex: /\bhas\s+not\s+been\s+(?:yet\s+)?(?:built|shipped|implemented|delivered)\b([^.!?\n]{0,120})/i, objectFromCapture: true },
    // "is missing <object>"
    { regex: /\bis\s+missing\b([^.!?\n]{3,160})/i, objectFromCapture: true },
    // "still needs another layer" / "still needs to be built"
    { regex: /\bstill\s+needs?\s+(?:another\s+layer|to\s+be\s+built)\b([^.!?\n]{0,160})/i, objectFromCapture: true },
    // "intentionally dropped <object>"
    { regex: /\bintentionally\s+dropped\b([^.!?\n]{0,160})/i, objectFromCapture: true },
    // Heading-style negation: "No Shared Free-Form Memory Store", "No Subconscious Background Organizer".
    // Anchored to the start of a heading line because mid-sentence "no X" is too noisy.
    { regex: /^#{2,6}\s+No\s+([A-Z][^\n]{3,120})$/m, objectFromCapture: true }
];
17
// Phrases whose presence marks text as claiming a feature HAS shipped. Matching is
// case-insensitive SUBSTRING matching (see hasAffirmativeKeyword), so any entry that
// contains another entry is redundant and has been removed: 'is shipped' and
// 'partly shipped' were covered by 'shipped', 'is implemented' by 'implemented',
// 'completed' by 'complete', and 'mostly done' by 'done'. Behavior is unchanged.
// NOTE(review): substring matching can false-positive — e.g. 'done' matches inside
// "abandoned" — confirm this looseness is acceptable before tightening.
const AFFIRMATIVE_KEYWORDS = [
    'shipped',
    'landed',
    'implemented',
    'now supports',
    'now has',
    'now includes',
    'now provides',
    'now offers',
    'now applies',
    'now ranks',
    'now runs',
    'is available',
    'is now',
    'complete',
    'done',
    'in progress'
];
40
// Generic filler words excluded from negation "object" tokens so overlap scoring keys on
// feature-bearing nouns only. Fix: the original initializer listed 'sort' twice; the Set
// deduplicated it silently, but the duplicate literal was noise and has been removed.
const STOP_TOKENS = new Set([
    'that',
    'this',
    'kind',
    'sort',
    'type',
    'thing',
    'with',
    'into',
    'have',
    'been',
    'still',
    'some',
    'will',
    'must',
    'should',
    'around',
    'maintain',
    'expected',
    'project',
    'projects',
    'system',
    'systems'
]);
65
// Minimum distinct object tokens a negation must yield before it is scored at all.
const MIN_OBJECT_TOKENS = 2;
// Minimum object tokens that must appear in a memory's text for it to count as contradicting.
const MIN_OBJECT_OVERLAP = 2;
// Caps on per-signal evidence so signal payloads stay small.
const MAX_AFFIRMING_SNIPPETS_PER_SIGNAL = 3;
const MAX_CONTRADICTING_MEMORIES_PER_SIGNAL = 5;
69
/**
 * Scan a wiki page for prose that contradicts shipped memories. Returns one signal per
 * affected section (the H2/H3 the negation lives under). Empty array means no contradiction
 * was found — that's the healthy state.
 *
 * @param {string} pageContent - Raw page markdown, frontmatter included (frontmatter may
 *   carry the `contradicts-shipped-memory: ignore` opt-out).
 * @param {Array<{id: string, status: string, summary: string, text: string}>} memories -
 *   Memory records; only 'active'/'superseded' ones with affirmative phrasing are scored.
 * @param {string} [projectLogContent=''] - Project-log markdown, used only to corroborate.
 * @returns {Array<{sectionHeading: string, matchedNegation: string, objectTokens: string[],
 *   contradictingMemoryIds: string[], affirmingSnippets: string[]}>}
 */
export function detectContradictsShippedMemory(pageContent, memories, projectLogContent = '') {
    // Page-level opt-out short-circuits the whole detector.
    if (extractOptOutDirective(pageContent)) {
        return [];
    }
    const sections = extractPageSections(pageContent);
    if (sections.length === 0) {
        return [];
    }
    // Pre-filter to memories whose text affirms something shipped. Superseded memories are
    // included alongside active ones — NOTE(review): presumably because a superseded record
    // can still attest that a feature shipped at some point; confirm that intent.
    const affirmingMemories = memories
        .filter((record) => record.status === 'active' || record.status === 'superseded')
        .map((record) => ({
            record,
            // Lowercased summary+text is the haystack for token-overlap scoring below.
            haystack: `${record.summary} ${record.text}`.toLowerCase(),
            affirmative: hasAffirmativeKeyword(`${record.summary} ${record.text}`)
        }))
        .filter((entry) => entry.affirmative);
    // Project-log entries are weaker evidence (one bullet line vs a memory's full body),
    // so we only use them to BOOST confidence — never as the sole contradiction source.
    const logBlobLower = projectLogContent.toLowerCase();
    const logHasAffirmativeContext = hasAffirmativeKeyword(projectLogContent);
    const signals = [];
    for (const section of sections) {
        // Heading and body are joined so sentence-level patterns can match either part.
        const probe = `${section.heading}. ${section.body}`;
        const negation = findFirstNegation(section.heading, section.body, probe);
        if (!negation) {
            continue;
        }
        const objectTokens = extractObjectTokens(negation.objectText);
        // Too few tokens means the negation's object is too vague to score reliably.
        if (objectTokens.length < MIN_OBJECT_TOKENS) {
            continue;
        }
        const contradicting = [];
        const snippets = [];
        for (const entry of affirmingMemories) {
            const overlap = countTokenOverlap(objectTokens, entry.haystack);
            if (overlap >= MIN_OBJECT_OVERLAP) {
                contradicting.push(entry.record.id);
                // Snippet list is capped independently of the memory-id list.
                if (snippets.length < MAX_AFFIRMING_SNIPPETS_PER_SIGNAL) {
                    snippets.push(truncate(entry.record.summary, 140));
                }
            }
            // Evidence cap reached — remaining memories are not scanned for this section.
            if (contradicting.length >= MAX_CONTRADICTING_MEMORIES_PER_SIGNAL) {
                break;
            }
        }
        // Project-log corroboration: if the log itself contains affirmative phrasing AND
        // mentions enough object tokens, that counts as one extra contradicting voice and
        // raises the signal's confidence — but never replaces a memory match.
        if (contradicting.length > 0 &&
            logHasAffirmativeContext &&
            countTokenOverlap(objectTokens, logBlobLower) >= MIN_OBJECT_OVERLAP &&
            snippets.length < MAX_AFFIRMING_SNIPPETS_PER_SIGNAL) {
            snippets.push('project-log corroborates this feature has shipped');
        }
        // No affirming memory overlapped — healthy, skip the section.
        if (contradicting.length === 0) {
            continue;
        }
        signals.push({
            sectionHeading: section.heading,
            matchedNegation: negation.matchedText.trim(),
            objectTokens,
            contradictingMemoryIds: contradicting,
            affirmingSnippets: snippets
        });
    }
    return signals;
}
141
/**
 * Render one contradiction signal as a human-readable remediation message, with
 * singular/plural agreement and an optional first-snippet example.
 */
export function buildContradictsShippedMemoryMessage(signal) {
    const { sectionHeading, matchedNegation, contradictingMemoryIds, affirmingSnippets } = signal;
    const singular = contradictingMemoryIds.length === 1;
    const noun = singular ? 'memory' : 'memories';
    const verb = singular ? 'says' : 'say';
    const firstSnippet = affirmingSnippets[0] ?? '';
    const example = firstSnippet ? ` (e.g., "${firstSnippet}")` : '';
    return `Section "${sectionHeading}" asserts "${matchedNegation}", but ${contradictingMemoryIds.length} shipped ${noun} ${verb} otherwise${example}. Rewrite or remove the negation, or add \`contradicts-shipped-memory: ignore\` to the page frontmatter if the assertion is intentional.`;
}
147
/**
 * Split markdown (frontmatter removed) into heading-delimited sections. Any preamble
 * before the first heading becomes a section with an empty heading; each section's body
 * includes its own heading line.
 */
function extractPageSections(pageContent) {
    const sections = [];
    let heading = '';
    let bodyLines = [];
    // Emit the accumulated section unless it is a completely empty preamble.
    const flush = () => {
        if (heading || bodyLines.length > 0) {
            sections.push({ heading, body: bodyLines.join('\n').trim() });
        }
    };
    for (const rawLine of stripFrontmatter(pageContent).split(/\r?\n/)) {
        const headingMatch = /^(#{1,6})\s+(.+?)\s*$/.exec(rawLine);
        if (headingMatch) {
            flush();
            heading = headingMatch[2].trim();
            bodyLines = [rawLine];
        }
        else {
            bodyLines.push(rawLine);
        }
    }
    flush();
    return sections;
}
171
/**
 * Remove a leading YAML frontmatter block (`--- ... ---`) from markdown, if present.
 * Content without frontmatter is returned unchanged.
 */
function stripFrontmatter(content) {
    const FRONTMATTER = /^---\r?\n[\s\S]*?\r?\n---\r?\n/;
    return content.replace(FRONTMATTER, '');
}
174
/**
 * True when the page's YAML frontmatter contains `contradicts-shipped-memory: ignore`,
 * the per-page opt-out for this detector. Pages without frontmatter never opt out.
 */
function extractOptOutDirective(pageContent) {
    const frontmatter = /^---\r?\n([\s\S]*?)\r?\n---/.exec(pageContent);
    if (frontmatter === null) {
        return false;
    }
    return /^\s*contradicts-shipped-memory\s*:\s*ignore\s*$/m.test(frontmatter[1]);
}
180
/**
 * Find the first negation in a section by trying NEGATION_PATTERNS in declaration order.
 * The heading-anchored pattern (regex source starting with '^') is matched against a
 * reconstructed heading line; every other pattern is matched against the joined
 * heading+body probe. Note the anchored pattern sits LAST in the array, so probe-level
 * grammars take precedence over heading-style "No X" matches.
 *
 * @returns {{matchedText: string, objectText: string} | undefined}
 */
function findFirstNegation(heading, body, probe) {
    for (const pattern of NEGATION_PATTERNS) {
        // The heading-anchored pattern needs the literal heading line, not the joined probe.
        // NOTE(review): the heading is re-rendered at H2 ('## ') regardless of its original
        // depth; the pattern accepts #{2,6} so it still matches — confirm this is intended.
        if (pattern.regex.source.startsWith('^')) {
            const headingLine = `## ${heading}`;
            const match = headingLine.match(pattern.regex);
            if (match) {
                return {
                    matchedText: match[0],
                    // Fall back to the whole match when no capture is present.
                    objectText: pattern.objectFromCapture ? match[1] ?? '' : match[0]
                };
            }
            continue;
        }
        const match = probe.match(pattern.regex);
        if (match) {
            return {
                matchedText: match[0],
                objectText: pattern.objectFromCapture ? match[1] ?? '' : match[0]
            };
        }
    }
    // Suppress unused-parameter lint — body is read indirectly via the probe param.
    void body;
    return undefined;
}
207
/**
 * Normalize a negation's object phrase into deduplicated scoring tokens: lowercase,
 * strip markdown emphasis/backticks, split on non-alphanumerics, and keep only tokens
 * of length >= 5 that are not generic stop words.
 */
function extractObjectTokens(objectText) {
    const kept = objectText
        .toLowerCase()
        .replace(/[`*_]+/g, '')
        .split(/[^a-z0-9-]+/)
        .map((token) => token.trim())
        .filter((token) => token.length >= 5 && !STOP_TOKENS.has(token));
    return [...new Set(kept)];
}
215
/**
 * Count how many of the object tokens occur (as substrings) in the lowercased haystack.
 */
function countTokenOverlap(objectTokens, haystackLower) {
    return objectTokens.reduce(
        (count, token) => (haystackLower.includes(token) ? count + 1 : count),
        0
    );
}
224
/**
 * True when the text contains any affirmative "this shipped" phrase —
 * case-insensitive substring match against AFFIRMATIVE_KEYWORDS.
 */
function hasAffirmativeKeyword(text) {
    const lowered = text.toLowerCase();
    for (const keyword of AFFIRMATIVE_KEYWORDS) {
        if (lowered.includes(keyword)) {
            return true;
        }
    }
    return false;
}
228
/**
 * Clamp text to at most `max` characters; overflow is replaced by a single ellipsis,
 * with any trailing whitespace before the ellipsis trimmed.
 */
function truncate(text, max) {
    if (text.length <= max) {
        return text;
    }
    const head = text.slice(0, max - 1).trimEnd();
    return `${head}…`;
}
@@ -0,0 +1,142 @@
1
+ /**
2
+ * Diff-driven context aggregation for PR and local-diff reviews.
3
+ *
4
+ * Aggregates the wiki pages, project-local memories, and skills relevant to a set of
5
+ * changed files — the same recall pipeline `wiki_context` uses for the in-editor agent,
6
+ * but driven by file paths from a diff instead of an interactive task. Output is markdown
7
+ * suitable for a GitHub PR comment, a local terminal review, or piping into a chat
8
+ * application.
9
+ *
10
+ * Driven by the CLI's `context-for-diff` subcommand (a list of paths via the `--files`
11
+ * flag, or piped newline-delimited via stdin from `git diff --name-only main...HEAD`).
12
+ * The intended consumer is the future GitHub Action
13
+ * (`dendrite-wiki/context-action`) that auto-comments PRs with relevant project memory
14
+ * when a developer is reviewing a change — exposing the recall moat at exactly the moment
15
+ * a human reviewer cares most.
16
+ */
17
+ import { buildWikiContext } from './store.js';
18
+ import { recallProjectMemories } from '@rarusoft/dendrite-memory';
19
+ import { recallProjectSkills } from '@rarusoft/dendrite-memory';
20
// Per-file recall caps. Callers may override via options; buildDiffContext clamps each
// override to a floor of 1 so a zero/negative value cannot silence recall entirely.
const defaultMaxPagesPerFile = 3;
const defaultMaxMemoriesPerFile = 3;
const defaultMaxSkillsPerFile = 2;
23
/**
 * Aggregate the wiki pages, project memories, and skills relevant to a set of changed
 * files. Results are deduplicated globally: the first file that surfaces a given
 * page/memory/skill keeps it, later files skip it.
 *
 * @param {object} options - { files, query?, maxPagesPerFile?, maxMemoriesPerFile?,
 *   maxSkillsPerFile?, languages?, frameworks? }
 * @returns {Promise<object>} { files: per-file entries, pageCount, memoryCount, skillCount }
 */
export async function buildDiffContext(options) {
    const files = uniqueFiles(options.files);
    if (files.length === 0) {
        return { files: [], pageCount: 0, memoryCount: 0, skillCount: 0 };
    }
    const query = (options.query ?? '').trim() || `Review changes to ${files.join(', ')}`;
    const maxPages = Math.max(1, options.maxPagesPerFile ?? defaultMaxPagesPerFile);
    const maxMemories = Math.max(1, options.maxMemoriesPerFile ?? defaultMaxMemoriesPerFile);
    const maxSkills = Math.max(1, options.maxSkillsPerFile ?? defaultMaxSkillsPerFile);
    const seenPageSlugs = new Set();
    const seenMemoryIds = new Set();
    const seenSkillIds = new Set();
    // Keep an item only the first time its key is seen; records the key as a side effect.
    const takeUnseen = (items, seen, keyOf) => items.filter((item) => {
        const key = keyOf(item);
        if (seen.has(key)) {
            return false;
        }
        seen.add(key);
        return true;
    });
    const entries = [];
    // Files are walked sequentially so dedupe priority follows input order; the three
    // recall calls for one file run in parallel.
    for (const file of files) {
        const [contextResult, memories, skills] = await Promise.all([
            // wiki_context with the file in relatedFiles biases ranking towards memory and page
            // matches that already cite the file.
            buildWikiContext(query, {
                maxPages,
                relatedFiles: [file],
                languages: options.languages,
                frameworks: options.frameworks,
                includeLint: false,
                maxSkills
            }),
            recallProjectMemories(query, { relatedFiles: [file], maxItems: maxMemories }),
            recallProjectSkills({ query, relatedFiles: [file], languages: options.languages, frameworks: options.frameworks, maxItems: maxSkills })
        ]);
        entries.push({
            file,
            pages: takeUnseen(contextResult.pages, seenPageSlugs, (page) => page.slug),
            memories: takeUnseen(memories, seenMemoryIds, (memory) => memory.id),
            skills: takeUnseen(skills, seenSkillIds, (skill) => skill.id)
        });
    }
    return {
        files: entries,
        pageCount: seenPageSlugs.size,
        memoryCount: seenMemoryIds.size,
        skillCount: seenSkillIds.size
    };
}
86
/**
 * Render a buildDiffContext result as markdown suitable for a PR comment or terminal.
 * Files with no surfaced context are skipped; a closing note is added when nothing at
 * all was surfaced across the whole diff.
 */
export function renderDiffContextMarkdown(result) {
    if (result.files.length === 0) {
        return '_Dendrite Wiki MCP found no changed files to analyze._';
    }
    // English plural suffix for regular nouns.
    const s = (count) => (count === 1 ? '' : 's');
    const lines = [];
    lines.push('## Dendrite Wiki: relevant context for this change', '');
    lines.push(`Reviewed ${result.files.length} file${s(result.files.length)}. Surfaced ${result.pageCount} wiki page${s(result.pageCount)}, ${result.memoryCount} memor${result.memoryCount === 1 ? 'y' : 'ies'}, and ${result.skillCount} skill${s(result.skillCount)}.`, '');
    for (const entry of result.files) {
        const hasAnything = entry.pages.length > 0 || entry.memories.length > 0 || entry.skills.length > 0;
        if (!hasAnything) {
            continue;
        }
        lines.push(`### \`${entry.file}\``, '');
        if (entry.skills.length > 0) {
            lines.push('**Matching skills**');
            entry.skills.forEach((skill) => {
                lines.push(`- \`${skill.id}\` — ${escape(skill.summary)} _(${skill.reasons.slice(0, 2).join('; ')})_`);
            });
            lines.push('');
        }
        if (entry.memories.length > 0) {
            lines.push('**Relevant memories**');
            entry.memories.forEach((memory) => {
                lines.push(`- \`${memory.id}\` — ${escape(memory.summary)} _(${memory.reasons.slice(0, 2).join('; ')})_`);
            });
            lines.push('');
        }
        if (entry.pages.length > 0) {
            lines.push('**Relevant wiki pages**');
            entry.pages.forEach((page) => {
                lines.push(`- [\`${page.slug}\`](docs/wiki/${page.slug}.md) — ${escape(page.summary)} _(${escape(page.reason)})_`);
            });
            lines.push('');
        }
    }
    if (result.pageCount === 0 && result.memoryCount === 0 && result.skillCount === 0) {
        lines.push('_No matching wiki pages, memories, or skills were surfaced for the changed files. This usually means the changes touch new territory the wiki has not documented yet._');
    }
    return lines.join('\n');
}
128
/**
 * Normalize and deduplicate an untrusted list of file paths: drop non-strings, trim,
 * convert backslashes to forward slashes, and drop empty results. Insertion order is
 * preserved for the first occurrence of each path.
 */
function uniqueFiles(input) {
    const normalized = input
        .filter((value) => typeof value === 'string')
        .map((value) => value.trim().replace(/\\/g, '/'))
        .filter((value) => value.length > 0);
    return [...new Set(normalized)];
}
140
/**
 * Make a summary safe for a single markdown list line: escape pipes (table syntax),
 * collapse newline runs to a space, and trim.
 * NOTE(review): this shadows the deprecated global `escape`; a rename would be clearer
 * but would change call sites — left as-is.
 */
function escape(value) {
    return value
        .replace(/\|/g, '\\|')
        .replace(/[\r\n]+/g, ' ')
        .trim();
}
package/dist/doctor.js ADDED
@@ -0,0 +1,220 @@
1
+ /**
2
+ * `dendrite-wiki doctor` — project-health audit.
3
+ *
4
+ * Aggregates findings from every health-relevant subsystem into one ranked list with
5
+ * severities (`critical`, `warning`, `info`): missing required files, stale benchmark
6
+ * snapshots, accumulated wiki lint findings, contested or unsupported memories, missing
7
+ * telemetry config when sharing is opt-in, etc. The CLI prints a human report by default
8
+ * and a structured `--json` output for scripted health checks.
9
+ *
10
+ * The doctor exits 1 on any `critical` finding so it integrates cleanly with CI gates and
11
+ * pre-commit hooks. Most findings are advisory and live as `warning` so the doctor stays
12
+ * useful without becoming a nag.
13
+ */
14
+ import { promises as fs } from 'node:fs';
15
+ import path from 'node:path';
16
+ import { readBenchmarkHistory } from './benchmark.js';
17
+ import { reviewProjectMemories } from '@rarusoft/dendrite-memory';
18
+ import { lintWikiPages, listWikiPages, listWikiProposals } from './store.js';
19
+ import { writeTelemetryStatusArtifact } from './telemetry.js';
20
/**
 * Run the full project-health audit rooted at `options.root` (default: process.cwd()).
 * Collects findings in a fixed order — filesystem skeleton, MCP client configs, then
 * (only when the skeleton is intact) lint/proposal/memory/benchmark/log/git checks —
 * and derives an overall status from the severity counts.
 *
 * @param {{root?: string}} [options]
 * @returns {Promise<object>} { generatedAt, root, findings, counts, status } where
 *   status is 'critical' | 'warnings' | 'healthy'.
 */
export async function runDoctor(options = {}) {
    const root = path.resolve(options.root ?? process.cwd());
    const findings = [];
    // Critical filesystem checks first — if these fail, deeper checks may throw.
    const wikiDirExists = await pathExists(path.join(root, 'docs', 'wiki'));
    if (!wikiDirExists) {
        findings.push({
            severity: 'critical',
            rule: 'no-wiki-directory',
            title: 'Wiki directory is missing.',
            detail: 'Dendrite expects markdown pages under docs/wiki/. The agent has nothing to read or update.',
            fix: 'npx dendrite-wiki init'
        });
    }
    const indexExists = await pathExists(path.join(root, 'docs', 'index.md'));
    if (!indexExists) {
        findings.push({
            severity: 'critical',
            rule: 'no-index-page',
            title: 'docs/index.md is missing.',
            detail: 'Agents are instructed to read docs/index.md first. Without it, orientation breaks.',
            fix: 'npx dendrite-wiki init'
        });
    }
    // Known per-client MCP config locations; presence of any one means some client can
    // launch the server.
    const mcpClientPaths = [
        { client: 'Claude Code', file: '.mcp.json' },
        { client: 'VS Code / Copilot', file: '.vscode/mcp.json' },
        { client: 'Cursor', file: '.cursor/mcp.json' },
        { client: 'Codex', file: '.codex/config.toml' },
        { client: 'Continue', file: '.continue/mcpServers/dendrite-wiki-mcp.json' }
    ];
    const presentClients = [];
    for (const entry of mcpClientPaths) {
        if (await pathExists(path.join(root, entry.file))) {
            presentClients.push(entry.client);
        }
    }
    if (presentClients.length === 0) {
        findings.push({
            severity: 'critical',
            rule: 'no-mcp-client-config',
            title: 'No MCP client config files found.',
            detail: 'No editor or agent client knows how to launch the MCP server. The agent cannot reach Dendrite.',
            fix: 'npx dendrite-wiki init --profile claude (or --profile cursor / copilot-vscode / codex / continue)'
        });
    }
    else {
        findings.push({
            severity: 'info',
            rule: 'mcp-clients-configured',
            title: `${presentClients.length} MCP client config${presentClients.length === 1 ? '' : 's'} present.`,
            detail: `Configured: ${presentClients.join(', ')}.`
        });
    }
    // If the basic skeleton is broken, skip deeper checks to avoid noisy errors.
    const skeletonOk = wikiDirExists && indexExists;
    if (skeletonOk) {
        // Each probe is independent; a failing probe degrades to a neutral value
        // (empty list / null) instead of aborting the whole doctor run.
        const [pages, lintFindings, proposals, memoryReview, history, telemetryStatus] = await Promise.all([
            listWikiPages().catch(() => []),
            lintWikiPages().catch(() => []),
            listWikiProposals().catch(() => []),
            reviewProjectMemories().catch(() => ({ findings: [] })),
            readBenchmarkHistory(root).catch(() => null),
            writeTelemetryStatusArtifact(root).catch(() => null)
        ]);
        if (lintFindings.length > 0) {
            findings.push({
                severity: 'warning',
                rule: 'lint-findings-present',
                title: `${lintFindings.length} wiki lint finding${lintFindings.length === 1 ? '' : 's'}.`,
                detail: 'The wiki has open hygiene issues (oversized guidance, stale claims, orphan pages, etc.). Review the maintenance inbox to triage.',
                fix: 'Open docs/wiki/maintenance-inbox.md or run `npx dendrite-wiki benchmark:snapshot` to refresh state.'
            });
        }
        if (proposals.length > 0) {
            findings.push({
                severity: 'warning',
                rule: 'pending-proposals',
                title: `${proposals.length} pending maintenance proposal${proposals.length === 1 ? '' : 's'}.`,
                detail: 'Generated guidance cleanup proposals are waiting for review.',
                fix: 'Open docs/wiki/maintenance-review.md in the browser, or call wiki_apply_proposal for low-risk items.'
            });
        }
        // Only contradiction-kind memory findings are escalated here.
        const contradictionFindings = memoryReview.findings.filter((f) => f.kind === 'contradiction');
        if (contradictionFindings.length > 0) {
            findings.push({
                severity: 'warning',
                rule: 'memory-contradictions',
                title: `${contradictionFindings.length} memory contradiction group${contradictionFindings.length === 1 ? '' : 's'} detected.`,
                detail: 'Two or more memories disagree. The agent may be acting on inconsistent project truth.',
                fix: 'Open the Maintenance Review board in the browser to inspect and resolve.'
            });
        }
        if (history && history.snapshots.length > 0) {
            // Prefer the explicit `latest` pointer when it carries a timestamp; otherwise
            // fall back to the last snapshot in the list.
            const latest = history.latest && history.latest.timestamp ? history.latest : history.snapshots.at(-1);
            if (latest && latest.timestamp) {
                const ageDays = Math.floor((Date.now() - new Date(latest.timestamp).getTime()) / (1000 * 60 * 60 * 24));
                if (ageDays > 14) {
                    findings.push({
                        severity: 'warning',
                        rule: 'stale-benchmark',
                        title: `Last benchmark snapshot is ${ageDays} days old.`,
                        detail: 'Benchmarks should be captured at session boundaries to detect drift. Stale snapshots make trend lines meaningless.',
                        fix: 'npx dendrite-wiki benchmark:snapshot --label session-end'
                    });
                }
            }
        }
        else {
            findings.push({
                severity: 'warning',
                rule: 'no-benchmark-history',
                title: 'No benchmark snapshots have been captured.',
                detail: 'Without baseline snapshots there is no way to measure whether Dendrite is helping the project over time.',
                fix: 'npx dendrite-wiki benchmark:snapshot --label baseline'
            });
        }
        // Staleness is judged by file mtime, not by parsing log entries.
        const projectLogPath = path.join(root, 'docs', 'wiki', 'project-log.md');
        if (await pathExists(projectLogPath)) {
            const projectLogStat = await fs.stat(projectLogPath);
            const logAgeDays = Math.floor((Date.now() - projectLogStat.mtime.getTime()) / (1000 * 60 * 60 * 24));
            if (logAgeDays > 7) {
                findings.push({
                    severity: 'warning',
                    rule: 'stale-project-log',
                    title: `project-log.md was last touched ${logAgeDays} days ago.`,
                    detail: 'No meaningful work has been logged in the past week. The project log is the chronological record agents read for context.',
                    fix: 'Append an entry via wiki_log when meaningful work happens.'
                });
            }
        }
        if (!(await pathExists(path.join(root, '.git')))) {
            findings.push({
                severity: 'warning',
                rule: 'no-git-repository',
                title: 'No .git/ directory found.',
                detail: 'Dendrite assumes git for diff-based review. Without git, audit and rollback are weakened.',
                fix: 'git init'
            });
        }
        // Always-present summary line so the report carries base statistics.
        findings.push({
            severity: 'info',
            rule: 'project-stats',
            title: `${pages.length} wiki page${pages.length === 1 ? '' : 's'} total.`,
            detail: `${lintFindings.length} lint finding${lintFindings.length === 1 ? '' : 's'}, ${proposals.length} proposal${proposals.length === 1 ? '' : 's'}, ${memoryReview.findings.length} memory finding${memoryReview.findings.length === 1 ? '' : 's'}. Telemetry: ${telemetryStatus?.sharingMode ?? 'unknown'}.`
        });
    }
    const counts = {
        critical: findings.filter((f) => f.severity === 'critical').length,
        warning: findings.filter((f) => f.severity === 'warning').length,
        info: findings.filter((f) => f.severity === 'info').length
    };
    // Overall status is the worst severity present.
    const status = counts.critical > 0 ? 'critical' : counts.warning > 0 ? 'warnings' : 'healthy';
    return {
        generatedAt: new Date().toISOString(),
        root,
        findings,
        counts,
        status
    };
}
181
/**
 * Format a doctor report as a plain-text console listing grouped by severity.
 * Empty severity groups are omitted; an empty report prints a healthy notice.
 */
export function formatDoctorReport(report) {
    const badgeByStatus = { healthy: 'OK', warnings: 'WARN' };
    const badge = badgeByStatus[report.status] ?? 'FAIL';
    const lines = [
        `Dendrite Doctor — ${badge}`,
        `Project: ${report.root}`,
        `Critical: ${report.counts.critical} Warnings: ${report.counts.warning} Info: ${report.counts.info}`,
        ''
    ];
    const severityOrder = [
        { severity: 'critical', label: 'CRITICAL' },
        { severity: 'warning', label: 'WARNING' },
        { severity: 'info', label: 'INFO' }
    ];
    for (const { severity, label } of severityOrder) {
        const group = report.findings.filter((finding) => finding.severity === severity);
        if (group.length === 0) {
            continue;
        }
        lines.push(`[${label}]`);
        for (const finding of group) {
            lines.push(` ${finding.title}`);
            lines.push(` ${finding.detail}`);
            if (finding.fix) {
                lines.push(` Fix: ${finding.fix}`);
            }
            lines.push('');
        }
    }
    if (report.findings.length === 0) {
        lines.push('No findings — Dendrite is healthy.');
    }
    return lines.join('\n');
}
212
/**
 * Resolve to true when `target` is accessible to the current process, false otherwise.
 * Uses fs.access, so this checks existence/permission, not file type.
 */
async function pathExists(target) {
    return fs.access(target).then(
        () => true,
        () => false
    );
}