@rarusoft/dendrite-wiki 0.1.0-alpha.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (74) hide show
  1. package/README.md +79 -0
  2. package/dist/api-extractor/extract.js +269 -0
  3. package/dist/api-extractor/language-extractor.js +15 -0
  4. package/dist/api-extractor/python-extractor.js +358 -0
  5. package/dist/api-extractor/render.js +195 -0
  6. package/dist/api-extractor/tree-sitter-extractor.js +1079 -0
  7. package/dist/api-extractor/types.js +11 -0
  8. package/dist/api-extractor/typescript-extractor.js +50 -0
  9. package/dist/api-extractor/walk.js +178 -0
  10. package/dist/api-reference.js +438 -0
  11. package/dist/benchmark-events.js +129 -0
  12. package/dist/benchmark.js +270 -0
  13. package/dist/binder-export.js +381 -0
  14. package/dist/canonical-target.js +168 -0
  15. package/dist/chart-insert.js +377 -0
  16. package/dist/chart-prompts.js +414 -0
  17. package/dist/context-cache.js +98 -0
  18. package/dist/contradicts-shipped-memory.js +232 -0
  19. package/dist/diff-context.js +142 -0
  20. package/dist/doctor.js +220 -0
  21. package/dist/generated-docs.js +219 -0
  22. package/dist/i18n.js +71 -0
  23. package/dist/index.js +49 -0
  24. package/dist/librarian.js +255 -0
  25. package/dist/maintenance-actions.js +244 -0
  26. package/dist/maintenance-inbox.js +842 -0
  27. package/dist/maintenance-runner.js +62 -0
  28. package/dist/page-drift.js +225 -0
  29. package/dist/page-inbox.js +168 -0
  30. package/dist/report-export.js +339 -0
  31. package/dist/review-bridge.js +1386 -0
  32. package/dist/search-index.js +199 -0
  33. package/dist/store.js +1617 -0
  34. package/dist/telemetry-defaults.js +44 -0
  35. package/dist/telemetry-report.js +263 -0
  36. package/dist/telemetry.js +544 -0
  37. package/dist/wiki-synthesis.js +901 -0
  38. package/package.json +35 -0
  39. package/src/api-extractor/extract.ts +333 -0
  40. package/src/api-extractor/language-extractor.ts +37 -0
  41. package/src/api-extractor/python-extractor.ts +380 -0
  42. package/src/api-extractor/render.ts +267 -0
  43. package/src/api-extractor/tree-sitter-extractor.ts +1210 -0
  44. package/src/api-extractor/types.ts +41 -0
  45. package/src/api-extractor/typescript-extractor.ts +56 -0
  46. package/src/api-extractor/walk.ts +209 -0
  47. package/src/api-reference.ts +552 -0
  48. package/src/benchmark-events.ts +216 -0
  49. package/src/benchmark.ts +376 -0
  50. package/src/binder-export.ts +437 -0
  51. package/src/canonical-target.ts +192 -0
  52. package/src/chart-insert.ts +478 -0
  53. package/src/chart-prompts.ts +417 -0
  54. package/src/context-cache.ts +129 -0
  55. package/src/contradicts-shipped-memory.ts +311 -0
  56. package/src/diff-context.ts +187 -0
  57. package/src/doctor.ts +260 -0
  58. package/src/generated-docs.ts +316 -0
  59. package/src/i18n.ts +106 -0
  60. package/src/index.ts +59 -0
  61. package/src/librarian.ts +331 -0
  62. package/src/maintenance-actions.ts +314 -0
  63. package/src/maintenance-inbox.ts +1132 -0
  64. package/src/maintenance-runner.ts +85 -0
  65. package/src/page-drift.ts +292 -0
  66. package/src/page-inbox.ts +254 -0
  67. package/src/report-export.ts +392 -0
  68. package/src/review-bridge.ts +1729 -0
  69. package/src/search-index.ts +266 -0
  70. package/src/store.ts +2171 -0
  71. package/src/telemetry-defaults.ts +50 -0
  72. package/src/telemetry-report.ts +365 -0
  73. package/src/telemetry.ts +757 -0
  74. package/src/wiki-synthesis.ts +1307 -0
@@ -0,0 +1,311 @@
1
+ /**
2
+ * `contradicts-shipped-memory` lint rule — catches wiki prose that asserts a feature
3
+ * doesn't exist while shipped memories or project-log entries say it does.
4
+ *
5
+ * Motivation: the dendritemcp-lessons page sat for months claiming "No Shared Free-Form
6
+ * Memory Store" and "No Subconscious Background Organizer" while M1/M8/B6 actually
7
+ * shipped those features. No existing lint catches that — `stale-claim` only fires on
8
+ * pages with explicit [stale] claim tags, and `page-drift` measures token overlap, not
9
+ * direct contradiction. This rule is the systemic fix the operator asked for: the wiki
10
+ * should call out its own rot when memories prove it's wrong.
11
+ *
12
+ * Deliberately narrow. Matches a small allowlist of negation patterns ("does not have",
13
+ * "is not yet built", "is missing", headings that start with "No"). For each match it
14
+ * extracts the noun-phrase object and looks for active memories that mention enough of
15
+ * those object tokens AND contain an affirmative shipping keyword (shipped / implemented
16
+ * / now supports / etc.). Required overlap is high enough to suppress generic-word noise.
17
+ * Pages can opt out with `contradicts-shipped-memory: ignore` in frontmatter — the
18
+ * dendritemcp-lessons rewrite intentionally keeps the rule on so this never regresses.
19
+ */
20
+ import type { ProjectMemoryRecord } from '@rarusoft/dendrite-memory';
21
+
22
/** One contradiction finding: a wiki negation vs. memories that say the thing shipped. */
export interface ContradictionSignal {
  /** The wiki section heading where the negation lives — operator clicks here to fix. */
  sectionHeading: string;
  /** The literal phrase that matched a negation pattern. */
  matchedNegation: string;
  /** Tokens extracted from the negated object (the "what's missing" noun phrase). */
  objectTokens: string[];
  /** IDs of memories whose text contradicts the negation. */
  contradictingMemoryIds: string[];
  /** Short snippets from the contradicting memories so the finding message is concrete. */
  affirmingSnippets: string[];
}
34
+
35
// Each pattern targets a specific negation grammar. The first capturing group holds the
// "what's missing" noun phrase (or its trailing context) — that's what we score against
// memories. Note: every current pattern sets `objectFromCapture: true`; the flag exists so
// a future pattern without a usable capture group can fall back to scoring the whole match.
const NEGATION_PATTERNS: Array<{ regex: RegExp; objectFromCapture: boolean }> = [
  { regex: /\bdoes\s+not\s+(?:yet\s+|currently\s+)?have\b([^.!?\n]{3,160})/i, objectFromCapture: true },
  { regex: /\bdoes\s+not\s+(?:yet\s+|currently\s+)?(?:support|include|implement|provide|ship|offer)\b([^.!?\n]{3,160})/i, objectFromCapture: true },
  { regex: /\bis\s+not\s+(?:yet\s+|currently\s+)?(?:built|shipped|implemented|present|available|done)\b([^.!?\n]{0,120})/i, objectFromCapture: true },
  { regex: /\bhas\s+not\s+been\s+(?:yet\s+)?(?:built|shipped|implemented|delivered)\b([^.!?\n]{0,120})/i, objectFromCapture: true },
  { regex: /\bis\s+missing\b([^.!?\n]{3,160})/i, objectFromCapture: true },
  { regex: /\bstill\s+needs?\s+(?:another\s+layer|to\s+be\s+built)\b([^.!?\n]{0,160})/i, objectFromCapture: true },
  { regex: /\bintentionally\s+dropped\b([^.!?\n]{0,160})/i, objectFromCapture: true },
  // Heading-style negation: "No Shared Free-Form Memory Store", "No Subconscious Background Organizer".
  // Anchored to the start of a heading line because mid-sentence "no X" is too noisy.
  { regex: /^#{2,6}\s+No\s+([A-Z][^\n]{3,120})$/m, objectFromCapture: true }
];
51
+
52
+ const AFFIRMATIVE_KEYWORDS = [
53
+ 'shipped',
54
+ 'landed',
55
+ 'implemented',
56
+ 'now supports',
57
+ 'now has',
58
+ 'now includes',
59
+ 'now provides',
60
+ 'now offers',
61
+ 'now applies',
62
+ 'now ranks',
63
+ 'now runs',
64
+ 'is implemented',
65
+ 'is shipped',
66
+ 'is available',
67
+ 'is now',
68
+ 'complete',
69
+ 'completed',
70
+ 'mostly done',
71
+ 'done',
72
+ 'in progress',
73
+ 'partly shipped'
74
+ ];
75
+
76
+ const STOP_TOKENS = new Set([
77
+ 'that',
78
+ 'this',
79
+ 'kind',
80
+ 'sort',
81
+ 'type',
82
+ 'thing',
83
+ 'with',
84
+ 'into',
85
+ 'have',
86
+ 'been',
87
+ 'still',
88
+ 'some',
89
+ 'sort',
90
+ 'will',
91
+ 'must',
92
+ 'should',
93
+ 'around',
94
+ 'maintain',
95
+ 'expected',
96
+ 'project',
97
+ 'projects',
98
+ 'system',
99
+ 'systems'
100
+ ]);
101
+
102
// Minimum distinct usable tokens the negated object must yield before scoring is attempted.
const MIN_OBJECT_TOKENS = 2;
// Minimum object tokens a memory's text must mention to count as contradicting.
const MIN_OBJECT_OVERLAP = 2;
// Caps keep each finding's message short and the per-section scan bounded.
const MAX_AFFIRMING_SNIPPETS_PER_SIGNAL = 3;
const MAX_CONTRADICTING_MEMORIES_PER_SIGNAL = 5;
106
+
107
/**
 * Scan a wiki page for prose that contradicts shipped memories. Returns one signal per
 * affected section (the H2/H3 the negation lives under). Empty array means no contradiction
 * was found — that's the healthy state.
 *
 * @param pageContent - Full wiki page markdown, frontmatter included.
 * @param memories - Candidate memory records; only those with affirmative shipping
 *   language are scored against negations.
 * @param projectLogContent - Optional project-log text, used only to corroborate.
 */
export function detectContradictsShippedMemory(
  pageContent: string,
  memories: ProjectMemoryRecord[],
  projectLogContent = ''
): ContradictionSignal[] {
  // Frontmatter opt-out short-circuits the whole scan.
  if (extractOptOutDirective(pageContent)) {
    return [];
  }

  const sections = extractPageSections(pageContent);
  if (sections.length === 0) {
    return [];
  }

  // Pre-compute the lowercased haystack and affirmative flag once per memory.
  // NOTE(review): 'superseded' memories count too, although the module doc talks about
  // "active memories" — confirm that inclusion is intentional.
  const affirmingMemories = memories
    .filter((record) => record.status === 'active' || record.status === 'superseded')
    .map((record) => ({
      record,
      haystack: `${record.summary} ${record.text}`.toLowerCase(),
      affirmative: hasAffirmativeKeyword(`${record.summary} ${record.text}`)
    }))
    .filter((entry) => entry.affirmative);

  // Project-log entries are weaker evidence (one bullet line vs a memory's full body),
  // so we only use them to BOOST confidence — never as the sole contradiction source.
  const logBlobLower = projectLogContent.toLowerCase();
  const logHasAffirmativeContext = hasAffirmativeKeyword(projectLogContent);

  const signals: ContradictionSignal[] = [];

  for (const section of sections) {
    // Join heading + body so the sentence-style patterns can match either.
    const probe = `${section.heading}. ${section.body}`;
    const negation = findFirstNegation(section.heading, section.body, probe);
    if (!negation) {
      continue;
    }

    // Too-generic objects (fewer than MIN_OBJECT_TOKENS usable tokens) are skipped.
    const objectTokens = extractObjectTokens(negation.objectText);
    if (objectTokens.length < MIN_OBJECT_TOKENS) {
      continue;
    }

    const contradicting: string[] = [];
    const snippets: string[] = [];
    for (const entry of affirmingMemories) {
      const overlap = countTokenOverlap(objectTokens, entry.haystack);
      if (overlap >= MIN_OBJECT_OVERLAP) {
        contradicting.push(entry.record.id);
        if (snippets.length < MAX_AFFIRMING_SNIPPETS_PER_SIGNAL) {
          snippets.push(truncate(entry.record.summary, 140));
        }
      }
      if (contradicting.length >= MAX_CONTRADICTING_MEMORIES_PER_SIGNAL) {
        break;
      }
    }

    // Project-log corroboration: if the log itself contains affirmative phrasing AND
    // mentions enough object tokens, that counts as one extra contradicting voice and
    // raises the signal's confidence — but never replaces a memory match.
    if (
      contradicting.length > 0 &&
      logHasAffirmativeContext &&
      countTokenOverlap(objectTokens, logBlobLower) >= MIN_OBJECT_OVERLAP &&
      snippets.length < MAX_AFFIRMING_SNIPPETS_PER_SIGNAL
    ) {
      snippets.push('project-log corroborates this feature has shipped');
    }

    if (contradicting.length === 0) {
      continue;
    }

    signals.push({
      sectionHeading: section.heading,
      matchedNegation: negation.matchedText.trim(),
      objectTokens,
      contradictingMemoryIds: contradicting,
      affirmingSnippets: snippets
    });
  }

  return signals;
}
196
+
197
+ export function buildContradictsShippedMemoryMessage(signal: ContradictionSignal): string {
198
+ const memoryWord = signal.contradictingMemoryIds.length === 1 ? 'memory' : 'memories';
199
+ const snippet = signal.affirmingSnippets[0] ?? '';
200
+ const snippetSuffix = snippet ? ` (e.g., "${snippet}")` : '';
201
+ return `Section "${signal.sectionHeading}" asserts "${signal.matchedNegation}", but ${signal.contradictingMemoryIds.length} shipped ${memoryWord} ${signal.contradictingMemoryIds.length === 1 ? 'says' : 'say'} otherwise${snippetSuffix}. Rewrite or remove the negation, or add \`contradicts-shipped-memory: ignore\` to the page frontmatter if the assertion is intentional.`;
202
+ }
203
+
204
/** One heading-delimited slice of a wiki page; `body` includes the heading line itself. */
interface PageSection {
  heading: string;
  body: string;
}
208
+
209
+ function extractPageSections(pageContent: string): PageSection[] {
210
+ const withoutFrontmatter = stripFrontmatter(pageContent);
211
+ const lines = withoutFrontmatter.split(/\r?\n/);
212
+ const sections: PageSection[] = [];
213
+ let currentHeading = '';
214
+ let currentBody: string[] = [];
215
+
216
+ const pushSection = (): void => {
217
+ if (currentHeading || currentBody.length > 0) {
218
+ sections.push({ heading: currentHeading, body: currentBody.join('\n').trim() });
219
+ }
220
+ };
221
+
222
+ for (const line of lines) {
223
+ const headingMatch = line.match(/^(#{1,6})\s+(.+?)\s*$/);
224
+ if (headingMatch) {
225
+ pushSection();
226
+ currentHeading = headingMatch[2].trim();
227
+ currentBody = [line];
228
+ continue;
229
+ }
230
+ currentBody.push(line);
231
+ }
232
+ pushSection();
233
+
234
+ return sections;
235
+ }
236
+
237
+ function stripFrontmatter(content: string): string {
238
+ return content.replace(/^---\r?\n[\s\S]*?\r?\n---\r?\n/, '');
239
+ }
240
+
241
+ function extractOptOutDirective(pageContent: string): boolean {
242
+ const match = pageContent.match(/^---\r?\n([\s\S]*?)\r?\n---/);
243
+ if (!match) return false;
244
+ return /^\s*contradicts-shipped-memory\s*:\s*ignore\s*$/m.test(match[1]);
245
+ }
246
+
247
/** A matched negation phrase plus the noun-phrase text to score against memories. */
interface NegationMatch {
  matchedText: string;
  objectText: string;
}
251
+
252
/**
 * Return the first negation-pattern match for this section, or undefined when none fire.
 * Heading-anchored patterns are tested against a re-synthesized heading line; all other
 * patterns run against the joined heading+body probe.
 */
function findFirstNegation(heading: string, body: string, probe: string): NegationMatch | undefined {
  // Try heading first — heading-style negations ("No Shared Memory Store") are higher-precision.
  for (const pattern of NEGATION_PATTERNS) {
    // The heading-anchored pattern needs the literal heading line, not the joined probe.
    if (pattern.regex.source.startsWith('^')) {
      const headingLine = `## ${heading}`;
      const match = headingLine.match(pattern.regex);
      if (match) {
        return {
          matchedText: match[0],
          // Every current pattern sets objectFromCapture, so the match[0] fallback is
          // presently dead code — kept for future patterns without a usable capture.
          objectText: pattern.objectFromCapture ? match[1] ?? '' : match[0]
        };
      }
      continue;
    }
    const match = probe.match(pattern.regex);
    if (match) {
      return {
        matchedText: match[0],
        objectText: pattern.objectFromCapture ? match[1] ?? '' : match[0]
      };
    }
  }
  // Suppress unused-parameter lint — body is read indirectly via the probe param.
  void body;
  return undefined;
}
279
+
280
+ function extractObjectTokens(objectText: string): string[] {
281
+ return Array.from(
282
+ new Set(
283
+ objectText
284
+ .toLowerCase()
285
+ .replace(/[`*_]+/g, '')
286
+ .split(/[^a-z0-9-]+/)
287
+ .map((token) => token.trim())
288
+ .filter((token) => token.length >= 5 && !STOP_TOKENS.has(token))
289
+ )
290
+ );
291
+ }
292
+
293
+ function countTokenOverlap(objectTokens: string[], haystackLower: string): number {
294
+ let overlap = 0;
295
+ for (const token of objectTokens) {
296
+ if (haystackLower.includes(token)) {
297
+ overlap += 1;
298
+ }
299
+ }
300
+ return overlap;
301
+ }
302
+
303
+ function hasAffirmativeKeyword(text: string): boolean {
304
+ const lower = text.toLowerCase();
305
+ return AFFIRMATIVE_KEYWORDS.some((keyword) => lower.includes(keyword));
306
+ }
307
+
308
+ function truncate(text: string, max: number): string {
309
+ if (text.length <= max) return text;
310
+ return `${text.slice(0, max - 1).trimEnd()}…`;
311
+ }
@@ -0,0 +1,187 @@
1
+ /**
2
+ * Diff-driven context aggregation for PR and local-diff reviews.
3
+ *
4
+ * Aggregates the wiki pages, project-local memories, and skills relevant to a set of
5
+ * changed files — the same recall pipeline `wiki_context` uses for the in-editor agent,
6
+ * but driven by file paths from a diff instead of an interactive task. Output is markdown
7
+ * suitable for a GitHub PR comment, a local terminal review, or piping into a chat
8
+ * application.
9
+ *
10
+ * Driven by the CLI's `context-for-diff` subcommand (a list of paths via the `--files`
11
+ * flag, or piped newline-delimited via stdin from `git diff --name-only main...HEAD`).
12
+ * The intended consumer is the future GitHub Action
13
+ * (`dendrite-wiki/context-action`) that auto-comments PRs with relevant project memory
14
+ * when a developer is reviewing a change — exposing the recall moat at exactly the moment
15
+ * a human reviewer cares most.
16
+ */
17
+ import { buildWikiContext, type WikiContextResult, type WikiContextPage } from './store.js';
18
+ import { recallProjectMemories, type RecalledProjectMemory } from '@rarusoft/dendrite-memory';
19
+ import { recallProjectSkills, type RecalledProjectSkill } from '@rarusoft/dendrite-memory';
20
+ // NOTE: this module is consumed only by the `context-for-diff` CLI subcommand and is not
+ // wired into any other surface. The Action manifest that wraps this CLI for GitHub PR
+ // auto-commenting ships separately when there's a real signal it's wanted.
22
+
23
/** Inputs for one diff-context aggregation run. */
export interface BuildDiffContextOptions {
  /** Changed file paths; deduped and backslash-normalized before use. */
  files: string[];
  /** Recall query; defaults to a synthetic "Review changes to …" query when blank. */
  query?: string;
  /** Per-file caps; each is clamped to at least 1 (see buildDiffContext defaults). */
  maxPagesPerFile?: number;
  maxMemoriesPerFile?: number;
  maxSkillsPerFile?: number;
  // Embed languages/frameworks from the harness when known (e.g., from package.json /
  // language detection); they tighten skill scope matching.
  languages?: string[];
  frameworks?: string[];
}

/** Context recalled for one changed file, already deduped against earlier files. */
export interface DiffContextEntry {
  file: string;
  pages: WikiContextPage[];
  memories: RecalledProjectMemory[];
  skills: RecalledProjectSkill[];
}

/** Aggregate result: per-file entries plus unique counts across the whole diff. */
export interface BuildDiffContextResult {
  files: DiffContextEntry[];
  pageCount: number;
  memoryCount: number;
  skillCount: number;
}
48
+
49
// Per-file recall caps; kept small so the rendered PR comment stays readable.
const defaultMaxPagesPerFile = 3;
const defaultMaxMemoriesPerFile = 3;
const defaultMaxSkillsPerFile = 2;
52
+
53
/**
 * Recall wiki pages, project memories, and skills for each changed file in a diff.
 *
 * Files are processed sequentially (not in parallel) because each file's results are
 * deduped against everything surfaced for earlier files — an item appears at most once
 * across the whole result. Returns zeroed aggregates when no usable paths are supplied.
 */
export async function buildDiffContext(options: BuildDiffContextOptions): Promise<BuildDiffContextResult> {
  const files = uniqueFiles(options.files);
  if (files.length === 0) {
    return { files: [], pageCount: 0, memoryCount: 0, skillCount: 0 };
  }

  // Fall back to a synthetic review query when the caller provides none (or whitespace).
  const query = (options.query ?? '').trim() || `Review changes to ${files.join(', ')}`;
  const maxPages = Math.max(1, options.maxPagesPerFile ?? defaultMaxPagesPerFile);
  const maxMemories = Math.max(1, options.maxMemoriesPerFile ?? defaultMaxMemoriesPerFile);
  const maxSkills = Math.max(1, options.maxSkillsPerFile ?? defaultMaxSkillsPerFile);

  // Cross-file dedupe state: the first file to surface an item keeps it.
  const seenPageSlugs = new Set<string>();
  const seenMemoryIds = new Set<string>();
  const seenSkillIds = new Set<string>();

  const entries: DiffContextEntry[] = [];

  for (const file of files) {
    // The three recall sources for one file are independent, so fan out in parallel.
    const [contextResult, memories, skills] = await Promise.all([
      // wiki_context with the file in relatedFiles biases ranking towards memory and page
      // matches that already cite the file.
      // NOTE(review): the `as Promise<WikiContextResult>` cast implies buildWikiContext's
      // declared return type is wider — confirm against store.ts instead of trusting it.
      buildWikiContext(query, {
        maxPages,
        relatedFiles: [file],
        languages: options.languages,
        frameworks: options.frameworks,
        includeLint: false,
        maxSkills
      }) as Promise<WikiContextResult>,
      recallProjectMemories(query, { relatedFiles: [file], maxItems: maxMemories }),
      recallProjectSkills({ query, relatedFiles: [file], languages: options.languages, frameworks: options.frameworks, maxItems: maxSkills })
    ]);

    // filter() with a side effect on the seen-sets: keep only first occurrences.
    const pages = contextResult.pages.filter((page) => {
      if (seenPageSlugs.has(page.slug)) {
        return false;
      }
      seenPageSlugs.add(page.slug);
      return true;
    });
    const dedupedMemories = memories.filter((memory) => {
      if (seenMemoryIds.has(memory.id)) {
        return false;
      }
      seenMemoryIds.add(memory.id);
      return true;
    });
    const dedupedSkills = skills.filter((skill) => {
      if (seenSkillIds.has(skill.id)) {
        return false;
      }
      seenSkillIds.add(skill.id);
      return true;
    });

    entries.push({
      file,
      pages,
      memories: dedupedMemories,
      skills: dedupedSkills
    });
  }

  // Counts come from the dedupe sets, so they are unique totals across all files.
  return {
    files: entries,
    pageCount: seenPageSlugs.size,
    memoryCount: seenMemoryIds.size,
    skillCount: seenSkillIds.size
  };
}
123
+
124
/**
 * Render the aggregated diff context as markdown: a summary line, then a `###` section
 * per changed file listing matching skills, memories, and wiki pages. Files with nothing
 * surfaced are omitted; a trailing note covers the all-empty case.
 */
export function renderDiffContextMarkdown(result: BuildDiffContextResult): string {
  if (result.files.length === 0) {
    return '_Dendrite Wiki MCP found no changed files to analyze._';
  }

  const lines: string[] = [
    '## Dendrite Wiki: relevant context for this change',
    '',
    `Reviewed ${result.files.length} file${result.files.length === 1 ? '' : 's'}. Surfaced ${result.pageCount} wiki page${result.pageCount === 1 ? '' : 's'}, ${result.memoryCount} memor${result.memoryCount === 1 ? 'y' : 'ies'}, and ${result.skillCount} skill${result.skillCount === 1 ? '' : 's'}.`,
    ''
  ];

  for (const entry of result.files) {
    // Skip files with nothing surfaced so the comment stays focused.
    if (entry.pages.length === 0 && entry.memories.length === 0 && entry.skills.length === 0) {
      continue;
    }
    lines.push(`### \`${entry.file}\``, '');

    if (entry.skills.length > 0) {
      lines.push('**Matching skills**');
      for (const skill of entry.skills) {
        // At most two match reasons per bullet to keep lines scannable.
        lines.push(`- \`${skill.id}\` — ${escape(skill.summary)} _(${skill.reasons.slice(0, 2).join('; ')})_`);
      }
      lines.push('');
    }
    if (entry.memories.length > 0) {
      lines.push('**Relevant memories**');
      for (const memory of entry.memories) {
        lines.push(`- \`${memory.id}\` — ${escape(memory.summary)} _(${memory.reasons.slice(0, 2).join('; ')})_`);
      }
      lines.push('');
    }
    if (entry.pages.length > 0) {
      lines.push('**Relevant wiki pages**');
      for (const page of entry.pages) {
        // Links assume pages live at docs/wiki/<slug>.md relative to the repo root —
        // NOTE(review): confirm this path against the wiki store layout.
        lines.push(`- [\`${page.slug}\`](docs/wiki/${page.slug}.md) — ${escape(page.summary)} _(${escape(page.reason)})_`);
      }
      lines.push('');
    }
  }

  if (result.pageCount === 0 && result.memoryCount === 0 && result.skillCount === 0) {
    lines.push(
      '_No matching wiki pages, memories, or skills were surfaced for the changed files. This usually means the changes touch new territory the wiki has not documented yet._'
    );
  }

  return lines.join('\n');
}
173
+
174
+ function uniqueFiles(input: string[]): string[] {
175
+ const seen = new Set<string>();
176
+ for (const value of input) {
177
+ if (typeof value !== 'string') continue;
178
+ const normalized = value.trim().replace(/\\/g, '/');
179
+ if (!normalized) continue;
180
+ seen.add(normalized);
181
+ }
182
+ return Array.from(seen);
183
+ }
184
+
185
+ function escape(value: string): string {
186
+ return value.replace(/\|/g, '\\|').replace(/[\r\n]+/g, ' ').trim();
187
+ }