@rarusoft/dendrite-wiki 0.1.0-alpha.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +79 -0
- package/dist/api-extractor/extract.js +269 -0
- package/dist/api-extractor/language-extractor.js +15 -0
- package/dist/api-extractor/python-extractor.js +358 -0
- package/dist/api-extractor/render.js +195 -0
- package/dist/api-extractor/tree-sitter-extractor.js +1079 -0
- package/dist/api-extractor/types.js +11 -0
- package/dist/api-extractor/typescript-extractor.js +50 -0
- package/dist/api-extractor/walk.js +178 -0
- package/dist/api-reference.js +438 -0
- package/dist/benchmark-events.js +129 -0
- package/dist/benchmark.js +270 -0
- package/dist/binder-export.js +381 -0
- package/dist/canonical-target.js +168 -0
- package/dist/chart-insert.js +377 -0
- package/dist/chart-prompts.js +414 -0
- package/dist/context-cache.js +98 -0
- package/dist/contradicts-shipped-memory.js +232 -0
- package/dist/diff-context.js +142 -0
- package/dist/doctor.js +220 -0
- package/dist/generated-docs.js +219 -0
- package/dist/i18n.js +71 -0
- package/dist/index.js +49 -0
- package/dist/librarian.js +255 -0
- package/dist/maintenance-actions.js +244 -0
- package/dist/maintenance-inbox.js +842 -0
- package/dist/maintenance-runner.js +62 -0
- package/dist/page-drift.js +225 -0
- package/dist/page-inbox.js +168 -0
- package/dist/report-export.js +339 -0
- package/dist/review-bridge.js +1386 -0
- package/dist/search-index.js +199 -0
- package/dist/store.js +1617 -0
- package/dist/telemetry-defaults.js +44 -0
- package/dist/telemetry-report.js +263 -0
- package/dist/telemetry.js +544 -0
- package/dist/wiki-synthesis.js +901 -0
- package/package.json +35 -0
- package/src/api-extractor/extract.ts +333 -0
- package/src/api-extractor/language-extractor.ts +37 -0
- package/src/api-extractor/python-extractor.ts +380 -0
- package/src/api-extractor/render.ts +267 -0
- package/src/api-extractor/tree-sitter-extractor.ts +1210 -0
- package/src/api-extractor/types.ts +41 -0
- package/src/api-extractor/typescript-extractor.ts +56 -0
- package/src/api-extractor/walk.ts +209 -0
- package/src/api-reference.ts +552 -0
- package/src/benchmark-events.ts +216 -0
- package/src/benchmark.ts +376 -0
- package/src/binder-export.ts +437 -0
- package/src/canonical-target.ts +192 -0
- package/src/chart-insert.ts +478 -0
- package/src/chart-prompts.ts +417 -0
- package/src/context-cache.ts +129 -0
- package/src/contradicts-shipped-memory.ts +311 -0
- package/src/diff-context.ts +187 -0
- package/src/doctor.ts +260 -0
- package/src/generated-docs.ts +316 -0
- package/src/i18n.ts +106 -0
- package/src/index.ts +59 -0
- package/src/librarian.ts +331 -0
- package/src/maintenance-actions.ts +314 -0
- package/src/maintenance-inbox.ts +1132 -0
- package/src/maintenance-runner.ts +85 -0
- package/src/page-drift.ts +292 -0
- package/src/page-inbox.ts +254 -0
- package/src/report-export.ts +392 -0
- package/src/review-bridge.ts +1729 -0
- package/src/search-index.ts +266 -0
- package/src/store.ts +2171 -0
- package/src/telemetry-defaults.ts +50 -0
- package/src/telemetry-report.ts +365 -0
- package/src/telemetry.ts +757 -0
- package/src/wiki-synthesis.ts +1307 -0
package/dist/store.js
ADDED
|
@@ -0,0 +1,1617 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* The wiki page store and the heart of the read/write/search/lint surface.
|
|
3
|
+
*
|
|
4
|
+
* Owns everything about wiki pages as filesystem markdown: listing pages under
|
|
5
|
+
* `docs/wiki/**`, parsing frontmatter into `WikiPageMetadata`, reading and writing page
|
|
6
|
+
* content, extracting source-backed claims, building the graph of inbound/outbound links,
|
|
7
|
+
* surfacing lint findings (`missing-h1`, `orphan-page`, `stale-claim`, `page-drift`, etc.),
|
|
8
|
+
* and assembling the task-scoped briefing returned by `wiki_context`. The lint pass exempts
|
|
9
|
+
* `lifecycle: generated` pages so auto-managed surfaces (the API reference tree) don't
|
|
10
|
+
* surface findings humans can't act on.
|
|
11
|
+
*
|
|
12
|
+
* Most other modules in `src/wiki/` consume this module rather than the filesystem directly.
|
|
13
|
+
* `memory-store.ts` joins memories to pages here, `synthesis.ts` reads pages for claim
|
|
14
|
+
* synthesis prompts, `generated-docs.ts` rebuilds derived artifacts from the page list.
|
|
15
|
+
*/
|
|
16
|
+
import { promises as fs, statSync } from 'node:fs';
|
|
17
|
+
import path from 'node:path';
|
|
18
|
+
import { createPatch } from 'diff';
|
|
19
|
+
import { recallProjectHandoffs, recallProjectMemories, summarizeMemoryBacklog } from '@rarusoft/dendrite-memory';
|
|
20
|
+
import { recallProjectSkills } from '@rarusoft/dendrite-memory';
|
|
21
|
+
import { getCachedWikiContext, invalidateWikiContextCache, setCachedWikiContext } from './context-cache.js';
|
|
22
|
+
import { buildContradictsShippedMemoryMessage, detectContradictsShippedMemory } from './contradicts-shipped-memory.js';
|
|
23
|
+
import { listProjectMemories } from '@rarusoft/dendrite-memory';
|
|
24
|
+
import { buildPageDriftMessage, detectPageDrift } from './page-drift.js';
|
|
25
|
+
import { buildMemoryTrailReason, loadMemoryTrailBonusLookup, reinforceQueryEdges } from '@rarusoft/dendrite-memory';
|
|
26
|
+
import { loadActivePageDriftSnoozes } from '@rarusoft/dendrite-memory';
|
|
27
|
+
import { buildWikiSearchIndex, fallbackSearchResults, searchResultToContextPage, searchWikiIndex, tokenizeSearchQuery } from './search-index.js';
|
|
28
|
+
// Sentinel sentence embedded in every generated proposal page. listGeneratedProposalPageSlugs
// uses it to recognize pages this module owns, so stale generated pages can be deleted safely
// without touching human-authored pages under pending-review/.
const proposalPageMarker = 'Reviewable deterministic maintenance proposal.';
|
|
29
|
+
/**
 * Enumerate the currently-active deterministic maintenance proposals.
 *
 * Two kinds are produced:
 * - `merge-guidance`: groups of guidance files with identical normalized content
 *   (from `findDuplicateGuidanceGroups`); the first file in each group is canonical.
 * - `route-guidance`: non-skill guidance files longer than `maxGuidanceLineCount`
 *   that already link to local docs pages (`listGuidanceRouteTargets`).
 *
 * Returns the combined list sorted by summary, with review-page slugs/paths attached
 * by `attachProposalReviewPages`.
 */
export async function listWikiProposals() {
  const duplicateGroups = await findDuplicateGuidanceGroups();
  const guidanceFiles = await listProjectGuidanceFiles();
  const mergeProposals = duplicateGroups.map((group) => {
    // Group convention: first entry is the canonical file, the rest are duplicates.
    const [canonical, ...duplicates] = group;
    return {
      kind: 'merge-guidance',
      summary: `Merge duplicate guidance into ${canonical.path}`,
      currentStateSummary: `${duplicates.map((guidance) => guidance.path).join(', ')} currently duplicate ${canonical.path}.`,
      afterApplySummary: `${duplicates.map((guidance) => guidance.path).join(', ')} become short pointers to ${canonical.path} while the canonical file stays unchanged.`,
      canonicalPath: canonical.path,
      duplicatePaths: duplicates.map((guidance) => guidance.path),
      // Archive targets are suggestions only; nothing is moved until a human accepts.
      archiveTargets: duplicates.map((guidance) => ({
        sourcePath: guidance.path,
        suggestedPath: buildGuidanceArchivePath(guidance.path),
        reviewStatus: 'pending-review',
        reason: 'Archive only after the duplicate guidance has been reviewed and the pointer rewrite has been accepted.'
      })),
      rationale: `These guidance files share the same normalized content and should route through one canonical entry file before the redundant copies are archived.`
    };
  });
  const routeProposals = [];
  // Skills are exempt from length-based routing; only plain guidance files qualify.
  for (const guidance of guidanceFiles.filter((candidate) => candidate.kind !== 'skill')) {
    const content = await fs.readFile(path.join(repoRoot, guidance.path), 'utf8').catch(() => '');
    if (countLines(content) <= maxGuidanceLineCount) {
      continue;
    }
    const targetPaths = listGuidanceRouteTargets(content, guidance.path);
    // No route targets means there is nowhere to push the detail — skip.
    if (targetPaths.length === 0) {
      continue;
    }
    routeProposals.push({
      kind: 'route-guidance',
      summary: `Trim ${guidance.path} and route to ${targetPaths[0]}`,
      currentStateSummary: `${guidance.path} is longer than the preferred guidance length.`,
      afterApplySummary: `${guidance.path} becomes a short entry file that routes to ${targetPaths[0]}.`,
      guidancePath: guidance.path,
      targetPaths,
      rationale: 'This guidance file exceeds the preferred length and already links to canonical local docs pages that can carry the detailed workflow.'
    });
  }
  return attachProposalReviewPages([...mergeProposals, ...routeProposals].sort((left, right) => left.summary.localeCompare(right.summary)));
}
|
|
72
|
+
/**
 * Regenerate the review page for every active proposal and return the resulting
 * page descriptors. Removal of stale generated pages happens as a side effect of
 * the sync; only the surviving pages are returned here.
 */
export async function writeWikiProposalPages() {
  const { pages } = await syncGeneratedProposalPages();
  return pages;
}
|
|
76
|
+
/**
 * Apply one active proposal (looked up by its review-page slug) to the working tree,
 * then resync the generated review pages so a satisfied proposal's page disappears.
 *
 * - `route-guidance`: rewrites the over-long guidance file into a short routing entry.
 * - `merge-guidance`: rewrites every duplicate into a pointer at the canonical file;
 *   the canonical file itself is never modified.
 *
 * @param {string} reviewSlug Slug of the proposal's generated review page.
 * @returns {Promise<object>} Updated paths plus the removed/active review slugs after resync.
 * @throws {Error} When the slug matches no active proposal or the kind is not auto-appliable.
 */
export async function applyWikiProposal(reviewSlug) {
  const proposals = await listWikiProposals();
  const proposal = proposals.find((candidate) => candidate.reviewSlug === reviewSlug);
  if (!proposal) {
    throw new Error(`Unknown active proposal: ${reviewSlug}`);
  }
  if (proposal.kind === 'route-guidance') {
    const absolutePath = path.join(repoRoot, proposal.guidancePath);
    const existingContent = await fs.readFile(absolutePath, 'utf8').catch(() => '');
    const nextContent = await renderRouteGuidanceApplyContent(proposal, existingContent);
    // Consistency: normalize through the same helper preview uses, instead of an inline ternary.
    await fs.writeFile(absolutePath, ensureTrailingNewline(nextContent), 'utf8');
    const syncResult = await syncGeneratedProposalPages();
    return {
      reviewSlug: proposal.reviewSlug,
      proposalKind: proposal.kind,
      updatedPaths: [proposal.guidancePath],
      removedReviewSlugs: syncResult.removedSlugs,
      activeReviewSlugs: syncResult.pages.map((page) => page.slug)
    };
  }
  if (proposal.kind === 'merge-guidance') {
    // Canonical content is read once and reused for every duplicate's fallback route targets.
    const canonicalContent = await fs.readFile(path.join(repoRoot, proposal.canonicalPath), 'utf8').catch(() => '');
    const updatedPaths = [];
    for (const duplicatePath of proposal.duplicatePaths) {
      const absolutePath = path.join(repoRoot, duplicatePath);
      const existingContent = await fs.readFile(absolutePath, 'utf8').catch(() => '');
      const nextContent = await renderMergeGuidanceApplyContent(proposal, duplicatePath, existingContent, canonicalContent);
      await fs.writeFile(absolutePath, ensureTrailingNewline(nextContent), 'utf8');
      updatedPaths.push(duplicatePath);
    }
    const syncResult = await syncGeneratedProposalPages();
    return {
      reviewSlug: proposal.reviewSlug,
      proposalKind: proposal.kind,
      updatedPaths,
      removedReviewSlugs: syncResult.removedSlugs,
      activeReviewSlugs: syncResult.pages.map((page) => page.slug)
    };
  }
  // BUG FIX: the message previously interpolated the review slug while promising the
  // proposal kind; previewWikiProposal already reports `proposal.kind` — match it.
  throw new Error(`Auto-apply is not supported for proposal kind: ${proposal.kind}`);
}
|
|
117
|
+
/**
 * Dry-run a proposal: compute the exact file changes `applyWikiProposal` would make,
 * without writing anything. Returns per-file current/proposed content plus a unified
 * diff, together with human-readable warnings (missing files, no-op applies).
 *
 * @param {string} reviewSlug Slug of the proposal's generated review page.
 * @throws {Error} When the slug matches no active proposal or the kind has no preview.
 */
export async function previewWikiProposal(reviewSlug) {
  const proposals = await listWikiProposals();
  const proposal = proposals.find((candidate) => candidate.reviewSlug === reviewSlug);
  if (!proposal) {
    throw new Error(`Unknown active proposal: ${reviewSlug}`);
  }
  const fileChanges = [];
  const warnings = [];
  if (proposal.kind === 'route-guidance') {
    const absolutePath = path.join(repoRoot, proposal.guidancePath);
    const existingContent = await fs.readFile(absolutePath, 'utf8').catch(() => '');
    if (!existingContent) {
      warnings.push(`${proposal.guidancePath} does not currently exist; applying will create it.`);
    }
    // Mirror the apply path exactly, including trailing-newline normalization,
    // so the previewed diff matches what apply would write byte-for-byte.
    const renderedContent = await renderRouteGuidanceApplyContent(proposal, existingContent);
    const proposedContent = ensureTrailingNewline(renderedContent);
    fileChanges.push(buildFileChange(proposal.guidancePath, existingContent, proposedContent));
  }
  else if (proposal.kind === 'merge-guidance') {
    const canonicalContent = await fs.readFile(path.join(repoRoot, proposal.canonicalPath), 'utf8').catch(() => '');
    if (!canonicalContent) {
      warnings.push(`Canonical guidance file ${proposal.canonicalPath} does not exist; the merge will run with empty canonical content.`);
    }
    for (const duplicatePath of proposal.duplicatePaths) {
      const absolutePath = path.join(repoRoot, duplicatePath);
      const existingContent = await fs.readFile(absolutePath, 'utf8').catch(() => '');
      const renderedContent = await renderMergeGuidanceApplyContent(proposal, duplicatePath, existingContent, canonicalContent);
      const proposedContent = ensureTrailingNewline(renderedContent);
      fileChanges.push(buildFileChange(duplicatePath, existingContent, proposedContent));
    }
  }
  else {
    throw new Error(`Preview is not supported for proposal kind: ${proposal.kind}`);
  }
  // unshift so the no-op warning leads any per-file warnings already collected.
  if (fileChanges.every((change) => change.skippedBecauseUnchanged)) {
    warnings.unshift('Every affected file already matches the proposed content; applying will be a no-op.');
  }
  return {
    mode: 'preview',
    reviewSlug: proposal.reviewSlug,
    proposalKind: proposal.kind,
    summary: proposal.summary,
    rationale: proposal.rationale,
    warnings,
    fileChanges
  };
}
|
|
164
|
+
// Guarantee the text ends with a newline, appending exactly one when absent.
function ensureTrailingNewline(content) {
  if (content.endsWith('\n')) {
    return content;
  }
  return `${content}\n`;
}
|
|
167
|
+
// Assemble the preview record for one file: its current text, the proposed text,
// a unified diff between them, and whether applying would be a no-op.
// The diff is rendered with the entire file as context (not the diff library's
// default 4-line window) so the operator sees the whole file surrounding the
// change. Same convention as memory-promotion.
function buildFileChange(filePath, currentContent, proposedContent) {
  const unchanged = currentContent === proposedContent;
  const unifiedDiff = createPatch(filePath, currentContent, proposedContent, 'current', 'after apply', { context: 100_000 });
  return {
    path: filePath,
    currentContent,
    proposedContent,
    unifiedDiff,
    skippedBecauseUnchanged: unchanged
  };
}
|
|
174
|
+
/**
 * Reconcile the generated review pages under pending-review/ with the currently
 * active proposals: (re)write one page per proposal, then delete any previously
 * generated page (recognized via `proposalPageMarker`) that no longer corresponds
 * to an active proposal. Returns the surviving page descriptors plus removed slugs,
 * both sorted for deterministic output.
 */
async function syncGeneratedProposalPages() {
  const proposals = await listWikiProposals();
  const pages = [];
  // Snapshot existing generated slugs BEFORE writing, so freshly written pages
  // are never mistaken for stale ones.
  const existingSlugs = await listGeneratedProposalPageSlugs();
  const currentSlugs = new Set();
  const removedSlugs = [];
  for (const proposal of proposals) {
    const content = renderProposalPage(proposal);
    await writeWikiPage(proposal.reviewSlug, content);
    // Derive the display title from the page's own H1; fall back to the slug.
    const title = content.match(/^#\s+(.+)$/m)?.[1]?.trim() ?? proposal.reviewSlug;
    pages.push({
      slug: proposal.reviewSlug,
      title,
      path: proposal.reviewPath,
      proposalKind: proposal.kind
    });
    currentSlugs.add(proposal.reviewSlug);
  }
  // Anything previously generated that is no longer active gets deleted.
  for (const staleSlug of existingSlugs) {
    if (currentSlugs.has(staleSlug)) {
      continue;
    }
    await fs.rm(pagePathFromSlug(staleSlug), { force: true });
    removedSlugs.push(staleSlug);
  }
  return {
    pages: pages.sort((left, right) => left.slug.localeCompare(right.slug)),
    removedSlugs: removedSlugs.sort((left, right) => left.localeCompare(right))
  };
}
|
|
204
|
+
// Decorate each proposal with the slug and repo-relative path of its generated
// review page. A shared used-slug set is threaded through the batch so two
// proposals keyed on the same file never collide.
function attachProposalReviewPages(proposals) {
  const claimedSlugs = new Set();
  return proposals.map((proposal) => {
    const reviewSlug = buildProposalPageSlug(proposal, claimedSlugs);
    const reviewPath = `docs/wiki/${reviewSlug}.md`;
    return { ...proposal, reviewSlug, reviewPath };
  });
}
|
|
215
|
+
/**
 * Recursively walk docs/wiki/pending-review and return the slugs of every markdown
 * page this module generated (identified by `proposalPageMarker` in the content).
 * Human-authored pages under the same tree are ignored. Missing directories and
 * unreadable files are tolerated (treated as empty). Result is sorted.
 */
async function listGeneratedProposalPageSlugs() {
  const pendingReviewDirectory = path.join(wikiRoot, 'pending-review');
  const matches = [];
  async function walk(directory) {
    // `.catch(() => [])` makes a nonexistent pending-review/ directory a no-op.
    const entries = await fs.readdir(directory, { withFileTypes: true }).catch(() => []);
    for (const entry of entries) {
      const fullPath = path.join(directory, entry.name);
      if (entry.isDirectory()) {
        await walk(fullPath);
        continue;
      }
      if (!entry.isFile() || !entry.name.endsWith('.md')) {
        continue;
      }
      const content = await fs.readFile(fullPath, 'utf8').catch(() => '');
      // Only pages carrying the generated-page marker belong to this module.
      if (!content.includes(proposalPageMarker)) {
        continue;
      }
      // Normalize to forward slashes so slugs are stable across platforms.
      const relative = path.relative(wikiRoot, fullPath).replace(/\\/g, '/');
      matches.push(relative.replace(/\.md$/i, ''));
    }
  }
  await walk(pendingReviewDirectory);
  return matches.sort((left, right) => left.localeCompare(right));
}
|
|
240
|
+
// Map a duplicate guidance file to its suggested archive location under the wiki,
// flattening directory separators into '__' so the archived file name stays unique
// and filesystem-safe. Leading './' (or '/') prefixes are stripped first.
function buildGuidanceArchivePath(relativePath) {
  const flattened = relativePath
    .replace(/^[./]+/, '')
    .replace(/[\\/]/g, '__');
  return `docs/wiki/archive-guidance/${flattened}`;
}
|
|
244
|
+
// Build a stable pending-review slug for a proposal, keyed on the file it targets
// (canonical path for merges, the guidance path otherwise). When an earlier proposal
// in the batch already claimed the base slug, append '-2', '-3', … until free.
function buildProposalPageSlug(proposal, usedSlugs) {
  const keyPath = proposal.kind === 'merge-guidance' ? proposal.canonicalPath : proposal.guidancePath;
  const base = `pending-review/${proposal.kind}-${slugifyProposalKey(keyPath)}`;
  let candidate = base;
  for (let suffix = 2; usedSlugs.has(candidate); suffix += 1) {
    candidate = `${base}-${suffix}`;
  }
  usedSlugs.add(candidate);
  return candidate;
}
|
|
256
|
+
// Lower-case a path-like key and collapse every run of non-alphanumerics into a
// single hyphen, trimming hyphens at the ends; falls back to 'proposal' when
// nothing survives (e.g. a key made entirely of punctuation).
function slugifyProposalKey(value) {
  const slug = value
    .toLowerCase()
    .replace(/[^a-z0-9]+/g, '-')
    .replace(/^-+|-+$/g, '');
  return slug === '' ? 'proposal' : slug;
}
|
|
262
|
+
/**
 * Render a proposal's review page as markdown. Both shapes share the same layout:
 * H1, the generated-page marker (so the sync pass can recognize and later delete
 * the page), then Summary / Current State / After Apply / Rationale sections.
 */
function renderProposalPage(proposal) {
  if (proposal.kind === 'merge-guidance') {
    return [
      `# Review merge guidance for ${proposal.canonicalPath}`,
      '',
      proposalPageMarker,
      '',
      '## Summary',
      proposal.summary,
      '',
      '## Current State',
      `- ${proposal.currentStateSummary}`,
      `- ${proposal.canonicalPath} is the canonical guidance entry.`,
      ...proposal.duplicatePaths.map((duplicatePath) => `- ${duplicatePath} currently repeats that guidance content.`),
      '',
      '## After Apply',
      `- ${proposal.afterApplySummary}`,
      `- ${proposal.canonicalPath} stays unchanged as the canonical guidance entry.`,
      ...proposal.duplicatePaths.map((duplicatePath) => `- ${duplicatePath} becomes a short pointer to the canonical guidance and wiki pages.`),
      // Archive targets are surfaced as advice only; apply never moves files.
      ...proposal.archiveTargets.map((target) => `- If you want to keep history, archive ${target.sourcePath} at ${target.suggestedPath} before deleting or moving it later. ${target.reason}`),
      '',
      '## Rationale',
      proposal.rationale,
    ].join('\n');
  }
  // Fallback shape covers 'route-guidance' (the only other kind produced today).
  return [
    `# Review route guidance for ${proposal.guidancePath}`,
    '',
    proposalPageMarker,
    '',
    '## Summary',
    proposal.summary,
    '',
    '## Current State',
    `- ${proposal.currentStateSummary}`,
    `- ${proposal.guidancePath} is longer than the preferred guidance length.`,
    ...proposal.targetPaths.map((targetPath) => `- It already points readers toward ${targetPath}.`),
    '',
    '## After Apply',
    `- ${proposal.afterApplySummary}`,
    `- ${proposal.guidancePath} becomes a short entry file.`,
    ...proposal.targetPaths.map((targetPath) => `- Detailed workflow is routed to ${targetPath}.`),
    '',
    '## Rationale',
    proposal.rationale
  ].join('\n');
}
|
|
309
|
+
/**
 * Render the replacement body for a route-guidance apply: the original heading and
 * first paragraph are preserved when present (falling back to a filename-derived
 * heading and a stock summary), followed by relative links to each route target.
 */
async function renderRouteGuidanceApplyContent(proposal, existingContent) {
  const heading = extractHeading(existingContent) || defaultGuidanceHeading(proposal.guidancePath);
  const summary = extractSummaryParagraph(existingContent) || 'This entry file now routes to canonical local docs pages.';
  // Link labels come from each target page's own H1; links are relative to the
  // guidance file so they resolve in-repo.
  const routeLines = await Promise.all(proposal.targetPaths.map(async (targetPath) => {
    const label = await readMarkdownTitle(targetPath);
    const relativeLink = buildRelativeMarkdownLink(proposal.guidancePath, targetPath);
    return `- Read [${label}](${relativeLink}).`;
  }));
  return [
    `# ${heading}`,
    '',
    summary,
    '',
    'Detailed workflow lives in the wiki pages below.',
    '',
    ...routeLines
  ].join('\n');
}
|
|
327
|
+
/**
 * Render the replacement body for one duplicate in a merge-guidance apply: keep the
 * duplicate's heading/summary when present, point at the canonical guidance file,
 * and list route targets — the duplicate's own targets when it has any, otherwise
 * those found in the canonical content.
 */
async function renderMergeGuidanceApplyContent(proposal, duplicatePath, duplicateContent, canonicalContent) {
  const heading = extractHeading(duplicateContent) || defaultGuidanceHeading(duplicatePath);
  const summary = extractSummaryParagraph(duplicateContent) || 'This entry file now points to the canonical guidance file and wiki pages.';
  const canonicalTitle = await readMarkdownTitle(proposal.canonicalPath);
  const canonicalLink = buildRelativeMarkdownLink(duplicatePath, proposal.canonicalPath);
  const targetPaths = listGuidanceRouteTargets(duplicateContent, duplicatePath);
  // Fall back to the canonical file's targets so the pointer page is never link-less.
  const fallbackTargetPaths = targetPaths.length > 0 ? targetPaths : listGuidanceRouteTargets(canonicalContent, proposal.canonicalPath);
  const routeLines = await Promise.all(fallbackTargetPaths.map(async (targetPath) => {
    const label = await readMarkdownTitle(targetPath);
    const relativeLink = buildRelativeMarkdownLink(duplicatePath, targetPath);
    return `- Read [${label}](${relativeLink}).`;
  }));
  return [
    `# ${heading}`,
    '',
    summary,
    '',
    `Canonical guidance lives in [${canonicalTitle}](${canonicalLink}).`,
    '',
    'Detailed workflow lives in the wiki pages below.',
    '',
    ...routeLines
  ].join('\n');
}
|
|
351
|
+
// Return the trimmed text of the first ATX H1 ('# Title') found on any line of the
// content, or the empty string when there is none.
function extractHeading(content) {
  const match = content.match(/^#\s+(.+)$/m);
  if (!match) {
    return '';
  }
  return match[1].trim();
}
|
|
354
|
+
// Derive a human-readable heading from the guidance file name when the file has no
// H1: drop the '.md' extension and turn dash/underscore runs into spaces. Falls back
// to 'Guidance' when nothing printable remains.
function defaultGuidanceHeading(guidancePath) {
  const words = path.basename(guidancePath, '.md').replace(/[-_]+/g, ' ').trim();
  return words === '' ? 'Guidance' : words;
}
|
|
357
|
+
// Resolve a display label for a repo-relative markdown file: its first H1 when the
// file is readable and has one, otherwise the bare file name without '.md'.
async function readMarkdownTitle(relativePath) {
  const absolutePath = path.join(repoRoot, relativePath);
  const content = await fs.readFile(absolutePath, 'utf8').catch(() => '');
  const heading = extractHeading(content);
  return heading || path.basename(relativePath, '.md');
}
|
|
361
|
+
// Compute the POSIX-style relative link from the directory containing sourcePath to
// targetPath, normalizing any Windows separators first so the emitted markdown link
// is identical on every platform.
function buildRelativeMarkdownLink(sourcePath, targetPath) {
  const fromDirectory = path.posix.dirname(sourcePath.replace(/\\/g, '/'));
  const toFile = targetPath.replace(/\\/g, '/');
  return path.posix.relative(fromDirectory, toFile);
}
|
|
365
|
+
// Repo-derived roots. Module-level because every path helper in this file joins
// against them; repoRoot is fixed at import time from the process cwd.
const repoRoot = path.resolve(process.cwd());
const docsRoot = path.resolve(repoRoot, 'docs');
const wikiRoot = path.join(docsRoot, 'wiki');
// Tests reload this module per fixture (different cwd → different `repoRoot`), but
// `context-cache.js` is imported once and shared across all instances. Without this
// invalidation, a buildWikiContext call from a prior fixture's instance could serve
// a cached result whose `findings` were computed against a different `repoRoot`.
// On a fast Linux runner this surfaced as flaky lint assertions where guidance files
// from one fixture would silently bleed into another. Cheap to call at module init.
invalidateWikiContextCache();
// Default sizing caps for the context briefing — presumably pages and project-log
// entries returned by wiki_context; consumers are below this chunk (TODO confirm).
const defaultContextPageLimit = 4;
const defaultLogEntryLimit = 3;
// Guidance files longer than this many lines become route-guidance proposal candidates.
const maxGuidanceLineCount = 40;
// Query tokens too generic to help ranking; presumably dropped before search scoring.
const contextStopTerms = new Set(['current', 'latest', 'need', 'project', 'question', 'recent', 'task']);
// Query tokens that hint the caller wants recent project-log history in the briefing.
const projectLogHintTerms = new Set(['change', 'changes', 'history', 'log', 'recent', 'ship', 'status', 'update', 'updates']);
|
|
380
|
+
/**
 * Resolve a wiki slug to its absolute markdown path under docs/wiki, rejecting any
 * slug that could escape the wiki root.
 *
 * Accepts either slash style and an optional '.md' suffix. Throws on: empty slugs,
 * absolute paths, trailing slashes, '.'/'..' or empty path segments, and characters
 * outside [A-Za-z0-9/_-] (the first character must be alphanumeric).
 *
 * @param {string} slug Wiki slug, e.g. 'pending-review/merge-guidance-foo'.
 * @returns {string} Absolute path to the page's markdown file.
 * @throws {Error} `Invalid wiki slug: …` when validation fails.
 */
export function pagePathFromSlug(slug) {
  const slashNormalized = slug.replace(/\\/g, '/').trim();
  const normalized = slashNormalized.replace(/\.md$/i, '');
  // Validation order matters only for readability; any single failure throws.
  if (!normalized ||
    slashNormalized.startsWith('/') ||
    normalized.endsWith('/') ||
    normalized.split('/').some((segment) => !segment || segment === '.' || segment === '..') ||
    !/^[a-z0-9][a-z0-9/_-]*$/i.test(normalized)) {
    throw new Error(`Invalid wiki slug: ${slug}`);
  }
  return path.join(wikiRoot, `${normalized}.md`);
}
|
|
392
|
+
/**
 * Read the raw markdown content of a wiki page by slug.
 * Rejects (ENOENT) when the page does not exist; slug validation is delegated
 * to pagePathFromSlug, which throws synchronously on malformed slugs.
 */
export async function readWikiPage(slug) {
  const pagePath = pagePathFromSlug(slug);
  return fs.readFile(pagePath, 'utf8');
}
|
|
395
|
+
/**
 * Write (create or overwrite) a wiki page, creating parent directories as needed.
 * Consistency: the trailing-newline normalization now goes through the file's shared
 * `ensureTrailingNewline` helper instead of an inline ternary, matching the proposal
 * apply/preview paths. Invalidates the wiki context cache because page content feeds
 * the cached briefing.
 * @param {string} slug Wiki slug (validated by pagePathFromSlug).
 * @param {string} content Markdown content; a trailing newline is appended if missing.
 */
export async function writeWikiPage(slug, content) {
  const filePath = pagePathFromSlug(slug);
  await fs.mkdir(path.dirname(filePath), { recursive: true });
  await fs.writeFile(filePath, ensureTrailingNewline(content), 'utf8');
  invalidateWikiContextCache();
}
|
|
401
|
+
/**
 * Append one bullet entry to the project-log page under a `## YYYY-MM-DD` heading,
 * creating the page and/or the day heading on first use.
 * @param {string} entry Log text; trimmed and angle-bracket-escaped before writing.
 * @param {Date} [date] Entry date; defaults to now. The heading uses the UTC calendar
 *   day (first 10 chars of the ISO timestamp).
 */
export async function appendProjectLog(entry, date = new Date()) {
  const filePath = pagePathFromSlug('project-log');
  const isoDate = date.toISOString().slice(0, 10);
  // VitePress runs every page through the Vue compiler, so unescaped `<word>` tokens
  // in log entries get parsed as custom Vue tags and break the docs build (the
  // memory-promotion and maintenance-inbox writers already do this; project-log
  // needs the same treatment). Escape inside backtick spans is preserved by markdown.
  const line = `\n- ${escapeMarkdownAngleBrackets(entry.trim())}\n`;
  // Missing page → seed with a bare H1 so the first heading/entry append is well-formed.
  let content = await fs.readFile(filePath, 'utf8').catch(() => '# Project Log\n');
  const heading = `## ${isoDate}`;
  // Substring check is sufficient: the heading text embeds the full unique date.
  if (!content.includes(heading)) {
    content += `\n${heading}\n`;
  }
  content += line;
  await fs.writeFile(filePath, content, 'utf8');
  invalidateWikiContextCache();
}
|
|
418
|
+
// Escape literal angle brackets as HTML entities so VitePress's Vue compiler does not
// parse `<word>` tokens in log entries as custom component tags and break the docs
// build (see the caller's comment in appendProjectLog).
// BUG FIX: the previous implementation replaced '<' with '<' and '>' with '>' — a
// no-op; the intended '&lt;'/'&gt;' entity text was evidently lost to an
// HTML-unescaping step. Restored.
function escapeMarkdownAngleBrackets(value) {
  return value.replaceAll('<', '&lt;').replaceAll('>', '&gt;');
}
|
|
421
|
+
// Insert an H1 heading derived from the page slug. Used by the maintenance inbox's
// `missing-h1` lint action so the operator can resolve the finding with one click instead
// of editing the file by hand. The inserted heading lands AFTER the frontmatter block
// (if present) and BEFORE the first body line. Idempotent: if the page already has an
// H1, returns false and writes nothing.
export async function insertH1FromSlug(slug) {
  const filePath = pagePathFromSlug(slug);
  const content = await fs.readFile(filePath, 'utf8');
  if (hasH1(content)) {
    return false;
  }
  const title = titleCaseFromSlug(slug);
  // Refactor: the frontmatter and no-frontmatter branches previously duplicated the
  // strip/format/write sequence; unified here with identical output in both cases.
  const frontmatterMatch = content.match(/^(---\r?\n[\s\S]*?\r?\n---\r?\n)/);
  const frontmatter = frontmatterMatch ? frontmatterMatch[1] : '';
  // Drop leading blank lines from the body so exactly one blank line follows the new H1.
  const body = content.slice(frontmatter.length).replace(/^\r?\n+/, '');
  // A blank line after the closing frontmatter fence keeps the heading out of the YAML block.
  const separator = frontmatter ? '\n' : '';
  const next = `${frontmatter}${separator}# ${title}\n\n${body}`;
  await fs.writeFile(filePath, ensureTrailingNewline(next), 'utf8');
  invalidateWikiContextCache();
  return true;
}
|
|
449
|
+
// Turn a slug like 'pending-review/merge-guidance' into 'Pending Review Merge Guidance':
// split on '/' and '-', drop empty pieces, capitalize each piece, join with spaces.
function titleCaseFromSlug(slug) {
  const pieces = slug.split(/[\/-]/).filter((piece) => piece.length > 0);
  const capitalized = pieces.map((piece) => piece.charAt(0).toUpperCase() + piece.slice(1));
  return capitalized.join(' ');
}
|
|
456
|
+
/**
 * Replace a wiki page's first paragraph (the "summary" directly under the H1).
 * Preserves frontmatter and the H1; everything after the first paragraph is kept
 * verbatim. Returns a change record; `changed` is false when the summary already
 * matches (no write, cache untouched).
 * @param {string} slug Wiki slug of the page to edit.
 * @param {string} newFirstParagraph Replacement paragraph; must be non-empty after trim.
 * @throws {Error} When the replacement is empty or the page has no H1.
 */
export async function editPageSummary(slug, newFirstParagraph) {
  const trimmedNew = newFirstParagraph.replace(/\r\n/g, '\n').trim();
  if (!trimmedNew) {
    throw new Error('editPageSummary requires a non-empty replacement paragraph.');
  }
  const filePath = pagePathFromSlug(slug);
  const content = await fs.readFile(filePath, 'utf8');
  const frontmatterMatch = content.match(/^(---\r?\n[\s\S]*?\r?\n---\r?\n)/);
  const frontmatter = frontmatterMatch ? frontmatterMatch[1] : '';
  const body = content.slice(frontmatter.length);
  // The header block is the H1 line plus any blank lines before it.
  const h1Match = body.match(/^(\s*\r?\n)*(#\s+[^\n]+\r?\n)/);
  if (!h1Match) {
    throw new Error(`Page ${slug} has no H1 heading; resolve the missing-h1 finding before rewriting the summary.`);
  }
  const headerBlock = h1Match[0];
  const afterHeader = body.slice(headerBlock.length);
  // Identify the existing first paragraph: skip leading blank lines, then capture lines
  // until the next blank line OR a heading. Everything after that is the rest of the page.
  const lines = afterHeader.split(/\r?\n/);
  let cursor = 0;
  while (cursor < lines.length && lines[cursor].trim() === '') {
    cursor += 1;
  }
  const paragraphStart = cursor;
  while (cursor < lines.length) {
    const line = lines[cursor];
    if (line.trim() === '' || line.startsWith('#')) {
      break;
    }
    cursor += 1;
  }
  const previousSummary = lines.slice(paragraphStart, cursor).join('\n').trim();
  const remainder = lines.slice(cursor).join('\n');
  const remainderNoLeadingBlank = remainder.replace(/^\r?\n+/, '');
  // No-op fast path: identical summary means no write and no cache invalidation.
  if (previousSummary === trimmedNew) {
    return { slug, changed: false, previousSummary, newSummary: trimmedNew };
  }
  const next = `${frontmatter}${headerBlock}\n${trimmedNew}\n\n${remainderNoLeadingBlank}`;
  const finalText = next.endsWith('\n') ? next : `${next}\n`;
  await fs.writeFile(filePath, finalText, 'utf8');
  invalidateWikiContextCache();
  return { slug, changed: true, previousSummary, newSummary: trimmedNew };
}
|
|
499
|
+
// Archive a dormant guidance file (e.g., a skill markdown that no other doc links to)
// by moving it into a sibling `archive/` directory. Idempotent: if the file is already
// under an `archive/` segment, returns the existing path with no work. The caller is
// expected to pass a relative-from-repo-root path (matching the lint finding's `path`).
export async function archiveGuidanceFile(relativePath) {
  const trimmed = relativePath.trim().replace(/\\/g, '/');
  // Reject empty paths and anything containing '..' so the move cannot escape the repo.
  if (!trimmed || trimmed.includes('..')) {
    throw new Error(`Invalid guidance path for archive: ${relativePath}`);
  }
  const absoluteFrom = path.resolve(repoRoot, trimmed);
  const stat = await fs.stat(absoluteFrom).catch(() => undefined);
  if (!stat || !stat.isFile()) {
    throw new Error(`Guidance file not found: ${trimmed}`);
  }
  const dir = path.posix.dirname(trimmed);
  const fileName = path.posix.basename(trimmed);
  // Already archived (any path segment named 'archive') → no-op result.
  if (dir.split('/').includes('archive')) {
    return { from: trimmed, to: trimmed, moved: false };
  }
  const archiveDir = `${dir}/archive`;
  const archiveRelative = `${archiveDir}/${fileName}`;
  const absoluteTo = path.resolve(repoRoot, archiveRelative);
  await fs.mkdir(path.dirname(absoluteTo), { recursive: true });
  // NOTE(review): fs.rename overwrites an existing destination on POSIX — presumably
  // acceptable here since archive names mirror unique source names; confirm.
  await fs.rename(absoluteFrom, absoluteTo);
  invalidateWikiContextCache();
  return { from: trimmed, to: archiveRelative, moved: true };
}
|
|
526
|
+
// Parse the page's frontmatter block into the metadata shape the lint and
// lifecycle code relies on, falling back to safe defaults for missing keys.
export function extractWikiPageMetadata(content) {
    // Frontmatter is the block between the leading `---` fences, if present.
    const frontmatter = content.match(/^---\r?\n([\s\S]*?)\r?\n---\r?\n/)?.[1] ?? '';
    const fields = new Map();
    for (const line of frontmatter.split(/\r?\n/)) {
        const match = line.match(/^([A-Za-z][A-Za-z0-9_-]*):\s*(.*?)\s*$/);
        if (match === null) {
            continue;
        }
        // Strip one leading and/or trailing quote; later duplicates of a key win.
        const value = match[2].replace(/^['"]|['"]$/g, '');
        fields.set(normalizeMetadataKey(match[1]), value);
    }
    return {
        lifecycle: parsePageLifecycle(fields.get('lifecycle')),
        owner: fields.get('owner') || 'unassigned',
        lastReviewed: fields.get('lastreviewed') || fields.get('last-reviewed') || '',
        sourceCoverage: parseSourceCoverage(fields.get('sourcecoverage') || fields.get('source-coverage'))
    };
}
|
|
540
|
+
// Canonicalize a frontmatter key: lowercase, with `_` treated as `-` so
// `Last_Reviewed`, `last-reviewed` and `lastReviewed` variants can be compared.
function normalizeMetadataKey(value) {
    return value.toLowerCase().split('_').join('-');
}
|
|
543
|
+
// Map a raw frontmatter lifecycle value onto the known set; anything missing
// or unrecognized is treated as 'active'.
function parsePageLifecycle(value) {
    const normalized = value?.trim();
    const knownStates = ['dormant', 'superseded', 'pending-review', 'generated'];
    return knownStates.includes(normalized) ? normalized : 'active';
}
|
|
554
|
+
// Map a raw frontmatter source-coverage value onto the known set; anything
// missing or unrecognized is reported as 'unknown'.
function parseSourceCoverage(value) {
    const normalized = value?.trim();
    const knownLevels = ['none', 'partial', 'complete'];
    return knownLevels.includes(normalized) ? normalized : 'unknown';
}
|
|
564
|
+
// Recursively collect every `.md` page under the wiki root. For each page we
// derive a slug from its relative path, read the H1 as the title (slug as
// fallback), and parse frontmatter metadata. Results are sorted by slug.
export async function listWikiPages() {
    const collected = [];
    const visit = async (directory) => {
        for (const entry of await fs.readdir(directory, { withFileTypes: true })) {
            const entryPath = path.join(directory, entry.name);
            if (entry.isDirectory()) {
                await visit(entryPath);
            } else if (entry.isFile() && entry.name.endsWith('.md')) {
                const relative = path.relative(wikiRoot, entryPath).replace(/\\/g, '/');
                const slug = relative.replace(/\.md$/i, '');
                const content = await fs.readFile(entryPath, 'utf8');
                const title = content.match(/^#\s+(.+)$/m)?.[1]?.trim() ?? slug;
                collected.push({ slug, title, path: `docs/wiki/${relative}`, metadata: extractWikiPageMetadata(content) });
            }
        }
    };
    await visit(wikiRoot);
    return collected.sort((a, b) => a.slug.localeCompare(b.slug));
}
|
|
587
|
+
// Classify every project guidance file into a lifecycle status for maintenance
// review: 'pending-review' (an open proposal references it), 'superseded' (a
// merge proposal picked an archive destination for it), 'dormant' (lint found
// no inbound links), or 'active'. Sorted by status, then path.
export async function listGuidanceLifecycle() {
    // These four reads are independent — gather them in parallel.
    const [guidanceFiles, pages, proposals, findings] = await Promise.all([
        listProjectGuidanceFiles(),
        listWikiPages(),
        listWikiProposals(),
        lintWikiPages()
    ]);
    const linkedFromByPath = await collectMarkdownInboundSources(guidanceFiles, pages);
    // Index open proposals by the guidance path(s) they affect. A merge-guidance
    // proposal touches several duplicate files and may also nominate archive
    // destinations; every other proposal kind names a single guidancePath.
    const proposalByPath = new Map();
    const archiveTargetByPath = new Map();
    for (const proposal of proposals) {
        if (proposal.kind === 'merge-guidance') {
            for (const duplicatePath of proposal.duplicatePaths) {
                proposalByPath.set(duplicatePath, proposal);
            }
            for (const archiveTarget of proposal.archiveTargets) {
                archiveTargetByPath.set(archiveTarget.sourcePath, archiveTarget.suggestedPath);
            }
        }
        else {
            proposalByPath.set(proposal.guidancePath, proposal);
        }
    }
    // Paths the lint pass flagged as dormant skills (no inbound links anywhere).
    const dormantPaths = new Set(findings.filter((finding) => finding.rule === 'dormant-skill').map((finding) => finding.path));
    return guidanceFiles.map((guidance) => {
        const linkedFrom = linkedFromByPath.get(guidance.path) ?? [];
        const proposal = proposalByPath.get(guidance.path);
        const archiveTarget = archiveTargetByPath.get(guidance.path);
        // Status precedence: open proposal > archive target > dormant > active.
        if (proposal) {
            return {
                ...guidance,
                status: 'pending-review',
                linkedFrom,
                // May be undefined when the proposal did not nominate a destination.
                archiveTarget,
                reviewStatus: 'pending-review',
                reason: `Active ${proposal.kind} proposal is waiting for operator review.`
            };
        }
        if (archiveTarget) {
            return {
                ...guidance,
                status: 'superseded',
                linkedFrom,
                archiveTarget,
                reviewStatus: 'pending-review',
                reason: 'Guidance has a concrete archive destination after review.'
            };
        }
        if (dormantPaths.has(guidance.path)) {
            return {
                ...guidance,
                status: 'dormant',
                linkedFrom,
                reviewStatus: 'none',
                reason: 'Guidance is not linked from project docs or active guidance files.'
            };
        }
        return {
            ...guidance,
            status: 'active',
            linkedFrom,
            reviewStatus: 'none',
            reason: linkedFrom.length > 0 ? 'Guidance is linked from project docs or another active guidance file.' : 'Guidance is an active entry file.'
        };
    }).sort((left, right) => left.status.localeCompare(right.status) || left.path.localeCompare(right.path));
}
|
|
653
|
+
// Run every wiki and guidance lint rule and return the combined findings,
// sorted by slug then rule name. Per-page rules: missing-h1, missing-summary,
// orphan-page, unsupported-claim, stale-claim, page-drift,
// contradicts-shipped-memory. Per-guidance rules: oversized-guidance,
// stale-guidance-reference, unrouted-guidance, dormant-skill,
// duplicate-guidance, conflicting-guidance. Each finding carries
// { rule, slug, path, message }.
export async function lintWikiPages() {
    const pages = await listWikiPages();
    const findings = [];
    const inboundLinks = await collectInboundWikiLinks(pages);
    const pageByPath = new Map(pages.map((page) => [page.path, page.slug]));
    const guidanceFiles = await listProjectGuidanceFiles();
    // Read project-log once so per-page drift detection doesn't re-read for each page.
    const projectLogContent = await fs.readFile(pagePathFromSlug('project-log'), 'utf8').catch(() => '');
    // Load active page-drift snoozes so we can suppress findings the operator has already
    // acknowledged as noise. Expired snoozes are pruned lazily inside the loader.
    const snoozedPageDrifts = await loadActivePageDriftSnoozes().catch(() => new Map());
    // Load active project-local memories once for the contradicts-shipped-memory rule.
    // Includes superseded records because being promoted-then-superseded still proves the
    // feature exists in the wiki — that's the strongest possible evidence against a
    // "this is missing" assertion.
    const activeMemoriesForContradictionCheck = await listProjectMemories({ includeArchived: true })
        .then((records) => records.filter((record) => record.status === 'active' || record.status === 'superseded'))
        .catch(() => []);
    for (const page of pages) {
        // Generated pages are managed by the API reference generator (or any future
        // generator that uses the same `lifecycle: generated` frontmatter convention).
        // Their source of truth lives outside the wiki — humans don't review them, and
        // surfacing lint findings on them in the maintenance inbox is noise. Skip every
        // per-page rule for these.
        if (page.metadata?.lifecycle === 'generated') {
            continue;
        }
        const content = await readWikiPage(page.slug);
        // missing-h1: structural minimum for any reviewable page.
        if (!hasH1(content)) {
            findings.push({
                rule: 'missing-h1',
                slug: page.slug,
                path: page.path,
                message: 'Page is missing a top-level H1 heading.'
            });
        }
        // missing-summary: a prose paragraph must follow the H1.
        if (!hasSummaryParagraph(content)) {
            findings.push({
                rule: 'missing-summary',
                slug: page.slug,
                path: page.path,
                message: 'Page is missing a short summary paragraph after its H1.'
            });
        }
        // orphan-page: nothing in the index or another page links here.
        if ((inboundLinks.get(page.slug) ?? 0) === 0) {
            findings.push({
                rule: 'orphan-page',
                slug: page.slug,
                path: page.path,
                message: 'Page is not linked from the project index or another wiki page.'
            });
        }
        // Claim-level rules: a claim with no sources is unsupported; a claim whose
        // status isn't 'current' is stale. A single claim can trigger both.
        for (const claim of extractWikiClaims(page.slug, content, pageByPath)) {
            if (claim.sources.length === 0) {
                findings.push({
                    rule: 'unsupported-claim',
                    slug: page.slug,
                    path: page.path,
                    message: `Claim is missing supporting sources: ${claim.text}`
                });
            }
            if (claim.status === 'current') {
                continue;
            }
            findings.push({
                rule: 'stale-claim',
                slug: page.slug,
                path: page.path,
                message: `Claim is marked ${claim.status}: ${claim.text}`
            });
        }
        // Page drift: only check pages that aren't the project-log itself (which trivially mentions every other page).
        // Skip pages the operator has snoozed — these are findings already acknowledged as noise.
        if (page.slug !== 'project-log' && projectLogContent && !snoozedPageDrifts.has(page.slug)) {
            const drift = detectPageDrift(content, page.slug, projectLogContent);
            if (drift) {
                findings.push({
                    rule: 'page-drift',
                    slug: page.slug,
                    path: page.path,
                    message: buildPageDriftMessage(drift)
                });
            }
        }
        // contradicts-shipped-memory: catch sections that claim X doesn't exist while shipped
        // memories say it does. Skipped on the project-log itself (chronological log of changes,
        // not a claims surface) — same exclusion as page-drift, for the same reason.
        if (page.slug !== 'project-log' && activeMemoriesForContradictionCheck.length > 0) {
            const contradictions = detectContradictsShippedMemory(content, activeMemoriesForContradictionCheck, projectLogContent);
            for (const signal of contradictions) {
                findings.push({
                    rule: 'contradicts-shipped-memory',
                    slug: page.slug,
                    path: page.path,
                    message: buildContradictsShippedMemoryMessage(signal)
                });
            }
        }
    }
    // Guidance-file rules. A missing/unreadable file lints as empty content.
    for (const guidance of guidanceFiles) {
        const content = await fs.readFile(path.join(repoRoot, guidance.path), 'utf8').catch(() => '');
        const lineCount = countLines(content);
        if (lineCount > maxGuidanceLineCount) {
            findings.push({
                rule: 'oversized-guidance',
                slug: guidance.path,
                path: guidance.path,
                message: `Guidance file exceeds ${maxGuidanceLineCount} lines: ${guidance.path} (${lineCount} lines).`
            });
        }
        for (const brokenLink of findBrokenGuidanceLinks(content, guidance.path)) {
            findings.push({
                rule: 'stale-guidance-reference',
                slug: guidance.path,
                path: guidance.path,
                message: `Guidance file links to missing markdown: ${brokenLink}`
            });
        }
        // Skills are exempt from routing — they're loaded on demand, not navigated to.
        if (guidance.kind !== 'skill' && !hasGuidanceRoute(content, guidance.path)) {
            findings.push({
                rule: 'unrouted-guidance',
                slug: guidance.path,
                path: guidance.path,
                message: 'Guidance file should link to at least one canonical local docs page.'
            });
        }
    }
    // dormant-skill: a skill file with zero inbound links from docs or guidance.
    const guidanceInboundLinks = await collectMarkdownInboundLinks(guidanceFiles, pages);
    for (const guidance of guidanceFiles.filter((candidate) => candidate.kind === 'skill')) {
        if ((guidanceInboundLinks.get(guidance.path) ?? 0) > 0) {
            continue;
        }
        findings.push({
            rule: 'dormant-skill',
            slug: guidance.path,
            path: guidance.path,
            message: 'Skill file is not linked from project docs or active guidance files.'
        });
    }
    // duplicate-guidance: one finding per member so each file surfaces the group.
    for (const duplicateGroup of await findDuplicateGuidanceGroups()) {
        const joinedPaths = duplicateGroup.map((guidance) => guidance.path).sort().join(', ');
        for (const guidance of duplicateGroup) {
            findings.push({
                rule: 'duplicate-guidance',
                slug: guidance.path,
                path: guidance.path,
                message: `Guidance content duplicates: ${joinedPaths}`
            });
        }
    }
    // conflicting-guidance: likewise one finding per conflicting file.
    for (const conflict of await findConflictingGuidanceRules()) {
        const joinedPaths = conflict.paths.join(', ');
        for (const guidancePath of conflict.paths) {
            findings.push({
                rule: 'conflicting-guidance',
                slug: guidancePath,
                path: guidancePath,
                message: `Guidance conflicts on "${conflict.rule}": ${joinedPaths}`
            });
        }
    }
    return findings.sort((a, b) => a.slug.localeCompare(b.slug) || a.rule.localeCompare(b.rule));
}
|
|
816
|
+
// Convenience wrapper: rebuild the current search index and run one query
// against it.
export async function searchWikiPages(query) {
    return searchWikiIndex(await buildCurrentWikiSearchIndex(), query);
}
|
|
820
|
+
// Build the wiki_context payload for a query: ranked pages, session handoffs,
// project memories, skills, source-backed claims, guidance files, lint
// findings, and a one-string `briefing` that summarizes all of it. Results are
// cached per (query, options) and the cache is invalidated on wiki/memory
// mutation elsewhere in this module. Recognized options: maxPages,
// maxLogEntries, maxSkills, includeLint, relatedFiles, languages, frameworks,
// maxOmittedPageReasons, maxOmittedReasonChars, maxHandoffTextChars,
// maxMemoryTextChars, maxSkillTextChars.
export async function buildWikiContext(query, options = {}) {
    // Serve from cache when an identical query+options briefing is still valid.
    const cached = getCachedWikiContext(query, options);
    if (cached) {
        return cached;
    }
    // Clamp limits to sane floors (and a hard 20-skill ceiling).
    const maxPages = Math.max(1, options.maxPages ?? defaultContextPageLimit);
    const maxLogEntries = Math.max(0, options.maxLogEntries ?? defaultLogEntryLimit);
    const maxSkills = Math.max(1, Math.min(options.maxSkills ?? 3, 20));
    const index = await buildCurrentWikiSearchIndex();
    const queryTerms = tokenizeSearchQuery(query);
    const searchResults = searchWikiIndex(index, query);
    // No match at all: fall back to default briefing pages instead of nothing.
    const rankedResults = searchResults.length > 0 ? searchResults : fallbackSearchResults(index);
    const selectedResults = rankedResults.slice(0, maxPages);
    // Cap the omittedPageReasons payload: 96+ omitted entries with full reason strings
    // can dominate the wiki_context payload (single largest contributor at session start).
    // The full omitted count is still reported via `omittedPages`; the reasons here are
    // a triage hint, not a full audit trail. Operators who want more can wiki_search.
    const maxOmittedPageReasons = Math.max(0, options.maxOmittedPageReasons ?? 12);
    const maxOmittedReasonChars = Math.max(20, options.maxOmittedReasonChars ?? 80);
    const omittedPageReasons = rankedResults
        .slice(maxPages, maxPages + maxOmittedPageReasons)
        .map((result) => ({
            slug: result.slug,
            score: result.score,
            reason: truncateForBriefing(result.reasons.join('; '), maxOmittedReasonChars)
        }));
    const selectedPages = selectedResults.map((result) => searchResultToContextPage(result));
    // Memory Trails: page→query edges (shadow mode for the bonus, active for reinforcement).
    // Reinforcement runs unconditionally so edges accrue from real usage. Bonus is surfaced
    // as a `[shadow] page recall trail: ...` reason on each page that has accumulated edges,
    // but is NOT added to the score yet — same kill-switch principle as the bipartite
    // projection shadow mode. Watch the recall benchmark before promoting to active ranking.
    const pageTrailLookup = await loadMemoryTrailBonusLookup('page', query).catch(() => () => undefined);
    for (const page of selectedPages) {
        const bonus = pageTrailLookup(page.slug);
        if (bonus) {
            const trailReason = `[shadow] page recall trail: ${buildMemoryTrailReason(bonus)} (not yet applied to ranking)`;
            page.reason = page.reason ? `${page.reason}; ${trailReason}` : trailReason;
        }
    }
    // Best-effort reinforcement; a write failure never breaks the briefing.
    if (selectedPages.length > 0) {
        await reinforceQueryEdges('page', selectedPages.map((page) => page.slug), query).catch(() => undefined);
    }
    const recentLogEntries = maxLogEntries > 0 ? await listRecentProjectLogEntries(maxLogEntries) : [];
    // Lint runs unless explicitly disabled; findings double as "context risk".
    const findings = options.includeLint === false ? [] : await lintWikiPages();
    const handoffs = await recallProjectHandoffs({
        relatedPages: selectedPages.map((page) => page.slug),
        maxItems: Math.max(1, Math.min(maxPages, 2))
    });
    // Memories exclude handoff-kind records and anything already surfaced above.
    const memories = (await recallProjectMemories(query, {
        relatedPages: selectedPages.map((page) => page.slug),
        maxItems: Math.max(1, Math.min(maxPages, 5))
    })).filter((memory) => memory.kind !== 'handoff' && !handoffs.some((handoff) => handoff.id === memory.id));
    const skills = await recallProjectSkills({
        query,
        relatedFiles: options.relatedFiles,
        languages: options.languages,
        frameworks: options.frameworks,
        maxItems: maxSkills
    });
    // Claims come only from the selected pages, ranked against the query terms,
    // capped at twice the page budget.
    const claims = rankContextClaims(selectedResults.flatMap((result) => index.pages.find((document) => document.page.slug === result.slug)?.claims ?? []), queryTerms).slice(0, maxPages * 2);
    const guidanceFiles = await listProjectGuidanceFiles();
    const openQuestions = buildOpenQuestions(claims, findings);
    // Brain-faithfulness roadmap B5: surface the unprocessed memory backlog so the
    // briefing tells the operator/agent what is sitting in the inbox waiting for
    // triage. Lightweight — counts only, no findings list.
    const memoryBacklog = await summarizeMemoryBacklog().catch(() => ({
        promotionReady: 0,
        skillPromotionReady: 0,
        staleUnsupported: 0,
        total: 0
    }));
    // Cap handoff/memory/skill body text in the briefing payload. The recall functions
    // return full records; the briefing is meant to be a *briefing*, not a memory dump.
    // Full bodies remain available via memory_recall, wiki_skill_load, and (for handoffs)
    // a memory_recall call by id. Truncated records still carry every other field — ids,
    // tags, sources, relatedFiles, recallCount — so the agent knows exactly what to fetch
    // for a deeper read.
    const maxHandoffTextChars = Math.max(120, options.maxHandoffTextChars ?? 1200);
    const maxMemoryTextChars = Math.max(120, options.maxMemoryTextChars ?? 600);
    const maxSkillTextChars = Math.max(120, options.maxSkillTextChars ?? 800);
    const trimmedHandoffs = handoffs.map((handoff) => ({
        ...handoff,
        text: truncateForBriefing(handoff.text, maxHandoffTextChars)
    }));
    const trimmedMemories = memories.map((memory) => ({
        ...memory,
        text: truncateForBriefing(memory.text, maxMemoryTextChars)
    }));
    const trimmedSkills = skills.map((skill) => ({
        ...skill,
        text: truncateForBriefing(skill.text, maxSkillTextChars)
    }));
    const result = {
        query,
        briefing: buildContextBriefing(selectedPages, trimmedHandoffs, trimmedMemories, trimmedSkills, claims, guidanceFiles, recentLogEntries, findings, omittedPageReasons, memoryBacklog),
        readFirst: selectedPages.map((page) => page.slug),
        handoffs: trimmedHandoffs,
        pages: selectedPages,
        memories: trimmedMemories,
        skills: trimmedSkills,
        claims,
        guidanceFiles,
        omittedPages: Math.max(rankedResults.length - maxPages, 0),
        omittedPageReasons,
        recentLogEntries,
        findings,
        openQuestions,
        memoryBacklog
    };
    setCachedWikiContext(query, options, result);
    return result;
}
|
|
933
|
+
// Produce a graph view of the wiki: one node per page carrying link topology
// from the search index plus claim counts for staleness triage. Pages absent
// from the graph map get an empty topology so the node set always matches the
// page set.
export async function buildWikiGraphSnapshot() {
    const index = await buildCurrentWikiSearchIndex();
    const nodes = [];
    for (const { page, claims } of index.pages) {
        const topology = index.graph.get(page.slug) ?? {
            slug: page.slug,
            inboundLinks: 0,
            outgoingLinks: [],
            relatedPages: []
        };
        let staleClaimCount = 0;
        for (const claim of claims) {
            if (claim.status !== 'current') {
                staleClaimCount += 1;
            }
        }
        nodes.push({
            ...topology,
            title: page.title,
            path: page.path,
            claimCount: claims.length,
            staleClaimCount
        });
    }
    nodes.sort((left, right) => left.slug.localeCompare(right.slug));
    return { pages: nodes.length, nodes };
}
|
|
955
|
+
// Rebuild the in-memory search index from the wiki on disk. Page bodies are
// read in parallel; docs/index.md feeds inbound-link analysis and is optional
// (missing file degrades to empty content).
async function buildCurrentWikiSearchIndex() {
    const pages = await listWikiPages();
    const pageByPath = new Map(pages.map((page) => [page.path, page.slug]));
    const loadDocument = async (page) => {
        const content = await readWikiPage(page.slug);
        const claims = extractWikiClaims(page.slug, content, pageByPath);
        return { page, content, claims };
    };
    const documents = await Promise.all(pages.map(loadDocument));
    const indexContent = await fs.readFile(path.join(docsRoot, 'index.md'), 'utf8').catch(() => '');
    return buildWikiSearchIndex({ pages: documents, indexContent });
}
|
|
969
|
+
// Count, per wiki slug, how many markdown links point at it from either
// docs/index.md or any wiki page. Every known slug starts at zero so orphan
// detection can distinguish "no links" from "unknown page".
async function collectInboundWikiLinks(pages) {
    const counts = new Map();
    const pageByPath = new Map();
    for (const page of pages) {
        counts.set(page.slug, 0);
        pageByPath.set(page.path, page.slug);
    }
    const indexSource = {
        path: 'docs/index.md',
        content: await fs.readFile(path.join(docsRoot, 'index.md'), 'utf8').catch(() => '')
    };
    const pageSources = await Promise.all(pages.map(async (page) => ({ path: page.path, content: await readWikiPage(page.slug) })));
    for (const source of [indexSource, ...pageSources]) {
        const sourceDir = path.posix.dirname(source.path);
        for (const link of extractMarkdownLinks(source.content)) {
            const linkedSlug = resolveWikiLinkSlug(link, sourceDir, pageByPath);
            if (linkedSlug) {
                counts.set(linkedSlug, (counts.get(linkedSlug) ?? 0) + 1);
            }
        }
    }
    return counts;
}
|
|
988
|
+
// True when any line is a top-level heading: `#`, whitespace, then content.
function hasH1(content) {
    return content.match(/^#\s+\S+/m) !== null;
}
|
|
991
|
+
// True when the first non-blank line after the H1 is plain prose — not another
// heading, a table row, a bullet item, or a numbered list item.
function hasSummaryParagraph(content) {
    const lines = content.split(/\r?\n/);
    const h1Index = lines.findIndex((line) => /^#\s+\S+/.test(line));
    if (h1Index < 0) {
        return false;
    }
    for (let i = h1Index + 1; i < lines.length; i += 1) {
        const candidate = lines[i].trim();
        if (candidate === '') {
            continue;
        }
        if (candidate.startsWith('#')) {
            return false;
        }
        const isTableRow = candidate.startsWith('|');
        const isBullet = candidate.startsWith('- ');
        const isNumberedItem = /^\d+\.\s/.test(candidate);
        return !(isTableRow || isBullet || isNumberedItem);
    }
    return false;
}
|
|
1009
|
+
// Return the first plain prose line after the H1 (or from the top when no H1
// exists), skipping blanks, headings, table rows, bullets and numbered items.
// Returns '' when no such line exists.
function extractSummaryParagraph(content) {
    const lines = content.split(/\r?\n/);
    const h1Index = lines.findIndex((line) => /^#\s+\S+/.test(line));
    const start = h1Index === -1 ? 0 : h1Index + 1;
    for (let i = start; i < lines.length; i += 1) {
        const candidate = lines[i].trim();
        if (candidate === '') {
            continue;
        }
        const structural = candidate.startsWith('#') || candidate.startsWith('|') || candidate.startsWith('- ') || /^\d+\.\s/.test(candidate);
        if (!structural) {
            return candidate;
        }
    }
    return '';
}
|
|
1025
|
+
// Lowercase the query, split on non-alphanumeric runs, and keep each unique
// token of length >= 2 that is not a context stop word.
function tokenizeQuery(query) {
    const seen = new Set();
    for (const fragment of query.toLowerCase().split(/[^a-z0-9]+/i)) {
        const token = fragment.trim();
        if (token.length >= 2 && !contextStopTerms.has(token)) {
            seen.add(token);
        }
    }
    return Array.from(seen);
}
|
|
1032
|
+
// Score one wiki page against the tokenized query for context selection.
// Per-term weights: title hit 6, slug hit 5 (only when the title missed),
// any content mention a further 2 — so one term contributes at most 8.
// Pages with a positive score (or an empty query) also earn up to 3 points
// from inbound-link popularity. Returns the page extended with score, summary,
// a human-readable reason string, and the evidence used to rank it.
function scoreContextPage(page, content, queryTerms, inboundLinks, pageByPath) {
    const summary = extractSummaryParagraph(content) || page.title;
    const title = page.title.toLowerCase();
    const slug = page.slug.toLowerCase();
    const haystack = content.toLowerCase();
    // Sets dedupe reasons/terms; iteration keeps insertion (query-term) order.
    const reasons = new Set();
    const matchedTerms = new Set();
    let score = 0;
    for (const term of queryTerms) {
        if (title.includes(term)) {
            score += 6;
            reasons.add(`title matches "${term}"`);
            matchedTerms.add(term);
        }
        else if (slug.includes(term)) {
            score += 5;
            reasons.add(`slug matches "${term}"`);
            matchedTerms.add(term);
        }
        // A content mention stacks on top of a title/slug hit for the same term.
        if (haystack.includes(term)) {
            score += 2;
            reasons.add(`content mentions "${term}"`);
            matchedTerms.add(term);
        }
    }
    // Nudge the project log upward when the query hints at recency/change topics.
    if (page.slug === 'project-log' && queryTerms.some((term) => projectLogHintTerms.has(term))) {
        score += 4;
        reasons.add('project log helps with recent changes');
    }
    // Popularity bonus only for pages that already matched something (or when
    // there is no query at all) so unrelated hub pages don't outrank matches.
    if (score > 0 || queryTerms.length === 0) {
        const inboundCount = inboundLinks.get(page.slug) ?? 0;
        if (inboundCount > 0) {
            score += Math.min(inboundCount, 3);
            reasons.add(inboundCount > 1 ? `${inboundCount} inbound links` : 'linked from the wiki');
        }
    }
    const inboundCount = inboundLinks.get(page.slug) ?? 0;
    const relatedPages = extractRelatedWikiSlugs(content, page.path, pageByPath).slice(0, 3);
    return {
        ...page,
        score,
        summary,
        // At most three reasons keep the string short for the briefing payload.
        reason: Array.from(reasons).slice(0, 3).join('; ') || 'fallback page for broad project briefing',
        evidence: {
            matchedTerms: Array.from(matchedTerms),
            inboundLinks: inboundCount,
            relatedPages
        }
    };
}
|
|
1082
|
+
// Score a page for the no-query / no-match fallback briefing: up to 3 points
// of inbound-link popularity, plus a fixed boost (and a dedicated reason) for
// the two default briefing pages, 'architecture' and 'project-log'.
function fallbackContextPage(page, inboundLinks) {
    const inbound = inboundLinks.get(page.slug) ?? 0;
    let score = Math.min(inbound, 3);
    let reason;
    if (inbound > 0) {
        reason = `fallback page with ${inbound} inbound links`;
    } else {
        reason = 'fallback page for broad project briefing';
    }
    switch (page.slug) {
        case 'architecture':
            score += 4;
            reason = 'default architecture briefing page';
            break;
        case 'project-log':
            score += 3;
            reason = 'default recent changes briefing page';
            break;
        default:
            break;
    }
    const evidence = { ...page.evidence, inboundLinks: inbound };
    return { ...page, score, reason, evidence };
}
|
|
1104
|
+
// Assemble the single-string briefing that heads the wiki_context payload.
// Each section contributes one sentence only when it has content; sentences
// are joined with single spaces. Singular/plural grammar is inlined per
// sentence so the text reads naturally at any count.
function buildContextBriefing(pages, handoffs, memories, skills, claims, guidanceFiles, recentLogEntries, findings, omittedPageReasons, memoryBacklog) {
    const lines = [];
    // Brain-faithfulness roadmap B5: surface unprocessed memory backlog so the agent
    // sees waiting triage work at every wiki_context call. Cache invalidates on any
    // memory mutation so the banner stays accurate; suppressed entirely when zero.
    if (memoryBacklog.total > 0) {
        const parts = [];
        if (memoryBacklog.promotionReady > 0) {
            parts.push(`${memoryBacklog.promotionReady} promotion-ready`);
        }
        if (memoryBacklog.skillPromotionReady > 0) {
            parts.push(`${memoryBacklog.skillPromotionReady} skill-promotion-ready`);
        }
        if (memoryBacklog.staleUnsupported > 0) {
            parts.push(`${memoryBacklog.staleUnsupported} stale-unsupported`);
        }
        lines.push(`Memory backlog: ${parts.join(', ')} memor${memoryBacklog.total === 1 ? 'y' : 'ies'} waiting in the inbox. Call wiki_maintenance_inbox to triage, or memory_review for the full findings list.`);
    }
    // Reading order plus a justification for the single best-ranked page.
    if (pages.length > 0) {
        const readFirst = pages.map((page) => page.slug).join(', ');
        lines.push(`Read first: ${readFirst}.`);
        lines.push(`Top page: ${pages[0]?.slug} because ${pages[0]?.reason}.`);
    }
    if (recentLogEntries.length > 0) {
        lines.push(`${recentLogEntries.length} recent project log entr${recentLogEntries.length === 1 ? 'y is' : 'ies are'} included.`);
    }
    if (handoffs.length > 0) {
        lines.push(`${handoffs.length} recent session handoff${handoffs.length === 1 ? ' is' : 's are'} included.`);
    }
    if (memories.length > 0) {
        lines.push(`${memories.length} project-local memor${memories.length === 1 ? 'y is' : 'ies are'} included.`);
    }
    if (skills.length > 0) {
        lines.push(`${skills.length} matching skill${skills.length === 1 ? '' : 's'} included; call wiki_skill_load(id) for full content.`);
    }
    if (claims.length > 0) {
        lines.push(`${claims.length} source-backed claim${claims.length === 1 ? ' is' : 's are'} included.`);
    }
    if (guidanceFiles.length > 0) {
        lines.push(`${guidanceFiles.length} project guidance file${guidanceFiles.length === 1 ? ' is' : 's are'} included.`);
    }
    // Show at most three omitted-page hints; the full count is in the payload.
    if (omittedPageReasons.length > 0) {
        const omittedSummary = omittedPageReasons
            .slice(0, 3)
            .map((page) => `${page.slug} (${page.reason})`)
            .join('; ');
        lines.push(`${omittedPageReasons.length} ranked page${omittedPageReasons.length === 1 ? ' was' : 's were'} omitted by the page budget: ${omittedSummary}.`);
    }
    // Always close with the lint posture, even when everything is clean.
    if (findings.length === 0) {
        lines.push('No current lint findings are blocking the briefing.');
    }
    else {
        lines.push(`${findings.length} lint finding${findings.length === 1 ? '' : 's'} should be treated as context risk.`);
    }
    return lines.join(' ');
}
|
|
1160
|
+
// Trim a body field for inclusion in the wiki_context briefing payload. Appends an
// ellipsis when truncated so the agent can tell at a glance that the full body lives
// elsewhere (call wiki_skill_load / memory_recall by id to fetch it).
function truncateForBriefing(text, maxChars) {
    // Nullish/empty text and anything already within budget pass through untouched.
    if (!text || text.length <= maxChars)
        return text;
    // Reserve one unit for the ellipsis (never cutting below one character).
    let cut = text.slice(0, Math.max(1, maxChars - 1));
    // slice() counts UTF-16 units, so it can split a surrogate pair and leave a
    // lone high surrogate (half an emoji) at the cut point; drop it if so.
    const lastUnit = cut.charCodeAt(cut.length - 1);
    if (lastUnit >= 0xd800 && lastUnit <= 0xdbff) {
        cut = cut.slice(0, -1);
    }
    return `${cut.trimEnd()}…`;
}
|
|
1168
|
+
// Enumerate project-level agent guidance documents — AGENTS.md, the Copilot
// instructions file, and the .github instruction/prompt/agent files plus
// skills directories — as a de-duplicated, path-sorted list.
export async function listProjectGuidanceFiles() {
    const byPath = new Map();
    const record = (guidance) => {
        if (guidance) {
            byPath.set(guidance.path, guidance);
        }
    };
    // Well-known single files checked directly.
    const singleFiles = [
        { relativePath: 'AGENTS.md', kind: 'agents' },
        { relativePath: '.github/copilot-instructions.md', kind: 'copilot-instructions' }
    ];
    // Directories scanned recursively for files matching each pattern.
    const scannedDirectories = [
        { relativeDir: '.github/instructions', kind: 'instruction', pattern: /\.instructions\.md$/i },
        { relativeDir: '.github/prompts', kind: 'prompt', pattern: /\.prompt\.md$/i },
        { relativeDir: '.github/agents', kind: 'agent', pattern: /\.agent\.md$/i },
        { relativeDir: 'skills', kind: 'skill', pattern: /SKILL\.md$/i }
    ];
    for (const { relativePath, kind } of singleFiles) {
        record(await readGuidanceFile(repoRoot, relativePath, kind));
    }
    for (const { relativeDir, kind, pattern } of scannedDirectories) {
        const found = await findGuidanceFiles(path.join(repoRoot, relativeDir), pattern, repoRoot);
        for (const relativePath of found) {
            record(await readGuidanceFile(repoRoot, relativePath, kind));
        }
    }
    return [...byPath.values()].sort((a, b) => a.path.localeCompare(b.path));
}
|
|
1196
|
+
// Read one guidance file and reduce it to its briefing record
// ({ path, kind, summary }), or undefined when the file is absent or
// persistently unreadable. `path` is normalized to forward slashes.
async function readGuidanceFile(repoRoot, relativePath, kind) {
    const absolutePath = path.join(repoRoot, relativePath);
    // Retry transient empty/error reads. Observed on ubuntu-latest CI: the same
    // guidance file (e.g. AGENTS.md, 2222 bytes, untouched on disk) intermittently
    // returns empty content from fs.readFile when many sequential reads happen
    // through the test suite, which silently drops the file from the guidance
    // listing and cascades into surprising downstream test failures (lint
    // findings missing, route-guidance proposals not generated, etc.).
    //
    // We treat this as a transient I/O hiccup and retry up to 3 times with brief
    // backoff. ENOENT (file genuinely doesn't exist) is the common
    // "skip silently" path so it short-circuits the retry. Any other error code
    // is surfaced via console.warn on the final retry so a real bug doesn't hide
    // behind the silent-failure semantic.
    let lastErr;
    for (let attempt = 0; attempt < 3; attempt++) {
        let content;
        try {
            content = await fs.readFile(absolutePath, 'utf8');
        }
        catch (err) {
            const e = err;
            if (e?.code === 'ENOENT') {
                // File doesn't exist — caller's contract is "return undefined" silently.
                return undefined;
            }
            // Non-ENOENT error: remember it for the final warning, then retry.
            lastErr = e;
        }
        // Truthiness check is deliberate: an empty-string read is treated the
        // same as a failed read and falls through to the retry/backoff path.
        if (content) {
            return {
                path: relativePath.replace(/\\/g, '/'),
                kind,
                summary: extractSummaryParagraph(content) || path.basename(relativePath)
            };
        }
        if (attempt < 2) {
            // Brief backoff before the next attempt (no delay after the last one).
            await new Promise((resolve) => setTimeout(resolve, 25));
        }
    }
    // All attempts exhausted: warn with the most specific diagnosis we have.
    if (lastErr) {
        // eslint-disable-next-line no-console
        console.warn(`[dendrite] readGuidanceFile: persistent error reading ${relativePath} (${lastErr.code ?? 'unknown'}): ${lastErr.message}. Returning undefined; downstream lint/proposals will not see this guidance file.`);
    }
    else {
        // eslint-disable-next-line no-console
        console.warn(`[dendrite] readGuidanceFile: ${relativePath} read returned empty content on 3 attempts. Returning undefined; downstream lint/proposals will not see this guidance file. If you can reproduce this on CI, please open an issue with the runner OS and Node version.`);
    }
    return undefined;
}
|
|
1245
|
+
// Recursively collect files under `directory` whose name matches `pattern`,
// returned as paths relative to `repoRoot`. A missing or unreadable directory
// contributes nothing (the readdir error is swallowed into an empty listing).
async function findGuidanceFiles(directory, pattern, repoRoot) {
    const found = [];
    const entries = await fs.readdir(directory, { withFileTypes: true }).catch(() => []);
    for (const entry of entries) {
        const entryPath = path.join(directory, entry.name);
        if (entry.isDirectory()) {
            found.push(...(await findGuidanceFiles(entryPath, pattern, repoRoot)));
        }
        else if (entry.isFile() && pattern.test(entry.name)) {
            found.push(path.relative(repoRoot, entryPath));
        }
    }
    return found;
}
|
|
1260
|
+
// Reduce the inbound-source map to simple per-target inbound link counts.
async function collectMarkdownInboundLinks(guidanceFiles, pages) {
    const bySource = await collectMarkdownInboundSources(guidanceFiles, pages);
    const counts = new Map();
    for (const [targetPath, sourcePaths] of bySource) {
        counts.set(targetPath, sourcePaths.length);
    }
    return counts;
}
|
|
1264
|
+
// For every guidance file, collect the sorted, de-duplicated list of markdown
// files (docs index/plan, wiki pages, and the guidance files themselves) that
// link to it. Returns Map<guidancePath, sourcePath[]>; guidance files with no
// inbound links map to an empty array.
//
// Fix: the previous version maintained a parallel Map of inbound-link counts
// that was incremented per link but never read (only its key set was used for
// membership). The dead counter is replaced by a plain Set of target paths.
async function collectMarkdownInboundSources(guidanceFiles, pages) {
    // Membership set: only links that land on a guidance file are recorded.
    const guidancePaths = new Set(guidanceFiles.map((guidance) => guidance.path));
    const inboundSources = new Map(guidanceFiles.map((guidance) => [guidance.path, []]));
    const sourceFiles = [
        'docs/index.md',
        'docs/project-plan.md',
        ...pages.map((page) => page.path),
        ...guidanceFiles.map((guidance) => guidance.path)
    ];
    for (const sourcePath of Array.from(new Set(sourceFiles)).sort()) {
        // Unreadable sources contribute no links rather than failing the scan.
        const content = await fs.readFile(path.join(repoRoot, sourcePath), 'utf8').catch(() => '');
        const sourceDir = path.posix.dirname(sourcePath);
        for (const link of extractMarkdownLinks(content)) {
            const targetPath = resolveMarkdownLinkPath(link, sourceDir);
            // Skip external/absolute links, self-links, and non-guidance targets.
            if (!targetPath || targetPath === sourcePath || !guidancePaths.has(targetPath)) {
                continue;
            }
            inboundSources.get(targetPath)?.push(sourcePath);
        }
    }
    return new Map([...inboundSources.entries()].map(([targetPath, sourcePaths]) => [
        targetPath,
        Array.from(new Set(sourcePaths)).sort((left, right) => left.localeCompare(right))
    ]));
}
|
|
1290
|
+
// Group guidance files whose normalized content fingerprints are identical;
// only groups with two or more members (actual duplicates) are returned, each
// group path-sorted.
async function findDuplicateGuidanceGroups() {
    const guidanceFiles = await listProjectGuidanceFiles();
    const byFingerprint = new Map();
    for (const guidance of guidanceFiles) {
        const content = await fs.readFile(path.join(repoRoot, guidance.path), 'utf8').catch(() => '');
        const fingerprint = buildGuidanceFingerprint(content);
        if (!fingerprint) {
            // Empty fingerprint (blank file) never counts as a duplicate.
            continue;
        }
        if (!byFingerprint.has(fingerprint)) {
            byFingerprint.set(fingerprint, []);
        }
        byFingerprint.get(fingerprint).push(guidance);
    }
    const duplicates = [];
    for (const group of byFingerprint.values()) {
        if (group.length > 1) {
            duplicates.push(group.sort((left, right) => left.path.localeCompare(right.path)));
        }
    }
    return duplicates;
}
|
|
1307
|
+
// Find directive rules that appear with both positive ("always X") and
// negative ("never X") polarity somewhere across the guidance files.
async function findConflictingGuidanceRules() {
    const guidanceFiles = await listProjectGuidanceFiles();
    const rulePolarities = new Map();
    for (const guidance of guidanceFiles) {
        const content = await fs.readFile(path.join(repoRoot, guidance.path), 'utf8').catch(() => '');
        for (const directive of extractGuidanceDirectives(content)) {
            let entry = rulePolarities.get(directive.rule);
            if (!entry) {
                entry = { positive: new Set(), negative: new Set() };
                rulePolarities.set(directive.rule, entry);
            }
            entry[directive.polarity].add(guidance.path);
        }
    }
    const conflicts = [];
    for (const [rule, polarities] of rulePolarities.entries()) {
        // A conflict requires at least one file on each side of the rule.
        if (polarities.positive.size === 0 || polarities.negative.size === 0) {
            continue;
        }
        const paths = Array.from(new Set([...polarities.positive, ...polarities.negative]));
        conflicts.push({ rule, paths: paths.sort((left, right) => left.localeCompare(right)) });
    }
    return conflicts;
}
|
|
1325
|
+
// Parse every directive out of a guidance document, de-duplicated by
// polarity + rule (insertion order of first occurrence preserved).
function extractGuidanceDirectives(content) {
    const unique = new Map();
    for (const line of content.split(/\r?\n/)) {
        const directive = parseGuidanceDirective(line);
        if (directive) {
            unique.set(`${directive.polarity}:${directive.rule}`, directive);
        }
    }
    return Array.from(unique.values());
}
|
|
1332
|
+
// Classify one guidance line as a positive ("always/must/should/prefer …") or
// negative ("do not/don't/never/avoid/must not/should not …") directive.
// Headings, blank lines, and non-imperative prose yield undefined. The rule
// text is lowercased so polarity conflicts can be matched across files.
function parseGuidanceDirective(line) {
    const trimmed = line.trim();
    if (!trimmed || trimmed.startsWith('#')) {
        return undefined;
    }
    // Strip bullet/number prefixes and trailing punctuation, collapse whitespace.
    const normalized = trimmed
        .replace(/^[-*]\s+/, '')
        .replace(/^\d+\.\s+/, '')
        .replace(/[.?!]+$/, '')
        .replace(/\s+/g, ' ')
        .trim();
    // Negative forms are checked first so "must not …" is not captured by "must …".
    const negative = /^(do not|don't|never|avoid|must not|should not)\s+(.+)$/i.exec(normalized);
    if (negative) {
        return { polarity: 'negative', rule: negative[2].trim().toLowerCase() };
    }
    const positive = /^(always|must|should|prefer)\s+(.+)$/i.exec(normalized);
    if (positive) {
        return { polarity: 'positive', rule: positive[2].trim().toLowerCase() };
    }
    return undefined;
}
|
|
1353
|
+
// Build a whitespace- and case-insensitive fingerprint of a guidance document
// for duplicate detection. Blank lines and a leading H1 title are dropped, and
// markdown link targets are normalized to `(link)` so files differing only in
// link URLs still compare equal.
//
// Fix: the link replacer previously captured the link *target* and then
// re-matched the line at the match offset just to recover the label; capturing
// the label directly is equivalent and removes the redundant re-parse.
function buildGuidanceFingerprint(content) {
    const normalizedLines = content
        .split(/\r?\n/)
        .map((line) => line.trim())
        // Drop blank lines, plus the document title when it is the very first line.
        .filter((line, index) => line.length > 0 && !(index === 0 && /^#\s+/.test(line)))
        // Normalize [Label](target) to [Label](link).
        .map((line) => line.replace(/\[([^\]]+)\]\([^)]+\)/g, (_match, label) => `[${label}](link)`))
        .map((line) => line.replace(/\s+/g, ' '));
    return normalizedLines.join('\n').toLowerCase();
}
|
|
1365
|
+
// List the markdown link targets in a guidance file that do not resolve to an
// existing file, sorted and de-duplicated.
function findBrokenGuidanceLinks(content, guidancePath) {
    const sourceDir = path.posix.dirname(guidancePath);
    const broken = new Set();
    for (const link of extractMarkdownLinks(content)) {
        if (!guidanceLinkExists(link, sourceDir)) {
            broken.add(link);
        }
    }
    return Array.from(broken).sort();
}
|
|
1369
|
+
// True when the guidance file contains at least one resolvable link that
// routes into docs/.
function hasGuidanceRoute(content, guidancePath) {
    const sourceDir = path.posix.dirname(guidancePath);
    for (const link of extractMarkdownLinks(content)) {
        if (guidanceLinkExists(link, sourceDir) && isDocsRoute(link, sourceDir)) {
            return true;
        }
    }
    return false;
}
|
|
1373
|
+
// Resolve every link in a guidance file to a repo-relative path and keep only
// the docs/ targets that exist on disk, sorted and de-duplicated.
function listGuidanceRouteTargets(content, guidancePath) {
    const sourceDir = path.posix.dirname(guidancePath);
    const targets = new Set();
    for (const link of extractMarkdownLinks(content)) {
        const targetPath = resolveMarkdownLinkPath(link, sourceDir);
        if (targetPath && targetPath.startsWith('docs/') && requirePathExists(path.join(repoRoot, targetPath))) {
            targets.add(targetPath);
        }
    }
    return Array.from(targets).sort((left, right) => left.localeCompare(right));
}
|
|
1380
|
+
// Resolve a relative markdown link against its source directory into a
// normalized repo-relative POSIX path. Scheme-prefixed (http:, mailto:, …)
// and absolute links are not repo files and resolve to undefined.
function resolveMarkdownLinkPath(link, sourceDir) {
    const hasScheme = /^[a-z]+:/i.test(link);
    if (hasScheme || path.isAbsolute(link)) {
        return undefined;
    }
    const posixLink = link.replace(/\\/g, '/');
    return path.posix.normalize(path.posix.join(sourceDir, posixLink));
}
|
|
1386
|
+
// A link "exists" when it is external/absolute (not checkable locally, so
// assumed fine) or when its resolved repo-relative target is a real file.
function guidanceLinkExists(link, sourceDir) {
    const targetPath = resolveMarkdownLinkPath(link, sourceDir);
    if (targetPath === undefined) {
        return true;
    }
    return requirePathExists(path.join(repoRoot, targetPath));
}
|
|
1394
|
+
// True when the link resolves to a repo-relative path under docs/.
function isDocsRoute(link, sourceDir) {
    const resolved = resolveMarkdownLinkPath(link, sourceDir);
    return resolved !== undefined && resolved.startsWith('docs/');
}
|
|
1401
|
+
// True when `filePath` exists and is a regular file. Directories, missing
// paths, and any stat error all report false.
function requirePathExists(filePath) {
    try {
        const stats = statSync(filePath);
        return stats.isFile();
    }
    catch {
        return false;
    }
}
|
|
1409
|
+
// Count the lines in `content`; empty or nullish content has zero lines.
function countLines(content) {
    return content ? content.split(/\r?\n/).length : 0;
}
|
|
1415
|
+
// Parse the "## Claims" section of a wiki page into structured claim records.
// Fenced code blocks are stripped first so claim examples inside code are not
// picked up as real claims.
export function extractWikiClaims(pageSlug, content, pageByPath) {
    const pagePath = `docs/wiki/${pageSlug}.md`;
    const claimSection = stripFencedCodeBlocks(extractMarkdownSection(content, 'Claims'));
    const claims = [];
    for (const rawLine of claimSection.split(/\r?\n/)) {
        const line = rawLine.trim();
        if (!line.startsWith('- [')) {
            continue;
        }
        const claim = parseClaimLine(line, pageSlug, pagePath, pageByPath);
        if (claim !== undefined) {
            claims.push(claim);
        }
    }
    return claims;
}
|
|
1425
|
+
// Return the body of the `## <heading>` section of a markdown document: the
// lines after the heading up to (not including) the next H2. Fence tracking
// ensures headings inside ``` / ~~~ code blocks neither match as the target
// heading nor terminate the section. Returns '' when the heading is absent.
function extractMarkdownSection(content, heading) {
    // A fence opens with ``` or ~~~ and only closes on the same marker kind.
    const fenceChar = (text) => (text.startsWith('```') ? '`' : text.startsWith('~~~') ? '~' : undefined);
    const allLines = content.split(/\r?\n/);
    let start = -1;
    let fence;
    for (let i = 0; i < allLines.length; i++) {
        const stripped = allLines[i].trim();
        const marker = fenceChar(stripped);
        if (marker) {
            if (!fence) {
                fence = marker;
            }
            else if (fence === marker) {
                fence = undefined;
            }
            continue;
        }
        if (!fence && stripped === `## ${heading}`) {
            start = i;
            break;
        }
    }
    if (start === -1) {
        return '';
    }
    const collected = [];
    fence = undefined;
    for (let i = start + 1; i < allLines.length; i++) {
        const stripped = allLines[i].trim();
        const marker = fenceChar(stripped);
        // A new H2 outside any fence ends the section.
        if (!fence && /^##\s+/.test(stripped)) {
            break;
        }
        collected.push(allLines[i]);
        if (marker) {
            if (!fence) {
                fence = marker;
            }
            else if (fence === marker) {
                fence = undefined;
            }
        }
    }
    return collected.join('\n');
}
|
|
1469
|
+
// Remove fenced code blocks (``` or ~~~) from markdown, including the fence
// lines themselves. A fence only closes on a matching marker of the same
// kind, so a ~~~ line inside a ``` block stays part of that block.
function stripFencedCodeBlocks(content) {
    const kept = [];
    let openFence;
    for (const line of content.split(/\r?\n/)) {
        const trimmed = line.trim();
        const marker = trimmed.startsWith('```') ? '`' : trimmed.startsWith('~~~') ? '~' : undefined;
        if (marker !== undefined) {
            if (openFence === undefined) {
                openFence = marker;
            }
            else if (openFence === marker) {
                openFence = undefined;
            }
        }
        else if (openFence === undefined) {
            kept.push(line);
        }
    }
    return kept.join('\n');
}
|
|
1491
|
+
// Parse one "- [status] text Sources: …" bullet into a claim record, or
// undefined when the line is not a recognized claim bullet.
function parseClaimLine(line, pageSlug, pagePath, pageByPath) {
    const match = line.match(/^- \[(current|needs-review|superseded|unknown)\]\s+(.+)$/i);
    if (match === null) {
        return undefined;
    }
    const [, rawStatus, rawBody] = match;
    const body = rawBody.trim();
    // The claim text excludes the trailing "Sources:" segment.
    const text = body.replace(/\s*Sources:\s*.+$/i, '').trim();
    return {
        pageSlug,
        text,
        status: rawStatus.toLowerCase(),
        sources: extractClaimSources(body, pagePath, pageByPath)
    };
}
|
|
1505
|
+
// Pull a claim's source references out of its trailing "Sources:" segment.
// Markdown links that resolve to known wiki pages become wiki sources; the
// remaining comma-separated entries are parsed as typed sources
// (file:/command:/decision:). Results are de-duplicated by kind + slug.
function extractClaimSources(body, pagePath, pageByPath) {
    const sourceDir = path.posix.dirname(pagePath);
    const sourceText = body.match(/\sSources:\s*(.+)$/i)?.[1]?.trim() ?? '';
    const byKey = new Map();
    for (const linkMatch of sourceText.matchAll(/\[([^\]]+)\]\(([^)]+)\)/g)) {
        const label = linkMatch[1]?.trim() ?? '';
        const slug = resolveWikiLinkSlug(linkMatch[2]?.trim() ?? '', sourceDir, pageByPath);
        if (slug) {
            byKey.set(`wiki:${slug}`, { kind: 'wiki', label, slug });
        }
    }
    // Strip the markdown links before splitting so their commas don't confuse
    // the typed-source parsing.
    const withoutLinks = sourceText.replace(/\[[^\]]+\]\([^)]+\)/g, '');
    for (const segment of withoutLinks.split(',')) {
        const typedSource = parseTypedClaimSource(segment);
        if (typedSource) {
            byKey.set(`${typedSource.kind}:${typedSource.slug}`, typedSource);
        }
    }
    return Array.from(byKey.values());
}
|
|
1524
|
+
// Parse a "kind: slug" claim-source entry where kind is file, command, or
// decision. Returns undefined for any other shape (including an empty slug).
function parseTypedClaimSource(value) {
    const match = /^(file|command|decision):\s*(.+)$/i.exec(value.trim());
    if (!match) {
        return undefined;
    }
    const slug = match[2].trim();
    if (slug === '') {
        return undefined;
    }
    // The label defaults to the slug; wiki sources carry their own labels.
    return { kind: match[1].toLowerCase(), label: slug, slug };
}
|
|
1540
|
+
// Order claims by descending relevance score; ties break deterministically by
// page slug, then by claim text. The input array is not mutated.
function rankContextClaims(claims, queryTerms) {
    const ranked = claims.slice();
    ranked.sort((left, right) => {
        const byScore = scoreClaim(right, queryTerms) - scoreClaim(left, queryTerms);
        if (byScore !== 0) {
            return byScore;
        }
        const bySlug = left.pageSlug.localeCompare(right.pageSlug);
        return bySlug !== 0 ? bySlug : left.text.localeCompare(right.text);
    });
    return ranked;
}
|
|
1549
|
+
// Relevance score for a claim: 3 points per supporting source, 2 per query
// term found in the slug+text, plus 1 when the claim is marked current.
function scoreClaim(claim, queryTerms) {
    const haystack = `${claim.pageSlug} ${claim.text}`.toLowerCase();
    let score = claim.sources.length * 3;
    if (claim.status === 'current') {
        score += 1;
    }
    for (const term of queryTerms) {
        score += haystack.includes(term) ? 2 : 0;
    }
    return score;
}
|
|
1562
|
+
// Turn weak claims (non-current status and/or missing sources) and
// guidance-related lint findings into actionable open questions for the
// briefing. Claim questions come first, then guidance findings, preserving
// input order.
function buildOpenQuestions(claims, findings) {
    const questions = [];
    for (const claim of claims) {
        const needsVerification = claim.status !== 'current';
        const lacksSources = claim.sources.length === 0;
        if (needsVerification && lacksSources) {
            questions.push(`Verify ${claim.pageSlug}: ${claim.text} (status: ${claim.status}). Add at least one supporting source.`);
        }
        else if (needsVerification) {
            questions.push(`Verify ${claim.pageSlug}: ${claim.text} (status: ${claim.status}). Review ${claim.sources.map((source) => source.slug).join(', ')}.`);
        }
        else if (lacksSources) {
            questions.push(`Add at least one supporting source for ${claim.pageSlug}: ${claim.text}.`);
        }
    }
    for (const finding of findings) {
        if (guidanceLintRules.has(finding.rule)) {
            questions.push(`Resolve ${finding.rule} in ${finding.path}: ${finding.message}`);
        }
    }
    return questions;
}
// Lint rules that concern agent guidance files (as opposed to wiki content).
const guidanceLintRules = new Set([
    'dormant-skill',
    'oversized-guidance',
    'duplicate-guidance',
    'stale-guidance-reference',
    'conflicting-guidance',
    'unrouted-guidance'
]);
|
|
1590
|
+
// Return up to `maxEntries` bullet entries from the project-log page, newest
// (last in the file) first. A missing or unreadable page yields an empty list.
async function listRecentProjectLogEntries(maxEntries) {
    const content = await fs.readFile(pagePathFromSlug('project-log'), 'utf8').catch(() => '');
    const bullets = content
        .split(/\r?\n/)
        .map((line) => line.trim())
        .filter((line) => line.startsWith('- '));
    return bullets.slice(-maxEntries).reverse();
}
|
|
1599
|
+
// Collect the unique wiki slugs that `content` links to, resolving each link
// relative to `sourcePath` through the page-path index.
function extractRelatedWikiSlugs(content, sourcePath, pageByPath) {
    const sourceDir = path.posix.dirname(sourcePath);
    const slugs = new Set();
    for (const link of extractMarkdownLinks(content)) {
        const slug = resolveWikiLinkSlug(link, sourceDir, pageByPath);
        if (slug) {
            slugs.add(slug);
        }
    }
    return Array.from(slugs);
}
|
|
1605
|
+
// Extract the .md link targets from markdown content, with any #fragment or
// ?query suffix stripped. Non-.md targets are dropped.
function extractMarkdownLinks(content) {
    const targets = [];
    for (const match of content.matchAll(/\[[^\]]+\]\(([^)]+)\)/g)) {
        const rawTarget = match[1]?.trim() ?? '';
        if (rawTarget === '') {
            continue;
        }
        const bareTarget = rawTarget.split('#')[0]?.split('?')[0]?.trim() ?? '';
        if (bareTarget.endsWith('.md')) {
            targets.push(bareTarget);
        }
    }
    return targets;
}
|
|
1611
|
+
// Map a relative markdown link onto a known wiki slug via the page-path
// index. External (scheme-prefixed) and absolute links — and links that do
// not resolve to a known page — yield undefined.
function resolveWikiLinkSlug(link, sourceDir, pageByPath) {
    if (path.isAbsolute(link) || /^[a-z]+:/i.test(link)) {
        return undefined;
    }
    const targetPath = path.posix.normalize(path.posix.join(sourceDir, link.replace(/\\/g, '/')));
    return pageByPath.get(targetPath);
}
|