@rarusoft/dendrite-wiki 0.1.0-alpha.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +79 -0
- package/dist/api-extractor/extract.js +269 -0
- package/dist/api-extractor/language-extractor.js +15 -0
- package/dist/api-extractor/python-extractor.js +358 -0
- package/dist/api-extractor/render.js +195 -0
- package/dist/api-extractor/tree-sitter-extractor.js +1079 -0
- package/dist/api-extractor/types.js +11 -0
- package/dist/api-extractor/typescript-extractor.js +50 -0
- package/dist/api-extractor/walk.js +178 -0
- package/dist/api-reference.js +438 -0
- package/dist/benchmark-events.js +129 -0
- package/dist/benchmark.js +270 -0
- package/dist/binder-export.js +381 -0
- package/dist/canonical-target.js +168 -0
- package/dist/chart-insert.js +377 -0
- package/dist/chart-prompts.js +414 -0
- package/dist/context-cache.js +98 -0
- package/dist/contradicts-shipped-memory.js +232 -0
- package/dist/diff-context.js +142 -0
- package/dist/doctor.js +220 -0
- package/dist/generated-docs.js +219 -0
- package/dist/i18n.js +71 -0
- package/dist/index.js +49 -0
- package/dist/librarian.js +255 -0
- package/dist/maintenance-actions.js +244 -0
- package/dist/maintenance-inbox.js +842 -0
- package/dist/maintenance-runner.js +62 -0
- package/dist/page-drift.js +225 -0
- package/dist/page-inbox.js +168 -0
- package/dist/report-export.js +339 -0
- package/dist/review-bridge.js +1386 -0
- package/dist/search-index.js +199 -0
- package/dist/store.js +1617 -0
- package/dist/telemetry-defaults.js +44 -0
- package/dist/telemetry-report.js +263 -0
- package/dist/telemetry.js +544 -0
- package/dist/wiki-synthesis.js +901 -0
- package/package.json +35 -0
- package/src/api-extractor/extract.ts +333 -0
- package/src/api-extractor/language-extractor.ts +37 -0
- package/src/api-extractor/python-extractor.ts +380 -0
- package/src/api-extractor/render.ts +267 -0
- package/src/api-extractor/tree-sitter-extractor.ts +1210 -0
- package/src/api-extractor/types.ts +41 -0
- package/src/api-extractor/typescript-extractor.ts +56 -0
- package/src/api-extractor/walk.ts +209 -0
- package/src/api-reference.ts +552 -0
- package/src/benchmark-events.ts +216 -0
- package/src/benchmark.ts +376 -0
- package/src/binder-export.ts +437 -0
- package/src/canonical-target.ts +192 -0
- package/src/chart-insert.ts +478 -0
- package/src/chart-prompts.ts +417 -0
- package/src/context-cache.ts +129 -0
- package/src/contradicts-shipped-memory.ts +311 -0
- package/src/diff-context.ts +187 -0
- package/src/doctor.ts +260 -0
- package/src/generated-docs.ts +316 -0
- package/src/i18n.ts +106 -0
- package/src/index.ts +59 -0
- package/src/librarian.ts +331 -0
- package/src/maintenance-actions.ts +314 -0
- package/src/maintenance-inbox.ts +1132 -0
- package/src/maintenance-runner.ts +85 -0
- package/src/page-drift.ts +292 -0
- package/src/page-inbox.ts +254 -0
- package/src/report-export.ts +392 -0
- package/src/review-bridge.ts +1729 -0
- package/src/search-index.ts +266 -0
- package/src/store.ts +2171 -0
- package/src/telemetry-defaults.ts +50 -0
- package/src/telemetry-report.ts +365 -0
- package/src/telemetry.ts +757 -0
- package/src/wiki-synthesis.ts +1307 -0
|
@@ -0,0 +1,219 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* The wiki refresh orchestrator — `npm run wiki:refresh` entry point.
|
|
3
|
+
*
|
|
4
|
+
* Rebuilds every deterministic derived view in the project: the catalog block in
|
|
5
|
+
* `docs/index.md`, `docs/wiki/maintenance-inbox.md` (and its JSON twin), the recent raw
|
|
6
|
+
* observation stream, the guidance-lifecycle table, the wiki search index (JSON +
|
|
7
|
+
* SQLite FTS5), and — since A5 of the API reference roadmap — the entire `docs/wiki/api/`
|
|
8
|
+
* tree via `refreshApiReference()` from `./api-reference.ts`.
|
|
9
|
+
*
|
|
10
|
+
* The order matters: API reference generation runs first so the page catalog and search
|
|
11
|
+
* index built later in the same call see the fresh generated pages. Every write goes
|
|
12
|
+
* through `writeIfChanged` so untouched files don't bump mtime, which keeps `npm run check`
|
|
13
|
+
* idempotent across repeated runs.
|
|
14
|
+
*
|
|
15
|
+
* This module deliberately does NOT write technical narrative pages like architecture.md.
|
|
16
|
+
* It only rebuilds derived views that map cleanly from primary data; humans own the prose.
|
|
17
|
+
*/
|
|
18
|
+
import { promises as fs } from 'node:fs';
|
|
19
|
+
import path from 'node:path';
|
|
20
|
+
import { refreshApiReference } from './api-reference.js';
|
|
21
|
+
import { reviewProjectMemories } from '@rarusoft/dendrite-memory';
|
|
22
|
+
import { buildWikiGraphSnapshot, extractWikiClaims, lintWikiPages, listGuidanceLifecycle, listWikiPages, listWikiProposals, readWikiPage, searchWikiPages } from './store.js';
|
|
23
|
+
import { buildMaintenanceInboxPage, buildMaintenanceInboxSnapshot } from './maintenance-inbox.js';
|
|
24
|
+
import { detectRawObservationClusters, readRawObservations } from '@rarusoft/dendrite-memory';
|
|
25
|
+
// Output paths for every derived view. All are resolved against the process
// CWD at module load, so the refresh targets whichever project launched it.
const indexPath = path.resolve(process.cwd(), 'docs', 'index.md');
const maintenanceInboxPath = path.resolve(process.cwd(), 'docs', 'wiki', 'maintenance-inbox.md');
const maintenanceInboxDataPath = path.resolve(process.cwd(), 'docs', 'public', 'maintenance-inbox.json');
const observationStreamDataPath = path.resolve(process.cwd(), 'docs', 'public', 'raw-observations-recent.json');
// How many recent raw observations the JSON stream snapshot carries.
const observationStreamRecentSampleSize = 200;
const guidanceLifecyclePath = path.resolve(process.cwd(), 'docs', 'wiki', 'guidance-lifecycle.md');
const guidanceLifecycleDataPath = path.resolve(process.cwd(), 'docs', 'public', 'guidance-lifecycle.json');
const maintenanceActionResultPath = path.resolve(process.cwd(), 'docs', 'public', 'maintenance-action-result.json');
const wikiSearchIndexPath = path.resolve(process.cwd(), 'docs', 'public', 'wiki-search-index.json');
// SQLite FTS index lives under the local data dir; DENDRITE_WIKI_DATA_DIR overrides it.
const sqliteSearchIndexPath = path.resolve(process.cwd(), process.env.DENDRITE_WIKI_DATA_DIR ?? 'local-data', 'wiki-search.sqlite');
// Sentinel comments delimiting the generated catalog block inside docs/index.md.
const markerStart = '<!-- WIKI_CATALOG_START -->';
const markerEnd = '<!-- WIKI_CATALOG_END -->';
|
|
37
|
+
/**
 * Rebuild every deterministic derived view of the wiki in one pass.
 *
 * Sequence (order is significant — see comments inline):
 *   1. API reference tree, 2. maintenance inbox (md + JSON), 3. recent raw
 *   observation stream JSON, 4. guidance-lifecycle page + JSON, 5. search
 *   index (JSON + SQLite FTS), 6. the catalog block in docs/index.md.
 * All writes go through writeIfChanged so unchanged files keep their mtime.
 *
 * @returns {Promise<{pageCount: number}>} number of wiki pages cataloged.
 * @throws propagates fs/read failures on docs/index.md (it must exist) and
 *         any refreshApiReference failure — intentionally not swallowed.
 */
export async function refreshGeneratedWikiDocs() {
    // Regenerate the API reference first so listWikiPages() and the lint pass below see the
    // current set of generated pages. Projects without a `src/` directory get a harmless
    // empty-result no-op (walkProjectSources returns []). Generation failures escalate so
    // `npm run check` surfaces real breakage rather than silently shipping a stale catalog.
    await refreshApiReference();
    // The four review inputs are independent — gather them in parallel.
    const [findings, proposals, memoryReview, observationClusters] = await Promise.all([
        lintWikiPages(),
        listWikiProposals(),
        reviewProjectMemories(),
        detectRawObservationClusters()
    ]);
    // docs/index.md must exist; its EOL style is detected and preserved.
    const index = await fs.readFile(indexPath, 'utf8');
    const indexEol = detectEol(index);
    // Missing files read as '' so the first run creates them cleanly.
    const maintenanceInbox = await fs.readFile(maintenanceInboxPath, 'utf8').catch(() => '');
    const maintenanceInboxEol = '\n';
    // Existence probe handed to the inbox builders so they can link review pages.
    const reviewPageExists = async (reviewPath) => {
        try {
            await fs.access(path.resolve(process.cwd(), reviewPath));
            return true;
        }
        catch {
            return false;
        }
    };
    const nextMaintenanceInbox = normalizeEol(await buildMaintenanceInboxPage(findings, proposals, {
        reviewPageExists,
        memoryFindings: memoryReview.findings,
        observationClusters
    }), maintenanceInboxEol);
    await writeIfChanged(maintenanceInboxPath, maintenanceInbox, nextMaintenanceInbox);
    // JSON twin of the inbox page, built from the same findings/proposals.
    const maintenanceInboxData = await fs.readFile(maintenanceInboxDataPath, 'utf8').catch(() => '');
    const nextMaintenanceInboxData = ensureTrailingEol(JSON.stringify(await buildMaintenanceInboxSnapshot(findings, proposals, {
        reviewPageExists,
        memoryFindings: memoryReview.findings,
        observationClusters
    }), null, 2), '\n');
    await writeIfChanged(maintenanceInboxDataPath, maintenanceInboxData, nextMaintenanceInboxData);
    // Recent raw-observation stream snapshot (capped sample, newest first per reader).
    const observationStreamData = await fs.readFile(observationStreamDataPath, 'utf8').catch(() => '');
    const recentObservations = await readRawObservations({ limit: observationStreamRecentSampleSize });
    // NOTE: generatedAt changes every run, so this file is rewritten each refresh.
    const nextObservationStreamData = ensureTrailingEol(JSON.stringify({
        schemaVersion: 1,
        generatedAt: new Date().toISOString(),
        sampleSize: observationStreamRecentSampleSize,
        observationCount: recentObservations.length,
        clusterCount: observationClusters.length,
        observations: recentObservations
    }, null, 2), '\n');
    await writeIfChanged(observationStreamDataPath, observationStreamData, nextObservationStreamData);
    // Guidance-lifecycle table (markdown page + JSON twin).
    const guidanceLifecycle = await listGuidanceLifecycle();
    const currentGuidanceLifecycle = await fs.readFile(guidanceLifecyclePath, 'utf8').catch(() => '');
    const nextGuidanceLifecycle = normalizeEol(buildGuidanceLifecyclePage(guidanceLifecycle), '\n');
    await writeIfChanged(guidanceLifecyclePath, currentGuidanceLifecycle, nextGuidanceLifecycle);
    const guidanceLifecycleData = await fs.readFile(guidanceLifecycleDataPath, 'utf8').catch(() => '');
    const nextGuidanceLifecycleData = ensureTrailingEol(JSON.stringify({ guidance: guidanceLifecycle }, null, 2), '\n');
    await writeIfChanged(guidanceLifecycleDataPath, guidanceLifecycleData, nextGuidanceLifecycleData);
    // Page list is read AFTER refreshApiReference so generated pages are included.
    const pages = await listWikiPages();
    const searchIndexData = await fs.readFile(wikiSearchIndexPath, 'utf8').catch(() => '');
    const nextSearchIndexData = ensureTrailingEol(JSON.stringify({
        graph: await buildWikiGraphSnapshot(),
        sampleSearch: await searchWikiPages('project wiki')
    }, null, 2), '\n');
    await writeIfChanged(wikiSearchIndexPath, searchIndexData, nextSearchIndexData);
    // SQLite FTS index; silently skipped when node:sqlite is unavailable.
    await writeSqliteSearchIndex(sqliteSearchIndexPath, pages);
    // Catalog block rendered in the index file's own EOL style.
    const catalog = normalizeEol([
        markerStart,
        '',
        '| Page | Slug |',
        '|---|---|',
        ...pages.map((page) => `| [${page.title}](./wiki/${page.slug}.md) | \`${page.slug}\` |`),
        '',
        markerEnd
    ].join('\n'), indexEol);
    let nextIndex = index;
    if (nextIndex.includes(markerStart) && nextIndex.includes(markerEnd)) {
        // Replace only the first marker-delimited span (non-greedy, dotall-equivalent).
        nextIndex = nextIndex.replace(new RegExp(`${markerStart}[\\s\\S]*?${markerEnd}`), catalog);
    }
    else {
        // No markers yet: append a fresh "Generated Catalog" section at the end.
        nextIndex += `${indexEol}${indexEol}## Generated Catalog${indexEol}${indexEol}${catalog}${indexEol}`;
    }
    await writeIfChanged(indexPath, index, ensureTrailingEol(nextIndex, indexEol));
    return { pageCount: pages.length };
}
|
|
120
|
+
/**
 * Render the generated `docs/wiki/guidance-lifecycle.md` page.
 *
 * @param {Array<{path: string, kind: string, status: string, reviewStatus?: string,
 *        archiveTarget?: string, linkedFrom: string[], reason: string}>} guidance
 *        lifecycle records from listGuidanceLifecycle().
 * @returns {string} full markdown page joined with '\n'.
 */
function buildGuidanceLifecyclePage(guidance) {
    // Count records per status in a single pass. The previous implementation
    // re-filtered the whole array for every item (O(n^2)); a counting Map keeps
    // first-occurrence insertion order, and the final sort makes output stable.
    const countsByStatus = new Map();
    for (const item of guidance) {
        countsByStatus.set(item.status, (countsByStatus.get(item.status) ?? 0) + 1);
    }
    const statusCounts = [...countsByStatus.entries()]
        .sort(([left], [right]) => left.localeCompare(right));
    return [
        '# Guidance Lifecycle',
        '',
        'This generated page shows active, dormant, superseded, and pending-review guidance files from the current project.',
        '',
        '## Status',
        guidance.length > 0 ? `- Guidance files: ${guidance.length}` : '- Guidance files: none.',
        statusCounts.length > 0
            ? `- Status groups: ${statusCounts.map(([status, count]) => `\`${status}\` (${count})`).join(', ')}`
            : '- Status groups: none.',
        '',
        '## Lifecycle Table',
        guidance.length === 0
            ? 'No guidance files found.'
            : [
                '| Path | Kind | Status | Review | Archive Target | Linked From | Reason |',
                '|---|---|---|---|---|---|---|',
                // Cells are escaped so pipes/newlines in values can't break the table.
                ...guidance.map((item) => `| ${escapeTableCell(item.path)} | \`${item.kind}\` | \`${item.status}\` | ${item.reviewStatus ?? 'none'} | ${escapeTableCell(item.archiveTarget ?? '')} | ${escapeTableCell(item.linkedFrom.join(', ') || 'none')} | ${escapeTableCell(item.reason)} |`)
            ].join('\n'),
        ''
    ].join('\n');
}
|
|
145
|
+
/**
 * Make a value safe to embed in a markdown table cell.
 * Pipes are backslash-escaped so they can't end the cell, and any line
 * breaks collapse to spaces so the row stays on a single line.
 */
function escapeTableCell(value) {
    const pipesEscaped = value.split('|').join('\\|');
    return pipesEscaped.split(/\r?\n/).join(' ');
}
|
|
148
|
+
/**
 * Persist the most recent maintenance action result as pretty-printed JSON.
 * Reads the current file first (missing file reads as '') so the changed-check
 * in writeIfChanged can skip the write and keep the mtime stable.
 */
export async function writeLatestMaintenanceActionArtifact(artifact) {
    const previous = await fs.readFile(maintenanceActionResultPath, 'utf8').catch(() => '');
    const serialized = ensureTrailingEol(JSON.stringify(artifact, null, 2), '\n');
    await writeIfChanged(maintenanceActionResultPath, previous, serialized);
}
|
|
153
|
+
/**
 * Detect the line-ending convention of existing content: CRLF when any
 * Windows line ending is present, LF otherwise (including empty content).
 */
function detectEol(content) {
    if (content.includes('\r\n')) {
        return '\r\n';
    }
    return '\n';
}
|
|
156
|
+
/**
 * Rewrite every line break (LF or CRLF) in `content` to the requested
 * terminator, leaving all other characters untouched.
 */
function normalizeEol(content, eol) {
    return content.split(/\r?\n/).join(eol);
}
|
|
159
|
+
/**
 * Normalize the end of `content` to exactly one trailing line terminator:
 * any run of trailing LF/CRLF is stripped, then a single `eol` is appended.
 */
function ensureTrailingEol(content, eol) {
    const trimmed = content.replace(/(?:\r?\n)+$/g, '');
    return trimmed + eol;
}
|
|
163
|
+
/**
 * Pull the first plain-prose line that follows the page's H1, skipping blank
 * lines, headings, table rows, and bullet/numbered list items. When no H1
 * exists the scan starts from the top; returns '' when nothing qualifies.
 */
function extractSummaryParagraph(content) {
    const lines = content.split(/\r?\n/);
    // findIndex yields -1 without an H1; +1 turns that into 0, i.e. scan all lines.
    const start = lines.findIndex((line) => /^#\s+\S+/.test(line)) + 1;
    for (let i = start; i < lines.length; i += 1) {
        const candidate = lines[i].trim();
        if (candidate === '') {
            continue;
        }
        const isStructural = candidate.startsWith('#')
            || candidate.startsWith('|')
            || candidate.startsWith('- ')
            || /^\d+\.\s/.test(candidate);
        if (!isStructural) {
            return candidate;
        }
    }
    return '';
}
|
|
176
|
+
/**
 * Write `nextContent` to `filePath` only when it differs from
 * `currentContent`, creating parent directories on demand. Skipping identical
 * writes keeps file mtimes stable across repeated refresh runs.
 */
async function writeIfChanged(filePath, currentContent, nextContent) {
    if (currentContent !== nextContent) {
        await fs.mkdir(path.dirname(filePath), { recursive: true });
        await fs.writeFile(filePath, nextContent, 'utf8');
    }
}
|
|
183
|
+
/**
 * Rebuild the SQLite FTS5 search index from scratch.
 *
 * Three tables: pages_fts (full-text over page content + summary), claims_fts
 * (full-text over extracted claims), graph_edges (claim-source link graph).
 * The file is deleted and recreated each run, so the schema needs no
 * migrations. A no-op when node:sqlite cannot be loaded — the JSON search
 * index written elsewhere remains the portable fallback.
 *
 * @param {string} filePath destination .sqlite path.
 * @param {Array<{slug: string, title: string, path: string}>} pages from listWikiPages().
 */
async function writeSqliteSearchIndex(filePath, pages) {
    const sqliteModule = await loadNodeSqliteModule();
    if (!sqliteModule) {
        // Runtime without node:sqlite — skip silently, per module contract.
        return;
    }
    await fs.mkdir(path.dirname(filePath), { recursive: true });
    // Delete-and-recreate keeps the schema authoritative on every refresh.
    await fs.rm(filePath, { force: true });
    const database = new sqliteModule.DatabaseSync(filePath);
    try {
        database.exec([
            'CREATE VIRTUAL TABLE pages_fts USING fts5(slug, title, path, summary, content);',
            'CREATE VIRTUAL TABLE claims_fts USING fts5(page_slug, status, text, sources);',
            'CREATE TABLE graph_edges(source_slug TEXT NOT NULL, target_slug TEXT NOT NULL);'
        ].join('\n'));
        // path -> slug lookup handed to claim extraction for resolving source links.
        const pageByPath = new Map(pages.map((page) => [page.path, page.slug]));
        // Prepared once, reused for every row.
        const insertPage = database.prepare('INSERT INTO pages_fts(slug, title, path, summary, content) VALUES (?, ?, ?, ?, ?)');
        const insertClaim = database.prepare('INSERT INTO claims_fts(page_slug, status, text, sources) VALUES (?, ?, ?, ?)');
        const insertEdge = database.prepare('INSERT INTO graph_edges(source_slug, target_slug) VALUES (?, ?)');
        for (const page of pages) {
            const content = await readWikiPage(page.slug);
            // Title doubles as the summary when the page has no prose paragraph.
            insertPage.run(page.slug, page.title, page.path, extractSummaryParagraph(content) || page.title, content);
            for (const claim of extractWikiClaims(page.slug, content, pageByPath)) {
                insertClaim.run(claim.pageSlug, claim.status, claim.text, claim.sources.map((source) => source.slug).join(' '));
                // Every claim source becomes one edge from the claiming page.
                for (const source of claim.sources) {
                    insertEdge.run(page.slug, source.slug);
                }
            }
        }
    }
    finally {
        // Always release the handle, even if an insert throws mid-build.
        database.close();
    }
}
|
|
216
|
+
/**
 * Load the built-in `node:sqlite` module if the runtime provides it
 * (Node >= 22.5); resolves to undefined otherwise so callers can skip the
 * SQLite index instead of crashing the refresh.
 *
 * The previous implementation routed through `new Function('return import(…)')`
 * — an eval-family construct flagged by linters/CSP. This file is already ESM
 * (it uses `export`), so a native dynamic import is safe and equivalent;
 * bundlers leave `node:`-prefixed specifiers external.
 */
async function loadNodeSqliteModule() {
    try {
        return await import('node:sqlite');
    }
    catch {
        return undefined;
    }
}
|
package/dist/i18n.js
ADDED
|
@@ -0,0 +1,71 @@
|
|
|
1
|
+
// C7 slice 2: per-language modes for agent-facing strings.
|
|
2
|
+
//
|
|
3
|
+
// This is intentionally minimal: a single string-table per language code, resolved at call
|
|
4
|
+
// time via the DENDRITE_LANG env var. The default is English. The framework is the
|
|
5
|
+
// deliverable here; translations ship as forks or follow-up PRs once a real operator asks
|
|
6
|
+
// for one.
|
|
7
|
+
//
|
|
8
|
+
// Storage rules stay English-only: memory bodies, wiki pages, claims, and project-log
|
|
9
|
+
// entries are never translated by this module. Only operator-facing message text (cluster
|
|
10
|
+
// templates, ritual reminders, hook output) is routed through the i18n table.
|
|
11
|
+
//
|
|
12
|
+
// Adding a new language:
|
|
13
|
+
// 1. Pick an ISO 639-1 code (zh, ja, es, fr, de, etc.).
|
|
14
|
+
// 2. Add an entry to the `translations` map below with the message keys you want to
|
|
15
|
+
// localize. Missing keys fall back to English automatically.
|
|
16
|
+
// 3. Document the new code in docs/wiki/competitive-feature-roadmap.md (phase C7).
|
|
17
|
+
// Default (English) message table. Every entry is a function so callers can
// pass interpolation values; list-style entries return pre-joined bullet text.
const englishMessages = {
    // Header interpolates cluster details and pluralizes observation/session counts.
    'observation-cluster-template-header': ({ kind, target, observationCount, distinctSessionCount, lastSeen }) => `Recurring activity detected: ${kind} on ${target} (${observationCount} observation${observationCount === 1 ? '' : 's'} across ${distinctSessionCount} session${distinctSessionCount === 1 ? '' : 's'}, last seen ${lastSeen}).`,
    // NOTE(review): `{kindLabel}` is a literal brace placeholder inside a plain
    // string — this entry takes no arguments and never interpolates it. Confirm
    // whether a caller substitutes `{kindLabel}` afterwards, or whether this was
    // meant to be `({ kindLabel }) => ...` with a template literal.
    'observation-cluster-template-considerations': () => 'Consider documenting why this {kindLabel} keeps coming up — for example:',
    'observation-cluster-template-options-edit-or-read': () => [
        '- a setup or onboarding gotcha future agents should know about',
        '- a refactoring target that has accumulated repeated edits',
        '- a frequently-broken integration or test',
        '- a workflow pattern worth promoting to a scope-bound skill'
    ].join('\n'),
    // Same bullets as above except "edits" reads "activity" for non-edit clusters.
    'observation-cluster-template-options-default': () => [
        '- a setup or onboarding gotcha future agents should know about',
        '- a refactoring target that has accumulated repeated activity',
        '- a frequently-broken integration or test',
        '- a workflow pattern worth promoting to a scope-bound skill'
    ].join('\n'),
    'observation-cluster-template-replace-instruction': () => 'Replace this template text with the actual lesson, then optionally promote to a skill via memory_promote_skill once the lesson has been recalled enough times.'
};
|
|
34
|
+
// Sample non-English bundle — Spanish — so the routing has a real second path to test.
// Operators adding more languages should follow this exact shape.
// Keys absent here (e.g. the options lists) fall back to English in translate().
const spanishMessages = {
    'observation-cluster-template-header': ({ kind, target, observationCount, distinctSessionCount, lastSeen }) => `Actividad recurrente detectada: ${kind} en ${target} (${observationCount} observación${observationCount === 1 ? '' : 'es'} en ${distinctSessionCount} sesión${distinctSessionCount === 1 ? '' : 'es'}, vista por última vez ${lastSeen}).`,
    'observation-cluster-template-replace-instruction': () => 'Reemplaza este texto plantilla con la lección real y luego promociónalo a una skill mediante memory_promote_skill una vez que la lección haya sido recordada suficientes veces.'
};
// Language-code → bundle registry consulted by translate() and
// listAvailableDendriteLangs(); 'en' is the universal fallback.
const translations = {
    en: englishMessages,
    es: spanishMessages
};
|
|
44
|
+
/**
 * Resolve the active language code from DENDRITE_LANG, defaulting to English.
 * Full BCP-47 tags are reduced to their primary subtag ("en-US" → "en"),
 * and the value is trimmed/lowercased first so "  ES " resolves to "es".
 *
 * @param {NodeJS.ProcessEnv} [env] environment to read (defaults to process.env).
 * @returns {string} lowercase primary language subtag.
 */
export function resolveDendriteLang(env = process.env) {
    const value = (env.DENDRITE_LANG ?? '').trim().toLowerCase();
    if (value === '') {
        return 'en';
    }
    const [primarySubtag] = value.split('-');
    return primarySubtag;
}
|
|
52
|
+
/**
 * Render a localized message: look the key up in the active language's bundle,
 * fall back to English, and finally fall back to the literal key itself.
 *
 * @param {string} key message identifier.
 * @param {object} [values] interpolation values handed to the message function.
 * @param {{lang?: string}} [options] explicit language override; otherwise
 *        resolveDendriteLang() decides from the environment.
 * @returns {string} rendered message, or `key` when no bundle defines it.
 */
export function translate(key, values = {}, options = {}) {
    const lang = options.lang ?? resolveDendriteLang();
    // Probe the requested bundle first, then the English fallback.
    for (const bundle of [translations[lang], translations.en]) {
        const entry = bundle?.[key];
        if (entry) {
            return entry(values);
        }
    }
    // Never throw on a missing key — agent-facing surfaces would prefer to render
    // the key than fail. Operators can grep output for literal keys to spot gaps.
    return key;
}
|
|
68
|
+
/**
 * Sorted list of every language code that has a bundled translation table.
 * Public helper used in tests and by anyone inspecting available languages.
 */
export function listAvailableDendriteLangs() {
    const codes = Object.keys(translations);
    codes.sort();
    return codes;
}
|
package/dist/index.js
ADDED
|
@@ -0,0 +1,49 @@
|
|
|
1
|
+
// Public surface of @rarusoft/dendrite-wiki.
|
|
2
|
+
//
|
|
3
|
+
// Phase 4 slice D wave 2 of the Library Extraction Roadmap. The markdown-wiki
|
|
4
|
+
// adapter for @rarusoft/dendrite-memory: implements `CanonicalTarget` against
|
|
5
|
+
// VitePress-rendered `docs/wiki/`, owns the wiki page store + lint + search +
|
|
6
|
+
// synthesis + maintenance review surface + browser-side review bridge.
|
|
7
|
+
//
|
|
8
|
+
// The canonical-target.ts module has a top-level side effect that registers
|
|
9
|
+
// `WikiCanonicalTarget` as the brain's default canonical target — any consumer
|
|
10
|
+
// that imports from `@rarusoft/dendrite-wiki` therefore auto-wires the wiki adapter for
|
|
11
|
+
// brain promotion functions.
|
|
12
|
+
// canonical-target.ts MUST be re-exported first so its top-level
|
|
13
|
+
// setDefaultCanonicalTarget side effect fires before anything else.
|
|
14
|
+
export * from './canonical-target.js';
|
|
15
|
+
// Core wiki page store + search + lint + context briefing surface.
|
|
16
|
+
export * from './store.js';
|
|
17
|
+
export * from './search-index.js';
|
|
18
|
+
export * from './context-cache.js';
|
|
19
|
+
// Maintenance review surface.
|
|
20
|
+
export * from './maintenance-actions.js';
|
|
21
|
+
export * from './maintenance-inbox.js';
|
|
22
|
+
export * from './maintenance-runner.js';
|
|
23
|
+
// Wiki page drift + contradicts-shipped-memory lint surface.
|
|
24
|
+
export * from './page-drift.js';
|
|
25
|
+
export * from './contradicts-shipped-memory.js';
|
|
26
|
+
// Per-page inbox + librarian audit (the multi-category maintenance aggregator).
|
|
27
|
+
export * from './page-inbox.js';
|
|
28
|
+
export * from './librarian.js';
|
|
29
|
+
// Browser-side review bridge.
|
|
30
|
+
export * from './review-bridge.js';
|
|
31
|
+
// API reference generator + chart insertion + chart prompts.
|
|
32
|
+
export * from './api-reference.js';
|
|
33
|
+
export * from './chart-insert.js';
|
|
34
|
+
export * from './chart-prompts.js';
|
|
35
|
+
// Synthesis provider (LLM-assisted wiki narration).
|
|
36
|
+
export * from './wiki-synthesis.js';
|
|
37
|
+
// Telemetry + benchmark + report/binder exports + doctor + diff-context.
|
|
38
|
+
export * from './telemetry.js';
|
|
39
|
+
export * from './telemetry-defaults.js';
|
|
40
|
+
export * from './telemetry-report.js';
|
|
41
|
+
export * from './benchmark.js';
|
|
42
|
+
export * from './benchmark-events.js';
|
|
43
|
+
export * from './report-export.js';
|
|
44
|
+
export * from './binder-export.js';
|
|
45
|
+
export * from './doctor.js';
|
|
46
|
+
export * from './diff-context.js';
|
|
47
|
+
export * from './generated-docs.js';
|
|
48
|
+
// i18n translation table.
|
|
49
|
+
export * from './i18n.js';
|
|
@@ -0,0 +1,255 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Wiki Librarian audit — one-shot maintenance briefing for an agent that's been told
|
|
3
|
+
* "organize the wiki".
|
|
4
|
+
*
|
|
5
|
+
* Aggregates every open maintenance signal (lint findings, page-drift, contradicts-
|
|
6
|
+
* shipped-memory, promotion-ready memories) into a single structured payload with
|
|
7
|
+
* pre-gathered evidence and a per-item `recommendedAction` sentence. The agent reads
|
|
8
|
+
* this once, plans across categories, and then acts using the existing tool surface
|
|
9
|
+
* (`memory_promote`, `wiki_write`, `memory_forget`, etc.). Every change still flows
|
|
10
|
+
* through the audited write paths, so the operator's safety story stays exactly what
|
|
11
|
+
* it was for manual edits: git diff + project-log entry per change.
|
|
12
|
+
*
|
|
13
|
+
* This module is deliberately a projection — it doesn't write anything itself, it just
|
|
14
|
+
* gathers the evidence an LLM needs to make good organizing decisions in one tool call
|
|
15
|
+
* instead of forcing it to chain a dozen exploratory reads.
|
|
16
|
+
*/
|
|
17
|
+
import { detectContradictsShippedMemory } from './contradicts-shipped-memory.js';
|
|
18
|
+
// Side-effect import: registers WikiCanonicalTarget on the brain DI surface.
|
|
19
|
+
import './canonical-target.js';
|
|
20
|
+
import { listProjectMemories, previewProjectMemoryPromotion, resolvePromotionTargetSlug, reviewProjectMemories } from '@rarusoft/dendrite-memory';
|
|
21
|
+
import { detectPageDrift, extractPageIntent, extractRecentEntriesMentioningPage } from './page-drift.js';
|
|
22
|
+
import { lintWikiPages, pagePathFromSlug, readWikiPage } from './store.js';
|
|
23
|
+
import { promises as fs } from 'node:fs';
|
|
24
|
+
// Cap on items emitted per category so the audit payload stays digestible in
// a single agent read; overridable via buildLibrarianAudit options.
const DEFAULT_MAX_PER_CATEGORY = 25;
// Agent-facing playbook prepended to the audit payload. Ordered by leverage;
// each step names the exact tool calls that perform the fix, so the agent can
// act without further exploration.
const PLAYBOOK_TEXT = [
    'Librarian mode: work down this list category by category, highest-leverage first.',
    '',
    '1. promotion-ready: call memory_promote(memoryIds, mode="apply", targetPage) for each item — applies the memory text into the target page and marks the memory superseded.',
    '2. contradicts-shipped-memory: read the page with wiki_read, study the contradicting memories in evidence.contradictingMemoryIds, then rewrite the offending section with wiki_write so the prose matches shipped reality. Add `contradicts-shipped-memory: ignore` to frontmatter if the negation is intentional design language.',
    '3. page-drift: read the page intent + recent activity in the evidence block, then rewrite the first paragraph with wiki_write so it reflects what the page is now about. If the drift is healthy (e.g., a roadmap mostly delivered), the new paragraph should say so.',
    '4. unsupported-claim / stale-claim: read the page, either attach a source citation or mark the claim status, then wiki_write the updated page.',
    '5. orphan-page / missing-h1 / missing-summary: structural fixes — add an H1, add a summary paragraph, or link the page from a canonical surface.',
    '',
    'Every wiki_write call goes through the audited path — project-log entry is appended automatically and git diff is the operator review surface. Memory promotions mark the source memory superseded in the same operation.'
].join('\n');
|
|
36
|
+
export async function buildLibrarianAudit(options = {}) {
|
|
37
|
+
const maxPerCategory = options.maxPerCategory ?? DEFAULT_MAX_PER_CATEGORY;
|
|
38
|
+
const allowCategory = (cat) => !options.categories || options.categories.includes(cat);
|
|
39
|
+
const [lintFindings, memoryReview, allMemories, projectLogContent] = await Promise.all([
|
|
40
|
+
lintWikiPages(),
|
|
41
|
+
reviewProjectMemories(),
|
|
42
|
+
listProjectMemories({ includeArchived: true }),
|
|
43
|
+
fs.readFile(pagePathFromSlug('project-log'), 'utf8').catch(() => '')
|
|
44
|
+
]);
|
|
45
|
+
const activeMemories = allMemories.filter((record) => record.status === 'active' || record.status === 'superseded');
|
|
46
|
+
const memoriesById = new Map(allMemories.map((record) => [record.id, record]));
|
|
47
|
+
const items = [];
|
|
48
|
+
const counts = {
|
|
49
|
+
'page-drift': 0,
|
|
50
|
+
'contradicts-shipped-memory': 0,
|
|
51
|
+
'promotion-ready': 0,
|
|
52
|
+
'unsupported-claim': 0,
|
|
53
|
+
'stale-claim': 0,
|
|
54
|
+
'orphan-page': 0,
|
|
55
|
+
'missing-h1': 0,
|
|
56
|
+
'missing-summary': 0,
|
|
57
|
+
'other-lint': 0
|
|
58
|
+
};
|
|
59
|
+
// 1) Promotion-ready memories — highest leverage, fully deterministic apply path.
|
|
60
|
+
if (allowCategory('promotion-ready')) {
|
|
61
|
+
for (const finding of memoryReview.findings) {
|
|
62
|
+
if (finding.kind !== 'promotion-ready')
|
|
63
|
+
continue;
|
|
64
|
+
if (counts['promotion-ready'] >= maxPerCategory)
|
|
65
|
+
break;
|
|
66
|
+
const targetSlug = resolvePromotionTargetSlug(finding.records);
|
|
67
|
+
let proposedTextPreview = '';
|
|
68
|
+
let proposedHeading = '## Promoted Lessons';
|
|
69
|
+
try {
|
|
70
|
+
const preview = await previewProjectMemoryPromotion(finding.memoryIds, { targetPage: targetSlug });
|
|
71
|
+
proposedTextPreview = truncate(preview.proposedText, 400);
|
|
72
|
+
proposedHeading = preview.sectionHeading;
|
|
73
|
+
}
|
|
74
|
+
catch {
|
|
75
|
+
// preview may fail if a memory was archived between review and preview — skip evidence.
|
|
76
|
+
}
|
|
77
|
+
items.push({
|
|
78
|
+
category: 'promotion-ready',
|
|
79
|
+
slug: targetSlug,
|
|
80
|
+
summary: finding.summary,
|
|
81
|
+
evidence: {
|
|
82
|
+
memoryIds: finding.memoryIds,
|
|
83
|
+
recallCount: finding.records[0]?.recallCount ?? 0,
|
|
84
|
+
sourceRefs: finding.records.flatMap((record) => record.sources.map((source) => `${source.kind}:${source.slug}`)),
|
|
85
|
+
targetSlug,
|
|
86
|
+
proposedHeading,
|
|
87
|
+
proposedTextPreview
|
|
88
|
+
},
|
|
89
|
+
recommendedAction: `Call memory_promote(memoryIds=${JSON.stringify(finding.memoryIds)}, mode="apply", targetPage="${targetSlug}"). The memory becomes a "Promoted Lessons" bullet on the page and is marked superseded so the inbox stops surfacing it.`,
|
|
90
|
+
recommendedTools: ['memory_promote']
|
|
91
|
+
});
|
|
92
|
+
counts['promotion-ready'] += 1;
|
|
93
|
+
}
|
|
94
|
+
}
|
|
95
|
+
// 2) Contradicts-shipped-memory + page-drift findings need page-level evidence,
|
|
96
|
+
// so we read each affected page once and produce both kinds of items from it.
|
|
97
|
+
const driftableSlugs = new Set(lintFindings
|
|
98
|
+
.filter((finding) => finding.rule === 'contradicts-shipped-memory' || finding.rule === 'page-drift')
|
|
99
|
+
.map((finding) => finding.slug));
|
|
100
|
+
for (const slug of driftableSlugs) {
|
|
101
|
+
const content = await readWikiPage(slug).catch(() => '');
|
|
102
|
+
if (!content)
|
|
103
|
+
continue;
|
|
104
|
+
if (allowCategory('contradicts-shipped-memory') && counts['contradicts-shipped-memory'] < maxPerCategory) {
|
|
105
|
+
const signals = detectContradictsShippedMemory(content, activeMemories, projectLogContent);
|
|
106
|
+
for (const signal of signals) {
|
|
107
|
+
if (counts['contradicts-shipped-memory'] >= maxPerCategory)
|
|
108
|
+
break;
|
|
109
|
+
const contradictingTexts = signal.contradictingMemoryIds.map((id) => {
|
|
110
|
+
const record = memoriesById.get(id);
|
|
111
|
+
return record ? { id, summary: record.summary, kind: record.kind } : { id };
|
|
112
|
+
});
|
|
113
|
+
items.push({
|
|
114
|
+
category: 'contradicts-shipped-memory',
|
|
115
|
+
slug,
|
|
116
|
+
summary: `${slug}: section "${signal.sectionHeading}" denies a feature that shipped`,
|
|
117
|
+
evidence: {
|
|
118
|
+
sectionHeading: signal.sectionHeading,
|
|
119
|
+
matchedNegation: signal.matchedNegation,
|
|
120
|
+
objectTokens: signal.objectTokens,
|
|
121
|
+
contradictingMemories: contradictingTexts,
|
|
122
|
+
affirmingSnippets: signal.affirmingSnippets
|
|
123
|
+
},
|
|
124
|
+
recommendedAction: `Read the page with wiki_read(slug="${slug}"). Locate the section "${signal.sectionHeading}". Rewrite its prose so the negation "${signal.matchedNegation}" is replaced with current shipped state — the contradicting memories prove the feature exists. Apply with wiki_write. If the negation is genuinely intentional design language (e.g., privacy boundary), instead add \`contradicts-shipped-memory: ignore\` to the page frontmatter.`,
|
|
125
|
+
recommendedTools: ['wiki_read', 'wiki_write']
|
|
126
|
+
});
|
|
127
|
+
counts['contradicts-shipped-memory'] += 1;
|
|
128
|
+
}
|
|
129
|
+
}
|
|
130
|
+
if (allowCategory('page-drift') && counts['page-drift'] < maxPerCategory) {
|
|
131
|
+
const hasDriftFinding = lintFindings.some((finding) => finding.slug === slug && finding.rule === 'page-drift');
|
|
132
|
+
if (hasDriftFinding) {
|
|
133
|
+
const drift = detectPageDrift(content, slug, projectLogContent);
|
|
134
|
+
const intent = extractPageIntent(content);
|
|
135
|
+
const activityMatch = extractRecentEntriesMentioningPage(projectLogContent, slug, 8, 7);
|
|
136
|
+
items.push({
|
|
137
|
+
category: 'page-drift',
|
|
138
|
+
slug,
|
|
139
|
+
summary: `${slug}: page intent diverged from recent activity (~${drift ? Math.round(drift.similarity * 100) : 0}% overlap across ${activityMatch.distinctDays} day${activityMatch.distinctDays === 1 ? '' : 's'})`,
|
|
140
|
+
evidence: {
|
|
141
|
+
currentIntent: intent,
|
|
142
|
+
recentActivityEntries: activityMatch.entries,
|
|
143
|
+
matchedDistinctDays: activityMatch.distinctDays,
|
|
144
|
+
similarityPercent: drift ? Math.round(drift.similarity * 100) : 0
|
|
145
|
+
},
|
|
146
|
+
recommendedAction: `Read the page with wiki_read(slug="${slug}"). Read the recent activity in evidence.recentActivityEntries. Rewrite the first paragraph (right after the H1) so it reflects what the page is NOW about. If the drift is healthy — e.g., a roadmap that's mostly delivered — the new paragraph should say so explicitly. Apply with wiki_write.`,
|
|
147
|
+
recommendedTools: ['wiki_read', 'wiki_write']
|
|
148
|
+
});
|
|
149
|
+
counts['page-drift'] += 1;
|
|
150
|
+
}
|
|
151
|
+
}
|
|
152
|
+
}
|
|
153
|
+
// 3) Remaining lint findings — surface in their own categories so the agent can
|
|
154
|
+
// triage them with appropriate care. Skipping ones we already processed above.
|
|
155
|
+
for (const finding of lintFindings) {
|
|
156
|
+
if (finding.rule === 'contradicts-shipped-memory' || finding.rule === 'page-drift') {
|
|
157
|
+
continue;
|
|
158
|
+
}
|
|
159
|
+
const category = mapLintRuleToCategory(finding.rule);
|
|
160
|
+
if (!allowCategory(category))
|
|
161
|
+
continue;
|
|
162
|
+
if (counts[category] >= maxPerCategory)
|
|
163
|
+
continue;
|
|
164
|
+
items.push(buildLintItem(category, finding));
|
|
165
|
+
counts[category] += 1;
|
|
166
|
+
}
|
|
167
|
+
// Stable sort: promotion-ready first (highest deterministic safety), then drift,
|
|
168
|
+
// then contradicts, then everything else alphabetically by slug.
|
|
169
|
+
const categoryOrder = {
|
|
170
|
+
'promotion-ready': 0,
|
|
171
|
+
'contradicts-shipped-memory': 1,
|
|
172
|
+
'page-drift': 2,
|
|
173
|
+
'stale-claim': 3,
|
|
174
|
+
'unsupported-claim': 4,
|
|
175
|
+
'orphan-page': 5,
|
|
176
|
+
'missing-h1': 6,
|
|
177
|
+
'missing-summary': 7,
|
|
178
|
+
'other-lint': 8
|
|
179
|
+
};
|
|
180
|
+
items.sort((left, right) => {
|
|
181
|
+
const delta = categoryOrder[left.category] - categoryOrder[right.category];
|
|
182
|
+
if (delta !== 0)
|
|
183
|
+
return delta;
|
|
184
|
+
return (left.slug ?? '').localeCompare(right.slug ?? '');
|
|
185
|
+
});
|
|
186
|
+
return {
|
|
187
|
+
totalItems: items.length,
|
|
188
|
+
byCategory: counts,
|
|
189
|
+
items,
|
|
190
|
+
playbook: PLAYBOOK_TEXT
|
|
191
|
+
};
|
|
192
|
+
}
|
|
193
|
+
/**
 * Map a lint rule id to its maintenance-inbox category.
 *
 * Rules with a dedicated category map 1:1 onto a category of the same
 * name; every other rule is grouped under the catch-all 'other-lint'.
 *
 * @param {string} rule - Lint rule identifier from a lint finding.
 * @returns {string} Inbox category name.
 */
function mapLintRuleToCategory(rule) {
    // Rules that have their own first-class category (same spelling on both sides).
    const dedicated = new Set([
        'stale-claim',
        'unsupported-claim',
        'orphan-page',
        'missing-h1',
        'missing-summary'
    ]);
    return dedicated.has(rule) ? rule : 'other-lint';
}
|
|
209
|
+
/**
 * Build a maintenance-inbox item for a generic lint finding.
 *
 * Produces the category-specific remediation text and packages the raw
 * finding (rule, message, path) as evidence. Every category resolves with
 * the same tool pair: wiki_read + wiki_write.
 *
 * @param {string} category - Inbox category (see mapLintRuleToCategory).
 * @param {{slug: string, rule: string, message: string, path: string}} finding - Lint finding to surface.
 * @returns {object} Inbox item with summary, evidence, and recommended action.
 */
function buildLintItem(category, finding) {
    // Per-category guidance builders; keyed lazily so finding.slug lands in the text.
    const actionBuilders = new Map([
        ['unsupported-claim', (slug) => `Read the page with wiki_read(slug="${slug}"). Locate the unsupported claim. Either attach a source citation (file, wiki page, decision) and write it back with wiki_write, or downgrade the claim status from [current] to [stale] if it's no longer accurate.`],
        ['stale-claim', (slug) => `Read the page with wiki_read(slug="${slug}"). Locate the stale claim. Either update it to current truth (and flip status to [current]) or remove it. Apply with wiki_write.`],
        ['orphan-page', () => `Page is not linked from anywhere. Either link it from a canonical surface (project plan, architecture, an index page) — wiki_read + wiki_write — or, if the page is no longer relevant, archive/delete it.`],
        ['missing-h1', () => `Add a top-level H1 heading to the page. wiki_read + wiki_write.`],
        ['missing-summary', () => `Add a short summary paragraph immediately after the H1 explaining what the page is about. wiki_read + wiki_write.`]
    ]);
    // Categories without bespoke guidance fall through to the Review Board hint.
    const fallback = () => `Open the finding in the central Review Board — this lint rule has a specialized action there (snooze, archive guidance, insert H1, etc.). Or read the page and resolve manually with wiki_write.`;
    const buildAction = actionBuilders.get(category) ?? fallback;
    return {
        category,
        slug: finding.slug,
        summary: `${finding.slug}: ${finding.rule} — ${finding.message}`,
        evidence: {
            rule: finding.rule,
            message: finding.message,
            path: finding.path
        },
        recommendedAction: buildAction(finding.slug),
        recommendedTools: ['wiki_read', 'wiki_write']
    };
}
|
|
251
|
+
/**
 * Truncate text to at most `max` UTF-16 units, appending an ellipsis when
 * anything was cut. Trailing whitespace before the ellipsis is trimmed.
 *
 * Fixes over the previous version:
 * - A non-positive `max` now yields '' (previously max=0 sliced off only the
 *   last character and still appended '…', returning text LONGER than max).
 * - Never splits a surrogate pair: if the cut lands mid-astral-character
 *   (emoji, rare CJK), the dangling high surrogate is dropped instead of
 *   emitting a broken lone-surrogate glyph before the ellipsis.
 *
 * @param {string} text - String to shorten.
 * @param {number} max - Maximum length of the result in UTF-16 code units.
 * @returns {string} `text` unchanged when it fits, otherwise a trimmed prefix ending in '…'.
 */
function truncate(text, max) {
    if (max <= 0)
        return '';
    if (text.length <= max)
        return text;
    // Reserve one unit for the ellipsis itself.
    let head = text.slice(0, max - 1);
    // If the slice ended on a high (leading) surrogate, drop it so we never
    // emit half of an astral character.
    const lastUnit = head.charCodeAt(head.length - 1);
    if (lastUnit >= 0xd800 && lastUnit <= 0xdbff)
        head = head.slice(0, -1);
    return `${head.trimEnd()}…`;
}
|