gitnexus 1.1.8 → 1.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +50 -59
- package/dist/cli/ai-context.js +9 -9
- package/dist/cli/analyze.js +139 -47
- package/dist/cli/augment.d.ts +13 -0
- package/dist/cli/augment.js +33 -0
- package/dist/cli/claude-hooks.d.ts +22 -0
- package/dist/cli/claude-hooks.js +97 -0
- package/dist/cli/eval-server.d.ts +30 -0
- package/dist/cli/eval-server.js +372 -0
- package/dist/cli/index.js +56 -1
- package/dist/cli/mcp.js +9 -0
- package/dist/cli/setup.js +184 -5
- package/dist/cli/tool.d.ts +37 -0
- package/dist/cli/tool.js +91 -0
- package/dist/cli/wiki.d.ts +13 -0
- package/dist/cli/wiki.js +199 -0
- package/dist/core/augmentation/engine.d.ts +26 -0
- package/dist/core/augmentation/engine.js +213 -0
- package/dist/core/embeddings/embedder.d.ts +2 -2
- package/dist/core/embeddings/embedder.js +11 -11
- package/dist/core/embeddings/embedding-pipeline.d.ts +2 -1
- package/dist/core/embeddings/embedding-pipeline.js +13 -5
- package/dist/core/embeddings/types.d.ts +2 -2
- package/dist/core/ingestion/call-processor.d.ts +7 -0
- package/dist/core/ingestion/call-processor.js +61 -23
- package/dist/core/ingestion/community-processor.js +34 -26
- package/dist/core/ingestion/filesystem-walker.js +15 -10
- package/dist/core/ingestion/heritage-processor.d.ts +6 -0
- package/dist/core/ingestion/heritage-processor.js +68 -5
- package/dist/core/ingestion/import-processor.d.ts +22 -0
- package/dist/core/ingestion/import-processor.js +215 -20
- package/dist/core/ingestion/parsing-processor.d.ts +8 -1
- package/dist/core/ingestion/parsing-processor.js +66 -25
- package/dist/core/ingestion/pipeline.js +104 -40
- package/dist/core/ingestion/process-processor.js +1 -1
- package/dist/core/ingestion/workers/parse-worker.d.ts +58 -0
- package/dist/core/ingestion/workers/parse-worker.js +451 -0
- package/dist/core/ingestion/workers/worker-pool.d.ts +22 -0
- package/dist/core/ingestion/workers/worker-pool.js +65 -0
- package/dist/core/kuzu/kuzu-adapter.d.ts +15 -1
- package/dist/core/kuzu/kuzu-adapter.js +177 -63
- package/dist/core/kuzu/schema.d.ts +1 -1
- package/dist/core/kuzu/schema.js +3 -0
- package/dist/core/search/bm25-index.js +13 -15
- package/dist/core/wiki/generator.d.ts +96 -0
- package/dist/core/wiki/generator.js +674 -0
- package/dist/core/wiki/graph-queries.d.ts +80 -0
- package/dist/core/wiki/graph-queries.js +238 -0
- package/dist/core/wiki/html-viewer.d.ts +10 -0
- package/dist/core/wiki/html-viewer.js +297 -0
- package/dist/core/wiki/llm-client.d.ts +36 -0
- package/dist/core/wiki/llm-client.js +111 -0
- package/dist/core/wiki/prompts.d.ts +53 -0
- package/dist/core/wiki/prompts.js +174 -0
- package/dist/mcp/core/embedder.js +4 -2
- package/dist/mcp/core/kuzu-adapter.d.ts +2 -1
- package/dist/mcp/core/kuzu-adapter.js +35 -15
- package/dist/mcp/local/local-backend.d.ts +54 -1
- package/dist/mcp/local/local-backend.js +716 -171
- package/dist/mcp/resources.d.ts +1 -1
- package/dist/mcp/resources.js +111 -73
- package/dist/mcp/server.d.ts +1 -1
- package/dist/mcp/server.js +91 -22
- package/dist/mcp/tools.js +80 -61
- package/dist/storage/git.d.ts +0 -1
- package/dist/storage/git.js +1 -8
- package/dist/storage/repo-manager.d.ts +17 -0
- package/dist/storage/repo-manager.js +26 -0
- package/hooks/claude/gitnexus-hook.cjs +135 -0
- package/hooks/claude/pre-tool-use.sh +78 -0
- package/hooks/claude/session-start.sh +42 -0
- package/package.json +4 -2
- package/skills/debugging.md +24 -22
- package/skills/exploring.md +26 -24
- package/skills/impact-analysis.md +19 -13
- package/skills/refactoring.md +37 -26
|
@@ -0,0 +1,674 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Wiki Generator
|
|
3
|
+
*
|
|
4
|
+
* Orchestrates the full wiki generation pipeline:
|
|
5
|
+
* Phase 0: Validate prerequisites + gather graph structure
|
|
6
|
+
* Phase 1: Build module tree (one LLM call)
|
|
7
|
+
* Phase 2: Generate module pages (one LLM call per module, bottom-up)
|
|
8
|
+
* Phase 3: Generate overview page
|
|
9
|
+
*
|
|
10
|
+
* Supports incremental updates via git diff + module-file mapping.
|
|
11
|
+
*/
|
|
12
|
+
import fs from 'fs/promises';
|
|
13
|
+
import path from 'path';
|
|
14
|
+
import { execSync } from 'child_process';
|
|
15
|
+
import { initWikiDb, closeWikiDb, getFilesWithExports, getAllFiles, getIntraModuleCallEdges, getInterModuleCallEdges, getProcessesForFiles, getAllProcesses, getInterModuleEdgesForOverview, } from './graph-queries.js';
|
|
16
|
+
import { generateHTMLViewer } from './html-viewer.js';
|
|
17
|
+
import { callLLM, estimateTokens, } from './llm-client.js';
|
|
18
|
+
import { GROUPING_SYSTEM_PROMPT, GROUPING_USER_PROMPT, MODULE_SYSTEM_PROMPT, MODULE_USER_PROMPT, PARENT_SYSTEM_PROMPT, PARENT_USER_PROMPT, OVERVIEW_SYSTEM_PROMPT, OVERVIEW_USER_PROMPT, fillTemplate, formatFileListForGrouping, formatDirectoryTree, formatCallEdges, formatProcesses, } from './prompts.js';
|
|
19
|
+
import { shouldIgnorePath } from '../../config/ignore-service.js';
|
|
20
|
+
// ─── Constants ────────────────────────────────────────────────────────
/** Default per-module source budget in tokens (modules above this are split/truncated). */
const DEFAULT_MAX_TOKENS_PER_MODULE = 30000;
/** Subdirectory of the storage path where all wiki artifacts are written. */
const WIKI_DIR = 'wiki';
|
|
23
|
+
// ─── Generator Class ──────────────────────────────────────────────────
|
|
24
|
+
export class WikiGenerator {
|
|
25
|
+
// Absolute path of the repository being documented.
repoPath;
// Root of gitnexus storage for this repo; the wiki lives in a subdir of it.
storagePath;
// storagePath + '/wiki' — destination for all generated pages and metadata.
wikiDir;
// Path to the Kuzu graph database built by the ingestion pipeline.
kuzuPath;
// LLM configuration (model, credentials, etc. — shape defined by llm-client).
llmConfig;
// Token budget per module page; oversized modules get split or truncated.
maxTokensPerModule;
// Raw options bag (force, maxTokensPerModule, ...).
options;
// Progress callback: (stage, percent, message). Defaults to a no-op.
onProgress;
// Names of modules whose page generation threw; reported in the result.
failedModules = [];
constructor(repoPath, storagePath, kuzuPath, llmConfig, options = {}, onProgress) {
    this.repoPath = repoPath;
    this.storagePath = storagePath;
    this.wikiDir = path.join(storagePath, WIKI_DIR);
    this.kuzuPath = kuzuPath;
    this.options = options;
    this.llmConfig = llmConfig;
    this.maxTokensPerModule = options.maxTokensPerModule ?? DEFAULT_MAX_TOKENS_PER_MODULE;
    this.onProgress = onProgress || (() => { });
}
|
|
44
|
+
/**
 * Main entry point. Runs the full pipeline or incremental update.
 *
 * Flow: up-to-date short-circuit → (force: wipe snapshot + pages) →
 * open graph DB → full or incremental generation → close DB →
 * build the HTML viewer when anything changed.
 *
 * @returns {{ pagesGenerated: number, mode: string, failedModules: string[] }}
 */
async run() {
    await fs.mkdir(this.wikiDir, { recursive: true });
    const existingMeta = await this.loadWikiMeta();
    const currentCommit = this.getCurrentCommit();
    const forceMode = this.options.force;
    // Up-to-date check (skip if --force): nothing to do when the wiki was
    // last generated at exactly the current HEAD commit.
    if (!forceMode && existingMeta && existingMeta.fromCommit === currentCommit) {
        return { pagesGenerated: 0, mode: 'up-to-date', failedModules: [] };
    }
    // Force mode: delete snapshot to force full re-grouping
    if (forceMode) {
        try {
            await fs.unlink(path.join(this.wikiDir, 'first_module_tree.json'));
        }
        catch { }
        // Delete existing module pages so they get regenerated
        // (page generation skips any .md that already exists).
        const existingFiles = await fs.readdir(this.wikiDir).catch(() => []);
        for (const f of existingFiles) {
            if (f.endsWith('.md')) {
                try {
                    await fs.unlink(path.join(this.wikiDir, f));
                }
                catch { }
            }
        }
    }
    // Init graph
    this.onProgress('init', 2, 'Connecting to knowledge graph...');
    await initWikiDb(this.kuzuPath);
    let result;
    try {
        // Incremental is only possible when a previous commit is recorded.
        if (!forceMode && existingMeta && existingMeta.fromCommit) {
            result = await this.incrementalUpdate(existingMeta, currentCommit);
        }
        else {
            result = await this.fullGeneration(currentCommit);
        }
    }
    finally {
        // Always release the graph DB, even if generation threw.
        await closeWikiDb();
    }
    // Generate self-contained HTML viewer
    if (result.pagesGenerated > 0) {
        this.onProgress('html', 98, 'Building HTML viewer...');
        const repoName = path.basename(this.repoPath);
        await generateHTMLViewer(this.wikiDir, repoName);
    }
    return result;
}
|
|
96
|
+
// ─── Full Generation ────────────────────────────────────────────────
|
|
97
|
+
async fullGeneration(currentCommit) {
|
|
98
|
+
let pagesGenerated = 0;
|
|
99
|
+
// Phase 0: Gather structure
|
|
100
|
+
this.onProgress('gather', 5, 'Querying graph for file structure...');
|
|
101
|
+
const filesWithExports = await getFilesWithExports();
|
|
102
|
+
const allFiles = await getAllFiles();
|
|
103
|
+
// Filter to source files only
|
|
104
|
+
const sourceFiles = allFiles.filter(f => !shouldIgnorePath(f));
|
|
105
|
+
if (sourceFiles.length === 0) {
|
|
106
|
+
throw new Error('No source files found in the knowledge graph. Nothing to document.');
|
|
107
|
+
}
|
|
108
|
+
// Build enriched file list (merge exports into all source files)
|
|
109
|
+
const exportMap = new Map(filesWithExports.map(f => [f.filePath, f]));
|
|
110
|
+
const enrichedFiles = sourceFiles.map(fp => {
|
|
111
|
+
return exportMap.get(fp) || { filePath: fp, symbols: [] };
|
|
112
|
+
});
|
|
113
|
+
this.onProgress('gather', 10, `Found ${sourceFiles.length} source files`);
|
|
114
|
+
// Phase 1: Build module tree
|
|
115
|
+
const moduleTree = await this.buildModuleTree(enrichedFiles);
|
|
116
|
+
pagesGenerated = 0;
|
|
117
|
+
// Phase 2: Generate module pages (bottom-up)
|
|
118
|
+
const totalModules = this.countModules(moduleTree);
|
|
119
|
+
let modulesProcessed = 0;
|
|
120
|
+
for (const node of moduleTree) {
|
|
121
|
+
const generated = await this.generateModulePage(node, () => {
|
|
122
|
+
modulesProcessed++;
|
|
123
|
+
const percent = 30 + Math.round((modulesProcessed / totalModules) * 55);
|
|
124
|
+
this.onProgress('modules', percent, `${modulesProcessed}/${totalModules} modules`);
|
|
125
|
+
});
|
|
126
|
+
pagesGenerated += generated;
|
|
127
|
+
}
|
|
128
|
+
// Phase 3: Generate overview
|
|
129
|
+
this.onProgress('overview', 88, 'Generating overview page...');
|
|
130
|
+
await this.generateOverview(moduleTree);
|
|
131
|
+
pagesGenerated++;
|
|
132
|
+
// Save metadata
|
|
133
|
+
this.onProgress('finalize', 95, 'Saving metadata...');
|
|
134
|
+
const moduleFiles = this.extractModuleFiles(moduleTree);
|
|
135
|
+
await this.saveModuleTree(moduleTree);
|
|
136
|
+
await this.saveWikiMeta({
|
|
137
|
+
fromCommit: currentCommit,
|
|
138
|
+
generatedAt: new Date().toISOString(),
|
|
139
|
+
model: this.llmConfig.model,
|
|
140
|
+
moduleFiles,
|
|
141
|
+
moduleTree,
|
|
142
|
+
});
|
|
143
|
+
this.onProgress('done', 100, 'Wiki generation complete');
|
|
144
|
+
return { pagesGenerated, mode: 'full', failedModules: [...this.failedModules] };
|
|
145
|
+
}
|
|
146
|
+
// ─── Phase 1: Build Module Tree ────────────────────────────────────
|
|
147
|
+
/**
 * Phase 1: group files into named modules via a single LLM call.
 *
 * Resumable: the first successful grouping is persisted to
 * `first_module_tree.json` and reused on subsequent runs, so an
 * interrupted generation neither repeats nor re-randomizes grouping.
 *
 * @param files - enriched entries `{ filePath, symbols }`.
 * @returns module tree nodes `{ name, slug, files, children? }`.
 */
async buildModuleTree(files) {
    // Check for existing immutable snapshot (resumability)
    const snapshotPath = path.join(this.wikiDir, 'first_module_tree.json');
    try {
        const existing = await fs.readFile(snapshotPath, 'utf-8');
        const parsed = JSON.parse(existing);
        if (Array.isArray(parsed) && parsed.length > 0) {
            this.onProgress('grouping', 25, 'Using existing module tree (resuming)');
            return parsed;
        }
    }
    catch {
        // No snapshot, generate new
    }
    this.onProgress('grouping', 15, 'Grouping files into modules (LLM)...');
    const fileList = formatFileListForGrouping(files);
    const dirTree = formatDirectoryTree(files.map(f => f.filePath));
    const prompt = fillTemplate(GROUPING_USER_PROMPT, {
        FILE_LIST: fileList,
        DIRECTORY_TREE: dirTree,
    });
    const response = await callLLM(prompt, this.llmConfig, GROUPING_SYSTEM_PROMPT);
    // parseGroupingResponse falls back to directory grouping on bad output.
    const grouping = this.parseGroupingResponse(response.content, files);
    // Convert to tree nodes
    const tree = [];
    for (const [moduleName, modulePaths] of Object.entries(grouping)) {
        const slug = this.slugify(moduleName);
        const node = { name: moduleName, slug, files: modulePaths };
        // Token budget check — split if too large (only when there are
        // enough files for subdirectory splitting to be meaningful).
        const totalTokens = await this.estimateModuleTokens(modulePaths);
        if (totalTokens > this.maxTokensPerModule && modulePaths.length > 3) {
            node.children = this.splitBySubdirectory(moduleName, modulePaths);
            node.files = []; // Parent doesn't own files directly when split
        }
        tree.push(node);
    }
    // Save immutable snapshot for resumability
    await fs.writeFile(snapshotPath, JSON.stringify(tree, null, 2), 'utf-8');
    this.onProgress('grouping', 28, `Created ${tree.length} modules`);
    return tree;
}
|
|
188
|
+
/**
|
|
189
|
+
* Parse LLM grouping response. Validates all files are assigned.
|
|
190
|
+
*/
|
|
191
|
+
parseGroupingResponse(content, files) {
|
|
192
|
+
// Extract JSON from response (handle markdown fences)
|
|
193
|
+
let jsonStr = content.trim();
|
|
194
|
+
const fenceMatch = jsonStr.match(/```(?:json)?\s*\n?([\s\S]*?)\n?```/);
|
|
195
|
+
if (fenceMatch) {
|
|
196
|
+
jsonStr = fenceMatch[1].trim();
|
|
197
|
+
}
|
|
198
|
+
let parsed;
|
|
199
|
+
try {
|
|
200
|
+
parsed = JSON.parse(jsonStr);
|
|
201
|
+
}
|
|
202
|
+
catch {
|
|
203
|
+
// Fallback: group by top-level directory
|
|
204
|
+
return this.fallbackGrouping(files);
|
|
205
|
+
}
|
|
206
|
+
if (typeof parsed !== 'object' || Array.isArray(parsed)) {
|
|
207
|
+
return this.fallbackGrouping(files);
|
|
208
|
+
}
|
|
209
|
+
// Validate — ensure all files are assigned
|
|
210
|
+
const allFilePaths = new Set(files.map(f => f.filePath));
|
|
211
|
+
const assignedFiles = new Set();
|
|
212
|
+
const validGrouping = {};
|
|
213
|
+
for (const [mod, paths] of Object.entries(parsed)) {
|
|
214
|
+
if (!Array.isArray(paths))
|
|
215
|
+
continue;
|
|
216
|
+
const validPaths = paths.filter(p => {
|
|
217
|
+
if (allFilePaths.has(p) && !assignedFiles.has(p)) {
|
|
218
|
+
assignedFiles.add(p);
|
|
219
|
+
return true;
|
|
220
|
+
}
|
|
221
|
+
return false;
|
|
222
|
+
});
|
|
223
|
+
if (validPaths.length > 0) {
|
|
224
|
+
validGrouping[mod] = validPaths;
|
|
225
|
+
}
|
|
226
|
+
}
|
|
227
|
+
// Assign unassigned files to a "Miscellaneous" module
|
|
228
|
+
const unassigned = files
|
|
229
|
+
.map(f => f.filePath)
|
|
230
|
+
.filter(fp => !assignedFiles.has(fp));
|
|
231
|
+
if (unassigned.length > 0) {
|
|
232
|
+
validGrouping['Other'] = unassigned;
|
|
233
|
+
}
|
|
234
|
+
return Object.keys(validGrouping).length > 0
|
|
235
|
+
? validGrouping
|
|
236
|
+
: this.fallbackGrouping(files);
|
|
237
|
+
}
|
|
238
|
+
/**
|
|
239
|
+
* Fallback grouping by top-level directory when LLM parsing fails.
|
|
240
|
+
*/
|
|
241
|
+
fallbackGrouping(files) {
|
|
242
|
+
const groups = new Map();
|
|
243
|
+
for (const f of files) {
|
|
244
|
+
const parts = f.filePath.replace(/\\/g, '/').split('/');
|
|
245
|
+
const topDir = parts.length > 1 ? parts[0] : 'Root';
|
|
246
|
+
let group = groups.get(topDir);
|
|
247
|
+
if (!group) {
|
|
248
|
+
group = [];
|
|
249
|
+
groups.set(topDir, group);
|
|
250
|
+
}
|
|
251
|
+
group.push(f.filePath);
|
|
252
|
+
}
|
|
253
|
+
return Object.fromEntries(groups);
|
|
254
|
+
}
|
|
255
|
+
/**
|
|
256
|
+
* Split a large module into sub-modules by subdirectory.
|
|
257
|
+
*/
|
|
258
|
+
splitBySubdirectory(moduleName, files) {
|
|
259
|
+
const subGroups = new Map();
|
|
260
|
+
for (const fp of files) {
|
|
261
|
+
const parts = fp.replace(/\\/g, '/').split('/');
|
|
262
|
+
// Use the deepest common-ish directory
|
|
263
|
+
const subDir = parts.length > 2 ? parts.slice(0, 2).join('/') : parts[0];
|
|
264
|
+
let group = subGroups.get(subDir);
|
|
265
|
+
if (!group) {
|
|
266
|
+
group = [];
|
|
267
|
+
subGroups.set(subDir, group);
|
|
268
|
+
}
|
|
269
|
+
group.push(fp);
|
|
270
|
+
}
|
|
271
|
+
return Array.from(subGroups.entries()).map(([subDir, subFiles]) => ({
|
|
272
|
+
name: `${moduleName} — ${path.basename(subDir)}`,
|
|
273
|
+
slug: this.slugify(`${moduleName}-${path.basename(subDir)}`),
|
|
274
|
+
files: subFiles,
|
|
275
|
+
}));
|
|
276
|
+
}
|
|
277
|
+
// ─── Phase 2: Generate Module Pages ─────────────────────────────────
|
|
278
|
+
/**
|
|
279
|
+
* Recursively generate pages for a module tree node.
|
|
280
|
+
* Returns count of pages generated.
|
|
281
|
+
*/
|
|
282
|
+
async generateModulePage(node, onPageDone) {
|
|
283
|
+
let count = 0;
|
|
284
|
+
// If node has children, generate children first (bottom-up)
|
|
285
|
+
if (node.children && node.children.length > 0) {
|
|
286
|
+
for (const child of node.children) {
|
|
287
|
+
count += await this.generateModulePage(child, onPageDone);
|
|
288
|
+
}
|
|
289
|
+
// Then generate parent page from children docs
|
|
290
|
+
const pagePath = path.join(this.wikiDir, `${node.slug}.md`);
|
|
291
|
+
// Resumability: skip if page already exists
|
|
292
|
+
if (await this.fileExists(pagePath)) {
|
|
293
|
+
onPageDone();
|
|
294
|
+
return count;
|
|
295
|
+
}
|
|
296
|
+
try {
|
|
297
|
+
await this.generateParentPage(node);
|
|
298
|
+
count++;
|
|
299
|
+
}
|
|
300
|
+
catch (err) {
|
|
301
|
+
this.failedModules.push(node.name);
|
|
302
|
+
this.onProgress('modules', 0, `Failed: ${node.name} — ${err.message?.slice(0, 80)}`);
|
|
303
|
+
}
|
|
304
|
+
onPageDone();
|
|
305
|
+
return count;
|
|
306
|
+
}
|
|
307
|
+
// Leaf module — generate from source code
|
|
308
|
+
const pagePath = path.join(this.wikiDir, `${node.slug}.md`);
|
|
309
|
+
// Resumability: skip if page already exists
|
|
310
|
+
if (await this.fileExists(pagePath)) {
|
|
311
|
+
onPageDone();
|
|
312
|
+
return count;
|
|
313
|
+
}
|
|
314
|
+
try {
|
|
315
|
+
await this.generateLeafPage(node);
|
|
316
|
+
count++;
|
|
317
|
+
}
|
|
318
|
+
catch (err) {
|
|
319
|
+
this.failedModules.push(node.name);
|
|
320
|
+
this.onProgress('modules', 0, `Failed: ${node.name} — ${err.message?.slice(0, 80)}`);
|
|
321
|
+
}
|
|
322
|
+
onPageDone();
|
|
323
|
+
return count;
|
|
324
|
+
}
|
|
325
|
+
/**
 * Generate a leaf module page from source code + graph data.
 *
 * Reads the module's files from disk, truncates to the token budget if
 * needed, enriches the prompt with call edges and processes from the
 * graph, and writes `<slug>.md`.
 *
 * @throws propagates LLM/filesystem errors to the caller
 *         (generateModulePage records them as failures).
 */
async generateLeafPage(node) {
    const filePaths = node.files;
    // Read source files from disk
    const sourceCode = await this.readSourceFiles(filePaths);
    // Token budget check — truncate if too large (keeps a prefix and
    // appends a truncation notice; see truncateSource).
    const totalTokens = estimateTokens(sourceCode);
    let finalSourceCode = sourceCode;
    if (totalTokens > this.maxTokensPerModule) {
        finalSourceCode = this.truncateSource(sourceCode, this.maxTokensPerModule);
    }
    // Get graph data (the three queries are independent, so run in parallel)
    const [intraCalls, interCalls, processes] = await Promise.all([
        getIntraModuleCallEdges(filePaths),
        getInterModuleCallEdges(filePaths),
        getProcessesForFiles(filePaths, 5),
    ]);
    const prompt = fillTemplate(MODULE_USER_PROMPT, {
        MODULE_NAME: node.name,
        SOURCE_CODE: finalSourceCode,
        INTRA_CALLS: formatCallEdges(intraCalls),
        OUTGOING_CALLS: formatCallEdges(interCalls.outgoing),
        INCOMING_CALLS: formatCallEdges(interCalls.incoming),
        PROCESSES: formatProcesses(processes),
    });
    const response = await callLLM(prompt, this.llmConfig, MODULE_SYSTEM_PROMPT);
    // Write page with a title heading prepended to the LLM output
    const pageContent = `# ${node.name}\n\n${response.content}`;
    await fs.writeFile(path.join(this.wikiDir, `${node.slug}.md`), pageContent, 'utf-8');
}
|
|
357
|
+
/**
 * Generate a parent module page from children's documentation.
 *
 * Summarizes each child's already-written page (the overview section,
 * or a fixed-size excerpt) and asks the LLM to synthesize a parent page.
 * No-op when the node has no children.
 */
async generateParentPage(node) {
    if (!node.children || node.children.length === 0)
        return;
    // Read children's overview sections
    const childDocs = [];
    for (const child of node.children) {
        const childPage = path.join(this.wikiDir, `${child.slug}.md`);
        try {
            const content = await fs.readFile(childPage, 'utf-8');
            // Take everything before "### Architecture"; fall back to the
            // first 800 chars when that heading is absent.
            const overviewEnd = content.indexOf('### Architecture');
            const overview = overviewEnd > 0 ? content.slice(0, overviewEnd).trim() : content.slice(0, 800).trim();
            childDocs.push(`#### ${child.name}\n${overview}`);
        }
        catch {
            // Child page may be missing if its generation failed earlier.
            childDocs.push(`#### ${child.name}\n(Documentation not yet generated)`);
        }
    }
    // Call edges among all children's files (note: this query spans the
    // union of the files, so it includes intra-child edges as well).
    const allChildFiles = node.children.flatMap(c => c.files);
    const crossCalls = await getIntraModuleCallEdges(allChildFiles);
    const processes = await getProcessesForFiles(allChildFiles, 3);
    const prompt = fillTemplate(PARENT_USER_PROMPT, {
        MODULE_NAME: node.name,
        CHILDREN_DOCS: childDocs.join('\n\n'),
        CROSS_MODULE_CALLS: formatCallEdges(crossCalls),
        CROSS_PROCESSES: formatProcesses(processes),
    });
    const response = await callLLM(prompt, this.llmConfig, PARENT_SYSTEM_PROMPT);
    const pageContent = `# ${node.name}\n\n${response.content}`;
    await fs.writeFile(path.join(this.wikiDir, `${node.slug}.md`), pageContent, 'utf-8');
}
|
|
392
|
+
// ─── Phase 3: Generate Overview ─────────────────────────────────────
|
|
393
|
+
/**
 * Phase 3: generate `overview.md` from the top-level module pages,
 * inter-module call edges, top processes, and project config/README.
 */
async generateOverview(moduleTree) {
    // Read module overview sections
    const moduleSummaries = [];
    for (const node of moduleTree) {
        const pagePath = path.join(this.wikiDir, `${node.slug}.md`);
        try {
            const content = await fs.readFile(pagePath, 'utf-8');
            // Everything before "### Architecture", else the first 600 chars.
            const overviewEnd = content.indexOf('### Architecture');
            const overview = overviewEnd > 0 ? content.slice(0, overviewEnd).trim() : content.slice(0, 600).trim();
            moduleSummaries.push(`#### ${node.name}\n${overview}`);
        }
        catch {
            // Page missing (generation failed or not yet run for this module).
            moduleSummaries.push(`#### ${node.name}\n(Documentation pending)`);
        }
    }
    // Get inter-module edges for architecture diagram
    const moduleFiles = this.extractModuleFiles(moduleTree);
    const moduleEdges = await getInterModuleEdgesForOverview(moduleFiles);
    // Get top processes for key workflows
    const topProcesses = await getAllProcesses(5);
    // Read project config
    const projectInfo = await this.readProjectInfo();
    const edgesText = moduleEdges.length > 0
        ? moduleEdges.map(e => `${e.from} → ${e.to} (${e.count} calls)`).join('\n')
        : 'No inter-module call edges detected';
    const prompt = fillTemplate(OVERVIEW_USER_PROMPT, {
        PROJECT_INFO: projectInfo,
        MODULE_SUMMARIES: moduleSummaries.join('\n\n'),
        MODULE_EDGES: edgesText,
        TOP_PROCESSES: formatProcesses(topProcesses),
    });
    const response = await callLLM(prompt, this.llmConfig, OVERVIEW_SYSTEM_PROMPT);
    const pageContent = `# ${path.basename(this.repoPath)} — Wiki\n\n${response.content}`;
    await fs.writeFile(path.join(this.wikiDir, 'overview.md'), pageContent, 'utf-8');
}
|
|
428
|
+
// ─── Incremental Updates ────────────────────────────────────────────
|
|
429
|
+
async incrementalUpdate(existingMeta, currentCommit) {
|
|
430
|
+
this.onProgress('incremental', 5, 'Detecting changes...');
|
|
431
|
+
// Get changed files since last generation
|
|
432
|
+
const changedFiles = this.getChangedFiles(existingMeta.fromCommit, currentCommit);
|
|
433
|
+
if (changedFiles.length === 0) {
|
|
434
|
+
// No file changes but commit differs (e.g. merge commit)
|
|
435
|
+
await this.saveWikiMeta({
|
|
436
|
+
...existingMeta,
|
|
437
|
+
fromCommit: currentCommit,
|
|
438
|
+
generatedAt: new Date().toISOString(),
|
|
439
|
+
});
|
|
440
|
+
return { pagesGenerated: 0, mode: 'incremental', failedModules: [] };
|
|
441
|
+
}
|
|
442
|
+
this.onProgress('incremental', 10, `${changedFiles.length} files changed`);
|
|
443
|
+
// Determine affected modules
|
|
444
|
+
const affectedModules = new Set();
|
|
445
|
+
const newFiles = [];
|
|
446
|
+
for (const fp of changedFiles) {
|
|
447
|
+
let found = false;
|
|
448
|
+
for (const [mod, files] of Object.entries(existingMeta.moduleFiles)) {
|
|
449
|
+
if (files.includes(fp)) {
|
|
450
|
+
affectedModules.add(mod);
|
|
451
|
+
found = true;
|
|
452
|
+
break;
|
|
453
|
+
}
|
|
454
|
+
}
|
|
455
|
+
if (!found && !shouldIgnorePath(fp)) {
|
|
456
|
+
newFiles.push(fp);
|
|
457
|
+
}
|
|
458
|
+
}
|
|
459
|
+
// If significant new files exist, re-run full grouping
|
|
460
|
+
if (newFiles.length > 5) {
|
|
461
|
+
this.onProgress('incremental', 15, 'Significant new files detected, running full generation...');
|
|
462
|
+
// Delete old snapshot to force re-grouping
|
|
463
|
+
try {
|
|
464
|
+
await fs.unlink(path.join(this.wikiDir, 'first_module_tree.json'));
|
|
465
|
+
}
|
|
466
|
+
catch { }
|
|
467
|
+
const fullResult = await this.fullGeneration(currentCommit);
|
|
468
|
+
return { ...fullResult, mode: 'incremental' };
|
|
469
|
+
}
|
|
470
|
+
// Add new files to nearest module or "Other"
|
|
471
|
+
if (newFiles.length > 0) {
|
|
472
|
+
if (!existingMeta.moduleFiles['Other']) {
|
|
473
|
+
existingMeta.moduleFiles['Other'] = [];
|
|
474
|
+
}
|
|
475
|
+
existingMeta.moduleFiles['Other'].push(...newFiles);
|
|
476
|
+
affectedModules.add('Other');
|
|
477
|
+
}
|
|
478
|
+
// Regenerate affected module pages
|
|
479
|
+
let pagesGenerated = 0;
|
|
480
|
+
const moduleTree = existingMeta.moduleTree;
|
|
481
|
+
const affectedArray = Array.from(affectedModules);
|
|
482
|
+
this.onProgress('incremental', 20, `Regenerating ${affectedArray.length} module(s)...`);
|
|
483
|
+
for (let i = 0; i < affectedArray.length; i++) {
|
|
484
|
+
const modSlug = this.slugify(affectedArray[i]);
|
|
485
|
+
const node = this.findNodeBySlug(moduleTree, modSlug);
|
|
486
|
+
if (node) {
|
|
487
|
+
// Delete existing page to force re-generation
|
|
488
|
+
try {
|
|
489
|
+
await fs.unlink(path.join(this.wikiDir, `${node.slug}.md`));
|
|
490
|
+
}
|
|
491
|
+
catch { }
|
|
492
|
+
await this.generateModulePage(node, () => { });
|
|
493
|
+
pagesGenerated++;
|
|
494
|
+
}
|
|
495
|
+
const percent = 20 + Math.round(((i + 1) / affectedArray.length) * 60);
|
|
496
|
+
this.onProgress('incremental', percent, `${i + 1}/${affectedArray.length} modules`);
|
|
497
|
+
}
|
|
498
|
+
// Regenerate overview if any pages changed
|
|
499
|
+
if (pagesGenerated > 0) {
|
|
500
|
+
this.onProgress('incremental', 85, 'Updating overview...');
|
|
501
|
+
await this.generateOverview(moduleTree);
|
|
502
|
+
pagesGenerated++;
|
|
503
|
+
}
|
|
504
|
+
// Save updated metadata
|
|
505
|
+
this.onProgress('incremental', 95, 'Saving metadata...');
|
|
506
|
+
await this.saveWikiMeta({
|
|
507
|
+
...existingMeta,
|
|
508
|
+
fromCommit: currentCommit,
|
|
509
|
+
generatedAt: new Date().toISOString(),
|
|
510
|
+
model: this.llmConfig.model,
|
|
511
|
+
});
|
|
512
|
+
this.onProgress('done', 100, 'Incremental update complete');
|
|
513
|
+
return { pagesGenerated, mode: 'incremental', failedModules: [...this.failedModules] };
|
|
514
|
+
}
|
|
515
|
+
// ─── Helpers ────────────────────────────────────────────────────────
|
|
516
|
+
getCurrentCommit() {
|
|
517
|
+
try {
|
|
518
|
+
return execSync('git rev-parse HEAD', { cwd: this.repoPath }).toString().trim();
|
|
519
|
+
}
|
|
520
|
+
catch {
|
|
521
|
+
return '';
|
|
522
|
+
}
|
|
523
|
+
}
|
|
524
|
+
getChangedFiles(fromCommit, toCommit) {
|
|
525
|
+
try {
|
|
526
|
+
const output = execSync(`git diff ${fromCommit}..${toCommit} --name-only`, { cwd: this.repoPath }).toString().trim();
|
|
527
|
+
return output ? output.split('\n').filter(Boolean) : [];
|
|
528
|
+
}
|
|
529
|
+
catch {
|
|
530
|
+
return [];
|
|
531
|
+
}
|
|
532
|
+
}
|
|
533
|
+
async readSourceFiles(filePaths) {
|
|
534
|
+
const parts = [];
|
|
535
|
+
for (const fp of filePaths) {
|
|
536
|
+
const fullPath = path.join(this.repoPath, fp);
|
|
537
|
+
try {
|
|
538
|
+
const content = await fs.readFile(fullPath, 'utf-8');
|
|
539
|
+
parts.push(`\n--- ${fp} ---\n${content}`);
|
|
540
|
+
}
|
|
541
|
+
catch {
|
|
542
|
+
parts.push(`\n--- ${fp} ---\n(file not readable)`);
|
|
543
|
+
}
|
|
544
|
+
}
|
|
545
|
+
return parts.join('\n');
|
|
546
|
+
}
|
|
547
|
+
truncateSource(source, maxTokens) {
|
|
548
|
+
// Rough truncation: keep first maxTokens*4 chars and add notice
|
|
549
|
+
const maxChars = maxTokens * 4;
|
|
550
|
+
if (source.length <= maxChars)
|
|
551
|
+
return source;
|
|
552
|
+
return source.slice(0, maxChars) + '\n\n... (source truncated for context window limits)';
|
|
553
|
+
}
|
|
554
|
+
async estimateModuleTokens(filePaths) {
|
|
555
|
+
let total = 0;
|
|
556
|
+
for (const fp of filePaths) {
|
|
557
|
+
try {
|
|
558
|
+
const content = await fs.readFile(path.join(this.repoPath, fp), 'utf-8');
|
|
559
|
+
total += estimateTokens(content);
|
|
560
|
+
}
|
|
561
|
+
catch {
|
|
562
|
+
// File not readable, skip
|
|
563
|
+
}
|
|
564
|
+
}
|
|
565
|
+
return total;
|
|
566
|
+
}
|
|
567
|
+
/**
 * Build a short plain-text project description for the overview prompt:
 * repo basename, the first recognized build/config file found, and a
 * README excerpt. Best-effort — missing/unparseable files are skipped.
 */
async readProjectInfo() {
    const candidates = ['package.json', 'Cargo.toml', 'pyproject.toml', 'go.mod', 'pom.xml', 'build.gradle'];
    const lines = [`Project: ${path.basename(this.repoPath)}`];
    for (const file of candidates) {
        const fullPath = path.join(this.repoPath, file);
        try {
            const content = await fs.readFile(fullPath, 'utf-8');
            if (file === 'package.json') {
                // Structured extraction for package.json; a JSON.parse
                // failure is caught below and moves on to the next candidate.
                const pkg = JSON.parse(content);
                if (pkg.name)
                    lines.push(`Name: ${pkg.name}`);
                if (pkg.description)
                    lines.push(`Description: ${pkg.description}`);
                if (pkg.scripts)
                    lines.push(`Scripts: ${Object.keys(pkg.scripts).join(', ')}`);
            }
            else {
                // Include first 500 chars of other config files
                lines.push(`\n${file}:\n${content.slice(0, 500)}`);
            }
            break; // Use first config found
        }
        catch {
            continue;
        }
    }
    // Read README excerpt (first variant that exists wins)
    for (const readme of ['README.md', 'readme.md', 'README.txt']) {
        try {
            const content = await fs.readFile(path.join(this.repoPath, readme), 'utf-8');
            lines.push(`\nREADME excerpt:\n${content.slice(0, 1000)}`);
            break;
        }
        catch {
            continue;
        }
    }
    return lines.join('\n');
}
|
|
606
|
+
extractModuleFiles(tree) {
|
|
607
|
+
const result = {};
|
|
608
|
+
for (const node of tree) {
|
|
609
|
+
if (node.children && node.children.length > 0) {
|
|
610
|
+
result[node.name] = node.children.flatMap(c => c.files);
|
|
611
|
+
for (const child of node.children) {
|
|
612
|
+
result[child.name] = child.files;
|
|
613
|
+
}
|
|
614
|
+
}
|
|
615
|
+
else {
|
|
616
|
+
result[node.name] = node.files;
|
|
617
|
+
}
|
|
618
|
+
}
|
|
619
|
+
return result;
|
|
620
|
+
}
|
|
621
|
+
countModules(tree) {
|
|
622
|
+
let count = 0;
|
|
623
|
+
for (const node of tree) {
|
|
624
|
+
count++;
|
|
625
|
+
if (node.children) {
|
|
626
|
+
count += node.children.length;
|
|
627
|
+
}
|
|
628
|
+
}
|
|
629
|
+
return count;
|
|
630
|
+
}
|
|
631
|
+
findNodeBySlug(tree, slug) {
|
|
632
|
+
for (const node of tree) {
|
|
633
|
+
if (node.slug === slug)
|
|
634
|
+
return node;
|
|
635
|
+
if (node.children) {
|
|
636
|
+
const found = this.findNodeBySlug(node.children, slug);
|
|
637
|
+
if (found)
|
|
638
|
+
return found;
|
|
639
|
+
}
|
|
640
|
+
}
|
|
641
|
+
return null;
|
|
642
|
+
}
|
|
643
|
+
slugify(name) {
|
|
644
|
+
return name
|
|
645
|
+
.toLowerCase()
|
|
646
|
+
.replace(/[^a-z0-9]+/g, '-')
|
|
647
|
+
.replace(/^-+|-+$/g, '')
|
|
648
|
+
.slice(0, 60);
|
|
649
|
+
}
|
|
650
|
+
async fileExists(fp) {
|
|
651
|
+
try {
|
|
652
|
+
await fs.access(fp);
|
|
653
|
+
return true;
|
|
654
|
+
}
|
|
655
|
+
catch {
|
|
656
|
+
return false;
|
|
657
|
+
}
|
|
658
|
+
}
|
|
659
|
+
async loadWikiMeta() {
|
|
660
|
+
try {
|
|
661
|
+
const raw = await fs.readFile(path.join(this.wikiDir, 'meta.json'), 'utf-8');
|
|
662
|
+
return JSON.parse(raw);
|
|
663
|
+
}
|
|
664
|
+
catch {
|
|
665
|
+
return null;
|
|
666
|
+
}
|
|
667
|
+
}
|
|
668
|
+
async saveWikiMeta(meta) {
|
|
669
|
+
await fs.writeFile(path.join(this.wikiDir, 'meta.json'), JSON.stringify(meta, null, 2), 'utf-8');
|
|
670
|
+
}
|
|
671
|
+
async saveModuleTree(tree) {
|
|
672
|
+
await fs.writeFile(path.join(this.wikiDir, 'module_tree.json'), JSON.stringify(tree, null, 2), 'utf-8');
|
|
673
|
+
}
|
|
674
|
+
}
|