@veewo/gitnexus 1.3.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (231)
  1. package/README.md +234 -0
  2. package/dist/benchmark/agent-context/evaluators.d.ts +9 -0
  3. package/dist/benchmark/agent-context/evaluators.js +196 -0
  4. package/dist/benchmark/agent-context/evaluators.test.d.ts +1 -0
  5. package/dist/benchmark/agent-context/evaluators.test.js +39 -0
  6. package/dist/benchmark/agent-context/io.d.ts +2 -0
  7. package/dist/benchmark/agent-context/io.js +23 -0
  8. package/dist/benchmark/agent-context/io.test.d.ts +1 -0
  9. package/dist/benchmark/agent-context/io.test.js +19 -0
  10. package/dist/benchmark/agent-context/report.d.ts +2 -0
  11. package/dist/benchmark/agent-context/report.js +59 -0
  12. package/dist/benchmark/agent-context/report.test.d.ts +1 -0
  13. package/dist/benchmark/agent-context/report.test.js +85 -0
  14. package/dist/benchmark/agent-context/runner.d.ts +46 -0
  15. package/dist/benchmark/agent-context/runner.js +111 -0
  16. package/dist/benchmark/agent-context/runner.test.d.ts +1 -0
  17. package/dist/benchmark/agent-context/runner.test.js +79 -0
  18. package/dist/benchmark/agent-context/tool-runner.d.ts +7 -0
  19. package/dist/benchmark/agent-context/tool-runner.js +18 -0
  20. package/dist/benchmark/agent-context/tool-runner.test.d.ts +1 -0
  21. package/dist/benchmark/agent-context/tool-runner.test.js +11 -0
  22. package/dist/benchmark/agent-context/types.d.ts +40 -0
  23. package/dist/benchmark/agent-context/types.js +1 -0
  24. package/dist/benchmark/analyze-runner.d.ts +16 -0
  25. package/dist/benchmark/analyze-runner.js +51 -0
  26. package/dist/benchmark/analyze-runner.test.d.ts +1 -0
  27. package/dist/benchmark/analyze-runner.test.js +37 -0
  28. package/dist/benchmark/evaluators.d.ts +6 -0
  29. package/dist/benchmark/evaluators.js +10 -0
  30. package/dist/benchmark/evaluators.test.d.ts +1 -0
  31. package/dist/benchmark/evaluators.test.js +12 -0
  32. package/dist/benchmark/io.d.ts +7 -0
  33. package/dist/benchmark/io.js +25 -0
  34. package/dist/benchmark/io.test.d.ts +1 -0
  35. package/dist/benchmark/io.test.js +35 -0
  36. package/dist/benchmark/neonspark-candidates.d.ts +19 -0
  37. package/dist/benchmark/neonspark-candidates.js +94 -0
  38. package/dist/benchmark/neonspark-candidates.test.d.ts +1 -0
  39. package/dist/benchmark/neonspark-candidates.test.js +43 -0
  40. package/dist/benchmark/neonspark-materialize.d.ts +19 -0
  41. package/dist/benchmark/neonspark-materialize.js +111 -0
  42. package/dist/benchmark/neonspark-materialize.test.d.ts +1 -0
  43. package/dist/benchmark/neonspark-materialize.test.js +124 -0
  44. package/dist/benchmark/neonspark-sync.d.ts +3 -0
  45. package/dist/benchmark/neonspark-sync.js +53 -0
  46. package/dist/benchmark/neonspark-sync.test.d.ts +1 -0
  47. package/dist/benchmark/neonspark-sync.test.js +20 -0
  48. package/dist/benchmark/report.d.ts +1 -0
  49. package/dist/benchmark/report.js +7 -0
  50. package/dist/benchmark/runner.d.ts +48 -0
  51. package/dist/benchmark/runner.js +302 -0
  52. package/dist/benchmark/runner.test.d.ts +1 -0
  53. package/dist/benchmark/runner.test.js +50 -0
  54. package/dist/benchmark/scoring.d.ts +16 -0
  55. package/dist/benchmark/scoring.js +27 -0
  56. package/dist/benchmark/scoring.test.d.ts +1 -0
  57. package/dist/benchmark/scoring.test.js +24 -0
  58. package/dist/benchmark/tool-runner.d.ts +6 -0
  59. package/dist/benchmark/tool-runner.js +17 -0
  60. package/dist/benchmark/types.d.ts +36 -0
  61. package/dist/benchmark/types.js +1 -0
  62. package/dist/cli/ai-context.d.ts +22 -0
  63. package/dist/cli/ai-context.js +184 -0
  64. package/dist/cli/ai-context.test.d.ts +1 -0
  65. package/dist/cli/ai-context.test.js +30 -0
  66. package/dist/cli/analyze-multi-scope-regression.test.d.ts +1 -0
  67. package/dist/cli/analyze-multi-scope-regression.test.js +22 -0
  68. package/dist/cli/analyze-options.d.ts +7 -0
  69. package/dist/cli/analyze-options.js +56 -0
  70. package/dist/cli/analyze-options.test.d.ts +1 -0
  71. package/dist/cli/analyze-options.test.js +36 -0
  72. package/dist/cli/analyze.d.ts +14 -0
  73. package/dist/cli/analyze.js +384 -0
  74. package/dist/cli/augment.d.ts +13 -0
  75. package/dist/cli/augment.js +33 -0
  76. package/dist/cli/benchmark-agent-context.d.ts +29 -0
  77. package/dist/cli/benchmark-agent-context.js +61 -0
  78. package/dist/cli/benchmark-agent-context.test.d.ts +1 -0
  79. package/dist/cli/benchmark-agent-context.test.js +80 -0
  80. package/dist/cli/benchmark-unity.d.ts +15 -0
  81. package/dist/cli/benchmark-unity.js +31 -0
  82. package/dist/cli/benchmark-unity.test.d.ts +1 -0
  83. package/dist/cli/benchmark-unity.test.js +18 -0
  84. package/dist/cli/claude-hooks.d.ts +22 -0
  85. package/dist/cli/claude-hooks.js +97 -0
  86. package/dist/cli/clean.d.ts +10 -0
  87. package/dist/cli/clean.js +60 -0
  88. package/dist/cli/eval-server.d.ts +30 -0
  89. package/dist/cli/eval-server.js +372 -0
  90. package/dist/cli/index.d.ts +2 -0
  91. package/dist/cli/index.js +182 -0
  92. package/dist/cli/list.d.ts +6 -0
  93. package/dist/cli/list.js +33 -0
  94. package/dist/cli/mcp.d.ts +8 -0
  95. package/dist/cli/mcp.js +34 -0
  96. package/dist/cli/repo-manager-alias.test.d.ts +1 -0
  97. package/dist/cli/repo-manager-alias.test.js +40 -0
  98. package/dist/cli/scope-filter.test.d.ts +1 -0
  99. package/dist/cli/scope-filter.test.js +49 -0
  100. package/dist/cli/serve.d.ts +4 -0
  101. package/dist/cli/serve.js +6 -0
  102. package/dist/cli/setup.d.ts +8 -0
  103. package/dist/cli/setup.js +311 -0
  104. package/dist/cli/setup.test.d.ts +1 -0
  105. package/dist/cli/setup.test.js +31 -0
  106. package/dist/cli/status.d.ts +6 -0
  107. package/dist/cli/status.js +27 -0
  108. package/dist/cli/tool.d.ts +40 -0
  109. package/dist/cli/tool.js +94 -0
  110. package/dist/cli/version.test.d.ts +1 -0
  111. package/dist/cli/version.test.js +19 -0
  112. package/dist/cli/wiki.d.ts +15 -0
  113. package/dist/cli/wiki.js +361 -0
  114. package/dist/config/ignore-service.d.ts +1 -0
  115. package/dist/config/ignore-service.js +210 -0
  116. package/dist/config/supported-languages.d.ts +12 -0
  117. package/dist/config/supported-languages.js +15 -0
  118. package/dist/core/augmentation/engine.d.ts +26 -0
  119. package/dist/core/augmentation/engine.js +213 -0
  120. package/dist/core/embeddings/embedder.d.ts +60 -0
  121. package/dist/core/embeddings/embedder.js +251 -0
  122. package/dist/core/embeddings/embedding-pipeline.d.ts +51 -0
  123. package/dist/core/embeddings/embedding-pipeline.js +329 -0
  124. package/dist/core/embeddings/index.d.ts +9 -0
  125. package/dist/core/embeddings/index.js +9 -0
  126. package/dist/core/embeddings/text-generator.d.ts +24 -0
  127. package/dist/core/embeddings/text-generator.js +182 -0
  128. package/dist/core/embeddings/types.d.ts +87 -0
  129. package/dist/core/embeddings/types.js +32 -0
  130. package/dist/core/graph/graph.d.ts +2 -0
  131. package/dist/core/graph/graph.js +66 -0
  132. package/dist/core/graph/types.d.ts +61 -0
  133. package/dist/core/graph/types.js +1 -0
  134. package/dist/core/ingestion/ast-cache.d.ts +11 -0
  135. package/dist/core/ingestion/ast-cache.js +34 -0
  136. package/dist/core/ingestion/call-processor.d.ts +15 -0
  137. package/dist/core/ingestion/call-processor.js +327 -0
  138. package/dist/core/ingestion/cluster-enricher.d.ts +38 -0
  139. package/dist/core/ingestion/cluster-enricher.js +170 -0
  140. package/dist/core/ingestion/community-processor.d.ts +39 -0
  141. package/dist/core/ingestion/community-processor.js +312 -0
  142. package/dist/core/ingestion/entry-point-scoring.d.ts +39 -0
  143. package/dist/core/ingestion/entry-point-scoring.js +260 -0
  144. package/dist/core/ingestion/filesystem-walker.d.ts +28 -0
  145. package/dist/core/ingestion/filesystem-walker.js +80 -0
  146. package/dist/core/ingestion/framework-detection.d.ts +39 -0
  147. package/dist/core/ingestion/framework-detection.js +235 -0
  148. package/dist/core/ingestion/heritage-processor.d.ts +20 -0
  149. package/dist/core/ingestion/heritage-processor.js +197 -0
  150. package/dist/core/ingestion/import-processor.d.ts +38 -0
  151. package/dist/core/ingestion/import-processor.js +778 -0
  152. package/dist/core/ingestion/parsing-processor.d.ts +15 -0
  153. package/dist/core/ingestion/parsing-processor.js +291 -0
  154. package/dist/core/ingestion/pipeline.d.ts +5 -0
  155. package/dist/core/ingestion/pipeline.js +323 -0
  156. package/dist/core/ingestion/process-processor.d.ts +51 -0
  157. package/dist/core/ingestion/process-processor.js +309 -0
  158. package/dist/core/ingestion/scope-filter.d.ts +25 -0
  159. package/dist/core/ingestion/scope-filter.js +100 -0
  160. package/dist/core/ingestion/structure-processor.d.ts +2 -0
  161. package/dist/core/ingestion/structure-processor.js +36 -0
  162. package/dist/core/ingestion/symbol-table.d.ts +33 -0
  163. package/dist/core/ingestion/symbol-table.js +38 -0
  164. package/dist/core/ingestion/tree-sitter-queries.d.ts +12 -0
  165. package/dist/core/ingestion/tree-sitter-queries.js +398 -0
  166. package/dist/core/ingestion/utils.d.ts +10 -0
  167. package/dist/core/ingestion/utils.js +50 -0
  168. package/dist/core/ingestion/workers/parse-worker.d.ts +59 -0
  169. package/dist/core/ingestion/workers/parse-worker.js +672 -0
  170. package/dist/core/ingestion/workers/worker-pool.d.ts +16 -0
  171. package/dist/core/ingestion/workers/worker-pool.js +120 -0
  172. package/dist/core/kuzu/csv-generator.d.ts +29 -0
  173. package/dist/core/kuzu/csv-generator.js +336 -0
  174. package/dist/core/kuzu/kuzu-adapter.d.ts +101 -0
  175. package/dist/core/kuzu/kuzu-adapter.js +753 -0
  176. package/dist/core/kuzu/schema.d.ts +53 -0
  177. package/dist/core/kuzu/schema.js +407 -0
  178. package/dist/core/search/bm25-index.d.ts +23 -0
  179. package/dist/core/search/bm25-index.js +95 -0
  180. package/dist/core/search/hybrid-search.d.ts +49 -0
  181. package/dist/core/search/hybrid-search.js +118 -0
  182. package/dist/core/tree-sitter/parser-loader.d.ts +4 -0
  183. package/dist/core/tree-sitter/parser-loader.js +44 -0
  184. package/dist/core/wiki/generator.d.ts +110 -0
  185. package/dist/core/wiki/generator.js +786 -0
  186. package/dist/core/wiki/graph-queries.d.ts +80 -0
  187. package/dist/core/wiki/graph-queries.js +238 -0
  188. package/dist/core/wiki/html-viewer.d.ts +10 -0
  189. package/dist/core/wiki/html-viewer.js +297 -0
  190. package/dist/core/wiki/llm-client.d.ts +40 -0
  191. package/dist/core/wiki/llm-client.js +162 -0
  192. package/dist/core/wiki/prompts.d.ts +53 -0
  193. package/dist/core/wiki/prompts.js +174 -0
  194. package/dist/lib/utils.d.ts +1 -0
  195. package/dist/lib/utils.js +3 -0
  196. package/dist/mcp/core/embedder.d.ts +27 -0
  197. package/dist/mcp/core/embedder.js +108 -0
  198. package/dist/mcp/core/kuzu-adapter.d.ts +34 -0
  199. package/dist/mcp/core/kuzu-adapter.js +231 -0
  200. package/dist/mcp/local/local-backend.d.ts +160 -0
  201. package/dist/mcp/local/local-backend.js +1646 -0
  202. package/dist/mcp/resources.d.ts +31 -0
  203. package/dist/mcp/resources.js +407 -0
  204. package/dist/mcp/server.d.ts +23 -0
  205. package/dist/mcp/server.js +251 -0
  206. package/dist/mcp/staleness.d.ts +15 -0
  207. package/dist/mcp/staleness.js +29 -0
  208. package/dist/mcp/tools.d.ts +24 -0
  209. package/dist/mcp/tools.js +195 -0
  210. package/dist/server/api.d.ts +10 -0
  211. package/dist/server/api.js +344 -0
  212. package/dist/server/mcp-http.d.ts +13 -0
  213. package/dist/server/mcp-http.js +100 -0
  214. package/dist/storage/git.d.ts +6 -0
  215. package/dist/storage/git.js +32 -0
  216. package/dist/storage/repo-manager.d.ts +125 -0
  217. package/dist/storage/repo-manager.js +257 -0
  218. package/dist/types/pipeline.d.ts +34 -0
  219. package/dist/types/pipeline.js +18 -0
  220. package/hooks/claude/gitnexus-hook.cjs +135 -0
  221. package/hooks/claude/pre-tool-use.sh +78 -0
  222. package/hooks/claude/session-start.sh +42 -0
  223. package/package.json +92 -0
  224. package/skills/gitnexus-cli.md +82 -0
  225. package/skills/gitnexus-debugging.md +89 -0
  226. package/skills/gitnexus-exploring.md +78 -0
  227. package/skills/gitnexus-guide.md +64 -0
  228. package/skills/gitnexus-impact-analysis.md +97 -0
  229. package/skills/gitnexus-refactoring.md +121 -0
  230. package/vendor/leiden/index.cjs +355 -0
  231. package/vendor/leiden/utils.cjs +392 -0
@@ -0,0 +1,786 @@
1
+ /**
2
+ * Wiki Generator
3
+ *
4
+ * Orchestrates the full wiki generation pipeline:
5
+ * Phase 0: Validate prerequisites + gather graph structure
6
+ * Phase 1: Build module tree (one LLM call)
7
+ * Phase 2: Generate module pages (one LLM call per module, bottom-up)
8
+ * Phase 3: Generate overview page
9
+ *
10
+ * Supports incremental updates via git diff + module-file mapping.
11
+ */
12
+ import fs from 'fs/promises';
13
+ import path from 'path';
14
+ import { execSync } from 'child_process';
15
+ import { initWikiDb, closeWikiDb, getFilesWithExports, getAllFiles, getIntraModuleCallEdges, getInterModuleCallEdges, getProcessesForFiles, getAllProcesses, getInterModuleEdgesForOverview, } from './graph-queries.js';
16
+ import { generateHTMLViewer } from './html-viewer.js';
17
+ import { callLLM, estimateTokens, } from './llm-client.js';
18
+ import { GROUPING_SYSTEM_PROMPT, GROUPING_USER_PROMPT, MODULE_SYSTEM_PROMPT, MODULE_USER_PROMPT, PARENT_SYSTEM_PROMPT, PARENT_USER_PROMPT, OVERVIEW_SYSTEM_PROMPT, OVERVIEW_USER_PROMPT, fillTemplate, formatFileListForGrouping, formatDirectoryTree, formatCallEdges, formatProcesses, } from './prompts.js';
19
+ import { shouldIgnorePath } from '../../config/ignore-service.js';
20
// ─── Constants ────────────────────────────────────────────────────────
// Soft token budget per module page; modules estimated above this are
// split into sub-modules or have their source truncated before prompting.
const DEFAULT_MAX_TOKENS_PER_MODULE = 30_000;
// Subdirectory of the storage path where wiki pages and metadata live.
const WIKI_DIR = 'wiki';
// ─── Generator Class ──────────────────────────────────────────────────
export class WikiGenerator {
    repoPath; // absolute path of the repository being documented
    storagePath; // root directory for generated artifacts
    wikiDir; // `${storagePath}/wiki` — destination for .md pages and metadata
    kuzuPath; // path to the Kuzu knowledge-graph database
    llmConfig; // model configuration forwarded to callLLM
    maxTokensPerModule; // token budget before a module is split/truncated
    concurrency; // max parallel workers for leaf-module generation
    options; // raw options bag ({ force, maxTokensPerModule, concurrency, ... })
    onProgress; // (phase, percent, detail) => void wrapper around the caller's callback
    failedModules = []; // names of modules whose page generation threw
+ constructor(repoPath, storagePath, kuzuPath, llmConfig, options = {}, onProgress) {
36
+ this.repoPath = repoPath;
37
+ this.storagePath = storagePath;
38
+ this.wikiDir = path.join(storagePath, WIKI_DIR);
39
+ this.kuzuPath = kuzuPath;
40
+ this.options = options;
41
+ this.llmConfig = llmConfig;
42
+ this.maxTokensPerModule = options.maxTokensPerModule ?? DEFAULT_MAX_TOKENS_PER_MODULE;
43
+ this.concurrency = options.concurrency ?? 3;
44
+ const progressFn = onProgress || (() => { });
45
+ this.onProgress = (phase, percent, detail) => {
46
+ if (percent > 0)
47
+ this.lastPercent = percent;
48
+ progressFn(phase, percent, detail);
49
+ };
50
+ }
51
+ lastPercent = 0;
52
+ /**
53
+ * Create streaming options that report LLM progress to the progress bar.
54
+ * Uses the last known percent so streaming doesn't reset the bar backwards.
55
+ */
56
+ streamOpts(label, fixedPercent) {
57
+ return {
58
+ onChunk: (chars) => {
59
+ const tokens = Math.round(chars / 4);
60
+ const pct = fixedPercent ?? this.lastPercent;
61
+ this.onProgress('stream', pct, `${label} (${tokens} tok)`);
62
+ },
63
+ };
64
+ }
65
    /**
     * Main entry point. Runs the full pipeline or an incremental update.
     * Short-circuits when the wiki is already at the current commit
     * (unless options.force is set), and always (re)builds the HTML
     * viewer after content changes.
     * @returns {pagesGenerated, mode, failedModules}
     */
    async run() {
        await fs.mkdir(this.wikiDir, { recursive: true });
        const existingMeta = await this.loadWikiMeta();
        const currentCommit = this.getCurrentCommit();
        const forceMode = this.options.force;
        // Up-to-date check (skip if --force)
        if (!forceMode && existingMeta && existingMeta.fromCommit === currentCommit) {
            // Still regenerate the HTML viewer in case it's missing
            await this.ensureHTMLViewer();
            return { pagesGenerated: 0, mode: 'up-to-date', failedModules: [] };
        }
        // Force mode: delete snapshot to force full re-grouping
        if (forceMode) {
            try {
                await fs.unlink(path.join(this.wikiDir, 'first_module_tree.json'));
            }
            catch { } // snapshot may not exist — best-effort cleanup
            // Delete existing module pages so they get regenerated
            const existingFiles = await fs.readdir(this.wikiDir).catch(() => []);
            for (const f of existingFiles) {
                if (f.endsWith('.md')) {
                    try {
                        await fs.unlink(path.join(this.wikiDir, f));
                    }
                    catch { } // ignore races/missing files during cleanup
                }
            }
        }
        // Init graph
        this.onProgress('init', 2, 'Connecting to knowledge graph...');
        await initWikiDb(this.kuzuPath);
        let result;
        try {
            // Incremental only when prior metadata records a base commit;
            // otherwise fall back to a full generation.
            if (!forceMode && existingMeta && existingMeta.fromCommit) {
                result = await this.incrementalUpdate(existingMeta, currentCommit);
            }
            else {
                result = await this.fullGeneration(currentCommit);
            }
        }
        finally {
            // Always release the graph DB handle, even on failure
            await closeWikiDb();
        }
        // Always generate the HTML viewer after wiki content changes
        await this.ensureHTMLViewer();
        return result;
    }
+ // ─── HTML Viewer ─────────────────────────────────────────────────────
116
+ async ensureHTMLViewer() {
117
+ // Only generate if there are markdown pages to bundle
118
+ const dirEntries = await fs.readdir(this.wikiDir).catch(() => []);
119
+ const hasMd = dirEntries.some(f => f.endsWith('.md'));
120
+ if (!hasMd)
121
+ return;
122
+ this.onProgress('html', 98, 'Building HTML viewer...');
123
+ const repoName = path.basename(this.repoPath);
124
+ await generateHTMLViewer(this.wikiDir, repoName);
125
+ }
126
+ // ─── Full Generation ────────────────────────────────────────────────
127
+ async fullGeneration(currentCommit) {
128
+ let pagesGenerated = 0;
129
+ // Phase 0: Gather structure
130
+ this.onProgress('gather', 5, 'Querying graph for file structure...');
131
+ const filesWithExports = await getFilesWithExports();
132
+ const allFiles = await getAllFiles();
133
+ // Filter to source files only
134
+ const sourceFiles = allFiles.filter(f => !shouldIgnorePath(f));
135
+ if (sourceFiles.length === 0) {
136
+ throw new Error('No source files found in the knowledge graph. Nothing to document.');
137
+ }
138
+ // Build enriched file list (merge exports into all source files)
139
+ const exportMap = new Map(filesWithExports.map(f => [f.filePath, f]));
140
+ const enrichedFiles = sourceFiles.map(fp => {
141
+ return exportMap.get(fp) || { filePath: fp, symbols: [] };
142
+ });
143
+ this.onProgress('gather', 10, `Found ${sourceFiles.length} source files`);
144
+ // Phase 1: Build module tree
145
+ const moduleTree = await this.buildModuleTree(enrichedFiles);
146
+ pagesGenerated = 0;
147
+ // Phase 2: Generate module pages (parallel with concurrency limit)
148
+ const totalModules = this.countModules(moduleTree);
149
+ let modulesProcessed = 0;
150
+ const reportProgress = (moduleName) => {
151
+ modulesProcessed++;
152
+ const percent = 30 + Math.round((modulesProcessed / totalModules) * 55);
153
+ const detail = moduleName
154
+ ? `${modulesProcessed}/${totalModules} — ${moduleName}`
155
+ : `${modulesProcessed}/${totalModules} modules`;
156
+ this.onProgress('modules', percent, detail);
157
+ };
158
+ // Flatten tree into layers: leaves first, then parents
159
+ // Leaves can run in parallel; parents must wait for their children
160
+ const { leaves, parents } = this.flattenModuleTree(moduleTree);
161
+ // Process all leaf modules in parallel
162
+ pagesGenerated += await this.runParallel(leaves, async (node) => {
163
+ const pagePath = path.join(this.wikiDir, `${node.slug}.md`);
164
+ if (await this.fileExists(pagePath)) {
165
+ reportProgress(node.name);
166
+ return 0;
167
+ }
168
+ try {
169
+ await this.generateLeafPage(node);
170
+ reportProgress(node.name);
171
+ return 1;
172
+ }
173
+ catch (err) {
174
+ this.failedModules.push(node.name);
175
+ reportProgress(`Failed: ${node.name}`);
176
+ return 0;
177
+ }
178
+ });
179
+ // Process parent modules sequentially (they depend on child docs)
180
+ for (const node of parents) {
181
+ const pagePath = path.join(this.wikiDir, `${node.slug}.md`);
182
+ if (await this.fileExists(pagePath)) {
183
+ reportProgress(node.name);
184
+ continue;
185
+ }
186
+ try {
187
+ await this.generateParentPage(node);
188
+ pagesGenerated++;
189
+ reportProgress(node.name);
190
+ }
191
+ catch (err) {
192
+ this.failedModules.push(node.name);
193
+ reportProgress(`Failed: ${node.name}`);
194
+ }
195
+ }
196
+ // Phase 3: Generate overview
197
+ this.onProgress('overview', 88, 'Generating overview page...');
198
+ await this.generateOverview(moduleTree);
199
+ pagesGenerated++;
200
+ // Save metadata
201
+ this.onProgress('finalize', 95, 'Saving metadata...');
202
+ const moduleFiles = this.extractModuleFiles(moduleTree);
203
+ await this.saveModuleTree(moduleTree);
204
+ await this.saveWikiMeta({
205
+ fromCommit: currentCommit,
206
+ generatedAt: new Date().toISOString(),
207
+ model: this.llmConfig.model,
208
+ moduleFiles,
209
+ moduleTree,
210
+ });
211
+ this.onProgress('done', 100, 'Wiki generation complete');
212
+ return { pagesGenerated, mode: 'full', failedModules: [...this.failedModules] };
213
+ }
214
    // ─── Phase 1: Build Module Tree ────────────────────────────────────
    /**
     * Group files into a module tree using one LLM call. An immutable
     * snapshot ("first_module_tree.json") is written on first success and
     * reused on later runs so an interrupted generation can resume with
     * the same grouping. Oversized modules are split into children by
     * subdirectory.
     * @param files Enriched file records ({ filePath, symbols }).
     * @returns Array of module nodes ({ name, slug, files, children? }).
     */
    async buildModuleTree(files) {
        // Check for existing immutable snapshot (resumability)
        const snapshotPath = path.join(this.wikiDir, 'first_module_tree.json');
        try {
            const existing = await fs.readFile(snapshotPath, 'utf-8');
            const parsed = JSON.parse(existing);
            if (Array.isArray(parsed) && parsed.length > 0) {
                this.onProgress('grouping', 25, 'Using existing module tree (resuming)');
                return parsed;
            }
        }
        catch {
            // No snapshot (or unreadable/invalid JSON) — generate a new tree
        }
        this.onProgress('grouping', 15, 'Grouping files into modules (LLM)...');
        const fileList = formatFileListForGrouping(files);
        const dirTree = formatDirectoryTree(files.map(f => f.filePath));
        const prompt = fillTemplate(GROUPING_USER_PROMPT, {
            FILE_LIST: fileList,
            DIRECTORY_TREE: dirTree,
        });
        const response = await callLLM(prompt, this.llmConfig, GROUPING_SYSTEM_PROMPT, this.streamOpts('Grouping files', 15));
        const grouping = this.parseGroupingResponse(response.content, files);
        // Convert to tree nodes
        const tree = [];
        for (const [moduleName, modulePaths] of Object.entries(grouping)) {
            const slug = this.slugify(moduleName);
            const node = { name: moduleName, slug, files: modulePaths };
            // Token budget check — split if too large
            const totalTokens = await this.estimateModuleTokens(modulePaths);
            if (totalTokens > this.maxTokensPerModule && modulePaths.length > 3) {
                node.children = this.splitBySubdirectory(moduleName, modulePaths);
                node.files = []; // Parent doesn't own files directly when split
            }
            tree.push(node);
        }
        // Save immutable snapshot for resumability
        await fs.writeFile(snapshotPath, JSON.stringify(tree, null, 2), 'utf-8');
        this.onProgress('grouping', 28, `Created ${tree.length} modules`);
        return tree;
    }
+ /**
257
+ * Parse LLM grouping response. Validates all files are assigned.
258
+ */
259
+ parseGroupingResponse(content, files) {
260
+ // Extract JSON from response (handle markdown fences)
261
+ let jsonStr = content.trim();
262
+ const fenceMatch = jsonStr.match(/```(?:json)?\s*\n?([\s\S]*?)\n?```/);
263
+ if (fenceMatch) {
264
+ jsonStr = fenceMatch[1].trim();
265
+ }
266
+ let parsed;
267
+ try {
268
+ parsed = JSON.parse(jsonStr);
269
+ }
270
+ catch {
271
+ // Fallback: group by top-level directory
272
+ return this.fallbackGrouping(files);
273
+ }
274
+ if (typeof parsed !== 'object' || Array.isArray(parsed)) {
275
+ return this.fallbackGrouping(files);
276
+ }
277
+ // Validate — ensure all files are assigned
278
+ const allFilePaths = new Set(files.map(f => f.filePath));
279
+ const assignedFiles = new Set();
280
+ const validGrouping = {};
281
+ for (const [mod, paths] of Object.entries(parsed)) {
282
+ if (!Array.isArray(paths))
283
+ continue;
284
+ const validPaths = paths.filter(p => {
285
+ if (allFilePaths.has(p) && !assignedFiles.has(p)) {
286
+ assignedFiles.add(p);
287
+ return true;
288
+ }
289
+ return false;
290
+ });
291
+ if (validPaths.length > 0) {
292
+ validGrouping[mod] = validPaths;
293
+ }
294
+ }
295
+ // Assign unassigned files to a "Miscellaneous" module
296
+ const unassigned = files
297
+ .map(f => f.filePath)
298
+ .filter(fp => !assignedFiles.has(fp));
299
+ if (unassigned.length > 0) {
300
+ validGrouping['Other'] = unassigned;
301
+ }
302
+ return Object.keys(validGrouping).length > 0
303
+ ? validGrouping
304
+ : this.fallbackGrouping(files);
305
+ }
306
+ /**
307
+ * Fallback grouping by top-level directory when LLM parsing fails.
308
+ */
309
+ fallbackGrouping(files) {
310
+ const groups = new Map();
311
+ for (const f of files) {
312
+ const parts = f.filePath.replace(/\\/g, '/').split('/');
313
+ const topDir = parts.length > 1 ? parts[0] : 'Root';
314
+ let group = groups.get(topDir);
315
+ if (!group) {
316
+ group = [];
317
+ groups.set(topDir, group);
318
+ }
319
+ group.push(f.filePath);
320
+ }
321
+ return Object.fromEntries(groups);
322
+ }
323
+ /**
324
+ * Split a large module into sub-modules by subdirectory.
325
+ */
326
+ splitBySubdirectory(moduleName, files) {
327
+ const subGroups = new Map();
328
+ for (const fp of files) {
329
+ const parts = fp.replace(/\\/g, '/').split('/');
330
+ // Use the deepest common-ish directory
331
+ const subDir = parts.length > 2 ? parts.slice(0, 2).join('/') : parts[0];
332
+ let group = subGroups.get(subDir);
333
+ if (!group) {
334
+ group = [];
335
+ subGroups.set(subDir, group);
336
+ }
337
+ group.push(fp);
338
+ }
339
+ return Array.from(subGroups.entries()).map(([subDir, subFiles]) => ({
340
+ name: `${moduleName} — ${path.basename(subDir)}`,
341
+ slug: this.slugify(`${moduleName}-${path.basename(subDir)}`),
342
+ files: subFiles,
343
+ }));
344
+ }
345
+ // ─── Phase 2: Generate Module Pages ─────────────────────────────────
346
+ /**
347
+ * Generate a leaf module page from source code + graph data.
348
+ */
349
+ async generateLeafPage(node) {
350
+ const filePaths = node.files;
351
+ // Read source files from disk
352
+ const sourceCode = await this.readSourceFiles(filePaths);
353
+ // Token budget check — if too large, summarize in batches
354
+ const totalTokens = estimateTokens(sourceCode);
355
+ let finalSourceCode = sourceCode;
356
+ if (totalTokens > this.maxTokensPerModule) {
357
+ finalSourceCode = this.truncateSource(sourceCode, this.maxTokensPerModule);
358
+ }
359
+ // Get graph data
360
+ const [intraCalls, interCalls, processes] = await Promise.all([
361
+ getIntraModuleCallEdges(filePaths),
362
+ getInterModuleCallEdges(filePaths),
363
+ getProcessesForFiles(filePaths, 5),
364
+ ]);
365
+ const prompt = fillTemplate(MODULE_USER_PROMPT, {
366
+ MODULE_NAME: node.name,
367
+ SOURCE_CODE: finalSourceCode,
368
+ INTRA_CALLS: formatCallEdges(intraCalls),
369
+ OUTGOING_CALLS: formatCallEdges(interCalls.outgoing),
370
+ INCOMING_CALLS: formatCallEdges(interCalls.incoming),
371
+ PROCESSES: formatProcesses(processes),
372
+ });
373
+ const response = await callLLM(prompt, this.llmConfig, MODULE_SYSTEM_PROMPT, this.streamOpts(node.name));
374
+ // Write page with front matter
375
+ const pageContent = `# ${node.name}\n\n${response.content}`;
376
+ await fs.writeFile(path.join(this.wikiDir, `${node.slug}.md`), pageContent, 'utf-8');
377
+ }
378
+ /**
379
+ * Generate a parent module page from children's documentation.
380
+ */
381
+ async generateParentPage(node) {
382
+ if (!node.children || node.children.length === 0)
383
+ return;
384
+ // Read children's overview sections
385
+ const childDocs = [];
386
+ for (const child of node.children) {
387
+ const childPage = path.join(this.wikiDir, `${child.slug}.md`);
388
+ try {
389
+ const content = await fs.readFile(childPage, 'utf-8');
390
+ // Extract overview section (first ~500 chars or up to "### Architecture")
391
+ const overviewEnd = content.indexOf('### Architecture');
392
+ const overview = overviewEnd > 0 ? content.slice(0, overviewEnd).trim() : content.slice(0, 800).trim();
393
+ childDocs.push(`#### ${child.name}\n${overview}`);
394
+ }
395
+ catch {
396
+ childDocs.push(`#### ${child.name}\n(Documentation not yet generated)`);
397
+ }
398
+ }
399
+ // Get cross-child call edges
400
+ const allChildFiles = node.children.flatMap(c => c.files);
401
+ const crossCalls = await getIntraModuleCallEdges(allChildFiles);
402
+ const processes = await getProcessesForFiles(allChildFiles, 3);
403
+ const prompt = fillTemplate(PARENT_USER_PROMPT, {
404
+ MODULE_NAME: node.name,
405
+ CHILDREN_DOCS: childDocs.join('\n\n'),
406
+ CROSS_MODULE_CALLS: formatCallEdges(crossCalls),
407
+ CROSS_PROCESSES: formatProcesses(processes),
408
+ });
409
+ const response = await callLLM(prompt, this.llmConfig, PARENT_SYSTEM_PROMPT, this.streamOpts(node.name));
410
+ const pageContent = `# ${node.name}\n\n${response.content}`;
411
+ await fs.writeFile(path.join(this.wikiDir, `${node.slug}.md`), pageContent, 'utf-8');
412
+ }
413
+ // ─── Phase 3: Generate Overview ─────────────────────────────────────
414
+ async generateOverview(moduleTree) {
415
+ // Read module overview sections
416
+ const moduleSummaries = [];
417
+ for (const node of moduleTree) {
418
+ const pagePath = path.join(this.wikiDir, `${node.slug}.md`);
419
+ try {
420
+ const content = await fs.readFile(pagePath, 'utf-8');
421
+ const overviewEnd = content.indexOf('### Architecture');
422
+ const overview = overviewEnd > 0 ? content.slice(0, overviewEnd).trim() : content.slice(0, 600).trim();
423
+ moduleSummaries.push(`#### ${node.name}\n${overview}`);
424
+ }
425
+ catch {
426
+ moduleSummaries.push(`#### ${node.name}\n(Documentation pending)`);
427
+ }
428
+ }
429
+ // Get inter-module edges for architecture diagram
430
+ const moduleFiles = this.extractModuleFiles(moduleTree);
431
+ const moduleEdges = await getInterModuleEdgesForOverview(moduleFiles);
432
+ // Get top processes for key workflows
433
+ const topProcesses = await getAllProcesses(5);
434
+ // Read project config
435
+ const projectInfo = await this.readProjectInfo();
436
+ const edgesText = moduleEdges.length > 0
437
+ ? moduleEdges.map(e => `${e.from} → ${e.to} (${e.count} calls)`).join('\n')
438
+ : 'No inter-module call edges detected';
439
+ const prompt = fillTemplate(OVERVIEW_USER_PROMPT, {
440
+ PROJECT_INFO: projectInfo,
441
+ MODULE_SUMMARIES: moduleSummaries.join('\n\n'),
442
+ MODULE_EDGES: edgesText,
443
+ TOP_PROCESSES: formatProcesses(topProcesses),
444
+ });
445
+ const response = await callLLM(prompt, this.llmConfig, OVERVIEW_SYSTEM_PROMPT, this.streamOpts('Generating overview', 88));
446
+ const pageContent = `# ${path.basename(this.repoPath)} — Wiki\n\n${response.content}`;
447
+ await fs.writeFile(path.join(this.wikiDir, 'overview.md'), pageContent, 'utf-8');
448
+ }
449
// ─── Incremental Updates ────────────────────────────────────────────
/**
 * Update only the wiki pages affected by commits made since the last run.
 *
 * Strategy: diff `existingMeta.fromCommit..currentCommit`, map each changed
 * file back to the module that owns it, and regenerate just those module
 * pages (plus the overview). Falls back to a full regeneration when many
 * brand-new files appear, since the stored module grouping is then stale.
 *
 * @param {object} existingMeta  Previously saved wiki metadata; must contain
 *                               `fromCommit`, `moduleFiles` (module name →
 *                               file list) and `moduleTree`.
 * @param {string} currentCommit Commit hash of the repo's current HEAD.
 * @returns {Promise<{pagesGenerated: number, mode: string, failedModules: string[]}>}
 */
async incrementalUpdate(existingMeta, currentCommit) {
    this.onProgress('incremental', 5, 'Detecting changes...');
    // Get changed files since last generation
    const changedFiles = this.getChangedFiles(existingMeta.fromCommit, currentCommit);
    if (changedFiles.length === 0) {
        // No file changes but commit differs (e.g. merge commit)
        // — just advance the recorded commit so the next diff is smaller.
        await this.saveWikiMeta({
            ...existingMeta,
            fromCommit: currentCommit,
            generatedAt: new Date().toISOString(),
        });
        return { pagesGenerated: 0, mode: 'incremental', failedModules: [] };
    }
    this.onProgress('incremental', 10, `${changedFiles.length} files changed`);
    // Determine affected modules: a changed file that belongs to a known
    // module marks that module dirty; otherwise it is treated as new.
    const affectedModules = new Set();
    const newFiles = [];
    for (const fp of changedFiles) {
        let found = false;
        for (const [mod, files] of Object.entries(existingMeta.moduleFiles)) {
            if (files.includes(fp)) {
                affectedModules.add(mod);
                found = true;
                break;
            }
        }
        if (!found && !shouldIgnorePath(fp)) {
            newFiles.push(fp);
        }
    }
    // If significant new files exist, re-run full grouping
    // (threshold of 5 is a heuristic; below it, new files go to "Other").
    if (newFiles.length > 5) {
        this.onProgress('incremental', 15, 'Significant new files detected, running full generation...');
        // Delete old snapshot to force re-grouping
        try {
            await fs.unlink(path.join(this.wikiDir, 'first_module_tree.json'));
        }
        catch { }
        // Report `mode: 'incremental'` even though a full pass ran, so the
        // caller's view of what was requested stays consistent.
        const fullResult = await this.fullGeneration(currentCommit);
        return { ...fullResult, mode: 'incremental' };
    }
    // Add new files to nearest module or "Other"
    if (newFiles.length > 0) {
        if (!existingMeta.moduleFiles['Other']) {
            existingMeta.moduleFiles['Other'] = [];
        }
        existingMeta.moduleFiles['Other'].push(...newFiles);
        affectedModules.add('Other');
    }
    // Regenerate affected module pages (parallel)
    let pagesGenerated = 0;
    const moduleTree = existingMeta.moduleTree;
    const affectedArray = Array.from(affectedModules);
    this.onProgress('incremental', 20, `Regenerating ${affectedArray.length} module(s)...`);
    // Resolve module names to tree nodes and delete their stale pages first.
    // NOTE(review): modules whose slug is not found in the tree (e.g. a
    // freshly-created "Other") are silently skipped here — confirm intended.
    const affectedNodes = [];
    for (const mod of affectedArray) {
        const modSlug = this.slugify(mod);
        const node = this.findNodeBySlug(moduleTree, modSlug);
        if (node) {
            try {
                await fs.unlink(path.join(this.wikiDir, `${node.slug}.md`));
            }
            catch { }
            affectedNodes.push(node);
        }
    }
    let incProcessed = 0;
    // Each task returns 1 on success / 0 on failure; runParallel sums them.
    pagesGenerated += await this.runParallel(affectedNodes, async (node) => {
        try {
            if (node.children && node.children.length > 0) {
                await this.generateParentPage(node);
            }
            else {
                await this.generateLeafPage(node);
            }
            incProcessed++;
            // Progress spans 20%–80% proportionally to completed modules.
            const percent = 20 + Math.round((incProcessed / affectedNodes.length) * 60);
            this.onProgress('incremental', percent, `${incProcessed}/${affectedNodes.length} — ${node.name}`);
            return 1;
        }
        catch (err) {
            // Record the failure but keep going; failures are surfaced in
            // the returned `failedModules` list.
            this.failedModules.push(node.name);
            incProcessed++;
            return 0;
        }
    });
    // Regenerate overview if any pages changed
    if (pagesGenerated > 0) {
        this.onProgress('incremental', 85, 'Updating overview...');
        await this.generateOverview(moduleTree);
        pagesGenerated++;
    }
    // Save updated metadata
    this.onProgress('incremental', 95, 'Saving metadata...');
    await this.saveWikiMeta({
        ...existingMeta,
        fromCommit: currentCommit,
        generatedAt: new Date().toISOString(),
        model: this.llmConfig.model,
    });
    this.onProgress('done', 100, 'Incremental update complete');
    return { pagesGenerated, mode: 'incremental', failedModules: [...this.failedModules] };
}
553
+ // ─── Helpers ────────────────────────────────────────────────────────
554
+ getCurrentCommit() {
555
+ try {
556
+ return execSync('git rev-parse HEAD', { cwd: this.repoPath }).toString().trim();
557
+ }
558
+ catch {
559
+ return '';
560
+ }
561
+ }
562
+ getChangedFiles(fromCommit, toCommit) {
563
+ try {
564
+ const output = execSync(`git diff ${fromCommit}..${toCommit} --name-only`, { cwd: this.repoPath }).toString().trim();
565
+ return output ? output.split('\n').filter(Boolean) : [];
566
+ }
567
+ catch {
568
+ return [];
569
+ }
570
+ }
571
+ async readSourceFiles(filePaths) {
572
+ const parts = [];
573
+ for (const fp of filePaths) {
574
+ const fullPath = path.join(this.repoPath, fp);
575
+ try {
576
+ const content = await fs.readFile(fullPath, 'utf-8');
577
+ parts.push(`\n--- ${fp} ---\n${content}`);
578
+ }
579
+ catch {
580
+ parts.push(`\n--- ${fp} ---\n(file not readable)`);
581
+ }
582
+ }
583
+ return parts.join('\n');
584
+ }
585
+ truncateSource(source, maxTokens) {
586
+ // Rough truncation: keep first maxTokens*4 chars and add notice
587
+ const maxChars = maxTokens * 4;
588
+ if (source.length <= maxChars)
589
+ return source;
590
+ return source.slice(0, maxChars) + '\n\n... (source truncated for context window limits)';
591
+ }
592
+ async estimateModuleTokens(filePaths) {
593
+ let total = 0;
594
+ for (const fp of filePaths) {
595
+ try {
596
+ const content = await fs.readFile(path.join(this.repoPath, fp), 'utf-8');
597
+ total += estimateTokens(content);
598
+ }
599
+ catch {
600
+ // File not readable, skip
601
+ }
602
+ }
603
+ return total;
604
+ }
605
+ async readProjectInfo() {
606
+ const candidates = ['package.json', 'Cargo.toml', 'pyproject.toml', 'go.mod', 'pom.xml', 'build.gradle'];
607
+ const lines = [`Project: ${path.basename(this.repoPath)}`];
608
+ for (const file of candidates) {
609
+ const fullPath = path.join(this.repoPath, file);
610
+ try {
611
+ const content = await fs.readFile(fullPath, 'utf-8');
612
+ if (file === 'package.json') {
613
+ const pkg = JSON.parse(content);
614
+ if (pkg.name)
615
+ lines.push(`Name: ${pkg.name}`);
616
+ if (pkg.description)
617
+ lines.push(`Description: ${pkg.description}`);
618
+ if (pkg.scripts)
619
+ lines.push(`Scripts: ${Object.keys(pkg.scripts).join(', ')}`);
620
+ }
621
+ else {
622
+ // Include first 500 chars of other config files
623
+ lines.push(`\n${file}:\n${content.slice(0, 500)}`);
624
+ }
625
+ break; // Use first config found
626
+ }
627
+ catch {
628
+ continue;
629
+ }
630
+ }
631
+ // Read README excerpt
632
+ for (const readme of ['README.md', 'readme.md', 'README.txt']) {
633
+ try {
634
+ const content = await fs.readFile(path.join(this.repoPath, readme), 'utf-8');
635
+ lines.push(`\nREADME excerpt:\n${content.slice(0, 1000)}`);
636
+ break;
637
+ }
638
+ catch {
639
+ continue;
640
+ }
641
+ }
642
+ return lines.join('\n');
643
+ }
644
+ extractModuleFiles(tree) {
645
+ const result = {};
646
+ for (const node of tree) {
647
+ if (node.children && node.children.length > 0) {
648
+ result[node.name] = node.children.flatMap(c => c.files);
649
+ for (const child of node.children) {
650
+ result[child.name] = child.files;
651
+ }
652
+ }
653
+ else {
654
+ result[node.name] = node.files;
655
+ }
656
+ }
657
+ return result;
658
+ }
659
+ countModules(tree) {
660
+ let count = 0;
661
+ for (const node of tree) {
662
+ count++;
663
+ if (node.children) {
664
+ count += node.children.length;
665
+ }
666
+ }
667
+ return count;
668
+ }
669
+ /**
670
+ * Flatten the module tree into leaf nodes and parent nodes.
671
+ * Leaves can be processed in parallel; parents must wait for children.
672
+ */
673
+ flattenModuleTree(tree) {
674
+ const leaves = [];
675
+ const parents = [];
676
+ for (const node of tree) {
677
+ if (node.children && node.children.length > 0) {
678
+ for (const child of node.children) {
679
+ leaves.push(child);
680
+ }
681
+ parents.push(node);
682
+ }
683
+ else {
684
+ leaves.push(node);
685
+ }
686
+ }
687
+ return { leaves, parents };
688
+ }
689
+ /**
690
+ * Run async tasks in parallel with a concurrency limit and adaptive rate limiting.
691
+ * If a 429 rate limit is hit, concurrency is temporarily reduced.
692
+ */
693
+ async runParallel(items, fn) {
694
+ let total = 0;
695
+ let activeConcurrency = this.concurrency;
696
+ let running = 0;
697
+ let idx = 0;
698
+ return new Promise((resolve, reject) => {
699
+ const next = () => {
700
+ while (running < activeConcurrency && idx < items.length) {
701
+ const item = items[idx++];
702
+ running++;
703
+ fn(item)
704
+ .then((count) => {
705
+ total += count;
706
+ running--;
707
+ if (idx >= items.length && running === 0) {
708
+ resolve(total);
709
+ }
710
+ else {
711
+ next();
712
+ }
713
+ })
714
+ .catch((err) => {
715
+ running--;
716
+ // On rate limit, reduce concurrency temporarily
717
+ if (err.message?.includes('429')) {
718
+ activeConcurrency = Math.max(1, activeConcurrency - 1);
719
+ this.onProgress('modules', this.lastPercent, `Rate limited — concurrency → ${activeConcurrency}`);
720
+ // Re-queue the item
721
+ idx--;
722
+ setTimeout(next, 5000);
723
+ }
724
+ else {
725
+ if (idx >= items.length && running === 0) {
726
+ resolve(total);
727
+ }
728
+ else {
729
+ next();
730
+ }
731
+ }
732
+ });
733
+ }
734
+ };
735
+ if (items.length === 0) {
736
+ resolve(0);
737
+ }
738
+ else {
739
+ next();
740
+ }
741
+ });
742
+ }
743
+ findNodeBySlug(tree, slug) {
744
+ for (const node of tree) {
745
+ if (node.slug === slug)
746
+ return node;
747
+ if (node.children) {
748
+ const found = this.findNodeBySlug(node.children, slug);
749
+ if (found)
750
+ return found;
751
+ }
752
+ }
753
+ return null;
754
+ }
755
+ slugify(name) {
756
+ return name
757
+ .toLowerCase()
758
+ .replace(/[^a-z0-9]+/g, '-')
759
+ .replace(/^-+|-+$/g, '')
760
+ .slice(0, 60);
761
+ }
762
+ async fileExists(fp) {
763
+ try {
764
+ await fs.access(fp);
765
+ return true;
766
+ }
767
+ catch {
768
+ return false;
769
+ }
770
+ }
771
+ async loadWikiMeta() {
772
+ try {
773
+ const raw = await fs.readFile(path.join(this.wikiDir, 'meta.json'), 'utf-8');
774
+ return JSON.parse(raw);
775
+ }
776
+ catch {
777
+ return null;
778
+ }
779
+ }
780
+ async saveWikiMeta(meta) {
781
+ await fs.writeFile(path.join(this.wikiDir, 'meta.json'), JSON.stringify(meta, null, 2), 'utf-8');
782
+ }
783
+ async saveModuleTree(tree) {
784
+ await fs.writeFile(path.join(this.wikiDir, 'module_tree.json'), JSON.stringify(tree, null, 2), 'utf-8');
785
+ }
786
+ }