gitnexus 1.4.1 → 1.4.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (169)
  1. package/README.md +215 -194
  2. package/dist/cli/ai-context.d.ts +2 -1
  3. package/dist/cli/ai-context.js +117 -90
  4. package/dist/cli/analyze.d.ts +2 -0
  5. package/dist/cli/analyze.js +57 -30
  6. package/dist/cli/augment.js +1 -1
  7. package/dist/cli/eval-server.d.ts +1 -1
  8. package/dist/cli/eval-server.js +14 -6
  9. package/dist/cli/index.js +18 -25
  10. package/dist/cli/lazy-action.d.ts +6 -0
  11. package/dist/cli/lazy-action.js +18 -0
  12. package/dist/cli/mcp.js +1 -1
  13. package/dist/cli/setup.js +42 -32
  14. package/dist/cli/skill-gen.d.ts +26 -0
  15. package/dist/cli/skill-gen.js +549 -0
  16. package/dist/cli/status.js +13 -4
  17. package/dist/cli/tool.d.ts +3 -2
  18. package/dist/cli/tool.js +48 -13
  19. package/dist/cli/wiki.js +2 -2
  20. package/dist/config/ignore-service.d.ts +25 -0
  21. package/dist/config/ignore-service.js +76 -0
  22. package/dist/config/supported-languages.d.ts +1 -0
  23. package/dist/config/supported-languages.js +1 -1
  24. package/dist/core/augmentation/engine.js +99 -72
  25. package/dist/core/embeddings/embedder.d.ts +1 -1
  26. package/dist/core/embeddings/embedder.js +1 -1
  27. package/dist/core/embeddings/embedding-pipeline.d.ts +3 -3
  28. package/dist/core/embeddings/embedding-pipeline.js +74 -47
  29. package/dist/core/embeddings/types.d.ts +1 -1
  30. package/dist/core/graph/types.d.ts +5 -2
  31. package/dist/core/ingestion/ast-cache.js +3 -2
  32. package/dist/core/ingestion/call-processor.d.ts +5 -7
  33. package/dist/core/ingestion/call-processor.js +430 -283
  34. package/dist/core/ingestion/call-routing.d.ts +53 -0
  35. package/dist/core/ingestion/call-routing.js +108 -0
  36. package/dist/core/ingestion/cluster-enricher.js +16 -16
  37. package/dist/core/ingestion/constants.d.ts +16 -0
  38. package/dist/core/ingestion/constants.js +16 -0
  39. package/dist/core/ingestion/entry-point-scoring.d.ts +2 -1
  40. package/dist/core/ingestion/entry-point-scoring.js +94 -24
  41. package/dist/core/ingestion/export-detection.d.ts +18 -0
  42. package/dist/core/ingestion/export-detection.js +231 -0
  43. package/dist/core/ingestion/filesystem-walker.js +4 -3
  44. package/dist/core/ingestion/framework-detection.d.ts +5 -1
  45. package/dist/core/ingestion/framework-detection.js +48 -8
  46. package/dist/core/ingestion/heritage-processor.d.ts +13 -5
  47. package/dist/core/ingestion/heritage-processor.js +109 -55
  48. package/dist/core/ingestion/import-processor.d.ts +16 -20
  49. package/dist/core/ingestion/import-processor.js +202 -696
  50. package/dist/core/ingestion/language-config.d.ts +46 -0
  51. package/dist/core/ingestion/language-config.js +167 -0
  52. package/dist/core/ingestion/mro-processor.d.ts +45 -0
  53. package/dist/core/ingestion/mro-processor.js +369 -0
  54. package/dist/core/ingestion/named-binding-extraction.d.ts +61 -0
  55. package/dist/core/ingestion/named-binding-extraction.js +363 -0
  56. package/dist/core/ingestion/parsing-processor.d.ts +3 -11
  57. package/dist/core/ingestion/parsing-processor.js +85 -181
  58. package/dist/core/ingestion/pipeline.d.ts +5 -1
  59. package/dist/core/ingestion/pipeline.js +192 -116
  60. package/dist/core/ingestion/process-processor.js +2 -1
  61. package/dist/core/ingestion/resolution-context.d.ts +53 -0
  62. package/dist/core/ingestion/resolution-context.js +132 -0
  63. package/dist/core/ingestion/resolvers/csharp.d.ts +22 -0
  64. package/dist/core/ingestion/resolvers/csharp.js +109 -0
  65. package/dist/core/ingestion/resolvers/go.d.ts +19 -0
  66. package/dist/core/ingestion/resolvers/go.js +42 -0
  67. package/dist/core/ingestion/resolvers/index.d.ts +18 -0
  68. package/dist/core/ingestion/resolvers/index.js +13 -0
  69. package/dist/core/ingestion/resolvers/jvm.d.ts +23 -0
  70. package/dist/core/ingestion/resolvers/jvm.js +87 -0
  71. package/dist/core/ingestion/resolvers/php.d.ts +15 -0
  72. package/dist/core/ingestion/resolvers/php.js +35 -0
  73. package/dist/core/ingestion/resolvers/python.d.ts +19 -0
  74. package/dist/core/ingestion/resolvers/python.js +52 -0
  75. package/dist/core/ingestion/resolvers/ruby.d.ts +12 -0
  76. package/dist/core/ingestion/resolvers/ruby.js +15 -0
  77. package/dist/core/ingestion/resolvers/rust.d.ts +15 -0
  78. package/dist/core/ingestion/resolvers/rust.js +73 -0
  79. package/dist/core/ingestion/resolvers/standard.d.ts +28 -0
  80. package/dist/core/ingestion/resolvers/standard.js +123 -0
  81. package/dist/core/ingestion/resolvers/utils.d.ts +33 -0
  82. package/dist/core/ingestion/resolvers/utils.js +122 -0
  83. package/dist/core/ingestion/symbol-table.d.ts +21 -1
  84. package/dist/core/ingestion/symbol-table.js +40 -12
  85. package/dist/core/ingestion/tree-sitter-queries.d.ts +12 -11
  86. package/dist/core/ingestion/tree-sitter-queries.js +642 -485
  87. package/dist/core/ingestion/type-env.d.ts +49 -0
  88. package/dist/core/ingestion/type-env.js +611 -0
  89. package/dist/core/ingestion/type-extractors/c-cpp.d.ts +2 -0
  90. package/dist/core/ingestion/type-extractors/c-cpp.js +385 -0
  91. package/dist/core/ingestion/type-extractors/csharp.d.ts +2 -0
  92. package/dist/core/ingestion/type-extractors/csharp.js +383 -0
  93. package/dist/core/ingestion/type-extractors/go.d.ts +2 -0
  94. package/dist/core/ingestion/type-extractors/go.js +467 -0
  95. package/dist/core/ingestion/type-extractors/index.d.ts +22 -0
  96. package/dist/core/ingestion/type-extractors/index.js +31 -0
  97. package/dist/core/ingestion/type-extractors/jvm.d.ts +3 -0
  98. package/dist/core/ingestion/type-extractors/jvm.js +681 -0
  99. package/dist/core/ingestion/type-extractors/php.d.ts +2 -0
  100. package/dist/core/ingestion/type-extractors/php.js +549 -0
  101. package/dist/core/ingestion/type-extractors/python.d.ts +2 -0
  102. package/dist/core/ingestion/type-extractors/python.js +406 -0
  103. package/dist/core/ingestion/type-extractors/ruby.d.ts +2 -0
  104. package/dist/core/ingestion/type-extractors/ruby.js +389 -0
  105. package/dist/core/ingestion/type-extractors/rust.d.ts +2 -0
  106. package/dist/core/ingestion/type-extractors/rust.js +449 -0
  107. package/dist/core/ingestion/type-extractors/shared.d.ts +133 -0
  108. package/dist/core/ingestion/type-extractors/shared.js +703 -0
  109. package/dist/core/ingestion/type-extractors/swift.d.ts +2 -0
  110. package/dist/core/ingestion/type-extractors/swift.js +137 -0
  111. package/dist/core/ingestion/type-extractors/types.d.ts +127 -0
  112. package/dist/core/ingestion/type-extractors/types.js +1 -0
  113. package/dist/core/ingestion/type-extractors/typescript.d.ts +2 -0
  114. package/dist/core/ingestion/type-extractors/typescript.js +494 -0
  115. package/dist/core/ingestion/utils.d.ts +98 -0
  116. package/dist/core/ingestion/utils.js +1064 -9
  117. package/dist/core/ingestion/workers/parse-worker.d.ts +38 -4
  118. package/dist/core/ingestion/workers/parse-worker.js +251 -359
  119. package/dist/core/ingestion/workers/worker-pool.js +8 -0
  120. package/dist/core/{kuzu → lbug}/csv-generator.d.ts +1 -1
  121. package/dist/core/{kuzu → lbug}/csv-generator.js +20 -4
  122. package/dist/core/{kuzu/kuzu-adapter.d.ts → lbug/lbug-adapter.d.ts} +19 -19
  123. package/dist/core/{kuzu/kuzu-adapter.js → lbug/lbug-adapter.js} +82 -82
  124. package/dist/core/{kuzu → lbug}/schema.d.ts +4 -4
  125. package/dist/core/{kuzu → lbug}/schema.js +304 -289
  126. package/dist/core/search/bm25-index.d.ts +4 -4
  127. package/dist/core/search/bm25-index.js +17 -16
  128. package/dist/core/search/hybrid-search.d.ts +2 -2
  129. package/dist/core/search/hybrid-search.js +9 -9
  130. package/dist/core/tree-sitter/parser-loader.js +9 -2
  131. package/dist/core/wiki/generator.d.ts +4 -52
  132. package/dist/core/wiki/generator.js +53 -552
  133. package/dist/core/wiki/graph-queries.d.ts +4 -46
  134. package/dist/core/wiki/graph-queries.js +103 -282
  135. package/dist/core/wiki/html-viewer.js +192 -192
  136. package/dist/core/wiki/llm-client.js +11 -73
  137. package/dist/core/wiki/prompts.d.ts +8 -52
  138. package/dist/core/wiki/prompts.js +86 -200
  139. package/dist/mcp/compatible-stdio-transport.d.ts +25 -0
  140. package/dist/mcp/compatible-stdio-transport.js +200 -0
  141. package/dist/mcp/core/{kuzu-adapter.d.ts → lbug-adapter.d.ts} +7 -9
  142. package/dist/mcp/core/{kuzu-adapter.js → lbug-adapter.js} +77 -79
  143. package/dist/mcp/local/local-backend.d.ts +7 -6
  144. package/dist/mcp/local/local-backend.js +176 -147
  145. package/dist/mcp/resources.js +42 -42
  146. package/dist/mcp/server.js +18 -19
  147. package/dist/mcp/tools.js +103 -104
  148. package/dist/server/api.js +12 -12
  149. package/dist/server/mcp-http.d.ts +1 -1
  150. package/dist/server/mcp-http.js +1 -1
  151. package/dist/storage/repo-manager.d.ts +20 -2
  152. package/dist/storage/repo-manager.js +55 -1
  153. package/dist/types/pipeline.d.ts +1 -1
  154. package/hooks/claude/gitnexus-hook.cjs +238 -155
  155. package/hooks/claude/pre-tool-use.sh +79 -79
  156. package/hooks/claude/session-start.sh +42 -42
  157. package/package.json +99 -96
  158. package/scripts/patch-tree-sitter-swift.cjs +74 -74
  159. package/skills/gitnexus-cli.md +82 -82
  160. package/skills/gitnexus-debugging.md +89 -89
  161. package/skills/gitnexus-exploring.md +78 -78
  162. package/skills/gitnexus-guide.md +64 -64
  163. package/skills/gitnexus-impact-analysis.md +97 -97
  164. package/skills/gitnexus-pr-review.md +163 -163
  165. package/skills/gitnexus-refactoring.md +121 -121
  166. package/vendor/leiden/index.cjs +355 -355
  167. package/vendor/leiden/utils.cjs +392 -392
  168. package/dist/core/wiki/diagrams.d.ts +0 -27
  169. package/dist/core/wiki/diagrams.js +0 -163
@@ -12,11 +12,10 @@
12
12
  import fs from 'fs/promises';
13
13
  import path from 'path';
14
14
  import { execSync, execFileSync } from 'child_process';
15
- import { initWikiDb, closeWikiDb, getFilesWithExports, getAllFiles, getIntraModuleCallEdges, getInterModuleCallEdges, getProcessesForFiles, getAllProcesses, getInterModuleEdgesForOverview, getCallGraphNeighborFiles, getCommunityFileMapping, getInterCommunityCallEdges, getCrossCommunityProcesses, } from './graph-queries.js';
15
+ import { initWikiDb, closeWikiDb, getFilesWithExports, getAllFiles, getIntraModuleCallEdges, getInterModuleCallEdges, getProcessesForFiles, getAllProcesses, getInterModuleEdgesForOverview, } from './graph-queries.js';
16
16
  import { generateHTMLViewer } from './html-viewer.js';
17
17
  import { callLLM, estimateTokens, } from './llm-client.js';
18
- import { GROUPING_SYSTEM_PROMPT, GROUPING_USER_PROMPT, GROUPING_SYSTEM_PROMPT_LEGACY, GROUPING_USER_PROMPT_LEGACY, MODULE_SYSTEM_PROMPT, MODULE_USER_PROMPT, PARENT_SYSTEM_PROMPT, PARENT_USER_PROMPT, OVERVIEW_SYSTEM_PROMPT, OVERVIEW_USER_PROMPT, fillTemplate, formatFileListForGrouping, formatDirectoryTree, formatCallEdges, formatProcesses, formatCommunityGroups, formatInterCommunityEdges, formatCrossCommunityProcesses, formatModuleRegistry, } from './prompts.js';
19
- import { buildCallGraphMermaid, buildSequenceDiagram, buildInterModuleDiagram, } from './diagrams.js';
18
+ import { GROUPING_SYSTEM_PROMPT, GROUPING_USER_PROMPT, MODULE_SYSTEM_PROMPT, MODULE_USER_PROMPT, PARENT_SYSTEM_PROMPT, PARENT_USER_PROMPT, OVERVIEW_SYSTEM_PROMPT, OVERVIEW_USER_PROMPT, fillTemplate, formatFileListForGrouping, formatDirectoryTree, formatCallEdges, formatProcesses, } from './prompts.js';
20
19
  import { shouldIgnorePath } from '../../config/ignore-service.js';
21
20
  // ─── Constants ────────────────────────────────────────────────────────
22
21
  const DEFAULT_MAX_TOKENS_PER_MODULE = 30_000;
@@ -26,19 +25,18 @@ export class WikiGenerator {
26
25
  repoPath;
27
26
  storagePath;
28
27
  wikiDir;
29
- kuzuPath;
28
+ lbugPath;
30
29
  llmConfig;
31
30
  maxTokensPerModule;
32
31
  concurrency;
33
32
  options;
34
33
  onProgress;
35
34
  failedModules = [];
36
- moduleRegistry = new Map();
37
- constructor(repoPath, storagePath, kuzuPath, llmConfig, options = {}, onProgress) {
35
+ constructor(repoPath, storagePath, lbugPath, llmConfig, options = {}, onProgress) {
38
36
  this.repoPath = repoPath;
39
37
  this.storagePath = storagePath;
40
38
  this.wikiDir = path.join(storagePath, WIKI_DIR);
41
- this.kuzuPath = kuzuPath;
39
+ this.lbugPath = lbugPath;
42
40
  this.options = options;
43
41
  this.llmConfig = llmConfig;
44
42
  this.maxTokensPerModule = options.maxTokensPerModule ?? DEFAULT_MAX_TOKENS_PER_MODULE;
@@ -78,20 +76,12 @@ export class WikiGenerator {
78
76
  await this.ensureHTMLViewer();
79
77
  return { pagesGenerated: 0, mode: 'up-to-date', failedModules: [] };
80
78
  }
81
- // Force mode: clean all state and delete pages
79
+ // Force mode: delete snapshot to force full re-grouping
82
80
  if (forceMode) {
83
81
  try {
84
82
  await fs.unlink(path.join(this.wikiDir, 'first_module_tree.json'));
85
83
  }
86
84
  catch { }
87
- try {
88
- await fs.unlink(path.join(this.wikiDir, 'meta.json'));
89
- }
90
- catch { }
91
- try {
92
- await fs.unlink(path.join(this.wikiDir, 'module_tree.json'));
93
- }
94
- catch { }
95
85
  // Delete existing module pages so they get regenerated
96
86
  const existingFiles = await fs.readdir(this.wikiDir).catch(() => []);
97
87
  for (const f of existingFiles) {
@@ -105,7 +95,7 @@ export class WikiGenerator {
105
95
  }
106
96
  // Init graph
107
97
  this.onProgress('init', 2, 'Connecting to knowledge graph...');
108
- await initWikiDb(this.kuzuPath);
98
+ await initWikiDb(this.lbugPath);
109
99
  let result;
110
100
  try {
111
101
  if (!forceMode && existingMeta && existingMeta.fromCommit) {
@@ -116,14 +106,10 @@ export class WikiGenerator {
116
106
  }
117
107
  }
118
108
  finally {
119
- console.log('[wiki] Closing KuzuDB...');
120
109
  await closeWikiDb();
121
- console.log('[wiki] KuzuDB closed');
122
110
  }
123
111
  // Always generate the HTML viewer after wiki content changes
124
- console.log('[wiki] Building HTML viewer...');
125
112
  await this.ensureHTMLViewer();
126
- console.log('[wiki] HTML viewer done');
127
113
  return result;
128
114
  }
129
115
  // ─── HTML Viewer ─────────────────────────────────────────────────────
@@ -158,8 +144,6 @@ export class WikiGenerator {
158
144
  // Phase 1: Build module tree
159
145
  const moduleTree = await this.buildModuleTree(enrichedFiles);
160
146
  pagesGenerated = 0;
161
- // Build module registry for cross-references
162
- this.moduleRegistry = this.buildModuleRegistry(moduleTree, enrichedFiles);
163
147
  // Phase 2: Generate module pages (parallel with concurrency limit)
164
148
  const totalModules = this.countModules(moduleTree);
165
149
  let modulesProcessed = 0;
@@ -211,18 +195,9 @@ export class WikiGenerator {
211
195
  }
212
196
  // Phase 3: Generate overview
213
197
  this.onProgress('overview', 88, 'Generating overview page...');
214
- try {
215
- await this.generateOverview(moduleTree);
216
- pagesGenerated++;
217
- console.log('[wiki] Overview generated successfully');
218
- }
219
- catch (err) {
220
- console.error('[wiki] Overview generation failed:', err.message);
221
- this.failedModules.push('_overview');
222
- this.onProgress('overview', 90, `Overview generation failed: ${err.message?.slice(0, 120) || 'unknown error'}`);
223
- }
198
+ await this.generateOverview(moduleTree);
199
+ pagesGenerated++;
224
200
  // Save metadata
225
- console.log('[wiki] Saving metadata...');
226
201
  this.onProgress('finalize', 95, 'Saving metadata...');
227
202
  const moduleFiles = this.extractModuleFiles(moduleTree);
228
203
  await this.saveModuleTree(moduleTree);
@@ -251,41 +226,15 @@ export class WikiGenerator {
251
226
  catch {
252
227
  // No snapshot, generate new
253
228
  }
254
- this.onProgress('grouping', 12, 'Querying graph communities...');
255
- // Try to get community data for graph-driven decomposition
256
- const communityGroups = await getCommunityFileMapping();
257
- const useCommunities = communityGroups.length > 0;
258
- let systemPrompt;
259
- let prompt;
229
+ this.onProgress('grouping', 15, 'Grouping files into modules (LLM)...');
260
230
  const fileList = formatFileListForGrouping(files);
261
231
  const dirTree = formatDirectoryTree(files.map(f => f.filePath));
262
- if (useCommunities) {
263
- // Graph-aware grouping: use communities as primary signal
264
- this.onProgress('grouping', 15, 'Grouping files into modules (graph-driven LLM)...');
265
- const [interEdges, crossProcs] = await Promise.all([
266
- getInterCommunityCallEdges(),
267
- getCrossCommunityProcesses(),
268
- ]);
269
- systemPrompt = GROUPING_SYSTEM_PROMPT;
270
- prompt = fillTemplate(GROUPING_USER_PROMPT, {
271
- COMMUNITY_GROUPS: formatCommunityGroups(communityGroups),
272
- INTER_COMMUNITY_EDGES: formatInterCommunityEdges(interEdges),
273
- CROSS_COMMUNITY_PROCESSES: formatCrossCommunityProcesses(crossProcs),
274
- FILE_LIST: fileList,
275
- DIRECTORY_TREE: dirTree,
276
- });
277
- }
278
- else {
279
- // Legacy grouping: file-only approach
280
- this.onProgress('grouping', 15, 'Grouping files into modules (LLM)...');
281
- systemPrompt = GROUPING_SYSTEM_PROMPT_LEGACY;
282
- prompt = fillTemplate(GROUPING_USER_PROMPT_LEGACY, {
283
- FILE_LIST: fileList,
284
- DIRECTORY_TREE: dirTree,
285
- });
286
- }
287
- const response = await callLLM(prompt, this.llmConfig, systemPrompt, this.streamOpts('Grouping files', 15));
288
- const grouping = this.parseGroupingResponse(response.content, files, communityGroups);
232
+ const prompt = fillTemplate(GROUPING_USER_PROMPT, {
233
+ FILE_LIST: fileList,
234
+ DIRECTORY_TREE: dirTree,
235
+ });
236
+ const response = await callLLM(prompt, this.llmConfig, GROUPING_SYSTEM_PROMPT, this.streamOpts('Grouping files', 15));
237
+ const grouping = this.parseGroupingResponse(response.content, files);
289
238
  // Convert to tree nodes
290
239
  const tree = [];
291
240
  for (const [moduleName, modulePaths] of Object.entries(grouping)) {
@@ -294,9 +243,7 @@ export class WikiGenerator {
294
243
  // Token budget check — split if too large
295
244
  const totalTokens = await this.estimateModuleTokens(modulePaths);
296
245
  if (totalTokens > this.maxTokensPerModule && modulePaths.length > 3) {
297
- node.children = useCommunities
298
- ? this.splitByCommunity(moduleName, modulePaths, communityGroups)
299
- : this.splitBySubdirectory(moduleName, modulePaths);
246
+ node.children = this.splitBySubdirectory(moduleName, modulePaths);
300
247
  node.files = []; // Parent doesn't own files directly when split
301
248
  }
302
249
  tree.push(node);
@@ -309,7 +256,7 @@ export class WikiGenerator {
309
256
  /**
310
257
  * Parse LLM grouping response. Validates all files are assigned.
311
258
  */
312
- parseGroupingResponse(content, files, communityGroups) {
259
+ parseGroupingResponse(content, files) {
313
260
  // Extract JSON from response (handle markdown fences)
314
261
  let jsonStr = content.trim();
315
262
  const fenceMatch = jsonStr.match(/```(?:json)?\s*\n?([\s\S]*?)\n?```/);
@@ -321,11 +268,11 @@ export class WikiGenerator {
321
268
  parsed = JSON.parse(jsonStr);
322
269
  }
323
270
  catch {
324
- // Fallback: group by community or top-level directory
325
- return this.fallbackGrouping(files, communityGroups);
271
+ // Fallback: group by top-level directory
272
+ return this.fallbackGrouping(files);
326
273
  }
327
274
  if (typeof parsed !== 'object' || Array.isArray(parsed)) {
328
- return this.fallbackGrouping(files, communityGroups);
275
+ return this.fallbackGrouping(files);
329
276
  }
330
277
  // Validate — ensure all files are assigned
331
278
  const allFilePaths = new Set(files.map(f => f.filePath));
@@ -354,33 +301,12 @@ export class WikiGenerator {
354
301
  }
355
302
  return Object.keys(validGrouping).length > 0
356
303
  ? validGrouping
357
- : this.fallbackGrouping(files, communityGroups);
304
+ : this.fallbackGrouping(files);
358
305
  }
359
306
  /**
360
- * Fallback grouping. Uses community file mapping when available,
361
- * otherwise groups by top-level directory.
307
+ * Fallback grouping by top-level directory when LLM parsing fails.
362
308
  */
363
- fallbackGrouping(files, communityGroups) {
364
- // Use community data if available
365
- if (communityGroups && communityGroups.length > 0) {
366
- const result = {};
367
- const assignedFiles = new Set();
368
- for (const group of communityGroups) {
369
- if (group.files.length > 0) {
370
- result[group.label] = [...group.files];
371
- for (const f of group.files)
372
- assignedFiles.add(f);
373
- }
374
- }
375
- // Assign unassigned files
376
- const unassigned = files.map(f => f.filePath).filter(fp => !assignedFiles.has(fp));
377
- if (unassigned.length > 0) {
378
- result['Other'] = unassigned;
379
- }
380
- if (Object.keys(result).length > 0)
381
- return result;
382
- }
383
- // Directory-based fallback
309
+ fallbackGrouping(files) {
384
310
  const groups = new Map();
385
311
  for (const f of files) {
386
312
  const parts = f.filePath.replace(/\\/g, '/').split('/');
@@ -416,159 +342,6 @@ export class WikiGenerator {
416
342
  files: subFiles,
417
343
  }));
418
344
  }
419
- /**
420
- * Split a large module into sub-modules using community data.
421
- * Falls back to subdirectory splitting if community data doesn't help.
422
- */
423
- splitByCommunity(moduleName, files, communityGroups) {
424
- const subGroups = new Map();
425
- const unassigned = [];
426
- // Group files by their community membership
427
- for (const fp of files) {
428
- let bestCommunity = '';
429
- let bestCount = 0;
430
- for (const group of communityGroups) {
431
- const count = group.files.filter(f => f === fp).length +
432
- group.secondaryFiles.filter(f => f === fp).length;
433
- if (count > bestCount) {
434
- bestCount = count;
435
- bestCommunity = group.label;
436
- }
437
- }
438
- if (bestCommunity) {
439
- let group = subGroups.get(bestCommunity);
440
- if (!group) {
441
- group = [];
442
- subGroups.set(bestCommunity, group);
443
- }
444
- group.push(fp);
445
- }
446
- else {
447
- unassigned.push(fp);
448
- }
449
- }
450
- // If community split didn't produce meaningful groups, fall back to subdirectory
451
- if (subGroups.size <= 1) {
452
- return this.splitBySubdirectory(moduleName, files);
453
- }
454
- // Add unassigned to largest group
455
- if (unassigned.length > 0) {
456
- let largestKey = '';
457
- let largestSize = 0;
458
- for (const [key, group] of subGroups) {
459
- if (group.length > largestSize) {
460
- largestSize = group.length;
461
- largestKey = key;
462
- }
463
- }
464
- if (largestKey) {
465
- subGroups.get(largestKey).push(...unassigned);
466
- }
467
- }
468
- return Array.from(subGroups.entries()).map(([label, subFiles]) => ({
469
- name: `${moduleName} — ${label}`,
470
- slug: this.slugify(`${moduleName}-${label}`),
471
- files: subFiles,
472
- }));
473
- }
474
- // ─── Module Registry (Cross-References) ─────────────────────────────
475
- /**
476
- * Build a registry mapping module slugs to their names and exported symbols.
477
- */
478
- buildModuleRegistry(tree, filesWithExports) {
479
- const exportMap = new Map(filesWithExports.map(f => [f.filePath, f]));
480
- const registry = new Map();
481
- const addNode = (node) => {
482
- const symbols = [];
483
- const nodeFiles = node.children
484
- ? node.children.flatMap(c => c.files)
485
- : node.files;
486
- for (const fp of nodeFiles) {
487
- const fileEntry = exportMap.get(fp);
488
- if (fileEntry) {
489
- for (const sym of fileEntry.symbols) {
490
- if (symbols.length < 10)
491
- symbols.push(sym.name);
492
- }
493
- }
494
- }
495
- registry.set(node.slug, { name: node.name, slug: node.slug, symbols });
496
- if (node.children) {
497
- for (const child of node.children) {
498
- addNode(child);
499
- }
500
- }
501
- };
502
- for (const node of tree) {
503
- addNode(node);
504
- }
505
- return registry;
506
- }
507
- // ─── Cross-Reference Validation ─────────────────────────────────────
508
- /**
509
- * Validate and fix cross-reference links in generated markdown.
510
- * Rewrites invalid slug references using fuzzy matching.
511
- */
512
- validateAndFixCrossReferences(markdown, registry) {
513
- const validSlugs = new Set(Array.from(registry.values()).map(e => e.slug));
514
- const slugByName = new Map();
515
- for (const entry of registry.values()) {
516
- slugByName.set(entry.name.toLowerCase(), entry.slug);
517
- }
518
- return markdown.replace(/\[([^\]]+)\]\(([a-z0-9-]+)\.md\)/g, (match, text, slug) => {
519
- if (validSlugs.has(slug))
520
- return match;
521
- // Try fuzzy match by link text
522
- const fuzzySlug = slugByName.get(text.toLowerCase());
523
- if (fuzzySlug)
524
- return `[${text}](${fuzzySlug}.md)`;
525
- // Try matching slug as partial
526
- for (const validSlug of validSlugs) {
527
- if (validSlug.includes(slug) || slug.includes(validSlug)) {
528
- return `[${text}](${validSlug}.md)`;
529
- }
530
- }
531
- // Strip broken link
532
- return text;
533
- });
534
- }
535
- // ─── Summary Extraction ─────────────────────────────────────────────
536
- /**
537
- * Extract the overview summary from a generated page.
538
- * Uses structured markers when available, falls back to heuristics.
539
- */
540
- extractSummary(content, maxLength = 800) {
541
- // Priority 1: <!-- summary-end --> marker
542
- const markerIdx = content.indexOf('<!-- summary-end -->');
543
- if (markerIdx > 0) {
544
- return content.slice(0, markerIdx).trim();
545
- }
546
- // Priority 2: Content up to first ## heading (skip # title)
547
- const lines = content.split('\n');
548
- let pastTitle = false;
549
- let result = '';
550
- for (const line of lines) {
551
- if (!pastTitle && line.startsWith('# ')) {
552
- pastTitle = true;
553
- result += line + '\n';
554
- continue;
555
- }
556
- if (pastTitle && /^##\s/.test(line)) {
557
- break;
558
- }
559
- result += line + '\n';
560
- }
561
- if (result.trim().length > 20) {
562
- return result.trim().slice(0, maxLength);
563
- }
564
- // Priority 3: Truncate at sentence boundary near maxLength
565
- const truncated = content.slice(0, maxLength);
566
- const lastSentence = truncated.lastIndexOf('. ');
567
- if (lastSentence > maxLength * 0.5) {
568
- return truncated.slice(0, lastSentence + 1).trim();
569
- }
570
- return truncated.trim();
571
- }
572
345
  // ─── Phase 2: Generate Module Pages ─────────────────────────────────
573
346
  /**
574
347
  * Generate a leaf module page from source code + graph data.
@@ -596,25 +369,10 @@ export class WikiGenerator {
596
369
  OUTGOING_CALLS: formatCallEdges(interCalls.outgoing),
597
370
  INCOMING_CALLS: formatCallEdges(interCalls.incoming),
598
371
  PROCESSES: formatProcesses(processes),
599
- MODULE_REGISTRY: formatModuleRegistry(this.moduleRegistry, node.slug),
600
372
  });
601
373
  const response = await callLLM(prompt, this.llmConfig, MODULE_SYSTEM_PROMPT, this.streamOpts(node.name));
602
- // Build deterministic diagrams
603
- let diagramSection = '';
604
- const callGraph = buildCallGraphMermaid(node.name, intraCalls);
605
- if (callGraph) {
606
- diagramSection += `\n\n## Internal Call Graph\n\n\`\`\`mermaid\n${callGraph}\n\`\`\``;
607
- }
608
- const topProcesses = processes.slice(0, 3);
609
- for (const proc of topProcesses) {
610
- const seqDiagram = buildSequenceDiagram(proc);
611
- if (seqDiagram) {
612
- diagramSection += `\n\n## Workflow: ${proc.label}\n\n\`\`\`mermaid\n${seqDiagram}\n\`\`\``;
613
- }
614
- }
615
- // Assemble page, validate cross-references, and write
616
- let pageContent = `# ${node.name}\n\n${response.content}${diagramSection}`;
617
- pageContent = this.validateAndFixCrossReferences(pageContent, this.moduleRegistry);
374
+ // Write page with front matter
375
+ const pageContent = `# ${node.name}\n\n${response.content}`;
618
376
  await fs.writeFile(path.join(this.wikiDir, `${node.slug}.md`), pageContent, 'utf-8');
619
377
  }
620
378
  /**
@@ -629,7 +387,9 @@ export class WikiGenerator {
629
387
  const childPage = path.join(this.wikiDir, `${child.slug}.md`);
630
388
  try {
631
389
  const content = await fs.readFile(childPage, 'utf-8');
632
- const overview = this.extractSummary(content);
390
+ // Extract overview section (first ~500 chars or up to "### Architecture")
391
+ const overviewEnd = content.indexOf('### Architecture');
392
+ const overview = overviewEnd > 0 ? content.slice(0, overviewEnd).trim() : content.slice(0, 800).trim();
633
393
  childDocs.push(`#### ${child.name}\n${overview}`);
634
394
  }
635
395
  catch {
@@ -638,26 +398,16 @@ export class WikiGenerator {
638
398
  }
639
399
  // Get cross-child call edges
640
400
  const allChildFiles = node.children.flatMap(c => c.files);
641
- const [crossCalls, processes] = await Promise.all([
642
- getIntraModuleCallEdges(allChildFiles),
643
- getProcessesForFiles(allChildFiles, 3),
644
- ]);
401
+ const crossCalls = await getIntraModuleCallEdges(allChildFiles);
402
+ const processes = await getProcessesForFiles(allChildFiles, 3);
645
403
  const prompt = fillTemplate(PARENT_USER_PROMPT, {
646
404
  MODULE_NAME: node.name,
647
405
  CHILDREN_DOCS: childDocs.join('\n\n'),
648
406
  CROSS_MODULE_CALLS: formatCallEdges(crossCalls),
649
407
  CROSS_PROCESSES: formatProcesses(processes),
650
- MODULE_REGISTRY: formatModuleRegistry(this.moduleRegistry, node.slug),
651
408
  });
652
409
  const response = await callLLM(prompt, this.llmConfig, PARENT_SYSTEM_PROMPT, this.streamOpts(node.name));
653
- // Append cross-child call graph diagram
654
- let diagramSection = '';
655
- const callGraph = buildCallGraphMermaid(node.name, crossCalls);
656
- if (callGraph) {
657
- diagramSection += `\n\n## Cross-Module Call Graph\n\n\`\`\`mermaid\n${callGraph}\n\`\`\``;
658
- }
659
- let pageContent = `# ${node.name}\n\n${response.content}${diagramSection}`;
660
- pageContent = this.validateAndFixCrossReferences(pageContent, this.moduleRegistry);
410
+ const pageContent = `# ${node.name}\n\n${response.content}`;
661
411
  await fs.writeFile(path.join(this.wikiDir, `${node.slug}.md`), pageContent, 'utf-8');
662
412
  }
663
413
  // ─── Phase 3: Generate Overview ─────────────────────────────────────
@@ -668,7 +418,8 @@ export class WikiGenerator {
668
418
  const pagePath = path.join(this.wikiDir, `${node.slug}.md`);
669
419
  try {
670
420
  const content = await fs.readFile(pagePath, 'utf-8');
671
- const overview = this.extractSummary(content, 600);
421
+ const overviewEnd = content.indexOf('### Architecture');
422
+ const overview = overviewEnd > 0 ? content.slice(0, overviewEnd).trim() : content.slice(0, 600).trim();
672
423
  moduleSummaries.push(`#### ${node.name}\n${overview}`);
673
424
  }
674
425
  catch {
@@ -685,49 +436,22 @@ export class WikiGenerator {
685
436
  const edgesText = moduleEdges.length > 0
686
437
  ? moduleEdges.map(e => `${e.from} → ${e.to} (${e.count} calls)`).join('\n')
687
438
  : 'No inter-module call edges detected';
688
- // Cap module summaries to avoid blowing up the prompt
689
- let summariesText = moduleSummaries.join('\n\n');
690
- const MAX_SUMMARIES_CHARS = 30_000; // ~7.5k tokens
691
- if (summariesText.length > MAX_SUMMARIES_CHARS) {
692
- summariesText = summariesText.slice(0, MAX_SUMMARIES_CHARS) + '\n\n(... remaining modules truncated for brevity)';
693
- }
694
439
  const prompt = fillTemplate(OVERVIEW_USER_PROMPT, {
695
440
  PROJECT_INFO: projectInfo,
696
- MODULE_SUMMARIES: summariesText,
441
+ MODULE_SUMMARIES: moduleSummaries.join('\n\n'),
697
442
  MODULE_EDGES: edgesText,
698
443
  TOP_PROCESSES: formatProcesses(topProcesses),
699
- MODULE_REGISTRY: formatModuleRegistry(this.moduleRegistry),
700
444
  });
701
- const promptTokens = estimateTokens(prompt + OVERVIEW_SYSTEM_PROMPT);
702
- this.onProgress('overview', 88, `Generating overview (~${promptTokens} input tokens)...`);
703
445
  const response = await callLLM(prompt, this.llmConfig, OVERVIEW_SYSTEM_PROMPT, this.streamOpts('Generating overview', 88));
704
- // Append architecture diagram
705
- let diagramSection = '';
706
- const archDiagram = buildInterModuleDiagram(moduleEdges);
707
- if (archDiagram) {
708
- diagramSection += `\n\n## Architecture Diagram\n\n\`\`\`mermaid\n${archDiagram}\n\`\`\``;
709
- }
710
- const displayName = this.options.repoName || path.basename(this.repoPath);
711
- let pageContent = `# ${displayName} — Wiki\n\n${response.content}${diagramSection}`;
712
- pageContent = this.validateAndFixCrossReferences(pageContent, this.moduleRegistry);
446
+ const pageContent = `# ${path.basename(this.repoPath)} — Wiki\n\n${response.content}`;
713
447
  await fs.writeFile(path.join(this.wikiDir, 'overview.md'), pageContent, 'utf-8');
714
448
  }
715
449
  // ─── Incremental Updates ────────────────────────────────────────────
716
450
  async incrementalUpdate(existingMeta, currentCommit) {
717
451
  this.onProgress('incremental', 5, 'Detecting changes...');
718
- // Get changed files with status since last generation
719
- const changedEntries = this.getChangedFilesWithStatus(existingMeta.fromCommit, currentCommit);
720
- // Shallow clone fallback — commit history too shallow for incremental diff
721
- if (changedEntries.some(e => e.filePath === '__SHALLOW_CLONE_FALLBACK__')) {
722
- this.onProgress('incremental', 10, 'Commit history too shallow — running full generation...');
723
- try {
724
- await fs.unlink(path.join(this.wikiDir, 'first_module_tree.json'));
725
- }
726
- catch { }
727
- const fullResult = await this.fullGeneration(currentCommit);
728
- return { ...fullResult, mode: 'incremental' };
729
- }
730
- if (changedEntries.length === 0) {
452
+ // Get changed files since last generation
453
+ const changedFiles = this.getChangedFiles(existingMeta.fromCommit, currentCommit);
454
+ if (changedFiles.length === 0) {
731
455
  // No file changes but commit differs (e.g. merge commit)
732
456
  await this.saveWikiMeta({
733
457
  ...existingMeta,
@@ -736,27 +460,11 @@ export class WikiGenerator {
736
460
  });
737
461
  return { pagesGenerated: 0, mode: 'incremental', failedModules: [] };
738
462
  }
739
- // Categorize changes
740
- const deletedFiles = [];
741
- const addedFiles = [];
742
- const modifiedFiles = [];
743
- for (const entry of changedEntries) {
744
- if (entry.status === 'D')
745
- deletedFiles.push(entry.filePath);
746
- else if (entry.status === 'A')
747
- addedFiles.push(entry.filePath);
748
- else
749
- modifiedFiles.push(entry.filePath);
750
- }
751
- this.onProgress('incremental', 10, `${changedEntries.length} files changed (${addedFiles.length}A/${modifiedFiles.length}M/${deletedFiles.length}D)`);
752
- // Purge deleted files from metadata and tree
753
- if (deletedFiles.length > 0) {
754
- this.purgeDeletedFiles(existingMeta, deletedFiles);
755
- }
756
- // Determine affected modules from modified files
463
+ this.onProgress('incremental', 10, `${changedFiles.length} files changed`);
464
+ // Determine affected modules
757
465
  const affectedModules = new Set();
758
466
  const newFiles = [];
759
- for (const fp of [...modifiedFiles, ...addedFiles]) {
467
+ for (const fp of changedFiles) {
760
468
  let found = false;
761
469
  for (const [mod, files] of Object.entries(existingMeta.moduleFiles)) {
762
470
  if (files.includes(fp)) {
@@ -769,15 +477,6 @@ export class WikiGenerator {
769
477
  newFiles.push(fp);
770
478
  }
771
479
  }
772
- // Also mark modules that lost files as affected
773
- for (const fp of deletedFiles) {
774
- for (const [mod, files] of Object.entries(existingMeta.moduleFiles)) {
775
- if (files.includes(fp)) {
776
- affectedModules.add(mod);
777
- break;
778
- }
779
- }
780
- }
781
480
  // If significant new files exist, re-run full grouping
782
481
  if (newFiles.length > 5) {
783
482
  this.onProgress('incremental', 15, 'Significant new files detected, running full generation...');
@@ -789,17 +488,14 @@ export class WikiGenerator {
789
488
  const fullResult = await this.fullGeneration(currentCommit);
790
489
  return { ...fullResult, mode: 'incremental' };
791
490
  }
792
- // Assign new files to nearest module using call-graph neighbors
491
+ // Add new files to nearest module or "Other"
793
492
  if (newFiles.length > 0) {
794
- const assignments = await this.assignNewFilesToModules(newFiles, existingMeta);
795
- this.syncNewFilesToTree(existingMeta, assignments);
796
- for (const mod of new Set(Object.values(assignments))) {
797
- affectedModules.add(mod);
493
+ if (!existingMeta.moduleFiles['Other']) {
494
+ existingMeta.moduleFiles['Other'] = [];
798
495
  }
496
+ existingMeta.moduleFiles['Other'].push(...newFiles);
497
+ affectedModules.add('Other');
799
498
  }
800
- // Build registry for cross-references
801
- const enrichedFiles = await getFilesWithExports();
802
- this.moduleRegistry = this.buildModuleRegistry(existingMeta.moduleTree, enrichedFiles);
803
499
  // Regenerate affected module pages (parallel)
804
500
  let pagesGenerated = 0;
805
501
  const moduleTree = existingMeta.moduleTree;
@@ -843,9 +539,8 @@ export class WikiGenerator {
843
539
  await this.generateOverview(moduleTree);
844
540
  pagesGenerated++;
845
541
  }
846
- // Save updated metadata and module tree
542
+ // Save updated metadata
847
543
  this.onProgress('incremental', 95, 'Saving metadata...');
848
- await this.saveModuleTree(moduleTree);
849
544
  await this.saveWikiMeta({
850
545
  ...existingMeta,
851
546
  fromCommit: currentCommit,
@@ -855,119 +550,6 @@ export class WikiGenerator {
855
550
  this.onProgress('done', 100, 'Incremental update complete');
856
551
  return { pagesGenerated, mode: 'incremental', failedModules: [...this.failedModules] };
857
552
  }
858
- // ─── Incremental Helpers ───────────────────────────────────────────
859
- /**
860
- * Purge deleted files from module metadata and tree.
861
- * Removes orphaned modules that lost all files.
862
- */
863
- purgeDeletedFiles(meta, deletedFiles) {
864
- const deletedSet = new Set(deletedFiles);
865
- // Remove from moduleFiles
866
- for (const [mod, files] of Object.entries(meta.moduleFiles)) {
867
- meta.moduleFiles[mod] = files.filter(f => !deletedSet.has(f));
868
- }
869
- // Prune empty modules from moduleFiles
870
- for (const mod of Object.keys(meta.moduleFiles)) {
871
- if (meta.moduleFiles[mod].length === 0) {
872
- delete meta.moduleFiles[mod];
873
- // Delete orphaned markdown page
874
- const slug = this.slugify(mod);
875
- fs.unlink(path.join(this.wikiDir, `${slug}.md`)).catch(() => { });
876
- }
877
- }
878
- // Walk moduleTree recursively, filter files, prune empty nodes
879
- const pruneTree = (nodes) => {
880
- return nodes.filter(node => {
881
- node.files = node.files.filter(f => !deletedSet.has(f));
882
- if (node.children) {
883
- node.children = pruneTree(node.children);
884
- // If parent lost all children and has no files, prune it
885
- if (node.children.length === 0 && node.files.length === 0) {
886
- fs.unlink(path.join(this.wikiDir, `${node.slug}.md`)).catch(() => { });
887
- return false;
888
- }
889
- }
890
- else if (node.files.length === 0) {
891
- fs.unlink(path.join(this.wikiDir, `${node.slug}.md`)).catch(() => { });
892
- return false;
893
- }
894
- return true;
895
- });
896
- };
897
- meta.moduleTree = pruneTree(meta.moduleTree);
898
- }
899
- /**
900
- * Assign new files to existing modules using call-graph neighbor analysis.
901
- * Falls back to "Other" if no neighbors found.
902
- */
903
- async assignNewFilesToModules(newFiles, meta) {
904
- const assignments = {};
905
- // Build file-to-module lookup from existing metadata
906
- const fileToModule = new Map();
907
- for (const [mod, files] of Object.entries(meta.moduleFiles)) {
908
- for (const f of files)
909
- fileToModule.set(f, mod);
910
- }
911
- // Query per-file (newFiles.length <= 5 since >5 triggers full regen)
912
- for (const fp of newFiles) {
913
- const neighbors = await getCallGraphNeighborFiles([fp]);
914
- const knownNeighbors = neighbors.filter(n => fileToModule.has(n));
915
- if (knownNeighbors.length > 0) {
916
- // Count hits per module
917
- const moduleCounts = new Map();
918
- for (const n of knownNeighbors) {
919
- const mod = fileToModule.get(n);
920
- moduleCounts.set(mod, (moduleCounts.get(mod) || 0) + 1);
921
- }
922
- // Assign to module with most neighbor hits
923
- let bestMod = 'Other';
924
- let bestCount = 0;
925
- for (const [mod, count] of moduleCounts) {
926
- if (count > bestCount) {
927
- bestCount = count;
928
- bestMod = mod;
929
- }
930
- }
931
- assignments[fp] = bestMod;
932
- }
933
- else {
934
- assignments[fp] = 'Other';
935
- }
936
- }
937
- return assignments;
938
- }
939
- /**
940
- * Sync new file assignments into both moduleFiles and moduleTree.
941
- */
942
- syncNewFilesToTree(meta, assignments) {
943
- for (const [fp, mod] of Object.entries(assignments)) {
944
- // Update moduleFiles
945
- if (!meta.moduleFiles[mod]) {
946
- meta.moduleFiles[mod] = [];
947
- }
948
- meta.moduleFiles[mod].push(fp);
949
- // Update moduleTree
950
- const modSlug = this.slugify(mod);
951
- const node = this.findNodeBySlug(meta.moduleTree, modSlug);
952
- if (node) {
953
- if (node.children && node.children.length > 0) {
954
- // Add to first child as default
955
- node.children[0].files.push(fp);
956
- }
957
- else {
958
- node.files.push(fp);
959
- }
960
- }
961
- else {
962
- // Create new "Other" node if needed
963
- meta.moduleTree.push({
964
- name: mod,
965
- slug: modSlug,
966
- files: [fp],
967
- });
968
- }
969
- }
970
- }
971
553
  // ─── Helpers ────────────────────────────────────────────────────────
972
554
  getCurrentCommit() {
973
555
  try {
@@ -977,84 +559,13 @@ export class WikiGenerator {
977
559
  return '';
978
560
  }
979
561
  }
980
- /**
981
- * Parse git diff --name-status output into structured entries.
982
- */
983
- parseNameStatusOutput(output) {
984
- if (!output)
985
- return [];
986
- const results = [];
987
- for (const line of output.split('\n').filter(Boolean)) {
988
- const parts = line.split('\t');
989
- if (parts.length < 2)
990
- continue;
991
- const rawStatus = parts[0].charAt(0);
992
- if (rawStatus === 'R' && parts.length >= 3) {
993
- // Rename: treat as delete old + add new
994
- results.push({ status: 'D', filePath: parts[1] });
995
- results.push({ status: 'A', filePath: parts[2] });
996
- }
997
- else if (rawStatus === 'A' || rawStatus === 'M' || rawStatus === 'D') {
998
- results.push({ status: rawStatus, filePath: parts[1] });
999
- }
1000
- else {
1001
- // Default to modified for unknown statuses (C for copy, etc.)
1002
- results.push({ status: 'M', filePath: parts[parts.length - 1] });
1003
- }
1004
- }
1005
- return results;
1006
- }
1007
- /**
1008
- * Get changed files with their status (Added, Modified, Deleted).
1009
- * Handles renames as Delete + Add.
1010
- * Detects shallow clones and attempts to unshallow if needed.
1011
- */
1012
- getChangedFilesWithStatus(fromCommit, toCommit) {
1013
- try {
1014
- const output = execFileSync('git', ['diff', `${fromCommit}..${toCommit}`, '--name-status'], { cwd: this.repoPath }).toString().trim();
1015
- if (output)
1016
- return this.parseNameStatusOutput(output);
1017
- // Empty output — verify fromCommit is reachable (not a shallow clone issue)
1018
- try {
1019
- execFileSync('git', ['cat-file', '-t', fromCommit], { cwd: this.repoPath });
1020
- // Commit is reachable, genuinely no changes
1021
- return [];
1022
- }
1023
- catch {
1024
- // fromCommit not reachable — fall through to unshallow logic
1025
- }
1026
- }
1027
- catch {
1028
- // git diff threw — most common shallow clone symptom ("fatal: bad revision")
1029
- // Verify it's actually a reachability issue before attempting unshallow
1030
- try {
1031
- execFileSync('git', ['cat-file', '-t', fromCommit], { cwd: this.repoPath });
1032
- // Commit exists but diff still failed — genuine error, not shallow
1033
- return [];
1034
- }
1035
- catch {
1036
- // fromCommit not reachable — fall through to unshallow logic
1037
- }
1038
- }
1039
- // fromCommit not reachable — likely a shallow clone
1040
- console.log(`[wiki] fromCommit ${fromCommit.slice(0, 8)} not reachable, attempting to unshallow...`);
1041
- try {
1042
- execFileSync('git', ['fetch', '--unshallow'], {
1043
- cwd: this.repoPath,
1044
- timeout: 120_000,
1045
- });
1046
- }
1047
- catch (fetchErr) {
1048
- console.warn(`[wiki] git fetch --unshallow failed:`, fetchErr);
1049
- return [{ status: 'A', filePath: '__SHALLOW_CLONE_FALLBACK__' }];
1050
- }
1051
- // Retry the diff after unshallowing
562
+ getChangedFiles(fromCommit, toCommit) {
1052
563
  try {
1053
- const retryOutput = execFileSync('git', ['diff', `${fromCommit}..${toCommit}`, '--name-status'], { cwd: this.repoPath }).toString().trim();
1054
- return this.parseNameStatusOutput(retryOutput);
564
+ const output = execFileSync('git', ['diff', `${fromCommit}..${toCommit}`, '--name-only'], { cwd: this.repoPath }).toString().trim();
565
+ return output ? output.split('\n').filter(Boolean) : [];
1055
566
  }
1056
567
  catch {
1057
- return [{ status: 'A', filePath: '__SHALLOW_CLONE_FALLBACK__' }];
568
+ return [];
1058
569
  }
1059
570
  }
1060
571
  async readSourceFiles(filePaths) {
@@ -1241,22 +752,12 @@ export class WikiGenerator {
1241
752
  }
1242
753
  return null;
1243
754
  }
1244
- /** Set of all slugs assigned so far — used to prevent collisions. */
1245
- assignedSlugs = new Set();
1246
755
  slugify(name) {
1247
- let base = name
756
+ return name
1248
757
  .toLowerCase()
1249
758
  .replace(/[^a-z0-9]+/g, '-')
1250
759
  .replace(/^-+|-+$/g, '')
1251
760
  .slice(0, 60);
1252
- // Deduplicate: append -2, -3, etc. if slug already taken
1253
- let slug = base;
1254
- let counter = 2;
1255
- while (this.assignedSlugs.has(slug)) {
1256
- slug = `${base}-${counter++}`;
1257
- }
1258
- this.assignedSlugs.add(slug);
1259
- return slug;
1260
761
  }
1261
762
  async fileExists(fp) {
1262
763
  try {