gitnexushub 0.4.4 → 0.6.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,13 +1,108 @@
+ import fs from 'fs/promises';
  import { resolveWikiContext } from './resolve-context.js';
  import { runWikiUploadSession } from './session.js';
  import { GnxError, ErrorCode } from './errors.js';
- import { info } from '../cli-helpers.js';
+ import { info, warn } from '../cli-helpers.js';
+ import { buildModuleRegistry, ensureAllFilesAssigned, estimateModuleTokens, extractSummary, fallbackGrouping, formatModuleRegistry, splitByCommunity, validateAndFixCrossReferences, } from './helpers.js';
+ import { diffChangedFiles, tryIncrementalUpload } from './incremental.js';
+ import { runConcurrent } from './concurrency.js';
+ import { composeOverview } from './compose-overview.js';
+ const DEFAULT_WIKI_CONCURRENCY = 10;
+ function resolveConcurrency(optValue) {
+     if (typeof optValue === 'number' && optValue > 0)
+         return optValue;
+     const env = process.env.GNX_WIKI_CONCURRENCY;
+     if (env) {
+         const parsed = Number.parseInt(env, 10);
+         if (Number.isFinite(parsed) && parsed > 0)
+             return parsed;
+     }
+     return DEFAULT_WIKI_CONCURRENCY;
+ }
+ function formatBytes(n) {
+     if (n < 1024)
+         return `${n} B`;
+     if (n < 1024 * 1024)
+         return `${(n / 1024).toFixed(1)} KB`;
+     return `${(n / (1024 * 1024)).toFixed(2)} MB`;
+ }
+ function formatDuration(ms) {
+     const s = Math.floor(ms / 1000);
+     if (s < 60)
+         return `${s}s`;
+     const m = Math.floor(s / 60);
+     const rem = s % 60;
+     return `${m}m${rem.toString().padStart(2, '0')}s`;
+ }
+ async function withHeartbeat(label, intervalMs, task) {
+     const start = Date.now();
+     const timer = setInterval(() => {
+         info(`${label} still working... ${formatDuration(Date.now() - start)} elapsed`);
+     }, intervalMs);
+     timer.unref?.();
+     try {
+         return await task();
+     }
+     finally {
+         clearInterval(timer);
+     }
+ }
  function slugify(name) {
      return name
          .toLowerCase()
          .replace(/[^a-z0-9]+/g, '-')
          .replace(/^-|-$/g, '');
  }
+ const MODULE_TREE_SCHEMA = {
+     type: 'object',
+     properties: {
+         modules: {
+             type: 'array',
+             description: 'Ordered list of documentation modules grouping the repo files',
+             items: {
+                 type: 'object',
+                 properties: {
+                     name: {
+                         type: 'string',
+                         description: 'Human-readable module name (e.g. "Authentication")',
+                     },
+                     files: {
+                         type: 'array',
+                         items: { type: 'string' },
+                         description: 'Repo-relative file paths belonging to this module',
+                     },
+                 },
+                 required: ['name', 'files'],
+             },
+         },
+     },
+     required: ['modules'],
+ };
+ function buildModulesFromStructured(structured) {
+     if (!structured || typeof structured !== 'object')
+         return [];
+     const arr = structured.modules;
+     if (!Array.isArray(arr))
+         return [];
+     const out = [];
+     for (const entry of arr) {
+         if (!entry || typeof entry !== 'object')
+             continue;
+         const name = entry.name;
+         const files = entry.files;
+         if (typeof name !== 'string' || !Array.isArray(files))
+             continue;
+         const slug = slugify(name);
+         if (!slug)
+             continue;
+         out.push({
+             slug,
+             title: name,
+             files: files.filter((f) => typeof f === 'string'),
+         });
+     }
+     return out;
+ }
  function parseModuleTree(raw) {
      const cleaned = raw
          .trim()
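
Concurrency for the new parallel phases resolves in a fixed order: explicit option, then the `GNX_WIKI_CONCURRENCY` environment variable, then the default of 10. A minimal sketch of that precedence, assuming the `resolveConcurrency` shown above is in scope (values illustrative):

```js
process.env.GNX_WIKI_CONCURRENCY = '4';
resolveConcurrency(8);         // 8: an explicit option always wins
resolveConcurrency(undefined); // 4: falls back to the env var
delete process.env.GNX_WIKI_CONCURRENCY;
resolveConcurrency(undefined); // 10: DEFAULT_WIKI_CONCURRENCY
```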
@@ -23,7 +118,12 @@ function parseModuleTree(raw) {
              cause: err,
          });
      }
-     if (parsed && !Array.isArray(parsed.modules) && typeof parsed === 'object') {
+     if (parsed && Array.isArray(parsed.modules)) {
+         const modules = buildModulesFromStructured(parsed);
+         if (modules.length > 0)
+             return modules;
+     }
+     if (parsed && typeof parsed === 'object') {
          const modules = [];
          for (const [name, files] of Object.entries(parsed)) {
              if (!Array.isArray(files))
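
`parseModuleTree` now recognises two payload shapes: the schema-shaped object with a `modules` array is tried first, and the older name-to-files map remains as a fallback. A sketch of both inputs (values hypothetical; the legacy path should normalise to the same `{ slug, title, files }` entries):

```js
// Preferred shape, matching MODULE_TREE_SCHEMA:
const structured = '{"modules": [{"name": "Authentication", "files": ["src/auth.ts"]}]}';
// Legacy shape: a plain object mapping module name to file list:
const legacy = '{"Authentication": ["src/auth.ts"]}';
parseModuleTree(structured); // [{ slug: 'authentication', title: 'Authentication', files: [...] }]
parseModuleTree(legacy);     // expected to normalise to the same module list
```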
@@ -42,7 +142,13 @@ function parseModuleTree(raw) {
      }
      throw new GnxError(ErrorCode.GENERATION_FAILED, 'module tree had no valid modules');
  }
- const SPLIT_FILE_THRESHOLD = 20;
+ // Token budget for a single wiki page — matches
+ // gitnexus/src/core/wiki/generator.ts:DEFAULT_MAX_TOKENS_PER_MODULE.
+ // A module splits only when BOTH (total file tokens > budget) AND (files > 3),
+ // so a small handful of huge files stays together but a truly large module
+ // gets broken into sub-pages.
+ const MAX_TOKENS_PER_MODULE = 30_000;
+ const MIN_FILES_FOR_SPLIT = 3;
  function splitBySubdirectory(parentName, files) {
      const subGroups = new Map();
      for (const fp of files) {
@@ -66,15 +172,21 @@ function splitBySubdirectory(parentName, files) {
          };
      });
  }
- function applyModuleSplitting(modules) {
+ async function applyModuleSplitting(modules, communityMapping, repoRoot) {
      for (const node of modules) {
          const files = node.files ?? [];
-         if (files.length > SPLIT_FILE_THRESHOLD) {
-             const children = splitBySubdirectory(node.title, files);
-             if (children.length > 1) {
-                 node.children = children;
-                 node.files = [];
-             }
+         if (files.length <= MIN_FILES_FOR_SPLIT)
+             continue;
+         const totalTokens = await estimateModuleTokens(files, repoRoot);
+         if (totalTokens <= MAX_TOKENS_PER_MODULE)
+             continue;
+         const byCommunity = splitByCommunity(node.title, files, communityMapping);
+         const children = byCommunity
+             ? byCommunity.map((c) => ({ slug: c.slug, title: c.name, files: c.files }))
+             : splitBySubdirectory(node.title, files);
+         if (children.length > 1) {
+             node.children = children;
+             node.files = [];
          }
      }
      return modules;
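
The old single rule (more than 20 files) becomes a two-condition test over the constants above. A minimal sketch of the decision, with hypothetical token counts:

```js
// Split only modules that are both wide (file count) and heavy (estimated tokens).
const shouldSplit = (fileCount, totalTokens) =>
    fileCount > MIN_FILES_FOR_SPLIT && totalTokens > MAX_TOKENS_PER_MODULE;
shouldSplit(2, 80_000);  // false: a couple of huge files stays on one page
shouldSplit(50, 5_000);  // false: many tiny files still fit the 30k budget
shouldSplit(20, 60_000); // true: split by community, else by subdirectory
```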
@@ -111,10 +223,69 @@ function fillTemplate(template, vars) {
      }
      return result;
  }
+ function formatDryRunHeader(args) {
+     const { repoFullName, headCommit, model, modules } = args;
+     const generatedAt = new Date().toISOString();
+     const parts = [];
+     parts.push(`# ${repoFullName} — Wiki Dry Run`);
+     parts.push('');
+     parts.push(`> Generated ${generatedAt} · commit \`${headCommit.slice(0, 8)}\` · model \`${model}\` · ${modules.length} pages`);
+     parts.push('');
+     parts.push('> Streaming dry-run output — no content has been uploaded to the Hub. Pages appear below in COMPLETION order (not tree order) as each LLM call finishes, so partial output is preserved on interrupt. The table of contents is in tree order.');
+     parts.push('');
+     parts.push('## Table of Contents');
+     parts.push('');
+     for (const m of modules) {
+         parts.push(`- [${m.title}](#${m.slug})`);
+     }
+     parts.push('');
+     return parts.join('\n') + '\n';
+ }
+ function formatDryRunPage(page) {
+     const parts = [];
+     parts.push('---');
+     parts.push('');
+     parts.push(`<a id="${page.slug}"></a>`);
+     parts.push('');
+     parts.push(`> **Slug:** \`${page.slug}\``);
+     parts.push('');
+     parts.push(page.contentMd);
+     parts.push('');
+     return parts.join('\n') + '\n';
+ }
  export async function runWikiUpload(opts, deps) {
      const ctx = await resolveWikiContext(deps);
      if (opts.abortSignal?.aborted)
          throw new GnxError(ErrorCode.USER_ABORTED, 'aborted before session start');
+     // For incremental mode, compute the git diff once up-front. If it's empty we
+     // short-circuit the whole pipeline; otherwise we pass it forward to
+     // tryIncrementalUpload so it doesn't re-shell-out to git.
+     // Dry-run always forces a full regeneration regardless of mode.
+     let precomputedStatus = null;
+     let precomputedDiff = null;
+     if (opts.mode === 'incremental' && !opts.dryRunOutputPath) {
+         precomputedStatus = await ctx.api.wikiStatus(ctx.hubRepoId);
+         if (precomputedStatus?.fromCommit) {
+             precomputedDiff = diffChangedFiles(ctx.repoRoot, precomputedStatus.fromCommit, ctx.headCommit);
+             if (precomputedDiff &&
+                 precomputedDiff.added.length +
+                     precomputedDiff.modified.length +
+                     precomputedDiff.deleted.length ===
+                     0) {
+                 const session = await ctx.api.wikiUploadStart(ctx.hubRepoId, {
+                     mode: 'incremental',
+                     fromCommit: ctx.headCommit,
+                     clientVersion: deps.clientVersion,
+                     clientModel: deps.model,
+                 });
+                 await ctx.api.wikiUploadFinish(ctx.hubRepoId, session.sessionId, {
+                     moduleTree: [],
+                     receivedSlugs: [],
+                 });
+                 return { sessionId: session.sessionId, pagesPersisted: 0, failedSlugs: [] };
+             }
+         }
+     }
      const runner = deps.createClaudeRunner();
      info(`Fetching graph context from Hub (${ctx.hubFullName})...`);
      const [prompts, groupingCtx] = await Promise.all([
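
With the status and diff computed up front, an unchanged repo costs one Hub round trip and zero LLM calls. A reduced sketch of the short-circuit, with the `{ added, modified, deleted }` shape inferred from this diff (values hypothetical):

```js
// diffChangedFiles(repoRoot, fromCommit, headCommit) is consumed as three arrays.
const diff = { added: [], modified: [], deleted: [] };
const changed = diff.added.length + diff.modified.length + diff.deleted.length;
if (changed === 0) {
    // wikiUploadStart + wikiUploadFinish with an empty module tree, then
    // return { pagesPersisted: 0 } without ever building prompts.
}
```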
@@ -122,7 +293,7 @@ export async function runWikiUpload(opts, deps) {
          ctx.api.wikiGroupingContext(ctx.hubRepoId),
      ]);
      // Phase 1: Module tree
-     info('Phase 1/3: generating module tree with Claude Code...');
+     info('Phase 1/3: generating module tree with Claude Code (one LLM call, typically 30s–2min)...');
      const groupingPrompt = fillTemplate(prompts.grouping.user, {
          COMMUNITY_GROUPS: groupingCtx.communityGroups,
          INTER_COMMUNITY_EDGES: groupingCtx.interCommunityEdges,
@@ -130,16 +301,52 @@ export async function runWikiUpload(opts, deps) {
          FILE_LIST: groupingCtx.filesWithExports,
          DIRECTORY_TREE: groupingCtx.directoryTree,
      });
-     const moduleTreeRaw = await runner.run(`${prompts.grouping.system}\n\n${groupingPrompt}`, {
+     const moduleTreeRaw = await withHeartbeat(' module tree', 10_000, () => runner.run(`${prompts.grouping.system}\n\n${groupingPrompt}\n\nReturn the result as a JSON object with a "modules" array. Each entry has a "name" (human-readable module title) and "files" (array of repo-relative file paths).`, {
          cwd: ctx.repoRoot,
          model: deps.model,
          allowedTools: [],
-     });
-     const rawModules = parseModuleTree(moduleTreeRaw.text);
-     const modules = applyModuleSplitting(rawModules);
+         outputFormat: { type: 'json_schema', schema: MODULE_TREE_SCHEMA },
+     }));
+     info(` module tree generated in ${formatDuration(moduleTreeRaw.durationMs)}`);
+     const communityMapping = groupingCtx.communityMapping ?? [];
+     const filesWithExportsRaw = groupingCtx.filesWithExportsRaw ?? [];
+     // Try structured output first, then free-form JSON parse, then community/directory fallback.
+     let rawModules = buildModulesFromStructured(moduleTreeRaw.structuredOutput);
+     if (rawModules.length === 0) {
+         try {
+             rawModules = parseModuleTree(moduleTreeRaw.text);
+         }
+         catch {
+             const fb = fallbackGrouping(filesWithExportsRaw, communityMapping);
+             rawModules = Object.entries(fb).map(([title, files]) => ({
+                 slug: slugify(title),
+                 title,
+                 files,
+             }));
+         }
+     }
+     if (rawModules.length === 0)
+         throw new GnxError(ErrorCode.GENERATION_FAILED, 'module tree had no valid modules');
+     // Sweep any files the LLM silently dropped into an "Other" bucket so they
+     // still get documented. Matches Hub generator.ts:499-502.
+     rawModules = ensureAllFilesAssigned(rawModules, filesWithExportsRaw, (slug, title, files) => ({
+         slug,
+         title,
+         files,
+     }));
+     const modules = await applyModuleSplitting(rawModules, communityMapping, ctx.repoRoot);
      const { leaves, parents } = flattenModuleTree(modules);
      const allPageNodes = [...leaves, ...parents];
-     const moduleRegistry = allPageNodes.map((m) => `- [${m.title}](${m.slug}.md)`).join('\n');
+     const moduleRegistryMap = buildModuleRegistry(allPageNodes.map((n) => ({
+         name: n.title,
+         slug: n.slug,
+         files: n.files ?? [],
+         children: n.children?.map((c) => ({
+             name: c.title,
+             slug: c.slug,
+             files: c.files ?? [],
+         })),
+     })), filesWithExportsRaw);
      const generatedPages = new Map();
      info(`Phase 2/3: generating ${leaves.length} leaf page(s) + ${parents.length} parent page(s)...`);
      if (opts.abortSignal?.aborted)
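
Module-tree recovery is now a three-step ladder: schema-constrained structured output, then a free-form JSON parse of the reply text, then the deterministic `fallbackGrouping`. A hypothetical structured result, shaped by `MODULE_TREE_SCHEMA`:

```js
// What a schema-constrained run is expected to surface (values invented):
const structuredOutput = {
    modules: [
        { name: 'CLI Helpers', files: ['src/cli-helpers.ts'] },
        { name: 'Wiki Upload', files: ['src/wiki/upload.ts', 'src/wiki/session.ts'] },
    ],
};
buildModulesFromStructured(structuredOutput);
// → [{ slug: 'cli-helpers', title: 'CLI Helpers', ... }, { slug: 'wiki-upload', ... }]
```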
@@ -162,10 +369,7 @@ export async function runWikiUpload(opts, deps) {
                  processes: 'No execution flows detected.',
              };
          }
-         const reg = allPageNodes
-             .filter((m) => m.slug !== node.slug)
-             .map((m) => `- [${m.title}](${m.slug}.md)`)
-             .join('\n');
+         const reg = formatModuleRegistry(moduleRegistryMap, node.slug);
          const pagePrompt = fillTemplate(prompts.module.user, {
              MODULE_NAME: node.title,
              SOURCE_CODE: leafCtx.sourceCode,
@@ -180,7 +384,16 @@ export async function runWikiUpload(opts, deps) {
              model: deps.model,
              allowedTools: [],
          });
-         const content = `# ${node.title}\n\n${extractPageMarkdown(result.text)}`;
+         let diagramSection = '';
+         const mermaid = leafCtx.mermaid;
+         if (mermaid?.callGraph) {
+             diagramSection += `\n\n## Internal Call Graph\n\n\`\`\`mermaid\n${mermaid.callGraph}\n\`\`\``;
+         }
+         for (const seq of (mermaid?.sequenceDiagrams ?? []).slice(0, 3)) {
+             diagramSection += `\n\n## Workflow\n\n\`\`\`mermaid\n${seq}\n\`\`\``;
+         }
+         let content = `# ${node.title}\n\n${extractPageMarkdown(result.text)}${diagramSection}`;
+         content = validateAndFixCrossReferences(content, moduleRegistryMap);
          generatedPages.set(node.slug, content);
          return { title: node.title, contentMd: content };
      };
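
Leaf pages now get graph-derived Mermaid sections appended after the LLM-written body: one internal call graph plus at most three sequence diagrams. Roughly what the appended tail contains, with invented diagram bodies:

```js
// Hypothetical tail for a leaf page with one call graph and one workflow:
const tail =
    '\n\n## Internal Call Graph\n\n```mermaid\ngraph TD; login --> verifyToken;\n```' +
    '\n\n## Workflow\n\n```mermaid\nsequenceDiagram\n  User->>API: POST /login\n```';
// Only the first three sequence diagrams are kept (.slice(0, 3) above).
```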
@@ -192,7 +405,7 @@ export async function runWikiUpload(opts, deps) {
          .map((child) => {
              const content = generatedPages.get(child.slug);
              return content
-                 ? `#### ${child.title}\n${content.slice(0, 800).trim()}`
+                 ? `#### ${child.title}\n${extractSummary(content, 800)}`
                  : `#### ${child.title}\n(Documentation not yet generated)`;
          })
          .join('\n\n');
@@ -204,10 +417,7 @@ export async function runWikiUpload(opts, deps) {
      catch {
          parentCtx = { intraCalls: 'None', processes: 'No execution flows detected.' };
      }
-     const reg = allPageNodes
-         .filter((m) => m.slug !== node.slug)
-         .map((m) => `- [${m.title}](${m.slug}.md)`)
-         .join('\n');
+     const reg = formatModuleRegistry(moduleRegistryMap, node.slug);
      const parentPrompt = fillTemplate(prompts.parent.user, {
          MODULE_NAME: node.title,
          CHILDREN_DOCS: childDocs,
@@ -220,51 +430,56 @@ export async function runWikiUpload(opts, deps) {
              model: deps.model,
              allowedTools: [],
          });
-         const content = `# ${node.title}\n\n${extractPageMarkdown(result.text)}`;
+         let parentDiagramSection = '';
+         const parentMermaid = parentCtx?.mermaid;
+         if (parentMermaid?.callGraph) {
+             parentDiagramSection += `\n\n## Cross-Module Call Graph\n\n\`\`\`mermaid\n${parentMermaid.callGraph}\n\`\`\``;
+         }
+         let content = `# ${node.title}\n\n${extractPageMarkdown(result.text)}${parentDiagramSection}`;
+         content = validateAndFixCrossReferences(content, moduleRegistryMap);
          generatedPages.set(node.slug, content);
          return { title: node.title, contentMd: content };
      };
-     // Phase 3: Overview page
+     // Phase 3: Overview page (Phase 5.2 — deterministic, no LLM call).
+     // Skips the per-repo overview LLM round-trip in favour of stitching
+     // together each module's first-paragraph summary plus the architecture
+     // Mermaid diagram. Saves ~1 minute per generation; quality is comparable
+     // because the LLM was largely rephrasing the leaf summaries we already
+     // have in `generatedPages`.
      const generateOverview = async () => {
          if (opts.abortSignal?.aborted)
              throw new GnxError(ErrorCode.USER_ABORTED, 'aborted during overview generation');
-         const moduleSummaries = modules
-             .map((m) => {
+         const overviewModules = modules.map((m) => {
              const content = generatedPages.get(m.slug);
-             return content
-                 ? `#### ${m.title}\n${content.slice(0, 600).trim()}`
-                 : `#### ${m.title}\n(Documentation pending)`;
-         })
-             .join('\n\n');
+             return {
+                 slug: m.slug,
+                 title: m.title,
+                 summary: content ? extractSummary(content, 600) : '',
+             };
+         });
+         // Architecture diagram still comes from the Hub's overview context
+         // endpoint, which precomputes inter-module Mermaid edges from the graph.
          const moduleFiles = {};
         for (const m of modules) {
              const files = m.children ? m.children.flatMap((c) => c.files ?? []) : (m.files ?? []);
              if (files.length > 0)
                  moduleFiles[m.title] = files;
          }
-         let overviewCtx;
+         let architectureMermaid = '';
          try {
-             overviewCtx = await ctx.api.wikiOverviewContext(ctx.hubRepoId, moduleFiles);
+             const overviewCtx = await ctx.api.wikiOverviewContext(ctx.hubRepoId, moduleFiles);
+             const overviewMermaid = overviewCtx?.mermaid;
+             architectureMermaid = overviewMermaid?.architecture ?? '';
          }
          catch {
-             overviewCtx = {
-                 interModuleEdges: 'No inter-module call edges detected',
-                 topProcesses: 'No execution flows detected.',
-             };
+             // Architecture diagram is best-effort; proceed without it on failure.
          }
-         const overviewPrompt = fillTemplate(prompts.overview.user, {
-             PROJECT_INFO: `Repository: ${ctx.hubFullName}`,
-             MODULE_SUMMARIES: moduleSummaries,
-             MODULE_EDGES: overviewCtx.interModuleEdges,
-             TOP_PROCESSES: overviewCtx.topProcesses,
-             MODULE_REGISTRY: moduleRegistry,
-         });
-         const result = await runner.run(`${prompts.overview.system}\n\n${overviewPrompt}`, {
-             cwd: ctx.repoRoot,
-             model: deps.model,
-             allowedTools: [],
+         const contentMd = composeOverview({
+             repoName: `${ctx.hubFullName} — Wiki`,
+             modules: overviewModules,
+             architectureMermaid,
          });
-         return { title: 'Overview', contentMd: extractPageMarkdown(result.text) };
+         return { title: 'Overview', contentMd };
      };
      // Execute: leaves → parents → overview
      const overviewNode = { slug: 'overview', title: 'Overview', files: [] };
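
`composeOverview` replaces the third LLM call with deterministic assembly of the summaries already in `generatedPages`. Its input, as wired above, looks like this (values hypothetical):

```js
const overviewMd = composeOverview({
    repoName: 'acme/widgets — Wiki',
    modules: [
        { slug: 'auth', title: 'Auth', summary: 'Handles sessions and tokens.' },
        { slug: 'billing', title: 'Billing', summary: '' }, // page not generated yet
    ],
    architectureMermaid: 'graph TD; auth --> billing;',
});
```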
@@ -283,7 +498,134 @@ export async function runWikiUpload(opts, deps) {
          summary: m.summary,
          files: m.files,
      }));
-     info('Phase 3/3: streaming pages to Hub + generating overview...');
+     // Dry-run: stream every generated page to disk as it completes, so partial
+     // output is preserved on timeout/interrupt. Never touches the Hub upload
+     // session. Pages land in completion order (not tree order); the TOC at the
+     // top of the file is in tree order and uses slug anchors so you can jump.
+     if (opts.dryRunOutputPath) {
+         const concurrency = resolveConcurrency(opts.concurrency);
+         const total = orderedModules.length;
+         let doneCount = 0;
+         let failedCount = 0;
+         const runStartAt = Date.now();
+         // Write header + TOC up front (titles are all known after module-tree phase)
+         await fs.writeFile(opts.dryRunOutputPath, formatDryRunHeader({
+             repoFullName: ctx.hubFullName,
+             headCommit: ctx.headCommit,
+             model: deps.model,
+             modules: orderedModules.map((n) => ({ title: n.title, slug: n.slug })),
+         }), 'utf-8');
+         info(` streaming output to ${opts.dryRunOutputPath}`);
+         // Serialize appends so concurrent leaves don't interleave writes.
+         let writeLock = Promise.resolve();
+         const appendPage = (page) => {
+             writeLock = writeLock.then(() => fs.appendFile(opts.dryRunOutputPath, formatDryRunPage(page), 'utf-8'));
+             return writeLock;
+         };
+         const tryGenerate = async (node) => {
+             if (opts.abortSignal?.aborted)
+                 throw new GnxError(ErrorCode.USER_ABORTED, 'aborted during dry-run generation');
+             info(` [${doneCount + 1}/${total}] ${node.title} — generating...`);
+             let page;
+             try {
+                 const result = await generatePageByType(node);
+                 page = { title: result.title, slug: node.slug, contentMd: result.contentMd };
+             }
+             catch (err) {
+                 failedCount++;
+                 page = {
+                     title: node.title,
+                     slug: node.slug,
+                     contentMd: `# ${node.title}\n\n_(Generation failed: ${err instanceof Error ? err.message : String(err)})_`,
+                 };
+             }
+             await appendPage(page);
+             doneCount++;
+             const elapsed = formatDuration(Date.now() - runStartAt);
+             const pct = Math.round((doneCount / total) * 100);
+             info(` [${doneCount}/${total}] ${node.title} — done · ${pct}% · total ${elapsed}`);
+         };
+         try {
+             // Phase 2a: leaves in parallel (they don't depend on each other)
+             info(` running ${leaves.length} leaf page(s) at concurrency ${concurrency}...`);
+             await runConcurrent(leaves, tryGenerate, {
+                 concurrency,
+                 abortSignal: opts.abortSignal,
+                 onRateLimit: (c) => warn(` rate limited — concurrency → ${c}`),
+             });
+             // Phase 2b (Phase 5.1 mirror): parents in parallel. Children's content
+             // is already in `generatedPages` after the leaf phase resolves, so
+             // parents can fan out without races.
+             if (parents.length > 0) {
+                 info(` running ${parents.length} parent page(s) at concurrency ${concurrency}...`);
+                 await runConcurrent(parents, tryGenerate, {
+                     concurrency,
+                     abortSignal: opts.abortSignal,
+                     onRateLimit: (c) => warn(` rate limited — concurrency → ${c}`),
+                 });
+             }
+             // Phase 3: overview single (depends on leaves + parents)
+             await tryGenerate(overviewNode);
+         }
+         finally {
+             // Flush any pending appends even if we're bailing out early so partial
+             // output on disk is consistent.
+             await writeLock.catch(() => { });
+         }
+         return {
+             sessionId: '(dry-run)',
+             pagesPersisted: doneCount - failedCount,
+             failedSlugs: [],
+         };
+     }
+     // Incremental path: regenerate only affected modules using local git diff.
+     // Falls through to full gen below on any signal the state can't support it.
+     if (opts.mode === 'incremental') {
+         // Pre-populate generatedPages from the Hub so generateOverview sees
+         // real summaries for unchanged modules, not "(Documentation pending)".
+         await Promise.all(allPageNodes.map(async (node) => {
+             const existing = await ctx.api.wikiPage(ctx.hubRepoId, node.slug);
+             if (existing?.content)
+                 generatedPages.set(node.slug, existing.content);
+         }));
+         const regenerateModule = async (node) => {
+             const moduleNode = { slug: node.slug, title: node.name, files: node.files };
+             const treeNode = parents.find((p) => p.slug === node.slug);
+             if (treeNode?.children)
+                 return generateParentPage(treeNode);
+             return generateLeafPage(moduleNode);
+         };
+         const regenerateOverview = async () => generateOverview();
+         try {
+             const incResult = await tryIncrementalUpload({
+                 repoId: ctx.hubRepoId,
+                 repoRoot: ctx.repoRoot,
+                 headCommit: ctx.headCommit,
+                 clientVersion: deps.clientVersion,
+                 clientModel: deps.model,
+                 api: ctx.api,
+                 regenerateModule: regenerateModule,
+                 regenerateOverview: regenerateOverview,
+                 precomputedStatus,
+                 precomputedDiff,
+             });
+             if (incResult.status === 'ok') {
+                 return {
+                     sessionId: incResult.sessionId,
+                     pagesPersisted: incResult.pagesPersisted,
+                     failedSlugs: [],
+                 };
+             }
+             // else fall through to full gen
+         }
+         catch (err) {
+             if (opts.abortSignal?.aborted) {
+                 throw new GnxError(ErrorCode.USER_ABORTED, 'user aborted', { cause: err });
+             }
+             // Incremental path threw — fall through to full gen as a safety net.
+         }
+     }
+     info(`Phase 3/3: streaming ${orderedModules.length} page(s) to Hub...`);
      let activeSessionId = null;
      const onAbort = async () => {
          if (!activeSessionId)
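
The dry-run writer serialises appends through a promise chain, so pages generated concurrently never interleave on disk. The pattern in isolation, a minimal sketch (file name hypothetical):

```js
import fs from 'fs/promises';

let writeLock = Promise.resolve();
function appendSerialized(chunk) {
    // Chain each append behind the previous one: producers may run in
    // parallel, but writes hit the file strictly one at a time.
    writeLock = writeLock.then(() => fs.appendFile('dry-run.md', chunk, 'utf-8'));
    return writeLock;
}
// On early exit, flush (and swallow) whatever is still queued, as the diff does:
// await writeLock.catch(() => { });
```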
@@ -296,20 +638,50 @@ export async function runWikiUpload(opts, deps) {
          }
      };
      opts.abortSignal?.addEventListener('abort', onAbort, { once: true });
+     const total = orderedModules.length;
+     const titleBySlug = new Map(orderedModules.map((m) => [m.slug, m.title]));
+     const pageStartAt = new Map();
+     const runStartAt = Date.now();
+     let startedCount = 0;
+     let doneCount = 0;
+     const sessionConcurrency = resolveConcurrency(opts.concurrency);
      try {
          return await runWikiUploadSession({
              api: ctx.api,
              repoId: ctx.hubRepoId,
-             mode: opts.mode ?? 'full',
+             mode: 'full',
              fromCommit: ctx.headCommit,
              clientVersion: deps.clientVersion,
              clientModel: deps.model,
              modules: orderedModules,
              moduleTree,
              generatePage: generatePageByType,
+             parallelCount: leaves.length,
+             concurrency: sessionConcurrency,
+             onRateLimit: (c) => warn(`rate limited — concurrency → ${c}`),
              onSessionStart: (id) => {
                  activeSessionId = id;
              },
+             onPageStart: (slug) => {
+                 startedCount += 1;
+                 pageStartAt.set(slug, Date.now());
+                 const title = titleBySlug.get(slug) ?? slug;
+                 info(`[${startedCount}/${total}] ${title} — generating...`);
+             },
+             onPageDone: (slug, bytes) => {
+                 doneCount += 1;
+                 const startedAt = pageStartAt.get(slug) ?? Date.now();
+                 const elapsed = formatDuration(Date.now() - startedAt);
+                 const totalElapsed = formatDuration(Date.now() - runStartAt);
+                 const pct = Math.round((doneCount / total) * 100);
+                 const title = titleBySlug.get(slug) ?? slug;
+                 info(`[${doneCount}/${total}] ${title} — uploaded (${formatBytes(bytes)}, ${elapsed}) · ${pct}% · total ${totalElapsed}`);
+             },
+             onPageFail: (slug, err) => {
+                 const title = titleBySlug.get(slug) ?? slug;
+                 const msg = err instanceof Error ? err.message : String(err);
+                 warn(`[${startedCount}/${total}] ${title} — failed: ${msg}`);
+             },
          });
      }
      catch (err) {