gitnexushub 0.4.5 → 0.6.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/api.d.ts +90 -1
- package/dist/api.js +34 -0
- package/dist/index.js +27 -0
- package/dist/install-ci-command.d.ts +176 -0
- package/dist/install-ci-command.js +680 -0
- package/dist/wiki/claude.d.ts +11 -5
- package/dist/wiki/claude.js +8 -3
- package/dist/wiki/compose-overview.d.ts +29 -0
- package/dist/wiki/compose-overview.js +48 -0
- package/dist/wiki/concurrency.d.ts +20 -0
- package/dist/wiki/concurrency.js +91 -0
- package/dist/wiki/helpers.d.ts +102 -0
- package/dist/wiki/helpers.js +308 -0
- package/dist/wiki/incremental.d.ts +72 -0
- package/dist/wiki/incremental.js +214 -0
- package/dist/wiki/index.js +37 -0
- package/dist/wiki/session.d.ts +10 -0
- package/dist/wiki/session.js +89 -9
- package/dist/wiki/upload-command.d.ts +12 -0
- package/dist/wiki/upload-command.js +384 -53
- package/hooks/gitnexus-enterprise-hook.cjs +134 -0
- package/package.json +1 -1
- package/skills/gitnexus-debugging.md +89 -89
- package/skills/gitnexus-exploring.md +78 -78
- package/skills/gitnexus-impact-analysis.md +99 -99
- package/skills/gitnexus-pr-review.md +161 -161
|
@@ -1,7 +1,24 @@
|
|
|
1
|
+
import fs from 'fs/promises';
|
|
1
2
|
import { resolveWikiContext } from './resolve-context.js';
|
|
2
3
|
import { runWikiUploadSession } from './session.js';
|
|
3
4
|
import { GnxError, ErrorCode } from './errors.js';
|
|
4
5
|
import { info, warn } from '../cli-helpers.js';
|
|
6
|
+
import { buildModuleRegistry, ensureAllFilesAssigned, estimateModuleTokens, extractSummary, fallbackGrouping, formatModuleRegistry, splitByCommunity, validateAndFixCrossReferences, } from './helpers.js';
|
|
7
|
+
import { diffChangedFiles, tryIncrementalUpload } from './incremental.js';
|
|
8
|
+
import { runConcurrent } from './concurrency.js';
|
|
9
|
+
import { composeOverview } from './compose-overview.js';
|
|
10
|
+
// Fallback number of concurrent wiki page generations when neither the CLI
// option nor the environment variable supplies a positive value.
const DEFAULT_WIKI_CONCURRENCY = 10;

/**
 * Resolve the wiki-generation concurrency level.
 *
 * Precedence: a positive numeric CLI option wins, then a positive integer
 * parsed from the GNX_WIKI_CONCURRENCY environment variable, then the
 * module default of 10.
 *
 * @param {number|undefined} optValue - Concurrency requested via CLI options.
 * @returns {number} A positive concurrency value.
 */
function resolveConcurrency(optValue) {
  if (typeof optValue === 'number' && optValue > 0) {
    return optValue;
  }
  const fromEnv = process.env.GNX_WIKI_CONCURRENCY;
  if (fromEnv) {
    const candidate = Number.parseInt(fromEnv, 10);
    if (Number.isFinite(candidate) && candidate > 0) {
      return candidate;
    }
  }
  return DEFAULT_WIKI_CONCURRENCY;
}
|
|
5
22
|
function formatBytes(n) {
|
|
6
23
|
if (n < 1024)
|
|
7
24
|
return `${n} B`;
|
|
@@ -17,12 +34,75 @@ function formatDuration(ms) {
|
|
|
17
34
|
const rem = s % 60;
|
|
18
35
|
return `${m}m${rem.toString().padStart(2, '0')}s`;
|
|
19
36
|
}
|
|
37
|
+
/**
 * Run an async task while periodically logging a progress heartbeat.
 *
 * Logs "<label> still working... <elapsed> elapsed" every `intervalMs`
 * milliseconds until the task settles, then always stops the timer.
 *
 * @param {string} label - Prefix for each heartbeat log line.
 * @param {number} intervalMs - Milliseconds between heartbeat messages.
 * @param {() => Promise<any>} task - The async work to run.
 * @returns {Promise<any>} Whatever `task` resolves to; rejections propagate.
 */
async function withHeartbeat(label, intervalMs, task) {
  const startedAt = Date.now();
  const heartbeat = setInterval(() => {
    info(`${label} still working... ${formatDuration(Date.now() - startedAt)} elapsed`);
  }, intervalMs);
  // Don't let the heartbeat timer keep the process alive; the optional call
  // is a no-op on environments whose timers lack unref().
  heartbeat.unref?.();
  try {
    return await task();
  }
  finally {
    // Stop logging on success AND failure.
    clearInterval(heartbeat);
  }
}
|
|
20
50
|
/**
 * Convert a human-readable module name into a URL-safe slug.
 *
 * @param {string} name - e.g. "Authentication & Sessions".
 * @returns {string} Lowercase dash-separated slug; may be empty when the
 *   input contains no alphanumerics.
 */
function slugify(name) {
  const lowered = name.toLowerCase();
  // Collapse every run of non-alphanumeric characters into a single dash.
  const dashed = lowered.replace(/[^a-z0-9]+/g, '-');
  // Strip the single dash that a leading/trailing separator run leaves behind.
  return dashed.replace(/^-|-$/g, '');
}
|
|
56
|
+
// JSON schema handed to the Claude runner's structured-output mode so the
// module-tree response comes back as { modules: [{ name, files }] } instead
// of free-form text that has to be re-parsed.
const MODULE_TREE_SCHEMA = {
  type: 'object',
  properties: {
    modules: {
      type: 'array',
      description: 'Ordered list of documentation modules grouping the repo files',
      items: {
        type: 'object',
        properties: {
          name: {
            type: 'string',
            description: 'Human-readable module name (e.g. "Authentication")',
          },
          files: {
            type: 'array',
            items: { type: 'string' },
            description: 'Repo-relative file paths belonging to this module',
          },
        },
        required: ['name', 'files'],
      },
    },
  },
  required: ['modules'],
};
|
|
81
|
+
/**
 * Normalize a structured-output module tree into internal module records.
 *
 * Accepts the raw structured response (expected shape:
 * `{ modules: [{ name, files }] }`) and returns `{ slug, title, files }`
 * records. Malformed input at any level is skipped rather than thrown:
 * a non-object root or non-array `modules` yields `[]`; entries without a
 * string `name`, an array `files`, or a non-empty slug are dropped; and
 * non-string file paths are filtered out.
 *
 * @param {unknown} structured - Structured output from the LLM call.
 * @returns {Array<{slug: string, title: string, files: string[]}>}
 */
function buildModulesFromStructured(structured) {
  if (!structured || typeof structured !== 'object') {
    return [];
  }
  const candidates = structured.modules;
  if (!Array.isArray(candidates)) {
    return [];
  }
  const result = [];
  for (const candidate of candidates) {
    if (!candidate || typeof candidate !== 'object') {
      continue;
    }
    const { name, files } = candidate;
    if (typeof name !== 'string' || !Array.isArray(files)) {
      continue;
    }
    const slug = slugify(name);
    if (!slug) {
      continue;
    }
    result.push({
      slug,
      title: name,
      files: files.filter((f) => typeof f === 'string'),
    });
  }
  return result;
}
|
|
26
106
|
function parseModuleTree(raw) {
|
|
27
107
|
const cleaned = raw
|
|
28
108
|
.trim()
|
|
@@ -38,7 +118,12 @@ function parseModuleTree(raw) {
|
|
|
38
118
|
cause: err,
|
|
39
119
|
});
|
|
40
120
|
}
|
|
41
|
-
if (parsed &&
|
|
121
|
+
if (parsed && Array.isArray(parsed.modules)) {
|
|
122
|
+
const modules = buildModulesFromStructured(parsed);
|
|
123
|
+
if (modules.length > 0)
|
|
124
|
+
return modules;
|
|
125
|
+
}
|
|
126
|
+
if (parsed && typeof parsed === 'object') {
|
|
42
127
|
const modules = [];
|
|
43
128
|
for (const [name, files] of Object.entries(parsed)) {
|
|
44
129
|
if (!Array.isArray(files))
|
|
@@ -57,7 +142,13 @@ function parseModuleTree(raw) {
|
|
|
57
142
|
}
|
|
58
143
|
throw new GnxError(ErrorCode.GENERATION_FAILED, 'module tree had no valid modules');
|
|
59
144
|
}
|
|
60
|
-
|
|
145
|
+
// Token budget for a single wiki page — matches
|
|
146
|
+
// gitnexus/src/core/wiki/generator.ts:DEFAULT_MAX_TOKENS_PER_MODULE.
|
|
147
|
+
// A module splits only when BOTH (total file tokens > budget) AND (files > 3),
|
|
148
|
+
// so a small handful of huge files stays together but a truly large module
|
|
149
|
+
// gets broken into sub-pages.
|
|
150
|
+
const MAX_TOKENS_PER_MODULE = 30_000;
|
|
151
|
+
const MIN_FILES_FOR_SPLIT = 3;
|
|
61
152
|
function splitBySubdirectory(parentName, files) {
|
|
62
153
|
const subGroups = new Map();
|
|
63
154
|
for (const fp of files) {
|
|
@@ -81,15 +172,21 @@ function splitBySubdirectory(parentName, files) {
|
|
|
81
172
|
};
|
|
82
173
|
});
|
|
83
174
|
}
|
|
84
|
-
function applyModuleSplitting(modules) {
|
|
175
|
+
async function applyModuleSplitting(modules, communityMapping, repoRoot) {
|
|
85
176
|
for (const node of modules) {
|
|
86
177
|
const files = node.files ?? [];
|
|
87
|
-
if (files.length
|
|
88
|
-
|
|
89
|
-
|
|
90
|
-
|
|
91
|
-
|
|
92
|
-
|
|
178
|
+
if (files.length <= MIN_FILES_FOR_SPLIT)
|
|
179
|
+
continue;
|
|
180
|
+
const totalTokens = await estimateModuleTokens(files, repoRoot);
|
|
181
|
+
if (totalTokens <= MAX_TOKENS_PER_MODULE)
|
|
182
|
+
continue;
|
|
183
|
+
const byCommunity = splitByCommunity(node.title, files, communityMapping);
|
|
184
|
+
const children = byCommunity
|
|
185
|
+
? byCommunity.map((c) => ({ slug: c.slug, title: c.name, files: c.files }))
|
|
186
|
+
: splitBySubdirectory(node.title, files);
|
|
187
|
+
if (children.length > 1) {
|
|
188
|
+
node.children = children;
|
|
189
|
+
node.files = [];
|
|
93
190
|
}
|
|
94
191
|
}
|
|
95
192
|
return modules;
|
|
@@ -126,10 +223,69 @@ function fillTemplate(template, vars) {
|
|
|
126
223
|
}
|
|
127
224
|
return result;
|
|
128
225
|
}
|
|
226
|
+
/**
 * Build the Markdown header + table of contents for a dry-run output file.
 *
 * The header records generation metadata (timestamp, truncated commit,
 * model, page count), explains the streaming completion-order layout, and
 * lists every page as a slug-anchor TOC link in tree order.
 *
 * @param {{repoFullName: string, headCommit: string, model: string,
 *   modules: Array<{title: string, slug: string}>}} args
 * @returns {string} Markdown ending in a trailing newline.
 */
function formatDryRunHeader(args) {
  const { repoFullName, headCommit, model, modules } = args;
  const generatedAt = new Date().toISOString();
  const tocEntries = modules.map((m) => `- [${m.title}](#${m.slug})`);
  const lines = [
    `# ${repoFullName} — Wiki Dry Run`,
    '',
    `> Generated ${generatedAt} · commit \`${headCommit.slice(0, 8)}\` · model \`${model}\` · ${modules.length} pages`,
    '',
    '> Streaming dry-run output — no content has been uploaded to the Hub. Pages appear below in COMPLETION order (not tree order) as each LLM call finishes, so partial output is preserved on interrupt. The table of contents is in tree order.',
    '',
    '## Table of Contents',
    '',
    ...tocEntries,
    '',
  ];
  return lines.join('\n') + '\n';
}
|
|
244
|
+
/**
 * Format one generated wiki page for the streaming dry-run file.
 *
 * Emits a horizontal-rule separator, an HTML anchor matching the TOC link,
 * a slug callout, and the page's Markdown body.
 *
 * @param {{slug: string, contentMd: string}} page
 * @returns {string} Markdown ending in a trailing newline.
 */
function formatDryRunPage(page) {
  const lines = [
    '---',
    '',
    `<a id="${page.slug}"></a>`,
    '',
    `> **Slug:** \`${page.slug}\``,
    '',
    page.contentMd,
    '',
  ];
  return lines.join('\n') + '\n';
}
|
|
129
256
|
export async function runWikiUpload(opts, deps) {
|
|
130
257
|
const ctx = await resolveWikiContext(deps);
|
|
131
258
|
if (opts.abortSignal?.aborted)
|
|
132
259
|
throw new GnxError(ErrorCode.USER_ABORTED, 'aborted before session start');
|
|
260
|
+
// For incremental mode, compute the git diff once up-front. If it's empty we
|
|
261
|
+
// short-circuit the whole pipeline; otherwise we pass it forward to
|
|
262
|
+
// tryIncrementalUpload so it doesn't re-shell-out to git.
|
|
263
|
+
// Dry-run always forces a full regeneration regardless of mode.
|
|
264
|
+
let precomputedStatus = null;
|
|
265
|
+
let precomputedDiff = null;
|
|
266
|
+
if (opts.mode === 'incremental' && !opts.dryRunOutputPath) {
|
|
267
|
+
precomputedStatus = await ctx.api.wikiStatus(ctx.hubRepoId);
|
|
268
|
+
if (precomputedStatus?.fromCommit) {
|
|
269
|
+
precomputedDiff = diffChangedFiles(ctx.repoRoot, precomputedStatus.fromCommit, ctx.headCommit);
|
|
270
|
+
if (precomputedDiff &&
|
|
271
|
+
precomputedDiff.added.length +
|
|
272
|
+
precomputedDiff.modified.length +
|
|
273
|
+
precomputedDiff.deleted.length ===
|
|
274
|
+
0) {
|
|
275
|
+
const session = await ctx.api.wikiUploadStart(ctx.hubRepoId, {
|
|
276
|
+
mode: 'incremental',
|
|
277
|
+
fromCommit: ctx.headCommit,
|
|
278
|
+
clientVersion: deps.clientVersion,
|
|
279
|
+
clientModel: deps.model,
|
|
280
|
+
});
|
|
281
|
+
await ctx.api.wikiUploadFinish(ctx.hubRepoId, session.sessionId, {
|
|
282
|
+
moduleTree: [],
|
|
283
|
+
receivedSlugs: [],
|
|
284
|
+
});
|
|
285
|
+
return { sessionId: session.sessionId, pagesPersisted: 0, failedSlugs: [] };
|
|
286
|
+
}
|
|
287
|
+
}
|
|
288
|
+
}
|
|
133
289
|
const runner = deps.createClaudeRunner();
|
|
134
290
|
info(`Fetching graph context from Hub (${ctx.hubFullName})...`);
|
|
135
291
|
const [prompts, groupingCtx] = await Promise.all([
|
|
@@ -137,7 +293,7 @@ export async function runWikiUpload(opts, deps) {
|
|
|
137
293
|
ctx.api.wikiGroupingContext(ctx.hubRepoId),
|
|
138
294
|
]);
|
|
139
295
|
// Phase 1: Module tree
|
|
140
|
-
info('Phase 1/3: generating module tree with Claude Code...');
|
|
296
|
+
info('Phase 1/3: generating module tree with Claude Code (one LLM call, typically 30s–2min)...');
|
|
141
297
|
const groupingPrompt = fillTemplate(prompts.grouping.user, {
|
|
142
298
|
COMMUNITY_GROUPS: groupingCtx.communityGroups,
|
|
143
299
|
INTER_COMMUNITY_EDGES: groupingCtx.interCommunityEdges,
|
|
@@ -145,16 +301,52 @@ export async function runWikiUpload(opts, deps) {
|
|
|
145
301
|
FILE_LIST: groupingCtx.filesWithExports,
|
|
146
302
|
DIRECTORY_TREE: groupingCtx.directoryTree,
|
|
147
303
|
});
|
|
148
|
-
const moduleTreeRaw = await runner.run(`${prompts.grouping.system}\n\n${groupingPrompt}
|
|
304
|
+
const moduleTreeRaw = await withHeartbeat(' module tree', 10_000, () => runner.run(`${prompts.grouping.system}\n\n${groupingPrompt}\n\nReturn the result as a JSON object with a "modules" array. Each entry has a "name" (human-readable module title) and "files" (array of repo-relative file paths).`, {
|
|
149
305
|
cwd: ctx.repoRoot,
|
|
150
306
|
model: deps.model,
|
|
151
307
|
allowedTools: [],
|
|
152
|
-
|
|
153
|
-
|
|
154
|
-
|
|
308
|
+
outputFormat: { type: 'json_schema', schema: MODULE_TREE_SCHEMA },
|
|
309
|
+
}));
|
|
310
|
+
info(` module tree generated in ${formatDuration(moduleTreeRaw.durationMs)}`);
|
|
311
|
+
const communityMapping = groupingCtx.communityMapping ?? [];
|
|
312
|
+
const filesWithExportsRaw = groupingCtx.filesWithExportsRaw ?? [];
|
|
313
|
+
// Try structured output first, then free-form JSON parse, then community/directory fallback.
|
|
314
|
+
let rawModules = buildModulesFromStructured(moduleTreeRaw.structuredOutput);
|
|
315
|
+
if (rawModules.length === 0) {
|
|
316
|
+
try {
|
|
317
|
+
rawModules = parseModuleTree(moduleTreeRaw.text);
|
|
318
|
+
}
|
|
319
|
+
catch {
|
|
320
|
+
const fb = fallbackGrouping(filesWithExportsRaw, communityMapping);
|
|
321
|
+
rawModules = Object.entries(fb).map(([title, files]) => ({
|
|
322
|
+
slug: slugify(title),
|
|
323
|
+
title,
|
|
324
|
+
files,
|
|
325
|
+
}));
|
|
326
|
+
}
|
|
327
|
+
}
|
|
328
|
+
if (rawModules.length === 0)
|
|
329
|
+
throw new GnxError(ErrorCode.GENERATION_FAILED, 'module tree had no valid modules');
|
|
330
|
+
// Sweep any files the LLM silently dropped into an "Other" bucket so they
|
|
331
|
+
// still get documented. Matches Hub generator.ts:499-502.
|
|
332
|
+
rawModules = ensureAllFilesAssigned(rawModules, filesWithExportsRaw, (slug, title, files) => ({
|
|
333
|
+
slug,
|
|
334
|
+
title,
|
|
335
|
+
files,
|
|
336
|
+
}));
|
|
337
|
+
const modules = await applyModuleSplitting(rawModules, communityMapping, ctx.repoRoot);
|
|
155
338
|
const { leaves, parents } = flattenModuleTree(modules);
|
|
156
339
|
const allPageNodes = [...leaves, ...parents];
|
|
157
|
-
const
|
|
340
|
+
const moduleRegistryMap = buildModuleRegistry(allPageNodes.map((n) => ({
|
|
341
|
+
name: n.title,
|
|
342
|
+
slug: n.slug,
|
|
343
|
+
files: n.files ?? [],
|
|
344
|
+
children: n.children?.map((c) => ({
|
|
345
|
+
name: c.title,
|
|
346
|
+
slug: c.slug,
|
|
347
|
+
files: c.files ?? [],
|
|
348
|
+
})),
|
|
349
|
+
})), filesWithExportsRaw);
|
|
158
350
|
const generatedPages = new Map();
|
|
159
351
|
info(`Phase 2/3: generating ${leaves.length} leaf page(s) + ${parents.length} parent page(s)...`);
|
|
160
352
|
if (opts.abortSignal?.aborted)
|
|
@@ -177,10 +369,7 @@ export async function runWikiUpload(opts, deps) {
|
|
|
177
369
|
processes: 'No execution flows detected.',
|
|
178
370
|
};
|
|
179
371
|
}
|
|
180
|
-
const reg =
|
|
181
|
-
.filter((m) => m.slug !== node.slug)
|
|
182
|
-
.map((m) => `- [${m.title}](${m.slug}.md)`)
|
|
183
|
-
.join('\n');
|
|
372
|
+
const reg = formatModuleRegistry(moduleRegistryMap, node.slug);
|
|
184
373
|
const pagePrompt = fillTemplate(prompts.module.user, {
|
|
185
374
|
MODULE_NAME: node.title,
|
|
186
375
|
SOURCE_CODE: leafCtx.sourceCode,
|
|
@@ -195,7 +384,16 @@ export async function runWikiUpload(opts, deps) {
|
|
|
195
384
|
model: deps.model,
|
|
196
385
|
allowedTools: [],
|
|
197
386
|
});
|
|
198
|
-
|
|
387
|
+
let diagramSection = '';
|
|
388
|
+
const mermaid = leafCtx.mermaid;
|
|
389
|
+
if (mermaid?.callGraph) {
|
|
390
|
+
diagramSection += `\n\n## Internal Call Graph\n\n\`\`\`mermaid\n${mermaid.callGraph}\n\`\`\``;
|
|
391
|
+
}
|
|
392
|
+
for (const seq of (mermaid?.sequenceDiagrams ?? []).slice(0, 3)) {
|
|
393
|
+
diagramSection += `\n\n## Workflow\n\n\`\`\`mermaid\n${seq}\n\`\`\``;
|
|
394
|
+
}
|
|
395
|
+
let content = `# ${node.title}\n\n${extractPageMarkdown(result.text)}${diagramSection}`;
|
|
396
|
+
content = validateAndFixCrossReferences(content, moduleRegistryMap);
|
|
199
397
|
generatedPages.set(node.slug, content);
|
|
200
398
|
return { title: node.title, contentMd: content };
|
|
201
399
|
};
|
|
@@ -207,7 +405,7 @@ export async function runWikiUpload(opts, deps) {
|
|
|
207
405
|
.map((child) => {
|
|
208
406
|
const content = generatedPages.get(child.slug);
|
|
209
407
|
return content
|
|
210
|
-
? `#### ${child.title}\n${content
|
|
408
|
+
? `#### ${child.title}\n${extractSummary(content, 800)}`
|
|
211
409
|
: `#### ${child.title}\n(Documentation not yet generated)`;
|
|
212
410
|
})
|
|
213
411
|
.join('\n\n');
|
|
@@ -219,10 +417,7 @@ export async function runWikiUpload(opts, deps) {
|
|
|
219
417
|
catch {
|
|
220
418
|
parentCtx = { intraCalls: 'None', processes: 'No execution flows detected.' };
|
|
221
419
|
}
|
|
222
|
-
const reg =
|
|
223
|
-
.filter((m) => m.slug !== node.slug)
|
|
224
|
-
.map((m) => `- [${m.title}](${m.slug}.md)`)
|
|
225
|
-
.join('\n');
|
|
420
|
+
const reg = formatModuleRegistry(moduleRegistryMap, node.slug);
|
|
226
421
|
const parentPrompt = fillTemplate(prompts.parent.user, {
|
|
227
422
|
MODULE_NAME: node.title,
|
|
228
423
|
CHILDREN_DOCS: childDocs,
|
|
@@ -235,51 +430,56 @@ export async function runWikiUpload(opts, deps) {
|
|
|
235
430
|
model: deps.model,
|
|
236
431
|
allowedTools: [],
|
|
237
432
|
});
|
|
238
|
-
|
|
433
|
+
let parentDiagramSection = '';
|
|
434
|
+
const parentMermaid = parentCtx?.mermaid;
|
|
435
|
+
if (parentMermaid?.callGraph) {
|
|
436
|
+
parentDiagramSection += `\n\n## Cross-Module Call Graph\n\n\`\`\`mermaid\n${parentMermaid.callGraph}\n\`\`\``;
|
|
437
|
+
}
|
|
438
|
+
let content = `# ${node.title}\n\n${extractPageMarkdown(result.text)}${parentDiagramSection}`;
|
|
439
|
+
content = validateAndFixCrossReferences(content, moduleRegistryMap);
|
|
239
440
|
generatedPages.set(node.slug, content);
|
|
240
441
|
return { title: node.title, contentMd: content };
|
|
241
442
|
};
|
|
242
|
-
// Phase 3: Overview page
|
|
443
|
+
// Phase 3: Overview page (Phase 5.2 — deterministic, no LLM call).
|
|
444
|
+
// Skips the per-repo overview LLM round-trip in favour of stitching
|
|
445
|
+
// together each module's first-paragraph summary plus the architecture
|
|
446
|
+
// Mermaid diagram. Saves ~1 minute per generation; quality is comparable
|
|
447
|
+
// because the LLM was largely rephrasing the leaf summaries we already
|
|
448
|
+
// have in `generatedPages`.
|
|
243
449
|
const generateOverview = async () => {
|
|
244
450
|
if (opts.abortSignal?.aborted)
|
|
245
451
|
throw new GnxError(ErrorCode.USER_ABORTED, 'aborted during overview generation');
|
|
246
|
-
const
|
|
247
|
-
.map((m) => {
|
|
452
|
+
const overviewModules = modules.map((m) => {
|
|
248
453
|
const content = generatedPages.get(m.slug);
|
|
249
|
-
return
|
|
250
|
-
|
|
251
|
-
:
|
|
252
|
-
|
|
253
|
-
|
|
454
|
+
return {
|
|
455
|
+
slug: m.slug,
|
|
456
|
+
title: m.title,
|
|
457
|
+
summary: content ? extractSummary(content, 600) : '',
|
|
458
|
+
};
|
|
459
|
+
});
|
|
460
|
+
// Architecture diagram still comes from the Hub's overview context
|
|
461
|
+
// endpoint, which precomputes inter-module Mermaid edges from the graph.
|
|
254
462
|
const moduleFiles = {};
|
|
255
463
|
for (const m of modules) {
|
|
256
464
|
const files = m.children ? m.children.flatMap((c) => c.files ?? []) : (m.files ?? []);
|
|
257
465
|
if (files.length > 0)
|
|
258
466
|
moduleFiles[m.title] = files;
|
|
259
467
|
}
|
|
260
|
-
let
|
|
468
|
+
let architectureMermaid = '';
|
|
261
469
|
try {
|
|
262
|
-
overviewCtx = await ctx.api.wikiOverviewContext(ctx.hubRepoId, moduleFiles);
|
|
470
|
+
const overviewCtx = await ctx.api.wikiOverviewContext(ctx.hubRepoId, moduleFiles);
|
|
471
|
+
const overviewMermaid = overviewCtx?.mermaid;
|
|
472
|
+
architectureMermaid = overviewMermaid?.architecture ?? '';
|
|
263
473
|
}
|
|
264
474
|
catch {
|
|
265
|
-
|
|
266
|
-
interModuleEdges: 'No inter-module call edges detected',
|
|
267
|
-
topProcesses: 'No execution flows detected.',
|
|
268
|
-
};
|
|
475
|
+
// Architecture diagram is best-effort; proceed without it on failure.
|
|
269
476
|
}
|
|
270
|
-
const
|
|
271
|
-
|
|
272
|
-
|
|
273
|
-
|
|
274
|
-
TOP_PROCESSES: overviewCtx.topProcesses,
|
|
275
|
-
MODULE_REGISTRY: moduleRegistry,
|
|
276
|
-
});
|
|
277
|
-
const result = await runner.run(`${prompts.overview.system}\n\n${overviewPrompt}`, {
|
|
278
|
-
cwd: ctx.repoRoot,
|
|
279
|
-
model: deps.model,
|
|
280
|
-
allowedTools: [],
|
|
477
|
+
const contentMd = composeOverview({
|
|
478
|
+
repoName: `${ctx.hubFullName} — Wiki`,
|
|
479
|
+
modules: overviewModules,
|
|
480
|
+
architectureMermaid,
|
|
281
481
|
});
|
|
282
|
-
return { title: 'Overview', contentMd
|
|
482
|
+
return { title: 'Overview', contentMd };
|
|
283
483
|
};
|
|
284
484
|
// Execute: leaves → parents → overview
|
|
285
485
|
const overviewNode = { slug: 'overview', title: 'Overview', files: [] };
|
|
@@ -298,6 +498,133 @@ export async function runWikiUpload(opts, deps) {
|
|
|
298
498
|
summary: m.summary,
|
|
299
499
|
files: m.files,
|
|
300
500
|
}));
|
|
501
|
+
// Dry-run: stream every generated page to disk as it completes, so partial
|
|
502
|
+
// output is preserved on timeout/interrupt. Never touches the Hub upload
|
|
503
|
+
// session. Pages land in completion order (not tree order); the TOC at the
|
|
504
|
+
// top of the file is in tree order and uses slug anchors so you can jump.
|
|
505
|
+
if (opts.dryRunOutputPath) {
|
|
506
|
+
const concurrency = resolveConcurrency(opts.concurrency);
|
|
507
|
+
const total = orderedModules.length;
|
|
508
|
+
let doneCount = 0;
|
|
509
|
+
let failedCount = 0;
|
|
510
|
+
const runStartAt = Date.now();
|
|
511
|
+
// Write header + TOC up front (titles are all known after module-tree phase)
|
|
512
|
+
await fs.writeFile(opts.dryRunOutputPath, formatDryRunHeader({
|
|
513
|
+
repoFullName: ctx.hubFullName,
|
|
514
|
+
headCommit: ctx.headCommit,
|
|
515
|
+
model: deps.model,
|
|
516
|
+
modules: orderedModules.map((n) => ({ title: n.title, slug: n.slug })),
|
|
517
|
+
}), 'utf-8');
|
|
518
|
+
info(` streaming output to ${opts.dryRunOutputPath}`);
|
|
519
|
+
// Serialize appends so concurrent leaves don't interleave writes.
|
|
520
|
+
let writeLock = Promise.resolve();
|
|
521
|
+
const appendPage = (page) => {
|
|
522
|
+
writeLock = writeLock.then(() => fs.appendFile(opts.dryRunOutputPath, formatDryRunPage(page), 'utf-8'));
|
|
523
|
+
return writeLock;
|
|
524
|
+
};
|
|
525
|
+
const tryGenerate = async (node) => {
|
|
526
|
+
if (opts.abortSignal?.aborted)
|
|
527
|
+
throw new GnxError(ErrorCode.USER_ABORTED, 'aborted during dry-run generation');
|
|
528
|
+
info(` [${doneCount + 1}/${total}] ${node.title} — generating...`);
|
|
529
|
+
let page;
|
|
530
|
+
try {
|
|
531
|
+
const result = await generatePageByType(node);
|
|
532
|
+
page = { title: result.title, slug: node.slug, contentMd: result.contentMd };
|
|
533
|
+
}
|
|
534
|
+
catch (err) {
|
|
535
|
+
failedCount++;
|
|
536
|
+
page = {
|
|
537
|
+
title: node.title,
|
|
538
|
+
slug: node.slug,
|
|
539
|
+
contentMd: `# ${node.title}\n\n_(Generation failed: ${err instanceof Error ? err.message : String(err)})_`,
|
|
540
|
+
};
|
|
541
|
+
}
|
|
542
|
+
await appendPage(page);
|
|
543
|
+
doneCount++;
|
|
544
|
+
const elapsed = formatDuration(Date.now() - runStartAt);
|
|
545
|
+
const pct = Math.round((doneCount / total) * 100);
|
|
546
|
+
info(` [${doneCount}/${total}] ${node.title} — done · ${pct}% · total ${elapsed}`);
|
|
547
|
+
};
|
|
548
|
+
try {
|
|
549
|
+
// Phase 2a: leaves in parallel (they don't depend on each other)
|
|
550
|
+
info(` running ${leaves.length} leaf page(s) at concurrency ${concurrency}...`);
|
|
551
|
+
await runConcurrent(leaves, tryGenerate, {
|
|
552
|
+
concurrency,
|
|
553
|
+
abortSignal: opts.abortSignal,
|
|
554
|
+
onRateLimit: (c) => warn(` rate limited — concurrency → ${c}`),
|
|
555
|
+
});
|
|
556
|
+
// Phase 2b (Phase 5.1 mirror): parents in parallel. Children's content
|
|
557
|
+
// is already in `generatedPages` after the leaf phase resolves, so
|
|
558
|
+
// parents can fan out without races.
|
|
559
|
+
if (parents.length > 0) {
|
|
560
|
+
info(` running ${parents.length} parent page(s) at concurrency ${concurrency}...`);
|
|
561
|
+
await runConcurrent(parents, tryGenerate, {
|
|
562
|
+
concurrency,
|
|
563
|
+
abortSignal: opts.abortSignal,
|
|
564
|
+
onRateLimit: (c) => warn(` rate limited — concurrency → ${c}`),
|
|
565
|
+
});
|
|
566
|
+
}
|
|
567
|
+
// Phase 3: overview single (depends on leaves + parents)
|
|
568
|
+
await tryGenerate(overviewNode);
|
|
569
|
+
}
|
|
570
|
+
finally {
|
|
571
|
+
// Flush any pending appends even if we're bailing out early so partial
|
|
572
|
+
// output on disk is consistent.
|
|
573
|
+
await writeLock.catch(() => { });
|
|
574
|
+
}
|
|
575
|
+
return {
|
|
576
|
+
sessionId: '(dry-run)',
|
|
577
|
+
pagesPersisted: doneCount - failedCount,
|
|
578
|
+
failedSlugs: [],
|
|
579
|
+
};
|
|
580
|
+
}
|
|
581
|
+
// Incremental path: regenerate only affected modules using local git diff.
|
|
582
|
+
// Falls through to full gen below on any signal the state can't support it.
|
|
583
|
+
if (opts.mode === 'incremental') {
|
|
584
|
+
// Pre-populate generatedPages from the Hub so generateOverview sees
|
|
585
|
+
// real summaries for unchanged modules, not "(Documentation pending)".
|
|
586
|
+
await Promise.all(allPageNodes.map(async (node) => {
|
|
587
|
+
const existing = await ctx.api.wikiPage(ctx.hubRepoId, node.slug);
|
|
588
|
+
if (existing?.content)
|
|
589
|
+
generatedPages.set(node.slug, existing.content);
|
|
590
|
+
}));
|
|
591
|
+
const regenerateModule = async (node) => {
|
|
592
|
+
const moduleNode = { slug: node.slug, title: node.name, files: node.files };
|
|
593
|
+
const treeNode = parents.find((p) => p.slug === node.slug);
|
|
594
|
+
if (treeNode?.children)
|
|
595
|
+
return generateParentPage(treeNode);
|
|
596
|
+
return generateLeafPage(moduleNode);
|
|
597
|
+
};
|
|
598
|
+
const regenerateOverview = async () => generateOverview();
|
|
599
|
+
try {
|
|
600
|
+
const incResult = await tryIncrementalUpload({
|
|
601
|
+
repoId: ctx.hubRepoId,
|
|
602
|
+
repoRoot: ctx.repoRoot,
|
|
603
|
+
headCommit: ctx.headCommit,
|
|
604
|
+
clientVersion: deps.clientVersion,
|
|
605
|
+
clientModel: deps.model,
|
|
606
|
+
api: ctx.api,
|
|
607
|
+
regenerateModule: regenerateModule,
|
|
608
|
+
regenerateOverview: regenerateOverview,
|
|
609
|
+
precomputedStatus,
|
|
610
|
+
precomputedDiff,
|
|
611
|
+
});
|
|
612
|
+
if (incResult.status === 'ok') {
|
|
613
|
+
return {
|
|
614
|
+
sessionId: incResult.sessionId,
|
|
615
|
+
pagesPersisted: incResult.pagesPersisted,
|
|
616
|
+
failedSlugs: [],
|
|
617
|
+
};
|
|
618
|
+
}
|
|
619
|
+
// else fall through to full gen
|
|
620
|
+
}
|
|
621
|
+
catch (err) {
|
|
622
|
+
if (opts.abortSignal?.aborted) {
|
|
623
|
+
throw new GnxError(ErrorCode.USER_ABORTED, 'user aborted', { cause: err });
|
|
624
|
+
}
|
|
625
|
+
// Incremental path threw — fall through to full gen as a safety net.
|
|
626
|
+
}
|
|
627
|
+
}
|
|
301
628
|
info(`Phase 3/3: streaming ${orderedModules.length} page(s) to Hub...`);
|
|
302
629
|
let activeSessionId = null;
|
|
303
630
|
const onAbort = async () => {
|
|
@@ -317,17 +644,21 @@ export async function runWikiUpload(opts, deps) {
|
|
|
317
644
|
const runStartAt = Date.now();
|
|
318
645
|
let startedCount = 0;
|
|
319
646
|
let doneCount = 0;
|
|
647
|
+
const sessionConcurrency = resolveConcurrency(opts.concurrency);
|
|
320
648
|
try {
|
|
321
649
|
return await runWikiUploadSession({
|
|
322
650
|
api: ctx.api,
|
|
323
651
|
repoId: ctx.hubRepoId,
|
|
324
|
-
mode:
|
|
652
|
+
mode: 'full',
|
|
325
653
|
fromCommit: ctx.headCommit,
|
|
326
654
|
clientVersion: deps.clientVersion,
|
|
327
655
|
clientModel: deps.model,
|
|
328
656
|
modules: orderedModules,
|
|
329
657
|
moduleTree,
|
|
330
658
|
generatePage: generatePageByType,
|
|
659
|
+
parallelCount: leaves.length,
|
|
660
|
+
concurrency: sessionConcurrency,
|
|
661
|
+
onRateLimit: (c) => warn(`rate limited — concurrency → ${c}`),
|
|
331
662
|
onSessionStart: (id) => {
|
|
332
663
|
activeSessionId = id;
|
|
333
664
|
},
|