gitnexushub 0.4.5 → 0.7.0

@@ -0,0 +1,72 @@
+ import type { HubAPI } from '../api.js';
+ export interface FileDiff {
+     added: string[];
+     modified: string[];
+     deleted: string[];
+ }
+ /**
+  * Run `git diff --name-status <from>..<to>` in `repoRoot` and classify
+  * changed files as added/modified/deleted. Renames (R100) are treated
+  * as delete+add so both source and destination modules are re-generated.
+  * Returns null on any git failure — caller should fall back to a full
+  * regeneration.
+  */
+ export declare function diffChangedFiles(repoRoot: string, fromCommit: string, toCommit: string): FileDiff | null;
+ export interface TreeNode {
+     name: string;
+     slug: string;
+     files: string[];
+     children?: TreeNode[];
+ }
+ export interface FilterResult {
+     /** Subset of the tree's leaf nodes whose pages need regenerating. */
+     affected: TreeNode[];
+     /** The tree with deleted files removed and unmapped new files added to "Other". */
+     tree: TreeNode[];
+ }
+ /**
+  * Walk `prevTree` and determine which leaf modules are affected by `diff`.
+  * - Modified/deleted files → their containing module is affected.
+  * - Added files → if >5 can't be mapped to an existing module, return null
+  *   (caller falls back to full gen). ≤5 unmapped are appended to an "Other"
+  *   module (created if absent).
+  * - Empty modules (all files deleted) are dropped from the returned tree.
+  */
+ export declare function filterAffectedModules(diff: FileDiff, prevTree: TreeNode[]): FilterResult | null;
+ export interface IncrementalUploadInput {
+     repoId: string;
+     repoRoot: string;
+     headCommit: string;
+     clientVersion: string;
+     clientModel: string;
+     api: HubAPI;
+     regenerateModule: (node: TreeNode) => Promise<{
+         title: string;
+         contentMd: string;
+     }>;
+     regenerateOverview: (tree: TreeNode[]) => Promise<{
+         title: string;
+         contentMd: string;
+     }>;
+     /** Reuse a status already fetched by the caller. Avoids a redundant API round-trip. */
+     precomputedStatus?: {
+         fromCommit?: string | null;
+     } | null;
+     /** Reuse a diff already computed by the caller. Avoids a redundant git diff. */
+     precomputedDiff?: FileDiff | null;
+ }
+ export type IncrementalUploadResult = {
+     status: 'ok';
+     sessionId: string;
+     pagesPersisted: number;
+ } | {
+     status: 'fallback-full';
+ };
+ /**
+  * Run an incremental wiki upload: compare local HEAD to the Hub's recorded
+  * fromCommit, regenerate only affected modules, upload via incremental session.
+  * On any signal that the local or remote state can't support an incremental
+  * update (no prior wiki, divergent branch, too many new unmapped files),
+  * returns { status: 'fallback-full' } and the caller should run full gen.
+  */
+ export declare function tryIncrementalUpload(input: IncrementalUploadInput): Promise<IncrementalUploadResult>;
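
The declarations above encode a fallback contract: each helper returns null or { status: 'fallback-full' } instead of throwing when incremental state is unusable. A minimal caller sketch (the runFullGeneration helper and the import path are hypothetical, not part of this package):

import { tryIncrementalUpload, type IncrementalUploadInput } from './incremental.js'; // path assumed

declare function runFullGeneration(input: IncrementalUploadInput): Promise<void>; // hypothetical full-gen entry point

async function uploadWiki(input: IncrementalUploadInput): Promise<void> {
    const result = await tryIncrementalUpload(input);
    if (result.status === 'fallback-full') {
        // Any unusable state (no prior wiki, git failure, >5 unmapped new files)
        // lands here; the caller owns the full-regeneration path.
        await runFullGeneration(input);
        return;
    }
    console.log(`incremental session ${result.sessionId}: ${result.pagesPersisted} page(s) persisted`);
}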
@@ -0,0 +1,214 @@
+ import { execFileSync } from 'child_process';
+ /**
+  * Run `git diff --name-status <from>..<to>` in `repoRoot` and classify
+  * changed files as added/modified/deleted. Renames (R100) are treated
+  * as delete+add so both source and destination modules are re-generated.
+  * Returns null on any git failure — caller should fall back to a full
+  * regeneration.
+  */
+ export function diffChangedFiles(repoRoot, fromCommit, toCommit) {
+     let raw;
+     try {
+         raw = execFileSync('git', ['diff', '--name-status', '-M', `${fromCommit}..${toCommit}`], {
+             cwd: repoRoot,
+             encoding: 'utf-8',
+             stdio: ['ignore', 'pipe', 'ignore'],
+         });
+     }
+     catch {
+         return null;
+     }
+     const added = [];
+     const modified = [];
+     const deleted = [];
+     for (const line of raw.split('\n')) {
+         if (!line.trim())
+             continue;
+         const parts = line.split('\t');
+         const code = parts[0] ?? '';
+         if (code === 'A') {
+             added.push(parts[1]);
+         }
+         else if (code === 'M') {
+             modified.push(parts[1]);
+         }
+         else if (code === 'D') {
+             deleted.push(parts[1]);
+         }
+         else if (code.startsWith('R')) {
+             if (parts[1])
+                 deleted.push(parts[1]);
+             if (parts[2])
+                 added.push(parts[2]);
+         }
+     }
+     return { added, modified, deleted };
+ }
+ /**
+  * Walk `prevTree` and determine which leaf modules are affected by `diff`.
+  * - Modified/deleted files → their containing module is affected.
+  * - Added files → if >5 can't be mapped to an existing module, return null
+  *   (caller falls back to full gen). ≤5 unmapped are appended to an "Other"
+  *   module (created if absent).
+  * - Empty modules (all files deleted) are dropped from the returned tree.
+  */
+ export function filterAffectedModules(diff, prevTree) {
+     // Deep copy the tree so we can mutate.
+     const tree = JSON.parse(JSON.stringify(prevTree));
+     // Build file→node lookup (leaves only).
+     const fileToLeaf = new Map();
+     const leaves = [];
+     const walkLeaves = (nodes) => {
+         for (const n of nodes) {
+             if (n.children && n.children.length > 0) {
+                 walkLeaves(n.children);
+             }
+             else {
+                 leaves.push(n);
+                 for (const f of n.files)
+                     fileToLeaf.set(f, n);
+             }
+         }
+     };
+     walkLeaves(tree);
+     const affected = new Set();
+     for (const fp of diff.modified) {
+         const leaf = fileToLeaf.get(fp);
+         if (leaf)
+             affected.add(leaf);
+     }
+     for (const fp of diff.deleted) {
+         const leaf = fileToLeaf.get(fp);
+         if (leaf) {
+             leaf.files = leaf.files.filter((f) => f !== fp);
+             affected.add(leaf);
+         }
+     }
+     // Classify added files.
+     const unmapped = [];
+     for (const fp of diff.added) {
+         // Try to place it in an existing leaf whose files share the deepest prefix.
+         const leaf = findClosestLeaf(fp, leaves);
+         if (leaf) {
+             leaf.files.push(fp);
+             affected.add(leaf);
+         }
+         else {
+             unmapped.push(fp);
+         }
+     }
+     if (unmapped.length > 5)
+         return null;
+     if (unmapped.length > 0) {
+         let other = tree.find((n) => n.slug === 'other');
+         if (!other) {
+             other = { name: 'Other', slug: 'other', files: [] };
+             tree.push(other);
+             leaves.push(other);
+         }
+         other.files.push(...unmapped);
+         affected.add(other);
+     }
+     // Drop leaves with no files and propagate to parents.
+     const pruneEmpty = (nodes) => nodes
+         .map((n) => {
+             if (n.children) {
+                 n.children = pruneEmpty(n.children);
+                 return n.children.length === 0 && n.files.length === 0 ? null : n;
+             }
+             return n.files.length === 0 ? null : n;
+         })
+         .filter((n) => n !== null);
+     const prunedTree = pruneEmpty(tree);
+     return {
+         affected: Array.from(affected).filter((n) => n.files.length > 0),
+         tree: prunedTree,
+     };
+ }
+ /**
+  * Find the leaf whose existing files share the longest common directory prefix
+  * with `newFile`. Returns null when no leaf has any common prefix.
+  */
+ function findClosestLeaf(newFile, leaves) {
+     const newParts = newFile.replace(/\\/g, '/').split('/');
+     let bestLeaf = null;
+     let bestDepth = 0;
+     for (const leaf of leaves) {
+         for (const f of leaf.files) {
+             const parts = f.replace(/\\/g, '/').split('/');
+             let depth = 0;
+             while (depth < newParts.length && depth < parts.length && newParts[depth] === parts[depth]) {
+                 depth++;
+             }
+             // Only count if we matched at least 2 directory components (e.g., src/auth).
+             // A match on just "src" alone is too broad.
+             if (depth >= 2 && depth > bestDepth) {
+                 bestDepth = depth;
+                 bestLeaf = leaf;
+             }
+         }
+     }
+     return bestLeaf;
+ }
+ /**
+  * Run an incremental wiki upload: compare local HEAD to the Hub's recorded
+  * fromCommit, regenerate only affected modules, upload via incremental session.
+  * On any signal that the local or remote state can't support an incremental
+  * update (no prior wiki, divergent branch, too many new unmapped files),
+  * returns { status: 'fallback-full' } and the caller should run full gen.
+  */
+ export async function tryIncrementalUpload(input) {
+     const status = input.precomputedStatus ?? (await input.api.wikiStatus(input.repoId));
+     if (!status?.fromCommit)
+         return { status: 'fallback-full' };
+     const treeResp = await input.api.wikiTree(input.repoId);
+     if (!treeResp?.tree?.length)
+         return { status: 'fallback-full' };
+     const diff = input.precomputedDiff ?? diffChangedFiles(input.repoRoot, status.fromCommit, input.headCommit);
+     if (!diff)
+         return { status: 'fallback-full' };
+     const filtered = filterAffectedModules(diff, treeResp.tree);
+     if (!filtered)
+         return { status: 'fallback-full' };
+     const session = await input.api.wikiUploadStart(input.repoId, {
+         mode: 'incremental',
+         fromCommit: input.headCommit,
+         clientVersion: input.clientVersion,
+         clientModel: input.clientModel,
+     });
+     if (filtered.affected.length === 0) {
+         await input.api.wikiUploadFinish(input.repoId, session.sessionId, {
+             moduleTree: filtered.tree,
+             receivedSlugs: [],
+         });
+         return { status: 'ok', sessionId: session.sessionId, pagesPersisted: 0 };
+     }
+     const receivedSlugs = [];
+     try {
+         for (const node of filtered.affected) {
+             const page = await input.regenerateModule(node);
+             await input.api.wikiUploadPage(input.repoId, session.sessionId, {
+                 slug: node.slug,
+                 title: page.title,
+                 contentMd: page.contentMd,
+             });
+             receivedSlugs.push(node.slug);
+         }
+         const overview = await input.regenerateOverview(filtered.tree);
+         await input.api.wikiUploadPage(input.repoId, session.sessionId, {
+             slug: 'overview',
+             title: overview.title,
+             contentMd: overview.contentMd,
+         });
+         receivedSlugs.push('overview');
+     }
+     catch (err) {
+         await input.api.wikiUploadAbort(input.repoId, session.sessionId).catch(() => { });
+         throw err;
+     }
+     const finished = await input.api.wikiUploadFinish(input.repoId, session.sessionId, {
+         moduleTree: filtered.tree,
+         receivedSlugs,
+     });
+     return { status: 'ok', sessionId: session.sessionId, pagesPersisted: finished.pagesPersisted };
+ }
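
For reference, `git diff --name-status -M` emits tab-separated rows, and the parser above folds each rename row into both lists. A worked example (paths illustrative):

// Sample `git diff --name-status -M` output (columns are tab-separated):
//   M       src/auth/session.ts
//   A       src/auth/tokens.ts
//   D       src/legacy/db.ts
//   R100    src/util/log.ts    src/util/logger.ts
//
// diffChangedFiles(...) classifies those rows as:
const expected = {
    added: ['src/auth/tokens.ts', 'src/util/logger.ts'],   // 'A' rows plus rename destinations
    modified: ['src/auth/session.ts'],
    deleted: ['src/legacy/db.ts', 'src/util/log.ts'],      // 'D' rows plus rename sources
};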
@@ -97,6 +97,43 @@ export function registerWikiCommand(program) {
              reportError(err);
          }
      });
+ wiki
+     .command('dry-run')
+     .description('Generate the wiki locally and write it to a single markdown file — nothing is uploaded to the Hub')
+     .option('--output <path>', 'output markdown file', 'wiki-dry-run.md')
+     .option('--model <model>', 'Claude model to use', REAL_DEFAULT_MODEL)
+     .action(async (opts) => {
+         const cwd = process.cwd();
+         const deps = buildRealDeps(cwd);
+         if (opts.model)
+             deps.model = opts.model;
+         const ac = new AbortController();
+         const onSig = () => {
+             warn('interrupt received, aborting dry run...');
+             ac.abort();
+         };
+         process.on('SIGINT', onSig);
+         process.on('SIGTERM', onSig);
+         try {
+             info(`Resolving repo context...`);
+             const result = await runWikiUpload({
+                 cwd,
+                 mode: 'full',
+                 model: opts.model,
+                 abortSignal: ac.signal,
+                 dryRunOutputPath: opts.output,
+             }, deps);
+             ok(`Dry-run complete: ${result.pagesPersisted} pages written to ${opts.output}`);
+             info('Nothing was uploaded to the Hub.');
+         }
+         catch (err) {
+             reportError(err);
+         }
+         finally {
+             process.off('SIGINT', onSig);
+             process.off('SIGTERM', onSig);
+         }
+     });
  wiki
      .command('abort')
      .description('Abort the active wiki upload session for the current repo')
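
The dry-run command is a thin wrapper over `runWikiUpload` with `dryRunOutputPath` set (the option is documented in the last hunk of this diff). A programmatic equivalent, as a sketch (import paths are assumptions):

import { runWikiUpload } from './upload.js';  // path assumed
import { buildRealDeps } from './deps.js';    // path assumed

const cwd = process.cwd();
const result = await runWikiUpload({
    cwd,
    mode: 'full',                         // a dry run always forces full generation
    abortSignal: new AbortController().signal,
    dryRunOutputPath: 'wiki-dry-run.md',  // skips the Hub upload session entirely
}, buildRealDeps(cwd));
console.log(`${result.pagesPersisted} pages written locally; nothing uploaded`);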
@@ -21,10 +21,20 @@ export interface RunSessionDeps {
      modules: ModuleNode[];
      moduleTree: unknown;
      generatePage: GeneratePageFn;
+     /**
+      * Number of leading `modules[]` entries that may run in parallel (leaves).
+      * Remaining modules (parents + overview) always run sequentially because
+      * they depend on prior phase outputs via extractSummary. Defaults to 0 —
+      * fully sequential, matching original behavior.
+      */
+     parallelCount?: number;
+     /** Max concurrent tasks within the parallel phase. Defaults to 1. */
+     concurrency?: number;
      onSessionStart?: (sessionId: string) => void;
      onPageStart?: (slug: string) => void;
      onPageDone?: (slug: string, bytes: number) => void;
      onPageFail?: (slug: string, err: unknown) => void;
+     onRateLimit?: (newConcurrency: number) => void;
  }
  export interface RunSessionResult {
      sessionId: string;
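
These options imply an ordering contract on `modules[]`: leaves first, then parents, with the overview entry last. A caller sketch (the module arrays and handler values are illustrative):

// Leaves first, then parents, then overview last, per the ordering contract.
const modules = [...leafModules, ...parentModules, overviewModule];
await runWikiUploadSession({
    api,
    repoId,
    modules,
    moduleTree,
    generatePage,
    parallelCount: leafModules.length, // only the leading leaves fan out
    concurrency: 10,                   // worker-pool width within the parallel phase
    onRateLimit: (n) => console.warn(`rate limited; concurrency reduced to ${n}`),
});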
@@ -1,4 +1,5 @@
  import { GnxError, ErrorCode } from './errors.js';
+ import { runConcurrent } from './concurrency.js';
  export async function runWikiUploadSession(deps) {
      const { api, repoId } = deps;
      const started = await api.wikiUploadStart(repoId, {
@@ -12,21 +13,22 @@ export async function runWikiUploadSession(deps) {
      const receivedSlugs = [];
      const failedSlugs = [];
      let aborted = false;
+     let uploadError = null;
      const abortOnce = async (reason) => {
          if (aborted)
              return;
          aborted = true;
+         uploadError = reason;
          try {
              await api.wikiUploadAbort(repoId, sessionId);
          }
          catch {
              /* ignore secondary errors */
          }
-         throw reason instanceof Error
-             ? reason
-             : new GnxError(ErrorCode.GENERATION_FAILED, String(reason));
      };
-     for (const mod of deps.modules) {
+     const processModule = async (mod) => {
+         if (aborted)
+             return;
          deps.onPageStart?.(mod.slug);
          let page;
          try {
@@ -41,13 +43,13 @@ export async function runWikiUploadSession(deps) {
              }
              catch (err2) {
                  deps.onPageFail?.(mod.slug, err2);
-                 continue;
+                 return;
              }
          }
          if (!page.contentMd || !page.contentMd.trim()) {
              failedSlugs.push(mod.slug);
              deps.onPageFail?.(mod.slug, new Error('empty generation result'));
-             continue;
+             return;
          }
          try {
              await api.wikiUploadPage(repoId, sessionId, {
@@ -55,16 +57,94 @@ export async function runWikiUploadSession(deps) {
                  title: page.title,
                  contentMd: page.contentMd,
              });
-             receivedSlugs.push(mod.slug);
-             deps.onPageDone?.(mod.slug, Buffer.byteLength(page.contentMd, 'utf8'));
+             if (!aborted) {
+                 receivedSlugs.push(mod.slug);
+                 deps.onPageDone?.(mod.slug, Buffer.byteLength(page.contentMd, 'utf8'));
+             }
          }
          catch (err) {
              await abortOnce(err);
          }
+     };
+     const parallelCount = Math.max(0, Math.min(deps.parallelCount ?? 0, deps.modules.length));
+     const concurrency = Math.max(1, deps.concurrency ?? 1);
+     const parallelModules = deps.modules.slice(0, parallelCount);
+     const sequentialModules = deps.modules.slice(parallelCount);
+     if (parallelModules.length > 0) {
+         try {
+             await runConcurrent(parallelModules, processModule, {
+                 concurrency,
+                 onRateLimit: deps.onRateLimit,
+             });
+         }
+         catch (err) {
+             // runConcurrent only rejects on non-429 errors from processModule. Since
+             // processModule already swallows generation errors and routes upload
+             // errors through abortOnce, this branch only fires for truly unexpected
+             // rejections. Ensure the session is aborted before rethrowing.
+             if (!aborted)
+                 await abortOnce(err);
+         }
+         // If every leaf failed generation, bail out before running the sequential
+         // phase. Parents and overview read leaf content via extractSummary — without
+         // any leaves they'd emit "(Documentation not yet generated)" placeholders
+         // and the session would "succeed" with a garbage wiki. failedSlugs may be
+         // non-empty here, but the real signal is whether anything was uploaded.
+         if (!aborted && receivedSlugs.length === 0) {
+             await abortOnce(new GnxError(ErrorCode.GENERATION_FAILED, `all ${parallelModules.length} leaf page(s) failed to generate`));
+         }
      }
-     if (receivedSlugs.length === 0) {
+     // Phase 5.1 (connect mirror): parents run in parallel too. They read leaf
+     // content via extractSummary on already-uploaded leaf pages — the Hub
+     // persists every leaf synchronously inside wikiUploadPage, so by the time
+     // the leaf phase resolves all leaves are durable and parents can fetch
+     // them independently.
+     //
+     // Overview is the one exception: it summarises parents AND leaves, so it
+     // must run after both. The upload-command contract places the overview
+     // entry as the trailing element of `modules`. We respect that here by
+     // peeling it off the end and running it after the parent fan-out.
+     // If the caller doesn't follow that contract (e.g. unit tests with
+     // arbitrary module ordering), we fall back to the original sequential
+     // semantics so behavior stays predictable.
+     if (!aborted) {
+         const tail = sequentialModules[sequentialModules.length - 1];
+         const overviewIsTrailing = tail?.slug === 'overview';
+         if (overviewIsTrailing) {
+             const parentMods = sequentialModules.slice(0, -1);
+             if (parentMods.length > 0) {
+                 try {
+                     await runConcurrent(parentMods, processModule, {
+                         concurrency,
+                         onRateLimit: deps.onRateLimit,
+                     });
+                 }
+                 catch (err) {
+                     if (!aborted)
+                         await abortOnce(err);
+                 }
+             }
+             if (!aborted) {
+                 await processModule(tail);
+             }
+         }
+         else {
+             // Legacy ordering: process every sequential module in declared order.
+             for (const mod of sequentialModules) {
+                 if (aborted)
+                     break;
+                 await processModule(mod);
+             }
+         }
+     }
+     if (uploadError === null && receivedSlugs.length === 0) {
          await abortOnce(new GnxError(ErrorCode.GENERATION_FAILED, 'no pages were generated successfully'));
      }
+     if (uploadError !== null) {
+         throw uploadError instanceof Error
+             ? uploadError
+             : new GnxError(ErrorCode.GENERATION_FAILED, String(uploadError));
+     }
      const finished = await api.wikiUploadFinish(repoId, sessionId, {
          moduleTree: deps.moduleTree,
          receivedSlugs,
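
`./concurrency.js` itself is not included in this diff. From the call sites one can infer a worker pool that absorbs 429 responses, shrinks its width, reports the new width via `onRateLimit`, and rejects on any other error. A compatible sketch; the halving policy, single retry, and error-shape check are all assumptions:

export async function runConcurrent<T>(
    items: T[],
    fn: (item: T) => Promise<void>,
    opts: { concurrency: number; onRateLimit?: (newConcurrency: number) => void },
): Promise<void> {
    let limit = Math.max(1, opts.concurrency);
    let next = 0;
    const worker = async (id: number): Promise<void> => {
        while (next < items.length) {
            if (id >= limit)
                return; // the pool shrank below this worker's slot; retire it
            const item = items[next++]; // synchronous claim, so no double-processing
            try {
                await fn(item);
            }
            catch (err) {
                if (isRateLimit(err)) {
                    limit = Math.max(1, Math.floor(limit / 2)); // assumed halving policy
                    opts.onRateLimit?.(limit);
                    await fn(item); // assumed single retry after shrinking the pool
                }
                else {
                    throw err; // non-429 rejections propagate, matching the call sites above
                }
            }
        }
    };
    await Promise.all(Array.from({ length: limit }, (_, i) => worker(i)));
}

// Assumed shape of a rate-limit error; the real predicate is not in this diff.
function isRateLimit(err: unknown): boolean {
    return typeof err === 'object' && err !== null && (err as { status?: number }).status === 429;
}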
@@ -5,6 +5,18 @@ export interface RunWikiUploadOptions {
      mode?: 'full' | 'incremental';
      model?: string;
      abortSignal?: AbortSignal;
+     /**
+      * When set, skip the upload session entirely: generate every page and write
+      * the concatenated markdown to this path. Hub `wiki_pages` is untouched.
+      * Always uses full-generation mode regardless of `mode`.
+      */
+     dryRunOutputPath?: string;
+     /**
+      * Max concurrent Claude Code calls for leaf-page generation. Defaults to
+      * the GNX_WIKI_CONCURRENCY env var (10 if unset). Parents and overview
+      * always run sequentially because they depend on prior phase outputs.
+      */
+     concurrency?: number;
  }
  export interface RunWikiUploadDeps extends ResolveContextDeps {
      createClaudeRunner: () => ClaudeRunner;
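
The documented default chain for `concurrency` (explicit option, then the GNX_WIKI_CONCURRENCY env var, then 10) would resolve along these lines; the actual resolver is not shown in this diff and the variable names are illustrative:

const envValue = Number.parseInt(process.env.GNX_WIKI_CONCURRENCY ?? '', 10);
const concurrency =
    options.concurrency ?? (Number.isFinite(envValue) && envValue > 0 ? envValue : 10);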