gitnexushub 0.4.2 → 0.4.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,9 @@
1
+ import { createClaudeRunner } from './claude.js';
2
+ import type { ResolveContextDeps } from './resolve-context.js';
3
/** Version string reported to the Hub (e.g. "gnx/0.4.4", or "gnx/dev" when npm_package_version is unset). */
export declare const REAL_CLIENT_VERSION: string;
/** Default Claude model used for wiki generation; overridable via GNX_WIKI_MODEL. */
export declare const REAL_DEFAULT_MODEL: string;
/**
 * Build the production dependency bag (real git shell-outs, real Hub API
 * client, real Claude Code probes) consumed by the wiki commands.
 * @param cwd Directory the CLI was invoked from.
 */
export declare function buildRealDeps(cwd: string): ResolveContextDeps & {
    createClaudeRunner: typeof createClaudeRunner;
    clientVersion: string;
    model: string;
};
@@ -0,0 +1,63 @@
1
+ import { execFileSync } from 'child_process';
2
+ import fs from 'fs';
3
+ import path from 'path';
4
+ import os from 'os';
5
+ import { HubAPI } from '../api.js';
6
+ import { loadConfig as loadConnectConfig } from '../config.js';
7
+ import { createClaudeRunner } from './claude.js';
8
// Version string reported to the Hub. npm_package_version is only populated
// when launched through an npm script, so standalone invocations say "dev".
export const REAL_CLIENT_VERSION = `gnx/${process.env.npm_package_version ?? 'dev'}`;
// Default Claude model for wiki generation; GNX_WIKI_MODEL overrides it.
export const REAL_DEFAULT_MODEL = process.env.GNX_WIKI_MODEL ?? 'claude-sonnet-4-6';
10
/**
 * Run a git subcommand in `cwd` and return its trimmed stdout.
 * Returns null when git is missing or the command exits non-zero;
 * stderr is suppressed so failed probes stay silent.
 */
function runGit(cwd, args) {
    try {
        const stdout = execFileSync('git', args, { cwd, stdio: ['ignore', 'pipe', 'ignore'] });
        return stdout.toString('utf-8').trim();
    }
    catch {
        return null;
    }
}
20
/**
 * Build the production dependency bag consumed by resolveWikiContext /
 * runWikiUpload: real git shell-outs, real Hub API client, real Claude
 * Code detection. `cwd` is the directory the CLI was invoked from.
 */
export function buildRealDeps(cwd) {
    return {
        cwd,
        loadConfig: loadConnectConfig,
        // Each git probe returns null (via runGit) on failure so callers can
        // translate the null into a user-facing error.
        getGitRoot: (c) => runGit(c, ['rev-parse', '--show-toplevel']),
        getGitRemoteUrl: (c) => runGit(c, ['remote', 'get-url', 'origin']),
        getGitHead: (c) => runGit(c, ['rev-parse', 'HEAD']),
        getGitDirty: (c) => {
            // Any porcelain output means uncommitted changes; a failed git
            // call (null) is treated as clean.
            const out = runGit(c, ['status', '--porcelain']);
            return (out?.length ?? 0) > 0;
        },
        buildApi: (hubUrl, token) => new HubAPI(hubUrl, token),
        // Installed == `claude --version` exits 0 on PATH.
        verifyClaudeCodeInstalled: async () => {
            try {
                execFileSync('claude', ['--version'], { stdio: 'ignore' });
                return true;
            }
            catch {
                return false;
            }
        },
        verifyClaudeCodeMcpRegistered: async (hubUrl) => {
            try {
                // NOTE(review): assumes Claude Code stores MCP servers in
                // ~/.claude.json under `mcpServers` entries carrying a `url`
                // field — confirm against the current config layout.
                const claudeJson = path.join(os.homedir(), '.claude.json');
                if (!fs.existsSync(claudeJson))
                    return false;
                const parsed = JSON.parse(fs.readFileSync(claudeJson, 'utf-8'));
                const mcpServers = parsed?.mcpServers ?? {};
                for (const key of Object.keys(mcpServers)) {
                    const entry = mcpServers[key];
                    // A server counts as registered if its URL sits under the Hub base URL.
                    if (entry?.url && String(entry.url).startsWith(hubUrl))
                        return true;
                }
                return false;
            }
            catch {
                // Unreadable/corrupt config → treat as not registered.
                return false;
            }
        },
        createClaudeRunner,
        clientVersion: REAL_CLIENT_VERSION,
        model: REAL_DEFAULT_MODEL,
    };
}
@@ -0,0 +1,31 @@
1
+ import type { HubAPI } from '../api.js';
2
/** Fully-resolved local + Hub state for the current repository. */
export interface WikiContext {
    repoRoot: string;
    headCommit: string;
    isDirty: boolean;
    /** Normalized 'origin' URL used to match the repo on the Hub. */
    canonicalRemote: string;
    hubRepoId: string;
    hubFullName: string;
    hubIndexedCommit: string | null;
    hubIndexedStatus: string;
    hubUrl: string;
    api: HubAPI;
}
/** Injectable probes used by resolveWikiContext (mockable in tests). */
export interface ResolveContextDeps {
    cwd: string;
    loadConfig: () => Promise<{
        hubToken?: string;
        hubUrl?: string;
    }>;
    // Each git probe returns null when git fails or the value is absent.
    getGitRemoteUrl: (cwd: string) => string | null;
    getGitRoot: (cwd: string) => string | null;
    getGitHead: (cwd: string) => string | null;
    getGitDirty: (cwd: string) => boolean;
    buildApi: (hubUrl: string, token: string) => HubAPI;
    verifyClaudeCodeInstalled: () => Promise<boolean>;
    verifyClaudeCodeMcpRegistered: (hubUrl: string) => Promise<boolean>;
}
export interface ResolveContextOptions {
    /** Skip the Claude Code install/MCP probes (used by read-only commands). */
    skipClaudeChecks?: boolean;
}
/**
 * Validate local git state + Hub connectivity and return a WikiContext.
 * Throws GnxError with a specific ErrorCode at the first failed precondition.
 */
export declare function resolveWikiContext(deps: ResolveContextDeps, opts?: ResolveContextOptions): Promise<WikiContext>;
@@ -0,0 +1,82 @@
1
+ import { canonicalizeGitRemote } from '../git-remote.js';
2
+ import { GnxError, ErrorCode } from './errors.js';
3
/**
 * Validate the local git repo + Hub connection and return everything the
 * wiki commands need (repo identity, HEAD, canonical remote, Hub API client).
 *
 * Throws GnxError with a specific ErrorCode (and a user-facing hint) at the
 * first failed precondition; callers surface the code/hint directly.
 *
 * @param deps Injected git/config/API/Claude probes (see ResolveContextDeps).
 * @param opts skipClaudeChecks bypasses the Claude Code install/MCP probes.
 */
export async function resolveWikiContext(deps, opts = {}) {
    // 1. Must be inside a git work tree.
    const repoRoot = deps.getGitRoot(deps.cwd);
    if (!repoRoot) {
        throw new GnxError(ErrorCode.NOT_GIT_REPO, 'not a git repository', {
            hint: 'cd into your project, or run: gnx wiki <path>',
        });
    }
    // 2. Needs at least one commit so there is a HEAD to pin the wiki to.
    const headCommit = deps.getGitHead(repoRoot);
    if (!headCommit) {
        throw new GnxError(ErrorCode.NO_COMMITS, 'git repository has no commits yet', {
            hint: 'commit something before generating a wiki',
        });
    }
    // 3. An 'origin' remote is required to match the repo against the Hub.
    const rawRemote = deps.getGitRemoteUrl(repoRoot);
    if (!rawRemote) {
        throw new GnxError(ErrorCode.NO_REMOTE, "no 'origin' remote found", {
            hint: 'add one with: git remote add origin <url>',
        });
    }
    // Reject local-path remotes (POSIX absolute, Windows drive, file://) —
    // they cannot be matched against a forge URL on the Hub.
    if (rawRemote.startsWith('/') ||
        /^[a-zA-Z]:[\\/]/.test(rawRemote) ||
        rawRemote.startsWith('file://')) {
        throw new GnxError(ErrorCode.LOCAL_PATH_REMOTE, `origin points at a local path ('${rawRemote}')`, { hint: 'gnx needs a public/forge URL to match against the Hub' });
    }
    const canonicalRemote = canonicalizeGitRemote(rawRemote);
    if (!canonicalRemote) {
        throw new GnxError(ErrorCode.UNPARSEABLE_REMOTE, `origin is '${rawRemote}' which isn't a recognized git URL`, { hint: 'pass --remote <name> to use a different remote' });
    }
    const isDirty = deps.getGitDirty(repoRoot);
    // 4. Must be connected (token stored by `gnx connect`).
    const config = await deps.loadConfig();
    if (!config.hubToken) {
        throw new GnxError(ErrorCode.NOT_CONNECTED, 'not connected to the Hub', {
            hint: 'run: gnx connect <token>',
        });
    }
    // `||` (not `??`) so an empty-string hubUrl also falls back to the default.
    const hubUrl = config.hubUrl || 'https://gitnexus-enterprise-production.up.railway.app';
    // 5. Claude Code must be installed and wired to the Hub MCP server,
    //    unless the caller explicitly skips these probes.
    if (!opts.skipClaudeChecks) {
        if (!(await deps.verifyClaudeCodeInstalled())) {
            throw new GnxError(ErrorCode.CLAUDE_NOT_INSTALLED, "Claude Code isn't installed or on PATH", {
                hint: 'install: https://claude.com/claude-code',
            });
        }
        if (!(await deps.verifyClaudeCodeMcpRegistered(hubUrl))) {
            throw new GnxError(ErrorCode.CLAUDE_MCP_MISSING, "Claude Code isn't wired to the Hub MCP server", { hint: 're-run: gnx connect' });
        }
    }
    // 6. Resolve the repo on the Hub; 404-ish failures become REPO_NOT_ON_HUB,
    //    everything else NETWORK.
    const api = deps.buildApi(hubUrl, config.hubToken);
    let resolve;
    try {
        resolve = await api.resolveRepoByRemote(canonicalRemote);
    }
    catch (err) {
        const msg = err?.message ?? String(err);
        // NOTE(review): string-matching '404'/'not found' on the message is
        // fragile — confirm HubAPI doesn't expose a structured status code.
        if (msg.includes('404') || msg.toLowerCase().includes('not found')) {
            throw new GnxError(ErrorCode.REPO_NOT_ON_HUB, `${canonicalRemote} isn't indexed on the Hub`, {
                hint: 'add it with: gnx index <owner>/<repo>',
                cause: err,
            });
        }
        throw new GnxError(ErrorCode.NETWORK, `Hub request failed: ${msg}`, { cause: err });
    }
    // 7. Repo must be fully indexed and writable by this token.
    if (resolve.status !== 'ready') {
        throw new GnxError(ErrorCode.REPO_NOT_INDEXED, `${resolve.fullName} is on the Hub but still ${resolve.status}`, { hint: 'wait for indexing, or run: gnx sync --wait' });
    }
    if (!resolve.permissions.write) {
        throw new GnxError(ErrorCode.REPO_READ_ONLY, `you have read-only access to ${resolve.fullName}`, { hint: 'wiki uploads require write access — ask an owner to grant write' });
    }
    return {
        repoRoot,
        headCommit,
        isDirty,
        canonicalRemote,
        hubRepoId: resolve.repoId,
        hubFullName: resolve.fullName,
        hubIndexedCommit: resolve.lastCommit,
        hubIndexedStatus: resolve.status,
        hubUrl,
        api,
    };
}
@@ -0,0 +1,34 @@
1
+ import type { HubAPI } from '../api.js';
2
/** One node of the generated module tree. */
export interface ModuleNode {
    slug: string;
    title: string;
    summary?: string;
    files?: string[];
}
/** Produces the markdown page for a single module node. */
export interface GeneratePageFn {
    (node: ModuleNode): Promise<{
        title: string;
        contentMd: string;
    }>;
}
/** Inputs + progress callbacks for one Hub wiki-upload session. */
export interface RunSessionDeps {
    api: HubAPI;
    repoId: string;
    mode: 'full' | 'incremental';
    fromCommit: string;
    clientVersion: string;
    clientModel: string;
    /** Pages are generated and uploaded in this order. */
    modules: ModuleNode[];
    moduleTree: unknown;
    generatePage: GeneratePageFn;
    onSessionStart?: (sessionId: string) => void;
    onPageStart?: (slug: string) => void;
    /** `bytes` is the UTF-8 size of the uploaded markdown. */
    onPageDone?: (slug: string, bytes: number) => void;
    onPageFail?: (slug: string, err: unknown) => void;
}
export interface RunSessionResult {
    sessionId: string;
    pagesPersisted: number;
    /** Slugs whose generation failed twice or produced empty content. */
    failedSlugs: string[];
}
/**
 * Start a Hub upload session, stream each generated page, then finish.
 * Aborts the session server-side and rethrows on upload failure.
 */
export declare function runWikiUploadSession(deps: RunSessionDeps): Promise<RunSessionResult>;
@@ -0,0 +1,73 @@
1
+ import { GnxError, ErrorCode } from './errors.js';
2
/**
 * Drive one Hub wiki-upload session: start it, generate and upload each
 * module page in order, then finish (or abort on fatal errors).
 *
 * Per-page policy: a failed generation is retried exactly once; a page
 * that fails twice, or comes back empty, lands in failedSlugs and is
 * skipped. An UPLOAD failure, by contrast, is fatal: the session is
 * aborted server-side and the error is rethrown.
 */
export async function runWikiUploadSession(deps) {
    const { api, repoId } = deps;
    const started = await api.wikiUploadStart(repoId, {
        mode: deps.mode,
        fromCommit: deps.fromCommit,
        clientVersion: deps.clientVersion,
        clientModel: deps.clientModel,
    });
    const sessionId = started.sessionId;
    deps.onSessionStart?.(sessionId);
    const receivedSlugs = [];
    const failedSlugs = [];
    let aborted = false;
    // Abort the server-side session at most once, then rethrow `reason`
    // (wrapped in GnxError when it isn't already an Error). Errors from the
    // abort call itself are swallowed so the original failure surfaces.
    const abortOnce = async (reason) => {
        if (aborted)
            return;
        aborted = true;
        try {
            await api.wikiUploadAbort(repoId, sessionId);
        }
        catch {
            /* ignore secondary errors */
        }
        throw reason instanceof Error
            ? reason
            : new GnxError(ErrorCode.GENERATION_FAILED, String(reason));
    };
    for (const mod of deps.modules) {
        deps.onPageStart?.(mod.slug);
        let page;
        try {
            page = await deps.generatePage(mod);
        }
        catch (err) {
            // First failure: record + notify, then retry once. A successful
            // retry removes the slug again (observers will still have seen
            // the intermediate onPageFail — intentional).
            failedSlugs.push(mod.slug);
            deps.onPageFail?.(mod.slug, err);
            try {
                page = await deps.generatePage(mod);
                failedSlugs.splice(failedSlugs.indexOf(mod.slug), 1);
            }
            catch (err2) {
                deps.onPageFail?.(mod.slug, err2);
                continue;
            }
        }
        // Whitespace-only output counts as a failed generation (no retry here).
        if (!page.contentMd || !page.contentMd.trim()) {
            failedSlugs.push(mod.slug);
            deps.onPageFail?.(mod.slug, new Error('empty generation result'));
            continue;
        }
        try {
            await api.wikiUploadPage(repoId, sessionId, {
                slug: mod.slug,
                title: page.title,
                contentMd: page.contentMd,
            });
            receivedSlugs.push(mod.slug);
            deps.onPageDone?.(mod.slug, Buffer.byteLength(page.contentMd, 'utf8'));
        }
        catch (err) {
            // Upload errors are fatal: abort the session and rethrow.
            await abortOnce(err);
        }
    }
    // Nothing uploaded at all → abort instead of finishing an empty session.
    if (receivedSlugs.length === 0) {
        await abortOnce(new GnxError(ErrorCode.GENERATION_FAILED, 'no pages were generated successfully'));
    }
    const finished = await api.wikiUploadFinish(repoId, sessionId, {
        moduleTree: deps.moduleTree,
        receivedSlugs,
    });
    return { sessionId, pagesPersisted: finished.pagesPersisted, failedSlugs };
}
@@ -0,0 +1,8 @@
1
+ import type { ResolveContextDeps } from './resolve-context.js';
2
/**
 * Resolve the current repo against the Hub (Claude Code checks skipped)
 * and report the active and most recent wiki upload sessions.
 */
export declare function runWikiStatus(deps: ResolveContextDeps): Promise<{
    fullName: string;
    canonicalRemote: string;
    hubIndexedCommit: string | null;
    active: unknown;
    last: unknown;
}>;
@@ -0,0 +1,12 @@
1
+ import { resolveWikiContext } from './resolve-context.js';
2
/**
 * Resolve the Hub context for the current repo — skipping the Claude Code
 * probes, since status is read-only — and report the active plus most
 * recent wiki upload sessions.
 */
export async function runWikiStatus(deps) {
    const context = await resolveWikiContext(deps, { skipClaudeChecks: true });
    const uploadStatus = await context.api.wikiUploadStatus(context.hubRepoId);
    const { hubFullName, canonicalRemote, hubIndexedCommit } = context;
    return {
        fullName: hubFullName,
        canonicalRemote,
        hubIndexedCommit,
        active: uploadStatus.active,
        last: uploadStatus.last,
    };
}
@@ -0,0 +1,19 @@
1
+ import type { ClaudeRunner } from './claude.js';
2
+ import { type ResolveContextDeps } from './resolve-context.js';
3
export interface RunWikiUploadOptions {
    cwd: string;
    /** Defaults to 'full'. */
    mode?: 'full' | 'incremental';
    model?: string;
    /** Aborting cancels generation and aborts the Hub session. */
    abortSignal?: AbortSignal;
}
/** ResolveContextDeps plus the Claude runner and client identity strings. */
export interface RunWikiUploadDeps extends ResolveContextDeps {
    createClaudeRunner: () => ClaudeRunner;
    clientVersion: string;
    model: string;
}
export interface RunWikiUploadResult {
    pagesPersisted: number;
    failedSlugs: string[];
    sessionId: string;
}
/**
 * End-to-end wiki generation: module tree → leaf pages → parent pages →
 * overview, streamed to the Hub inside one upload session.
 */
export declare function runWikiUpload(opts: RunWikiUploadOptions, deps: RunWikiUploadDeps): Promise<RunWikiUploadResult>;
@@ -0,0 +1,320 @@
1
+ import { resolveWikiContext } from './resolve-context.js';
2
+ import { runWikiUploadSession } from './session.js';
3
+ import { GnxError, ErrorCode } from './errors.js';
4
+ import { info } from '../cli-helpers.js';
5
/**
 * Lower-case `name`, collapse every run of non-alphanumerics into a single
 * hyphen, and strip a leading/trailing hyphen.
 */
function slugify(name) {
    const collapsed = name.toLowerCase().replace(/[^a-z0-9]+/g, '-');
    return collapsed.replace(/^-|-$/g, '');
}
/**
 * Parse the model's module-tree reply: strip an optional ```json fence,
 * JSON-parse the remainder, and convert a { "Module Name": [files] } object
 * into module records. Throws GnxError when nothing usable comes back.
 */
function parseModuleTree(raw) {
    const cleaned = raw
        .trim()
        .replace(/^```(?:json)?\s*/i, '')
        .replace(/```$/, '')
        .trim();
    let parsed;
    try {
        parsed = JSON.parse(cleaned);
    }
    catch (err) {
        throw new GnxError(ErrorCode.GENERATION_FAILED, 'module tree response was not valid JSON', {
            cause: err,
        });
    }
    if (parsed && typeof parsed === 'object' && !Array.isArray(parsed.modules)) {
        // Keep only entries whose value is an array and whose name yields a
        // non-empty slug; non-string file entries are dropped.
        const modules = Object.entries(parsed)
            .filter(([, files]) => Array.isArray(files))
            .map(([name, files]) => ({
                slug: slugify(name),
                title: name,
                files: files.filter((f) => typeof f === 'string'),
            }))
            .filter((mod) => mod.slug.length > 0);
        if (modules.length > 0)
            return modules;
    }
    throw new GnxError(ErrorCode.GENERATION_FAILED, 'module tree had no valid modules');
}
45
// A module whose flat file list exceeds this is split into per-directory children.
const SPLIT_FILE_THRESHOLD = 20;
/**
 * Group `files` by their first one or two path segments and emit one child
 * node per group. Child titles are "<parent> — <label>"; the label is the
 * group's basename unless two groups share one, in which case the full
 * (hyphenated) sub-path is used.
 */
function splitBySubdirectory(parentName, files) {
    const groups = new Map();
    for (const filePath of files) {
        const segments = filePath.replace(/\\/g, '/').split('/');
        const groupKey = segments.length > 2 ? segments.slice(0, 2).join('/') : segments[0];
        const bucket = groups.get(groupKey) ?? [];
        bucket.push(filePath);
        groups.set(groupKey, bucket);
    }
    const keys = [...groups.keys()];
    const basenames = keys.map((k) => k.split('/').pop());
    const hasCollisions = new Set(basenames).size < basenames.length;
    return keys.map((key) => {
        const label = hasCollisions ? key.replace(/\//g, '-') : key.split('/').pop();
        return {
            slug: slugify(`${parentName}-${label}`),
            title: `${parentName} — ${label}`,
            files: groups.get(key),
        };
    });
}
/**
 * Mutates `modules` in place: any node with more than SPLIT_FILE_THRESHOLD
 * files that splits into at least two subdirectory groups gets `children`
 * and an emptied `files` list. Returns the same array.
 */
function applyModuleSplitting(modules) {
    for (const node of modules) {
        const nodeFiles = node.files ?? [];
        if (nodeFiles.length <= SPLIT_FILE_THRESHOLD)
            continue;
        const children = splitBySubdirectory(node.title, nodeFiles);
        if (children.length > 1) {
            node.children = children;
            node.files = [];
        }
    }
    return modules;
}
82
/**
 * Flatten a module tree one level deep: a node with children contributes
 * its children as leaves and itself as a parent; a childless node is a leaf.
 */
function flattenModuleTree(tree) {
    const leaves = [];
    const parents = [];
    for (const node of tree) {
        const kids = node.children ?? [];
        if (kids.length === 0) {
            leaves.push(node);
            continue;
        }
        leaves.push(...kids);
        parents.push(node);
    }
    return { leaves, parents };
}
97
/**
 * Strip an optional surrounding ```markdown fence from a model reply and
 * return the trimmed markdown body; unfenced replies are just trimmed.
 */
function extractPageMarkdown(raw) {
    const trimmed = raw.trim();
    if (!trimmed.startsWith('```'))
        return trimmed;
    return trimmed
        .replace(/^```(?:markdown)?\s*/i, '')
        .replace(/```$/, '')
        .trim();
}
107
/**
 * Replace every `{{KEY}}` placeholder in `template` with the matching value
 * from `vars`, substituting the value verbatim.
 *
 * Fix: the previous implementation used String.prototype.replaceAll with a
 * string replacement, which interprets `$`-substitution patterns ($$, $&,
 * $', $`) inside the VALUE — so injected source code or markdown containing
 * `$$` or `$&` was silently corrupted in the prompt. split/join performs a
 * literal substitution with identical behavior for `$`-free values.
 *
 * @param template Prompt template containing `{{KEY}}` placeholders.
 * @param vars     Map of placeholder name → replacement text.
 * @returns The template with all known placeholders expanded.
 */
function fillTemplate(template, vars) {
    let result = template;
    for (const [key, value] of Object.entries(vars)) {
        result = result.split(`{{${key}}}`).join(value);
    }
    return result;
}
114
/**
 * End-to-end wiki generation + upload:
 *   Phase 1 — ask Claude for a module tree from Hub graph context,
 *   Phase 2 — generate leaf pages, then parent summary pages,
 *   Phase 3 — stream every page (plus a final overview) to the Hub
 *             inside one upload session.
 *
 * `opts.abortSignal` is checked between phases and before each page; an
 * abort also cancels the server-side session. Throws GnxError
 * (USER_ABORTED plus generation/network codes) on failure.
 */
export async function runWikiUpload(opts, deps) {
    const ctx = await resolveWikiContext(deps);
    if (opts.abortSignal?.aborted)
        throw new GnxError(ErrorCode.USER_ABORTED, 'aborted before session start');
    const runner = deps.createClaudeRunner();
    info(`Fetching graph context from Hub (${ctx.hubFullName})...`);
    // Prompt templates and grouping context are independent — fetch in parallel.
    const [prompts, groupingCtx] = await Promise.all([
        ctx.api.wikiPromptTemplates(ctx.hubRepoId),
        ctx.api.wikiGroupingContext(ctx.hubRepoId),
    ]);
    // Phase 1: Module tree
    info('Phase 1/3: generating module tree with Claude Code...');
    const groupingPrompt = fillTemplate(prompts.grouping.user, {
        COMMUNITY_GROUPS: groupingCtx.communityGroups,
        INTER_COMMUNITY_EDGES: groupingCtx.interCommunityEdges,
        CROSS_COMMUNITY_PROCESSES: groupingCtx.crossCommunityProcesses,
        FILE_LIST: groupingCtx.filesWithExports,
        DIRECTORY_TREE: groupingCtx.directoryTree,
    });
    // allowedTools: [] — the model must answer from the prompt alone.
    const moduleTreeRaw = await runner.run(`${prompts.grouping.system}\n\n${groupingPrompt}`, {
        cwd: ctx.repoRoot,
        model: deps.model,
        allowedTools: [],
    });
    const rawModules = parseModuleTree(moduleTreeRaw.text);
    // Oversized modules are split into per-subdirectory children; leaves are
    // generated before parents so parents can summarize their children.
    const modules = applyModuleSplitting(rawModules);
    const { leaves, parents } = flattenModuleTree(modules);
    const allPageNodes = [...leaves, ...parents];
    const moduleRegistry = allPageNodes.map((m) => `- [${m.title}](${m.slug}.md)`).join('\n');
    // slug → generated markdown; reused by parent and overview prompts.
    const generatedPages = new Map();
    info(`Phase 2/3: generating ${leaves.length} leaf page(s) + ${parents.length} parent page(s)...`);
    if (opts.abortSignal?.aborted)
        throw new GnxError(ErrorCode.USER_ABORTED, 'aborted before page generation');
    // Phase 2a: Leaf pages
    const generateLeafPage = async (node) => {
        if (opts.abortSignal?.aborted)
            throw new GnxError(ErrorCode.USER_ABORTED, 'aborted during page generation');
        const filePaths = node.files ?? [];
        let leafCtx;
        try {
            leafCtx = await ctx.api.wikiLeafContext(ctx.hubRepoId, node.title, filePaths);
        }
        catch {
            // Context fetch is best-effort: fall back to empty placeholders
            // so the page is still generated from the repo alone.
            leafCtx = {
                sourceCode: '',
                intraCalls: 'None',
                outgoingCalls: 'None',
                incomingCalls: 'None',
                processes: 'No execution flows detected.',
            };
        }
        // Cross-link registry excludes the page being generated.
        const reg = allPageNodes
            .filter((m) => m.slug !== node.slug)
            .map((m) => `- [${m.title}](${m.slug}.md)`)
            .join('\n');
        const pagePrompt = fillTemplate(prompts.module.user, {
            MODULE_NAME: node.title,
            SOURCE_CODE: leafCtx.sourceCode,
            INTRA_CALLS: leafCtx.intraCalls,
            OUTGOING_CALLS: leafCtx.outgoingCalls,
            INCOMING_CALLS: leafCtx.incomingCalls,
            PROCESSES: leafCtx.processes,
            MODULE_REGISTRY: reg,
        });
        const result = await runner.run(`${prompts.module.system}\n\n${pagePrompt}`, {
            cwd: ctx.repoRoot,
            model: deps.model,
            allowedTools: [],
        });
        // Prefix the H1 ourselves so every page is uniformly titled.
        const content = `# ${node.title}\n\n${extractPageMarkdown(result.text)}`;
        generatedPages.set(node.slug, content);
        return { title: node.title, contentMd: content };
    };
    // Phase 2b: Parent pages
    const generateParentPage = async (node) => {
        if (opts.abortSignal?.aborted)
            throw new GnxError(ErrorCode.USER_ABORTED, 'aborted during parent page generation');
        // Summarize each child from the first 800 chars of its generated page.
        const childDocs = (node.children ?? [])
            .map((child) => {
            const content = generatedPages.get(child.slug);
            return content
                ? `#### ${child.title}\n${content.slice(0, 800).trim()}`
                : `#### ${child.title}\n(Documentation not yet generated)`;
        })
            .join('\n\n');
        const allChildFiles = (node.children ?? []).flatMap((c) => c.files ?? []);
        let parentCtx;
        try {
            parentCtx = await ctx.api.wikiLeafContext(ctx.hubRepoId, node.title, allChildFiles);
        }
        catch {
            parentCtx = { intraCalls: 'None', processes: 'No execution flows detected.' };
        }
        const reg = allPageNodes
            .filter((m) => m.slug !== node.slug)
            .map((m) => `- [${m.title}](${m.slug}.md)`)
            .join('\n');
        const parentPrompt = fillTemplate(prompts.parent.user, {
            MODULE_NAME: node.title,
            CHILDREN_DOCS: childDocs,
            CROSS_MODULE_CALLS: parentCtx.intraCalls ?? 'None',
            CROSS_PROCESSES: parentCtx.processes ?? 'No execution flows detected.',
            MODULE_REGISTRY: reg,
        });
        const result = await runner.run(`${prompts.parent.system}\n\n${parentPrompt}`, {
            cwd: ctx.repoRoot,
            model: deps.model,
            allowedTools: [],
        });
        const content = `# ${node.title}\n\n${extractPageMarkdown(result.text)}`;
        generatedPages.set(node.slug, content);
        return { title: node.title, contentMd: content };
    };
    // Phase 3: Overview page
    const generateOverview = async () => {
        if (opts.abortSignal?.aborted)
            throw new GnxError(ErrorCode.USER_ABORTED, 'aborted during overview generation');
        const moduleSummaries = modules
            .map((m) => {
            const content = generatedPages.get(m.slug);
            return content
                ? `#### ${m.title}\n${content.slice(0, 600).trim()}`
                : `#### ${m.title}\n(Documentation pending)`;
        })
            .join('\n\n');
        // title → flat file list (children flattened back in for split modules).
        const moduleFiles = {};
        for (const m of modules) {
            const files = m.children ? m.children.flatMap((c) => c.files ?? []) : (m.files ?? []);
            if (files.length > 0)
                moduleFiles[m.title] = files;
        }
        let overviewCtx;
        try {
            overviewCtx = await ctx.api.wikiOverviewContext(ctx.hubRepoId, moduleFiles);
        }
        catch {
            overviewCtx = {
                interModuleEdges: 'No inter-module call edges detected',
                topProcesses: 'No execution flows detected.',
            };
        }
        const overviewPrompt = fillTemplate(prompts.overview.user, {
            PROJECT_INFO: `Repository: ${ctx.hubFullName}`,
            MODULE_SUMMARIES: moduleSummaries,
            MODULE_EDGES: overviewCtx.interModuleEdges,
            TOP_PROCESSES: overviewCtx.topProcesses,
            MODULE_REGISTRY: moduleRegistry,
        });
        const result = await runner.run(`${prompts.overview.system}\n\n${overviewPrompt}`, {
            cwd: ctx.repoRoot,
            model: deps.model,
            allowedTools: [],
        });
        return { title: 'Overview', contentMd: extractPageMarkdown(result.text) };
    };
    // Execute: leaves → parents → overview
    const overviewNode = { slug: 'overview', title: 'Overview', files: [] };
    const orderedModules = [...leaves, ...parents, overviewNode];
    const generatePageByType = async (node) => {
        // NOTE(review): a real module that slugifies to 'overview' and has no
        // files would be routed to the overview generator — unlikely but
        // unguarded; confirm module names can't collide with the sentinel.
        if (node.slug === 'overview' && (node.files ?? []).length === 0)
            return generateOverview();
        const treeNode = parents.find((p) => p.slug === node.slug);
        if (treeNode?.children)
            return generateParentPage(treeNode);
        return generateLeafPage(node);
    };
    const moduleTree = orderedModules.map((m) => ({
        name: m.title,
        slug: m.slug,
        summary: m.summary,
        files: m.files,
    }));
    info('Phase 3/3: streaming pages to Hub + generating overview...');
    // Abort handler cancels the server-side session once it exists; errors
    // from the abort call are ignored so the primary failure surfaces.
    let activeSessionId = null;
    const onAbort = async () => {
        if (!activeSessionId)
            return;
        try {
            await ctx.api.wikiUploadAbort(ctx.hubRepoId, activeSessionId);
        }
        catch {
            /* ignore secondary errors */
        }
    };
    // NOTE(review): the listener is never removed after a successful run —
    // harmless since an AbortSignal fires at most once, but worth confirming.
    opts.abortSignal?.addEventListener('abort', onAbort, { once: true });
    try {
        return await runWikiUploadSession({
            api: ctx.api,
            repoId: ctx.hubRepoId,
            mode: opts.mode ?? 'full',
            fromCommit: ctx.headCommit,
            clientVersion: deps.clientVersion,
            clientModel: deps.model,
            modules: orderedModules,
            moduleTree,
            generatePage: generatePageByType,
            onSessionStart: (id) => {
                activeSessionId = id;
            },
        });
    }
    catch (err) {
        // Any failure while the user has aborted is reported as USER_ABORTED.
        if (opts.abortSignal?.aborted)
            throw new GnxError(ErrorCode.USER_ABORTED, 'user aborted', { cause: err });
        throw err;
    }
}
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "gitnexushub",
3
- "version": "0.4.2",
3
+ "version": "0.4.4",
4
4
  "description": "Connect your editor to GitNexus Hub — one command MCP setup + project context",
5
5
  "author": "Abhigyan Patwari",
6
6
  "license": "PolyForm-Noncommercial-1.0.0",
@@ -35,6 +35,7 @@
35
35
  "prepare": "npm run build"
36
36
  },
37
37
  "dependencies": {
38
+ "@anthropic-ai/claude-agent-sdk": "^0.2.109",
38
39
  "commander": "^12.0.0",
39
40
  "picocolors": "^1.1.1",
40
41
  "tar-stream": "^3.1.8"