gitnexushub 0.4.2 → 0.4.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/api.d.ts CHANGED
@@ -72,6 +72,55 @@ export interface SyncJobStatus {
72
72
  phase: string | null;
73
73
  error: string | null;
74
74
  }
75
/** Result of POST /api/repos/resolve — maps a canonical git remote to a Hub repo. */
export interface ResolveRepoResult {
    repoId: string;
    fullName: string;
    /** Hub indexing status; callers treat 'ready' as the only usable state. */
    status: string;
    indexedAt: string | null;
    /** Commit the Hub last indexed, or null if never indexed. */
    lastCommit: string | null;
    /** The authenticated user's access rights on this repo. */
    permissions: {
        read: boolean;
        write: boolean;
    };
}
/** Parameters for opening a wiki upload session. */
export interface WikiUploadStartParams {
    mode: 'full' | 'incremental';
    /** Commit the client generated the wiki from. */
    fromCommit?: string;
    clientVersion?: string;
    clientModel?: string;
}
/** Hub response when an upload session is opened. */
export interface WikiUploadStartResult {
    sessionId: string;
    status: 'active';
    startedAt: string;
}
/** One generated wiki page as sent to the Hub. */
export interface WikiUploadPagePayload {
    slug: string;
    title: string;
    /** Markdown body (serialized to the Hub as `content_md`). */
    contentMd: string;
}
/** Parameters for finalizing an upload session. */
export interface WikiUploadFinishParams {
    moduleTree: unknown;
    /** Slugs of all pages successfully uploaded during this session. */
    receivedSlugs: string[];
}
/** Hub-side wiki feature flags plus the suggested client command. */
export interface WikiConfig {
    serverGenerationEnabled: boolean;
    clientUploadAvailable: boolean;
    clientCommand: string;
}
/** A wiki upload session record as stored on the Hub. */
export interface WikiUploadSessionRow {
    id: string;
    userId: string;
    status: 'active' | 'finished' | 'aborted' | 'stale';
    mode: 'full' | 'incremental';
    pagesReceived: number;
    startedAt: string;
    finishedAt: string | null;
}
/** Upload status for a repo: the active session (if any) plus the most recent one. */
export interface WikiUploadStatus {
    active: WikiUploadSessionRow | null;
    last: WikiUploadSessionRow | null;
}
75
124
  export declare class HubAPI {
76
125
  private hubUrl;
77
126
  private token;
@@ -81,6 +130,7 @@ export declare class HubAPI {
81
130
  private get authHeaders();
82
131
  private request;
83
132
  private post;
133
+ private postJson;
84
134
  getMe(): Promise<UserProfile>;
85
135
  listRepos(): Promise<HubRepo[]>;
86
136
  getConnectContext(repoFullName: string): Promise<ConnectContext>;
@@ -92,4 +142,41 @@ export declare class HubAPI {
92
142
  tarball: NodeJS.ReadableStream;
93
143
  }): Promise<SyncResult>;
94
144
  syncStatus(repoId: string, jobId: string): Promise<SyncJobStatus>;
145
+ resolveRepoByRemote(remote: string): Promise<ResolveRepoResult>;
146
+ wikiUploadStart(repoId: string, params: WikiUploadStartParams): Promise<WikiUploadStartResult>;
147
+ wikiUploadPage(repoId: string, sessionId: string, page: WikiUploadPagePayload): Promise<{
148
+ slug: string;
149
+ pagesReceived: number;
150
+ }>;
151
+ wikiUploadFinish(repoId: string, sessionId: string, params: WikiUploadFinishParams): Promise<{
152
+ sessionId: string;
153
+ pagesPersisted: number;
154
+ }>;
155
+ wikiUploadAbort(repoId: string, sessionId: string): Promise<{
156
+ sessionId: string;
157
+ status: string;
158
+ }>;
159
+ wikiUploadStatus(repoId: string): Promise<WikiUploadStatus>;
160
+ getWikiConfig(): Promise<WikiConfig>;
161
+ wikiGroupingContext(repoId: string): Promise<any>;
162
+ wikiLeafContext(repoId: string, moduleName: string, filePaths: string[]): Promise<any>;
163
+ wikiOverviewContext(repoId: string, moduleFiles: Record<string, string[]>): Promise<any>;
164
+ wikiPromptTemplates(repoId: string): Promise<{
165
+ grouping: {
166
+ system: string;
167
+ user: string;
168
+ };
169
+ module: {
170
+ system: string;
171
+ user: string;
172
+ };
173
+ parent: {
174
+ system: string;
175
+ user: string;
176
+ };
177
+ overview: {
178
+ system: string;
179
+ user: string;
180
+ };
181
+ }>;
95
182
  }
package/dist/api.js CHANGED
@@ -49,6 +49,22 @@ export class HubAPI {
49
49
  }
50
50
  return res.json();
51
51
  }
52
+ async postJson(path, body, extraHeaders = {}) {
53
+ const res = await fetch(`${this.hubUrl}${path}`, {
54
+ method: 'POST',
55
+ headers: {
56
+ ...this.authHeaders,
57
+ 'Content-Type': 'application/json',
58
+ ...extraHeaders,
59
+ },
60
+ body: JSON.stringify(body),
61
+ });
62
+ if (!res.ok) {
63
+ const err = await res.json().catch(() => ({ error: res.statusText }));
64
+ throw new Error(err.error || `HTTP ${res.status}`);
65
+ }
66
+ return res.json();
67
+ }
52
68
  async getMe() {
53
69
  return this.request('/auth/me');
54
70
  }
@@ -103,4 +119,39 @@ export class HubAPI {
103
119
  async syncStatus(repoId, jobId) {
104
120
  return this.request(`/api/repos/${repoId}/sync/${jobId}`);
105
121
  }
122
+ async resolveRepoByRemote(remote) {
123
+ return this.postJson('/api/repos/resolve', { remote });
124
+ }
125
+ async wikiUploadStart(repoId, params) {
126
+ return this.postJson(`/api/repos/${repoId}/wiki/upload/start`, params);
127
+ }
128
+ async wikiUploadPage(repoId, sessionId, page) {
129
+ return this.postJson(`/api/repos/${repoId}/wiki/upload/page`, { slug: page.slug, title: page.title, content_md: page.contentMd }, { 'X-Wiki-Upload-Session': sessionId });
130
+ }
131
+ async wikiUploadFinish(repoId, sessionId, params) {
132
+ return this.postJson(`/api/repos/${repoId}/wiki/upload/finish`, params, { 'X-Wiki-Upload-Session': sessionId });
133
+ }
134
+ async wikiUploadAbort(repoId, sessionId) {
135
+ return this.postJson(`/api/repos/${repoId}/wiki/upload/abort`, {}, { 'X-Wiki-Upload-Session': sessionId });
136
+ }
137
+ async wikiUploadStatus(repoId) {
138
+ return this.request(`/api/repos/${repoId}/wiki/upload/status`);
139
+ }
140
+ async getWikiConfig() {
141
+ return this.request(`/api/wiki-config`);
142
+ }
143
+ async wikiGroupingContext(repoId) {
144
+ return this.request(`/api/repos/${repoId}/wiki/context/grouping`);
145
+ }
146
+ async wikiLeafContext(repoId, moduleName, filePaths) {
147
+ const params = new URLSearchParams({ name: moduleName, files: filePaths.join(',') });
148
+ return this.request(`/api/repos/${repoId}/wiki/context/leaf?${params}`);
149
+ }
150
+ async wikiOverviewContext(repoId, moduleFiles) {
151
+ const params = new URLSearchParams({ moduleFiles: JSON.stringify(moduleFiles) });
152
+ return this.request(`/api/repos/${repoId}/wiki/context/overview?${params}`);
153
+ }
154
+ async wikiPromptTemplates(repoId) {
155
+ return this.request(`/api/repos/${repoId}/wiki/context/prompts`);
156
+ }
106
157
  }
@@ -0,0 +1,6 @@
1
/**
 * Canonicalizes a git remote URL into "host/path" form so that two variants
 * of the same upstream (https vs ssh, with/without `.git`, with/without creds)
 * compare equal. Returns null if the input isn't a recognizable git URL.
 *
 * @param raw - Remote URL as reported by `git remote get-url` (http(s), ssh,
 *   git, or SCP-style `user@host:path`). Local paths and `file://` are rejected.
 * @returns Lowercased `host/path` (path also lowercased on known
 *   case-insensitive forges such as github.com), or `null` when unparseable.
 *
 * @example
 * canonicalizeGitRemote('git@github.com:User/Repo.git') // 'github.com/user/repo'
 */
export declare function canonicalizeGitRemote(raw: string): string | null;
@@ -0,0 +1,57 @@
1
// Forges whose repo paths are case-insensitive; their paths are lowercased so
// URL-casing variants of the same repo compare equal.
const CASE_INSENSITIVE_HOSTS = new Set([
    'github.com',
    'gitlab.com',
    'bitbucket.org',
    'codeberg.org',
    'gitea.com',
]);
/**
 * Canonicalizes a git remote URL into "host/path" form so that two variants
 * of the same upstream (https vs ssh, with/without `.git`, with/without creds)
 * compare equal. Returns null if the input isn't a recognizable git URL.
 */
export function canonicalizeGitRemote(raw) {
    if (typeof raw !== 'string' || !raw)
        return null;
    const trimmed = raw.trim();
    if (trimmed === '')
        return null;
    // Local filesystem remotes (POSIX, Windows drive, file://) can't be
    // matched against a hosted upstream.
    const looksLocal = trimmed.startsWith('/') ||
        trimmed.startsWith('file://') ||
        /^[a-zA-Z]:[\\/]/.test(trimmed);
    if (looksLocal)
        return null;
    let host;
    let pathPart;
    // SCP-style (user@host:path) only applies when there is no explicit scheme.
    const scpMatch = !trimmed.includes('://')
        ? trimmed.match(/^(?:([^@]+)@)?([^:/@]+):([^:].*)$/)
        : null;
    if (scpMatch) {
        [, , host, pathPart] = scpMatch;
    }
    else {
        let url;
        try {
            url = new URL(trimmed);
        }
        catch {
            return null;
        }
        if (!['http:', 'https:', 'ssh:', 'git:'].includes(url.protocol))
            return null;
        host = url.hostname;
        pathPart = url.pathname;
    }
    if (!host)
        return null;
    // Normalize the path: drop surrounding slashes, then the .git suffix.
    pathPart = pathPart
        .replace(/^\/+/, '')
        .replace(/\/+$/, '')
        .replace(/\.git$/i, '');
    if (!pathPart)
        return null;
    const lowerHost = host.toLowerCase();
    return CASE_INSENSITIVE_HOSTS.has(lowerHost)
        ? `${lowerHost}/${pathPart.toLowerCase()}`
        : `${lowerHost}/${pathPart}`;
}
package/dist/index.js CHANGED
@@ -18,6 +18,7 @@ import { removeProjectContext } from './context.js';
18
18
  import { runSync } from './sync-command.js';
19
19
  import { ok, info, warn, fail, resolveAuth, DEFAULT_HUB_URL, EDITORS } from './cli-helpers.js';
20
20
  import { runConnect } from './connect-command.js';
21
+ import { registerWikiCommand } from './wiki/index.js';
21
22
  const BANNER = [
22
23
  ' ██████╗ ██╗████████╗███╗ ██╗███████╗██╗ ██╗██╗ ██╗███████╗',
23
24
  '██╔════╝ ██║╚══██╔══╝████╗ ██║██╔════╝╚██╗██╔╝██║ ██║██╔════╝',
@@ -110,6 +111,8 @@ program
110
111
  process.exit(1);
111
112
  }
112
113
  });
114
+ // ─── wiki commands ────────────────────────────────────────────────
115
+ registerWikiCommand(program);
113
116
  program.parse();
114
117
  // ─── Disconnect Flow ──────────────────────────────────────────────
115
118
  async function runDisconnect(opts) {
@@ -0,0 +1,7 @@
1
import type { ResolveContextDeps } from './resolve-context.js';
/**
 * Aborts the active wiki upload session for the current repo, if any.
 * Resolves the repo context first (Claude Code checks skipped), then asks the
 * Hub for the active session and aborts it.
 *
 * @returns The Hub's abort result when a session was aborted, or
 *   `{ noActive: true }` when there was nothing to abort.
 */
export declare function runWikiAbort(deps: ResolveContextDeps): Promise<{
    sessionId: string;
    status: string;
} | {
    noActive: true;
}>;
@@ -0,0 +1,15 @@
1
import { resolveWikiContext } from './resolve-context.js';
import { GnxError, ErrorCode } from './errors.js';
/**
 * Aborts the active wiki upload session for the current repo, if any.
 * Claude Code preflight checks are skipped — aborting needs no generation.
 * Returns { noActive: true } when there is no session to abort.
 */
export async function runWikiAbort(deps) {
    const ctx = await resolveWikiContext(deps, { skipClaudeChecks: true });
    const { active } = await ctx.api.wikiUploadStatus(ctx.hubRepoId);
    if (!active)
        return { noActive: true };
    try {
        return await ctx.api.wikiUploadAbort(ctx.hubRepoId, active.id);
    }
    catch (err) {
        throw new GnxError(ErrorCode.NETWORK, 'failed to abort session', { cause: err });
    }
}
@@ -0,0 +1,12 @@
1
/** Outcome of one headless Claude Code run. */
export interface ClaudeGenerationResult {
    /** Final result text of the run ('' when no result message was produced). */
    text: string;
    /** Wall-clock duration of the run in milliseconds. */
    durationMs: number;
}
/** Minimal runner abstraction over a headless Claude Code session. */
export interface ClaudeRunner {
    run(prompt: string, opts: {
        /** Working directory the session runs in (the user's repo). */
        cwd: string;
        model?: string;
        /** Tool whitelist; a read-only default set is used when omitted. */
        allowedTools?: string[];
    }): Promise<ClaudeGenerationResult>;
}
/** Builds the production ClaudeRunner backed by @anthropic-ai/claude-agent-sdk. */
export declare function createClaudeRunner(): ClaudeRunner;
@@ -0,0 +1,48 @@
1
import { query } from '@anthropic-ai/claude-agent-sdk';
import { GnxError, ErrorCode } from './errors.js';
// Default tool whitelist for headless runs: local read-only file access
// (Read/Grep) plus the gitnexus MCP graph tools.
const DEFAULT_ALLOWED_TOOLS = [
    'Read',
    'Grep',
    'mcp__gitnexus__context',
    'mcp__gitnexus__query',
    'mcp__gitnexus__group_list',
    'mcp__gitnexus__group_contracts',
    'mcp__gitnexus__cypher',
];
/**
 * Creates a ClaudeRunner that drives a headless Claude Code session via the
 * Agent SDK's query() stream and returns the final result text plus timing.
 */
export function createClaudeRunner() {
    return {
        async run(prompt, opts) {
            const start = Date.now();
            let finalText = '';
            try {
                // Stream SDK messages; only 'result' messages carry output text.
                // A later result overwrites an earlier one.
                for await (const msg of query({
                    prompt,
                    options: {
                        cwd: opts.cwd,
                        model: opts.model,
                        allowedTools: opts.allowedTools ?? DEFAULT_ALLOWED_TOOLS,
                    },
                })) {
                    const m = msg;
                    if (m.type === 'result' && typeof m.result === 'string') {
                        finalText = m.result;
                    }
                }
            }
            catch (err) {
                const msg = err?.message ?? String(err);
                // Heuristic: auth-looking failures get the dedicated exit code
                // and a re-login hint; everything else is a generation failure.
                if (/auth|login|credentials/i.test(msg)) {
                    throw new GnxError(ErrorCode.CLAUDE_AUTH_FAILED, 'Claude Code authentication failed', {
                        hint: 're-authenticate: run `claude /login`',
                        cause: err,
                    });
                }
                throw new GnxError(ErrorCode.GENERATION_FAILED, `Claude Code error: ${msg}`, {
                    cause: err,
                });
            }
            const durationMs = Date.now() - start;
            return { text: finalText, durationMs };
        },
    };
}
@@ -0,0 +1,49 @@
1
/**
 * Typed errors with stable exit codes for `gnx wiki *` commands.
 *
 * Exit code ranges (keep stable — used by shell scripts / CI):
 *   10-19 local git problems
 *   20-29 repo resolution / Hub repo state
 *   30-39 auth / config problems
 *   40-49 Claude Code problems
 *   50-59 network / Hub reachability
 *   60-69 permissions (403, read-only)
 *   70-79 session conflict / rate limit
 *   80-89 upload validation / in-flight failures
 */
export declare enum ErrorCode {
    NOT_GIT_REPO = "NOT_GIT_REPO",
    NO_COMMITS = "NO_COMMITS",
    NO_REMOTE = "NO_REMOTE",
    UNPARSEABLE_REMOTE = "UNPARSEABLE_REMOTE",
    LOCAL_PATH_REMOTE = "LOCAL_PATH_REMOTE",
    NOT_CONNECTED = "NOT_CONNECTED",
    CONFIG_CORRUPT = "CONFIG_CORRUPT",
    AUTH_INVALID = "AUTH_INVALID",
    CLAUDE_NOT_INSTALLED = "CLAUDE_NOT_INSTALLED",
    CLAUDE_MCP_MISSING = "CLAUDE_MCP_MISSING",
    CLAUDE_AUTH_FAILED = "CLAUDE_AUTH_FAILED",
    NETWORK = "NETWORK",
    HUB_UNREACHABLE = "HUB_UNREACHABLE",
    REPO_NOT_ON_HUB = "REPO_NOT_ON_HUB",
    REPO_NOT_INDEXED = "REPO_NOT_INDEXED",
    REPO_READ_ONLY = "REPO_READ_ONLY",
    SESSION_CONFLICT = "SESSION_CONFLICT",
    RATE_LIMITED = "RATE_LIMITED",
    PAGE_VALIDATION = "PAGE_VALIDATION",
    PAGE_TOO_LARGE = "PAGE_TOO_LARGE",
    GENERATION_FAILED = "GENERATION_FAILED",
    USER_ABORTED = "USER_ABORTED",
    UNKNOWN = "UNKNOWN"
}
/** Extra context attached to a GnxError. */
export interface GnxErrorOptions {
    /** One-line remediation, shown dimmed under the error message by the CLI. */
    hint?: string;
    /** Underlying error, preserved for debugging. */
    cause?: unknown;
}
/** Error carrying a stable process exit code derived from its ErrorCode. */
export declare class GnxError extends Error {
    readonly code: ErrorCode;
    /** Process exit code (see ranges above); USER_ABORTED maps to 130, UNKNOWN to 1. */
    readonly exitCode: number;
    readonly hint?: string;
    readonly cause?: unknown;
    constructor(code: ErrorCode, message: string, opts?: GnxErrorOptions);
}
@@ -0,0 +1,78 @@
1
/**
 * Typed errors with stable exit codes for `gnx wiki *` commands.
 *
 * Exit code ranges (keep stable — used by shell scripts / CI):
 *   10-19 local git problems
 *   20-29 repo resolution / Hub repo state
 *   30-39 auth / config problems
 *   40-49 Claude Code problems
 *   50-59 network / Hub reachability
 *   60-69 permissions (403, read-only)
 *   70-79 session conflict / rate limit
 *   80-89 upload validation / in-flight failures
 */
export var ErrorCode;
(function (E) {
    // String enum: each member's value equals its name.
    for (const name of [
        'NOT_GIT_REPO', 'NO_COMMITS', 'NO_REMOTE', 'UNPARSEABLE_REMOTE',
        'LOCAL_PATH_REMOTE', 'NOT_CONNECTED', 'CONFIG_CORRUPT', 'AUTH_INVALID',
        'CLAUDE_NOT_INSTALLED', 'CLAUDE_MCP_MISSING', 'CLAUDE_AUTH_FAILED',
        'NETWORK', 'HUB_UNREACHABLE', 'REPO_NOT_ON_HUB', 'REPO_NOT_INDEXED',
        'REPO_READ_ONLY', 'SESSION_CONFLICT', 'RATE_LIMITED', 'PAGE_VALIDATION',
        'PAGE_TOO_LARGE', 'GENERATION_FAILED', 'USER_ABORTED', 'UNKNOWN',
    ]) {
        E[name] = name;
    }
})(ErrorCode || (ErrorCode = {}));
// Process exit code per error code. These values are a public contract
// (see the range table above) — do not renumber.
const EXIT_CODES = {
    [ErrorCode.NOT_GIT_REPO]: 10,
    [ErrorCode.NO_COMMITS]: 11,
    [ErrorCode.NO_REMOTE]: 12,
    [ErrorCode.UNPARSEABLE_REMOTE]: 13,
    [ErrorCode.LOCAL_PATH_REMOTE]: 14,
    [ErrorCode.REPO_NOT_INDEXED]: 20,
    [ErrorCode.REPO_NOT_ON_HUB]: 21,
    [ErrorCode.NOT_CONNECTED]: 30,
    [ErrorCode.CONFIG_CORRUPT]: 31,
    [ErrorCode.AUTH_INVALID]: 32,
    [ErrorCode.CLAUDE_NOT_INSTALLED]: 40,
    [ErrorCode.CLAUDE_MCP_MISSING]: 41,
    [ErrorCode.CLAUDE_AUTH_FAILED]: 42,
    [ErrorCode.NETWORK]: 50,
    [ErrorCode.HUB_UNREACHABLE]: 51,
    [ErrorCode.REPO_READ_ONLY]: 62,
    [ErrorCode.SESSION_CONFLICT]: 70,
    [ErrorCode.RATE_LIMITED]: 72,
    [ErrorCode.PAGE_VALIDATION]: 80,
    [ErrorCode.PAGE_TOO_LARGE]: 81,
    [ErrorCode.GENERATION_FAILED]: 82,
    [ErrorCode.USER_ABORTED]: 130,
    [ErrorCode.UNKNOWN]: 1,
};
/**
 * Error with a stable machine-readable code, the matching process exit code,
 * an optional one-line remediation hint, and the underlying cause.
 */
export class GnxError extends Error {
    code;
    exitCode;
    hint;
    cause;
    constructor(code, message, { hint, cause } = {}) {
        super(message);
        this.name = 'GnxError';
        this.code = code;
        this.exitCode = EXIT_CODES[code];
        this.hint = hint;
        this.cause = cause;
    }
}
@@ -0,0 +1,2 @@
1
import { Command } from 'commander';
/** Registers the `gnx wiki` command group (upload / status / abort) on the CLI. */
export declare function registerWikiCommand(program: Command): void;
@@ -0,0 +1,98 @@
1
import pc from 'picocolors';
import { ok, info, warn, fail } from '../cli-helpers.js';
import { buildRealDeps, REAL_DEFAULT_MODEL } from './real-deps.js';
import { runWikiUpload } from './upload-command.js';
import { runWikiStatus } from './status-command.js';
import { runWikiAbort } from './abort-command.js';
import { GnxError } from './errors.js';
// Prints the error in CLI style and terminates the process. GnxError carries
// a stable exit code (see errors.js) and an optional dimmed hint; any other
// error exits with the generic code 1.
function reportError(err) {
    if (err instanceof GnxError) {
        fail(err.message);
        if (err.hint)
            console.error(` ${pc.dim(err.hint)}`);
        process.exit(err.exitCode);
    }
    fail(err instanceof Error ? err.message : String(err));
    process.exit(1);
}
/**
 * Registers the `gnx wiki` command group on the commander program:
 *   wiki upload — generate pages locally with Claude Code and upload them
 *   wiki status — show the active/last upload session for the current repo
 *   wiki abort  — abort the active upload session
 */
export function registerWikiCommand(program) {
    const wiki = program.command('wiki').description('Client-side wiki generation and upload');
    wiki
        .command('upload')
        .description('Generate the wiki locally with Claude Code and upload it to the Hub')
        .option('--mode <mode>', 'full | incremental', 'full')
        .option('--model <model>', 'Claude model to use', REAL_DEFAULT_MODEL)
        .action(async (opts) => {
        const cwd = process.cwd();
        const deps = buildRealDeps(cwd);
        // An explicit --model overrides the default baked into the deps bundle.
        if (opts.model)
            deps.model = opts.model;
        // Ctrl-C / SIGTERM flips the AbortController so the in-flight upload
        // can abort its Hub session instead of leaving it dangling.
        const ac = new AbortController();
        const onSig = () => {
            warn('interrupt received, aborting session...');
            ac.abort();
        };
        process.on('SIGINT', onSig);
        process.on('SIGTERM', onSig);
        try {
            info(`Resolving repo context...`);
            const result = await runWikiUpload({ cwd, mode: opts.mode, model: opts.model, abortSignal: ac.signal }, deps);
            ok(`Uploaded ${result.pagesPersisted} pages.`);
            if (result.failedSlugs.length > 0) {
                warn(`${result.failedSlugs.length} page(s) failed: ${result.failedSlugs.join(', ')}`);
            }
        }
        catch (err) {
            // NOTE(review): reportError calls process.exit, so the `finally`
            // below never runs on failure — harmless since the process ends.
            reportError(err);
        }
        finally {
            process.off('SIGINT', onSig);
            process.off('SIGTERM', onSig);
        }
    });
    wiki
        .command('status')
        .description('Show the wiki upload status for the current repo')
        .action(async () => {
        try {
            const deps = buildRealDeps(process.cwd());
            const status = await runWikiStatus(deps);
            info(`Repo: ${pc.bold(status.fullName)} (${status.canonicalRemote})`);
            info(`Hub indexed commit: ${status.hubIndexedCommit ?? '(none)'}`);
            if (status.active) {
                info(`Active session: ${pc.yellow(status.active.id)} ` +
                    `started ${status.active.startedAt}, ` +
                    `${status.active.pagesReceived} pages received`);
            }
            else {
                info('No active upload session.');
            }
            if (status.last) {
                info(`Last session: ${status.last.status}, ` +
                    `${status.last.pagesReceived} pages, ` +
                    `started ${status.last.startedAt}`);
            }
        }
        catch (err) {
            reportError(err);
        }
    });
    wiki
        .command('abort')
        .description('Abort the active wiki upload session for the current repo')
        .action(async () => {
        try {
            const deps = buildRealDeps(process.cwd());
            const result = await runWikiAbort(deps);
            // runWikiAbort returns { noActive: true } when nothing was running.
            if ('noActive' in result) {
                info('No active session to abort.');
            }
            else {
                ok(`Aborted session ${result.sessionId}`);
            }
        }
        catch (err) {
            reportError(err);
        }
    });
}
@@ -0,0 +1,11 @@
1
/**
 * Prompt templates for client-side wiki generation via Claude Code headless.
 *
 * The runner passes these prompts to Claude Code with the user's repo as cwd
 * and read-only tools (Read, Grep) plus the gitnexus MCP tools available.
 * Graph context comes from the Hub via MCP — no local LadybugDB needed.
 */
/** Prompt asking Claude to emit the JSON module hierarchy for `fullName`. */
export declare function renderModuleTreePrompt(fullName: string): string;
/** Prompt asking Claude to write one markdown wiki page for the given module. */
export declare function renderPagePrompt(fullName: string, moduleSlug: string, moduleTitle: string): string;
/** renderModuleTreePrompt applied to the `__FULL_NAME__` placeholder token. */
export declare const MODULE_TREE_PROMPT_TEMPLATE: string;
/** renderPagePrompt applied to `__FULL_NAME__`/`__SLUG__`/`__TITLE__` placeholders. */
export declare const PAGE_PROMPT_TEMPLATE: string;
@@ -0,0 +1,72 @@
1
/**
 * Prompt templates for client-side wiki generation via Claude Code headless.
 *
 * The runner passes these prompts to Claude Code with the user's repo as cwd
 * and read-only tools (Read, Grep) plus the gitnexus MCP tools available.
 * Graph context comes from the Hub via MCP — no local LadybugDB needed.
 */
// Prompt for phase 1: ask Claude for a JSON module hierarchy of the repo.
// The template text is a runtime contract with the model — edit with care.
export function renderModuleTreePrompt(fullName) {
    return `You are generating a repository wiki for the repo "${fullName}".

Your task: produce a JSON module hierarchy for this repo.

Use these MCP tools (they query the live Hub-indexed graph):
- mcp__gitnexus__context — overview of the repo
- mcp__gitnexus__group_list — existing cluster/module groupings
- mcp__gitnexus__query — concept/code search

Output ONLY valid JSON with this exact shape (no prose, no markdown fences):

{
  "modules": [
    {
      "slug": "kebab-case-slug",
      "title": "Human-Readable Title",
      "summary": "One-sentence description",
      "files": ["optional/representative/file.ts"]
    }
  ]
}

Rules:
- 6–15 top-level modules (adjust based on repo size).
- "slug" MUST match ^[a-z0-9][a-z0-9/_-]*$ and be unique within the array.
- Include an "overview" module as the first entry.
- Group by responsibility, not file type. "parser" > "typescript-files".
- No duplicates, no empty strings, no trailing punctuation in titles.`;
}
// Prompt for phase 2: ask Claude to write one markdown page per module.
export function renderPagePrompt(fullName, moduleSlug, moduleTitle) {
    return `You are writing a single wiki page for the repo "${fullName}".

Module: "${moduleSlug}" (${moduleTitle})

Use these tools freely:
- mcp__gitnexus__context — caller/callee graph for symbols
- mcp__gitnexus__query — concept search across the codebase
- mcp__gitnexus__group_contracts — structural facts about a group
- Read, Grep — inspect local files in the repo for concrete snippets

Output: a single Markdown document starting with "# ${moduleTitle}" as the H1.

Structure:
# ${moduleTitle}

One-paragraph summary.

## Responsibilities
- Bulleted list of what this module owns.

## Key Components
Short sections per major symbol/class/function with a brief explanation.
Include short code blocks (<= 20 lines) from the actual repo when illustrative.

## How It Fits In
How this module connects to the rest of the system (callers, callees, flows).

Rules:
- Return ONLY the markdown — no preamble, no conclusion, no "Here's the wiki page".
- Do not invent symbols or file paths. If you're unsure, query the graph.
- Keep the total length under 50 KB.`;
}
// Placeholder-token renderings of the templates, exported so the raw template
// text can be inspected without supplying a real repo/module.
export const MODULE_TREE_PROMPT_TEMPLATE = renderModuleTreePrompt('__FULL_NAME__');
export const PAGE_PROMPT_TEMPLATE = renderPagePrompt('__FULL_NAME__', '__SLUG__', '__TITLE__');
@@ -0,0 +1,9 @@
1
import { createClaudeRunner } from './claude.js';
import type { ResolveContextDeps } from './resolve-context.js';
/** Client identifier reported to the Hub, e.g. "gnx/0.4.3" ("gnx/dev" outside npm). */
export declare const REAL_CLIENT_VERSION: string;
/** Default Claude model for generation; overridable via the GNX_WIKI_MODEL env var. */
export declare const REAL_DEFAULT_MODEL: string;
/**
 * Builds the production dependency bundle for the wiki commands: real git
 * invocations, the real HubAPI client, Claude Code preflight checks, and the
 * Claude runner factory.
 */
export declare function buildRealDeps(cwd: string): ResolveContextDeps & {
    createClaudeRunner: typeof createClaudeRunner;
    clientVersion: string;
    model: string;
};
@@ -0,0 +1,63 @@
1
import { execFileSync } from 'child_process';
import fs from 'fs';
import path from 'path';
import os from 'os';
import { HubAPI } from '../api.js';
import { loadConfig as loadConnectConfig } from '../config.js';
import { createClaudeRunner } from './claude.js';
// Reported to the Hub as clientVersion. npm_package_version is typically only
// populated when launched via npm scripts — otherwise this is "gnx/dev".
export const REAL_CLIENT_VERSION = 'gnx/' + (process.env.npm_package_version ?? 'dev');
// Default generation model; GNX_WIKI_MODEL overrides it.
export const REAL_DEFAULT_MODEL = process.env.GNX_WIKI_MODEL ?? 'claude-sonnet-4-6';
// Runs a git subcommand in `cwd` and returns trimmed stdout, or null on any
// failure (no git, not a repo, no such remote, ...). Callers treat null as
// "value absent" and map it to a specific GnxError.
function runGit(cwd, args) {
    try {
        return execFileSync('git', args, { cwd, stdio: ['ignore', 'pipe', 'ignore'] })
            .toString('utf-8')
            .trim();
    }
    catch {
        return null;
    }
}
/**
 * Builds the production dependency bundle consumed by the wiki commands:
 * real git invocations, the real HubAPI client, Claude Code preflight checks,
 * and the Claude runner factory. Tests substitute their own bundle.
 */
export function buildRealDeps(cwd) {
    return {
        cwd,
        loadConfig: loadConnectConfig,
        getGitRoot: (c) => runGit(c, ['rev-parse', '--show-toplevel']),
        getGitRemoteUrl: (c) => runGit(c, ['remote', 'get-url', 'origin']),
        getGitHead: (c) => runGit(c, ['rev-parse', 'HEAD']),
        // Dirty = any porcelain output (staged, unstaged, or untracked).
        getGitDirty: (c) => {
            const out = runGit(c, ['status', '--porcelain']);
            return (out?.length ?? 0) > 0;
        },
        buildApi: (hubUrl, token) => new HubAPI(hubUrl, token),
        // "Installed" check: `claude --version` exits 0 somewhere on PATH.
        verifyClaudeCodeInstalled: async () => {
            try {
                execFileSync('claude', ['--version'], { stdio: 'ignore' });
                return true;
            }
            catch {
                return false;
            }
        },
        // Looks for any MCP server entry in ~/.claude.json whose URL starts
        // with the Hub base URL.
        // NOTE(review): only the top-level mcpServers map is inspected —
        // project-scoped registrations inside .claude.json would not be
        // detected; confirm `gnx connect` always writes a top-level entry.
        verifyClaudeCodeMcpRegistered: async (hubUrl) => {
            try {
                const claudeJson = path.join(os.homedir(), '.claude.json');
                if (!fs.existsSync(claudeJson))
                    return false;
                const parsed = JSON.parse(fs.readFileSync(claudeJson, 'utf-8'));
                const mcpServers = parsed?.mcpServers ?? {};
                for (const key of Object.keys(mcpServers)) {
                    const entry = mcpServers[key];
                    if (entry?.url && String(entry.url).startsWith(hubUrl))
                        return true;
                }
                return false;
            }
            catch {
                return false;
            }
        },
        createClaudeRunner,
        clientVersion: REAL_CLIENT_VERSION,
        model: REAL_DEFAULT_MODEL,
    };
}
@@ -0,0 +1,31 @@
1
import type { HubAPI } from '../api.js';
/** Everything the wiki commands need once local git + Hub state are resolved. */
export interface WikiContext {
    /** Git worktree root (from `git rev-parse --show-toplevel`). */
    repoRoot: string;
    /** Current HEAD commit SHA. */
    headCommit: string;
    /** True when `git status --porcelain` reports any changes. */
    isDirty: boolean;
    /** Canonical "host/path" form of the origin remote. */
    canonicalRemote: string;
    hubRepoId: string;
    hubFullName: string;
    /** Commit the Hub last indexed, or null. */
    hubIndexedCommit: string | null;
    hubIndexedStatus: string;
    hubUrl: string;
    /** Authenticated API client bound to hubUrl. */
    api: HubAPI;
}
/** Injectable dependencies so commands can run without real git/network. */
export interface ResolveContextDeps {
    cwd: string;
    loadConfig: () => Promise<{
        hubToken?: string;
        hubUrl?: string;
    }>;
    /** Git accessors return null when the value can't be obtained. */
    getGitRemoteUrl: (cwd: string) => string | null;
    getGitRoot: (cwd: string) => string | null;
    getGitHead: (cwd: string) => string | null;
    getGitDirty: (cwd: string) => boolean;
    buildApi: (hubUrl: string, token: string) => HubAPI;
    verifyClaudeCodeInstalled: () => Promise<boolean>;
    verifyClaudeCodeMcpRegistered: (hubUrl: string) => Promise<boolean>;
}
export interface ResolveContextOptions {
    /** Skip the Claude Code preflight (used by status/abort, which don't generate). */
    skipClaudeChecks?: boolean;
}
/**
 * Validates local git state, Hub connection, and (optionally) the Claude Code
 * setup, then resolves the repo on the Hub. Throws GnxError on any failure.
 */
export declare function resolveWikiContext(deps: ResolveContextDeps, opts?: ResolveContextOptions): Promise<WikiContext>;
@@ -0,0 +1,82 @@
1
import { canonicalizeGitRemote } from '../git-remote.js';
import { GnxError, ErrorCode } from './errors.js';
/**
 * Resolves everything the wiki commands need before generation starts:
 * local git state (root, HEAD, dirty flag, origin remote), Hub auth/config,
 * optional Claude Code preflight checks, and the Hub-side repo record.
 * Every failure throws a GnxError with a stable exit code and a user hint.
 */
export async function resolveWikiContext(deps, opts = {}) {
    // --- local git preflight ---
    const repoRoot = deps.getGitRoot(deps.cwd);
    if (!repoRoot) {
        throw new GnxError(ErrorCode.NOT_GIT_REPO, 'not a git repository', {
            hint: 'cd into your project, or run: gnx wiki upload <path>',
        });
    }
    const headCommit = deps.getGitHead(repoRoot);
    if (!headCommit) {
        throw new GnxError(ErrorCode.NO_COMMITS, 'git repository has no commits yet', {
            hint: 'commit something before generating a wiki',
        });
    }
    const rawRemote = deps.getGitRemoteUrl(repoRoot);
    if (!rawRemote) {
        throw new GnxError(ErrorCode.NO_REMOTE, "no 'origin' remote found", {
            hint: 'add one with: git remote add origin <url>',
        });
    }
    // Local-path remotes are rejected here, before canonicalization (which
    // would also return null for them), so the user sees the specific
    // LOCAL_PATH_REMOTE error rather than the generic UNPARSEABLE_REMOTE.
    if (rawRemote.startsWith('/') ||
        /^[a-zA-Z]:[\\/]/.test(rawRemote) ||
        rawRemote.startsWith('file://')) {
        throw new GnxError(ErrorCode.LOCAL_PATH_REMOTE, `origin points at a local path ('${rawRemote}')`, { hint: 'gnx needs a public/forge URL to match against the Hub' });
    }
    const canonicalRemote = canonicalizeGitRemote(rawRemote);
    if (!canonicalRemote) {
        // NOTE(review): the hint mentions `--remote <name>`, but wiki/index.js
        // registers no such option — confirm or update the hint text.
        throw new GnxError(ErrorCode.UNPARSEABLE_REMOTE, `origin is '${rawRemote}' which isn't a recognized git URL`, { hint: 'pass --remote <name> to use a different remote' });
    }
    const isDirty = deps.getGitDirty(repoRoot);
    // --- Hub auth/config ---
    const config = await deps.loadConfig();
    if (!config.hubToken) {
        throw new GnxError(ErrorCode.NOT_CONNECTED, 'not connected to the Hub', {
            hint: 'run: gnx connect <token>',
        });
    }
    const hubUrl = config.hubUrl || 'https://gitnexus-enterprise-production.up.railway.app';
    // --- optional Claude Code preflight (skipped by status/abort) ---
    if (!opts.skipClaudeChecks) {
        if (!(await deps.verifyClaudeCodeInstalled())) {
            throw new GnxError(ErrorCode.CLAUDE_NOT_INSTALLED, "Claude Code isn't installed or on PATH", {
                hint: 'install: https://claude.com/claude-code',
            });
        }
        if (!(await deps.verifyClaudeCodeMcpRegistered(hubUrl))) {
            throw new GnxError(ErrorCode.CLAUDE_MCP_MISSING, "Claude Code isn't wired to the Hub MCP server", { hint: 're-run: gnx connect' });
        }
    }
    // --- Hub-side repo resolution ---
    const api = deps.buildApi(hubUrl, config.hubToken);
    let resolve;
    try {
        resolve = await api.resolveRepoByRemote(canonicalRemote);
    }
    catch (err) {
        // HubAPI surfaces HTTP failures as plain Errors; sniff the message to
        // distinguish "repo unknown" (404) from other network problems.
        const msg = err?.message ?? String(err);
        if (msg.includes('404') || msg.toLowerCase().includes('not found')) {
            throw new GnxError(ErrorCode.REPO_NOT_ON_HUB, `${canonicalRemote} isn't indexed on the Hub`, {
                hint: 'add it with: gnx index <owner>/<repo>',
                cause: err,
            });
        }
        throw new GnxError(ErrorCode.NETWORK, `Hub request failed: ${msg}`, { cause: err });
    }
    // Only fully indexed repos can accept wiki uploads.
    if (resolve.status !== 'ready') {
        throw new GnxError(ErrorCode.REPO_NOT_INDEXED, `${resolve.fullName} is on the Hub but still ${resolve.status}`, { hint: 'wait for indexing, or run: gnx sync --wait' });
    }
    if (!resolve.permissions.write) {
        throw new GnxError(ErrorCode.REPO_READ_ONLY, `you have read-only access to ${resolve.fullName}`, { hint: 'wiki uploads require write access — ask an owner to grant write' });
    }
    return {
        repoRoot,
        headCommit,
        isDirty,
        canonicalRemote,
        hubRepoId: resolve.repoId,
        hubFullName: resolve.fullName,
        hubIndexedCommit: resolve.lastCommit,
        hubIndexedStatus: resolve.status,
        hubUrl,
        api,
    };
}
@@ -0,0 +1,33 @@
1
import type { HubAPI } from '../api.js';
/** One module in the generated wiki hierarchy. */
export interface ModuleNode {
    slug: string;
    title: string;
    summary?: string;
    files?: string[];
}
/** Generates the markdown page for one module. */
export interface GeneratePageFn {
    (node: ModuleNode): Promise<{
        title: string;
        contentMd: string;
    }>;
}
/** Inputs for driving one wiki upload session end to end. */
export interface RunSessionDeps {
    api: HubAPI;
    repoId: string;
    mode: 'full' | 'incremental';
    /** HEAD commit the generation is based on. */
    fromCommit: string;
    clientVersion: string;
    clientModel: string;
    /** Modules to generate, processed in order. */
    modules: ModuleNode[];
    /** Raw module tree forwarded to the Hub on finish. */
    moduleTree: unknown;
    generatePage: GeneratePageFn;
    /** Optional per-page progress callbacks. */
    onPageStart?: (slug: string) => void;
    onPageDone?: (slug: string, bytes: number) => void;
    onPageFail?: (slug: string, err: unknown) => void;
}
/** Outcome of a completed upload session. */
export interface RunSessionResult {
    sessionId: string;
    pagesPersisted: number;
    /** Slugs whose generation failed (after one retry) or produced empty output. */
    failedSlugs: string[];
}
export declare function runWikiUploadSession(deps: RunSessionDeps): Promise<RunSessionResult>;
@@ -0,0 +1,72 @@
1
+ import { GnxError, ErrorCode } from './errors.js';
2
/**
 * Drives one complete wiki upload session against the Hub.
 *
 * Flow: start a session, then for each module generate a page (with one
 * retry on generation failure) and upload it, and finally finish the
 * session with the module tree and the slugs the server received.
 * An upload error — or a run in which no page succeeded — aborts the
 * server-side session before rethrowing.
 *
 * @param deps See RunSessionDeps: API client, repo id, session metadata,
 *             modules, page generator, and optional progress callbacks.
 * @returns sessionId, pagesPersisted (from the server's finish response),
 *          and the slugs whose generation failed.
 */
export async function runWikiUploadSession(deps) {
    const { api, repoId } = deps;
    const started = await api.wikiUploadStart(repoId, {
        mode: deps.mode,
        fromCommit: deps.fromCommit,
        clientVersion: deps.clientVersion,
        clientModel: deps.clientModel,
    });
    const sessionId = started.sessionId;
    const receivedSlugs = [];
    const failedSlugs = [];
    let aborted = false;
    // Abort the server-side session at most once, then rethrow the
    // triggering error (wrapping non-Error values in a GnxError).
    const abortOnce = async (reason) => {
        if (aborted)
            return;
        aborted = true;
        try {
            await api.wikiUploadAbort(repoId, sessionId);
        }
        catch {
            /* ignore secondary errors */
        }
        throw reason instanceof Error
            ? reason
            : new GnxError(ErrorCode.GENERATION_FAILED, String(reason));
    };
    // Generate a page with a single retry. Each failed attempt is reported
    // via onPageFail; returns undefined when both attempts fail.
    const generateWithRetry = async (mod) => {
        try {
            return await deps.generatePage(mod);
        }
        catch (err) {
            deps.onPageFail?.(mod.slug, err);
        }
        try {
            return await deps.generatePage(mod);
        }
        catch (err2) {
            deps.onPageFail?.(mod.slug, err2);
            return undefined;
        }
    };
    for (const mod of deps.modules) {
        deps.onPageStart?.(mod.slug);
        const page = await generateWithRetry(mod);
        if (page === undefined) {
            // Both generation attempts failed; record once and move on.
            failedSlugs.push(mod.slug);
            continue;
        }
        if (!page.contentMd || !page.contentMd.trim()) {
            // Blank output counts as a failure and is never uploaded.
            failedSlugs.push(mod.slug);
            deps.onPageFail?.(mod.slug, new Error('empty generation result'));
            continue;
        }
        try {
            await api.wikiUploadPage(repoId, sessionId, {
                slug: mod.slug,
                title: page.title,
                contentMd: page.contentMd,
            });
            receivedSlugs.push(mod.slug);
            deps.onPageDone?.(mod.slug, Buffer.byteLength(page.contentMd, 'utf8'));
        }
        catch (err) {
            // Upload errors are fatal: abort the session and rethrow.
            await abortOnce(err);
        }
    }
    if (receivedSlugs.length === 0) {
        await abortOnce(new GnxError(ErrorCode.GENERATION_FAILED, 'no pages were generated successfully'));
    }
    const finished = await api.wikiUploadFinish(repoId, sessionId, {
        moduleTree: deps.moduleTree,
        receivedSlugs,
    });
    return { sessionId, pagesPersisted: finished.pagesPersisted, failedSlugs };
}
@@ -0,0 +1,8 @@
1
+ import type { ResolveContextDeps } from './resolve-context.js';
2
/**
 * Resolves the current repository against the Hub and reports wiki upload
 * session state: the active session (if any) plus the most recent one.
 */
export declare function runWikiStatus(deps: ResolveContextDeps): Promise<{
    fullName: string;
    canonicalRemote: string;
    hubIndexedCommit: string | null;
    active: unknown;
    last: unknown;
}>;
@@ -0,0 +1,12 @@
1
+ import { resolveWikiContext } from './resolve-context.js';
2
/**
 * Reports the repo's Hub identity together with its wiki upload session
 * status (active session plus the most recent one).
 */
export async function runWikiStatus(deps) {
    // Resolve repo/Hub context without the Claude tooling checks — a
    // status query does not need a runner.
    const ctx = await resolveWikiContext(deps, { skipClaudeChecks: true });
    const { active, last } = await ctx.api.wikiUploadStatus(ctx.hubRepoId);
    const { hubFullName: fullName, canonicalRemote, hubIndexedCommit } = ctx;
    return { fullName, canonicalRemote, hubIndexedCommit, active, last };
}
@@ -0,0 +1,19 @@
1
+ import type { ClaudeRunner } from './claude.js';
2
+ import { type ResolveContextDeps } from './resolve-context.js';
3
/** Caller-facing options for one wiki generation + upload run. */
export interface RunWikiUploadOptions {
    cwd: string;
    /** Upload mode; the implementation defaults to 'full' when omitted. */
    mode?: 'full' | 'incremental';
    // NOTE(review): `model` here appears unused by the implementation,
    // which reads `deps.model` instead — confirm before relying on it.
    model?: string;
    /** Checked between phases to cancel the run cooperatively. */
    abortSignal?: AbortSignal;
}
/** Injected dependencies: context resolution plus the Claude runner factory. */
export interface RunWikiUploadDeps extends ResolveContextDeps {
    createClaudeRunner: () => ClaudeRunner;
    clientVersion: string;
    model: string;
}
/** Outcome of a run: persisted page count, failed slugs, session id. */
export interface RunWikiUploadResult {
    pagesPersisted: number;
    failedSlugs: string[];
    sessionId: string;
}
export declare function runWikiUpload(opts: RunWikiUploadOptions, deps: RunWikiUploadDeps): Promise<RunWikiUploadResult>;
@@ -0,0 +1,300 @@
1
+ import { resolveWikiContext } from './resolve-context.js';
2
+ import { runWikiUploadSession } from './session.js';
3
+ import { GnxError, ErrorCode } from './errors.js';
4
/**
 * Turn a display name into a URL-safe slug: lowercase, with every run of
 * non-alphanumeric characters collapsed to a single hyphen and any
 * leading/trailing hyphen removed.
 */
function slugify(name) {
    const collapsed = name.toLowerCase().replace(/[^a-z0-9]+/g, '-');
    return collapsed.replace(/^-/, '').replace(/-$/, '');
}
10
/**
 * Parse the model's module-tree response into ModuleNode-like objects.
 * Accepts JSON of shape { "Module Name": ["file", ...], ... }, optionally
 * wrapped in a ``` / ```json fence; anything else throws GENERATION_FAILED.
 */
function parseModuleTree(raw) {
    // Strip an optional leading ```json fence and a trailing ``` fence.
    const cleaned = raw
        .trim()
        .replace(/^```(?:json)?\s*/i, '')
        .replace(/```$/, '')
        .trim();
    let parsed;
    try {
        parsed = JSON.parse(cleaned);
    }
    catch (err) {
        throw new GnxError(ErrorCode.GENERATION_FAILED, 'module tree response was not valid JSON', {
            cause: err,
        });
    }
    // NOTE(review): a response shaped { modules: [...] } fails this guard
    // and falls through to the throw below — confirm that is intentional.
    if (parsed && !Array.isArray(parsed.modules) && typeof parsed === 'object') {
        const modules = [];
        for (const [name, files] of Object.entries(parsed)) {
            // Skip entries whose value is not a file list.
            if (!Array.isArray(files))
                continue;
            const slug = slugify(name);
            // Skip names with no alphanumerics (slug would be empty).
            if (!slug)
                continue;
            modules.push({
                slug,
                title: name,
                // Drop any non-string entries the model produced.
                files: files.filter((f) => typeof f === 'string'),
            });
        }
        if (modules.length > 0)
            return modules;
    }
    throw new GnxError(ErrorCode.GENERATION_FAILED, 'module tree had no valid modules');
}
44
// Modules owning more files than this get split into per-directory children.
const SPLIT_FILE_THRESHOLD = 20;
/**
 * Partition an oversized module's file list by leading directory prefix
 * (up to two path segments), yielding one child node per sub-group.
 * Child labels use just the prefix's last segment unless two prefixes
 * share a last segment, in which case the full prefix ('/' → '-')
 * disambiguates.
 */
function splitBySubdirectory(parentName, files) {
    const groups = new Map();
    for (const file of files) {
        const segments = file.replace(/\\/g, '/').split('/');
        const prefix = segments.length > 2 ? segments.slice(0, 2).join('/') : segments[0];
        if (!groups.has(prefix))
            groups.set(prefix, []);
        groups.get(prefix).push(file);
    }
    const lastSegments = [...groups.keys()].map((key) => key.split('/').pop());
    const collide = new Set(lastSegments).size < lastSegments.length;
    const children = [];
    for (const [prefix, groupFiles] of groups) {
        const label = collide ? prefix.replace(/\//g, '-') : prefix.split('/').pop();
        children.push({
            slug: slugify(`${parentName}-${label}`),
            title: `${parentName} — ${label}`,
            files: groupFiles,
        });
    }
    return children;
}
68
/**
 * For every module with more than SPLIT_FILE_THRESHOLD files, attach
 * per-subdirectory children and clear its own file list (in place).
 * Modules that only split into a single group are left untouched.
 */
function applyModuleSplitting(modules) {
    for (const node of modules) {
        const ownFiles = node.files ?? [];
        if (ownFiles.length <= SPLIT_FILE_THRESHOLD)
            continue;
        const children = splitBySubdirectory(node.title, ownFiles);
        // A single-group split gains nothing; keep the node as a leaf.
        if (children.length > 1) {
            node.children = children;
            node.files = [];
        }
    }
    return modules;
}
81
/**
 * Split a (one-level) module tree into leaf pages and parent pages.
 * A node with children contributes each child as a leaf and itself as a
 * parent; a childless node is itself a leaf.
 */
function flattenModuleTree(tree) {
    const leaves = [];
    const parents = [];
    for (const node of tree) {
        const kids = node.children ?? [];
        if (kids.length > 0) {
            leaves.push(...kids);
            parents.push(node);
        }
        else {
            leaves.push(node);
        }
    }
    return { leaves, parents };
}
96
/**
 * Unwrap a page response that may be fenced in ``` / ```markdown;
 * unfenced responses are returned trimmed and otherwise untouched.
 */
function extractPageMarkdown(raw) {
    const trimmed = raw.trim();
    if (!trimmed.startsWith('```'))
        return trimmed;
    return trimmed
        .replace(/^```(?:markdown)?\s*/i, '')
        .replace(/```$/, '')
        .trim();
}
106
/**
 * Substitute every {{KEY}} placeholder in `template` with its value.
 *
 * Uses split/join rather than replaceAll(string, string) so that values
 * are inserted verbatim: replaceAll interprets '$'-sequences ('$&', '$`',
 * "$'") in the replacement string, which would corrupt values containing
 * raw source code.
 *
 * @param template Text containing {{KEY}} placeholders.
 * @param vars     Map of placeholder key → literal replacement value.
 * @returns The template with all occurrences of each placeholder replaced.
 */
function fillTemplate(template, vars) {
    let result = template;
    for (const [key, value] of Object.entries(vars)) {
        result = result.split(`{{${key}}}`).join(value);
    }
    return result;
}
113
/**
 * End-to-end wiki generation + upload for the current repository.
 *
 * Phases: (1) ask the model for a module tree from Hub-provided grouping
 * context; (2a) generate one page per leaf module, (2b) summarize leaf
 * pages into parent pages; (3) generate an overview page; then stream all
 * pages to the Hub via runWikiUploadSession. `opts.abortSignal` is
 * checked between phases and before each page for cooperative cancel.
 *
 * NOTE(review): `opts.model` is never read here — the model comes from
 * `deps.model` throughout; confirm whether the option should win.
 */
export async function runWikiUpload(opts, deps) {
    const ctx = await resolveWikiContext(deps);
    if (opts.abortSignal?.aborted)
        throw new GnxError(ErrorCode.USER_ABORTED, 'aborted before session start');
    const runner = deps.createClaudeRunner();
    // Prompt templates and grouping context are independent; fetch in parallel.
    const [prompts, groupingCtx] = await Promise.all([
        ctx.api.wikiPromptTemplates(ctx.hubRepoId),
        ctx.api.wikiGroupingContext(ctx.hubRepoId),
    ]);
    // Phase 1: Module tree — fill the grouping prompt with Hub context and
    // ask the model to propose a module grouping.
    const groupingPrompt = fillTemplate(prompts.grouping.user, {
        COMMUNITY_GROUPS: groupingCtx.communityGroups,
        INTER_COMMUNITY_EDGES: groupingCtx.interCommunityEdges,
        CROSS_COMMUNITY_PROCESSES: groupingCtx.crossCommunityProcesses,
        FILE_LIST: groupingCtx.filesWithExports,
        DIRECTORY_TREE: groupingCtx.directoryTree,
    });
    const moduleTreeRaw = await runner.run(`${prompts.grouping.system}\n\n${groupingPrompt}`, {
        cwd: ctx.repoRoot,
        model: deps.model,
        allowedTools: [],
    });
    const rawModules = parseModuleTree(moduleTreeRaw.text);
    // Split oversized modules into per-subdirectory children.
    const modules = applyModuleSplitting(rawModules);
    const { leaves, parents } = flattenModuleTree(modules);
    const allPageNodes = [...leaves, ...parents];
    // Markdown link list of every page, used for cross-page linking.
    const moduleRegistry = allPageNodes.map((m) => `- [${m.title}](${m.slug}.md)`).join('\n');
    // slug → generated markdown, so later phases can summarize earlier pages.
    const generatedPages = new Map();
    if (opts.abortSignal?.aborted)
        throw new GnxError(ErrorCode.USER_ABORTED, 'aborted before page generation');
    // Phase 2a: Leaf pages — one model call per leaf, with Hub-provided
    // code context (best-effort: falls back to empty context on API error).
    const generateLeafPage = async (node) => {
        if (opts.abortSignal?.aborted)
            throw new GnxError(ErrorCode.USER_ABORTED, 'aborted during page generation');
        const filePaths = node.files ?? [];
        let leafCtx;
        try {
            leafCtx = await ctx.api.wikiLeafContext(ctx.hubRepoId, node.title, filePaths);
        }
        catch {
            // Context fetch failure is non-fatal; generate from placeholders.
            leafCtx = {
                sourceCode: '',
                intraCalls: 'None',
                outgoingCalls: 'None',
                incomingCalls: 'None',
                processes: 'No execution flows detected.',
            };
        }
        // Registry of every page except this one, for cross-links.
        const reg = allPageNodes
            .filter((m) => m.slug !== node.slug)
            .map((m) => `- [${m.title}](${m.slug}.md)`)
            .join('\n');
        const pagePrompt = fillTemplate(prompts.module.user, {
            MODULE_NAME: node.title,
            SOURCE_CODE: leafCtx.sourceCode,
            INTRA_CALLS: leafCtx.intraCalls,
            OUTGOING_CALLS: leafCtx.outgoingCalls,
            INCOMING_CALLS: leafCtx.incomingCalls,
            PROCESSES: leafCtx.processes,
            MODULE_REGISTRY: reg,
        });
        const result = await runner.run(`${prompts.module.system}\n\n${pagePrompt}`, {
            cwd: ctx.repoRoot,
            model: deps.model,
            allowedTools: [],
        });
        const content = `# ${node.title}\n\n${extractPageMarkdown(result.text)}`;
        generatedPages.set(node.slug, content);
        return { title: node.title, contentMd: content };
    };
    // Phase 2b: Parent pages — summarize already-generated child pages
    // (first 800 chars each) plus aggregate call context.
    const generateParentPage = async (node) => {
        if (opts.abortSignal?.aborted)
            throw new GnxError(ErrorCode.USER_ABORTED, 'aborted during parent page generation');
        const childDocs = (node.children ?? [])
            .map((child) => {
            const content = generatedPages.get(child.slug);
            return content
                ? `#### ${child.title}\n${content.slice(0, 800).trim()}`
                : `#### ${child.title}\n(Documentation not yet generated)`;
        })
            .join('\n\n');
        const allChildFiles = (node.children ?? []).flatMap((c) => c.files ?? []);
        let parentCtx;
        try {
            parentCtx = await ctx.api.wikiLeafContext(ctx.hubRepoId, node.title, allChildFiles);
        }
        catch {
            // Best-effort context, same as leaf pages.
            parentCtx = { intraCalls: 'None', processes: 'No execution flows detected.' };
        }
        const reg = allPageNodes
            .filter((m) => m.slug !== node.slug)
            .map((m) => `- [${m.title}](${m.slug}.md)`)
            .join('\n');
        const parentPrompt = fillTemplate(prompts.parent.user, {
            MODULE_NAME: node.title,
            CHILDREN_DOCS: childDocs,
            CROSS_MODULE_CALLS: parentCtx.intraCalls ?? 'None',
            CROSS_PROCESSES: parentCtx.processes ?? 'No execution flows detected.',
            MODULE_REGISTRY: reg,
        });
        const result = await runner.run(`${prompts.parent.system}\n\n${parentPrompt}`, {
            cwd: ctx.repoRoot,
            model: deps.model,
            allowedTools: [],
        });
        const content = `# ${node.title}\n\n${extractPageMarkdown(result.text)}`;
        generatedPages.set(node.slug, content);
        return { title: node.title, contentMd: content };
    };
    // Phase 3: Overview page — summarize every top-level module (first
    // 600 chars each) plus inter-module edges and top processes.
    const generateOverview = async () => {
        if (opts.abortSignal?.aborted)
            throw new GnxError(ErrorCode.USER_ABORTED, 'aborted during overview generation');
        const moduleSummaries = modules
            .map((m) => {
            const content = generatedPages.get(m.slug);
            return content
                ? `#### ${m.title}\n${content.slice(0, 600).trim()}`
                : `#### ${m.title}\n(Documentation pending)`;
        })
            .join('\n\n');
        // title → all files, flattening split modules back to their children.
        const moduleFiles = {};
        for (const m of modules) {
            const files = m.children ? m.children.flatMap((c) => c.files ?? []) : (m.files ?? []);
            if (files.length > 0)
                moduleFiles[m.title] = files;
        }
        let overviewCtx;
        try {
            overviewCtx = await ctx.api.wikiOverviewContext(ctx.hubRepoId, moduleFiles);
        }
        catch {
            // Best-effort context, same fallback pattern as the other phases.
            overviewCtx = {
                interModuleEdges: 'No inter-module call edges detected',
                topProcesses: 'No execution flows detected.',
            };
        }
        const overviewPrompt = fillTemplate(prompts.overview.user, {
            PROJECT_INFO: `Repository: ${ctx.hubFullName}`,
            MODULE_SUMMARIES: moduleSummaries,
            MODULE_EDGES: overviewCtx.interModuleEdges,
            TOP_PROCESSES: overviewCtx.topProcesses,
            MODULE_REGISTRY: moduleRegistry,
        });
        const result = await runner.run(`${prompts.overview.system}\n\n${overviewPrompt}`, {
            cwd: ctx.repoRoot,
            model: deps.model,
            allowedTools: [],
        });
        return { title: 'Overview', contentMd: extractPageMarkdown(result.text) };
    };
    // Execute: leaves → parents → overview, so each later phase can read
    // the pages the earlier ones stored in generatedPages.
    const overviewNode = { slug: 'overview', title: 'Overview', files: [] };
    const orderedModules = [...leaves, ...parents, overviewNode];
    // Dispatch each node to the matching generator.
    const generatePageByType = async (node) => {
        if (node.slug === 'overview' && (node.files ?? []).length === 0)
            return generateOverview();
        const treeNode = parents.find((p) => p.slug === node.slug);
        if (treeNode?.children)
            return generateParentPage(treeNode);
        return generateLeafPage(node);
    };
    // Flat tree representation sent to the server on session finish.
    const moduleTree = orderedModules.map((m) => ({
        name: m.title,
        slug: m.slug,
        summary: m.summary,
        files: m.files,
    }));
    try {
        return await runWikiUploadSession({
            api: ctx.api,
            repoId: ctx.hubRepoId,
            mode: opts.mode ?? 'full',
            fromCommit: ctx.headCommit,
            clientVersion: deps.clientVersion,
            clientModel: deps.model,
            modules: orderedModules,
            moduleTree,
            generatePage: generatePageByType,
        });
    }
    catch (err) {
        // Re-tag any failure that raced with an abort as a user abort.
        if (opts.abortSignal?.aborted)
            throw new GnxError(ErrorCode.USER_ABORTED, 'user aborted', { cause: err });
        throw err;
    }
}
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "gitnexushub",
3
- "version": "0.4.2",
3
+ "version": "0.4.3",
4
4
  "description": "Connect your editor to GitNexus Hub — one command MCP setup + project context",
5
5
  "author": "Abhigyan Patwari",
6
6
  "license": "PolyForm-Noncommercial-1.0.0",
@@ -35,6 +35,7 @@
35
35
  "prepare": "npm run build"
36
36
  },
37
37
  "dependencies": {
38
+ "@anthropic-ai/claude-agent-sdk": "^0.2.109",
38
39
  "commander": "^12.0.0",
39
40
  "picocolors": "^1.1.1",
40
41
  "tar-stream": "^3.1.8"