gitnexus 1.2.1 → 1.2.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/dist/cli/wiki.js +94 -27
  2. package/package.json +1 -1
package/dist/cli/wiki.js CHANGED
@@ -95,44 +95,96 @@ export const wikiCommand = async (inputPath, options) => {
95
95
  return;
96
96
  }
97
97
  // ── Resolve LLM config (with interactive fallback) ─────────────────
98
- // If --api-key was passed via CLI, save it immediately
99
- if (options?.apiKey) {
98
+ // Save any CLI overrides immediately
99
+ if (options?.apiKey || options?.model || options?.baseUrl) {
100
100
  const existing = await loadCLIConfig();
101
- await saveCLIConfig({ ...existing, apiKey: options.apiKey });
102
- console.log(' API key saved to ~/.gitnexus/config.json\n');
101
+ const updates = {};
102
+ if (options.apiKey)
103
+ updates.apiKey = options.apiKey;
104
+ if (options.model)
105
+ updates.model = options.model;
106
+ if (options.baseUrl)
107
+ updates.baseUrl = options.baseUrl;
108
+ await saveCLIConfig({ ...existing, ...updates });
109
+ console.log(' Config saved to ~/.gitnexus/config.json\n');
103
110
  }
111
+ const savedConfig = await loadCLIConfig();
112
+ const hasSavedConfig = !!(savedConfig.apiKey && savedConfig.baseUrl);
113
+ const hasCLIOverrides = !!(options?.apiKey || options?.model || options?.baseUrl);
104
114
  let llmConfig = await resolveLLMConfig({
105
115
  model: options?.model,
106
116
  baseUrl: options?.baseUrl,
107
117
  apiKey: options?.apiKey,
108
118
  });
109
- if (!llmConfig.apiKey) {
119
+ // Run interactive setup if no saved config and no CLI flags provided
120
+ // (even if env vars exist — let user explicitly choose their provider)
121
+ if (!hasSavedConfig && !hasCLIOverrides) {
110
122
  if (!process.stdin.isTTY) {
111
- console.log(' Error: No LLM API key found.');
112
- console.log(' Set OPENAI_API_KEY or GITNEXUS_API_KEY environment variable,');
113
- console.log(' or pass --api-key <key>.\n');
114
- process.exitCode = 1;
115
- return;
116
- }
117
- console.log(' No API key configured.\n');
118
- console.log(' The wiki command requires an LLM API key (OpenAI-compatible).');
119
- console.log(' You can also set OPENAI_API_KEY or GITNEXUS_API_KEY env var.\n');
120
- const key = await prompt(' Enter your API key: ', true);
121
- if (!key) {
122
- console.log('\n No key provided. Aborting.\n');
123
- process.exitCode = 1;
124
- return;
125
- }
126
- const save = await prompt(' Save key to ~/.gitnexus/config.json for future use? (Y/n): ');
127
- if (!save || save.toLowerCase() === 'y' || save.toLowerCase() === 'yes') {
128
- const existing = await loadCLIConfig();
129
- await saveCLIConfig({ ...existing, apiKey: key });
130
- console.log(' Key saved.\n');
123
+ if (!llmConfig.apiKey) {
124
+ console.log(' Error: No LLM API key found.');
125
+ console.log(' Set OPENAI_API_KEY or GITNEXUS_API_KEY environment variable,');
126
+ console.log(' or pass --api-key <key>.\n');
127
+ process.exitCode = 1;
128
+ return;
129
+ }
130
+ // Non-interactive with env var just use it
131
131
  }
132
132
  else {
133
- console.log(' Key will be used for this session only.\n');
133
+ console.log(' No LLM configured. Let\'s set it up.\n');
134
+ console.log(' Supports OpenAI, OpenRouter, or any OpenAI-compatible API.\n');
135
+ // Provider selection
136
+ console.log(' [1] OpenAI (api.openai.com)');
137
+ console.log(' [2] OpenRouter (openrouter.ai)');
138
+ console.log(' [3] Custom endpoint\n');
139
+ const choice = await prompt(' Select provider (1/2/3): ');
140
+ let baseUrl;
141
+ let defaultModel;
142
+ if (choice === '2') {
143
+ baseUrl = 'https://openrouter.ai/api/v1';
144
+ defaultModel = 'openai/gpt-4o-mini';
145
+ }
146
+ else if (choice === '3') {
147
+ baseUrl = await prompt(' Base URL (e.g. http://localhost:11434/v1): ');
148
+ if (!baseUrl) {
149
+ console.log('\n No URL provided. Aborting.\n');
150
+ process.exitCode = 1;
151
+ return;
152
+ }
153
+ defaultModel = 'gpt-4o-mini';
154
+ }
155
+ else {
156
+ baseUrl = 'https://api.openai.com/v1';
157
+ defaultModel = 'gpt-4o-mini';
158
+ }
159
+ // Model
160
+ const modelInput = await prompt(` Model (default: ${defaultModel}): `);
161
+ const model = modelInput || defaultModel;
162
+ // API key — pre-fill hint if env var exists
163
+ const envKey = process.env.GITNEXUS_API_KEY || process.env.OPENAI_API_KEY || '';
164
+ let key;
165
+ if (envKey) {
166
+ const masked = envKey.slice(0, 6) + '...' + envKey.slice(-4);
167
+ const useEnv = await prompt(` Use existing env key (${masked})? (Y/n): `);
168
+ if (!useEnv || useEnv.toLowerCase() === 'y' || useEnv.toLowerCase() === 'yes') {
169
+ key = envKey;
170
+ }
171
+ else {
172
+ key = await prompt(' API key: ', true);
173
+ }
174
+ }
175
+ else {
176
+ key = await prompt(' API key: ', true);
177
+ }
178
+ if (!key) {
179
+ console.log('\n No key provided. Aborting.\n');
180
+ process.exitCode = 1;
181
+ return;
182
+ }
183
+ // Save
184
+ await saveCLIConfig({ apiKey: key, baseUrl, model });
185
+ console.log(' Config saved to ~/.gitnexus/config.json\n');
186
+ llmConfig = { ...llmConfig, apiKey: key, baseUrl, model };
134
187
  }
135
- llmConfig = { ...llmConfig, apiKey: key };
136
188
  }
137
189
  // ── Setup progress bar ──────────────────────────────────────────────
138
190
  const bar = new cliProgress.SingleBar({
@@ -191,6 +243,21 @@ export const wikiCommand = async (inputPath, options) => {
191
243
  }
192
244
  else if (err.message?.includes('API key') || err.message?.includes('API error')) {
193
245
  console.log(`\n LLM Error: ${err.message}\n`);
246
+ // Offer to reconfigure on auth-related failures
247
+ const isAuthError = err.message?.includes('401') || err.message?.includes('403')
248
+ || err.message?.includes('502') || err.message?.includes('authenticate')
249
+ || err.message?.includes('Unauthorized');
250
+ if (isAuthError && process.stdin.isTTY) {
251
+ const answer = await new Promise((resolve) => {
252
+ const rl = readline.createInterface({ input: process.stdin, output: process.stdout });
253
+ rl.question(' Reconfigure LLM settings? (Y/n): ', (ans) => { rl.close(); resolve(ans.trim().toLowerCase()); });
254
+ });
255
+ if (!answer || answer === 'y' || answer === 'yes') {
256
+ // Clear saved config so next run triggers interactive setup
257
+ await saveCLIConfig({});
258
+ console.log(' Config cleared. Run `gitnexus wiki` again to reconfigure.\n');
259
+ }
260
+ }
194
261
  }
195
262
  else {
196
263
  console.log(`\n Error: ${err.message}\n`);
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "gitnexus",
3
- "version": "1.2.1",
3
+ "version": "1.2.3",
4
4
  "description": "Graph-powered code intelligence for AI agents. Index any codebase, query via MCP or CLI.",
5
5
  "author": "Abhigyan Patwari",
6
6
  "license": "PolyForm-Noncommercial-1.0.0",