code-graph-builder 0.3.0 → 0.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/bin/cli.mjs +108 -48
  2. package/package.json +1 -1
package/bin/cli.mjs CHANGED
@@ -113,70 +113,121 @@ async function runSetup() {
113
113
  (await ask(` Workspace path [${WORKSPACE_DIR}]: `)).trim() || WORKSPACE_DIR;
114
114
  log("");
115
115
 
116
- // --- LLM API Key ---
117
- log("── 2/3 LLM API Key (for natural language queries & descriptions) ──");
116
+ // --- LLM Provider ---
117
+ log("── 2/3 LLM Provider (for natural language queries & descriptions) ──");
118
118
  log("");
119
- log(" Supported providers (OpenAI-compatible):");
120
- log(" - Moonshot / Kimi https://platform.moonshot.cn");
121
- log(" - OpenAI https://platform.openai.com");
122
- log(" - DeepSeek https://platform.deepseek.com");
123
- log(" - Any OpenAI-compatible endpoint");
119
+ log(" Select your LLM provider:");
124
120
  log("");
121
+ log(" 1) Moonshot / Kimi https://platform.moonshot.cn");
122
+ log(" 2) OpenAI https://platform.openai.com");
123
+ log(" 3) DeepSeek https://platform.deepseek.com");
124
+ log(" 4) OpenRouter https://openrouter.ai");
125
+ log(" 5) LiteLLM Proxy (OpenAI-compatible gateway)");
126
+ log(" 6) Custom (any OpenAI-compatible endpoint)");
127
+ log(" 7) Skip (configure later)");
128
+ log("");
129
+
130
+ const providers = {
131
+ "1": { name: "Moonshot", url: "https://api.moonshot.cn/v1", model: "kimi-k2.5" },
132
+ "2": { name: "OpenAI", url: "https://api.openai.com/v1", model: "gpt-4o" },
133
+ "3": { name: "DeepSeek", url: "https://api.deepseek.com/v1", model: "deepseek-chat" },
134
+ "4": { name: "OpenRouter", url: "https://openrouter.ai/api/v1", model: "anthropic/claude-sonnet-4" },
135
+ "5": { name: "LiteLLM", url: "http://localhost:4000/v1", model: "gpt-4o" },
136
+ };
125
137
 
126
138
  if (existing.LLM_API_KEY) {
127
- log(` Current key: ${mask(existing.LLM_API_KEY)}`);
139
+ log(` Current: ${mask(existing.LLM_API_KEY)} → ${existing.LLM_BASE_URL || "?"}`);
128
140
  }
129
141
 
130
- const llmKey =
131
- (await ask(" LLM API Key (sk-...): ")).trim() || existing.LLM_API_KEY || "";
142
+ const choice = (await ask(" Choose provider [1-7]: ")).trim() || "7";
132
143
 
144
+ let llmKey = existing.LLM_API_KEY || "";
133
145
  let llmBaseUrl = existing.LLM_BASE_URL || "";
134
146
  let llmModel = existing.LLM_MODEL || "";
135
147
 
136
- if (llmKey) {
137
- log("");
138
- log(" Detecting provider from key...");
139
-
140
- if (llmKey.startsWith("sk-") && !llmKey.startsWith("sk-ant-")) {
141
- // Could be Moonshot, OpenAI, or other
142
- const urlInput = (
143
- await ask(
144
- ` API Base URL [${llmBaseUrl || "https://api.moonshot.cn/v1"}]: `
145
- )
146
- ).trim();
147
- llmBaseUrl = urlInput || llmBaseUrl || "https://api.moonshot.cn/v1";
148
-
149
- if (llmBaseUrl.includes("moonshot")) {
150
- llmModel = (await ask(` Model name [kimi-k2.5]: `)).trim() || "kimi-k2.5";
151
- } else if (llmBaseUrl.includes("openai")) {
152
- llmModel = (await ask(` Model name [gpt-4o]: `)).trim() || "gpt-4o";
153
- } else if (llmBaseUrl.includes("deepseek")) {
154
- llmModel = (await ask(` Model name [deepseek-chat]: `)).trim() || "deepseek-chat";
155
- } else {
156
- llmModel =
157
- (await ask(` Model name [${llmModel || "gpt-4o"}]: `)).trim() ||
158
- llmModel ||
159
- "gpt-4o";
160
- }
148
+ if (choice !== "7") {
149
+ const provider = providers[choice];
150
+
151
+ if (provider) {
152
+ log(`\n → ${provider.name} selected`);
153
+ llmBaseUrl = provider.url;
154
+ llmModel = provider.model;
155
+ } else {
156
+ // Choice "6" or invalid custom
157
+ log("\n → Custom provider");
158
+ llmBaseUrl = (await ask(" API Base URL: ")).trim() || llmBaseUrl;
159
+ llmModel = (await ask(" Model name: ")).trim() || llmModel || "gpt-4o";
160
+ }
161
+
162
+ llmKey = (await ask(` API Key (sk-...): `)).trim() || existing.LLM_API_KEY || "";
163
+
164
+ if (llmKey) {
165
+ // Allow overriding URL and model
166
+ const urlOverride = (await ask(` Base URL [${llmBaseUrl}]: `)).trim();
167
+ if (urlOverride) llmBaseUrl = urlOverride;
168
+ const modelOverride = (await ask(` Model [${llmModel}]: `)).trim();
169
+ if (modelOverride) llmModel = modelOverride;
161
170
  }
162
171
  }
163
172
  log("");
164
173
 
165
- // --- Embedding API Key ---
166
- log("── 3/3 Embedding API Key (for semantic code search) ─────");
174
+ // --- Embedding Provider ---
175
+ log("── 3/3 Embedding Provider (for semantic code search) ─────");
176
+ log("");
177
+ log(" Select your embedding provider:");
167
178
  log("");
168
- log(" Used for vector embedding of code (Qwen3 text-embedding-v4).");
169
- log(" Get a free key at: https://dashscope.console.aliyun.com");
179
+ log(" 1) DashScope / Qwen https://dashscope.console.aliyun.com (free tier)");
180
+ log(" 2) OpenAI Embeddings https://platform.openai.com");
181
+ log(" 3) Custom (any OpenAI-compatible embedding endpoint)");
182
+ log(" 4) Skip (configure later)");
170
183
  log("");
171
184
 
172
- if (existing.DASHSCOPE_API_KEY) {
173
- log(` Current key: ${mask(existing.DASHSCOPE_API_KEY)}`);
185
+ const embedProviders = {
186
+ "1": { name: "DashScope", url: "https://dashscope.aliyuncs.com/api/v1", model: "text-embedding-v4", keyEnv: "DASHSCOPE_API_KEY", urlEnv: "DASHSCOPE_BASE_URL" },
187
+ "2": { name: "OpenAI", url: "https://api.openai.com/v1", model: "text-embedding-3-small", keyEnv: "OPENAI_API_KEY", urlEnv: "OPENAI_BASE_URL" },
188
+ };
189
+
190
+ if (existing.DASHSCOPE_API_KEY || existing.EMBED_API_KEY) {
191
+ const ek = existing.DASHSCOPE_API_KEY || existing.EMBED_API_KEY;
192
+ log(` Current: ${mask(ek)} → ${existing.DASHSCOPE_BASE_URL || existing.EMBED_BASE_URL || "?"}`);
174
193
  }
175
194
 
176
- const dashscopeKey =
177
- (await ask(" DashScope API Key (sk-...): ")).trim() ||
178
- existing.DASHSCOPE_API_KEY ||
179
- "";
195
+ const embedChoice = (await ask(" Choose provider [1-4]: ")).trim() || "4";
196
+
197
+ let embedKey = "";
198
+ let embedUrl = "";
199
+ let embedModel = "";
200
+ let embedKeyEnv = "DASHSCOPE_API_KEY";
201
+ let embedUrlEnv = "DASHSCOPE_BASE_URL";
202
+
203
+ if (embedChoice !== "4") {
204
+ const ep = embedProviders[embedChoice];
205
+
206
+ if (ep) {
207
+ log(`\n → ${ep.name} selected`);
208
+ embedUrl = ep.url;
209
+ embedModel = ep.model;
210
+ embedKeyEnv = ep.keyEnv;
211
+ embedUrlEnv = ep.urlEnv;
212
+ } else {
213
+ // Choice "3" or invalid → custom
214
+ log("\n → Custom embedding provider");
215
+ embedUrl = (await ask(" Embedding API Base URL: ")).trim();
216
+ embedModel = (await ask(" Embedding model name: ")).trim() || "text-embedding-3-small";
217
+ embedKeyEnv = "EMBED_API_KEY";
218
+ embedUrlEnv = "EMBED_BASE_URL";
219
+ }
220
+
221
+ embedKey = (await ask(` API Key: `)).trim() ||
222
+ existing[embedKeyEnv] || existing.DASHSCOPE_API_KEY || "";
223
+
224
+ if (embedKey) {
225
+ const urlOverride = (await ask(` Base URL [${embedUrl}]: `)).trim();
226
+ if (urlOverride) embedUrl = urlOverride;
227
+ const modelOverride = (await ask(` Model [${embedModel}]: `)).trim();
228
+ if (modelOverride) embedModel = modelOverride;
229
+ }
230
+ }
180
231
 
181
232
  rl.close();
182
233
 
@@ -186,18 +237,27 @@ async function runSetup() {
186
237
  LLM_API_KEY: llmKey,
187
238
  LLM_BASE_URL: llmBaseUrl,
188
239
  LLM_MODEL: llmModel,
189
- DASHSCOPE_API_KEY: dashscopeKey,
190
- DASHSCOPE_BASE_URL: "https://dashscope.aliyuncs.com/api/v1",
191
240
  };
192
241
 
242
+ // Save embedding config with the correct env var names
243
+ if (embedKey) {
244
+ config[embedKeyEnv] = embedKey;
245
+ config[embedUrlEnv] = embedUrl;
246
+ if (embedModel) config.EMBED_MODEL = embedModel;
247
+ }
248
+
193
249
  saveEnvFile(config);
194
250
 
251
+ const embedDisplay = embedKey
252
+ ? `${mask(embedKey)} → ${embedModel || embedUrl}`
253
+ : "not configured (optional)";
254
+
195
255
  log("");
196
256
  log("── Configuration saved ─────────────────────────────────────");
197
257
  log(` File: ${ENV_FILE}`);
198
258
  log("");
199
259
  log(" LLM: " + (llmKey ? `${mask(llmKey)} → ${llmModel}` : "not configured (optional)"));
200
- log(" Embedding: " + (dashscopeKey ? mask(dashscopeKey) : "not configured (optional)"));
260
+ log(" Embedding: " + embedDisplay);
201
261
  log(" Workspace: " + workspace);
202
262
  log("");
203
263
  log("── Next steps ──────────────────────────────────────────────");
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "code-graph-builder",
3
- "version": "0.3.0",
3
+ "version": "0.4.0",
4
4
  "description": "Code knowledge graph builder with MCP server for AI-assisted code navigation",
5
5
  "license": "MIT",
6
6
  "bin": {