tech-debt-visualizer 0.2.0 → 0.2.1

This diff represents the changes between publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects the differences between package versions as they appear in their respective public registries.
package/dist/cli.js CHANGED
@@ -30,9 +30,10 @@ program
30
30
  .option("-f, --format <type>", "Output format: cli | html | json | markdown", "cli")
31
31
  .option("--no-llm", "Skip LLM-powered insights")
32
32
  .option("--llm", "Enable LLM (default). Use with --llm-key and/or --llm-model")
33
- .option("--llm-key <key>", "API key for LLM (overrides GEMINI_API_KEY / OPENAI_API_KEY / OPENROUTER_API_KEY)")
34
- .option("--llm-model <model>", "Model name (e.g. gemini-1.5-flash, gpt-4o-mini)")
35
- .option("--llm-max-tokens <n>", "Max tokens for LLM responses (default: 2048 for overall, 1024 per-file)", (v) => parseInt(v, 10))
33
+ .option("--llm-key <key>", "API key (overrides env: GEMINI_API_KEY, OPENAI_API_KEY, OPENROUTER_API_KEY)")
34
+ .option("--llm-endpoint <url>", "OpenAI-compatible API base URL (e.g. https://api.openai.com/v1 or proxy)")
35
+ .option("--llm-model <model>", "Model name (e.g. gpt-4o-mini, gemini-2.5-flash)")
36
+ .option("--llm-max-tokens <n>", "Max tokens per response", (v) => parseInt(v, 10))
36
37
  .option("--ci", "CI mode: minimal output, exit with non-zero if debt score is high")
37
38
  .action(async (path, opts) => {
38
39
  const repoPath = join(process.cwd(), path);
@@ -41,6 +42,7 @@ program
41
42
  const outputPath = opts.output ?? (format === "html" ? "tech-debt-report.html" : undefined);
42
43
  const llmConfigOverrides = {
43
44
  apiKey: opts.llmKey,
45
+ baseURL: opts.llmEndpoint,
44
46
  model: opts.llmModel,
45
47
  ...(opts.llmMaxTokens != null && opts.llmMaxTokens > 0 ? { maxTokens: opts.llmMaxTokens } : {}),
46
48
  };
@@ -82,9 +84,9 @@ program
82
84
  if (!llmConfig) {
83
85
  progress.update(totalSteps, { task: "Skipping LLM (no key)" });
84
86
  progress.stop();
85
- process.stderr.write(chalk.yellow(" No LLM API key found. Set GEMINI_API_KEY or OPENAI_API_KEY (or use --llm-key <key>).\n" +
86
- " Example: export GEMINI_API_KEY=your_key or --llm-key your_key\n" +
87
- " Or add GEMINI_API_KEY=your_key to a .env file in the current directory.\n" +
87
+ process.stderr.write(chalk.yellow(" No LLM API key found. Use --llm-key <key> or set one of:\n" +
88
+ " GEMINI_API_KEY, OPENAI_API_KEY, OPENROUTER_API_KEY (or .env).\n" +
89
+ " For a custom endpoint: --llm-endpoint <url> --llm-key <key>\n" +
88
90
  " Skipping AI insights for this run.\n\n"));
89
91
  }
90
92
  else {
package/dist/llm.d.ts CHANGED
@@ -18,7 +18,7 @@ export interface LLMConfig {
18
18
  maxTokens?: number;
19
19
  }
20
20
  export type LLMProvider = "openai" | "openrouter" | "gemini";
21
- /** Resolve provider and auth from config + env. When --llm-key is used, provider is inferred from key format so a Gemini key is not sent to OpenRouter. */
21
+ /** Resolve provider and auth from config + env. Explicit baseURL = OpenAI-compatible; else key format or env picks provider. */
22
22
  export declare function resolveLLMConfig(config?: LLMConfig): {
23
23
  provider: LLMProvider;
24
24
  apiKey: string;
package/dist/llm.js CHANGED
@@ -68,13 +68,19 @@ function parseFileAssessmentResponse(raw) {
68
68
  code: code || undefined,
69
69
  };
70
70
  }
71
- /** Resolve provider and auth from config + env. When --llm-key is used, provider is inferred from key format so a Gemini key is not sent to OpenRouter. */
71
+ /** Resolve provider and auth from config + env. Explicit baseURL = OpenAI-compatible; else key format or env picks provider. */
72
72
  export function resolveLLMConfig(config = {}) {
73
+ const explicitBase = (config.baseURL ?? process.env.OPENAI_BASE_URL)?.replace(/\/$/, "");
73
74
  const cliKey = config.apiKey;
74
- const openRouterKey = cliKey ?? process.env.OPENROUTER_API_KEY;
75
- const geminiKey = cliKey ?? process.env.GEMINI_API_KEY ?? process.env.GOOGLE_GENAI_API_KEY;
76
75
  const openaiKey = cliKey ?? process.env.OPENAI_API_KEY ?? process.env.ANTHROPIC_API_KEY;
77
- // When a single key is passed (e.g. --llm-key), pick provider by key format so we don't send a Gemini key to OpenRouter (401 "No cookie auth").
76
+ if (explicitBase && openaiKey) {
77
+ return {
78
+ provider: "openai",
79
+ apiKey: openaiKey,
80
+ baseURL: explicitBase,
81
+ model: config.model ?? process.env.OPENAI_MODEL ?? OPENAI_DEFAULT_MODEL,
82
+ };
83
+ }
78
84
  if (cliKey) {
79
85
  if (cliKey.startsWith("AIza")) {
80
86
  return {
@@ -88,23 +94,25 @@ export function resolveLLMConfig(config = {}) {
88
94
  return {
89
95
  provider: "openai",
90
96
  apiKey: cliKey,
91
- baseURL: config.baseURL ?? process.env.OPENAI_BASE_URL ?? "",
97
+ baseURL: (config.baseURL ?? process.env.OPENAI_BASE_URL ?? "").replace(/\/$/, ""),
92
98
  model: config.model ?? process.env.OPENAI_MODEL ?? OPENAI_DEFAULT_MODEL,
93
99
  };
94
100
  }
95
101
  }
96
- if (openRouterKey) {
102
+ if (cliKey ?? process.env.OPENROUTER_API_KEY) {
103
+ const key = cliKey ?? process.env.OPENROUTER_API_KEY;
97
104
  return {
98
105
  provider: "openrouter",
99
- apiKey: openRouterKey,
106
+ apiKey: key,
100
107
  baseURL: config.baseURL ?? process.env.OPENROUTER_BASE_URL ?? OPENROUTER_BASE,
101
108
  model: config.model ?? process.env.OPENROUTER_MODEL ?? OPENROUTER_DEFAULT_MODEL,
102
109
  };
103
110
  }
104
- if (geminiKey) {
111
+ if (cliKey ?? process.env.GEMINI_API_KEY ?? process.env.GOOGLE_GENAI_API_KEY) {
112
+ const key = cliKey ?? process.env.GEMINI_API_KEY ?? process.env.GOOGLE_GENAI_API_KEY;
105
113
  return {
106
114
  provider: "gemini",
107
- apiKey: geminiKey,
115
+ apiKey: key,
108
116
  baseURL: GEMINI_BASE,
109
117
  model: config.model ?? process.env.GEMINI_MODEL ?? GEMINI_DEFAULT_MODEL,
110
118
  };
@@ -113,7 +121,7 @@ export function resolveLLMConfig(config = {}) {
113
121
  return {
114
122
  provider: "openai",
115
123
  apiKey: openaiKey,
116
- baseURL: config.baseURL ?? process.env.OPENAI_BASE_URL ?? "",
124
+ baseURL: (config.baseURL ?? process.env.OPENAI_BASE_URL ?? "").replace(/\/$/, ""),
117
125
  model: config.model ?? process.env.OPENAI_MODEL ?? OPENAI_DEFAULT_MODEL,
118
126
  };
119
127
  }
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "tech-debt-visualizer",
3
- "version": "0.2.0",
3
+ "version": "0.2.1",
4
4
  "description": "Language-agnostic CLI that analyzes repos and generates interactive technical debt visualizations with AI-powered insights",
5
5
  "type": "module",
6
6
  "main": "dist/index.js",