explainthisrepo 0.9.4 → 0.9.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/init.js +48 -9
- package/dist/providers/openrouter.d.ts +17 -0
- package/dist/providers/openrouter.js +72 -0
- package/dist/providers/registry.js +2 -0
- package/package.json +62 -62
package/dist/init.js
CHANGED
|
@@ -7,6 +7,7 @@ const PROVIDERS = {
|
|
|
7
7
|
"2": "openai",
|
|
8
8
|
"3": "ollama",
|
|
9
9
|
"4": "anthropic",
|
|
10
|
+
"5": "openrouter",
|
|
10
11
|
};
|
|
11
12
|
export async function runInit() {
|
|
12
13
|
const err = process.stderr;
|
|
@@ -18,7 +19,7 @@ export async function runInit() {
|
|
|
18
19
|
"[llm]",
|
|
19
20
|
`provider = "${provider}"`,
|
|
20
21
|
"",
|
|
21
|
-
`[providers.${provider}]
|
|
22
|
+
`[providers.${provider}]`,
|
|
22
23
|
];
|
|
23
24
|
for (const [k, v] of Object.entries(providerConfig)) {
|
|
24
25
|
lines.push(`${k} = "${v}"`);
|
|
@@ -44,6 +45,7 @@ async function promptProvider() {
|
|
|
44
45
|
err.write(" 2) OpenAI\n");
|
|
45
46
|
err.write(" 3) Ollama (local)\n");
|
|
46
47
|
err.write(" 4) Anthropic\n");
|
|
48
|
+
err.write(" 5) OpenRouter\n");
|
|
47
49
|
const choice = (await prompt("> ")).trim();
|
|
48
50
|
const provider = PROVIDERS[choice];
|
|
49
51
|
if (!provider) {
|
|
@@ -66,24 +68,61 @@ async function promptProviderConfig(provider) {
|
|
|
66
68
|
}
|
|
67
69
|
return { api_key: key };
|
|
68
70
|
}
|
|
71
|
+
if (provider === "anthropic") {
|
|
72
|
+
const key = (await promptHidden("Anthropic API key: ")).trim();
|
|
73
|
+
if (!key) {
|
|
74
|
+
throw new Error("API key cannot be empty");
|
|
75
|
+
}
|
|
76
|
+
return { api_key: key };
|
|
77
|
+
}
|
|
69
78
|
if (provider === "ollama") {
|
|
70
79
|
const model = (await prompt("Ollama model (e.g. llama3, glm-5:cloud): ")).trim();
|
|
71
80
|
if (!model) {
|
|
72
81
|
throw new Error("Model cannot be empty");
|
|
73
82
|
}
|
|
74
|
-
const host = (await prompt("Ollama host [http://localhost:11434]: ")).trim()
|
|
75
|
-
|
|
83
|
+
const host = (await prompt("Ollama host [http://localhost:11434]: ")).trim() ||
|
|
84
|
+
"http://localhost:11434";
|
|
76
85
|
return {
|
|
77
86
|
model,
|
|
78
|
-
host
|
|
87
|
+
host,
|
|
79
88
|
};
|
|
80
89
|
}
|
|
81
|
-
if (provider === "
|
|
82
|
-
const key = (await promptHidden("
|
|
90
|
+
if (provider === "openrouter") {
|
|
91
|
+
const key = (await promptHidden("OpenRouter API key: ")).trim();
|
|
83
92
|
if (!key) {
|
|
84
93
|
throw new Error("API key cannot be empty");
|
|
85
94
|
}
|
|
86
|
-
|
|
95
|
+
const err = process.stderr;
|
|
96
|
+
err.write(chalk.bold("Select OpenRouter model:\n"));
|
|
97
|
+
err.write(" 1) openai/gpt-4o (balanced)\n");
|
|
98
|
+
err.write(" 2) anthropic/claude-3.5-sonnet (reasoning)\n");
|
|
99
|
+
err.write(" 3) meta-llama/llama-3-70b-instruct (open)\n");
|
|
100
|
+
err.write(" 4) deepseek/deepseek-chat (cheap/fast)\n");
|
|
101
|
+
err.write(" 5) Enter model manually\n");
|
|
102
|
+
const choice = (await prompt("> ")).trim();
|
|
103
|
+
const modelMap = {
|
|
104
|
+
"1": "openai/gpt-4o",
|
|
105
|
+
"2": "anthropic/claude-3.5-sonnet",
|
|
106
|
+
"3": "meta-llama/llama-3-70b-instruct",
|
|
107
|
+
"4": "deepseek/deepseek-chat",
|
|
108
|
+
};
|
|
109
|
+
let model;
|
|
110
|
+
if (choice === "5") {
|
|
111
|
+
model = (await prompt("Enter model (provider/model): ")).trim();
|
|
112
|
+
}
|
|
113
|
+
else {
|
|
114
|
+
model = modelMap[choice];
|
|
115
|
+
if (!model) {
|
|
116
|
+
throw new Error("Invalid model selection");
|
|
117
|
+
}
|
|
118
|
+
}
|
|
119
|
+
if (!model || !model.trim()) {
|
|
120
|
+
throw new Error("Model cannot be empty");
|
|
121
|
+
}
|
|
122
|
+
return {
|
|
123
|
+
api_key: key,
|
|
124
|
+
model,
|
|
125
|
+
};
|
|
87
126
|
}
|
|
88
127
|
throw new Error(`Unsupported provider: ${provider}`);
|
|
89
128
|
}
|
|
@@ -91,7 +130,7 @@ function prompt(label) {
|
|
|
91
130
|
const rl = readline.createInterface({
|
|
92
131
|
input: process.stdin,
|
|
93
132
|
output: process.stderr,
|
|
94
|
-
terminal: true
|
|
133
|
+
terminal: true,
|
|
95
134
|
});
|
|
96
135
|
return new Promise((resolve) => {
|
|
97
136
|
rl.question(label, (answer) => {
|
|
@@ -107,7 +146,7 @@ function promptHidden(label) {
|
|
|
107
146
|
const rl = readline.createInterface({
|
|
108
147
|
input: process.stdin,
|
|
109
148
|
output: undefined,
|
|
110
|
-
terminal: true
|
|
149
|
+
terminal: true,
|
|
111
150
|
});
|
|
112
151
|
rl._writeToOutput = () => { };
|
|
113
152
|
rl.question("", (answer) => {
|
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
import { LLMProvider } from "./base.js";
|
|
2
|
+
type OpenRouterConfig = {
|
|
3
|
+
api_key?: string;
|
|
4
|
+
model?: string;
|
|
5
|
+
};
|
|
6
|
+
export declare class OpenRouterProvider implements LLMProvider {
|
|
7
|
+
name: string;
|
|
8
|
+
private apiKey;
|
|
9
|
+
private model;
|
|
10
|
+
private client;
|
|
11
|
+
constructor(config?: OpenRouterConfig);
|
|
12
|
+
validateConfig(): void;
|
|
13
|
+
private getClient;
|
|
14
|
+
generate(prompt: string): Promise<string>;
|
|
15
|
+
doctor(): Promise<string[]>;
|
|
16
|
+
}
|
|
17
|
+
export {};
|
|
@@ -0,0 +1,72 @@
|
|
|
1
|
+
import { LLMProviderError } from "./base.js";
|
|
2
|
+
const BASE_URL = "https://openrouter.ai/api/v1";
|
|
3
|
+
export class OpenRouterProvider {
|
|
4
|
+
name = "openrouter";
|
|
5
|
+
apiKey;
|
|
6
|
+
model;
|
|
7
|
+
client = null;
|
|
8
|
+
constructor(config = {}) {
|
|
9
|
+
this.apiKey = config.api_key ?? "";
|
|
10
|
+
this.model = config.model;
|
|
11
|
+
this.validateConfig();
|
|
12
|
+
}
|
|
13
|
+
validateConfig() {
|
|
14
|
+
if (!this.apiKey || !this.apiKey.trim()) {
|
|
15
|
+
throw new LLMProviderError("OpenRouter provider requires an API key.\n" +
|
|
16
|
+
"Run `explainthisrepo init` or set providers.openrouter.api_key.");
|
|
17
|
+
}
|
|
18
|
+
if (!this.model || !String(this.model).trim()) {
|
|
19
|
+
throw new LLMProviderError("OpenRouter provider requires a model.\n" +
|
|
20
|
+
"Set providers.openrouter.model (e.g. openai/gpt-4o, deepseek/deepseek-chat).");
|
|
21
|
+
}
|
|
22
|
+
}
|
|
23
|
+
getClient() {
|
|
24
|
+
if (this.client) {
|
|
25
|
+
return this.client;
|
|
26
|
+
}
|
|
27
|
+
try {
|
|
28
|
+
const { OpenAI } = require("openai");
|
|
29
|
+
this.client = new OpenAI({
|
|
30
|
+
apiKey: this.apiKey,
|
|
31
|
+
baseURL: BASE_URL,
|
|
32
|
+
});
|
|
33
|
+
return this.client;
|
|
34
|
+
}
|
|
35
|
+
catch {
|
|
36
|
+
throw new LLMProviderError("OpenRouter support is not installed.\n" +
|
|
37
|
+
"Install it with:\n" +
|
|
38
|
+
" npm install openai");
|
|
39
|
+
}
|
|
40
|
+
}
|
|
41
|
+
async generate(prompt) {
|
|
42
|
+
const client = this.getClient();
|
|
43
|
+
let response;
|
|
44
|
+
try {
|
|
45
|
+
response = await client.chat.completions.create({
|
|
46
|
+
model: this.model,
|
|
47
|
+
messages: [{ role: "user", content: prompt }],
|
|
48
|
+
});
|
|
49
|
+
}
|
|
50
|
+
catch (err) {
|
|
51
|
+
const message = err?.message ? String(err.message) : String(err);
|
|
52
|
+
throw new LLMProviderError(`OpenRouter request failed: ${message}`);
|
|
53
|
+
}
|
|
54
|
+
let text = null;
|
|
55
|
+
try {
|
|
56
|
+
text = response?.choices?.[0]?.message?.content ?? null;
|
|
57
|
+
}
|
|
58
|
+
catch {
|
|
59
|
+
text = null;
|
|
60
|
+
}
|
|
61
|
+
if (!text || !text.trim()) {
|
|
62
|
+
throw new LLMProviderError("OpenRouter returned no text");
|
|
63
|
+
}
|
|
64
|
+
return text.trim();
|
|
65
|
+
}
|
|
66
|
+
async doctor() {
|
|
67
|
+
return [
|
|
68
|
+
`OPENROUTER_API_KEY set: ${Boolean(this.apiKey)}`,
|
|
69
|
+
`model: ${this.model}`,
|
|
70
|
+
];
|
|
71
|
+
}
|
|
72
|
+
}
|
|
@@ -4,11 +4,13 @@ import { GeminiProvider } from "./gemini.js";
|
|
|
4
4
|
import { OpenAIProvider } from "./openai.js";
|
|
5
5
|
import { OllamaProvider } from "./ollama.js";
|
|
6
6
|
import { AnthropicProvider } from "./anthropic.js";
|
|
7
|
+
import { OpenRouterProvider } from "./openrouter.js";
|
|
7
8
|
const PROVIDER_REGISTRY = {
|
|
8
9
|
gemini: GeminiProvider,
|
|
9
10
|
openai: OpenAIProvider,
|
|
10
11
|
ollama: OllamaProvider,
|
|
11
12
|
anthropic: AnthropicProvider,
|
|
13
|
+
openrouter: OpenRouterProvider,
|
|
12
14
|
};
|
|
13
15
|
export function listProviders() {
|
|
14
16
|
return Object.keys(PROVIDER_REGISTRY);
|
package/package.json
CHANGED
|
@@ -1,62 +1,62 @@
|
|
|
1
|
-
{
|
|
2
|
-
"name": "explainthisrepo",
|
|
3
|
-
"version": "0.9.4",
|
|
4
|
-
"description": "The fastest way to understand any codebase in plain English. Not blind AI summarization",
|
|
5
|
-
"license": "MIT",
|
|
6
|
-
"type": "module",
|
|
7
|
-
"author": "Caleb Wodi <calebwodi33@gmail.com>",
|
|
8
|
-
"homepage": "https://explainthisrepo.com",
|
|
9
|
-
"repository": {
|
|
10
|
-
"type": "git",
|
|
11
|
-
"url": "git+https://github.com/calchiwo/ExplainThisRepo.git",
|
|
12
|
-
"directory": "node_version"
|
|
13
|
-
},
|
|
14
|
-
"bugs": {
|
|
15
|
-
"url": "https://github.com/calchiwo/ExplainThisRepo/issues"
|
|
16
|
-
},
|
|
17
|
-
"keywords": [
|
|
18
|
-
"github",
|
|
19
|
-
"cli",
|
|
20
|
-
"explain",
|
|
21
|
-
"repository",
|
|
22
|
-
"ai",
|
|
23
|
-
"repo-analysis",
|
|
24
|
-
"code-explanater",
|
|
25
|
-
"documentation",
|
|
26
|
-
"developer-productivity",
|
|
27
|
-
"developer-tools"
|
|
28
|
-
],
|
|
29
|
-
"bin": {
|
|
30
|
-
"explainthisrepo": "dist/cli.js",
|
|
31
|
-
"explain-this-repo": "dist/cli.js",
|
|
32
|
-
"etr": "dist/cli.js"
|
|
33
|
-
},
|
|
34
|
-
"files": [
|
|
35
|
-
"dist",
|
|
36
|
-
"README.md",
|
|
37
|
-
"LICENSE"
|
|
38
|
-
],
|
|
39
|
-
"scripts": {
|
|
40
|
-
"build": "tsc",
|
|
41
|
-
"start": "node dist/cli.js",
|
|
42
|
-
"sync-meta": "cp ../README.md README.md && cp ../LICENSE LICENSE",
|
|
43
|
-
"prepublishOnly": "npm run sync-meta && npm run build"
|
|
44
|
-
},
|
|
45
|
-
"engines": {
|
|
46
|
-
"node": ">=20"
|
|
47
|
-
},
|
|
48
|
-
"dependencies": {
|
|
49
|
-
"@google/generative-ai": "^0.24.1",
|
|
50
|
-
"@iarna/toml": "^2.2.5",
|
|
51
|
-
"axios": "^1.13.2",
|
|
52
|
-
"commander": "^14.0.3",
|
|
53
|
-
"dotenv": "^17.2.3",
|
|
54
|
-
"openai": "^4.0.0",
|
|
55
|
-
"ora": "^9.3.0",
|
|
56
|
-
"toml": "^3.0.0"
|
|
57
|
-
},
|
|
58
|
-
"devDependencies": {
|
|
59
|
-
"@types/node": "^22.0.0",
|
|
60
|
-
"typescript": "^5.6.0"
|
|
61
|
-
}
|
|
62
|
-
}
|
|
1
|
+
{
|
|
2
|
+
"name": "explainthisrepo",
|
|
3
|
+
"version": "0.9.5",
|
|
4
|
+
"description": "The fastest way to understand any codebase in plain English. Not blind AI summarization",
|
|
5
|
+
"license": "MIT",
|
|
6
|
+
"type": "module",
|
|
7
|
+
"author": "Caleb Wodi <calebwodi33@gmail.com>",
|
|
8
|
+
"homepage": "https://explainthisrepo.com",
|
|
9
|
+
"repository": {
|
|
10
|
+
"type": "git",
|
|
11
|
+
"url": "git+https://github.com/calchiwo/ExplainThisRepo.git",
|
|
12
|
+
"directory": "node_version"
|
|
13
|
+
},
|
|
14
|
+
"bugs": {
|
|
15
|
+
"url": "https://github.com/calchiwo/ExplainThisRepo/issues"
|
|
16
|
+
},
|
|
17
|
+
"keywords": [
|
|
18
|
+
"github",
|
|
19
|
+
"cli",
|
|
20
|
+
"explain",
|
|
21
|
+
"repository",
|
|
22
|
+
"ai",
|
|
23
|
+
"repo-analysis",
|
|
24
|
+
"code-explanater",
|
|
25
|
+
"documentation",
|
|
26
|
+
"developer-productivity",
|
|
27
|
+
"developer-tools"
|
|
28
|
+
],
|
|
29
|
+
"bin": {
|
|
30
|
+
"explainthisrepo": "dist/cli.js",
|
|
31
|
+
"explain-this-repo": "dist/cli.js",
|
|
32
|
+
"etr": "dist/cli.js"
|
|
33
|
+
},
|
|
34
|
+
"files": [
|
|
35
|
+
"dist",
|
|
36
|
+
"README.md",
|
|
37
|
+
"LICENSE"
|
|
38
|
+
],
|
|
39
|
+
"scripts": {
|
|
40
|
+
"build": "tsc",
|
|
41
|
+
"start": "node dist/cli.js",
|
|
42
|
+
"sync-meta": "cp ../README.md README.md && cp ../LICENSE LICENSE",
|
|
43
|
+
"prepublishOnly": "npm run sync-meta && npm run build"
|
|
44
|
+
},
|
|
45
|
+
"engines": {
|
|
46
|
+
"node": ">=20"
|
|
47
|
+
},
|
|
48
|
+
"dependencies": {
|
|
49
|
+
"@google/generative-ai": "^0.24.1",
|
|
50
|
+
"@iarna/toml": "^2.2.5",
|
|
51
|
+
"axios": "^1.13.2",
|
|
52
|
+
"commander": "^14.0.3",
|
|
53
|
+
"dotenv": "^17.2.3",
|
|
54
|
+
"openai": "^4.0.0",
|
|
55
|
+
"ora": "^9.3.0",
|
|
56
|
+
"toml": "^3.0.0"
|
|
57
|
+
},
|
|
58
|
+
"devDependencies": {
|
|
59
|
+
"@types/node": "^22.0.0",
|
|
60
|
+
"typescript": "^5.6.0"
|
|
61
|
+
}
|
|
62
|
+
}
|