nvicode 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +52 -0
- package/dist/cli.js +289 -0
- package/dist/config.js +56 -0
- package/dist/models.js +61 -0
- package/dist/proxy.js +503 -0
- package/package.json +49 -0
package/LICENSE
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2026
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
package/README.md
ADDED
|
@@ -0,0 +1,52 @@
|
|
|
1
|
+
# nvicode
|
|
2
|
+
|
|
3
|
+
Run Claude Code through NVIDIA-hosted models using a local Anthropic-compatible gateway.
|
|
4
|
+
|
|
5
|
+
## Install
|
|
6
|
+
|
|
7
|
+
Published package:
|
|
8
|
+
|
|
9
|
+
```sh
|
|
10
|
+
npm install -g nvicode
|
|
11
|
+
```
|
|
12
|
+
|
|
13
|
+
Local development:
|
|
14
|
+
|
|
15
|
+
```sh
|
|
16
|
+
npm install
|
|
17
|
+
npm run build
|
|
18
|
+
ln -sf "$(pwd)/dist/cli.js" ~/.local/bin/nvicode
|
|
19
|
+
```
|
|
20
|
+
|
|
21
|
+
## Usage
|
|
22
|
+
|
|
23
|
+
Choose a model and save your NVIDIA API key:
|
|
24
|
+
|
|
25
|
+
```sh
|
|
26
|
+
nvicode select model
|
|
27
|
+
```
|
|
28
|
+
|
|
29
|
+
Launch Claude Code through the local gateway:
|
|
30
|
+
|
|
31
|
+
```sh
|
|
32
|
+
nvicode launch claude
|
|
33
|
+
```
|
|
34
|
+
|
|
35
|
+
Useful commands:
|
|
36
|
+
|
|
37
|
+
```sh
|
|
38
|
+
nvicode models
|
|
39
|
+
nvicode config
|
|
40
|
+
nvicode auth
|
|
41
|
+
nvicode launch claude -p "Reply with exactly OK"
|
|
42
|
+
```
|
|
43
|
+
|
|
44
|
+
The launcher starts a local proxy on `127.0.0.1:8788`, points Claude Code at it with `ANTHROPIC_BASE_URL`, and forwards requests to NVIDIA `chat/completions`.
|
|
45
|
+
|
|
46
|
+
If no NVIDIA API key is saved yet, `nvicode` prompts for one on first use.
|
|
47
|
+
|
|
48
|
+
## Notes
|
|
49
|
+
|
|
50
|
+
- `thinking` is disabled by default because some NVIDIA reasoning models can consume the entire output budget and return no visible answer to Claude Code.
|
|
51
|
+
- The proxy supports basic text, tool calls, tool results, and token count estimation.
|
|
52
|
+
- Claude Code remains the frontend; the selected NVIDIA model becomes the backend.
|
package/dist/cli.js
ADDED
|
@@ -0,0 +1,289 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
import { createInterface } from "node:readline/promises";
|
|
3
|
+
import { constants, openSync } from "node:fs";
|
|
4
|
+
import { promises as fs } from "node:fs";
|
|
5
|
+
import os from "node:os";
|
|
6
|
+
import path from "node:path";
|
|
7
|
+
import process from "node:process";
|
|
8
|
+
import { spawn } from "node:child_process";
|
|
9
|
+
import { fileURLToPath } from "node:url";
|
|
10
|
+
import { getNvicodePaths, loadConfig, saveConfig, } from "./config.js";
|
|
11
|
+
import { createProxyServer } from "./proxy.js";
|
|
12
|
+
import { CURATED_MODELS, getRecommendedModels } from "./models.js";
|
|
13
|
+
const __filename = fileURLToPath(import.meta.url);
|
|
14
|
+
// Print top-level CLI help to stdout.
// NOTE(review): column alignment of the command table is reconstructed from
// the diff view — confirm against the published tarball.
const usage = () => {
    console.log(`nvicode

Commands:
  nvicode select model         Select and save a NVIDIA model
  nvicode models               Show recommended coding models
  nvicode auth                 Save or update NVIDIA API key
  nvicode config               Show current nvicode config
  nvicode launch claude [...]  Launch Claude Code through nvicode
  nvicode serve                Run the local proxy in the foreground
`);
};
|
|
26
|
+
// Ask one question on stdin/stdout and return the trimmed answer.
// A fresh readline interface is created per prompt and always closed in
// `finally` so stdin is released even if the read throws.
const question = async (prompt) => {
    const rl = createInterface({
        input: process.stdin,
        output: process.stdout,
    });
    try {
        return (await rl.question(prompt)).trim();
    }
    finally {
        rl.close();
    }
};
|
|
38
|
+
// Load the saved config and interactively fill in any missing required
// fields (API key, then model), persisting before returning.
// Throws when the API key is missing and stdin is not a TTY (no way to
// prompt, e.g. piped/CI usage).
const ensureConfigured = async () => {
    let config = await loadConfig();
    let changed = false;
    if (!config.apiKey) {
        if (!process.stdin.isTTY) {
            throw new Error("Missing NVIDIA API key. Run `nvicode auth` first.");
        }
        const apiKey = await question("NVIDIA API key: ");
        if (!apiKey) {
            throw new Error("NVIDIA API key is required.");
        }
        config = {
            ...config,
            apiKey,
        };
        changed = true;
    }
    if (!config.model) {
        // Prefer the first live-recommended model; fall back to the first
        // curated entry if the listing call fails or returns nothing usable.
        const [first] = await getRecommendedModels(config.apiKey);
        config = {
            ...config,
            model: first?.id || CURATED_MODELS[0].id,
        };
        changed = true;
    }
    if (changed) {
        // saveConfig returns the defaults-resolved object; keep that copy.
        config = await saveConfig(config);
    }
    return config;
};
|
|
68
|
+
// `nvicode auth`: prompt for and persist the NVIDIA API key. An empty
// answer keeps an already-saved key; an empty answer with no saved key is
// an error.
const runAuth = async () => {
    const config = await loadConfig();
    const apiKey = await question(config.apiKey ? "NVIDIA API key (leave blank to keep current): " : "NVIDIA API key: ");
    if (!apiKey && config.apiKey) {
        console.log("Kept existing NVIDIA API key.");
        return;
    }
    if (!apiKey) {
        throw new Error("NVIDIA API key is required.");
    }
    await saveConfig({
        ...config,
        apiKey,
    });
    console.log("Saved NVIDIA API key.");
};
|
|
84
|
+
/**
 * Print the numbered model menu. With an API key the list is filtered by
 * live availability; without one the curated fallback list is shown.
 */
const printModels = async (apiKey) => {
    const models = apiKey ? await getRecommendedModels(apiKey) : CURATED_MODELS;
    let position = 0;
    for (const model of models) {
        position += 1;
        console.log(`${position}. ${model.label}`);
        console.log(`   ${model.id}`);
        console.log(`   ${model.description}`);
    }
};
|
|
92
|
+
/**
 * `nvicode select model`: show the recommended models once, then accept a
 * 1-based menu number or a free-form model id and persist the choice.
 *
 * Fix: the previous version fetched the model list twice — once into
 * `models` and once more inside printModels() — so a flaky upstream could
 * make the printed menu differ from the list the numeric selection indexed
 * into. Fetch once and render that exact list.
 */
const runSelectModel = async () => {
    const config = await ensureConfigured();
    const models = await getRecommendedModels(config.apiKey);
    console.log("Recommended NVIDIA coding models:");
    models.forEach((model, index) => {
        console.log(`${index + 1}. ${model.label}`);
        console.log(`   ${model.id}`);
        console.log(`   ${model.description}`);
    });
    console.log("Type a number from the list or enter a custom model id.");
    const answer = await question("Model selection: ");
    const index = Number(answer);
    // A valid in-range integer selects from the menu; anything else is
    // treated as a literal model id.
    const chosenModel = Number.isInteger(index) && index >= 1 && index <= models.length
        ? models[index - 1]?.id
        : answer.trim();
    if (!chosenModel) {
        throw new Error("Model selection is required.");
    }
    await saveConfig({
        ...config,
        model: chosenModel,
    });
    console.log(`Saved model: ${chosenModel}`);
};
|
|
112
|
+
// `nvicode config`: print the resolved configuration. The API key itself is
// never echoed — only whether one is saved.
const runConfig = async () => {
    const config = await loadConfig();
    const paths = getNvicodePaths();
    console.log(`Config file: ${paths.configFile}`);
    console.log(`State dir: ${paths.stateDir}`);
    console.log(`Model: ${config.model}`);
    console.log(`Proxy port: ${config.proxyPort}`);
    console.log(`Thinking: ${config.thinking ? "on" : "off"}`);
    console.log(`API key: ${config.apiKey ? "saved" : "missing"}`);
};
|
|
122
|
+
/**
 * Poll the local proxy's /health endpoint until it responds OK.
 * Tries up to 50 times with a 200ms pause between attempts (~10s budget);
 * connection errors are treated the same as unhealthy responses.
 */
const waitForHealthyProxy = async (port) => {
    const maxAttempts = 50;
    const delayMs = 200;
    const healthUrl = `http://127.0.0.1:${port}/health`;
    for (let i = 0; i < maxAttempts; i += 1) {
        const healthy = await fetch(healthUrl)
            .then((res) => res.ok)
            .catch(() => false);
        if (healthy) {
            return true;
        }
        await new Promise((resolve) => setTimeout(resolve, delayMs));
    }
    return false;
};
|
|
137
|
+
// Start the background proxy if a healthy one is not already listening on
// the configured port, then wait for it to come up.
const ensureProxyRunning = async (config) => {
    if (await waitForHealthyProxy(config.proxyPort)) {
        return;
    }
    const paths = getNvicodePaths();
    await fs.mkdir(paths.stateDir, { recursive: true });
    // Append proxy output to the log file; openSync because spawn's stdio
    // option takes a raw file descriptor.
    const logFd = openSync(paths.logFile, "a");
    // Re-invoke this same CLI entry point with the `serve` subcommand, fully
    // detached so it outlives the launching process.
    const child = spawn(process.execPath, [__filename, "serve"], {
        detached: true,
        env: {
            ...process.env,
        },
        stdio: ["ignore", logFd, logFd],
    });
    child.unref();
    // Best-effort pid record; NOTE(review): nothing visible here cleans this
    // file up or checks for a stale pid — confirm intended lifecycle.
    await fs.writeFile(paths.pidFile, `${child.pid}\n`);
    if (!(await waitForHealthyProxy(config.proxyPort))) {
        throw new Error(`nvicode proxy failed to start. See ${paths.logFile}`);
    }
};
|
|
157
|
+
/**
 * True when `filePath` exists and is executable by the current user; any
 * access failure (missing file, no permission) yields false.
 */
const isExecutable = (filePath) => fs
    .access(filePath, constants.X_OK)
    .then(() => true, () => false);
|
|
166
|
+
/**
 * Locate the Claude Code executable, in order of preference:
 *   1. `claude-native` anywhere on PATH,
 *   2. the newest (natural-sort) entry under ~/.local/share/claude/versions,
 *   3. `claude` anywhere on PATH.
 * Throws when none of the three can be found.
 */
const resolveClaudeBinary = async () => {
    const native = await findExecutableInPath("claude-native");
    if (native) {
        return native;
    }
    const versionsDir = path.join(os.homedir(), ".local", "share", "claude", "versions");
    try {
        const entries = await fs.readdir(versionsDir);
        const naturalCompare = (left, right) => left.localeCompare(right, undefined, {
            numeric: true,
            sensitivity: "base",
        });
        const newest = entries.sort(naturalCompare).at(-1);
        if (newest) {
            return path.join(versionsDir, newest);
        }
    }
    catch {
        // versions dir missing or unreadable — fall through to PATH lookup
    }
    const fromPath = await findExecutableInPath("claude");
    if (fromPath) {
        return fromPath;
    }
    throw new Error("Unable to locate Claude Code binary.");
};
|
|
191
|
+
/**
 * Walk PATH and return the first directory entry where `name` resolves to
 * an executable file, or null when no match exists. Lookups run in PATH
 * order, so earlier directories win, matching shell resolution.
 */
const findExecutableInPath = async (name) => {
    const directories = (process.env.PATH || "")
        .split(path.delimiter)
        .filter(Boolean);
    for (const directory of directories) {
        const candidate = path.join(directory, name);
        if (await isExecutable(candidate)) {
            return candidate;
        }
    }
    return null;
};
|
|
204
|
+
// `nvicode launch claude [...]`: ensure config and the background proxy,
// then exec Claude Code pointed at the local gateway. Extra args are passed
// straight through to the Claude binary.
const runLaunchClaude = async (args) => {
    const config = await ensureConfigured();
    await ensureProxyRunning(config);
    const claudeBinary = await resolveClaudeBinary();
    const child = spawn(claudeBinary, args, {
        stdio: "inherit",
        env: {
            ...process.env,
            // Point Claude Code's Anthropic client at the local proxy and
            // authenticate with the shared proxy token.
            ANTHROPIC_BASE_URL: `http://127.0.0.1:${config.proxyPort}`,
            ANTHROPIC_AUTH_TOKEN: config.proxyToken,
            // Blank out any real Anthropic key so it is never sent anywhere.
            ANTHROPIC_API_KEY: "",
            ANTHROPIC_MODEL: config.model,
            CLAUDE_CODE_DISABLE_EXPERIMENTAL_BETAS: "1",
            ANTHROPIC_CUSTOM_MODEL_OPTION: config.model,
            ANTHROPIC_CUSTOM_MODEL_OPTION_NAME: "nvicode custom model",
            ANTHROPIC_CUSTOM_MODEL_OPTION_DESCRIPTION: "Claude Code via local NVIDIA gateway",
        },
    });
    await new Promise((resolve, reject) => {
        child.on("exit", (code, signal) => {
            if (signal) {
                reject(new Error(`Claude exited with signal ${signal}`));
                return;
            }
            // Propagate Claude's exit status as nvicode's own exit code.
            process.exitCode = code ?? 0;
            resolve();
        });
        child.on("error", reject);
    });
};
|
|
234
|
+
// `nvicode serve`: run the proxy server in the foreground on loopback until
// SIGINT/SIGTERM. Also the entry point the detached background process uses.
const runServe = async () => {
    const config = await ensureConfigured();
    const server = createProxyServer(config);
    await new Promise((resolve, reject) => {
        server.once("error", reject);
        server.listen(config.proxyPort, "127.0.0.1", () => resolve());
    });
    // Log to stderr so stdout stays clean.
    console.error(`nvicode proxy listening on http://127.0.0.1:${config.proxyPort} using ${config.model}`);
    const shutdown = () => {
        server.close(() => process.exit(0));
    };
    process.on("SIGINT", shutdown);
    process.on("SIGTERM", shutdown);
};
|
|
248
|
+
// CLI entry point: dispatch on the first positional argument.
const main = async () => {
    const args = process.argv.slice(2);
    const [command, ...rest] = args;
    if (!command || command === "--help" || command === "-h") {
        usage();
        return;
    }
    if (command === "serve") {
        await runServe();
        return;
    }
    if (command === "models") {
        // `models` works without a saved key (falls back to curated list).
        const config = await loadConfig();
        await printModels(config.apiKey || undefined);
        return;
    }
    if (command === "auth") {
        await runAuth();
        return;
    }
    if (command === "config") {
        await runConfig();
        return;
    }
    // Accept both `select model` and the one-word `select-model` alias.
    if ((command === "select" && rest[0] === "model") ||
        command === "select-model") {
        await runSelectModel();
        return;
    }
    if (command === "launch") {
        if (rest[0] !== "claude") {
            throw new Error("Only `nvicode launch claude` is supported right now.");
        }
        await runLaunchClaude(rest.slice(1));
        return;
    }
    throw new Error(`Unknown command: ${command}`);
};
// Top-level invocation: print a clean one-line message and exit non-zero on
// any failure instead of dumping a stack trace at users.
void main().catch((error) => {
    console.error(error instanceof Error ? error.message : String(error));
    process.exit(1);
});
|
package/dist/config.js
ADDED
|
@@ -0,0 +1,56 @@
|
|
|
1
|
+
import { randomUUID } from "node:crypto";
|
|
2
|
+
import { promises as fs } from "node:fs";
|
|
3
|
+
import os from "node:os";
|
|
4
|
+
import path from "node:path";
|
|
5
|
+
const DEFAULT_PROXY_PORT = 8788;
const DEFAULT_MODEL = "moonshotai/kimi-k2.5";
/**
 * Resolve the XDG-style directories and files nvicode uses.
 *
 * Fix: the XDG_CONFIG_HOME fallback was `~/.local/share`, which is
 * XDG_DATA_HOME's spec default; per the XDG Base Directory Specification
 * the config default is `~/.config`. (Users who already saved a config at
 * the old location with XDG_CONFIG_HOME unset will be re-prompted once.)
 *
 * @returns {{configDir: string, configFile: string, stateDir: string,
 *            logFile: string, pidFile: string}}
 */
export const getNvicodePaths = () => {
    const configHome = process.env.XDG_CONFIG_HOME || path.join(os.homedir(), ".config");
    const stateHome = process.env.XDG_STATE_HOME || path.join(os.homedir(), ".local", "state");
    const configDir = path.join(configHome, "nvicode");
    const stateDir = path.join(stateHome, "nvicode");
    return {
        configDir,
        configFile: path.join(configDir, "config.json"),
        stateDir,
        logFile: path.join(stateDir, "proxy.log"),
        pidFile: path.join(stateDir, "proxy.pid"),
    };
};
|
|
20
|
+
/**
 * Normalize a raw (possibly partial) config object: trim strings, apply
 * defaults, validate the port, and mint a random proxy token when none is
 * stored. Pure aside from the token generation.
 */
const withDefaults = (config) => {
    const portIsValid = Number.isInteger(config.proxyPort) && config.proxyPort > 0;
    return {
        apiKey: config.apiKey?.trim() || "",
        model: config.model?.trim() || DEFAULT_MODEL,
        proxyPort: portIsValid ? config.proxyPort : DEFAULT_PROXY_PORT,
        proxyToken: config.proxyToken?.trim() || randomUUID(),
        thinking: config.thinking ?? false,
    };
};
|
|
29
|
+
/**
 * Read and normalize the config file; a missing file yields pure defaults.
 * Any other read/parse error is rethrown.
 *
 * Fix: withDefaults() mints a *fresh random* proxyToken whenever none is
 * stored, and previously nothing persisted it here — so every process that
 * loaded such a config got a different token. The detached `nvicode serve`
 * proxy and the `launch` client would then disagree and every request would
 * fail with 401. Persist the generated token so all processes share it.
 */
export const loadConfig = async () => {
    const paths = getNvicodePaths();
    let stored;
    try {
        stored = JSON.parse(await fs.readFile(paths.configFile, "utf8"));
    }
    catch (error) {
        if (error.code === "ENOENT") {
            return withDefaults({});
        }
        throw error;
    }
    const resolved = withDefaults(stored);
    if (!stored.proxyToken?.trim()) {
        await saveConfig(resolved);
    }
    return resolved;
};
|
|
42
|
+
// Normalize and persist the config, creating the config and state dirs as
// needed. Returns the defaults-resolved object that was written.
// NOTE(review): the file contains the API key and is written with default
// permissions — consider restricting to mode 0o600.
export const saveConfig = async (config) => {
    const paths = getNvicodePaths();
    await fs.mkdir(paths.configDir, { recursive: true });
    await fs.mkdir(paths.stateDir, { recursive: true });
    const resolved = withDefaults(config);
    await fs.writeFile(paths.configFile, `${JSON.stringify(resolved, null, 2)}\n`);
    return resolved;
};
|
|
50
|
+
/**
 * Merge a partial patch onto the stored config and persist the result.
 * Returns the defaults-resolved config that was written.
 */
export const updateConfig = async (patch) => {
    const current = await loadConfig();
    const merged = { ...current, ...patch };
    return saveConfig(merged);
};
|
package/dist/models.js
ADDED
|
@@ -0,0 +1,61 @@
|
|
|
1
|
+
// Curated fallback list of NVIDIA-hosted models recommended for coding.
// Order matters: the first entry is used as the default model when nothing
// is configured. Shown as-is when the live model listing is unavailable.
export const CURATED_MODELS = [
    {
        id: "moonshotai/kimi-k2.5",
        label: "Kimi K2.5",
        description: "Strong coding and agentic workflow model.",
    },
    {
        id: "qwen/qwen3-coder-480b-a35b-instruct",
        label: "Qwen3 Coder 480B",
        description: "Large coding-focused Qwen model.",
    },
    {
        id: "z-ai/glm5",
        label: "GLM5",
        description: "General purpose reasoning model with code capability.",
    },
    {
        id: "deepseek-ai/deepseek-v3.2",
        label: "DeepSeek V3.2",
        description: "General coding and reasoning model.",
    },
    {
        id: "mistralai/codestral-22b-instruct-v0.1",
        label: "Codestral 22B",
        description: "Compact coding-specialized model.",
    },
    {
        id: "qwen/qwen2.5-coder-32b-instruct",
        label: "Qwen2.5 Coder 32B",
        description: "Smaller coding-focused Qwen model.",
    },
];
// NVIDIA's OpenAI-compatible model listing endpoint.
const MODELS_URL = "https://integrate.api.nvidia.com/v1/models";
|
|
34
|
+
/**
 * Fetch the set of model ids the given API key can access from NVIDIA's
 * model listing endpoint.
 *
 * @param {string} apiKey - NVIDIA API key used as a bearer token.
 * @returns {Promise<Set<string>>} non-empty string ids from the listing.
 * @throws {Error} on any non-2xx HTTP response.
 */
export const fetchAvailableModelIds = async (apiKey) => {
    const response = await fetch(MODELS_URL, {
        headers: {
            Authorization: `Bearer ${apiKey}`,
        },
    });
    if (!response.ok) {
        throw new Error(`Unable to fetch NVIDIA models: HTTP ${response.status} ${response.statusText}`);
    }
    const body = await response.json();
    const validIds = (body.data ?? [])
        .map((entry) => entry.id)
        .filter((id) => typeof id === "string" && id.length > 0);
    return new Set(validIds);
};
|
|
52
|
+
/**
 * Return the curated models that the key can actually access. When the
 * listing call fails, or none of the curated ids are available, fall back
 * to the full curated list rather than surfacing an error.
 */
export const getRecommendedModels = async (apiKey) => {
    let available;
    try {
        available = await fetchAvailableModelIds(apiKey);
    }
    catch {
        return CURATED_MODELS;
    }
    const usable = CURATED_MODELS.filter(({ id }) => available.has(id));
    return usable.length === 0 ? CURATED_MODELS : usable;
};
|
package/dist/proxy.js
ADDED
|
@@ -0,0 +1,503 @@
|
|
|
1
|
+
import { randomUUID } from "node:crypto";
|
|
2
|
+
import { createServer } from "node:http";
|
|
3
|
+
const NVIDIA_URL = "https://integrate.api.nvidia.com/v1/chat/completions";
|
|
4
|
+
/**
 * Serialize `payload` as JSON and finish the HTTP response with the given
 * status code and a JSON content type.
 */
const sendJson = (response, statusCode, payload) => {
    const body = JSON.stringify(payload);
    response.writeHead(statusCode, {
        "Content-Type": "application/json",
    });
    response.end(body);
};
|
|
10
|
+
/**
 * Finish the response with an Anthropic-shaped error envelope:
 * `{ type: "error", error: { type, message } }`.
 */
const sendAnthropicError = (response, statusCode, type, message) => {
    const envelope = {
        type: "error",
        error: { type, message },
    };
    sendJson(response, statusCode, envelope);
};
|
|
19
|
+
/**
 * Drain an incoming request stream and decode the whole body as UTF-8.
 * String chunks are converted to Buffers so mixed streams also work.
 */
const readRequestBody = async (request) => {
    const collected = [];
    for await (const piece of request) {
        const chunk = Buffer.isBuffer(piece) ? piece : Buffer.from(piece);
        collected.push(chunk);
    }
    return Buffer.concat(collected).toString("utf8");
};
|
|
26
|
+
/**
 * Pull the bearer token out of the Authorization header (case-insensitive
 * scheme). Returns null when the header is missing, malformed, or the
 * token trims to empty.
 */
const extractBearerToken = (request) => {
    const header = request.headers.authorization;
    if (!header) {
        return null;
    }
    const match = header.match(/^Bearer\s+(.+)$/i);
    if (!match) {
        return null;
    }
    const token = match[1].trim();
    return token.length > 0 ? token : null;
};
|
|
34
|
+
/**
 * Flatten arbitrary Anthropic content into a plain string:
 *   - strings pass through,
 *   - arrays are recursively flattened and newline-joined,
 *   - `{type:"text", text}` blocks yield their text,
 *   - null/undefined become "",
 *   - anything else is JSON-serialized.
 */
const stringifyContent = (value) => {
    if (typeof value === "string") {
        return value;
    }
    if (Array.isArray(value)) {
        return value.map(stringifyContent).join("\n");
    }
    const isTextBlock = value !== null &&
        typeof value === "object" &&
        value.type === "text" &&
        typeof value.text === "string";
    if (isTextBlock) {
        return value.text || "";
    }
    if (value === null || value === undefined) {
        return "";
    }
    return JSON.stringify(value);
};
|
|
54
|
+
/**
 * Collapse an Anthropic `system` field (string or array of text blocks)
 * into one string. Returns null for a missing, empty, or all-blank prompt.
 */
const normalizeSystemPrompt = (system) => {
    if (!system) {
        return null;
    }
    if (typeof system === "string") {
        return system;
    }
    const joined = system.map((block) => block.text).join("\n\n");
    return joined.length > 0 ? joined : null;
};
|
|
64
|
+
/**
 * Move buffered user content parts into `messages` as a single user
 * message, then clear the buffer in place. A lone text part is emitted as
 * a plain string; anything else becomes a copied parts array. No-op when
 * the buffer is empty.
 */
const flushUserParts = (messages, parts) => {
    if (parts.length === 0) {
        return;
    }
    const loneText = parts.length === 1 && parts[0]?.type === "text";
    messages.push({
        role: "user",
        content: loneText ? parts[0].text : [...parts],
    });
    parts.length = 0;
};
|
|
76
|
+
// Convert one Anthropic user message into OpenAI-style messages. Text and
// image blocks accumulate into one user message; each tool_result block
// flushes the buffered parts first and becomes its own `tool`-role message,
// preserving the original block ordering. Unknown block types are dropped.
const mapUserMessage = (message) => {
    if (typeof message.content === "string") {
        return [
            {
                role: "user",
                content: message.content,
            },
        ];
    }
    const mapped = [];
    const parts = [];
    for (const block of message.content) {
        if (block.type === "text") {
            parts.push({ type: "text", text: block.text });
            continue;
        }
        if (block.type === "image") {
            const mediaType = block.source?.media_type || "application/octet-stream";
            const data = block.source?.data;
            if (!data) {
                // Only inline base64 sources are supported; skip others.
                continue;
            }
            parts.push({
                type: "image_url",
                image_url: {
                    url: `data:${mediaType};base64,${data}`,
                },
            });
            continue;
        }
        if (block.type === "tool_result") {
            // Tool results must be separate `tool` messages; flush buffered
            // user parts so chronological order is kept.
            flushUserParts(mapped, parts);
            mapped.push({
                role: "tool",
                tool_call_id: block.tool_use_id,
                content: stringifyContent(block.content),
            });
        }
    }
    flushUserParts(mapped, parts);
    return mapped;
};
|
|
118
|
+
/**
 * Convert one Anthropic assistant message into a single OpenAI-style
 * assistant message. Text blocks are joined with blank lines (null content
 * when there are none); tool_use blocks become `tool_calls` entries with
 * JSON-encoded arguments. Other block types are ignored.
 */
const mapAssistantMessage = (message) => {
    if (typeof message.content === "string") {
        return [
            {
                role: "assistant",
                content: message.content,
            },
        ];
    }
    const texts = [];
    const calls = [];
    for (const block of message.content) {
        if (block.type === "text") {
            texts.push(block.text);
        }
        else if (block.type === "tool_use") {
            calls.push({
                id: block.id,
                type: "function",
                function: {
                    name: block.name,
                    arguments: JSON.stringify(block.input ?? {}),
                },
            });
        }
    }
    const assistantMessage = {
        role: "assistant",
        content: texts.length > 0 ? texts.join("\n\n") : null,
    };
    if (calls.length > 0) {
        assistantMessage.tool_calls = calls;
    }
    return [assistantMessage];
};
|
|
153
|
+
/**
 * Build the full OpenAI-style message list from an Anthropic payload:
 * an optional leading system message followed by the mapped user and
 * assistant messages in order. Messages with any other role are dropped.
 */
const mapMessages = (payload) => {
    const result = [];
    const systemText = normalizeSystemPrompt(payload.system);
    if (systemText) {
        result.push({
            role: "system",
            content: systemText,
        });
    }
    for (const message of payload.messages ?? []) {
        switch (message.role) {
            case "user":
                result.push(...mapUserMessage(message));
                break;
            case "assistant":
                result.push(...mapAssistantMessage(message));
                break;
            default:
                break;
        }
    }
    return result;
};
|
|
173
|
+
/**
 * Map Anthropic tool definitions to OpenAI function-tool definitions.
 * Returns undefined for a missing or empty list. Tools without an
 * input_schema get an empty object schema.
 */
const mapTools = (tools) => {
    if (!tools?.length) {
        return undefined;
    }
    return tools.map(({ name, description, input_schema }) => ({
        type: "function",
        function: {
            name,
            description,
            parameters: input_schema ?? {
                type: "object",
                properties: {},
            },
        },
    }));
};
|
|
189
|
+
/**
 * Translate an Anthropic tool_choice into the OpenAI equivalent:
 * `auto` -> "auto", `any` -> "required", `tool` (with a name) -> a
 * specific function choice. Everything else maps to undefined.
 */
const mapToolChoice = (toolChoice) => {
    if (!toolChoice || typeof toolChoice !== "object") {
        return undefined;
    }
    switch (toolChoice.type) {
        case "auto":
            return "auto";
        case "any":
            return "required";
        case "tool":
            if (typeof toolChoice.name === "string") {
                return {
                    type: "function",
                    function: {
                        name: toolChoice.name,
                    },
                };
            }
            return undefined;
        default:
            return undefined;
    }
};
|
|
211
|
+
/**
 * Parse a JSON string, degrading to `{ raw: value }` instead of throwing
 * when the input is not valid JSON (e.g. truncated tool arguments).
 */
const safeParseJson = (value) => {
    let parsed;
    try {
        parsed = JSON.parse(value);
    }
    catch {
        parsed = { raw: value };
    }
    return parsed;
};
|
|
221
|
+
/**
 * Map an OpenAI finish_reason to the Anthropic stop_reason vocabulary.
 * Anything unrecognized (including undefined) is treated as a normal stop.
 */
const mapStopReason = (finishReason) => {
    if (finishReason === "tool_calls") {
        return "tool_use";
    }
    if (finishReason === "length") {
        return "max_tokens";
    }
    return "end_turn";
};
|
|
232
|
+
// Translate an OpenAI-style chat choice into Anthropic content blocks.
// Always returns at least one block (an empty text block) so the response
// satisfies the Anthropic schema's non-empty content requirement.
const mapResponseContent = (choice) => {
    const content = [];
    const message = choice?.message;
    if (typeof message?.content === "string" && message.content.length > 0) {
        content.push({
            type: "text",
            text: message.content,
        });
    }
    else if (Array.isArray(message?.content)) {
        // Some upstreams return content as an array of part objects; join
        // the textual parts into one block.
        const text = message.content
            .map((part) => (typeof part.text === "string" ? part.text : ""))
            .filter((entry) => entry.length > 0)
            .join("\n");
        if (text.length > 0) {
            content.push({
                type: "text",
                text,
            });
        }
    }
    // Fall back to the reasoning trace when there is no visible answer
    // (per the README: some NVIDIA reasoning models spend the whole output
    // budget on thinking).
    if (content.length === 0 &&
        typeof message?.reasoning === "string" &&
        message.reasoning.trim().length > 0) {
        content.push({
            type: "text",
            text: message.reasoning,
        });
    }
    for (const toolCall of message?.tool_calls ?? []) {
        const name = toolCall.function?.name;
        if (!name) {
            // A tool call with no function name cannot be represented.
            continue;
        }
        content.push({
            type: "tool_use",
            // Synthesize an id when the upstream omits one.
            id: toolCall.id || `toolu_${randomUUID()}`,
            name,
            input: safeParseJson(toolCall.function?.arguments || "{}"),
        });
    }
    if (content.length === 0) {
        content.push({
            type: "text",
            text: "",
        });
    }
    return content;
};
|
|
281
|
+
/**
 * Split a string into fixed-size pieces (default 1024 chars); the final
 * piece may be shorter. Falsy input yields a single empty-string chunk.
 */
const chunkText = (value, chunkSize = 1024) => {
    if (!value) {
        return [""];
    }
    const pieceCount = Math.ceil(value.length / chunkSize);
    return Array.from({ length: pieceCount }, (_, i) => value.slice(i * chunkSize, (i + 1) * chunkSize));
};
|
|
291
|
+
/**
 * Emit one server-sent event frame: the event name line, a JSON data line,
 * and the blank-line terminator.
 */
const writeSse = (response, event, payload) => {
    const data = JSON.stringify(payload);
    response.write(`event: ${event}\ndata: ${data}\n\n`);
};
|
|
295
|
+
/**
 * Rough token estimate: JSON-serialized length divided by four characters
 * per token, never less than 1. Used when the upstream omits usage counts.
 */
const estimateTokens = (payload) => {
    const serializedLength = JSON.stringify(payload).length;
    return Math.max(1, Math.ceil(serializedLength / 4));
};
|
|
299
|
+
// Forward an Anthropic-style messages payload to NVIDIA chat/completions
// (non-streaming) and return the parsed upstream body plus the model id
// actually used. Throws, including the upstream body text, on any non-2xx.
const callNvidia = async (config, payload) => {
    // Claude Code sends Anthropic model ids (claude-*); only honor the
    // requested model when it looks like an NVIDIA-style `org/name` id.
    const targetModel = payload.model && payload.model.includes("/") && !payload.model.startsWith("claude-")
        ? payload.model
        : config.model;
    const requestBody = {
        model: targetModel,
        messages: mapMessages(payload),
        max_tokens: payload.max_tokens ?? 16_384,
        // Upstream streaming is not used; SSE to the client is synthesized
        // elsewhere from this complete response.
        stream: false,
    };
    // Pass sampling knobs through only when explicitly numeric.
    if (typeof payload.temperature === "number") {
        requestBody.temperature = payload.temperature;
    }
    if (typeof payload.top_p === "number") {
        requestBody.top_p = payload.top_p;
    }
    if (payload.stop_sequences && payload.stop_sequences.length > 0) {
        requestBody.stop = payload.stop_sequences;
    }
    const tools = mapTools(payload.tools);
    if (tools) {
        requestBody.tools = tools;
    }
    const toolChoice = mapToolChoice(payload.tool_choice);
    if (toolChoice) {
        requestBody.tool_choice = toolChoice;
    }
    if (config.thinking) {
        // Opt-in reasoning toggle; NOTE(review): assumed to be understood by
        // the targeted NVIDIA-hosted models — confirm per model.
        requestBody.chat_template_kwargs = {
            thinking: true,
        };
    }
    const response = await fetch(NVIDIA_URL, {
        method: "POST",
        headers: {
            Authorization: `Bearer ${config.apiKey}`,
            Accept: "application/json",
            "Content-Type": "application/json",
        },
        body: JSON.stringify(requestBody),
    });
    // Read as text first so error bodies can be surfaced verbatim.
    const raw = await response.text();
    if (!response.ok) {
        throw new Error(`NVIDIA API HTTP ${response.status}: ${raw}`);
    }
    return {
        targetModel,
        upstream: JSON.parse(raw),
    };
};
|
|
349
|
+
/**
 * Create an HTTP server that speaks the Anthropic Messages API on the front
 * and forwards work to the NVIDIA OpenAI-compatible endpoint via `callNvidia`.
 *
 * Routes:
 *   OPTIONS *                        -> 204 (preflight, no body)
 *   GET  /health                     -> proxy status JSON (unauthenticated)
 *   POST /v1/messages/count_tokens   -> local token estimate
 *   POST /v1/messages                -> proxied completion (plain JSON, or
 *                                       replayed as an Anthropic SSE stream
 *                                       when the client sets `stream: true`)
 *
 * @param {object} config - resolved nvicode config; this handler reads
 *   `model`, `proxyPort`, `proxyToken`, and `thinking` (the rest is consumed
 *   by `callNvidia`).
 * @returns {import("node:http").Server} an unstarted HTTP server.
 */
export const createProxyServer = (config) => {
  return createServer(async (request, response) => {
    try {
      // request.url is path + query only; the base host is a throwaway.
      const url = new URL(request.url || "/", "http://127.0.0.1");
      if (request.method === "OPTIONS") {
        response.writeHead(204);
        response.end();
        return;
      }
      if (url.pathname === "/health") {
        sendJson(response, 200, {
          ok: true,
          model: config.model,
          port: config.proxyPort,
          thinking: config.thinking,
        });
        return;
      }
      // Every route past this point requires the locally issued proxy token.
      const token = extractBearerToken(request);
      if (token !== config.proxyToken) {
        sendAnthropicError(response, 401, "authentication_error", "Invalid nvicode proxy token");
        return;
      }
      if (request.method === "POST" && url.pathname === "/v1/messages/count_tokens") {
        const rawBody = await readRequestBody(request);
        // FIX: a malformed body is a client error (400 invalid_request_error),
        // not a 500 — previously JSON.parse threw into the catch-all below.
        let payload;
        try {
          payload = JSON.parse(rawBody);
        } catch {
          sendAnthropicError(response, 400, "invalid_request_error", "Request body is not valid JSON");
          return;
        }
        sendJson(response, 200, {
          input_tokens: estimateTokens({
            system: payload.system ?? null,
            messages: payload.messages ?? [],
            tools: payload.tools ?? [],
          }),
        });
        return;
      }
      if (request.method === "POST" && url.pathname === "/v1/messages") {
        const rawBody = await readRequestBody(request);
        // FIX: same 400-on-bad-JSON handling as count_tokens (see above).
        let payload;
        try {
          payload = JSON.parse(rawBody);
        } catch {
          sendAnthropicError(response, 400, "invalid_request_error", "Request body is not valid JSON");
          return;
        }
        const { upstream, targetModel } = await callNvidia(config, payload);
        const choice = upstream.choices?.[0];
        const mappedContent = mapResponseContent(choice);
        const anthropicResponse = {
          id: upstream.id || `msg_${randomUUID()}`,
          type: "message",
          role: "assistant",
          model: targetModel,
          content: mappedContent,
          stop_reason: mapStopReason(choice?.finish_reason),
          stop_sequence: null,
          usage: {
            // Fall back to a local estimate when upstream omits usage counts.
            input_tokens: upstream.usage?.prompt_tokens ??
              estimateTokens({
                system: payload.system ?? null,
                messages: payload.messages ?? [],
                tools: payload.tools ?? [],
              }),
            output_tokens: upstream.usage?.completion_tokens ?? 0,
          },
        };
        if (!payload.stream) {
          sendJson(response, 200, anthropicResponse);
          return;
        }
        // The upstream call is non-streaming; replay the finished message as
        // the SSE event sequence an Anthropic streaming client expects.
        response.writeHead(200, {
          "Cache-Control": "no-cache, no-transform",
          Connection: "keep-alive",
          "Content-Type": "text/event-stream",
        });
        writeSse(response, "message_start", {
          type: "message_start",
          message: {
            ...anthropicResponse,
            content: [],
            stop_reason: null,
            usage: {
              input_tokens: anthropicResponse.usage.input_tokens,
              output_tokens: 0,
            },
          },
        });
        mappedContent.forEach((block, index) => {
          if (block.type === "text") {
            writeSse(response, "content_block_start", {
              type: "content_block_start",
              index,
              content_block: {
                type: "text",
                text: "",
              },
            });
            // Emit text in small chunks so the client renders progressively.
            for (const chunk of chunkText(block.text)) {
              writeSse(response, "content_block_delta", {
                type: "content_block_delta",
                index,
                delta: {
                  type: "text_delta",
                  text: chunk,
                },
              });
            }
            writeSse(response, "content_block_stop", {
              type: "content_block_stop",
              index,
            });
            return;
          }
          if (block.type === "tool_use") {
            writeSse(response, "content_block_start", {
              type: "content_block_start",
              index,
              content_block: {
                type: "tool_use",
                id: block.id,
                name: block.name,
                input: {},
              },
            });
            // The complete tool input is delivered as one JSON delta.
            writeSse(response, "content_block_delta", {
              type: "content_block_delta",
              index,
              delta: {
                type: "input_json_delta",
                partial_json: JSON.stringify(block.input ?? {}),
              },
            });
            writeSse(response, "content_block_stop", {
              type: "content_block_stop",
              index,
            });
          }
        });
        writeSse(response, "message_delta", {
          type: "message_delta",
          delta: {
            stop_reason: anthropicResponse.stop_reason,
            stop_sequence: null,
          },
          usage: {
            output_tokens: anthropicResponse.usage.output_tokens,
          },
        });
        writeSse(response, "message_stop", {
          type: "message_stop",
        });
        response.end();
        return;
      }
      sendAnthropicError(response, 404, "not_found_error", `Unsupported route: ${request.method || "GET"} ${url.pathname}`);
    }
    catch (error) {
      const message = error instanceof Error ? error.message : String(error);
      // FIX: once the SSE stream has started, status/headers are already on
      // the wire; calling writeHead again would throw ERR_HTTP_HEADERS_SENT.
      // Terminate the stream instead of sending a JSON error envelope.
      if (response.headersSent) {
        response.end();
        return;
      }
      sendAnthropicError(response, 500, "api_error", message);
    }
  });
};
|
package/package.json
ADDED
|
@@ -0,0 +1,49 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "nvicode",
|
|
3
|
+
"version": "0.1.0",
|
|
4
|
+
"description": "Run Claude Code through NVIDIA-hosted models using a local Anthropic-compatible gateway.",
|
|
5
|
+
"author": "Dinesh Potla",
|
|
6
|
+
"keywords": [
|
|
7
|
+
"claude-code",
|
|
8
|
+
"nvidia",
|
|
9
|
+
"kimi",
|
|
10
|
+
"qwen",
|
|
11
|
+
"glm",
|
|
12
|
+
"gateway",
|
|
13
|
+
"cli"
|
|
14
|
+
],
|
|
15
|
+
"type": "module",
|
|
16
|
+
"bin": {
|
|
17
|
+
"nvicode": "dist/cli.js"
|
|
18
|
+
},
|
|
19
|
+
"scripts": {
|
|
20
|
+
"build": "tsc -p tsconfig.json",
|
|
21
|
+
"prepack": "npm run build",
|
|
22
|
+
"typecheck": "tsc --noEmit",
|
|
23
|
+
"dev": "tsx src/cli.ts"
|
|
24
|
+
},
|
|
25
|
+
"files": [
|
|
26
|
+
"dist",
|
|
27
|
+
"README.md"
|
|
28
|
+
],
|
|
29
|
+
"engines": {
|
|
30
|
+
"node": ">=20"
|
|
31
|
+
},
|
|
32
|
+
"license": "MIT",
|
|
33
|
+
"repository": {
|
|
34
|
+
"type": "git",
|
|
35
|
+
"url": "git+https://github.com/dineshpotla/nvicode.git"
|
|
36
|
+
},
|
|
37
|
+
"homepage": "https://github.com/dineshpotla/nvicode#readme",
|
|
38
|
+
"bugs": {
|
|
39
|
+
"url": "https://github.com/dineshpotla/nvicode/issues"
|
|
40
|
+
},
|
|
41
|
+
"publishConfig": {
|
|
42
|
+
"access": "public"
|
|
43
|
+
},
|
|
44
|
+
"devDependencies": {
|
|
45
|
+
"@types/node": "^22.13.8",
|
|
46
|
+
"tsx": "^4.20.3",
|
|
47
|
+
"typescript": "^5.8.2"
|
|
48
|
+
}
|
|
49
|
+
}
|