@banaxi/banana-code 1.7.0 → 1.8.0

This diff shows the changes between publicly released versions of the package as they appear in their respective supported public registries. It is provided for informational purposes only.
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@banaxi/banana-code",
3
- "version": "1.7.0",
3
+ "version": "1.8.0",
4
4
  "description": "🍌 BananaCode",
5
5
  "keywords": [
6
6
  "banana",
package/src/config.js CHANGED
@@ -146,6 +146,63 @@ export async function setupProvider(provider, config = {}) {
146
146
  choices: OPENAI_MODELS
147
147
  });
148
148
  }
149
+ } else if (provider === 'openrouter') {
150
+ config.apiKey = await input({
151
+ message: 'Enter your OPENROUTER_API_KEY (from openrouter.ai/keys):',
152
+ default: config.apiKey
153
+ });
154
+
155
+ let modelAccepted = false;
156
+ while (!modelAccepted) {
157
+ const modelId = await input({
158
+ message: 'Enter the OpenRouter model ID (e.g., nvidia/nemotron-3-super-120b-a12b:free):',
159
+ default: config.model || '',
160
+ validate: (v) => v.trim().length > 0 || 'Model ID cannot be empty'
161
+ });
162
+
163
+ console.log(chalk.cyan(`\nValidating model "${modelId}" on OpenRouter...`));
164
+ try {
165
+ const res = await fetch('https://openrouter.ai/api/v1/models');
166
+ const data = await res.json();
167
+ const found = data.data?.find(m => m.id === modelId.trim());
168
+
169
+ if (!found) {
170
+ console.log(chalk.red(`Model "${modelId}" was not found on OpenRouter.`));
171
+ console.log(chalk.yellow('Browse available models at: https://openrouter.ai/models'));
172
+ const retry = await input({ message: 'Try a different model ID? (y/n):', default: 'y' });
173
+ if (retry.toLowerCase() !== 'y') {
174
+ config.model = modelId.trim();
175
+ modelAccepted = true;
176
+ console.log(chalk.yellow('Proceeding anyway — tool calling may not work.'));
177
+ }
178
+ continue;
179
+ }
180
+
181
+ const supported = found.supported_parameters || [];
182
+ const hasToolCalling = supported.includes('tools') || supported.includes('tool_choice');
183
+
184
+ if (hasToolCalling) {
185
+ console.log(chalk.green(`✔ "${modelId}" supports tool calling. Good to go!`));
186
+ config.model = modelId.trim();
187
+ modelAccepted = true;
188
+ } else {
189
+ console.log(chalk.red(`✘ "${modelId}" does NOT support tool calling.`));
190
+ console.log(chalk.gray(` Supported parameters: ${supported.join(', ') || 'none listed'}`));
191
+ console.log(chalk.yellow('Banana Code requires tool calling to function correctly.'));
192
+ const retry = await input({ message: 'Choose a different model? (y/n):', default: 'y' });
193
+ if (retry.toLowerCase() !== 'y') {
194
+ config.model = modelId.trim();
195
+ modelAccepted = true;
196
+ console.log(chalk.yellow('Proceeding anyway — tool calling will likely fail.'));
197
+ }
198
+ }
199
+ } catch (err) {
200
+ console.log(chalk.red(`Could not reach OpenRouter API: ${err.message}`));
201
+ console.log(chalk.yellow('Skipping validation and using the model ID as-is.'));
202
+ config.model = modelId.trim();
203
+ modelAccepted = true;
204
+ }
205
+ }
149
206
  } else if (provider === 'ollama') {
150
207
  console.log(chalk.cyan("Detecting running Ollama models..."));
151
208
  try {
@@ -180,6 +237,7 @@ async function runSetupWizard() {
180
237
  { name: 'Anthropic Claude', value: 'claude' },
181
238
  { name: 'OpenAI', value: 'openai' },
182
239
  { name: 'Mistral AI', value: 'mistral' },
240
+ { name: 'OpenRouter (Any Model)', value: 'openrouter' },
183
241
  { name: 'Ollama Cloud', value: 'ollama_cloud' },
184
242
  { name: 'Ollama (Local)', value: 'ollama' }
185
243
  ]
package/src/constants.js CHANGED
@@ -7,9 +7,9 @@ export const GEMINI_MODELS = [
7
7
  ];
8
8
 
9
9
  export const CLAUDE_MODELS = [
10
- { name: 'Claude Opus 4.6 (Flagship)', value: 'claude-4-6-opus-20260205' },
11
- { name: 'Claude Sonnet 4.6 (Fast & Smart)', value: 'claude-4-6-sonnet-20260217' },
12
- { name: 'Claude Haiku 4.5', value: 'claude-4-5-haiku-20251015' }
10
+ { name: 'Claude Opus 4.6 (Flagship)', value: 'claude-opus-4-6' },
11
+ { name: 'Claude Sonnet 4.6 (Fast & Smart)', value: 'claude-sonnet-4-6' },
12
+ { name: 'Claude Haiku 4.5', value: 'claude-haiku-4-5' }
13
13
  ];
14
14
 
15
15
  export const OPENAI_MODELS = [
package/src/index.js CHANGED
@@ -11,6 +11,7 @@ import { OpenAIProvider } from './providers/openai.js';
11
11
  import { OllamaProvider } from './providers/ollama.js';
12
12
  import { OllamaCloudProvider } from './providers/ollamaCloud.js';
13
13
  import { MistralProvider } from './providers/mistral.js';
14
+ import { OpenRouterProvider } from './providers/openrouter.js';
14
15
 
15
16
  import { loadSession, saveSession, generateSessionId, getLatestSessionId, listSessions } from './sessions.js';
16
17
  import { printMarkdown } from './utils/markdown.js';
@@ -33,6 +34,7 @@ function createProvider(overrideConfig = null) {
33
34
  case 'claude': return new ClaudeProvider(activeConfig);
34
35
  case 'openai': return new OpenAIProvider(activeConfig);
35
36
  case 'mistral': return new MistralProvider(activeConfig);
37
+ case 'openrouter': return new OpenRouterProvider(activeConfig);
36
38
  case 'ollama_cloud': return new OllamaCloudProvider(activeConfig);
37
39
  case 'ollama': return new OllamaProvider(activeConfig);
38
40
  default:
@@ -57,20 +59,21 @@ async function handleSlashCommand(command) {
57
59
  { name: 'Anthropic Claude', value: 'claude' },
58
60
  { name: 'OpenAI', value: 'openai' },
59
61
  { name: 'Mistral AI', value: 'mistral' },
62
+ { name: 'OpenRouter (Any Model)', value: 'openrouter' },
60
63
  { name: 'Ollama Cloud', value: 'ollama_cloud' },
61
64
  { name: 'Ollama (Local)', value: 'ollama' }
62
65
  ]
63
66
  });
64
67
  }
65
68
 
66
- if (['gemini', 'claude', 'openai', 'mistral', 'ollama_cloud', 'ollama'].includes(newProv)) {
69
+ if (['gemini', 'claude', 'openai', 'mistral', 'openrouter', 'ollama_cloud', 'ollama'].includes(newProv)) {
67
70
  // Use the shared setup logic to get keys/models
68
71
  config = await setupProvider(newProv, config);
69
72
  await saveConfig(config);
70
73
  providerInstance = createProvider();
71
74
  console.log(chalk.green(`Switched provider to ${newProv} (${config.model}).`));
72
75
  } else {
73
- console.log(chalk.yellow(`Usage: /provider <gemini|claude|openai|mistral|ollama_cloud|ollama>`));
76
+ console.log(chalk.yellow(`Usage: /provider <gemini|claude|openai|mistral|openrouter|ollama_cloud|ollama>`));
74
77
  }
75
78
  break;
76
79
  case '/model':
@@ -87,6 +90,13 @@ async function handleSlashCommand(command) {
87
90
  choices = config.authType === 'oauth' ? CODEX_MODELS : OPENAI_MODELS;
88
91
  } else if (config.provider === 'mistral') {
89
92
  choices = MISTRAL_MODELS;
93
+ } else if (config.provider === 'openrouter') {
94
+ // Re-run setup flow so the user gets full validation
95
+ config = await setupProvider('openrouter', config);
96
+ await saveConfig(config);
97
+ providerInstance = createProvider();
98
+ console.log(chalk.green(`Switched OpenRouter model to ${config.model}.`));
99
+ break;
90
100
  } else if (config.provider === 'ollama_cloud') {
91
101
  choices = OLLAMA_CLOUD_MODELS;
92
102
  } else if (config.provider === 'ollama') {
@@ -124,6 +134,29 @@ async function handleSlashCommand(command) {
124
134
  }
125
135
 
126
136
  if (newModel) {
137
+ if (config.provider === 'openrouter') {
138
+ // Validate tool calling support before switching
139
+ console.log(chalk.cyan(`Validating "${newModel}" on OpenRouter...`));
140
+ try {
141
+ const res = await fetch('https://openrouter.ai/api/v1/models');
142
+ const data = await res.json();
143
+ const found = data.data?.find(m => m.id === newModel);
144
+ if (!found) {
145
+ console.log(chalk.yellow(`Model "${newModel}" not found on OpenRouter — proceeding anyway.`));
146
+ } else {
147
+ const supported = found.supported_parameters || [];
148
+ const hasToolCalling = supported.includes('tools') || supported.includes('tool_choice');
149
+ if (hasToolCalling) {
150
+ console.log(chalk.green(`✔ "${newModel}" supports tool calling.`));
151
+ } else {
152
+ console.log(chalk.red(`✘ "${newModel}" does NOT support tool calling. Banana Code may not work correctly.`));
153
+ console.log(chalk.gray(` Supported parameters: ${supported.join(', ') || 'none listed'}`));
154
+ }
155
+ }
156
+ } catch (err) {
157
+ console.log(chalk.yellow(`Could not validate on OpenRouter: ${err.message}`));
158
+ }
159
+ }
127
160
  config.model = newModel;
128
161
  await saveConfig(config);
129
162
  if (providerInstance) {
@@ -571,7 +604,7 @@ async function handleSlashCommand(command) {
571
604
  case '/help':
572
605
  console.log(chalk.yellow(`
573
606
  Available commands:
574
- /provider <name> - Switch AI provider (gemini, claude, openai, mistral, ollama_cloud, ollama)
607
+ /provider <name> - Switch AI provider (gemini, claude, openai, mistral, openrouter, ollama_cloud, ollama)
575
608
  /model [name] - Switch model within current provider (opens menu if name omitted)
576
609
  /chats - List persistent chat sessions
577
610
  /clear - Clear chat history
@@ -0,0 +1,169 @@
1
+ import OpenAI from 'openai';
2
+ import { getAvailableTools, executeTool } from '../tools/registry.js';
3
+ import chalk from 'chalk';
4
+ import ora from 'ora';
5
+ import { getSystemPrompt } from '../prompt.js';
6
+ import { printMarkdown } from '../utils/markdown.js';
7
+
8
+ export class OpenRouterProvider {
9
+ constructor(config) {
10
+ this.config = config;
11
+ this.openai = new OpenAI({
12
+ apiKey: config.apiKey,
13
+ baseURL: 'https://openrouter.ai/api/v1',
14
+ defaultHeaders: {
15
+ 'HTTP-Referer': 'https://github.com/Banaxi-Tech/Banana-Code',
16
+ 'X-Title': 'Banana Code'
17
+ }
18
+ });
19
+ this.modelName = config.model;
20
+ this.systemPrompt = getSystemPrompt(config);
21
+ this.messages = [{ role: 'system', content: this.systemPrompt }];
22
+ this.tools = getAvailableTools(config).map(t => ({
23
+ type: 'function',
24
+ function: {
25
+ name: t.name,
26
+ description: t.description,
27
+ parameters: t.parameters
28
+ }
29
+ }));
30
+ }
31
+
32
+ updateSystemPrompt(newPrompt) {
33
+ this.systemPrompt = newPrompt;
34
+ if (this.messages.length > 0 && this.messages[0].role === 'system') {
35
+ this.messages[0].content = newPrompt;
36
+ }
37
+ }
38
+
39
+ async sendMessage(message) {
40
+ this.messages.push({ role: 'user', content: message });
41
+
42
+ let spinner = ora({ text: 'Thinking...', color: 'yellow', stream: process.stdout }).start();
43
+ let finalResponse = '';
44
+
45
+ try {
46
+ while (true) {
47
+ let stream = null;
48
+ try {
49
+ stream = await this.openai.chat.completions.create({
50
+ model: this.modelName,
51
+ messages: this.messages,
52
+ tools: this.tools,
53
+ stream: true
54
+ });
55
+ } catch (e) {
56
+ spinner.stop();
57
+ let errMsg = e.message;
58
+ if (e.error && e.error.message) {
59
+ errMsg += ` - ${e.error.message}`;
60
+ } else if (e.response && e.response.data) {
61
+ try {
62
+ errMsg += ` - ${JSON.stringify(e.response.data)}`;
63
+ } catch(err){}
64
+ } else if (e.error && typeof e.error === 'object') {
65
+ try {
66
+ errMsg += ` - ${JSON.stringify(e.error)}`;
67
+ } catch(err){}
68
+ }
69
+ console.error(chalk.red(`OpenRouter Request Error: ${errMsg}`));
70
+ return `Error: ${errMsg}`;
71
+ }
72
+
73
+ let chunkResponse = '';
74
+ let toolCalls = [];
75
+
76
+ for await (const chunk of stream) {
77
+ const delta = chunk.choices[0]?.delta;
78
+
79
+ if (delta?.content) {
80
+ if (spinner.isSpinning && !this.config.useMarkedTerminal) spinner.stop();
81
+ if (!this.config.useMarkedTerminal) {
82
+ if (this.config.isApiMode && this.onChunk) {
83
+ this.onChunk(delta.content);
84
+ } else {
85
+ process.stdout.write(chalk.cyan(delta.content));
86
+ }
87
+ }
88
+ chunkResponse += delta.content;
89
+ finalResponse += delta.content;
90
+ }
91
+
92
+ if (delta?.tool_calls) {
93
+ if (spinner.isSpinning) spinner.stop();
94
+ for (const tc of delta.tool_calls) {
95
+ if (tc.index === undefined) continue;
96
+ if (!toolCalls[tc.index]) {
97
+ toolCalls[tc.index] = { id: tc.id, type: 'function', function: { name: tc.function?.name || '', arguments: '' } };
98
+ }
99
+ if (tc.function?.name && !toolCalls[tc.index].function.name) {
100
+ toolCalls[tc.index].function.name = tc.function.name;
101
+ }
102
+ if (tc.function?.arguments) {
103
+ toolCalls[tc.index].function.arguments += tc.function.arguments;
104
+ if (!spinner.isSpinning) {
105
+ spinner = ora({ text: `Generating ${chalk.yellow(toolCalls[tc.index].function.name)} (${toolCalls[tc.index].function.arguments.length} bytes)...`, color: 'yellow', stream: process.stdout }).start();
106
+ } else {
107
+ spinner.text = `Generating ${chalk.yellow(toolCalls[tc.index].function.name)} (${toolCalls[tc.index].function.arguments.length} bytes)...`;
108
+ }
109
+ }
110
+ }
111
+ }
112
+ }
113
+ if (spinner.isSpinning) spinner.stop();
114
+
115
+ if (chunkResponse) {
116
+ if (this.config.useMarkedTerminal) printMarkdown(chunkResponse);
117
+ this.messages.push({ role: 'assistant', content: chunkResponse });
118
+ }
119
+
120
+ toolCalls = toolCalls.filter(Boolean);
121
+
122
+ if (toolCalls.length === 0) {
123
+ console.log();
124
+ break;
125
+ }
126
+
127
+ this.messages.push({
128
+ role: 'assistant',
129
+ tool_calls: toolCalls,
130
+ content: chunkResponse || null
131
+ });
132
+
133
+ for (const call of toolCalls) {
134
+ if (this.config.isApiMode && this.onToolStart) {
135
+ this.onToolStart(call.function.name);
136
+ }
137
+ console.log(chalk.yellow(`\n[Banana Calling Tool: ${call.function.name}]`));
138
+ let args = {};
139
+ try {
140
+ args = JSON.parse(call.function.arguments);
141
+ } catch (e) { }
142
+
143
+ const res = await executeTool(call.function.name, args, this.config);
144
+ if (this.config.isApiMode && this.onToolEnd) {
145
+ this.onToolEnd(res);
146
+ }
147
+ if (this.config.debug) {
148
+ console.log(chalk.gray(`[DEBUG] Tool Result: ${typeof res === 'string' ? res : JSON.stringify(res, null, 2)}`));
149
+ }
150
+ console.log(chalk.yellow(`[Tool Result Received]\n`));
151
+
152
+ this.messages.push({
153
+ role: 'tool',
154
+ tool_call_id: call.id,
155
+ content: typeof res === 'string' ? res : JSON.stringify(res)
156
+ });
157
+ }
158
+
159
+ spinner = ora({ text: 'Processing tool results...', color: 'yellow', stream: process.stdout }).start();
160
+ }
161
+
162
+ return finalResponse;
163
+ } catch (err) {
164
+ if (spinner && spinner.isSpinning) spinner.stop();
165
+ console.error(chalk.red(`OpenRouter Runtime Error: ${err.message}`));
166
+ return `Error: ${err.message}`;
167
+ }
168
+ }
169
+ }
package/src.zip ADDED
Binary file