@banaxi/banana-code 1.0.5 → 1.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +3 -1
- package/src/config.js +26 -4
- package/src/constants.js +11 -0
- package/src/index.js +148 -14
- package/src/providers/claude.js +13 -2
- package/src/providers/gemini.js +15 -2
- package/src/providers/ollama.js +13 -1
- package/src/providers/ollamaCloud.js +107 -0
- package/src/providers/openai.js +37 -17
- package/src/tools/execCommand.js +6 -2
- package/src/utils/markdown.js +21 -0
- package/src/utils/workspace.js +30 -0
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@banaxi/banana-code",
|
|
3
|
-
"version": "1.0
|
|
3
|
+
"version": "1.1.0",
|
|
4
4
|
"description": "🍌 BananaCode",
|
|
5
5
|
"type": "module",
|
|
6
6
|
"license": "GPL-3.0-or-later",
|
|
@@ -21,6 +21,8 @@
|
|
|
21
21
|
"chalk": "^5.4.1",
|
|
22
22
|
"diff": "^8.0.4",
|
|
23
23
|
"glob": "13.0.6",
|
|
24
|
+
"marked": "^15.0.12",
|
|
25
|
+
"marked-terminal": "^7.3.0",
|
|
24
26
|
"open": "^11.0.0",
|
|
25
27
|
"openai": "^4.79.1",
|
|
26
28
|
"ora": "^8.1.1"
|
package/src/config.js
CHANGED
|
@@ -6,7 +6,7 @@ import { execSync } from 'child_process';
|
|
|
6
6
|
import fsSync from 'fs';
|
|
7
7
|
import chalk from 'chalk';
|
|
8
8
|
|
|
9
|
-
import { GEMINI_MODELS, CLAUDE_MODELS, OPENAI_MODELS, CODEX_MODELS } from './constants.js';
|
|
9
|
+
import { GEMINI_MODELS, CLAUDE_MODELS, OPENAI_MODELS, CODEX_MODELS, OLLAMA_CLOUD_MODELS } from './constants.js';
|
|
10
10
|
|
|
11
11
|
const CONFIG_DIR = path.join(os.homedir(), '.config', 'banana-code');
|
|
12
12
|
const CONFIG_FILE = path.join(CONFIG_DIR, 'config.json');
|
|
@@ -41,10 +41,31 @@ export async function setupProvider(provider, config = {}) {
|
|
|
41
41
|
default: config.apiKey
|
|
42
42
|
});
|
|
43
43
|
config.model = await select({
|
|
44
|
-
message: 'Select a
|
|
45
|
-
choices:
|
|
44
|
+
message: 'Select a model:',
|
|
45
|
+
choices: OPENAI_MODELS
|
|
46
46
|
});
|
|
47
|
-
|
|
47
|
+
} else if (provider === 'ollama_cloud') {
|
|
48
|
+
config.apiKey = await input({
|
|
49
|
+
message: 'Enter your OLLAMA_API_KEY (from ollama.com):',
|
|
50
|
+
default: config.apiKey
|
|
51
|
+
});
|
|
52
|
+
|
|
53
|
+
const choices = [...OLLAMA_CLOUD_MODELS, { name: chalk.magenta('✎ Enter custom model ID...'), value: 'CUSTOM_ID' }];
|
|
54
|
+
let selectedModel = await select({
|
|
55
|
+
message: 'Select an Ollama Cloud model:',
|
|
56
|
+
choices,
|
|
57
|
+
loop: false,
|
|
58
|
+
pageSize: Math.max(choices.length, 15)
|
|
59
|
+
});
|
|
60
|
+
|
|
61
|
+
if (selectedModel === 'CUSTOM_ID') {
|
|
62
|
+
selectedModel = await input({
|
|
63
|
+
message: 'Enter the exact model ID (e.g., gemma3:27b-cloud):',
|
|
64
|
+
validate: (v) => v.trim().length > 0 || 'Model ID cannot be empty'
|
|
65
|
+
});
|
|
66
|
+
}
|
|
67
|
+
config.model = selectedModel;
|
|
68
|
+
} else if (provider === 'ollama') {
|
|
48
69
|
config.apiKey = await input({
|
|
49
70
|
message: 'Enter your ANTHROPIC_API_KEY:',
|
|
50
71
|
default: config.apiKey
|
|
@@ -137,6 +158,7 @@ async function runSetupWizard() {
|
|
|
137
158
|
{ name: 'Google Gemini', value: 'gemini' },
|
|
138
159
|
{ name: 'Anthropic Claude', value: 'claude' },
|
|
139
160
|
{ name: 'OpenAI', value: 'openai' },
|
|
161
|
+
{ name: 'Ollama Cloud', value: 'ollama_cloud' },
|
|
140
162
|
{ name: 'Ollama (Local)', value: 'ollama' }
|
|
141
163
|
]
|
|
142
164
|
});
|
package/src/constants.js
CHANGED
|
@@ -19,6 +19,17 @@ export const OPENAI_MODELS = [
|
|
|
19
19
|
{ name: 'GPT-5.3 Instant', value: 'gpt-5.3-instant' }
|
|
20
20
|
];
|
|
21
21
|
|
|
22
|
+
export const OLLAMA_CLOUD_MODELS = [
|
|
23
|
+
{ name: 'Kimi K2 Thinking (Cloud)', value: 'kimi-k2-thinking:cloud' },
|
|
24
|
+
{ name: 'Kimi K2.5 (Cloud)', value: 'kimi-k2.5:cloud' },
|
|
25
|
+
{ name: 'Qwen 3.5 397B (Cloud)', value: 'qwen3.5:397b-cloud' },
|
|
26
|
+
{ name: 'DeepSeek V3.2 (Cloud)', value: 'deepseek-v3.2:cloud' },
|
|
27
|
+
{ name: 'GLM-5 (Cloud)', value: 'glm-5:cloud' },
|
|
28
|
+
{ name: 'MiniMax M2.7 (Cloud)', value: 'minimax-m2.7:cloud' },
|
|
29
|
+
{ name: 'Llama 3.3 70B (Cloud)', value: 'llama3.3:cloud' },
|
|
30
|
+
{ name: 'Llama 3.1 405B (Cloud)', value: 'llama3.1:405b-cloud' }
|
|
31
|
+
];
|
|
32
|
+
|
|
22
33
|
export const CODEX_MODELS = [
|
|
23
34
|
{ name: 'GPT-5.4 (Newest)', value: 'gpt-5.4' },
|
|
24
35
|
{ name: 'GPT-5.3 Codex', value: 'gpt-5.3-codex' },
|
package/src/index.js
CHANGED
|
@@ -8,12 +8,17 @@ import { GeminiProvider } from './providers/gemini.js';
|
|
|
8
8
|
import { ClaudeProvider } from './providers/claude.js';
|
|
9
9
|
import { OpenAIProvider } from './providers/openai.js';
|
|
10
10
|
import { OllamaProvider } from './providers/ollama.js';
|
|
11
|
+
import { OllamaCloudProvider } from './providers/ollamaCloud.js';
|
|
11
12
|
|
|
12
13
|
import { loadSession, saveSession, generateSessionId, getLatestSessionId, listSessions } from './sessions.js';
|
|
14
|
+
import { printMarkdown } from './utils/markdown.js';
|
|
13
15
|
|
|
14
16
|
let config;
|
|
15
17
|
let providerInstance;
|
|
16
18
|
let currentSessionId;
|
|
19
|
+
const commandHistory = [];
|
|
20
|
+
let historyIndex = -1;
|
|
21
|
+
let currentInputSaved = '';
|
|
17
22
|
|
|
18
23
|
function createProvider(overrideConfig = null) {
|
|
19
24
|
const activeConfig = overrideConfig || config;
|
|
@@ -21,6 +26,7 @@ function createProvider(overrideConfig = null) {
|
|
|
21
26
|
case 'gemini': return new GeminiProvider(activeConfig);
|
|
22
27
|
case 'claude': return new ClaudeProvider(activeConfig);
|
|
23
28
|
case 'openai': return new OpenAIProvider(activeConfig);
|
|
29
|
+
case 'ollama_cloud': return new OllamaCloudProvider(activeConfig);
|
|
24
30
|
case 'ollama': return new OllamaProvider(activeConfig);
|
|
25
31
|
default:
|
|
26
32
|
console.log(chalk.red(`Unknown provider: ${activeConfig.provider}. Defaulting to Ollama.`));
|
|
@@ -43,12 +49,13 @@ async function handleSlashCommand(command) {
|
|
|
43
49
|
{ name: 'Google Gemini', value: 'gemini' },
|
|
44
50
|
{ name: 'Anthropic Claude', value: 'claude' },
|
|
45
51
|
{ name: 'OpenAI', value: 'openai' },
|
|
52
|
+
{ name: 'Ollama Cloud', value: 'ollama_cloud' },
|
|
46
53
|
{ name: 'Ollama (Local)', value: 'ollama' }
|
|
47
54
|
]
|
|
48
55
|
});
|
|
49
56
|
}
|
|
50
57
|
|
|
51
|
-
if (['gemini', 'claude', 'openai', 'ollama'].includes(newProv)) {
|
|
58
|
+
if (['gemini', 'claude', 'openai', 'ollama_cloud', 'ollama'].includes(newProv)) {
|
|
52
59
|
// Use the shared setup logic to get keys/models
|
|
53
60
|
config = await setupProvider(newProv, config);
|
|
54
61
|
await saveConfig(config);
|
|
@@ -63,13 +70,15 @@ async function handleSlashCommand(command) {
|
|
|
63
70
|
if (!newModel) {
|
|
64
71
|
// Interactive selection
|
|
65
72
|
const { select } = await import('@inquirer/prompts');
|
|
66
|
-
const { GEMINI_MODELS, CLAUDE_MODELS, OPENAI_MODELS, CODEX_MODELS } = await import('./constants.js');
|
|
73
|
+
const { GEMINI_MODELS, CLAUDE_MODELS, OPENAI_MODELS, CODEX_MODELS, OLLAMA_CLOUD_MODELS } = await import('./constants.js');
|
|
67
74
|
|
|
68
75
|
let choices = [];
|
|
69
76
|
if (config.provider === 'gemini') choices = GEMINI_MODELS;
|
|
70
77
|
else if (config.provider === 'claude') choices = CLAUDE_MODELS;
|
|
71
78
|
else if (config.provider === 'openai') {
|
|
72
79
|
choices = config.authType === 'oauth' ? CODEX_MODELS : OPENAI_MODELS;
|
|
80
|
+
} else if (config.provider === 'ollama_cloud') {
|
|
81
|
+
choices = OLLAMA_CLOUD_MODELS;
|
|
73
82
|
} else if (config.provider === 'ollama') {
|
|
74
83
|
try {
|
|
75
84
|
const response = await fetch('http://localhost:11434/api/tags');
|
|
@@ -82,10 +91,25 @@ async function handleSlashCommand(command) {
|
|
|
82
91
|
}
|
|
83
92
|
|
|
84
93
|
if (choices.length > 0) {
|
|
94
|
+
const finalChoices = [...choices];
|
|
95
|
+
if (config.provider === 'ollama_cloud') {
|
|
96
|
+
finalChoices.push({ name: chalk.magenta('✎ Enter custom model ID...'), value: 'CUSTOM_ID' });
|
|
97
|
+
}
|
|
98
|
+
|
|
85
99
|
newModel = await select({
|
|
86
100
|
message: 'Select a model:',
|
|
87
|
-
choices
|
|
101
|
+
choices: finalChoices,
|
|
102
|
+
loop: false,
|
|
103
|
+
pageSize: Math.max(finalChoices.length, 15)
|
|
88
104
|
});
|
|
105
|
+
|
|
106
|
+
if (newModel === 'CUSTOM_ID') {
|
|
107
|
+
const { input } = await import('@inquirer/prompts');
|
|
108
|
+
newModel = await input({
|
|
109
|
+
message: 'Enter the exact model ID (e.g., gemma3:27b-cloud):',
|
|
110
|
+
validate: (v) => v.trim().length > 0 || 'Model ID cannot be empty'
|
|
111
|
+
});
|
|
112
|
+
}
|
|
89
113
|
}
|
|
90
114
|
}
|
|
91
115
|
|
|
@@ -161,6 +185,30 @@ async function handleSlashCommand(command) {
|
|
|
161
185
|
providerInstance = createProvider(); // Re-init to update tools
|
|
162
186
|
console.log(chalk.green(`Beta tools updated: ${enabledBetaTools.join(', ') || 'none'}`));
|
|
163
187
|
break;
|
|
188
|
+
case '/settings':
|
|
189
|
+
const { checkbox: settingsCheckbox } = await import('@inquirer/prompts');
|
|
190
|
+
const enabledSettings = await settingsCheckbox({
|
|
191
|
+
message: 'Select features to enable (Space to toggle, Enter to confirm):',
|
|
192
|
+
choices: [
|
|
193
|
+
{
|
|
194
|
+
name: 'Auto-feed workspace files to AI (uses .bananacodeignore / .gitignore)',
|
|
195
|
+
value: 'autoFeedWorkspace',
|
|
196
|
+
checked: config.autoFeedWorkspace || false
|
|
197
|
+
},
|
|
198
|
+
{
|
|
199
|
+
name: 'Use syntax highlighting for AI output (requires waiting for full response)',
|
|
200
|
+
value: 'useMarkedTerminal',
|
|
201
|
+
checked: config.useMarkedTerminal || false
|
|
202
|
+
}
|
|
203
|
+
]
|
|
204
|
+
});
|
|
205
|
+
|
|
206
|
+
config.autoFeedWorkspace = enabledSettings.includes('autoFeedWorkspace');
|
|
207
|
+
config.useMarkedTerminal = enabledSettings.includes('useMarkedTerminal');
|
|
208
|
+
await saveConfig(config);
|
|
209
|
+
providerInstance = createProvider(); // Re-init to update tools/config
|
|
210
|
+
console.log(chalk.green(`Settings updated.`));
|
|
211
|
+
break;
|
|
164
212
|
case '/debug':
|
|
165
213
|
config.debug = !config.debug;
|
|
166
214
|
await saveConfig(config);
|
|
@@ -190,6 +238,7 @@ Available commands:
|
|
|
190
238
|
/context - Show current context window size
|
|
191
239
|
/permissions - List session-approved permissions
|
|
192
240
|
/beta - Manage beta features and tools
|
|
241
|
+
/settings - Manage app settings (workspace auto-feed, etc)
|
|
193
242
|
/debug - Toggle debug mode (show tool results)
|
|
194
243
|
/help - Show all commands
|
|
195
244
|
/exit - Quit Banana Code
|
|
@@ -378,6 +427,10 @@ function promptUser() {
|
|
|
378
427
|
|
|
379
428
|
if (str === '\r' || str === '\n') { // Enter
|
|
380
429
|
exitRequested = false;
|
|
430
|
+
if (inputBuffer.trim() && inputBuffer !== commandHistory[commandHistory.length - 1]) {
|
|
431
|
+
commandHistory.push(inputBuffer);
|
|
432
|
+
}
|
|
433
|
+
historyIndex = -1;
|
|
381
434
|
resolve(inputBuffer);
|
|
382
435
|
return;
|
|
383
436
|
}
|
|
@@ -411,6 +464,33 @@ function promptUser() {
|
|
|
411
464
|
return;
|
|
412
465
|
}
|
|
413
466
|
|
|
467
|
+
if (str === '\x1b[A') { // Arrow Up
|
|
468
|
+
if (historyIndex === -1) {
|
|
469
|
+
currentInputSaved = inputBuffer;
|
|
470
|
+
}
|
|
471
|
+
if (historyIndex < commandHistory.length - 1) {
|
|
472
|
+
historyIndex++;
|
|
473
|
+
inputBuffer = commandHistory[commandHistory.length - 1 - historyIndex];
|
|
474
|
+
cursorPos = inputBuffer.length;
|
|
475
|
+
drawPromptBox(inputBuffer, cursorPos);
|
|
476
|
+
}
|
|
477
|
+
return;
|
|
478
|
+
}
|
|
479
|
+
|
|
480
|
+
if (str === '\x1b[B') { // Arrow Down
|
|
481
|
+
if (historyIndex > -1) {
|
|
482
|
+
historyIndex--;
|
|
483
|
+
if (historyIndex === -1) {
|
|
484
|
+
inputBuffer = currentInputSaved;
|
|
485
|
+
} else {
|
|
486
|
+
inputBuffer = commandHistory[commandHistory.length - 1 - historyIndex];
|
|
487
|
+
}
|
|
488
|
+
cursorPos = inputBuffer.length;
|
|
489
|
+
drawPromptBox(inputBuffer, cursorPos);
|
|
490
|
+
}
|
|
491
|
+
return;
|
|
492
|
+
}
|
|
493
|
+
|
|
414
494
|
if (str === '\x1b[H' || str === '\x01') { // Home / Ctrl+A
|
|
415
495
|
cursorPos = 0; drawPromptBox(inputBuffer, cursorPos);
|
|
416
496
|
return;
|
|
@@ -462,20 +542,23 @@ async function main() {
|
|
|
462
542
|
for (const msg of session.messages) {
|
|
463
543
|
if (msg.role === 'system') continue;
|
|
464
544
|
|
|
465
|
-
if (
|
|
545
|
+
if (config.provider === 'gemini') {
|
|
466
546
|
if (msg.role === 'user') {
|
|
467
547
|
if (msg.parts[0]?.text) console.log(`${chalk.yellow('🍌 >')} ${msg.parts[0].text}`);
|
|
468
548
|
else if (msg.parts[0]?.functionResponse) {
|
|
469
|
-
console.log(chalk.yellow(`[Tool Result
|
|
549
|
+
console.log(chalk.yellow(`[Tool Result Received]`));
|
|
470
550
|
}
|
|
471
551
|
} else if (msg.role === 'model') {
|
|
472
552
|
msg.parts.forEach(p => {
|
|
473
|
-
if (p.text)
|
|
553
|
+
if (p.text) {
|
|
554
|
+
if (config.useMarkedTerminal) printMarkdown(p.text);
|
|
555
|
+
else process.stdout.write(chalk.cyan(p.text));
|
|
556
|
+
}
|
|
474
557
|
if (p.functionCall) console.log(chalk.yellow(`\n[Banana Calling Tool: ${p.functionCall.name}]`));
|
|
475
558
|
});
|
|
476
559
|
console.log();
|
|
477
560
|
}
|
|
478
|
-
} else if (
|
|
561
|
+
} else if (config.provider === 'claude') {
|
|
479
562
|
if (msg.role === 'user') {
|
|
480
563
|
if (typeof msg.content === 'string') console.log(`${chalk.yellow('🍌 >')} ${msg.content}`);
|
|
481
564
|
else {
|
|
@@ -484,26 +567,36 @@ async function main() {
|
|
|
484
567
|
});
|
|
485
568
|
}
|
|
486
569
|
} else if (msg.role === 'assistant') {
|
|
487
|
-
if (typeof msg.content === 'string')
|
|
488
|
-
|
|
570
|
+
if (typeof msg.content === 'string') {
|
|
571
|
+
if (config.useMarkedTerminal) printMarkdown(msg.content);
|
|
572
|
+
else process.stdout.write(chalk.cyan(msg.content));
|
|
573
|
+
} else {
|
|
489
574
|
msg.content.forEach(c => {
|
|
490
|
-
if (c.type === 'text')
|
|
575
|
+
if (c.type === 'text') {
|
|
576
|
+
if (config.useMarkedTerminal) printMarkdown(c.text);
|
|
577
|
+
else process.stdout.write(chalk.cyan(c.text));
|
|
578
|
+
}
|
|
491
579
|
if (c.type === 'tool_use') console.log(chalk.yellow(`\n[Banana Calling Tool: ${c.name}]`));
|
|
492
580
|
});
|
|
493
|
-
console.log();
|
|
494
581
|
}
|
|
582
|
+
console.log();
|
|
495
583
|
}
|
|
496
584
|
} else {
|
|
497
585
|
// OpenAI, Ollama
|
|
498
586
|
if (msg.role === 'user') {
|
|
499
587
|
console.log(`${chalk.yellow('🍌 >')} ${msg.content}`);
|
|
500
588
|
} else if (msg.role === 'assistant' || msg.role === 'output_text') {
|
|
501
|
-
if (msg.content)
|
|
589
|
+
if (msg.content) {
|
|
590
|
+
if (config.useMarkedTerminal) printMarkdown(msg.content);
|
|
591
|
+
else process.stdout.write(chalk.cyan(msg.content));
|
|
592
|
+
}
|
|
502
593
|
if (msg.tool_calls) {
|
|
503
594
|
msg.tool_calls.forEach(tc => {
|
|
504
|
-
|
|
595
|
+
const name = tc.function ? tc.function.name : tc.name;
|
|
596
|
+
console.log(chalk.yellow(`\n[Banana Calling Tool: ${name}]`));
|
|
505
597
|
});
|
|
506
598
|
}
|
|
599
|
+
console.log();
|
|
507
600
|
} else if (msg.role === 'tool') {
|
|
508
601
|
console.log(chalk.yellow(`[Tool Result Received]`));
|
|
509
602
|
}
|
|
@@ -535,8 +628,49 @@ async function main() {
|
|
|
535
628
|
if (trimmed.startsWith('/')) {
|
|
536
629
|
await handleSlashCommand(trimmed);
|
|
537
630
|
} else {
|
|
631
|
+
let finalInput = trimmed;
|
|
632
|
+
const fileMentions = trimmed.match(/@@?([\w/.-]+)/g);
|
|
633
|
+
if (fileMentions) {
|
|
634
|
+
let addedFiles = 0;
|
|
635
|
+
const fsSync = await import('fs');
|
|
636
|
+
const path = await import('path');
|
|
637
|
+
for (const mention of fileMentions) {
|
|
638
|
+
let filepath;
|
|
639
|
+
if (mention.startsWith('@@')) {
|
|
640
|
+
filepath = mention.substring(2);
|
|
641
|
+
} else {
|
|
642
|
+
filepath = path.join(process.cwd(), mention.substring(1));
|
|
643
|
+
}
|
|
644
|
+
|
|
645
|
+
try {
|
|
646
|
+
const stat = fsSync.statSync(filepath);
|
|
647
|
+
if (stat.isFile()) {
|
|
648
|
+
const content = fsSync.readFileSync(filepath, 'utf8');
|
|
649
|
+
finalInput += `\n\n--- File Context: ${filepath} ---\n${content}\n--- End of ${filepath} ---`;
|
|
650
|
+
addedFiles++;
|
|
651
|
+
}
|
|
652
|
+
} catch (e) {
|
|
653
|
+
console.log(chalk.yellow(`Warning: Could not read file for mention ${mention}`));
|
|
654
|
+
}
|
|
655
|
+
}
|
|
656
|
+
if (addedFiles > 0) {
|
|
657
|
+
console.log(chalk.gray(`(Attached ${addedFiles} file(s) to context)`));
|
|
658
|
+
}
|
|
659
|
+
}
|
|
660
|
+
|
|
661
|
+
if (config.autoFeedWorkspace) {
|
|
662
|
+
const { getWorkspaceTree } = await import('./utils/workspace.js');
|
|
663
|
+
const tree = await getWorkspaceTree();
|
|
664
|
+
const { getSystemPrompt } = await import('./prompt.js');
|
|
665
|
+
let newSysPrompt = getSystemPrompt(config);
|
|
666
|
+
newSysPrompt += `\n\n--- Workspace File Tree ---\n${tree}\n--- End of Tree ---`;
|
|
667
|
+
if (typeof providerInstance.updateSystemPrompt === 'function') {
|
|
668
|
+
providerInstance.updateSystemPrompt(newSysPrompt);
|
|
669
|
+
}
|
|
670
|
+
}
|
|
671
|
+
|
|
538
672
|
process.stdout.write(chalk.cyan('✦ '));
|
|
539
|
-
await providerInstance.sendMessage(
|
|
673
|
+
await providerInstance.sendMessage(finalInput);
|
|
540
674
|
console.log(); // Extra newline after AI response
|
|
541
675
|
// Save session after AI message
|
|
542
676
|
await saveSession(currentSessionId, {
|
package/src/providers/claude.js
CHANGED
|
@@ -3,9 +3,11 @@ import { getAvailableTools, executeTool } from '../tools/registry.js';
|
|
|
3
3
|
import chalk from 'chalk';
|
|
4
4
|
import ora from 'ora';
|
|
5
5
|
import { getSystemPrompt } from '../prompt.js';
|
|
6
|
+
import { printMarkdown } from '../utils/markdown.js';
|
|
6
7
|
|
|
7
8
|
export class ClaudeProvider {
|
|
8
9
|
constructor(config) {
|
|
10
|
+
this.config = config;
|
|
9
11
|
this.anthropic = new Anthropic({ apiKey: config.apiKey });
|
|
10
12
|
this.modelName = config.model || 'claude-3-7-sonnet-20250219';
|
|
11
13
|
this.messages = [];
|
|
@@ -17,6 +19,10 @@ export class ClaudeProvider {
|
|
|
17
19
|
this.systemPrompt = getSystemPrompt(config);
|
|
18
20
|
}
|
|
19
21
|
|
|
22
|
+
updateSystemPrompt(newPrompt) {
|
|
23
|
+
this.systemPrompt = newPrompt;
|
|
24
|
+
}
|
|
25
|
+
|
|
20
26
|
async sendMessage(message) {
|
|
21
27
|
this.messages.push({ role: 'user', content: message });
|
|
22
28
|
|
|
@@ -47,8 +53,10 @@ export class ClaudeProvider {
|
|
|
47
53
|
|
|
48
54
|
for await (const event of stream) {
|
|
49
55
|
if (event.type === 'content_block_delta' && event.delta.type === 'text_delta') {
|
|
50
|
-
if (spinner.isSpinning) spinner.stop();
|
|
51
|
-
|
|
56
|
+
if (spinner.isSpinning && !this.config.useMarkedTerminal) spinner.stop();
|
|
57
|
+
if (!this.config.useMarkedTerminal) {
|
|
58
|
+
process.stdout.write(chalk.cyan(event.delta.text));
|
|
59
|
+
}
|
|
52
60
|
chunkResponse += event.delta.text;
|
|
53
61
|
finalResponse += event.delta.text;
|
|
54
62
|
} else if (event.type === 'content_block_start' && event.content_block.type === 'tool_use') {
|
|
@@ -76,8 +84,11 @@ export class ClaudeProvider {
|
|
|
76
84
|
}
|
|
77
85
|
}
|
|
78
86
|
|
|
87
|
+
if (spinner.isSpinning) spinner.stop();
|
|
88
|
+
|
|
79
89
|
const newContent = [];
|
|
80
90
|
if (chunkResponse) {
|
|
91
|
+
if (this.config.useMarkedTerminal) printMarkdown(chunkResponse);
|
|
81
92
|
newContent.push({ type: 'text', text: chunkResponse });
|
|
82
93
|
}
|
|
83
94
|
|
package/src/providers/gemini.js
CHANGED
|
@@ -2,6 +2,7 @@ import { getAvailableTools, executeTool } from '../tools/registry.js';
|
|
|
2
2
|
import chalk from 'chalk';
|
|
3
3
|
import ora from 'ora';
|
|
4
4
|
import { getSystemPrompt } from '../prompt.js';
|
|
5
|
+
import { printMarkdown } from '../utils/markdown.js';
|
|
5
6
|
|
|
6
7
|
export class GeminiProvider {
|
|
7
8
|
constructor(config) {
|
|
@@ -17,6 +18,10 @@ export class GeminiProvider {
|
|
|
17
18
|
this.systemPrompt = getSystemPrompt(config);
|
|
18
19
|
}
|
|
19
20
|
|
|
21
|
+
updateSystemPrompt(newPrompt) {
|
|
22
|
+
this.systemPrompt = newPrompt;
|
|
23
|
+
}
|
|
24
|
+
|
|
20
25
|
async sendMessage(message) {
|
|
21
26
|
this.messages.push({ role: 'user', parts: [{ text: message }] });
|
|
22
27
|
|
|
@@ -25,6 +30,7 @@ export class GeminiProvider {
|
|
|
25
30
|
|
|
26
31
|
try {
|
|
27
32
|
while (true) {
|
|
33
|
+
let currentTurnText = '';
|
|
28
34
|
const response = await fetch(`https://generativelanguage.googleapis.com/v1beta/models/${this.modelName}:streamGenerateContent?alt=sse&key=${this.apiKey}`, {
|
|
29
35
|
method: 'POST',
|
|
30
36
|
headers: { 'Content-Type': 'application/json' },
|
|
@@ -74,9 +80,12 @@ export class GeminiProvider {
|
|
|
74
80
|
if (content && content.parts) {
|
|
75
81
|
for (const part of content.parts) {
|
|
76
82
|
if (part.text) {
|
|
77
|
-
if (spinner && spinner.isSpinning) spinner.stop();
|
|
78
|
-
|
|
83
|
+
if (spinner && spinner.isSpinning && !this.config.useMarkedTerminal) spinner.stop();
|
|
84
|
+
if (!this.config.useMarkedTerminal) {
|
|
85
|
+
process.stdout.write(chalk.cyan(part.text));
|
|
86
|
+
}
|
|
79
87
|
responseText += part.text;
|
|
88
|
+
currentTurnText += part.text;
|
|
80
89
|
|
|
81
90
|
// Aggregate sequential text parts
|
|
82
91
|
let lastPart = aggregatedParts[aggregatedParts.length - 1];
|
|
@@ -109,6 +118,10 @@ export class GeminiProvider {
|
|
|
109
118
|
|
|
110
119
|
if (spinner && spinner.isSpinning) spinner.stop();
|
|
111
120
|
|
|
121
|
+
if (currentTurnText && this.config.useMarkedTerminal) {
|
|
122
|
+
printMarkdown(currentTurnText);
|
|
123
|
+
}
|
|
124
|
+
|
|
112
125
|
if (aggregatedParts.length === 0) break;
|
|
113
126
|
|
|
114
127
|
// Push exact unmutated model response back to history
|
package/src/providers/ollama.js
CHANGED
|
@@ -2,6 +2,7 @@ import { getAvailableTools, executeTool } from '../tools/registry.js';
|
|
|
2
2
|
import chalk from 'chalk';
|
|
3
3
|
import ora from 'ora';
|
|
4
4
|
import { getSystemPrompt } from '../prompt.js';
|
|
5
|
+
import { printMarkdown } from '../utils/markdown.js';
|
|
5
6
|
|
|
6
7
|
export class OllamaProvider {
|
|
7
8
|
constructor(config) {
|
|
@@ -20,6 +21,13 @@ export class OllamaProvider {
|
|
|
20
21
|
this.URL = 'http://localhost:11434/api/chat';
|
|
21
22
|
}
|
|
22
23
|
|
|
24
|
+
updateSystemPrompt(newPrompt) {
|
|
25
|
+
this.systemPrompt = newPrompt;
|
|
26
|
+
if (this.messages.length > 0 && this.messages[0].role === 'system') {
|
|
27
|
+
this.messages[0].content = newPrompt;
|
|
28
|
+
}
|
|
29
|
+
}
|
|
30
|
+
|
|
23
31
|
async sendMessage(message) {
|
|
24
32
|
this.messages.push({ role: 'user', content: message });
|
|
25
33
|
|
|
@@ -49,7 +57,11 @@ export class OllamaProvider {
|
|
|
49
57
|
const messageObj = data.message;
|
|
50
58
|
|
|
51
59
|
if (messageObj.content) {
|
|
52
|
-
|
|
60
|
+
if (this.config.useMarkedTerminal) {
|
|
61
|
+
printMarkdown(messageObj.content);
|
|
62
|
+
} else {
|
|
63
|
+
process.stdout.write(chalk.cyan(messageObj.content));
|
|
64
|
+
}
|
|
53
65
|
finalResponse += messageObj.content;
|
|
54
66
|
}
|
|
55
67
|
|
|
@@ -0,0 +1,107 @@
|
|
|
1
|
+
import { getAvailableTools, executeTool } from '../tools/registry.js';
|
|
2
|
+
import chalk from 'chalk';
|
|
3
|
+
import ora from 'ora';
|
|
4
|
+
import { getSystemPrompt } from '../prompt.js';
|
|
5
|
+
import { printMarkdown } from '../utils/markdown.js';
|
|
6
|
+
|
|
7
|
+
export class OllamaCloudProvider {
|
|
8
|
+
constructor(config) {
|
|
9
|
+
this.config = config;
|
|
10
|
+
this.apiKey = config.apiKey;
|
|
11
|
+
this.modelName = config.model || 'llama3.3';
|
|
12
|
+
this.systemPrompt = getSystemPrompt(config);
|
|
13
|
+
this.messages = [{ role: 'system', content: this.systemPrompt }];
|
|
14
|
+
this.tools = getAvailableTools(config).map(t => ({
|
|
15
|
+
type: 'function',
|
|
16
|
+
function: {
|
|
17
|
+
name: t.name,
|
|
18
|
+
description: t.description,
|
|
19
|
+
parameters: t.parameters
|
|
20
|
+
}
|
|
21
|
+
}));
|
|
22
|
+
this.URL = 'https://ollama.com/api/chat';
|
|
23
|
+
}
|
|
24
|
+
|
|
25
|
+
updateSystemPrompt(newPrompt) {
|
|
26
|
+
this.systemPrompt = newPrompt;
|
|
27
|
+
if (this.messages.length > 0 && this.messages[0].role === 'system') {
|
|
28
|
+
this.messages[0].content = newPrompt;
|
|
29
|
+
}
|
|
30
|
+
}
|
|
31
|
+
|
|
32
|
+
async sendMessage(message) {
|
|
33
|
+
this.messages.push({ role: 'user', content: message });
|
|
34
|
+
|
|
35
|
+
let spinner = ora({ text: 'Thinking (Cloud)...', color: 'yellow', stream: process.stdout }).start();
|
|
36
|
+
let finalResponse = '';
|
|
37
|
+
|
|
38
|
+
try {
|
|
39
|
+
while (true) {
|
|
40
|
+
const response = await fetch(this.URL, {
|
|
41
|
+
method: 'POST',
|
|
42
|
+
headers: {
|
|
43
|
+
'Content-Type': 'application/json',
|
|
44
|
+
'Authorization': `Bearer ${this.apiKey}`
|
|
45
|
+
},
|
|
46
|
+
body: JSON.stringify({
|
|
47
|
+
model: this.modelName,
|
|
48
|
+
messages: this.messages,
|
|
49
|
+
tools: this.tools.length > 0 ? this.tools : undefined,
|
|
50
|
+
stream: false // Cloud API sometimes prefers non-streaming or different SSE formats
|
|
51
|
+
})
|
|
52
|
+
});
|
|
53
|
+
|
|
54
|
+
if (!response.ok) {
|
|
55
|
+
const errorText = await response.text();
|
|
56
|
+
spinner.stop();
|
|
57
|
+
throw new Error(`HTTP ${response.status}: ${errorText}`);
|
|
58
|
+
}
|
|
59
|
+
|
|
60
|
+
const data = await response.json();
|
|
61
|
+
spinner.stop();
|
|
62
|
+
|
|
63
|
+
const messageObj = data.message;
|
|
64
|
+
|
|
65
|
+
if (messageObj.content) {
|
|
66
|
+
if (this.config.useMarkedTerminal) {
|
|
67
|
+
printMarkdown(messageObj.content);
|
|
68
|
+
} else {
|
|
69
|
+
process.stdout.write(chalk.cyan(messageObj.content));
|
|
70
|
+
}
|
|
71
|
+
finalResponse += messageObj.content;
|
|
72
|
+
}
|
|
73
|
+
|
|
74
|
+
this.messages.push(messageObj);
|
|
75
|
+
|
|
76
|
+
if (!messageObj.tool_calls || messageObj.tool_calls.length === 0) {
|
|
77
|
+
console.log();
|
|
78
|
+
break;
|
|
79
|
+
}
|
|
80
|
+
|
|
81
|
+
for (const call of messageObj.tool_calls) {
|
|
82
|
+
const fn = call.function;
|
|
83
|
+
console.log(chalk.yellow(`\n[Banana Calling Tool: ${fn.name}]`));
|
|
84
|
+
|
|
85
|
+
let res = await executeTool(fn.name, fn.arguments);
|
|
86
|
+
if (this.config.debug) {
|
|
87
|
+
console.log(chalk.gray(`[DEBUG] Tool Result: ${typeof res === 'string' ? res : JSON.stringify(res, null, 2)}`));
|
|
88
|
+
}
|
|
89
|
+
console.log(chalk.yellow(`[Tool Result Received]\n`));
|
|
90
|
+
|
|
91
|
+
this.messages.push({
|
|
92
|
+
role: 'tool',
|
|
93
|
+
content: typeof res === 'string' ? res : JSON.stringify(res)
|
|
94
|
+
});
|
|
95
|
+
}
|
|
96
|
+
|
|
97
|
+
spinner = ora({ text: 'Processing tool results...', color: 'yellow', stream: process.stdout }).start();
|
|
98
|
+
}
|
|
99
|
+
|
|
100
|
+
return finalResponse;
|
|
101
|
+
} catch (err) {
|
|
102
|
+
if (spinner.isSpinning) spinner.stop();
|
|
103
|
+
console.error(chalk.red(`Ollama Cloud Error: ${err.message}`));
|
|
104
|
+
return `Error: ${err.message}`;
|
|
105
|
+
}
|
|
106
|
+
}
|
|
107
|
+
}
|
package/src/providers/openai.js
CHANGED
|
@@ -6,6 +6,7 @@ import os from 'os';
|
|
|
6
6
|
import path from 'path';
|
|
7
7
|
import fsSync from 'fs';
|
|
8
8
|
import { getSystemPrompt } from '../prompt.js';
|
|
9
|
+
import { printMarkdown } from '../utils/markdown.js';
|
|
9
10
|
|
|
10
11
|
export class OpenAIProvider {
|
|
11
12
|
constructor(config) {
|
|
@@ -26,6 +27,13 @@ export class OpenAIProvider {
|
|
|
26
27
|
}));
|
|
27
28
|
}
|
|
28
29
|
|
|
30
|
+
updateSystemPrompt(newPrompt) {
|
|
31
|
+
this.systemPrompt = newPrompt;
|
|
32
|
+
if (this.messages.length > 0 && this.messages[0].role === 'system') {
|
|
33
|
+
this.messages[0].content = newPrompt;
|
|
34
|
+
}
|
|
35
|
+
}
|
|
36
|
+
|
|
29
37
|
async sendMessage(message) {
|
|
30
38
|
if (this.config.authType === 'oauth') {
|
|
31
39
|
return await this.sendOauthMessage(message);
|
|
@@ -59,8 +67,10 @@ export class OpenAIProvider {
|
|
|
59
67
|
const delta = chunk.choices[0]?.delta;
|
|
60
68
|
|
|
61
69
|
if (delta?.content) {
|
|
62
|
-
if (spinner.isSpinning) spinner.stop();
|
|
63
|
-
|
|
70
|
+
if (spinner.isSpinning && !this.config.useMarkedTerminal) spinner.stop();
|
|
71
|
+
if (!this.config.useMarkedTerminal) {
|
|
72
|
+
process.stdout.write(chalk.cyan(delta.content));
|
|
73
|
+
}
|
|
64
74
|
chunkResponse += delta.content;
|
|
65
75
|
finalResponse += delta.content;
|
|
66
76
|
}
|
|
@@ -87,6 +97,7 @@ export class OpenAIProvider {
|
|
|
87
97
|
if (spinner.isSpinning) spinner.stop();
|
|
88
98
|
|
|
89
99
|
if (chunkResponse) {
|
|
100
|
+
if (this.config.useMarkedTerminal) printMarkdown(chunkResponse);
|
|
90
101
|
this.messages.push({ role: 'assistant', content: chunkResponse });
|
|
91
102
|
}
|
|
92
103
|
|
|
@@ -206,6 +217,18 @@ export class OpenAIProvider {
|
|
|
206
217
|
const backendInput = mapMessagesToBackend(this.messages);
|
|
207
218
|
const backendTools = mapToolsToBackend(this.tools);
|
|
208
219
|
|
|
220
|
+
const payload = {
|
|
221
|
+
model: this.modelName || 'gpt-5.1-codex',
|
|
222
|
+
instructions: this.systemPrompt,
|
|
223
|
+
input: backendInput,
|
|
224
|
+
tools: backendTools,
|
|
225
|
+
store: false,
|
|
226
|
+
stream: true,
|
|
227
|
+
include: ["reasoning.encrypted_content"],
|
|
228
|
+
reasoning: { effort: "medium", summary: "auto" },
|
|
229
|
+
text: { verbosity: "medium" }
|
|
230
|
+
};
|
|
231
|
+
|
|
209
232
|
const response = await fetch('https://chatgpt.com/backend-api/codex/responses', {
|
|
210
233
|
method: 'POST',
|
|
211
234
|
headers: {
|
|
@@ -216,17 +239,7 @@ export class OpenAIProvider {
|
|
|
216
239
|
'originator': 'codex_cli_rs',
|
|
217
240
|
'Accept': 'text/event-stream'
|
|
218
241
|
},
|
|
219
|
-
body: JSON.stringify(
|
|
220
|
-
model: this.modelName || 'gpt-5.1-codex',
|
|
221
|
-
instructions: this.systemPrompt,
|
|
222
|
-
input: backendInput,
|
|
223
|
-
tools: backendTools,
|
|
224
|
-
store: false,
|
|
225
|
-
stream: true,
|
|
226
|
-
include: ["reasoning.encrypted_content"],
|
|
227
|
-
reasoning: { effort: "medium", summary: "auto" },
|
|
228
|
-
text: { verbosity: "medium" }
|
|
229
|
-
})
|
|
242
|
+
body: JSON.stringify(payload)
|
|
230
243
|
});
|
|
231
244
|
|
|
232
245
|
if (!response.ok) {
|
|
@@ -259,10 +272,17 @@ export class OpenAIProvider {
|
|
|
259
272
|
try {
|
|
260
273
|
const data = JSON.parse(currentDataBuffer);
|
|
261
274
|
if (currentEvent === 'response.output_text.delta') {
|
|
262
|
-
if (spinner && spinner.isSpinning) spinner.stop();
|
|
263
|
-
|
|
275
|
+
if (spinner && spinner.isSpinning && !this.config.useMarkedTerminal) spinner.stop();
|
|
276
|
+
if (!this.config.useMarkedTerminal) {
|
|
277
|
+
process.stdout.write(chalk.cyan(data.delta));
|
|
278
|
+
}
|
|
264
279
|
currentChunkResponse += data.delta;
|
|
265
280
|
finalResponse += data.delta;
|
|
281
|
+
} else if (currentEvent === 'response.reasoning.delta' || currentEvent === 'response.reasoning_text.delta' || currentEvent.includes('reasoning.delta')) {
|
|
282
|
+
if (this.config.debug && data.delta) {
|
|
283
|
+
if (spinner && spinner.isSpinning) spinner.stop();
|
|
284
|
+
process.stdout.write(chalk.gray(data.delta));
|
|
285
|
+
}
|
|
266
286
|
} else if (currentEvent === 'response.output_item.added' && data.item?.type === 'function_call') {
|
|
267
287
|
if (spinner && spinner.isSpinning) spinner.stop();
|
|
268
288
|
currentToolCall = {
|
|
@@ -276,7 +296,7 @@ export class OpenAIProvider {
|
|
|
276
296
|
if (!spinner.isSpinning) {
|
|
277
297
|
spinner = ora({ text: `Generating ${chalk.yellow(currentToolCall.name)} (${currentToolCall.arguments.length} bytes)...`, color: 'yellow', stream: process.stdout }).start();
|
|
278
298
|
} else {
|
|
279
|
-
spinner.text = `Generating ${chalk.yellow(currentToolCall.name)} (${currentToolCall.arguments.length} bytes)...`;
|
|
299
|
+
spinner.text = `Generating ${chalk.yellow(currentToolCall.name)} arguments (${currentToolCall.arguments.length} bytes)...`;
|
|
280
300
|
}
|
|
281
301
|
} else if (currentEvent === 'response.output_item.done' && data.item?.type === 'function_call' && currentToolCall) {
|
|
282
302
|
if (spinner && spinner.isSpinning) spinner.stop();
|
|
@@ -322,7 +342,6 @@ export class OpenAIProvider {
|
|
|
322
342
|
currentChunkResponse += data.delta;
|
|
323
343
|
finalResponse += data.delta;
|
|
324
344
|
}
|
|
325
|
-
// Note: tool calls usually don't end exactly at stream end without \n\n
|
|
326
345
|
} catch (e) { }
|
|
327
346
|
}
|
|
328
347
|
}
|
|
@@ -330,6 +349,7 @@ export class OpenAIProvider {
|
|
|
330
349
|
if (spinner.isSpinning) spinner.stop();
|
|
331
350
|
|
|
332
351
|
if (currentChunkResponse) {
|
|
352
|
+
if (this.config.useMarkedTerminal) printMarkdown(currentChunkResponse);
|
|
333
353
|
this.messages.push({ role: 'assistant', content: currentChunkResponse });
|
|
334
354
|
}
|
|
335
355
|
|
package/src/tools/execCommand.js
CHANGED
|
@@ -29,12 +29,16 @@ export async function execCommand({ command, cwd = process.cwd() }) {
|
|
|
29
29
|
|
|
30
30
|
child.on('close', (code) => {
|
|
31
31
|
if (spinner.isSpinning) spinner.stop();
|
|
32
|
-
|
|
32
|
+
let result = `Command exited with code ${code}.\nOutput:\n${output}`;
|
|
33
|
+
if (code !== 0) {
|
|
34
|
+
result += `\n\n[System Note: The command failed with an error. Please analyze the output above and try to fix the issue if it is possible.]`;
|
|
35
|
+
}
|
|
36
|
+
resolve(result);
|
|
33
37
|
});
|
|
34
38
|
|
|
35
39
|
child.on('error', (err) => {
|
|
36
40
|
if (spinner.isSpinning) spinner.stop();
|
|
37
|
-
resolve(`Error executing command: ${err.message}`);
|
|
41
|
+
resolve(`Error executing command: ${err.message}\n\n[System Note: The command failed to execute. Please analyze the error and try to fix the issue if it is possible.]`);
|
|
38
42
|
});
|
|
39
43
|
});
|
|
40
44
|
}
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
import { marked } from 'marked';
import { markedTerminal } from 'marked-terminal';
import chalk from 'chalk';

// Terminal renderer configuration: prominent magenta headings, highlighted
// emphasis and inline code, and a 4-column indent for nested content.
const rendererOptions = {
  firstHeading: chalk.magenta.bold.underline,
  heading: chalk.magenta.bold,
  strong: chalk.yellow.bold,
  em: chalk.italic,
  codespan: chalk.bgRgb(40, 40, 40).yellow,
  tab: 4
};

marked.use(markedTerminal(rendererOptions));

/**
 * Render a markdown string as ANSI-styled text and write it to stdout.
 *
 * @param {string} text - Markdown source to render.
 */
export function printMarkdown(text) {
  process.stdout.write(marked.parse(text));
}
|
|
@@ -0,0 +1,30 @@
|
|
|
1
|
+
import fs from 'fs/promises';
|
|
2
|
+
import path from 'path';
|
|
3
|
+
import { glob } from 'glob';
|
|
4
|
+
|
|
5
|
+
/**
 * Build a newline-separated listing of every file in the current workspace,
 * honouring `.gitignore` and `.bananacodeignore` patterns when present.
 *
 * @returns {Promise<string>} File paths relative to `process.cwd()`, one per
 *   line, or an error description if the directory walk fails.
 */
export async function getWorkspaceTree() {
  let ignores = ['node_modules/**', '.git/**'];

  // Merge patterns from both optional ignore files; a missing file is fine.
  for (const filename of ['.gitignore', '.bananacodeignore']) {
    ignores = ignores.concat(await readIgnorePatterns(filename));
  }

  try {
    const files = await glob('**/*', {
      cwd: process.cwd(),
      ignore: ignores,
      nodir: true,
      dot: true
    });

    return files.join('\n');
  } catch (err) {
    // Walk failures are reported as text so the tool-call result stays usable.
    return `Error reading workspace: ${err.message}`;
  }
}

/**
 * Read an ignore file from the workspace root and return its usable patterns.
 *
 * Directory-style gitignore entries ("dist/") are expanded to "dist/**"
 * because glob's `ignore` option matches glob patterns, not gitignore
 * syntax — a bare "dist/" would never exclude the directory's contents.
 * Other gitignore niceties (negation with "!", anchoring) are not emulated.
 *
 * @param {string} filename - Ignore file name, resolved against `process.cwd()`.
 * @returns {Promise<string[]>} Glob-compatible ignore patterns; empty when
 *   the file is absent or unreadable (best-effort, mirrors original behavior).
 */
async function readIgnorePatterns(filename) {
  try {
    const raw = await fs.readFile(path.join(process.cwd(), filename), 'utf8');
    return raw
      .split('\n')
      .map((line) => line.trim())
      .filter((line) => line && !line.startsWith('#'))
      .map((line) => (line.endsWith('/') ? `${line}**` : line));
  } catch (e) {
    return [];
  }
}
|