@indiccoder/mentis-cli 1.0.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +90 -0
- package/console.log(tick) +0 -0
- package/debug_fs.js +12 -0
- package/dist/checkpoint/CheckpointManager.js +53 -0
- package/dist/config/ConfigManager.js +55 -0
- package/dist/context/ContextManager.js +55 -0
- package/dist/context/RepoMapper.js +112 -0
- package/dist/index.js +12 -0
- package/dist/llm/AnthropicClient.js +70 -0
- package/dist/llm/ModelInterface.js +2 -0
- package/dist/llm/OpenAIClient.js +58 -0
- package/dist/mcp/JsonRpcClient.js +117 -0
- package/dist/mcp/McpClient.js +59 -0
- package/dist/repl/PersistentShell.js +75 -0
- package/dist/repl/ReplManager.js +813 -0
- package/dist/tools/FileTools.js +100 -0
- package/dist/tools/GitTools.js +127 -0
- package/dist/tools/PersistentShellTool.js +30 -0
- package/dist/tools/SearchTools.js +83 -0
- package/dist/tools/Tool.js +2 -0
- package/dist/tools/WebSearchTool.js +60 -0
- package/dist/ui/UIManager.js +40 -0
- package/package.json +63 -0
- package/screenshot_1765779883482_9b30.png +0 -0
- package/scripts/test_features.ts +48 -0
- package/scripts/test_glm.ts +53 -0
- package/scripts/test_models.ts +38 -0
- package/src/checkpoint/CheckpointManager.ts +61 -0
- package/src/config/ConfigManager.ts +77 -0
- package/src/context/ContextManager.ts +63 -0
- package/src/context/RepoMapper.ts +119 -0
- package/src/index.ts +12 -0
- package/src/llm/ModelInterface.ts +47 -0
- package/src/llm/OpenAIClient.ts +64 -0
- package/src/mcp/JsonRpcClient.ts +103 -0
- package/src/mcp/McpClient.ts +75 -0
- package/src/repl/PersistentShell.ts +85 -0
- package/src/repl/ReplManager.ts +842 -0
- package/src/tools/FileTools.ts +89 -0
- package/src/tools/GitTools.ts +113 -0
- package/src/tools/PersistentShellTool.ts +32 -0
- package/src/tools/SearchTools.ts +74 -0
- package/src/tools/Tool.ts +6 -0
- package/src/tools/WebSearchTool.ts +63 -0
- package/src/ui/UIManager.ts +41 -0
- package/tsconfig.json +21 -0
package/src/repl/ReplManager.ts
@@ -0,0 +1,842 @@
import inquirer from 'inquirer';
import chalk from 'chalk';
import ora from 'ora';
import { ConfigManager } from '../config/ConfigManager';
import { ModelClient, ChatMessage } from '../llm/ModelInterface';
import { OpenAIClient } from '../llm/OpenAIClient';

import { ContextManager } from '../context/ContextManager';
import { UIManager } from '../ui/UIManager';
import { WriteFileTool, ReadFileTool, ListDirTool } from '../tools/FileTools';
import { SearchFileTool } from '../tools/SearchTools';
import { PersistentShellTool } from '../tools/PersistentShellTool';
import { PersistentShell } from './PersistentShell';
import { WebSearchTool } from '../tools/WebSearchTool';
import { GitStatusTool, GitDiffTool, GitCommitTool, GitPushTool, GitPullTool } from '../tools/GitTools';
import { Tool } from '../tools/Tool';
import { McpClient } from '../mcp/McpClient';

import { CheckpointManager } from '../checkpoint/CheckpointManager';
import * as readline from 'readline';
import * as fs from 'fs';
import * as path from 'path';
import * as os from 'os';
import { marked } from 'marked';
import TerminalRenderer from 'marked-terminal';

const HISTORY_FILE = path.join(os.homedir(), '.mentis_history');

export class ReplManager {
  private configManager: ConfigManager;
  private modelClient!: ModelClient;
  private contextManager: ContextManager;
  private checkpointManager: CheckpointManager;
  private history: ChatMessage[] = [];
  private mode: 'PLAN' | 'BUILD' = 'BUILD';
  private tools: Tool[] = [];
  private mcpClients: McpClient[] = [];
  private shell: PersistentShell;
  private currentModelName: string = 'Unknown';

  constructor() {
    this.configManager = new ConfigManager();
    this.contextManager = new ContextManager();
    this.checkpointManager = new CheckpointManager();
    this.shell = new PersistentShell();
    this.tools = [
      new WriteFileTool(),
      new ReadFileTool(),
      new ListDirTool(),
      new SearchFileTool(), // grep
      new WebSearchTool(),
      new GitStatusTool(),
      new GitDiffTool(),
      new GitCommitTool(),
      new GitPushTool(),
      new GitPullTool(),
      new PersistentShellTool(this.shell) // /run
    ];

    // Configure Markdown Renderer
    marked.setOptions({
      // @ts-ignore
      renderer: new TerminalRenderer()
    });
    // Default to Ollama if not specified, assuming compatible endpoint
    this.initializeClient();
  }

  private initializeClient() {
    const config = this.configManager.getConfig();
    const provider = config.defaultProvider || 'ollama';

    let baseUrl: string | undefined;
    let apiKey: string;
    let model: string;

    if (provider === 'gemini') {
      baseUrl = 'https://generativelanguage.googleapis.com/v1beta/openai/';
      apiKey = config.gemini?.apiKey || '';
      model = config.gemini?.model || 'gemini-2.5-flash';
    } else if (provider === 'openai') {
      baseUrl = config.openai?.baseUrl || 'https://api.openai.com/v1';
      apiKey = config.openai?.apiKey || '';
      model = config.openai?.model || 'gpt-4o';
    } else if (provider === 'glm') {
      // Use the "Coding Plan" endpoint which supports glm-4.6 and this specific key type
      baseUrl = config.glm?.baseUrl || 'https://api.z.ai/api/coding/paas/v4/';
      apiKey = config.glm?.apiKey || '';
      model = config.glm?.model || 'glm-4.6';
    } else { // Default to Ollama
      baseUrl = config.ollama?.baseUrl || 'http://localhost:11434/v1';
      apiKey = 'ollama'; // Ollama typically doesn't use an API key in the same way
      model = config.ollama?.model || 'llama3:latest';
    }

    this.currentModelName = model;
    this.modelClient = new OpenAIClient(baseUrl, apiKey, model);
    // console.log(chalk.dim(`Initialized ${provider} client with model ${model}`));
  }

  public async start() {
    UIManager.displayLogo();
    UIManager.displayWelcome();

    // Load History
    let commandHistory: string[] = [];
    if (fs.existsSync(HISTORY_FILE)) {
      try {
        // The history file is appended oldest-first; readline expects the newest
        // entry at index 0, so reverse after reading.
        commandHistory = fs.readFileSync(HISTORY_FILE, 'utf-8').split('\n').filter(Boolean).reverse();
      } catch (e) { }
    }

    while (true) {
      UIManager.printSeparator();
      // console.log(chalk.dim(` /help for help | Model: ${chalk.cyan(this.currentModelName)}`));
      // (Model banner removed to keep the CLI clean; the prompt stays minimal.)

      const modeLabel = this.mode === 'PLAN' ? chalk.magenta('PLAN') : chalk.blue('BUILD');
      const promptText = `${modeLabel} ${chalk.cyan('>')}`;

      // Use readline for basic input to support history
      const answer = await new Promise<string>((resolve) => {
        const rl = readline.createInterface({
          input: process.stdin,
          output: process.stdout,
          history: commandHistory,
          historySize: 1000,
          prompt: promptText + ' '
        });

        rl.prompt();

        rl.on('line', (line) => {
          rl.close();
          resolve(line);
        });
      });

      // The readline instance is closed after each line, so history is tracked
      // manually: in the in-memory array and in the history file.
      const input = answer.trim();

      if (input) {
        // Readline keeps the newest entry at index 0; skip consecutive duplicates.
        if (commandHistory[0] !== input) {
          commandHistory.unshift(input);
        }

        // Append to file (standard log order, newest at the end)
        try {
          fs.appendFileSync(HISTORY_FILE, input + '\n');
        } catch (e) { }
      }

      if (!input) continue; // Skip empty input

      if (input.startsWith('/')) {
        await this.handleCommand(input);
        continue;
      }

      await this.handleChat(input);
    }
  }

  private async handleCommand(input: string) {
    const [command, ...args] = input.split(' ');
    switch (command) {
      case '/help':
        console.log(chalk.yellow('Available commands:'));
        console.log(' /help - Show this help message');
        console.log(' /clear - Clear chat history');
        console.log(' /exit - Exit the application');
        console.log(' /config - Configure settings');
        console.log(' /add <file> - Add file to context');
        console.log(' /drop <file> - Remove file from context');
        console.log(' /plan - Switch to PLAN mode');
        console.log(' /build - Switch to BUILD mode');
        console.log(' /model - Interactively select Provider & Model');
        console.log(' /use <provider> [model] - Quick switch (legacy)');
        console.log(' /mcp <cmd> - Manage MCP servers');
        console.log(' /resume - Resume last session');
        console.log(' /checkpoint <save|load|list> [name] - Manage checkpoints');
        console.log(' /search <query> - Search codebase');
        console.log(' /run <cmd> - Run shell command');
        console.log(' /commit [msg] - Git commit all changes');
        break;
      case '/plan':
        this.mode = 'PLAN';
        console.log(chalk.blue('Switched to PLAN mode.'));
        break;
      case '/build':
        this.mode = 'BUILD';
        console.log(chalk.yellow('Switched to BUILD mode.'));
        break;
      case '/model':
        await this.handleModelCommand(args);
        break;
      case '/connect':
        console.log(chalk.dim('Tip: Use /model for an interactive menu.'));
        await this.handleConnectCommand(args);
        break;
      case '/use':
        await this.handleUseCommand(args);
        break;
      case '/mcp':
        await this.handleMcpCommand(args);
        break;
      case '/resume':
        await this.handleResumeCommand();
        break;
      case '/checkpoint':
        await this.handleCheckpointCommand(args);
        break;
      case '/clear':
        this.history = [];
        this.contextManager.clear();
        UIManager.displayLogo(); // Redraw logo on clear
        console.log(chalk.yellow('Chat history and context cleared.'));
        break;
      case '/add':
        if (args.length === 0) {
          console.log(chalk.red('Usage: /add <file_path>'));
        } else {
          const result = await this.contextManager.addFile(args[0]);
          console.log(chalk.yellow(result));
        }
        break;
      case '/drop':
        if (args.length === 0) {
          console.log(chalk.red('Usage: /drop <file_path>'));
        } else {
          const result = await this.contextManager.removeFile(args[0]);
          console.log(chalk.yellow(result));
        }
        break;
      case '/config':
        await this.handleConfigCommand();
        break;
      case '/exit':
        // Auto-save on exit
        this.checkpointManager.save('latest', this.history, this.contextManager.getFiles());
        this.shell.kill(); // Kill the shell process
        console.log(chalk.green('Session saved. Goodbye!'));
        process.exit(0);
        break;
      default:
        console.log(chalk.red(`Unknown command: ${command}`));
    }
  }

  private async handleChat(input: string) {
    const context = this.contextManager.getContextString();
    let fullInput = input;

    let modeInstruction = '';
    if (this.mode === 'PLAN') {
      modeInstruction = '\n[SYSTEM: You are in PLAN mode. Focus on high-level architecture, requirements analysis, and creating a sturdy plan. Do not write full code implementation yet, just scaffolds or pseudocode if needed.]';
    } else {
      modeInstruction = '\n[SYSTEM: You are in BUILD mode. Focus on implementing working code that solves the user request efficiently.]';
    }

    fullInput = `${input}${modeInstruction}`;

    if (context) {
      fullInput = `${context}\n\nUser Question: ${fullInput}`;
    }

    this.history.push({ role: 'user', content: fullInput });

    let spinner = ora('Thinking... (Press Esc to cancel)').start();
    const controller = new AbortController();

    // Setup cancellation listener
    const keyListener = (str: string, key: any) => {
      if (key.name === 'escape') {
        controller.abort();
      }
    };

    if (process.stdin.isTTY) {
      readline.emitKeypressEvents(process.stdin);
      process.stdin.setRawMode(true);
      process.stdin.on('keypress', keyListener);
    }

    try {
      // First call
      let response = await this.modelClient.chat(this.history, this.tools.map(t => ({
        type: 'function',
        function: {
          name: t.name,
          description: t.description,
          parameters: t.parameters
        }
      })), controller.signal);

      // Loop for tool calls
      while (response.tool_calls && response.tool_calls.length > 0) {
        if (controller.signal.aborted) throw new Error('Request cancelled by user');

        spinner.stop();

        // Add the assistant's tool-call request to history
        this.history.push({
          role: 'assistant',
          content: response.content,
          tool_calls: response.tool_calls
        });

        // Execute tools
        for (const toolCall of response.tool_calls) {
          if (controller.signal.aborted) break;

          const toolName = toolCall.function.name;
          const toolArgsStr = toolCall.function.arguments;
          const toolArgs = JSON.parse(toolArgsStr);

          // Truncate long arguments
          let displayArgs = toolArgsStr;
          if (displayArgs.length > 100) {
            displayArgs = displayArgs.substring(0, 100) + '...';
          }
          console.log(chalk.dim(` [Action] ${toolName}(${displayArgs})`));

          // Safety check for write_file
          if (toolName === 'write_file') {
            // Pause cancellation listener during user interaction
            if (process.stdin.isTTY) {
              process.stdin.removeListener('keypress', keyListener);
              process.stdin.setRawMode(false);
              process.stdin.pause(); // Explicitly pause before inquirer
            }

            spinner.stop(); // Stop spinner to allow input

            const { confirm } = await inquirer.prompt([
              {
                type: 'confirm',
                name: 'confirm',
                message: `Allow writing to ${chalk.yellow(toolArgs.filePath)}?`,
                default: true
              }
            ]);

            // Resume cancellation listener
            if (process.stdin.isTTY) {
              process.stdin.setRawMode(true);
              process.stdin.resume(); // Explicitly resume
              process.stdin.on('keypress', keyListener);
            }

            if (!confirm) {
              this.history.push({
                role: 'tool',
                tool_call_id: toolCall.id,
                name: toolName,
                content: 'Error: User rejected write operation.'
              });
              console.log(chalk.red(' Action cancelled by user.'));
              // Do not restart the spinner here; the code after the tool loop handles it.
              continue;
            }
            spinner = ora('Executing...').start();
          }

          const tool = this.tools.find(t => t.name === toolName);
          let result = '';

          if (tool) {
            try {
              // Tool execution itself is not cancellable; if the user aborts
              // mid-tool, the tool finishes but the loop stops afterwards.
              result = await tool.execute(toolArgs);
            } catch (e: any) {
              result = `Error: ${e.message}`;
            }
          } else {
            result = `Error: Tool ${toolName} not found.`;
          }

          if (spinner.isSpinning) {
            spinner.stop();
          }

          this.history.push({
            role: 'tool',
            tool_call_id: toolCall.id,
            name: toolName,
            content: result
          });
        }

        if (controller.signal.aborted) throw new Error('Request cancelled by user');

        spinner = ora('Thinking (processing tools)...').start();

        // Get next response
        response = await this.modelClient.chat(this.history, this.tools.map(t => ({
          type: 'function',
          function: {
            name: t.name,
            description: t.description,
            parameters: t.parameters
          }
        })), controller.signal);
      }

      spinner.stop();

      console.log('');
      if (response.content) {
        console.log(chalk.bold.blue('Mentis:'));
        console.log(marked(response.content));

        if (response.usage) {
          const { input_tokens, output_tokens } = response.usage;
          const totalCost = this.estimateCost(input_tokens, output_tokens);
          console.log(chalk.dim(`\n(Tokens: ${input_tokens} in / ${output_tokens} out | Est. Cost: $${totalCost.toFixed(5)})`));
        }

        console.log('');
        this.history.push({ role: 'assistant', content: response.content });
      }
    } catch (error: any) {
      spinner.stop();
      if (error.message === 'Request cancelled by user') {
        console.log(chalk.yellow('\nRequest cancelled by user.'));
      } else {
        spinner.fail('Error getting response from model.');
        console.error(error.message);
      }
    } finally {
      if (process.stdin.isTTY) {
        process.stdin.removeListener('keypress', keyListener);
        process.stdin.setRawMode(false);
        process.stdin.pause(); // Reset flow
      }
    }
  }

  private async handleConfigCommand() {
    const config = this.configManager.getConfig();
    const { action } = await inquirer.prompt([
      {
        type: 'list',
        name: 'action',
        message: 'Configuration',
        prefix: '',
        choices: [
          'Show Current Configuration',
          'Set Active Provider',
          'Set API Key (for active provider)',
          'Set Base URL (for active provider)',
          'Back'
        ]
      }
    ]);

    if (action === 'Back') return;

    if (action === 'Show Current Configuration') {
      console.log(JSON.stringify(config, null, 2));
      return;
    }

    if (action === 'Set Active Provider') {
      const { provider } = await inquirer.prompt([{
        type: 'list',
        name: 'provider',
        message: 'Select Provider:',
        choices: ['Gemini', 'Ollama', 'OpenAI', 'GLM']
      }]);
      const key = provider.toLowerCase();
      this.configManager.updateConfig({ defaultProvider: key });
      console.log(chalk.green(`Active provider set to: ${provider}`));
      this.initializeClient();
      return;
    }

    const currentProvider = config.defaultProvider;

    if (action === 'Set API Key (for active provider)') {
      if (currentProvider === 'ollama') {
        console.log(chalk.yellow('Ollama typically does not require an API key.'));
      }
      const { value } = await inquirer.prompt([{
        type: 'password',
        name: 'value',
        message: `Enter API Key for ${currentProvider}:`,
        mask: '*'
      }]);

      const updates: any = {};
      updates[currentProvider] = { ...((config as any)[currentProvider] || {}), apiKey: value };
      this.configManager.updateConfig(updates);
      console.log(chalk.green(`API Key updated for ${currentProvider}.`));
      this.initializeClient();
    }

    if (action === 'Set Base URL (for active provider)') {
      const defaultUrl = (config as any)[currentProvider]?.baseUrl || '';
      const { value } = await inquirer.prompt([{
        type: 'input',
        name: 'value',
        message: `Enter Base URL for ${currentProvider}:`,
        default: defaultUrl
      }]);

      const updates: any = {};
      updates[currentProvider] = { ...((config as any)[currentProvider] || {}), baseUrl: value };
      this.configManager.updateConfig(updates);
      console.log(chalk.green(`Base URL updated for ${currentProvider}.`));
      this.initializeClient();
    }
  }

  private async handleModelCommand(args: string[]) {
    const config = this.configManager.getConfig();
    const provider = config.defaultProvider || 'ollama';

    // If argument provided, use it directly
    if (args.length > 0) {
      const modelName = args[0];
      const updates: any = {};
      updates[provider] = { ...((config as any)[provider] || {}), model: modelName };
      this.configManager.updateConfig(updates);
      this.initializeClient(); // Re-init with new model
      console.log(chalk.green(`\nModel set to ${chalk.bold(modelName)} for ${provider}!`));
      return;
    }

    let models: string[] = [];
    if (provider === 'gemini') {
      models = ['gemini-2.5-flash', 'gemini-1.5-pro', 'gemini-1.0-pro', 'Other...'];
    } else if (provider === 'ollama') {
      models = ['llama3:latest', 'deepseek-r1:latest', 'mistral:latest', 'Other...'];
    } else if (provider === 'openai') {
      models = ['gpt-4o', 'gpt-4o-mini', 'gpt-4-turbo', 'Other...'];
    } else if (provider === 'glm') {
      models = ['glm-4.6', 'glm-4-plus', 'glm-4', 'glm-4-air', 'glm-4-flash', 'Other...'];
    } else if (provider === 'anthropic') {
      models = ['claude-3-5-sonnet-20241022', 'claude-3-opus-20240229', 'claude-3-sonnet-20240229', 'claude-3-haiku-20240307', 'glm-4.6', 'Other...'];
    } else {
      models = ['Other...'];
    }

    console.log(chalk.blue(`Configuring model for active provider: ${chalk.bold(provider)}`));

    let { model } = await inquirer.prompt([
      {
        type: 'list',
        name: 'model',
        message: 'Select Model:',
        choices: models,
      }
    ]);

    if (model === 'Other...') {
      const { customModel } = await inquirer.prompt([{
        type: 'input',
        name: 'customModel',
        message: 'Enter model name:'
      }]);
      model = customModel;
    }

    const updates: any = {};
    updates[provider] = { ...((config as any)[provider] || {}), model: model };

    this.configManager.updateConfig(updates);
    this.initializeClient();
    console.log(chalk.green(`\nModel set to ${model} for ${provider}!`));
  }

  private async handleConnectCommand(args: string[]) {
    if (args.length < 1) {
      console.log(chalk.red('Usage: /connect <provider> [key_or_url]'));
      return;
    }

    const provider = args[0].toLowerCase();
    const value = args[1]; // Optional for ollama (defaults), required for the others

    const config = this.configManager.getConfig();

    if (provider === 'gemini') {
      if (!value) {
        console.log(chalk.red('Error: API Key required for Gemini. Usage: /connect gemini <api_key>'));
        return;
      }
      this.configManager.updateConfig({
        gemini: { ...config.gemini, apiKey: value },
        defaultProvider: 'gemini'
      });
      console.log(chalk.green(`Connected to Gemini with key: ${value.substring(0, 8)}...`));
    } else if (provider === 'ollama') {
      const url = value || 'http://localhost:11434/v1';
      this.configManager.updateConfig({
        ollama: { ...config.ollama, baseUrl: url },
        defaultProvider: 'ollama'
      });
      console.log(chalk.green(`Connected to Ollama at ${url}`));
    } else if (provider === 'openai') { // Support OpenAI since client supports it
      if (!value) {
        console.log(chalk.red('Error: API Key required for OpenAI. Usage: /connect openai <api_key>'));
        return;
      }
      this.configManager.updateConfig({
        openai: { ...config.openai, apiKey: value },
        defaultProvider: 'openai'
      });
      console.log(chalk.green(`Connected to OpenAI.`));
    } else if (provider === 'glm') {
      if (!value) {
        console.log(chalk.red('Error: API Key required for GLM. Usage: /connect glm <api_key>'));
        return;
      }
      this.configManager.updateConfig({
        glm: { ...config.glm, apiKey: value },
        defaultProvider: 'glm'
      });
      console.log(chalk.green(`Connected to GLM (ZhipuAI).`));
    } else {
      console.log(chalk.red(`Unknown provider: ${provider}. Use 'gemini', 'ollama', 'openai', or 'glm'.`));
      return;
    }

    this.initializeClient();
  }

  private async handleUseCommand(args: string[]) {
    if (args.length < 1) {
      console.log(chalk.red('Usage: /use <provider> [model_name]'));
      return;
    }

    const provider = args[0].toLowerCase();
    const model = args[1]; // Optional

    const config = this.configManager.getConfig();

    if (provider === 'gemini') {
      const updates: any = { defaultProvider: 'gemini' };
      if (model) {
        updates.gemini = { ...config.gemini, model: model };
      }
      this.configManager.updateConfig(updates);
    } else if (provider === 'ollama') {
      const updates: any = { defaultProvider: 'ollama' };
      if (model) {
        updates.ollama = { ...config.ollama, model: model };
      }
      this.configManager.updateConfig(updates);
    } else if (provider === 'glm') {
      const updates: any = { defaultProvider: 'glm' };
      if (model) {
        updates.glm = { ...config.glm, model: model };
      }
      this.configManager.updateConfig(updates);
    } else {
      console.log(chalk.red(`Unknown provider: ${provider}`));
      return;
    }

    this.initializeClient();
    console.log(chalk.green(`Switched to ${provider} ${model ? `using model ${model}` : ''}`));
  }

  private async handleMcpCommand(args: string[]) {
    if (args.length < 1) {
      console.log(chalk.red('Usage: /mcp <connect|list|disconnect> [args]'));
      return;
    }

    const action = args[0];

    if (action === 'connect') {
      const commandParts = args.slice(1);
      if (commandParts.length === 0) {
        console.log(chalk.red('Usage: /mcp connect <command> [args...]'));
        return;
      }

      // Example: /mcp connect npx -y @modelcontextprotocol/server-memory
      // On Windows, npx might be npx.cmd
      const cmd = process.platform === 'win32' && commandParts[0] === 'npx' ? 'npx.cmd' : commandParts[0];
      const cmdArgs = commandParts.slice(1);

      const spinner = ora(`Connecting to MCP server: ${cmd} ${cmdArgs.join(' ')}...`).start();

      try {
        const client = new McpClient(cmd, cmdArgs);
        await client.initialize();
        const mcpTools = await client.listTools();

        this.mcpClients.push(client);
        this.tools.push(...mcpTools);

        spinner.succeed(chalk.green(`Connected to ${client.serverName}!`));
        console.log(chalk.green(`Added ${mcpTools.length} tools:`));
        mcpTools.forEach(t => console.log(chalk.dim(` - ${t.name}: ${t.description.substring(0, 50)}...`)));

      } catch (e: any) {
        spinner.fail(chalk.red(`Failed to connect: ${e.message}`));
      }

    } else if (action === 'list') {
      if (this.mcpClients.length === 0) {
        console.log('No active MCP connections.');
      } else {
        console.log(chalk.cyan('Active MCP Connections:'));
        this.mcpClients.forEach((client, idx) => {
          console.log(`${idx + 1}. ${client.serverName}`);
        });
      }
    } else if (action === 'disconnect') {
      // Disconnect every client (no per-index disconnect yet)
      console.log(chalk.yellow('Disconnecting all MCP clients...'));
      this.mcpClients.forEach(c => c.disconnect());
      this.mcpClients = [];
      // Re-initialize core tools
      this.tools = [
        new WriteFileTool(),
        new ReadFileTool(),
        new ListDirTool(),
        new SearchFileTool(),
        new PersistentShellTool(this.shell),
        new WebSearchTool(),
        new GitStatusTool(),
        new GitDiffTool(),
        new GitCommitTool(),
        new GitPushTool(),
        new GitPullTool()
      ];
    } else {
      console.log(chalk.red(`Unknown MCP action: ${action}`));
    }
  }

  private async handleResumeCommand() {
    if (!this.checkpointManager.exists('latest')) {
      console.log(chalk.yellow('No previous session found to resume.'));
      return;
    }
    await this.loadCheckpoint('latest');
  }

  private async handleCheckpointCommand(args: string[]) {
    if (args.length < 1) {
      console.log(chalk.red('Usage: /checkpoint <save|load|list> [name]'));
      return;
    }
    const action = args[0];
    const name = args[1] || 'default';

    if (action === 'save') {
      this.checkpointManager.save(name, this.history, this.contextManager.getFiles());
      console.log(chalk.green(`Checkpoint '${name}' saved.`));
    } else if (action === 'load') {
      await this.loadCheckpoint(name);
    } else if (action === 'list') {
      const points = this.checkpointManager.list();
      console.log(chalk.cyan('Available Checkpoints:'));
      points.forEach(p => console.log(` - ${p}`));
    } else {
      console.log(chalk.red(`Unknown action: ${action}`));
    }
  }

  private async loadCheckpoint(name: string) {
    const cp = this.checkpointManager.load(name);
    if (!cp) {
      console.log(chalk.red(`Checkpoint '${name}' not found.`));
      return;
    }

    this.history = cp.history;
    this.contextManager.clear();

    // Restore context files
    if (cp.files && cp.files.length > 0) {
      console.log(chalk.dim('Restoring context files...'));
      for (const file of cp.files) {
        await this.contextManager.addFile(file);
      }
    }
    console.log(chalk.green(`Resumed session '${name}' (${new Date(cp.timestamp).toLocaleString()})`));
    // Re-display last assistant message if any
    const lastMsg = this.history[this.history.length - 1];
    if (lastMsg && lastMsg.role === 'assistant' && lastMsg.content) {
      console.log(chalk.blue('\nLast message:'));
      console.log(lastMsg.content);
    }
  }

  private estimateCost(input: number, output: number): number {
    const config = this.configManager.getConfig();
    const provider = config.defaultProvider;

    let rateIn = 0;
    let rateOut = 0;

    if (provider === 'openai') {
      rateIn = 5.00 / 1000000;
      rateOut = 15.00 / 1000000;
    } else if (provider === 'gemini') {
      rateIn = 0.35 / 1000000;
      rateOut = 0.70 / 1000000;
    } else if (provider === 'glm') {
      rateIn = 14.00 / 1000000; // Approximate for GLM-4
      rateOut = 14.00 / 1000000;
    }

    return (input * rateIn) + (output * rateOut);
  }
}
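
For orientation, a minimal sketch of how this class might be wired into the CLI entry point. The published package/src/index.ts (12 lines) is not part of this diff, so the import path and structure below are assumptions, not the actual file:

// Hypothetical entry point (usage sketch only, not the published src/index.ts).
import { ReplManager } from './repl/ReplManager';

async function main() {
  const repl = new ReplManager(); // wires up config, tools, shell, and the model client
  await repl.start();             // enters the interactive PLAN/BUILD loop
}

main().catch((err) => {
  console.error(err);
  process.exit(1);
});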