@indiccoder/mentis-cli 1.0.3 → 1.0.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +16 -12
- package/dist/repl/ReplManager.js +50 -17
- package/package.json +1 -1
- package/scripts/test_config_update.ts +63 -0
- package/src/repl/ReplManager.ts +56 -17
package/README.md
CHANGED
@@ -5,26 +5,30 @@ Mentis is a powerful terminal-based AI coding assistant that lives in your comma

 ## ✨ Features

-* **🤖 Multi-Model Support**: Switch seamlessly between **Gemini**, **Ollama** (Local), and **
+* **🤖 Multi-Model Support**: Switch seamlessly between **Gemini**, **Ollama** (Local), **OpenAI**, and **GLM-4.6** (Z.AI Coding).
 * **🛠️ Agentic Capabilities**: Mentis can read, write, list files, and search your codebase to understand context.
-* **🌐 Web Intelligence**: Hybrid
-* **🗺️ Smart Context**: Automatically maps your repository structure
-*
-*
-*
-*
-*
-*
+* **🌐 Web Intelligence**: Hybrid search for documentation and error fixing.
+* **🗺️ Smart Context**: Automatically maps your repository structure.
+* **🧩 Interactive & Robust CLI**:
+* **History**: Navigate previous commands with Up/Down arrows (persistent across sessions).
+* **Cancellation**: Press `Esc` to instantly stop generation.
+* **Markdown**: Beautiful syntax highlighting in terminal output.
+* **💰 Cost Awareness**: Real-time token usage and cost tracking.
+* **🧠 Persistent Memory**: Auto-saves sessions (`/resume`) and checkpoints (`/checkpoint`).
+* **🔌 MCP Support**: Full [Model Context Protocol](https://github.com/modelcontextprotocol) client.
+* **🔍 Codebase Search**: Built-in `grep` tool.
+* **🐚 Shell Integration**: Run shell commands and commit changes directly.

 ## 🚀 Installation

-###
-You can install Mentis directly from the
+### Using NPM (Recommended)
+You can install Mentis globally directly from the NPM registry:

 ```bash
-npm install -g
+npm install -g @indiccoder/mentis-cli
 ```

+
 ### From Source (Dev)
 ```bash
 git clone https://github.com/CoderVLSI/Mentis-CLI.git
package/dist/repl/ReplManager.js
CHANGED
@@ -142,7 +142,8 @@ class ReplManager {
 // console.log(chalk.dim(` /help for help | Model: ${chalk.cyan(this.currentModelName)}`));
 // Removed redundancy to keep CLI clean, prompt has info? No, prompt is minimal.
 const modeLabel = this.mode === 'PLAN' ? chalk_1.default.magenta('PLAN') : chalk_1.default.blue('BUILD');
-const
+const modelInfo = this.currentModelName ? ` (${this.currentModelName})` : '';
+const promptText = `${modeLabel}${chalk_1.default.dim(modelInfo)} ${chalk_1.default.cyan('>')}`;
 // Use readline for basic input to support history
 const answer = await new Promise((resolve) => {
 const rl = readline.createInterface({
@@ -518,43 +519,53 @@ class ReplManager {
 }
 async handleModelCommand(args) {
 const config = this.configManager.getConfig();
-const
-//
+const currentProvider = config.defaultProvider || 'ollama';
+// Direct argument: /model gpt-4o (updates active provider's model)
 if (args.length > 0) {
 const modelName = args[0];
 const updates = {};
-updates[
+updates[currentProvider] = { ...(config[currentProvider] || {}), model: modelName };
 this.configManager.updateConfig(updates);
 this.initializeClient(); // Re-init with new model
-console.log(chalk_1.default.green(`\nModel set to ${chalk_1.default.bold(modelName)} for ${
+console.log(chalk_1.default.green(`\nModel set to ${chalk_1.default.bold(modelName)} for ${currentProvider}!`));
 return;
 }
+// Interactive Mode: Streamlined Provider -> Model Flow
+console.log(chalk_1.default.cyan('Configure Model & Provider'));
+const { provider } = await inquirer_1.default.prompt([
+{
+type: 'list',
+name: 'provider',
+message: 'Select Provider:',
+choices: ['Gemini', 'Ollama', 'OpenAI', 'GLM'],
+default: currentProvider.charAt(0).toUpperCase() + currentProvider.slice(1) // Capitalize for default selection
+}
+]);
+const selectedProvider = provider.toLowerCase();
 let models = [];
-if (
+if (selectedProvider === 'gemini') {
 models = ['gemini-2.5-flash', 'gemini-1.5-pro', 'gemini-1.0-pro', 'Other...'];
 }
-else if (
-models = ['llama3:latest', 'deepseek-r1:latest', 'mistral:latest', 'Other...'];
+else if (selectedProvider === 'ollama') {
+models = ['llama3:latest', 'deepseek-r1:latest', 'mistral:latest', 'qwen2.5-coder', 'Other...'];
 }
-else if (
+else if (selectedProvider === 'openai') {
 models = ['gpt-4o', 'gpt-4o-mini', 'gpt-4-turbo', 'Other...'];
 }
-else if (
+else if (selectedProvider === 'glm') {
 models = ['glm-4.6', 'glm-4-plus', 'glm-4', 'glm-4-air', 'glm-4-flash', 'Other...'];
 }
-else if (provider === 'anthropic') {
-models = ['claude-3-5-sonnet-20241022', 'claude-3-opus-20240229', 'claude-3-sonnet-20240229', 'claude-3-haiku-20240307', 'glm-4.6', 'Other...'];
-}
 else {
 models = ['Other...'];
 }
-console.log(chalk_1.default.blue(`Configuring model for active provider: ${chalk_1.default.bold(provider)}`));
 let { model } = await inquirer_1.default.prompt([
 {
 type: 'list',
 name: 'model',
-message:
+message: `Select Model for ${provider}:`,
 choices: models,
+// Try to find current model in list to set default
+default: config[selectedProvider]?.model
 }
 ]);
 if (model === 'Other...') {
@@ -565,11 +576,33 @@ class ReplManager {
 }]);
 model = customModel;
 }
+// Check for missing API Key (except for Ollama)
+let newApiKey = undefined;
+const currentKey = config[selectedProvider]?.apiKey;
+if (selectedProvider !== 'ollama' && !currentKey) {
+console.log(chalk_1.default.yellow(`\n⚠️ No API Key found for ${provider}.`));
+const { apiKey } = await inquirer_1.default.prompt([{
+type: 'password',
+name: 'apiKey',
+message: `Enter API Key for ${provider} (or leave empty to skip):`,
+mask: '*'
+}]);
+if (apiKey && apiKey.trim()) {
+newApiKey = apiKey.trim();
+}
+}
 const updates = {};
-updates
+updates.defaultProvider = selectedProvider;
+updates[selectedProvider] = {
+...(config[selectedProvider] || {}),
+model: model
+};
+if (newApiKey) {
+updates[selectedProvider].apiKey = newApiKey;
+}
 this.configManager.updateConfig(updates);
 this.initializeClient();
-console.log(chalk_1.default.green(`\
+console.log(chalk_1.default.green(`\nSwitched to ${chalk_1.default.bold(provider)} (${model})!`));
 }
 async handleConnectCommand(args) {
 if (args.length < 1) {
package/package.json
CHANGED

package/scripts/test_config_update.ts
ADDED
@@ -0,0 +1,63 @@
+import { ConfigManager } from '../src/config/ConfigManager';
+import chalk from 'chalk';
+import fs from 'fs';
+import path from 'path';
+import os from 'os';
+
+async function testConfig() {
+console.log(chalk.cyan('🧪 Testing ConfigManager...'));
+
+const configManager = new ConfigManager();
+const initialConfig = configManager.getConfig();
+console.log('Initial Config:', initialConfig);
+
+// 1. Test updating active provider
+console.log(chalk.yellow('\n1. Setting active provider to "gemini"'));
+configManager.updateConfig({ defaultProvider: 'gemini' });
+
+let currentConfig = configManager.getConfig();
+if (currentConfig.defaultProvider === 'gemini') {
+console.log(chalk.green('✅ Provider set successfully.'));
+} else {
+console.log(chalk.red(`❌ Failed to set provider. Got: ${currentConfig.defaultProvider}`));
+}
+
+// 2. Test updating model for a provider
+console.log(chalk.yellow('\n2. Updating Model for "gemini"'));
+const newModel = 'gemini-1.5-pro-test';
+
+const updates: any = {};
+updates['gemini'] = { ...(currentConfig.gemini || {}), model: newModel };
+configManager.updateConfig(updates);
+
+currentConfig = configManager.getConfig();
+if (currentConfig.gemini?.model === newModel) {
+console.log(chalk.green(`✅ Model updated successfully to ${newModel}`));
+} else {
+console.log(chalk.red(`❌ Failed to update model. Got: ${currentConfig.gemini?.model}`));
+}
+
+// 3. Test API Key update
+console.log(chalk.yellow('\n3. Updating API Key for "gemini"'));
+const newKey = 'test-key-123';
+
+updates['gemini'] = { ...(currentConfig.gemini || {}), apiKey: newKey };
+configManager.updateConfig(updates); // Should merge with model update from previous step effectively if we pull fresh?
+// Actually our test logic pulled currentConfig above, so it preserves 'model'.
+
+currentConfig = configManager.getConfig();
+if (currentConfig.gemini?.apiKey === newKey && currentConfig.gemini?.model === newModel) {
+console.log(chalk.green('✅ API Key updated successfully (and model preserved).'));
+} else {
+console.log(chalk.red('❌ Failed to update API Key or verify persistence.'));
+console.log(currentConfig.gemini);
+}
+
+// Restore original config to be nice
+console.log(chalk.yellow('\nRestoring original config (defaultProvider)...'));
+configManager.updateConfig({ defaultProvider: initialConfig.defaultProvider });
+
+console.log(chalk.cyan('\n🏁 Config Tests Completed.'));
+}
+
+testConfig();
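The comments inside the test above ("Should merge with model update from previous step…", "so it preserves 'model'") suggest that `ConfigManager.updateConfig` replaces top-level keys rather than deep-merging nested objects, which is why both the CLI code and the test spread the existing provider object before changing a single field. Below is a minimal sketch of that pattern under that assumption; `read` and `write` are hypothetical stand-ins for the real ConfigManager persistence, not its API.

```ts
// Sketch of the shallow, top-level update pattern the test appears to rely on.
// `read`/`write` are hypothetical helpers, not part of ConfigManager's API.
type Cfg = Record<string, any>;

function updateShallow(read: () => Cfg, write: (c: Cfg) => void, updates: Cfg): void {
  // Top-level keys in `updates` overwrite stored keys; nested fields are not merged.
  write({ ...read(), ...updates });
}

// Preserving `model` while adding `apiKey` therefore requires spreading the
// current provider object first, exactly as scripts/test_config_update.ts does:
let store: Cfg = { gemini: { model: 'gemini-1.5-pro-test' } };
updateShallow(
  () => store,
  (c) => { store = c; },
  { gemini: { ...store.gemini, apiKey: 'test-key-123' } },
);
// store.gemini is now { model: 'gemini-1.5-pro-test', apiKey: 'test-key-123' }
```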
package/src/repl/ReplManager.ts
CHANGED
@@ -120,7 +120,8 @@ export class ReplManager {
 // Removed redundancy to keep CLI clean, prompt has info? No, prompt is minimal.

 const modeLabel = this.mode === 'PLAN' ? chalk.magenta('PLAN') : chalk.blue('BUILD');
-const
+const modelInfo = this.currentModelName ? ` (${this.currentModelName})` : '';
+const promptText = `${modeLabel}${chalk.dim(modelInfo)} ${chalk.cyan('>')}`;

 // Use readline for basic input to support history
 const answer = await new Promise<string>((resolve) => {
@@ -533,42 +534,55 @@ export class ReplManager {

 private async handleModelCommand(args: string[]) {
 const config = this.configManager.getConfig();
-const
+const currentProvider = config.defaultProvider || 'ollama';

-//
+// Direct argument: /model gpt-4o (updates active provider's model)
 if (args.length > 0) {
 const modelName = args[0];
 const updates: any = {};
-updates[
+updates[currentProvider] = { ...((config as any)[currentProvider] || {}), model: modelName };
 this.configManager.updateConfig(updates);
 this.initializeClient(); // Re-init with new model
-console.log(chalk.green(`\nModel set to ${chalk.bold(modelName)} for ${
+console.log(chalk.green(`\nModel set to ${chalk.bold(modelName)} for ${currentProvider}!`));
 return;
 }

+// Interactive Mode: Streamlined Provider -> Model Flow
+console.log(chalk.cyan('Configure Model & Provider'));
+
+const { provider } = await inquirer.prompt([
+{
+type: 'list',
+name: 'provider',
+message: 'Select Provider:',
+choices: ['Gemini', 'Ollama', 'OpenAI', 'GLM'],
+default: currentProvider.charAt(0).toUpperCase() + currentProvider.slice(1) // Capitalize for default selection
+}
+]);
+
+const selectedProvider = provider.toLowerCase();
+
 let models: string[] = [];
-if (
+if (selectedProvider === 'gemini') {
 models = ['gemini-2.5-flash', 'gemini-1.5-pro', 'gemini-1.0-pro', 'Other...'];
-} else if (
-models = ['llama3:latest', 'deepseek-r1:latest', 'mistral:latest', 'Other...'];
-} else if (
+} else if (selectedProvider === 'ollama') {
+models = ['llama3:latest', 'deepseek-r1:latest', 'mistral:latest', 'qwen2.5-coder', 'Other...'];
+} else if (selectedProvider === 'openai') {
 models = ['gpt-4o', 'gpt-4o-mini', 'gpt-4-turbo', 'Other...'];
-} else if (
+} else if (selectedProvider === 'glm') {
 models = ['glm-4.6', 'glm-4-plus', 'glm-4', 'glm-4-air', 'glm-4-flash', 'Other...'];
-} else if (provider === 'anthropic') {
-models = ['claude-3-5-sonnet-20241022', 'claude-3-opus-20240229', 'claude-3-sonnet-20240229', 'claude-3-haiku-20240307', 'glm-4.6', 'Other...'];
 } else {
 models = ['Other...'];
 }

-console.log(chalk.blue(`Configuring model for active provider: ${chalk.bold(provider)}`));
-
 let { model } = await inquirer.prompt([
 {
 type: 'list',
 name: 'model',
-message:
+message: `Select Model for ${provider}:`,
 choices: models,
+// Try to find current model in list to set default
+default: (config as any)[selectedProvider]?.model
 }
 ]);

@@ -581,12 +595,37 @@ export class ReplManager {
 model = customModel;
 }

+// Check for missing API Key (except for Ollama)
+let newApiKey = undefined;
+const currentKey = (config as any)[selectedProvider]?.apiKey;
+
+if (selectedProvider !== 'ollama' && !currentKey) {
+console.log(chalk.yellow(`\n⚠️ No API Key found for ${provider}.`));
+const { apiKey } = await inquirer.prompt([{
+type: 'password',
+name: 'apiKey',
+message: `Enter API Key for ${provider} (or leave empty to skip):`,
+mask: '*'
+}]);
+if (apiKey && apiKey.trim()) {
+newApiKey = apiKey.trim();
+}
+}
+
 const updates: any = {};
-updates
+updates.defaultProvider = selectedProvider;
+updates[selectedProvider] = {
+...((config as any)[selectedProvider] || {}),
+model: model
+};
+
+if (newApiKey) {
+updates[selectedProvider].apiKey = newApiKey;
+}

 this.configManager.updateConfig(updates);
 this.initializeClient();
-console.log(chalk.green(`\
+console.log(chalk.green(`\nSwitched to ${chalk.bold(provider)} (${model})!`));
 }

 private async handleConnectCommand(args: string[]) {