@mcp-use/cli 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/app.d.ts +6 -0
- package/dist/app.js +343 -0
- package/dist/cli.d.ts +2 -0
- package/dist/cli.js +37 -0
- package/dist/commands.d.ts +51 -0
- package/dist/commands.js +1024 -0
- package/dist/mcp-service.d.ts +44 -0
- package/dist/mcp-service.js +436 -0
- package/dist/storage.d.ts +24 -0
- package/dist/storage.js +108 -0
- package/package.json +82 -0
- package/readme.md +154 -0
package/package.json
ADDED
@@ -0,0 +1,82 @@
{
  "name": "@mcp-use/cli",
  "version": "1.0.0",
  "license": "MIT",
  "description": "A CLI tool for interacting with Model Context Protocol (MCP) servers using natural language",
  "keywords": [
    "mcp",
    "model-context-protocol",
    "cli",
    "ai",
    "langchain",
    "openai"
  ],
  "author": "Pietro",
  "repository": {
    "type": "git",
    "url": "https://github.com/your-username/mcp-use-cli"
  },
  "bin": {
    "mcp-use": "dist/cli.js"
  },
  "type": "module",
  "engines": {
    "node": ">=16"
  },
  "scripts": {
    "build": "tsc",
    "dev": "tsc --watch",
    "test": "prettier --check . && xo && ava"
  },
  "files": [
    "dist"
  ],
  "dependencies": {
    "@hey-api/client-fetch": "^0.12.0",
    "@langchain/anthropic": "^0.3.23",
    "@langchain/core": "^0.3.60",
    "@langchain/google-genai": "^0.2.13",
    "@langchain/mistralai": "^0.2.1",
    "@langchain/openai": "^0.5.14",
    "dotenv": "^16.5.0",
    "ink": "^5.2.1",
    "ink-big-text": "^2.0.0",
    "ink-text-input": "^6.0.0",
    "mcp-use": "^0.0.8",
    "meow": "^11.0.0",
    "react": "^18.2.0",
    "zod": "^3.25.56"
  },
  "devDependencies": {
    "@sindresorhus/tsconfig": "^3.0.1",
    "@types/react": "^18.0.32",
    "@vdemedes/prettier-config": "^2.0.1",
    "ava": "^5.2.0",
    "chalk": "^5.2.0",
    "eslint-config-xo-react": "^0.27.0",
    "eslint-plugin-react": "^7.32.2",
    "eslint-plugin-react-hooks": "^4.6.0",
    "ink-testing-library": "^3.0.0",
    "prettier": "^2.8.7",
    "ts-node": "^10.9.1",
    "typescript": "^5.0.3",
    "xo": "^0.53.1"
  },
  "ava": {
    "extensions": {
      "ts": "module",
      "tsx": "module"
    },
    "nodeArguments": [
      "--loader=ts-node/esm"
    ]
  },
  "xo": {
    "extends": "xo-react",
    "prettier": true,
    "rules": {
      "react/prop-types": "off"
    }
  },
  "prettier": "@vdemedes/prettier-config"
}
package/readme.md
ADDED
@@ -0,0 +1,154 @@
<div align="center">
  <img src="static/terminal.png" alt="Terminal" />
</div>

A CLI tool for interacting with Model Context Protocol (MCP) servers using natural language. Built with [mcp-use](https://github.com/mcp-use/mcp-use-ts) and powered by OpenAI's GPT models.

## Features

- 🤖 Natural language interface for MCP servers
- 🔧 Built-in filesystem MCP server support
- 💬 Interactive chat interface with tool call visualization
- ⚡ Direct integration with mcp-use (no API layer needed)
- 🚀 Single command installation
- 🔄 **Multiple LLM providers** (OpenAI, Anthropic, Google, Mistral)
- ⚙️ **Slash commands** for configuration (like Claude Code)
- 🔑 **Smart API key prompting** - automatically asks for keys when needed
- 💾 **Persistent secure storage** - encrypted keys and settings saved across sessions

## Install

```bash
$ npm install --global mcp-use-cli
```

## Quick Start

1. **Install and run**:
   ```bash
   $ npm install --global mcp-use-cli
   $ mcp-use-cli
   ```

2. **Choose your model** (CLI handles API key setup automatically):
   ```bash
   # Just pick a model - that's it!
   /model openai gpt-4o-mini
   /model anthropic claude-3-5-sonnet-20241022
   /model google gemini-1.5-pro

   # CLI will prompt: "Please enter your OPENAI API key:"
   # Paste your key and start chatting immediately!
   ```

3. **Get API keys** when prompted from:
   - [OpenAI](https://platform.openai.com/api-keys)
   - [Anthropic](https://console.anthropic.com/)
   - [Google AI](https://aistudio.google.com/app/apikey)
   - [Mistral](https://console.mistral.ai/)

> **Keys are stored securely encrypted** in `~/.mcp-use-cli/config.json` and persist across sessions.

## Alternative Setup

If you prefer environment variables:

```bash
export OPENAI_API_KEY=your_key_here
export ANTHROPIC_API_KEY=your_key_here
# Then just run: mcp-use-cli
```
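
Internally, the provider/model pair you pick maps onto one of the LangChain chat classes listed in `package.json`. The factory below is only a rough sketch of that mapping (the function itself is hypothetical and not the CLI's actual source; only the `@langchain/*` class names come from the dependency list):

```ts
// Hypothetical factory: pick a LangChain chat model class by provider name.
// Each class reads its usual environment variable when no key is passed
// explicitly (OPENAI_API_KEY, ANTHROPIC_API_KEY, GOOGLE_API_KEY, MISTRAL_API_KEY).
import {ChatOpenAI} from '@langchain/openai';
import {ChatAnthropic} from '@langchain/anthropic';
import {ChatGoogleGenerativeAI} from '@langchain/google-genai';
import {ChatMistralAI} from '@langchain/mistralai';
import type {BaseChatModel} from '@langchain/core/language_models/chat_models';

export function createLlm(provider: string, model: string): BaseChatModel {
  switch (provider) {
    case 'openai':
      return new ChatOpenAI({model});
    case 'anthropic':
      return new ChatAnthropic({model});
    case 'google':
      return new ChatGoogleGenerativeAI({model});
    case 'mistral':
      return new ChatMistralAI({model});
    default:
      throw new Error(`Unknown provider: ${provider}`);
  }
}
```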

## Usage

```
$ mcp-use-cli --help

  Usage
    $ mcp-use-cli

  Options
    --name    Your name (optional)
    --config  Path to MCP configuration file (optional)

  Examples
    $ mcp-use-cli
    $ mcp-use-cli --name=Jane
    $ mcp-use-cli --config=./mcp-config.json

  Environment Variables
    OPENAI_API_KEY    Required - Your OpenAI API key

  Setup
    1. Set your OpenAI API key: export OPENAI_API_KEY=your_key_here
    2. Run: mcp-use-cli
    3. Start chatting with MCP servers!
```
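
The `--name` and `--config` flags above are parsed with [meow](https://github.com/sindresorhus/meow), which the package lists as a dependency. The entry point below is only a sketch of that wiring (the flag definitions and the `App` props are assumptions for illustration, not the published `cli.js`):

```tsx
#!/usr/bin/env node
// Sketch of a meow + Ink entry point (hypothetical, for illustration only).
import React from 'react';
import {render} from 'ink';
import meow from 'meow';
import App from './app.js';

const cli = meow(
  `
  Usage
    $ mcp-use-cli

  Options
    --name    Your name (optional)
    --config  Path to MCP configuration file (optional)
`,
  {
    importMeta: import.meta,
    flags: {
      name: {type: 'string'},
      config: {type: 'string'},
    },
  },
);

render(<App name={cli.flags.name} configPath={cli.flags.config} />);
```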

## Configuration

By default, the CLI connects to a filesystem MCP server in `/tmp`. You can provide a custom configuration file:

```json
{
  "servers": {
    "filesystem": {
      "command": "npx",
      "args": ["-y", "@modelcontextprotocol/server-filesystem", "/path/to/directory"],
      "env": {}
    },
    "other-server": {
      "command": "your-mcp-server-command",
      "args": ["--arg1", "value1"],
      "env": {}
    }
  }
}
```
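
For context, a file like the one above is ultimately handed to mcp-use. The snippet below is a standalone sketch of that flow; it assumes mcp-use's documented `MCPClient.fromDict()` / `MCPAgent` usage and an OpenAI model, and is not the CLI's actual `mcp-service` code:

```ts
// Minimal sketch: load an MCP server config and run one natural-language
// query through mcp-use. Assumes the documented MCPClient/MCPAgent API.
import {readFile} from 'node:fs/promises';
import {ChatOpenAI} from '@langchain/openai';
import {MCPAgent, MCPClient} from 'mcp-use';

const config = JSON.parse(await readFile('./mcp-config.json', 'utf8'));

// mcp-use's own examples nest servers under an `mcpServers` key, so map the
// CLI-style `servers` key shown above if that is what the file uses.
const client = MCPClient.fromDict({mcpServers: config.servers ?? config.mcpServers});

const agent = new MCPAgent({
  llm: new ChatOpenAI({model: 'gpt-4o-mini'}),
  client,
  maxSteps: 20,
});

console.log(await agent.run('List files in the current directory'));
```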

## Slash Commands

Switch LLM providers and configure settings using slash commands (similar to Claude Code):

```bash
# Set API keys (stored securely)
/setkey openai sk-1234567890abcdef...
/setkey anthropic ant_1234567890abcdef...
/clearkeys # Clear all stored keys

# Switch models
/model openai gpt-4o
/model anthropic claude-3-5-sonnet-20241022
/model google gemini-1.5-pro
/model mistral mistral-large-latest

# List available models
/models
/models anthropic

# Configuration
/config temp 0.5
/config tokens 4000

# Status and help
/status
/help
```

## Chat Examples

- "List files in the current directory"
- "Create a new file called hello.txt with the content 'Hello, World!'"
- "Search for files containing 'TODO'"
- "What's the structure of this project?"

## Architecture

This CLI uses:

- **Frontend**: React + Ink for the terminal UI
- **Agent**: mcp-use MCPAgent for LLM + MCP integration
- **LLM**: OpenAI GPT-4o-mini
- **Transport**: Direct TypeScript integration (no API layer)
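
A condensed sketch of the React + Ink pattern this refers to (illustrative only; the component and prop names are invented, not the package's actual `app.js`):

```tsx
// Illustrative chat loop in Ink: render past messages, read a prompt with
// ink-text-input, and hand each submitted line to an agent callback.
import React, {useState} from 'react';
import {Box, Text} from 'ink';
import TextInput from 'ink-text-input';

type Props = {
  // e.g. a thin wrapper around MCPAgent.run()
  ask: (prompt: string) => Promise<string>;
};

export default function Chat({ask}: Props) {
  const [input, setInput] = useState('');
  const [lines, setLines] = useState<string[]>([]);

  return (
    <Box flexDirection="column">
      {lines.map((line, i) => (
        <Text key={i}>{line}</Text>
      ))}
      <Box>
        <Text color="green">› </Text>
        <TextInput
          value={input}
          onChange={setInput}
          onSubmit={async value => {
            setInput('');
            setLines(previous => [...previous, `you: ${value}`]);
            const reply = await ask(value);
            setLines(previous => [...previous, `agent: ${reply}`]);
          }}
        />
      </Box>
    </Box>
  );
}
```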

## License

MIT