casual-mcp 0.5.0__tar.gz → 0.7.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- casual_mcp-0.7.0/PKG-INFO +193 -0
- casual_mcp-0.7.0/README.md +168 -0
- {casual_mcp-0.5.0 → casual_mcp-0.7.0}/pyproject.toml +4 -3
- {casual_mcp-0.5.0 → casual_mcp-0.7.0}/src/casual_mcp/__init__.py +4 -0
- casual_mcp-0.7.0/src/casual_mcp/cli.py +457 -0
- casual_mcp-0.7.0/src/casual_mcp/main.py +179 -0
- casual_mcp-0.7.0/src/casual_mcp/mcp_tool_chat.py +304 -0
- {casual_mcp-0.5.0 → casual_mcp-0.7.0}/src/casual_mcp/models/__init__.py +16 -0
- casual_mcp-0.7.0/src/casual_mcp/models/chat_stats.py +37 -0
- {casual_mcp-0.5.0 → casual_mcp-0.7.0}/src/casual_mcp/models/config.py +3 -1
- casual_mcp-0.7.0/src/casual_mcp/models/toolset_config.py +40 -0
- casual_mcp-0.7.0/src/casual_mcp/tool_filter.py +171 -0
- casual_mcp-0.7.0/src/casual_mcp.egg-info/PKG-INFO +193 -0
- {casual_mcp-0.5.0 → casual_mcp-0.7.0}/src/casual_mcp.egg-info/SOURCES.txt +5 -0
- {casual_mcp-0.5.0 → casual_mcp-0.7.0}/src/casual_mcp.egg-info/requires.txt +2 -1
- casual_mcp-0.7.0/tests/test_chat_stats.py +118 -0
- casual_mcp-0.7.0/tests/test_mcp_tool_chat.py +499 -0
- casual_mcp-0.7.0/tests/test_tool_filter.py +352 -0
- casual_mcp-0.5.0/PKG-INFO +0 -630
- casual_mcp-0.5.0/README.md +0 -606
- casual_mcp-0.5.0/src/casual_mcp/cli.py +0 -84
- casual_mcp-0.5.0/src/casual_mcp/main.py +0 -94
- casual_mcp-0.5.0/src/casual_mcp/mcp_tool_chat.py +0 -167
- casual_mcp-0.5.0/src/casual_mcp.egg-info/PKG-INFO +0 -630
- casual_mcp-0.5.0/tests/test_mcp_tool_chat.py +0 -256
- {casual_mcp-0.5.0 → casual_mcp-0.7.0}/LICENSE +0 -0
- {casual_mcp-0.5.0 → casual_mcp-0.7.0}/setup.cfg +0 -0
- {casual_mcp-0.5.0 → casual_mcp-0.7.0}/src/casual_mcp/convert_tools.py +0 -0
- {casual_mcp-0.5.0 → casual_mcp-0.7.0}/src/casual_mcp/logging.py +0 -0
- {casual_mcp-0.5.0 → casual_mcp-0.7.0}/src/casual_mcp/models/generation_error.py +0 -0
- {casual_mcp-0.5.0 → casual_mcp-0.7.0}/src/casual_mcp/models/mcp_server_config.py +0 -0
- {casual_mcp-0.5.0 → casual_mcp-0.7.0}/src/casual_mcp/models/model_config.py +0 -0
- {casual_mcp-0.5.0 → casual_mcp-0.7.0}/src/casual_mcp/provider_factory.py +0 -0
- {casual_mcp-0.5.0 → casual_mcp-0.7.0}/src/casual_mcp/tool_cache.py +0 -0
- {casual_mcp-0.5.0 → casual_mcp-0.7.0}/src/casual_mcp/utils.py +0 -0
- {casual_mcp-0.5.0 → casual_mcp-0.7.0}/src/casual_mcp.egg-info/dependency_links.txt +0 -0
- {casual_mcp-0.5.0 → casual_mcp-0.7.0}/src/casual_mcp.egg-info/entry_points.txt +0 -0
- {casual_mcp-0.5.0 → casual_mcp-0.7.0}/src/casual_mcp.egg-info/top_level.txt +0 -0
- {casual_mcp-0.5.0 → casual_mcp-0.7.0}/tests/test_provider_factory.py +0 -0
- {casual_mcp-0.5.0 → casual_mcp-0.7.0}/tests/test_session_management.py +0 -0
- {casual_mcp-0.5.0 → casual_mcp-0.7.0}/tests/test_tool_cache.py +0 -0
- {casual_mcp-0.5.0 → casual_mcp-0.7.0}/tests/test_tools.py +0 -0
- {casual_mcp-0.5.0 → casual_mcp-0.7.0}/tests/test_utils.py +0 -0
## casual_mcp-0.7.0/PKG-INFO (new file, +193)

```
Metadata-Version: 2.4
Name: casual-mcp
Version: 0.7.0
Summary: Multi-server MCP client for LLM tool orchestration
Author: Alex Stansfield
License: MIT
Project-URL: Homepage, https://github.com/casualgenius/casual-mcp
Project-URL: Repository, https://github.com/casualgenius/casual-mcp
Project-URL: Issue Tracker, https://github.com/casualgenius/casual-mcp/issues
Requires-Python: >=3.10
Description-Content-Type: text/markdown
License-File: LICENSE
Requires-Dist: casual-llm[openai]>=0.4.3
Requires-Dist: dateparser>=1.2.1
Requires-Dist: fastapi>=0.115.12
Requires-Dist: fastmcp>=2.12.4
Requires-Dist: jinja2>=3.1.6
Requires-Dist: python-dotenv>=1.1.0
Requires-Dist: questionary>=2.1.0
Requires-Dist: requests>=2.32.3
Requires-Dist: rich>=14.0.0
Requires-Dist: typer>=0.19.2
Requires-Dist: uvicorn>=0.34.2
Dynamic: license-file
```

The remainder of PKG-INFO (lines 26–193, the package long description) is identical to the new README.md, rendered once below.
## casual_mcp-0.7.0/README.md (new file, +168)

# Casual MCP




**Casual MCP** is a Python framework for building, evaluating, and serving LLMs with tool-calling capabilities using [Model Context Protocol (MCP)](https://modelcontextprotocol.io).

## Features

- Multi-server MCP client using [FastMCP](https://github.com/jlowin/fastmcp)
- OpenAI and Ollama provider support (via [casual-llm](https://github.com/AlexStansfield/casual-llm))
- Recursive tool-calling chat loop
- Toolsets for selective tool filtering per request
- Usage statistics tracking (tokens, tool calls, LLM calls)
- System prompt templating with Jinja2
- CLI and API interfaces

## Installation

```bash
# Using uv
uv add casual-mcp

# Using pip
pip install casual-mcp
```

For development:

```bash
git clone https://github.com/casualgenius/casual-mcp.git
cd casual-mcp
uv sync --group dev
```

## Quick Start

1. Create `casual_mcp_config.json`:

```json
{
  "models": {
    "gpt-4.1": { "provider": "openai", "model": "gpt-4.1" }
  },
  "servers": {
    "time": { "command": "python", "args": ["mcp-servers/time/server.py"] }
  }
}
```

2. Set your API key: `export OPENAI_API_KEY=your-key`

3. Start the server: `casual-mcp serve`

4. Make a request:

```bash
curl -X POST http://localhost:8000/generate \
  -H "Content-Type: application/json" \
  -d '{"model": "gpt-4.1", "prompt": "What time is it?"}'
```

## Configuration

Configure models, MCP servers, and toolsets in `casual_mcp_config.json`.

```json
{
  "models": {
    "gpt-4.1": { "provider": "openai", "model": "gpt-4.1" }
  },
  "servers": {
    "time": { "command": "python", "args": ["server.py"] },
    "weather": { "url": "http://localhost:5050/mcp" }
  },
  "tool_sets": {
    "basic": { "description": "Basic tools", "servers": { "time": true } }
  }
}
```

See [Configuration Guide](docs/configuration.md) for full details on models, servers, toolsets, and templates.

## CLI

```bash
casual-mcp serve      # Start API server
casual-mcp servers    # List configured servers
casual-mcp models     # List configured models
casual-mcp toolsets   # Manage toolsets interactively
casual-mcp tools      # List available tools
```

See [CLI & API Reference](docs/cli-api.md) for all commands and options.

## API

| Endpoint | Description |
|----------|-------------|
| `POST /chat` | Send message history |
| `POST /generate` | Send prompt with optional session |
| `GET /generate/session/{id}` | Get session messages |
| `GET /toolsets` | List available toolsets |

See [CLI & API Reference](docs/cli-api.md#api-endpoints) for request/response formats.
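For illustration, a `POST /chat` call can be made from Python with `requests` (itself a declared dependency of casual-mcp). The body shape here, a `model` name plus an OpenAI-style `messages` list, is an assumption extrapolated from the `/generate` example above, not a confirmed schema; the reference linked above is authoritative.

```python
# Hedged sketch of POST /chat; the request fields are assumptions.
import requests

response = requests.post(
    "http://localhost:8000/chat",
    json={
        "model": "gpt-4.1",  # must match a key under "models" in the config
        "messages": [
            {"role": "system", "content": "You are a helpful assistant."},
            {"role": "user", "content": "What time is it?"},
        ],
    },
    timeout=60,
)
response.raise_for_status()
print(response.json())
```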
## Programmatic Usage

```python
from casual_llm import SystemMessage, UserMessage
from casual_mcp import McpToolChat, ProviderFactory, load_config, load_mcp_client

config = load_config("casual_mcp_config.json")
mcp_client = load_mcp_client(config)

provider_factory = ProviderFactory()
provider = provider_factory.get_provider("gpt-4.1", config.models["gpt-4.1"])

chat = McpToolChat(mcp_client, provider)
messages = [
    SystemMessage(content="You are a helpful assistant."),
    UserMessage(content="What time is it?")
]
response = await chat.chat(messages)
```

See [Programmatic Usage Guide](docs/programmatic-usage.md) for `McpToolChat`, usage statistics, toolsets, and common patterns.

## Architecture

Casual MCP orchestrates LLMs and MCP tool servers in a recursive loop:

```
┌─────────────┐      ┌────────────┐      ┌────────────────┐
│ MCP Servers │─────▶│ Tool Cache │─────▶│ Tool Converter │
└─────────────┘      └────────────┘      └────────────────┘
                            │                     │
                            ▼                     ▼
                    ┌──────────────────────────────┐
                    │       McpToolChat Loop       │
                    │                              │
                    │  LLM ──▶ Tool Calls ──▶ MCP  │
                    │   ▲                      │   │
                    │   └──────── Results ─────┘   │
                    └──────────────────────────────┘
```

1. **MCP Client** connects to tool servers (local stdio or remote HTTP/SSE)
2. **Tool Cache** fetches and caches tools from all servers
3. **ProviderFactory** creates LLM providers from casual-llm
4. **McpToolChat** runs the recursive loop until the LLM provides a final answer
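The loop in step 4 can be sketched abstractly. This is a conceptual illustration only, not casual-mcp's actual implementation; the injected callables and the message dict shapes are assumptions.

```python
from typing import Any, Awaitable, Callable

Message = dict[str, Any]

# Conceptual sketch of a recursive tool-calling loop (NOT casual-mcp's
# real code). call_llm and call_tool are hypothetical stand-ins for the
# provider and the MCP client.
async def tool_loop(
    messages: list[Message],
    call_llm: Callable[[list[Message]], Awaitable[Message]],
    call_tool: Callable[[str, dict[str, Any]], Awaitable[str]],
) -> str:
    while True:
        reply = await call_llm(messages)            # one provider call
        tool_calls = reply.get("tool_calls") or []
        if not tool_calls:                          # no tool calls: final answer
            return reply.get("content", "")
        messages.append(reply)                      # keep the assistant turn
        for call in tool_calls:                     # execute each call via MCP
            result = await call_tool(call["name"], call["arguments"])
            messages.append(
                {"role": "tool", "tool_call_id": call["id"], "content": result}
            )
```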
## Environment Variables

| Variable | Default | Description |
|----------|---------|-------------|
| `OPENAI_API_KEY` | - | Required for OpenAI provider |
| `TOOL_RESULT_FORMAT` | `result` | `result`, `function_result`, or `function_args_result` |
| `MCP_TOOL_CACHE_TTL` | `30` | Tool cache TTL in seconds (0 = indefinite) |
| `LOG_LEVEL` | `INFO` | Logging level |
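For a local dev script, these can be pinned before casual-mcp is imported; the names below come from the table, the values are placeholders only.

```python
# Illustrative defaults for a local dev session.
import os

os.environ.setdefault("OPENAI_API_KEY", "sk-your-key")
os.environ.setdefault("TOOL_RESULT_FORMAT", "function_result")
os.environ.setdefault("MCP_TOOL_CACHE_TTL", "0")  # 0 = cache indefinitely
os.environ.setdefault("LOG_LEVEL", "DEBUG")
```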
## Troubleshooting

Common issues and solutions are covered in the [Troubleshooting Guide](docs/troubleshooting.md).

## License

[MIT License](LICENSE)
## {casual_mcp-0.5.0 → casual_mcp-0.7.0}/pyproject.toml

```diff
@@ -4,19 +4,20 @@ build-backend = "setuptools.build_meta"

 [project]
 name = "casual-mcp"
-version = "0.5.0"
+version = "0.7.0"
 description = "Multi-server MCP client for LLM tool orchestration"
 readme = "README.md"
 requires-python = ">=3.10"
 license = { text = "MIT" }
 authors = [{ name = "Alex Stansfield" }]
 dependencies = [
-    "casual-llm[openai]>=0.4.
+    "casual-llm[openai]>=0.4.3",
     "dateparser>=1.2.1",
     "fastapi>=0.115.12",
     "fastmcp>=2.12.4",
     "jinja2>=3.1.6",
     "python-dotenv>=1.1.0",
+    "questionary>=2.1.0",
     "requests>=2.32.3",
     "rich>=14.0.0",
     "typer>=0.19.2",
@@ -69,7 +70,7 @@ dev = [
 ]

 [tool.bumpversion]
-current_version = "0.
+current_version = "0.6.0"
 commit = true
 tag = true
```
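After upgrading, the installed version can be confirmed from Python; `__version__` is resolved through `importlib.metadata`, as the `__init__.py` diff below shows.

```python
# Sanity check after upgrading: __version__ is read from package
# metadata at import time (see the __init__.py diff below).
import casual_mcp

print(casual_mcp.__version__)  # "0.7.0" for this release
```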
## {casual_mcp-0.5.0 → casual_mcp-0.7.0}/src/casual_mcp/__init__.py

```diff
@@ -1,6 +1,7 @@
 from importlib.metadata import version

 from . import models
+from .models.chat_stats import ChatStats, TokenUsageStats, ToolCallStats

 __version__ = version("casual-mcp")
 from .mcp_tool_chat import McpToolChat
@@ -17,4 +18,7 @@ __all__ = [
     "load_mcp_client",
     "render_system_prompt",
     "models",
+    "ChatStats",
+    "TokenUsageStats",
+    "ToolCallStats",
 ]
```
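With these exports in place, the stats models behind the "Usage statistics tracking" feature are importable from the package root; the names come straight from the new `__all__` entries above.

```python
# Available at the package root as of 0.7.0.
from casual_mcp import ChatStats, TokenUsageStats, ToolCallStats
```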