casual-mcp 0.1.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (30)
  1. casual_mcp-0.1.0/LICENSE +7 -0
  2. casual_mcp-0.1.0/PKG-INFO +352 -0
  3. casual_mcp-0.1.0/README.md +324 -0
  4. casual_mcp-0.1.0/pyproject.toml +61 -0
  5. casual_mcp-0.1.0/setup.cfg +4 -0
  6. casual_mcp-0.1.0/src/casual_mcp/__init__.py +13 -0
  7. casual_mcp-0.1.0/src/casual_mcp/cli.py +68 -0
  8. casual_mcp-0.1.0/src/casual_mcp/logging.py +30 -0
  9. casual_mcp-0.1.0/src/casual_mcp/main.py +118 -0
  10. casual_mcp-0.1.0/src/casual_mcp/mcp_tool_chat.py +90 -0
  11. casual_mcp-0.1.0/src/casual_mcp/models/__init__.py +33 -0
  12. casual_mcp-0.1.0/src/casual_mcp/models/config.py +10 -0
  13. casual_mcp-0.1.0/src/casual_mcp/models/generation_error.py +10 -0
  14. casual_mcp-0.1.0/src/casual_mcp/models/mcp_server_config.py +39 -0
  15. casual_mcp-0.1.0/src/casual_mcp/models/messages.py +31 -0
  16. casual_mcp-0.1.0/src/casual_mcp/models/model_config.py +21 -0
  17. casual_mcp-0.1.0/src/casual_mcp/models/tool_call.py +14 -0
  18. casual_mcp-0.1.0/src/casual_mcp/multi_server_mcp_client.py +170 -0
  19. casual_mcp-0.1.0/src/casual_mcp/providers/__init__.py +0 -0
  20. casual_mcp-0.1.0/src/casual_mcp/providers/abstract_provider.py +15 -0
  21. casual_mcp-0.1.0/src/casual_mcp/providers/ollama_provider.py +72 -0
  22. casual_mcp-0.1.0/src/casual_mcp/providers/openai_provider.py +178 -0
  23. casual_mcp-0.1.0/src/casual_mcp/providers/provider_factory.py +48 -0
  24. casual_mcp-0.1.0/src/casual_mcp/utils.py +90 -0
  25. casual_mcp-0.1.0/src/casual_mcp.egg-info/PKG-INFO +352 -0
  26. casual_mcp-0.1.0/src/casual_mcp.egg-info/SOURCES.txt +28 -0
  27. casual_mcp-0.1.0/src/casual_mcp.egg-info/dependency_links.txt +1 -0
  28. casual_mcp-0.1.0/src/casual_mcp.egg-info/entry_points.txt +2 -0
  29. casual_mcp-0.1.0/src/casual_mcp.egg-info/requires.txt +18 -0
  30. casual_mcp-0.1.0/src/casual_mcp.egg-info/top_level.txt +1 -0
@@ -0,0 +1,7 @@
+ Copyright (c) 2025 Alex Stansfield
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
@@ -0,0 +1,352 @@
+ Metadata-Version: 2.4
+ Name: casual-mcp
+ Version: 0.1.0
+ Summary: Multi-server MCP client for LLM tool orchestration
+ Author: Alex Stansfield
+ License: MIT
+ Requires-Python: >=3.10
+ Description-Content-Type: text/markdown
+ License-File: LICENSE
+ Requires-Dist: amadeus>=12.0.0
+ Requires-Dist: dateparser>=1.2.1
+ Requires-Dist: fastapi>=0.115.12
+ Requires-Dist: fastmcp>=2.3.4
+ Requires-Dist: jinja2>=3.1.6
+ Requires-Dist: ollama>=0.4.8
+ Requires-Dist: openai>=1.78.0
+ Requires-Dist: python-dotenv>=1.1.0
+ Requires-Dist: requests>=2.32.3
+ Requires-Dist: rich>=14.0.0
+ Requires-Dist: uvicorn>=0.34.2
+ Provides-Extra: dev
+ Requires-Dist: ruff; extra == "dev"
+ Requires-Dist: black; extra == "dev"
+ Requires-Dist: mypy; extra == "dev"
+ Requires-Dist: pytest; extra == "dev"
+ Requires-Dist: coverage; extra == "dev"
+ Dynamic: license-file
+
+ # 🧠 Casual MCP
+
+ ![PyPI](https://img.shields.io/pypi/v/casual-mcp)
+ ![License: MIT](https://img.shields.io/badge/License-MIT-blue.svg)
+
+ **Casual MCP** is a Python framework for building, evaluating, and serving LLMs with tool-calling capabilities using the [Model Context Protocol (MCP)](https://modelcontextprotocol.io).
+ It includes:
+
+ - ✅ A multi-server MCP client
+ - ✅ Provider support for OpenAI (and OpenAI-compatible APIs)
+ - ✅ A recursive tool-calling chat loop
+ - ✅ System prompt templating with Jinja2
+ - ✅ A basic API exposing a chat endpoint
+
+ ## ✨ Features
+
+ - Plug-and-play multi-server tool orchestration
+ - Prompt templating with Jinja2
+ - Configurable via JSON
+ - CLI and API access
+ - Extensible architecture
+
+ ## 🔧 Installation
+
+ ```bash
+ pip install casual-mcp
+ ```
+
+ Or for development:
+
+ ```bash
+ git clone https://github.com/AlexStansfield/casual-mcp.git
+ cd casual-mcp
+ uv pip install -e .[dev]
+ ```
+
+ ## 🧩 Providers
+
+ Providers allow access to LLMs. Currently, only an OpenAI provider is supplied. However, in the model configuration you can supply an optional `endpoint`, allowing you to use any OpenAI-compatible API (e.g., LM Studio).
+
+ Ollama support is planned for a future version, along with support for custom pluggable providers via a standard interface.
+
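+ For example, pointing the OpenAI provider at a local LM Studio server only requires setting `endpoint`. A minimal sketch using the typed config model described under [Programmatic Usage](#-programmatic-usage) (the endpoint is LM Studio's default; the model name is illustrative):
+
+ ```python
+ from casual_mcp.models import OpenAIModelConfig
+
+ # Any OpenAI-compatible backend works; LM Studio's default local endpoint is assumed here.
+ lm_studio_model = OpenAIModelConfig(
+     model="qwen3-8b",
+     endpoint="http://localhost:1234/v1",
+ )
+ ```
+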
+ ## 🧩 System Prompt Templates
+
+ System prompts are defined as [Jinja2](https://jinja.palletsprojects.com) templates in the `prompt-templates/` directory.
+
+ They are referenced in the config file to specify the system prompt to use for each model.
+
+ This lets you define custom prompts per model, which is useful for models that do not natively support tools. Templates are passed the tool list in the `tools` variable.
+
+ ```jinja2
+ # prompt-templates/example_prompt.j2
+ Here is a list of functions in JSON format that you can invoke:
+ [
+ {% for tool in tools %}
+   {
+     "name": "{{ tool.name }}",
+     "description": "{{ tool.description }}",
+     "parameters": {
+ {% for param_name, param in tool.inputSchema.items() %}
+       "{{ param_name }}": {
+         "description": "{{ param.description }}",
+         "type": "{{ param.type }}"{% if param.default is defined %},
+         "default": "{{ param.default }}"{% endif %}
+       }{% if not loop.last %},{% endif %}
+ {% endfor %}
+     }
+   }{% if not loop.last %},{% endif %}
+ {% endfor %}
+ ]
+ ```
+
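+ For reference, a template like this can be rendered with plain Jinja2, which is presumably what happens internally when a model's `template` is set. A minimal sketch, assuming tool objects expose `name`, `description`, and `inputSchema` as the template above expects:
+
+ ```python
+ from jinja2 import Environment, FileSystemLoader
+
+ def render_system_prompt(tools) -> str:
+     # `tools` would come from MultiServerMCPClient.list_tools()
+     env = Environment(loader=FileSystemLoader("prompt-templates"))
+     template = env.get_template("example_prompt.j2")
+     return template.render(tools=tools)
+ ```
+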
+ ## ⚙️ Configuration File (`config.json`)
+
+ 📄 See the [Programmatic Usage](#-programmatic-usage) section to build configs and messages with typed models.
+
+ The CLI and API can be configured using a `config.json` file that defines:
+
+ - 🔧 Available **models** and their providers
+ - 🧰 Available **MCP tool servers**
+ - 🧩 Optional tool namespacing behavior
+
+ ### 🔸 Example
+
+ ```json
+ {
+   "namespaced_tools": false,
+   "models": {
+     "lm-qwen-3": {
+       "provider": "openai",
+       "endpoint": "http://localhost:1234/v1",
+       "model": "qwen3-8b",
+       "template": "lm-studio-native-tools"
+     },
+     "gpt-4.1": {
+       "provider": "openai",
+       "model": "gpt-4.1"
+     }
+   },
+   "servers": {
+     "time": {
+       "type": "python",
+       "path": "mcp-servers/time/server.py"
+     },
+     "weather": {
+       "type": "http",
+       "url": "http://localhost:5050/mcp"
+     }
+   }
+ }
+ ```
+
+ ### 🔹 `models`
+
+ Each model has:
+
+ - `provider`: `"openai"` or `"ollama"`
+ - `model`: the model name (e.g., `gpt-4.1`, `qwen3-8b`)
+ - `endpoint`: required for custom OpenAI-compatible backends (e.g., LM Studio)
+ - `template`: optional name of a prompt template used to apply model-specific tool-calling formatting
+
+ ### 🔹 `servers`
+
+ Each server has:
+
+ - `type`: `"python"`, `"http"`, `"node"`, or `"uvx"`
+ - For `python`/`node`: `path` to the script
+ - For `http`: `url` of the remote MCP endpoint
+ - For `uvx`: `package` naming the package to run
+ - Optional: `env` for subprocess environments, `system_prompt` to override the server's prompt
+
+ ### 🔹 `namespaced_tools`
+
+ If `true`, tool names are prefixed with their server name (e.g., `weather-get_weather`).
+ This disambiguates tool names across servers and avoids collisions when multiple servers expose a tool with the same name.
+
+ ## 🛠 CLI Reference
+
+ ### `casual-mcp serve`
+ Start the API server.
+
+ **Options:**
+ - `--host`: Host to bind (default `0.0.0.0`)
+ - `--port`: Port to serve on (default `8000`)
+
+ ### `casual-mcp servers`
+ Loads the config and outputs the list of MCP servers you have configured.
+
+ #### Example Output
+ ```
+ $ casual-mcp servers
+ ┏━━━━━━━━━┳━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━┓
+ ┃ Name    ┃ Type   ┃ Path / Package / Url          ┃ Env ┃
+ ┡━━━━━━━━━╇━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━┩
+ │ math    │ python │ mcp-servers/math/server.py    │     │
+ │ time    │ python │ mcp-servers/time-v2/server.py │     │
+ │ weather │ python │ mcp-servers/weather/server.py │     │
+ │ words   │ python │ mcp-servers/words/server.py   │     │
+ └─────────┴────────┴───────────────────────────────┴─────┘
+ ```
+
+ ### `casual-mcp models`
+ Loads the config and outputs the list of models you have configured.
+
+ #### Example Output
+ ```
+ $ casual-mcp models
+ ┏━━━━━━━━━━━━━━━┳━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━┓
+ ┃ Name          ┃ Provider ┃ Model                    ┃ Endpoint              ┃
+ ┡━━━━━━━━━━━━━━━╇━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━┩
+ │ lm-phi-4-mini │ openai   │ phi-4-mini-instruct      │ http://kovacs:1234/v1 │
+ │ lm-hermes-3   │ openai   │ hermes-3-llama-3.2-3b    │ http://kovacs:1234/v1 │
+ │ lm-groq       │ openai   │ llama-3-groq-8b-tool-use │ http://kovacs:1234/v1 │
+ │ gpt-4o-mini   │ openai   │ gpt-4o-mini              │                       │
+ │ gpt-4.1-nano  │ openai   │ gpt-4.1-nano             │                       │
+ │ gpt-4.1-mini  │ openai   │ gpt-4.1-mini             │                       │
+ │ gpt-4.1       │ openai   │ gpt-4.1                  │                       │
+ └───────────────┴──────────┴──────────────────────────┴───────────────────────┘
+ ```
+
+ ## 🧠 Programmatic Usage
+
+ You can import and use the core framework in your own Python code.
+
+ ### ✅ Exposed Interfaces
+
+ #### `McpToolChat`
+ Orchestrates LLM interaction with tools using a recursive loop.
+
+ ```python
+ from casual_mcp import McpToolChat
+
+ chat = McpToolChat(mcp_client, provider, system_prompt)
+ response = await chat.chat(prompt="What time is it in London?")
+ ```
+
+ #### `MultiServerMCPClient`
+ Connects to multiple MCP tool servers and manages available tools.
+
+ ```python
+ from casual_mcp import MultiServerMCPClient
+
+ mcp_client = MultiServerMCPClient()
+ await mcp_client.load_config(config["servers"])
+ tools = await mcp_client.list_tools()
+ ```
+
+ #### `ProviderFactory`
+ Instantiates LLM providers based on the selected model config.
+
+ ```python
+ from casual_mcp.providers.provider_factory import ProviderFactory
+
+ provider_factory = ProviderFactory()
+ provider = provider_factory.get_provider("lm-qwen-3", model_config)
+ ```
+
+ #### `load_config`
+ Loads your `config.json` into a validated config object.
+
+ ```python
+ from casual_mcp.utils import load_config
+
+ config = load_config("config.json")
+ ```
+
+ #### Model and Server Configs
+
+ Exported models:
+ - PythonMcpServerConfig
+ - UvxMcpServerConfig
+ - NodeMcpServerConfig
+ - HttpMcpServerConfig
+ - OpenAIModelConfig
+
+ Use these types to build valid configs:
+
+ ```python
+ from casual_mcp.models import OpenAIModelConfig, PythonMcpServerConfig
+
+ model = OpenAIModelConfig(model="llama3", endpoint="http://...")
+ server = PythonMcpServerConfig(path="time/server.py")
+ ```
+
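+ The HTTP variant presumably mirrors the JSON config keys shown earlier; a hedged sketch (the `url` field name follows the `servers` example above, and the value is illustrative):
+
+ ```python
+ from casual_mcp.models import HttpMcpServerConfig
+
+ # Hypothetical values; point this at any running MCP HTTP endpoint.
+ weather_server = HttpMcpServerConfig(url="http://localhost:5050/mcp")
+ ```
+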
+ #### Chat Messages
+
+ Exported models:
+ - AssistantMessage
+ - SystemMessage
+ - ToolResultMessage
+ - UserMessage
+
+ Use these types to build message chains:
+
+ ```python
+ from casual_mcp.models import SystemMessage, UserMessage
+
+ messages = [
+     SystemMessage(content="You are a friendly tool calling assistant."),
+     UserMessage(content="What is the time?")
+ ]
+ ```
+
+ ### Example
+
+ ```python
+ from casual_mcp import McpToolChat, MultiServerMCPClient, load_config, ProviderFactory
+ from casual_mcp.models import SystemMessage, UserMessage
+
+ model = "gpt-4.1-nano"
+ system_prompt = None  # assumed optional here, since a system prompt is supplied via the messages below
+ messages = [
+     SystemMessage(content="""You are a tool calling assistant.
+ You have access to up-to-date information through the tools.
+ Respond naturally and confidently, as if you already know all the facts."""),
+     UserMessage(content="Will I need to take my umbrella to London today?")
+ ]
+
+ # Load the Config from the File
+ config = load_config("config.json")
+
+ # Setup the MultiServer MCP Client
+ mcp_client = MultiServerMCPClient()
+ await mcp_client.load_config(config.servers)
+
+ # Get the Provider for the Model (create the factory before setting its tools)
+ provider_factory = ProviderFactory()
+ provider_factory.set_tools(await mcp_client.list_tools())
+ provider = provider_factory.get_provider(model, config.models[model])
+
+ # Perform the Chat and Tool calling
+ chat = McpToolChat(mcp_client, provider, system_prompt)
+ response_messages = await chat.chat(messages=messages)
+ ```
+
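+ Note that the example uses `await` at the top level, so it needs to run inside an event loop. A minimal wrapper sketch:
+
+ ```python
+ import asyncio
+
+ async def main() -> None:
+     ...  # body of the example above
+
+ if __name__ == "__main__":
+     asyncio.run(main())
+ ```
+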
+ ## 🚀 API Usage
+
+ ### Start the API Server
+
+ ```bash
+ casual-mcp serve --host 0.0.0.0 --port 8000
+ ```
+
+ You can then POST to `/chat` to trigger tool-calling LLM responses.
+
+ The request takes a JSON body consisting of:
+ - `model`: the LLM model to use
+ - `user_prompt`: optional, the latest user message (required if `messages` is not provided)
+ - `messages`: optional, a list of chat messages (system, assistant, user, etc.) to pass to the API, letting you keep your own chat session in the client calling the API
+ - `session_id`: an optional ID that stores all the messages from the session and provides them back to the LLM for context
+
+ You can pass either a `user_prompt` or a list of `messages`, depending on your use case.
+
+ Example:
+ ```json
+ {
+   "session_id": "my-test-session",
+   "model": "gpt-4o-mini",
+   "user_prompt": "can you explain what the word consistent means?"
+ }
+ ```
+
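+ Since `requests` is already a dependency, calling the endpoint from Python is straightforward. A minimal sketch, assuming the server is running locally on the default port (the exact shape of the returned messages is not documented here, so inspect the response yourself):
+
+ ```python
+ import requests
+
+ resp = requests.post(
+     "http://localhost:8000/chat",
+     json={
+         "session_id": "my-test-session",
+         "model": "gpt-4o-mini",
+         "user_prompt": "can you explain what the word consistent means?",
+     },
+     timeout=60,
+ )
+ resp.raise_for_status()
+ print(resp.json())  # inspect the returned messages
+ ```
+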
+ ## License
+
+ This software is released under the [MIT License](LICENSE).