casual-mcp 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- casual_mcp/__init__.py +13 -0
- casual_mcp/cli.py +68 -0
- casual_mcp/logging.py +30 -0
- casual_mcp/main.py +118 -0
- casual_mcp/mcp_tool_chat.py +90 -0
- casual_mcp/models/__init__.py +33 -0
- casual_mcp/models/config.py +10 -0
- casual_mcp/models/generation_error.py +10 -0
- casual_mcp/models/mcp_server_config.py +39 -0
- casual_mcp/models/messages.py +31 -0
- casual_mcp/models/model_config.py +21 -0
- casual_mcp/models/tool_call.py +14 -0
- casual_mcp/multi_server_mcp_client.py +170 -0
- casual_mcp/providers/__init__.py +0 -0
- casual_mcp/providers/abstract_provider.py +15 -0
- casual_mcp/providers/ollama_provider.py +72 -0
- casual_mcp/providers/openai_provider.py +178 -0
- casual_mcp/providers/provider_factory.py +48 -0
- casual_mcp/utils.py +90 -0
- casual_mcp-0.1.0.dist-info/METADATA +352 -0
- casual_mcp-0.1.0.dist-info/RECORD +25 -0
- casual_mcp-0.1.0.dist-info/WHEEL +5 -0
- casual_mcp-0.1.0.dist-info/entry_points.txt +2 -0
- casual_mcp-0.1.0.dist-info/licenses/LICENSE +7 -0
- casual_mcp-0.1.0.dist-info/top_level.txt +1 -0
casual_mcp-0.1.0.dist-info/METADATA
@@ -0,0 +1,352 @@
Metadata-Version: 2.4
Name: casual-mcp
Version: 0.1.0
Summary: Multi-server MCP client for LLM tool orchestration
Author: Alex Stansfield
License: MIT
Requires-Python: >=3.10
Description-Content-Type: text/markdown
License-File: LICENSE
Requires-Dist: amadeus>=12.0.0
Requires-Dist: dateparser>=1.2.1
Requires-Dist: fastapi>=0.115.12
Requires-Dist: fastmcp>=2.3.4
Requires-Dist: jinja2>=3.1.6
Requires-Dist: ollama>=0.4.8
Requires-Dist: openai>=1.78.0
Requires-Dist: python-dotenv>=1.1.0
Requires-Dist: requests>=2.32.3
Requires-Dist: rich>=14.0.0
Requires-Dist: uvicorn>=0.34.2
Provides-Extra: dev
Requires-Dist: ruff; extra == "dev"
Requires-Dist: black; extra == "dev"
Requires-Dist: mypy; extra == "dev"
Requires-Dist: pytest; extra == "dev"
Requires-Dist: coverage; extra == "dev"
Dynamic: license-file

# 🧠 Casual MCP



**Casual MCP** is a Python framework for building, evaluating, and serving LLMs with tool-calling capabilities using [Model Context Protocol (MCP)](https://modelcontextprotocol.io).

It includes:

- ✅ A multi-server MCP client
- ✅ Provider support for OpenAI (and OpenAI-compatible APIs)
- ✅ A recursive tool-calling chat loop
- ✅ System prompt templating with Jinja2
- ✅ A basic API exposing a chat endpoint

## ✨ Features

- Plug-and-play multi-server tool orchestration
- Prompt templating with Jinja2
- Configurable via JSON
- CLI and API access
- Extensible architecture

## 🔧 Installation

```bash
pip install casual-mcp
```

Or for development:

```bash
git clone https://github.com/AlexStansfield/casual-mcp.git
cd casual-mcp
uv pip install -e .[dev]
```

## 🧩 Providers

Providers give access to LLMs. Currently, only an OpenAI provider is supplied; however, the model configuration accepts an optional `endpoint`, letting you use any OpenAI-compatible API (e.g., LM Studio).
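
For example, a minimal model entry pointing the OpenAI provider at a local LM Studio endpoint might look like the following (the name `my-local-model` is a placeholder; see the full example under Configuration File below):

```json
{
  "models": {
    "my-local-model": {
      "provider": "openai",
      "endpoint": "http://localhost:1234/v1",
      "model": "qwen3-8b"
    }
  }
}
```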

Ollama support is planned for a future version, along with support for custom pluggable providers via a standard interface.
## 🧩 System Prompt Templates

System prompts are defined as [Jinja2](https://jinja.palletsprojects.com) templates in the `prompt-templates/` directory.

They are referenced in the config file to specify the system prompt to use per model.

This allows you to define custom prompts for each model, which is useful when using models that do not natively support tools. Templates are passed the tool list in the `tools` variable.

```jinja2
# prompt-templates/example_prompt.j2
Here is a list of functions in JSON format that you can invoke:
[
{% for tool in tools %}
  {
    "name": "{{ tool.name }}",
    "description": "{{ tool.description }}",
    "parameters": {
      {% for param_name, param in tool.inputSchema.items() %}
      "{{ param_name }}": {
        "description": "{{ param.description }}",
        "type": "{{ param.type }}"{% if param.default is defined %},
        "default": "{{ param.default }}"{% endif %}
      }{% if not loop.last %},{% endif %}
      {% endfor %}
    }
  }{% if not loop.last %},{% endif %}
{% endfor %}
]
```
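
The framework does this rendering for you. As a standalone sketch of how such a template can be rendered with plain Jinja2, where the tool list is a hypothetical stand-in for what `MultiServerMCPClient.list_tools()` returns:

```python
from jinja2 import Environment, FileSystemLoader

# Illustrative stand-in for the tool list; in practice it comes from
# MultiServerMCPClient.list_tools() (see Programmatic Usage below).
tools = [
    {"name": "get_time", "description": "Get the current time", "inputSchema": {}},
]

# Load and render the template from the prompt-templates/ directory
env = Environment(loader=FileSystemLoader("prompt-templates"))
template = env.get_template("example_prompt.j2")
system_prompt = template.render(tools=tools)
print(system_prompt)
```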

## ⚙️ Configuration File (`config.json`)

📄 See the [Programmatic Usage](#-programmatic-usage) section to build configs and messages with typed models.

The CLI and API can be configured using a `config.json` file that defines:

- 🔧 Available **models** and their providers
- 🧰 Available **MCP tool servers**
- 🧩 Optional tool namespacing behavior

### 🔸 Example

```json
{
  "namespaced_tools": false,
  "models": {
    "lm-qwen-3": {
      "provider": "openai",
      "endpoint": "http://localhost:1234/v1",
      "model": "qwen3-8b",
      "template": "lm-studio-native-tools"
    },
    "gpt-4.1": {
      "provider": "openai",
      "model": "gpt-4.1"
    }
  },
  "servers": {
    "time": {
      "type": "python",
      "path": "mcp-servers/time/server.py"
    },
    "weather": {
      "type": "http",
      "url": "http://localhost:5050/mcp"
    }
  }
}
```

### 🔹 `models`

Each model has:

- `provider`: `"openai"` or `"ollama"`
- `model`: the model name (e.g., `gpt-4.1`, `qwen3-8b`)
- `endpoint`: required for custom OpenAI-compatible backends (e.g., LM Studio)
- `template`: optional name used to apply model-specific tool-calling formatting

### 🔹 `servers`

Each server has:

- `type`: `"python"`, `"http"`, `"node"`, or `"uvx"`
- For `python`/`node`: `path` to the script
- For `http`: `url` to the remote MCP endpoint
- For `uvx`: `package` for the package to run
- Optional: `env` for subprocess environments, `system_prompt` to override the server prompt (see the sketch below)
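
The example above covers `python` and `http` servers. A hedged sketch of the other two types, built only from the fields listed (server names, package, path, and `env` values are placeholders, and `env` is assumed to be a map of environment variables):

```json
{
  "servers": {
    "fetch": {
      "type": "uvx",
      "package": "mcp-server-fetch"
    },
    "notes": {
      "type": "node",
      "path": "mcp-servers/notes/server.js",
      "env": { "NOTES_DB": "./notes.sqlite" }
    }
  }
}
```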

### 🔹 `namespaced_tools`

If `true`, tools are prefixed with their server name (e.g., `weather-get_weather`). This is useful for disambiguating tools across servers and avoiding name collisions when multiple servers expose a tool with the same name.
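
For illustration, with a weather server and a time server (the tool names here are hypothetical):

```
namespaced_tools: false  →  get_weather, get_time
namespaced_tools: true   →  weather-get_weather, time-get_time
```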

## 🛠 CLI Reference

### `casual-mcp serve`

Start the API server.

**Options:**

- `--host`: Host to bind (default `0.0.0.0`)
- `--port`: Port to serve on (default `8000`)

### `casual-mcp servers`

Loads the config and outputs the list of MCP servers you have configured.

#### Example Output

```
$ casual-mcp servers
┏━━━━━━━━━┳━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━┓
┃ Name    ┃ Type   ┃ Path / Package / Url          ┃ Env ┃
┡━━━━━━━━━╇━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━┩
│ math    │ python │ mcp-servers/math/server.py    │     │
│ time    │ python │ mcp-servers/time-v2/server.py │     │
│ weather │ python │ mcp-servers/weather/server.py │     │
│ words   │ python │ mcp-servers/words/server.py   │     │
└─────────┴────────┴───────────────────────────────┴─────┘
```

### `casual-mcp models`

Loads the config and outputs the list of models you have configured.

#### Example Output

```
$ casual-mcp models
┏━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━┓
┃ Name              ┃ Provider ┃ Model                     ┃ Endpoint               ┃
┡━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━┩
│ lm-phi-4-mini     │ openai   │ phi-4-mini-instruct       │ http://kovacs:1234/v1  │
│ lm-hermes-3       │ openai   │ hermes-3-llama-3.2-3b     │ http://kovacs:1234/v1  │
│ lm-groq           │ openai   │ llama-3-groq-8b-tool-use  │ http://kovacs:1234/v1  │
│ gpt-4o-mini       │ openai   │ gpt-4o-mini               │                        │
│ gpt-4.1-nano      │ openai   │ gpt-4.1-nano              │                        │
│ gpt-4.1-mini      │ openai   │ gpt-4.1-mini              │                        │
│ gpt-4.1           │ openai   │ gpt-4.1                   │                        │
└───────────────────┴──────────┴───────────────────────────┴────────────────────────┘
```

## 🧠 Programmatic Usage

You can import and use the core framework in your own Python code.

### ✅ Exposed Interfaces

#### `McpToolChat`

Orchestrates LLM interaction with tools using a recursive loop.

```python
from casual_mcp import McpToolChat

chat = McpToolChat(mcp_client, provider, system_prompt)
response = await chat.chat(prompt="What time is it in London?")
```

#### `MultiServerMCPClient`

Connects to multiple MCP tool servers and manages available tools.

```python
from casual_mcp import MultiServerMCPClient

mcp_client = MultiServerMCPClient()
await mcp_client.load_config(config.servers)
tools = await mcp_client.list_tools()
```

#### `ProviderFactory`

Instantiates LLM providers based on the selected model config.

```python
from casual_mcp.providers.provider_factory import ProviderFactory

provider_factory = ProviderFactory()
provider = provider_factory.get_provider("lm-qwen-3", model_config)
```

#### `load_config`

Loads your `config.json` into a validated config object.

```python
from casual_mcp.utils import load_config

config = load_config("config.json")
```

#### Model and Server Configs

Exported models:

- PythonMcpServerConfig
- UvxMcpServerConfig
- NodeMcpServerConfig
- HttpMcpServerConfig
- OpenAIModelConfig

Use these types to build valid configs:

```python
from casual_mcp.models import OpenAIModelConfig, PythonMcpServerConfig

model = OpenAIModelConfig(model="llama3", endpoint="http://...")
server = PythonMcpServerConfig(path="time/server.py")
```

#### Chat Messages

Exported models:

- AssistantMessage
- SystemMessage
- ToolResultMessage
- UserMessage

Use these types to build message chains:

```python
from casual_mcp.models import SystemMessage, UserMessage

messages = [
    SystemMessage(content="You are a friendly tool calling assistant."),
    UserMessage(content="What is the time?")
]
```

### Example

```python
from casual_mcp import McpToolChat, MultiServerMCPClient, load_config, ProviderFactory
from casual_mcp.models import SystemMessage, UserMessage

model = "gpt-4.1-nano"
messages = [
    SystemMessage(content="""You are a tool calling assistant.
You have access to up-to-date information through the tools.
Respond naturally and confidently, as if you already know all the facts."""),
    UserMessage(content="Will I need to take my umbrella to London today?")
]

# Load the Config from the File
config = load_config("config.json")

# Setup the MultiServer MCP Client
mcp_client = MultiServerMCPClient()
await mcp_client.load_config(config.servers)

# Get the Provider for the Model (the factory must exist before set_tools is called)
provider_factory = ProviderFactory()
provider_factory.set_tools(await mcp_client.list_tools())
provider = provider_factory.get_provider(model, config.models[model])

# Perform the Chat and Tool calling
# The SystemMessage above already provides the system prompt; None is assumed
# to be accepted when no separate prompt string is needed.
system_prompt = None
chat = McpToolChat(mcp_client, provider, system_prompt)
response_messages = await chat.chat(messages=messages)
```

## 🚀 API Usage

### Start the API Server

```bash
casual-mcp serve --host 0.0.0.0 --port 8000
```

You can then POST to `/chat` to trigger tool-calling LLM responses.

The request takes a JSON body consisting of:

- `model`: the LLM model to use
- `user_prompt`: optional, the latest user message (required if `messages` isn't provided)
- `messages`: optional, a list of chat messages (system, assistant, user, etc.) to pass to the API, letting the client maintain its own chat session
- `session_id`: optional, an ID under which all messages in the session are stored and provided back to the LLM for context

Pass either a `user_prompt` or a list of `messages`, depending on your use case.

Example:

```json
{
  "session_id": "my-test-session",
  "model": "gpt-4o-mini",
  "user_prompt": "can you explain what the word consistent means?"
}
```
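
The same request from Python, as a sketch using the `requests` library (assuming the server started above is listening on `localhost:8000`; the response is printed raw, since its exact shape isn't documented here):

```python
import requests

# POST the chat request to the running casual-mcp API server
response = requests.post(
    "http://localhost:8000/chat",
    json={
        "session_id": "my-test-session",
        "model": "gpt-4o-mini",
        "user_prompt": "can you explain what the word consistent means?",
    },
    timeout=60,
)
response.raise_for_status()
print(response.json())
```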

## License

This software is released under the [MIT License](LICENSE).
casual_mcp-0.1.0.dist-info/RECORD
@@ -0,0 +1,25 @@
casual_mcp/__init__.py,sha256=UZTKF9qlKijDh2SRCbpz6nPi0now7hi4-VOJBnl7tTk,323
casual_mcp/cli.py,sha256=s5-Mr2XNlzNcsfGwtwP25YBQYzf-orvDIu9gqwrVCI8,1561
casual_mcp/logging.py,sha256=o3rvT8GLJKGlu0ieeC9TY_SRSEUY-VO8jRQZjx-sSvY,863
casual_mcp/main.py,sha256=x-jJUltW4p4j0Vx-LaixbY0Oik6QZ81K2wdDeTNytME,3497
casual_mcp/mcp_tool_chat.py,sha256=pIAQD-ghyLSGuLzXiG5Sv81-NHaNb5NVqRojJalwS1o,3113
casual_mcp/multi_server_mcp_client.py,sha256=RrLO7wFGzkUgzmliagkOAx16lrvEG323MGPEU7Sw56o,5615
casual_mcp/utils.py,sha256=8ekPpIfcqheMMjjKGe6lk81AWKpmCAixOXx_KJXGRAQ,2758
casual_mcp/models/__init__.py,sha256=hHT-GBD0YMjHdJ4QGVefXQZsHu3bPd1vlizVdfYXoQ0,660
casual_mcp/models/config.py,sha256=ITu3WAPMad7i2CS3ljkHapjT8lLm7k6HFUF6N73U1oo,294
casual_mcp/models/generation_error.py,sha256=n1mF3vc1Sg_9yIe603G1nTP395Tht8JMKHqdMWFNAn0,259
casual_mcp/models/mcp_server_config.py,sha256=o4uxq9JnrLRRHe0KNsaYE3P03wJdW1EmX18fmF7SoTQ,857
casual_mcp/models/messages.py,sha256=5UASrYqlXeqaziDT8Zsej0kA7Ofce0109YlFAyQDuTY,688
casual_mcp/models/model_config.py,sha256=gN5hNDfbur_bHgrji87CcU2WgNZO-F3eveK4pVWVSAE,435
casual_mcp/models/tool_call.py,sha256=BKMxcmyW7EmNoG1jgS9PXXvf6RQIHf7wB8fElEbc4gA,271
casual_mcp/providers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
casual_mcp/providers/abstract_provider.py,sha256=kACSVgPY9qTqD1IgIWY9HkS8US2B0Nm7MyGJk0GLfDk,307
casual_mcp/providers/ollama_provider.py,sha256=DKX9QTDl9DspWJSghuQgOzHZgjmTVtf5uyRH_DeOgQc,2601
casual_mcp/providers/openai_provider.py,sha256=VIymU3Jimncne0c1fyowy4BFBpyfK70eG-2rP_YGDzc,6153
casual_mcp/providers/provider_factory.py,sha256=Bub4y4uHFc23VCOeRicT_Fi54IdcjHUgYAWPo1oiSo4,1519
casual_mcp-0.1.0.dist-info/licenses/LICENSE,sha256=U3Zu2tkrh5vXdy7gIdE8WJGM9D4gGp3hohAAWdre-yo,1058
casual_mcp-0.1.0.dist-info/METADATA,sha256=PCJXGER8P8MgnqNBNRPVSysqumU4654KfUpUUyNLEdE,11576
casual_mcp-0.1.0.dist-info/WHEEL,sha256=zaaOINJESkSfm_4HQVc5ssNzHCPXhJm0kEUakpsEHaU,91
casual_mcp-0.1.0.dist-info/entry_points.txt,sha256=X48Np2cwl-SlRQdV26y2vPZ-2tJaODgZeVtfpHho-zg,50
casual_mcp-0.1.0.dist-info/top_level.txt,sha256=K4CiI0Jf8PHICjuQVm32HuNMB44kp8Lb02bbbdiH5bo,11
casual_mcp-0.1.0.dist-info/RECORD,,
casual_mcp-0.1.0.dist-info/licenses/LICENSE
@@ -0,0 +1,7 @@
Copyright (c) 2025 Alex Stansfield

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
casual_mcp-0.1.0.dist-info/top_level.txt
@@ -0,0 +1 @@
casual_mcp