casual-mcp 0.2.2__py3-none-any.whl → 0.3.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
casual_mcp/__init__.py CHANGED
@@ -1,12 +1,13 @@
 from . import models
 from .mcp_tool_chat import McpToolChat
 from .providers.provider_factory import ProviderFactory
-from .utils import load_config, load_mcp_client
+from .utils import load_config, load_mcp_client, render_system_prompt
 
 __all__ = [
     "McpToolChat",
     "ProviderFactory",
     "load_config",
     "load_mcp_client",
+    "render_system_prompt",
     "models",
 ]
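`render_system_prompt` is newly re-exported at package level. A minimal sketch of how it is used, mirroring the `get_chat()` helper added in `main.py` below — the template name `qwen.j2` is illustrative, and the config/client setup is taken from the README examples further down:

```python
from casual_mcp import load_config, load_mcp_client, render_system_prompt

config = load_config("casual_mcp_config.json")
mcp_client = load_mcp_client(config)

async def build_system_prompt() -> str:
    # Render a Jinja2 prompt template against the client's tool list,
    # as main.py's get_chat() does for models that configure a template.
    # "qwen.j2" is an assumed template name, not one shipped by the package.
    async with mcp_client:
        return render_system_prompt("qwen.j2", await mcp_client.list_tools())
```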
casual_mcp/main.py CHANGED
@@ -32,6 +32,7 @@ You must not speculate or guess about dates — if a date is given to you by a t
 Always present information as current and factual.
 """
 
+
 class GenerateRequest(BaseModel):
     session_id: str | None = Field(
         default=None, title="Session to use"
@@ -42,11 +43,20 @@ class GenerateRequest(BaseModel):
     system_prompt: str | None = Field(
         default=None, title="System Prompt to use"
     )
-    user_prompt: str = Field(
+    prompt: str = Field(
         title="User Prompt"
     )
-    messages: list[ChatMessage] | None = Field(
-        default=None, title="Previous messages to supply to the LLM"
+
+
+class ChatRequest(BaseModel):
+    model: str = Field(
+        title="Model to use"
+    )
+    system_prompt: str | None = Field(
+        default=None, title="System Prompt to use"
+    )
+    messages: list[ChatMessage] = Field(
+        title="Previous messages to supply to the LLM"
     )
 
 sys.path.append(str(Path(__file__).parent.resolve()))
@@ -55,44 +65,11 @@ sys.path.append(str(Path(__file__).parent.resolve()))
 configure_logging(os.getenv("LOG_LEVEL", 'INFO'))
 logger = get_logger("main")
 
-async def perform_chat(
-    model,
-    user,
-    system: str | None = None,
-    messages: list[ChatMessage] = None,
-    session_id: str | None = None
-) -> list[ChatMessage]:
-    # Get Provider from Model Config
-    model_config = config.models[model]
-    provider = await provider_factory.get_provider(model, model_config)
-
-    if not system:
-        if (model_config.template):
-            async with mcp_client:
-                system = render_system_prompt(
-                    f"{model_config.template}.j2",
-                    await mcp_client.list_tools()
-                )
-        else:
-            system = default_system_prompt
-
-    chat = McpToolChat(mcp_client, provider, system)
-    return await chat.chat(
-        prompt=user,
-        messages=messages,
-        session_id=session_id
-    )
-
 
 @app.post("/chat")
-async def chat(req: GenerateRequest):
-    messages = await perform_chat(
-        req.model,
-        system=req.system_prompt,
-        user=req.user_prompt,
-        messages=req.messages,
-        session_id=req.session_id
-    )
+async def chat(req: ChatRequest):
+    chat = await get_chat(req.model, req.system_prompt)
+    messages = await chat.chat(req.messages)
 
     return {
         "messages": messages,
@@ -100,16 +77,43 @@ async def chat(req: GenerateRequest):
     }
 
 
-# This endpoint will either go away or be used for something else, don't use it
 @app.post("/generate")
-async def generate_response(req: GenerateRequest):
-    return await chat(req)
+async def generate(req: GenerateRequest):
+    chat = await get_chat(req.model, req.system_prompt)
+    messages = await chat.generate(
+        req.prompt,
+        req.session_id
+    )
+
+    return {
+        "messages": messages,
+        "response": messages[len(messages) - 1].content
+    }
 
 
-@app.get("/chat/session/{session_id}")
-async def get_chat_session(session_id):
+@app.get("/generate/session/{session_id}")
+async def get_generate_session(session_id):
     session = McpToolChat.get_session(session_id)
     if not session:
         raise HTTPException(status_code=404, detail="Session not found")
 
     return session
+
+
+async def get_chat(model: str, system: str | None = None) -> McpToolChat:
+    # Get Provider from Model Config
+    model_config = config.models[model]
+    provider = await provider_factory.get_provider(model, model_config)
+
+    # Get the system prompt
+    if not system:
+        if (model_config.template):
+            async with mcp_client:
+                system = render_system_prompt(
+                    f"{model_config.template}.j2",
+                    await mcp_client.list_tools()
+                )
+        else:
+            system = default_system_prompt
+
+    return McpToolChat(mcp_client, provider, system)
casual_mcp/mcp_tool_chat.py CHANGED
@@ -18,8 +18,26 @@ logger = get_logger("mcp_tool_chat")
 sessions: dict[str, list[ChatMessage]] = {}
 
 
+def get_session_messages(session_id: str | None):
+    global sessions
+
+    if not sessions.get(session_id):
+        logger.info(f"Starting new session {session_id}")
+        sessions[session_id] = []
+    else:
+        logger.info(
+            f"Retrieving session {session_id} of length {len(sessions[session_id])}"
+        )
+    return sessions[session_id].copy()
+
+
+def add_messages_to_session(session_id: str, messages: list[ChatMessage]):
+    global sessions
+    sessions[session_id].extend(messages.copy())
+
+
 class McpToolChat:
-    def __init__(self, mcp_client: Client, provider: LLMProvider, system: str):
+    def __init__(self, mcp_client: Client, provider: LLMProvider, system: str | None = None):
         self.provider = provider
         self.mcp_client = mcp_client
         self.system = system
@@ -29,47 +47,56 @@ class McpToolChat:
         global sessions
         return sessions.get(session_id)
 
-    async def chat(
+    async def generate(
         self,
-        prompt: str | None = None,
-        messages: list[ChatMessage] = None,
+        prompt: str,
         session_id: str | None = None
     ) -> list[ChatMessage]:
-        global sessions
+        # Fetch the session if we have a session ID
+        if session_id:
+            messages = get_session_messages(session_id)
+        else:
+            messages: list[ChatMessage] = []
 
-        # todo: check that we have a prompt or that there is a user message in messages
+        # Add the prompt as a user message
+        user_message = UserMessage(content=prompt)
+        messages.append(user_message)
 
-        # If we have a session ID then create if new and fetch it
+        # Add the user message to the session
         if session_id:
-            if not sessions.get(session_id):
-                logger.info(f"Starting new session {session_id}")
-                sessions[session_id] = []
-            else:
-                logger.info(
-                    f"Retrieving session {session_id} of length {len(sessions[session_id])}"
-                )
-            messages = sessions[session_id].copy()
+            add_messages_to_session(session_id, [user_message])
+
+        # Perform Chat
+        response = await self.chat(messages=messages)
+
+        # Add responses to session
+        if session_id:
+            add_messages_to_session(session_id, response)
+
+        return response
+
+
+    async def chat(
+        self,
+        messages: list[ChatMessage]
+    ) -> list[ChatMessage]:
+        # Add a system message if required
+        has_system_message = any(message.role == 'system' for message in messages)
+        if self.system and not has_system_message:
+            # Insert the system message at the start of the messages
+            messages.insert(0, SystemMessage(content=self.system))
 
         logger.info("Start Chat")
         async with self.mcp_client:
             tools = await self.mcp_client.list_tools()
 
-            if messages is None or len(messages) == 0:
-                message_history = []
-                messages = [SystemMessage(content=self.system)]
-            else:
-                message_history = messages.copy()
-
-            if prompt:
-                messages.append(UserMessage(content=prompt))
-
-            response: str | None = None
+            response_messages: list[ChatMessage] = []
 
             while True:
                 logger.info("Calling the LLM")
                 ai_message = await self.provider.generate(messages, tools)
-                response = ai_message.content
 
                 # Add the assistant's message
+                response_messages.append(ai_message)
                 messages.append(ai_message)
 
                 if not ai_message.tool_calls:
@@ -86,18 +113,14 @@ class McpToolChat:
                         return messages
                     if result:
                         messages.append(result)
+                        response_messages.append(result)
                         result_count = result_count + 1
 
                 logger.info(f"Added {result_count} tool results")
 
-            logger.debug(f"""Final Response:
-                {response} """)
-
-        new_messages = [item for item in messages if item not in message_history]
-        if session_id:
-            sessions[session_id].extend(new_messages)
+            logger.debug(f"Final Response: {response_messages[-1].content}")
 
-        return new_messages
+        return response_messages
 
 
     async def execute(self, tool_call: AssistantToolCall):
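The net effect of this refactor: `generate()` now owns prompt handling and in-memory session bookkeeping, then delegates to `chat()`, which only runs the tool-calling loop over an explicit message list. A sketch of the new flow, assuming `mcp_client`, `provider`, and `system_prompt` are constructed as in the README examples below (the session ID is arbitrary):

```python
from casual_mcp import McpToolChat

chat = McpToolChat(mcp_client, provider, system_prompt)

async def demo() -> None:
    # First turn: generate() creates the in-memory session and stores the
    # user message plus every response message under "demo".
    await chat.generate("What time is it in London?", "demo")

    # Second turn: the stored history is fed back to the LLM for context.
    await chat.generate("And what about Tokyo?", "demo")

    # The accumulated transcript is retrievable by session ID, which is
    # what the /generate/session/{session_id} endpoint returns.
    transcript = McpToolChat.get_session("demo")
    print(len(transcript))
```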
casual_mcp-0.2.2.dist-info/METADATA → casual_mcp-0.3.0.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: casual-mcp
-Version: 0.2.2
+Version: 0.3.0
 Summary: Multi-server MCP client for LLM tool orchestration
 Author: Alex Stansfield
 License: MIT
@@ -36,7 +36,7 @@ Dynamic: license-file
 **Casual MCP** is a Python framework for building, evaluating, and serving LLMs with tool-calling capabilities using [Model Context Protocol (MCP)](https://modelcontextprotocol.io).
 It includes:
 
-- ✅ A multi-server MCP client
+- ✅ A multi-server MCP client using [FastMCP](https://github.com/jlowin/fastmcp)
 - ✅ Provider support for OpenAI (and OpenAI compatible APIs)
 - ✅ A recursive tool-calling chat loop
 - ✅ System prompt templating with Jinja2
@@ -151,12 +151,12 @@ Each model has:
 
 Servers can either be local (over stdio) or remote.
 
-Local Config:
+#### Local Config:
 - `command`: the command to run the server, e.g `python`, `npm`
 - `args`: the arguments to pass to the server as a list, e.g `["time/server.py"]`
 - Optional: `env`: for subprocess environments, `system_prompt` to override server prompt
 
-Remote Config:
+#### Remote Config:
 - `url`: the url of the mcp server
 - Optional: `transport`: the type of transport, `http`, `sse`, `streamable-http`. Defaults to `http`
 
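Assembled from the fields above, the server section of a `casual_mcp_config.json` might look like this — a sketch only: entry names and paths are illustrative, and the `servers` top-level key is inferred from the `config.servers` attribute used in the code:

```
{
  "servers": {
    "time": {
      "command": "python",
      "args": ["mcp-servers/time-v2/server.py"]
    },
    "words": {
      "url": "https://localhost:3000/mcp",
      "transport": "http"
    }
  }
}
```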
@@ -184,12 +184,12 @@ Loads the config and outputs the list of MCP servers you have configured.
 ```
 $ casual-mcp servers
 ┏━━━━━━━━━┳━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━┓
-┃ Name    ┃ Type   ┃ Path / Package / Url          ┃ Env ┃
+┃ Name    ┃ Type   ┃ Command / Url                 ┃ Env ┃
 ┡━━━━━━━━━╇━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━┩
-│ math    │ python │ mcp-servers/math/server.py    │     │
-│ time    │ python │ mcp-servers/time-v2/server.py │     │
-│ weather │ python │ mcp-servers/weather/server.py │     │
-│ words   │ python │ mcp-servers/words/server.py   │     │
+│ math    │ local  │ mcp-servers/math/server.py    │     │
+│ time    │ local  │ mcp-servers/time-v2/server.py │     │
+│ weather │ local  │ mcp-servers/weather/server.py │     │
+│ words   │ remote │ https://localhost:3000/mcp    │     │
 └─────────┴────────┴───────────────────────────────┴─────┘
 ```
 
@@ -223,9 +223,24 @@ Orchestrates LLM interaction with tools using a recursive loop.
 
 ```python
 from casual_mcp import McpToolChat
+from casual_mcp.models import SystemMessage, UserMessage
 
 chat = McpToolChat(mcp_client, provider, system_prompt)
-response = await chat.chat(prompt="What time is it in London?")
+
+# Generate method takes a user prompt
+response = await chat.generate("What time is it in London?")
+
+# Generate method with a session
+response = await chat.generate("What time is it in London?", "my-session-id")
+
+# Chat method takes a list of chat messages
+# note: a constructor system prompt is ignored if messages already contain one, so no need to set it
+chat = McpToolChat(mcp_client, provider)
+messages = [
+    SystemMessage(content="You are a cool dude who likes to help the user"),
+    UserMessage(content="What time is it in London?")
+]
+response = await chat.chat(messages)
 ```
 
 #### `ProviderFactory`
@@ -234,8 +249,8 @@ Instantiates LLM providers based on the selected model config.
 ```python
 from casual_mcp import ProviderFactory
 
-provider_factory = ProviderFactory()
-provider = provider_factory.get_provider("lm-qwen-3", model_config)
+provider_factory = ProviderFactory(mcp_client)
+provider = await provider_factory.get_provider("lm-qwen-3", model_config)
 ```
 
 #### `load_config`
@@ -308,18 +323,16 @@ Respond naturally and confidently, as if you already know all the facts."""),
 # Load the Config from the File
 config = load_config("casual_mcp_config.json")
 
-# Setup the MultiServer MCP Client
+# Setup the MCP Client
 mcp_client = load_mcp_client(config)
-await mcp_client.load_config(config.servers)
 
 # Get the Provider for the Model
-provider_factory.set_tools(await mcp_client.list_tools())
-provider_factory = ProviderFactory()
-provider = provider_factory.get_provider(model, config.models[model])
+provider_factory = ProviderFactory(mcp_client)
+provider = await provider_factory.get_provider(model, config.models[model])
 
 # Perform the Chat and Tool calling
-chat = McpToolChat(mcp_client, provider, system_prompt)
-response_messages = await chat.chat(messages=messages)
+chat = McpToolChat(mcp_client, provider)
+response_messages = await chat.chat(messages)
 ```
 
 ## 🚀 API Usage
@@ -330,25 +343,56 @@ response_messages = await chat.chat(messages=messages)
 casual-mcp serve --host 0.0.0.0 --port 8000
 ```
 
-You can then POST to `/chat` to trigger tool-calling LLM responses.
+### Chat
+
+#### Endpoint: `POST /chat`
 
-The request takes a json body consisting of:
+#### Request Body:
 - `model`: the LLM model to use
-- `user_prompt`: optional, the latest user message (required if messages isn't provided)
-- `messages`: optional, list of chat messages (system, assistant, user, etc) that you can pass to the api, allowing you to keep your own chat session in the client calling the api
-- `session_id`: an optional ID that stores all the messages from the session and provides them back to the LLM for context
+- `messages`: list of chat messages (system, assistant, user, etc) that you can pass to the API, allowing you to keep your own chat session in the client calling the API
+
+#### Example:
+```
+{
+  "model": "gpt-4.1-nano",
+  "messages": [
+    {
+      "role": "user",
+      "content": "can you explain what the word consistent means?"
+    }
+  ]
+}
+```
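The same request issued from Python, as a sketch — it assumes a server started with `casual-mcp serve` is reachable on localhost:8000, and uses `httpx` purely as an example HTTP client (not a dependency of this package):

```python
import httpx

resp = httpx.post(
    "http://localhost:8000/chat",
    json={
        "model": "gpt-4.1-nano",
        "messages": [
            {"role": "user", "content": "can you explain what the word consistent means?"}
        ],
    },
    timeout=60,  # tool-calling round trips can be slow
)
# The body contains the new messages produced by the tool-calling loop.
print(resp.json()["messages"][-1]["content"])
```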
+
+### Generate
+
+The generate endpoint allows you to send a user prompt as a string.
+
+It also supports sessions, which keep a record of all messages in the session and feed them back into the LLM for context. Sessions are stored in memory, so they are cleared when the server is restarted.
+
+#### Endpoint: `POST /generate`
 
-You can either pass in a `user_prompt` or a list of `messages` depending on your use case.
+#### Request Body:
+- `model`: the LLM model to use
+- `prompt`: the user prompt
+- `session_id`: an optional ID that stores all the messages from the session and provides them back to the LLM for context
 
-Example:
+#### Example:
 ```
 {
-  "session_id": "my-test-session",
+  "session_id": "my-session",
   "model": "gpt-4o-mini",
-  "user_prompt": "can you explain what the word consistent means?"
+  "prompt": "can you explain what the word consistent means?"
 }
 ```
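The equivalent call from Python, under the same assumptions as the `/chat` sketch above (local server on port 8000, `httpx` as an arbitrary client). Reusing the `session_id` on a second call continues the conversation server-side:

```python
import httpx

def ask(prompt: str) -> str:
    resp = httpx.post(
        "http://localhost:8000/generate",
        json={"session_id": "my-session", "model": "gpt-4o-mini", "prompt": prompt},
        timeout=60,
    )
    # /generate returns {"messages": [...], "response": "<final content>"}
    return resp.json()["response"]

print(ask("can you explain what the word consistent means?"))
print(ask("can you use it in a sentence?"))  # follows up within the same session
```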
 
+### Get Session
+
+Get all the messages from a session.
+
+#### Endpoint: `GET /generate/session/{session_id}`
+
+
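A sketch of fetching a transcript, under the same local-server assumption; the response body is the stored list of chat messages, and unknown session IDs return a 404 (per the handler in `main.py`):

```python
import httpx

resp = httpx.get("http://localhost:8000/generate/session/my-session")
if resp.status_code == 404:
    print("Session not found")
else:
    # Each stored message carries at least a role and content.
    for message in resp.json():
        print(message["role"], ":", message["content"])
```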
 ## License
 
 This software is released under the [MIT License](LICENSE)
casual_mcp-0.2.2.dist-info/RECORD → casual_mcp-0.3.0.dist-info/RECORD CHANGED
@@ -1,8 +1,8 @@
-casual_mcp/__init__.py,sha256=qL2sZhhWrp60taGBHUG1yeelUgpxld-qXFEbEUMXZVM,277
+casual_mcp/__init__.py,sha256=pInJdGkFqSH8RwbQq-9mc96GWIQjLrtExeXnTYGtNHw,327
 casual_mcp/cli.py,sha256=TSk12nXJH86f0WAR_u5hIJV9IAHeGHrkgFs7ZZ63Lug,1627
 casual_mcp/logging.py,sha256=o3rvT8GLJKGlu0ieeC9TY_SRSEUY-VO8jRQZjx-sSvY,863
-casual_mcp/main.py,sha256=1t702JABc1sFBzic7TMPE6OrikizBfOnlRpaY84p7oQ,3358
-casual_mcp/mcp_tool_chat.py,sha256=5PcbacNj3HR13Rnz4TDhxOtxfipNqGMgyTKHsaSAnV4,4249
+casual_mcp/main.py,sha256=AzqQ6SUJsyKyMaqd3HIxLDozoftMd27KQAQNsfM9e2I,3385
+casual_mcp/mcp_tool_chat.py,sha256=BebLuo2F4nStd4vVO3BftfG8Sa6Zlx11UBuMezpbtIE,4897
 casual_mcp/utils.py,sha256=Nea0aRbPyjqm7mIjffJtGP2NssE7BsdPleO-yiuAWPE,2964
 casual_mcp/models/__init__.py,sha256=qlKylcCyRJOSIVteU2feiLOigZoY-m-soVGp4NALM_c,538
 casual_mcp/models/config.py,sha256=ITu3WAPMad7i2CS3ljkHapjT8lLm7k6HFUF6N73U1oo,294
@@ -16,9 +16,9 @@ casual_mcp/providers/abstract_provider.py,sha256=TTEP3FeTxOtbD0By_k17UxS8cqxYCOG
 casual_mcp/providers/ollama_provider.py,sha256=IUSJFBtEYmza_-_7bk5YZKqed3N67l8A2lZEmHPiyHo,2581
 casual_mcp/providers/openai_provider.py,sha256=uSjoqM-X9bVp_RVM8Ip6lqjZ7q3DdN0-p7o2HKrWxMI,6138
 casual_mcp/providers/provider_factory.py,sha256=CyFHJ0mU2tjHqj04btF0SL0B3pf12LAJ52Msqsbnv_g,1766
-casual_mcp-0.2.2.dist-info/licenses/LICENSE,sha256=U3Zu2tkrh5vXdy7gIdE8WJGM9D4gGp3hohAAWdre-yo,1058
-casual_mcp-0.2.2.dist-info/METADATA,sha256=z9WtBJF17iSjFXzj3vBIIK786LGO7Lx6VYv8SZGtX54,11954
-casual_mcp-0.2.2.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-casual_mcp-0.2.2.dist-info/entry_points.txt,sha256=X48Np2cwl-SlRQdV26y2vPZ-2tJaODgZeVtfpHho-zg,50
-casual_mcp-0.2.2.dist-info/top_level.txt,sha256=K4CiI0Jf8PHICjuQVm32HuNMB44kp8Lb02bbbdiH5bo,11
-casual_mcp-0.2.2.dist-info/RECORD,,
+casual_mcp-0.3.0.dist-info/licenses/LICENSE,sha256=U3Zu2tkrh5vXdy7gIdE8WJGM9D4gGp3hohAAWdre-yo,1058
+casual_mcp-0.3.0.dist-info/METADATA,sha256=ULZbRBwX0FVKjfS2pPu3JWwvD5btiIkA6qOqWoWRa_0,12902
+casual_mcp-0.3.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+casual_mcp-0.3.0.dist-info/entry_points.txt,sha256=X48Np2cwl-SlRQdV26y2vPZ-2tJaODgZeVtfpHho-zg,50
+casual_mcp-0.3.0.dist-info/top_level.txt,sha256=K4CiI0Jf8PHICjuQVm32HuNMB44kp8Lb02bbbdiH5bo,11
+casual_mcp-0.3.0.dist-info/RECORD,,