casual-mcp 0.1.0__py3-none-any.whl → 0.2.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
casual_mcp/__init__.py CHANGED
@@ -1,13 +1,12 @@
  from . import models
  from .mcp_tool_chat import McpToolChat
- from .multi_server_mcp_client import MultiServerMCPClient
  from .providers.provider_factory import ProviderFactory
- from .utils import load_config
+ from .utils import load_config, load_mcp_client

  __all__ = [
  "McpToolChat",
- "MultiServerMCPClient",
  "ProviderFactory",
  "load_config",
+ "load_mcp_client",
  "models",
  ]
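The net effect on the public API: `MultiServerMCPClient` is gone and `load_mcp_client` takes its place. A minimal migration sketch, assuming casual-mcp 0.2.2 is installed and a config file exists at the path shown:

```python
# Sketch: the 0.2.2 public surface, matching the new __all__ above.
from casual_mcp import McpToolChat, ProviderFactory, load_config, load_mcp_client

# 0.1.0 code that built a MultiServerMCPClient should now do this instead:
config = load_config("casual_mcp_config.json")
mcp_client = load_mcp_client(config)  # returns a fastmcp.Client
```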
casual_mcp/cli.py CHANGED
@@ -3,6 +3,7 @@ import uvicorn
  from rich.console import Console
  from rich.table import Table

+ from casual_mcp.models.mcp_server_config import RemoteServerConfig
  from casual_mcp.utils import load_config

  app = typer.Typer()
@@ -26,23 +27,22 @@ def servers():
  """
  Return a table of all configured servers
  """
- config = load_config('config.json')
- table = Table("Name", "Type", "Path / Package / Url", "Env")
+ config = load_config('casual_mcp_config.json')
+ table = Table("Name", "Type", "Command / Url", "Env")

  for name, server in config.servers.items():
+ type = 'local'
+ if isinstance(server, RemoteServerConfig):
+ type = 'remote'
+
  path = ''
- match server.type:
- case 'python':
- path = server.path
- case 'node':
- path = server.path
- case 'http':
- path = server.url
- case 'uvx':
- path = server.package
+ if isinstance(server, RemoteServerConfig):
+ path = server.url
+ else:
+ path = f"{server.command} {' '.join(server.args)}"
  env = ''

- table.add_row(name, server.type, path, env)
+ table.add_row(name, type, path, env)

  console.print(table)

@@ -51,7 +51,7 @@ def models():
  """
  Return a table of all configured models
  """
- config = load_config('config.json')
+ config = load_config('casual_mcp_config.json')
  table = Table("Name", "Provider", "Model", "Endpoint")

  for name, model in config.models.items():
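With the `type` field gone from server configs, the CLI now infers local vs. remote from the config class. A sketch of that dispatch in isolation, using illustrative server values:

```python
# Sketch: how the reworked `servers` command classifies a server entry.
from casual_mcp.models.mcp_server_config import RemoteServerConfig, StdioServerConfig

server = StdioServerConfig(command="python", args=["mcp-servers/time/server.py"])

kind = "remote" if isinstance(server, RemoteServerConfig) else "local"
if isinstance(server, RemoteServerConfig):
    path = server.url
else:
    path = f"{server.command} {' '.join(server.args)}"
print(kind, path)  # local python mcp-servers/time/server.py
```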
casual_mcp/main.py CHANGED
@@ -6,16 +6,16 @@ from dotenv import load_dotenv
  from fastapi import FastAPI, HTTPException
  from pydantic import BaseModel, Field

- from casual_mcp import McpToolChat, MultiServerMCPClient
+ from casual_mcp import McpToolChat
  from casual_mcp.logging import configure_logging, get_logger
- from casual_mcp.models.messages import CasualMcpMessage
+ from casual_mcp.models.messages import ChatMessage
  from casual_mcp.providers.provider_factory import ProviderFactory
- from casual_mcp.utils import load_config, render_system_prompt
+ from casual_mcp.utils import load_config, load_mcp_client, render_system_prompt

  load_dotenv()
- config = load_config("config.json")
- mcp_client = MultiServerMCPClient(namespace_tools=config.namespace_tools)
- provider_factory = ProviderFactory()
+ config = load_config("casual_mcp_config.json")
+ mcp_client = load_mcp_client(config)
+ provider_factory = ProviderFactory(mcp_client)

  app = FastAPI()

@@ -45,7 +45,7 @@ class GenerateRequest(BaseModel):
  user_prompt: str = Field(
  title="User Prompt"
  )
- messages: list[CasualMcpMessage] | None = Field(
+ messages: list[ChatMessage] | None = Field(
  default=None, title="Previous messages to supply to the LLM"
  )

@@ -59,19 +59,20 @@ async def perform_chat(
  model,
  user,
  system: str | None = None,
- messages: list[CasualMcpMessage] = None,
+ messages: list[ChatMessage] = None,
  session_id: str | None = None
- ) -> list[CasualMcpMessage]:
+ ) -> list[ChatMessage]:
  # Get Provider from Model Config
  model_config = config.models[model]
- provider = provider_factory.get_provider(model, model_config)
+ provider = await provider_factory.get_provider(model, model_config)

  if not system:
  if (model_config.template):
- system = render_system_prompt(
- f"{model_config.template}.j2",
- await mcp_client.list_tools()
- )
+ async with mcp_client:
+ system = render_system_prompt(
+ f"{model_config.template}.j2",
+ await mcp_client.list_tools()
+ )
  else:
  system = default_system_prompt

@@ -85,10 +86,6 @@ async def perform_chat(

  @app.post("/chat")
  async def chat(req: GenerateRequest):
- if len(mcp_client.tools) == 0:
- await mcp_client.load_config(config.servers)
- provider_factory.set_tools(await mcp_client.list_tools())
-
  messages = await perform_chat(
  req.model,
  system=req.system_prompt,
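The startup sequence in `main.py` is now config file → shared `fastmcp` client → factory. A sketch of the same wiring outside FastAPI, assuming the default config file name:

```python
# Sketch: the 0.2.2 module-level wiring from main.py, stated on its own.
from casual_mcp import ProviderFactory, load_config, load_mcp_client

config = load_config("casual_mcp_config.json")
mcp_client = load_mcp_client(config)            # one Client across all servers
provider_factory = ProviderFactory(mcp_client)  # the factory now owns the client
```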
casual_mcp/mcp_tool_chat.py CHANGED
@@ -1,30 +1,40 @@
+ import json
+ import os
+
+ from fastmcp import Client

  from casual_mcp.logging import get_logger
- from casual_mcp.models.messages import CasualMcpMessage, SystemMessage, UserMessage
- from casual_mcp.multi_server_mcp_client import MultiServerMCPClient
+ from casual_mcp.models.messages import (
+ ChatMessage,
+ SystemMessage,
+ ToolResultMessage,
+ UserMessage,
+ )
+ from casual_mcp.models.tool_call import AssistantToolCall
  from casual_mcp.providers.provider_factory import LLMProvider
+ from casual_mcp.utils import format_tool_call_result

  logger = get_logger("mcp_tool_chat")
- sessions: dict[str, list[CasualMcpMessage]] = {}
+ sessions: dict[str, list[ChatMessage]] = {}


  class McpToolChat:
- def __init__(self, tool_client: MultiServerMCPClient, provider: LLMProvider, system: str):
+ def __init__(self, mcp_client: Client, provider: LLMProvider, system: str):
  self.provider = provider
- self.tool_client = tool_client
+ self.mcp_client = mcp_client
  self.system = system

  @staticmethod
- def get_session(session_id) -> list[CasualMcpMessage] | None:
+ def get_session(session_id) -> list[ChatMessage] | None:
  global sessions
  return sessions.get(session_id)

  async def chat(
  self,
  prompt: str | None = None,
- messages: list[CasualMcpMessage] = None,
+ messages: list[ChatMessage] = None,
  session_id: str | None = None
- ) -> list[CasualMcpMessage]:
+ ) -> list[ChatMessage]:
  global sessions

  # todo: check that we have a prompt or that there is a user message in messages
@@ -41,7 +51,8 @@ class McpToolChat:
  messages = sessions[session_id].copy()

  logger.info("Start Chat")
- tools = await self.tool_client.list_tools()
+ async with self.mcp_client:
+ tools = await self.mcp_client.list_tools()

  if messages is None or len(messages) == 0:
  message_history = []
@@ -69,7 +80,7 @@ class McpToolChat:
  result_count = 0
  for tool_call in ai_message.tool_calls:
  try:
- result = await self.tool_client.execute(tool_call)
+ result = await self.execute(tool_call)
  except Exception as e:
  logger.error(e)
  return messages
@@ -88,3 +99,32 @@ class McpToolChat:

  return new_messages

+
+ async def execute(self, tool_call: AssistantToolCall):
+ tool_name = tool_call.function.name
+ tool_args = json.loads(tool_call.function.arguments)
+ try:
+ async with self.mcp_client:
+ result = await self.mcp_client.call_tool(tool_name, tool_args)
+ except Exception as e:
+ if isinstance(e, ValueError):
+ logger.warning(e)
+ else:
+ logger.error(f"Error calling tool: {e}")
+
+ return ToolResultMessage(
+ name=tool_call.function.name,
+ tool_call_id=tool_call.id,
+ content=str(e),
+ )
+
+ logger.debug(f"Tool Call Result: {result}")
+
+ result_format = os.getenv('TOOL_RESULT_FORMAT', 'result')
+ content = format_tool_call_result(tool_call, result[0].text, style=result_format)
+
+ return ToolResultMessage(
+ name=tool_call.function.name,
+ tool_call_id=tool_call.id,
+ content=content,
+ )
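Tool execution has moved from the deleted `MultiServerMCPClient` into `McpToolChat.execute`, and every client use is wrapped in `async with` so connections open per call. A sketch of driving the reworked class end to end; the model key and prompt are illustrative:

```python
# Sketch: running McpToolChat against a fastmcp Client in 0.2.2.
import asyncio

from casual_mcp import McpToolChat, ProviderFactory, load_config, load_mcp_client

async def main():
    config = load_config("casual_mcp_config.json")
    mcp_client = load_mcp_client(config)
    factory = ProviderFactory(mcp_client)
    provider = await factory.get_provider("gpt-4.1-nano", config.models["gpt-4.1-nano"])

    chat = McpToolChat(mcp_client, provider, system="You are a helpful assistant.")
    # chat() lists tools inside `async with`, and tool calls route through execute().
    messages = await chat.chat(prompt="What time is it in London?")

asyncio.run(main())
```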
casual_mcp/models/__init__.py CHANGED
@@ -1,13 +1,11 @@
  from .mcp_server_config import (
- HttpMcpServerConfig,
  McpServerConfig,
- NodeMcpServerConfig,
- PythonMcpServerConfig,
- UvxMcpServerConfig,
+ RemoteServerConfig,
+ StdioServerConfig,
  )
  from .messages import (
  AssistantMessage,
- CasualMcpMessage,
+ ChatMessage,
  SystemMessage,
  ToolResultMessage,
  UserMessage,
@@ -22,12 +20,10 @@ __all__ = [
  "AssistantMessage",
  "ToolResultMessage",
  "SystemMessage",
- "CasualMcpMessage",
+ "ChatMessage",
  "ModelConfig",
  "OpenAIModelConfig",
  "McpServerConfig",
- "PythonMcpServerConfig",
- "UvxMcpServerConfig",
- "NodeMcpServerConfig",
- "HttpMcpServerConfig",
+ "StdioServerConfig",
+ "RemoteServerConfig",
  ]
casual_mcp/models/mcp_server_config.py CHANGED
@@ -1,39 +1,20 @@
- from typing import Literal
+ from typing import Any, Literal

- from pydantic import BaseModel
+ from pydantic import BaseModel, Field


- class BaseMcpServerConfig(BaseModel):
- type: Literal["python", "node", "http", "uvx"]
- system_prompt: str | None | None = None
+ class StdioServerConfig(BaseModel):
+ command: str
+ args: list[str] = Field(default_factory=list)
+ env: dict[str, Any] = Field(default_factory=dict)
+ cwd: str | None = None
+ transport: Literal["stdio"] = "stdio"


- class PythonMcpServerConfig(BaseMcpServerConfig):
- type: Literal["python"] = "python"
- path: str
- env: dict[str, str] | None | None = None
-
-
- class UvxMcpServerConfig(BaseMcpServerConfig):
- type: Literal["uvx"] = "uvx"
- package: str
- env: dict[str, str] | None | None = None
-
-
- class NodeMcpServerConfig(BaseMcpServerConfig):
- type: Literal["node"] = "node"
- path: str
- env: dict[str, str] | None | None = None
-
-
- class HttpMcpServerConfig(BaseMcpServerConfig):
- type: Literal["http"] = "http"
+ class RemoteServerConfig(BaseModel):
  url: str
+ headers: dict[str, str] = Field(default_factory=dict)
+ transport: Literal["streamable-http", "sse", "http"] | None = None


- McpServerConfig = (
- PythonMcpServerConfig
- | NodeMcpServerConfig
- | HttpMcpServerConfig
- | UvxMcpServerConfig
- )
+ McpServerConfig = StdioServerConfig | RemoteServerConfig
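The four transport-specific models collapse into two shapes, and their `model_dump()` output lines up with what `load_mcp_client` feeds to fastmcp. A sketch with illustrative values:

```python
# Sketch: constructing the two 0.2.2 server config shapes defined above.
from casual_mcp.models.mcp_server_config import RemoteServerConfig, StdioServerConfig

local = StdioServerConfig(command="python", args=["mcp-servers/time/server.py"])
remote = RemoteServerConfig(url="http://localhost:5050/mcp", transport="streamable-http")

print(local.model_dump())
# {'command': 'python', 'args': ['mcp-servers/time/server.py'],
#  'env': {}, 'cwd': None, 'transport': 'stdio'}
```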
casual_mcp/models/messages.py CHANGED
@@ -28,4 +28,4 @@ class UserMessage(BaseModel):
  content: str | None


- CasualMcpMessage: TypeAlias = AssistantMessage | SystemMessage | ToolResultMessage | UserMessage
+ ChatMessage: TypeAlias = AssistantMessage | SystemMessage | ToolResultMessage | UserMessage
casual_mcp/providers/abstract_provider.py CHANGED
@@ -2,14 +2,14 @@ from abc import ABC, abstractmethod

  import mcp

- from casual_mcp.models.messages import CasualMcpMessage
+ from casual_mcp.models.messages import ChatMessage


  class CasualMcpProvider(ABC):
  @abstractmethod
  async def generate(
  self,
- messages: list[CasualMcpMessage],
+ messages: list[ChatMessage],
  tools: list[mcp.Tool]
- ) -> CasualMcpMessage:
+ ) -> ChatMessage:
  pass
casual_mcp/providers/ollama_provider.py CHANGED
@@ -6,7 +6,7 @@ from ollama import ChatResponse, Client, ResponseError

  from casual_mcp.logging import get_logger
  from casual_mcp.models.generation_error import GenerationError
- from casual_mcp.models.messages import AssistantMessage, CasualMcpMessage
+ from casual_mcp.models.messages import AssistantMessage, ChatMessage
  from casual_mcp.providers.abstract_provider import CasualMcpProvider

  logger = get_logger("providers.ollama")
@@ -15,7 +15,7 @@ def convert_tools(mcp_tools: list[mcp.Tool]) -> list[ollama.Tool]:
  raise Exception({"message": "under development"})


- def convert_messages(messages: list[CasualMcpMessage]) -> list[ollama.Message]:
+ def convert_messages(messages: list[ChatMessage]) -> list[ollama.Message]:
  raise Exception({"message": "under development"})


@@ -32,9 +32,9 @@ class OllamaProvider(CasualMcpProvider):

  async def generate(
  self,
- messages: list[CasualMcpMessage],
+ messages: list[ChatMessage],
  tools: list[mcp.Tool]
- ) -> CasualMcpMessage:
+ ) -> ChatMessage:
  logger.info("Start Generating")
  logger.debug(f"Model: {self.model}")

casual_mcp/providers/openai_provider.py CHANGED
@@ -15,7 +15,7 @@ from openai.types.chat import (

  from casual_mcp.logging import get_logger
  from casual_mcp.models.generation_error import GenerationError
- from casual_mcp.models.messages import AssistantMessage, CasualMcpMessage
+ from casual_mcp.models.messages import AssistantMessage, ChatMessage
  from casual_mcp.models.tool_call import AssistantToolCall, AssistantToolCallFunction
  from casual_mcp.providers.abstract_provider import CasualMcpProvider

@@ -59,7 +59,7 @@ def convert_tool(mcp_tool: mcp.Tool) -> ChatCompletionToolParam | None:
  return ChatCompletionToolParam(**tool)


- def convert_messages(messages: list[CasualMcpMessage]) -> list[ChatCompletionMessageParam]:
+ def convert_messages(messages: list[ChatMessage]) -> list[ChatCompletionMessageParam]:
  if not messages:
  return messages

@@ -144,7 +144,7 @@ class OpenAiProvider(CasualMcpProvider):

  async def generate(
  self,
- messages: list[CasualMcpMessage],
+ messages: list[ChatMessage],
  tools: list[mcp.Tool]
  ) -> AssistantMessage:
  logger.info("Start Generating")
casual_mcp/providers/provider_factory.py CHANGED
@@ -2,6 +2,7 @@ import os
  from typing import TypeAlias

  import mcp
+ from fastmcp import Client

  from casual_mcp.logging import get_logger
  from casual_mcp.models.model_config import ModelConfig
@@ -13,15 +14,22 @@ logger = get_logger("providers.factory")
  LLMProvider: TypeAlias = OpenAiProvider | OllamaProvider

  class ProviderFactory:
- def __init__(self):
- self.providers: dict[str, LLMProvider] = {}
+ providers: dict[str, LLMProvider] = {}
+ tools: list[mcp.Tool] = None
+
+ def __init__(self, mcp_client: Client):
+ self.mcp_client = mcp_client


  def set_tools(self, tools: list[mcp.Tool]):
  self.tools = tools


- def get_provider(self, name: str, config: ModelConfig) -> LLMProvider:
+ async def get_provider(self, name: str, config: ModelConfig) -> LLMProvider:
+ if not self.tools:
+ async with self.mcp_client:
+ self.tools = await self.mcp_client.list_tools()
+
  if self.providers.get(name):
  return self.providers.get(name)

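`get_provider` is now a coroutine: the first call opens the client to list tools, and providers are reused per model name. A self-contained sketch of that behaviour, with an illustrative model key (the caching store itself is outside this hunk):

```python
# Sketch: lazy tool loading plus per-name reuse in the 0.2.2 factory.
import asyncio

from casual_mcp import ProviderFactory, load_config, load_mcp_client

async def main():
    config = load_config("casual_mcp_config.json")
    factory = ProviderFactory(load_mcp_client(config))
    first = await factory.get_provider("lm-qwen-3", config.models["lm-qwen-3"])
    second = await factory.get_provider("lm-qwen-3", config.models["lm-qwen-3"])
    assert first is second  # served from the providers cache checked above

asyncio.run(main())
```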
casual_mcp/utils.py CHANGED
@@ -2,6 +2,7 @@ import json
  from pathlib import Path

  import mcp
+ from fastmcp import Client
  from jinja2 import Environment, FileSystemLoader
  from pydantic import ValidationError

@@ -9,6 +10,14 @@ from casual_mcp.models.config import Config
  from casual_mcp.models.tool_call import AssistantToolCall


+ def load_mcp_client(config: Config) -> Client:
+ servers = {
+ key: value.model_dump()
+ for key, value in config.servers.items()
+ }
+ return Client(servers)
+
+
  def load_config(path: str | Path) -> Config:
  path = Path(path)

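`load_mcp_client` simply dumps each server model into a name-keyed dict and hands the mapping to `fastmcp.Client`. Written out literally (server names and paths are illustrative; field defaults come from the models above):

```python
# Sketch: the dict shape load_mcp_client builds before calling Client(...).
from fastmcp import Client

servers = {
    "time": {"command": "python", "args": ["mcp-servers/time/server.py"],
             "env": {}, "cwd": None, "transport": "stdio"},
    "weather": {"url": "http://localhost:5050/mcp", "headers": {}, "transport": None},
}
mcp_client = Client(servers)
```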
{casual_mcp-0.1.0.dist-info → casual_mcp-0.2.2.dist-info}/METADATA CHANGED
@@ -1,16 +1,18 @@
  Metadata-Version: 2.4
  Name: casual-mcp
- Version: 0.1.0
+ Version: 0.2.2
  Summary: Multi-server MCP client for LLM tool orchestration
  Author: Alex Stansfield
  License: MIT
+ Project-URL: Homepage, https://github.com/AlexStansfield/casual-mcp
+ Project-URL: Repository, https://github.com/AlexStansfield/casual-mcp
+ Project-URL: Issue Tracker, https://github.com/AlexStansfield/casual-mcp/issues
  Requires-Python: >=3.10
  Description-Content-Type: text/markdown
  License-File: LICENSE
- Requires-Dist: amadeus>=12.0.0
  Requires-Dist: dateparser>=1.2.1
  Requires-Dist: fastapi>=0.115.12
- Requires-Dist: fastmcp>=2.3.4
+ Requires-Dist: fastmcp>=2.5.1
  Requires-Dist: jinja2>=3.1.6
  Requires-Dist: ollama>=0.4.8
  Requires-Dist: openai>=1.78.0
@@ -98,11 +100,11 @@ Here is a list of functions in JSON format that you can invoke:
  ]
  ```

- ## ⚙️ Configuration File (`config.json`)
+ ## ⚙️ Configuration File (`casual_mcp_config.json`)

  📄 See the [Programmatic Usage](#-programmatic-usage) section to build configs and messages with typed models.

- The CLI and API can be configured using a `config.json` file that defines:
+ The CLI and API can be configured using a `casual_mcp_config.json` file that defines:

  - 🔧 Available **models** and their providers
  - 🧰 Available **MCP tool servers**
@@ -112,7 +114,6 @@ The CLI and API can be configured using a `config.json` file that defines:

  ```json
  {
- "namespaced_tools": false,
  "models": {
  "lm-qwen-3": {
  "provider": "openai",
@@ -127,11 +128,10 @@ The CLI and API can be configured using a `config.json` file that defines:
  },
  "servers": {
  "time": {
- "type": "python",
- "path": "mcp-servers/time/server.py"
+ "command": "python",
+ "args": ["mcp-servers/time/server.py"]
  },
  "weather": {
- "type": "http",
  "url": "http://localhost:5050/mcp"
  }
  }
@@ -142,25 +142,31 @@ The CLI and API can be configured using a `config.json` file that defines:

  Each model has:

- - `provider`: `"openai"` or `"ollama"`
+ - `provider`: `"openai"` (more to come)
  - `model`: the model name (e.g., `gpt-4.1`, `qwen3-8b`)
  - `endpoint`: required for custom OpenAI-compatible backends (e.g., LM Studio)
  - `template`: optional name used to apply model-specific tool calling formatting

  ### 🔹 `servers`

- Each server has:
+ Servers can be either local (over stdio) or remote.

- - `type`: `"python"`, `"http"`, `"node"`, or `"uvx"`
- - For `python`/`node`: `path` to the script
- - For `http`: `url` to the remote MCP endpoint
- - For `uvx`: `package` for the package to run
- - Optional: `env` for subprocess environments, `system_prompt` to override server prompt
+ Local Config:
+ - `command`: the command to run the server, e.g. `python`, `npm`
+ - `args`: the arguments to pass to the server as a list, e.g. `["time/server.py"]`
+ - Optional: `env` for subprocess environments, `system_prompt` to override server prompt

- ### 🔹 `namespaced_tools`
+ Remote Config:
+ - `url`: the URL of the MCP server
+ - Optional: `transport`: the type of transport, `http`, `sse`, or `streamable-http`. Defaults to `http`

- If `true`, tools will be prefixed by server name (e.g., `weather-get_weather`).
- Useful for disambiguating tool names across servers and avoiding name collision if multiple servers have the same tool name.
+ ## Environment Variables
+
+ There are two environment variables:
+ - `OPEN_AI_API_KEY`: required when using the `openai` provider; if using a local model with an OpenAI-compatible API it can be any string
+ - `TOOL_RESULT_FORMAT`: adjusts the format of the tool result given back to the LLM. Options are `result`, `function_result`, `function_args_result`. Defaults to `result`
+
+ You can set them using `export` or by creating a `.env` file.

  ## 🛠 CLI Reference

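Since the README section above introduces the two environment variables, here is a sketch of setting them in-process; the values are illustrative, and a `.env` file with the same keys works equally well:

```python
# Sketch: the two environment variables documented in the README above.
import os

os.environ["OPEN_AI_API_KEY"] = "sk-anything-for-local-backends"
os.environ["TOOL_RESULT_FORMAT"] = "function_result"  # or: result, function_args_result
```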
@@ -222,52 +228,48 @@ chat = McpToolChat(mcp_client, provider, system_prompt)
  response = await chat.chat(prompt="What time is it in London?")
  ```

- #### `MultiServerMCPClient`
- Connects to multiple MCP tool servers and manages available tools.
-
- ```python
- from casual_mcp import MultiServerMCPClient
-
- mcp_client = MultiServerMCPClient()
- await mcp_client.load_config(config["servers"])
- tools = await mcp_client.list_tools()
- ```
-
  #### `ProviderFactory`
  Instantiates LLM providers based on the selected model config.

  ```python
- from casual_mcp.providers.provider_factory import ProviderFactory
+ from casual_mcp import ProviderFactory

  provider_factory = ProviderFactory()
  provider = provider_factory.get_provider("lm-qwen-3", model_config)
  ```

  #### `load_config`
- Loads your `config.json` into a validated config object.
+ Loads your `casual_mcp_config.json` into a validated config object.
+
+ ```python
+ from casual_mcp import load_config
+
+ config = load_config("casual_mcp_config.json")
+ ```
+
+ #### `load_mcp_client`
+ Creates a multi-server FastMCP client from the config object.

  ```python
- from casual_mcp.utils import load_config
+ from casual_mcp import load_mcp_client

- config = load_config("config.json")
+ mcp_client = load_mcp_client(config)
  ```

  #### Model and Server Configs

  Exported models:
- - PythonMcpServerConfig
- - UvxMcpServerConfig
- - NodeMcpServerConfig
- - HttpMcpServerConfig
+ - StdioServerConfig
+ - RemoteServerConfig
  - OpenAIModelConfig

  Use these types to build valid configs:

  ```python
- from casual_mcp.models import OpenAIModelConfig, PythonMcpServerConfig
+ from casual_mcp.models import OpenAIModelConfig, StdioServerConfig

- model = OpenAIModelConfig( model="llama3", endpoint="http://...")
- server = PythonMcpServerConfig(path="time/server.py")
+ model = OpenAIModelConfig(model="llama3", endpoint="http://...")
+ server = StdioServerConfig(command="python", args=["time/server.py"])
  ```

  #### Chat Messages
@@ -292,7 +294,7 @@ messages = [
  ### Example

  ```python
- from casual_mcp import McpToolChat, MultiServerMCPClient, load_config, ProviderFactory
+ from casual_mcp import McpToolChat, load_config, load_mcp_client, ProviderFactory
  from casual_mcp.models import SystemMessage, UserMessage

  model = "gpt-4.1-nano"
@@ -304,10 +306,10 @@ Respond naturally and confidently, as if you already know all the facts."""),
  ]

  # Load the Config from the File
- config = load_config("config.json")
+ config = load_config("casual_mcp_config.json")

  # Setup the MultiServer MCP Client
- mcp_client = MultiServerMCPClient()
+ mcp_client = load_mcp_client(config)
  await mcp_client.load_config(config.servers)

  # Get the Provider for the Model
casual_mcp-0.2.2.dist-info/RECORD ADDED
@@ -0,0 +1,24 @@
+ casual_mcp/__init__.py,sha256=qL2sZhhWrp60taGBHUG1yeelUgpxld-qXFEbEUMXZVM,277
+ casual_mcp/cli.py,sha256=TSk12nXJH86f0WAR_u5hIJV9IAHeGHrkgFs7ZZ63Lug,1627
+ casual_mcp/logging.py,sha256=o3rvT8GLJKGlu0ieeC9TY_SRSEUY-VO8jRQZjx-sSvY,863
+ casual_mcp/main.py,sha256=1t702JABc1sFBzic7TMPE6OrikizBfOnlRpaY84p7oQ,3358
+ casual_mcp/mcp_tool_chat.py,sha256=5PcbacNj3HR13Rnz4TDhxOtxfipNqGMgyTKHsaSAnV4,4249
+ casual_mcp/utils.py,sha256=Nea0aRbPyjqm7mIjffJtGP2NssE7BsdPleO-yiuAWPE,2964
+ casual_mcp/models/__init__.py,sha256=qlKylcCyRJOSIVteU2feiLOigZoY-m-soVGp4NALM_c,538
+ casual_mcp/models/config.py,sha256=ITu3WAPMad7i2CS3ljkHapjT8lLm7k6HFUF6N73U1oo,294
+ casual_mcp/models/generation_error.py,sha256=n1mF3vc1Sg_9yIe603G1nTP395Tht8JMKHqdMWFNAn0,259
+ casual_mcp/models/mcp_server_config.py,sha256=0OHsHUEKxRoCl21lsye4E5GoCNmdZWIZCOOthcTpdsE,539
+ casual_mcp/models/messages.py,sha256=7C0SoCC6Ee970iHprpCpsKsQrwvM66e39o96wfYm1Y8,683
+ casual_mcp/models/model_config.py,sha256=gN5hNDfbur_bHgrji87CcU2WgNZO-F3eveK4pVWVSAE,435
+ casual_mcp/models/tool_call.py,sha256=BKMxcmyW7EmNoG1jgS9PXXvf6RQIHf7wB8fElEbc4gA,271
+ casual_mcp/providers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ casual_mcp/providers/abstract_provider.py,sha256=TTEP3FeTxOtbD0By_k17UxS8cqxYCOGNRTRxYRrqGwc,292
+ casual_mcp/providers/ollama_provider.py,sha256=IUSJFBtEYmza_-_7bk5YZKqed3N67l8A2lZEmHPiyHo,2581
+ casual_mcp/providers/openai_provider.py,sha256=uSjoqM-X9bVp_RVM8Ip6lqjZ7q3DdN0-p7o2HKrWxMI,6138
+ casual_mcp/providers/provider_factory.py,sha256=CyFHJ0mU2tjHqj04btF0SL0B3pf12LAJ52Msqsbnv_g,1766
+ casual_mcp-0.2.2.dist-info/licenses/LICENSE,sha256=U3Zu2tkrh5vXdy7gIdE8WJGM9D4gGp3hohAAWdre-yo,1058
+ casual_mcp-0.2.2.dist-info/METADATA,sha256=z9WtBJF17iSjFXzj3vBIIK786LGO7Lx6VYv8SZGtX54,11954
+ casual_mcp-0.2.2.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ casual_mcp-0.2.2.dist-info/entry_points.txt,sha256=X48Np2cwl-SlRQdV26y2vPZ-2tJaODgZeVtfpHho-zg,50
+ casual_mcp-0.2.2.dist-info/top_level.txt,sha256=K4CiI0Jf8PHICjuQVm32HuNMB44kp8Lb02bbbdiH5bo,11
+ casual_mcp-0.2.2.dist-info/RECORD,,
{casual_mcp-0.1.0.dist-info → casual_mcp-0.2.2.dist-info}/WHEEL CHANGED
@@ -1,5 +1,5 @@
  Wheel-Version: 1.0
- Generator: setuptools (80.8.0)
+ Generator: setuptools (80.9.0)
  Root-Is-Purelib: true
  Tag: py3-none-any

casual_mcp/multi_server_mcp_client.py DELETED
@@ -1,170 +0,0 @@
- import json
- import os
-
- import mcp
- from fastmcp import Client
- from fastmcp.client.logging import LogMessage
- from fastmcp.client.transports import (
- ClientTransport,
- NodeStdioTransport,
- PythonStdioTransport,
- StreamableHttpTransport,
- UvxStdioTransport,
- )
-
- from casual_mcp.logging import get_logger
- from casual_mcp.models.mcp_server_config import McpServerConfig
- from casual_mcp.models.messages import ToolResultMessage
- from casual_mcp.models.tool_call import AssistantToolCall, AssistantToolCallFunction
- from casual_mcp.utils import format_tool_call_result
-
- logger = get_logger("multi_server_mcp_client")
-
-
- async def my_log_handler(params: LogMessage):
- logger.log(params.level, params.data)
-
-
- def get_server_transport(config: McpServerConfig) -> ClientTransport:
- match config.type:
- case 'python':
- return PythonStdioTransport(
- script_path=config.path,
- env=config.env
- )
- case 'node':
- return NodeStdioTransport(
- script_path=config.path,
- env=config.env
- )
- case 'http':
- return StreamableHttpTransport(
- url=config.url
- )
- case 'uvx':
- return UvxStdioTransport(
- tool_name=config.package,
- env_vars=config.env
- )
-
-
- class MultiServerMCPClient:
- def __init__(self, namespace_tools: bool = False):
- self.servers: dict[str, Client] = {} # Map server names to client connections
- self.tools_map = {} # Map tool names to server names
- self.tools: list[mcp.types.Tool] = []
- self.system_prompts: list[str] = []
- self.namespace_tools = namespace_tools
-
- async def load_config(self, config: dict[str, McpServerConfig]):
- # Load the servers from config
- logger.info("Loading server config")
- for name, server_config in config.items():
- transport = get_server_transport(server_config)
- await self.connect_to_server(
- transport,
- name,
- system_prompt=server_config.system_prompt
- )
-
-
- async def connect_to_server_script(self, path, name, env={}):
- # Connect via stdio to a local script
- transport = PythonStdioTransport(
- script_path=path,
- env=env,
- )
-
- return await self.connect_to_server(transport, name)
-
- async def connect_to_server(self, server, name, system_prompt: str = None):
- """Connect to an MCP server and register its tools."""
- logger.debug(f"Connecting to server {name}")
-
- async with Client(
- server,
- log_handler=my_log_handler,
- ) as server_client:
- # Store the connection
- self.servers[name] = server_client
-
- # Fetch tools and map them to this server
- tools = await server_client.list_tools()
-
- # If we are namespacing servers then change the tool names
- for tool in tools:
- if self.namespace_tools:
- tool.name = f"{name}-{tool.name}"
- else:
- if self.tools_map.get(tool.name):
- raise SystemError(
- f"Tool name collision {name}:{tool.name} already added by {self.tools_map[tool.name]}" # noqa: E501
- )
-
- self.tools_map[tool.name] = name
- self.tools.extend(tools)
-
- if system_prompt:
- prompt = await server_client.get_prompt(system_prompt)
- if prompt:
- self.system_prompts.append(prompt)
-
- return tools
-
- async def list_tools(self):
- """Fetch and aggregate tools from all connected servers."""
- return self.tools
-
- async def call_tool(self, function: AssistantToolCallFunction):
- """Route a tool call to the appropriate server."""
- tool_name = function.name
- tool_args = json.loads(function.arguments)
-
- # Find which server has this tool
- server_name = self.tools_map.get(tool_name)
-
- # Remove the sever name if the tools are namespaced
- if self.namespace_tools:
- tool_name = tool_name.removeprefix(f"{server_name}-")
- else:
- tool_name = tool_name
-
- if not self.tools_map.get(tool_name):
- raise ValueError(f"Tool not found: {tool_name}")
-
- logger.info(f"Calling tool {tool_name}")
-
- server_client = self.servers[server_name]
- async with server_client:
- return await server_client.call_tool(tool_name, tool_args)
-
-
- async def execute(self, tool_call: AssistantToolCall):
- try:
- result = await self.call_tool(tool_call.function)
- except Exception as e:
- if isinstance(e, ValueError):
- logger.warning(e)
- else:
- logger.error(f"Error calling tool: {e}")
-
- return ToolResultMessage(
- name=tool_call.function.name,
- tool_call_id=tool_call.id,
- content=str(e),
- )
-
- logger.debug(f"Tool Call Result: {result}")
-
- result_format = os.getenv('TOOL_RESULT_FORMAT', 'result')
- content = format_tool_call_result(tool_call, result[0].text, style=result_format)
-
- return ToolResultMessage(
- name=tool_call.function.name,
- tool_call_id=tool_call.id,
- content=content,
- )
-
-
- def get_system_prompts(self) -> list[str]:
- return self.system_prompts
casual_mcp-0.1.0.dist-info/RECORD DELETED
@@ -1,25 +0,0 @@
- casual_mcp/__init__.py,sha256=UZTKF9qlKijDh2SRCbpz6nPi0now7hi4-VOJBnl7tTk,323
- casual_mcp/cli.py,sha256=s5-Mr2XNlzNcsfGwtwP25YBQYzf-orvDIu9gqwrVCI8,1561
- casual_mcp/logging.py,sha256=o3rvT8GLJKGlu0ieeC9TY_SRSEUY-VO8jRQZjx-sSvY,863
- casual_mcp/main.py,sha256=x-jJUltW4p4j0Vx-LaixbY0Oik6QZ81K2wdDeTNytME,3497
- casual_mcp/mcp_tool_chat.py,sha256=pIAQD-ghyLSGuLzXiG5Sv81-NHaNb5NVqRojJalwS1o,3113
- casual_mcp/multi_server_mcp_client.py,sha256=RrLO7wFGzkUgzmliagkOAx16lrvEG323MGPEU7Sw56o,5615
- casual_mcp/utils.py,sha256=8ekPpIfcqheMMjjKGe6lk81AWKpmCAixOXx_KJXGRAQ,2758
- casual_mcp/models/__init__.py,sha256=hHT-GBD0YMjHdJ4QGVefXQZsHu3bPd1vlizVdfYXoQ0,660
- casual_mcp/models/config.py,sha256=ITu3WAPMad7i2CS3ljkHapjT8lLm7k6HFUF6N73U1oo,294
- casual_mcp/models/generation_error.py,sha256=n1mF3vc1Sg_9yIe603G1nTP395Tht8JMKHqdMWFNAn0,259
- casual_mcp/models/mcp_server_config.py,sha256=o4uxq9JnrLRRHe0KNsaYE3P03wJdW1EmX18fmF7SoTQ,857
- casual_mcp/models/messages.py,sha256=5UASrYqlXeqaziDT8Zsej0kA7Ofce0109YlFAyQDuTY,688
- casual_mcp/models/model_config.py,sha256=gN5hNDfbur_bHgrji87CcU2WgNZO-F3eveK4pVWVSAE,435
- casual_mcp/models/tool_call.py,sha256=BKMxcmyW7EmNoG1jgS9PXXvf6RQIHf7wB8fElEbc4gA,271
- casual_mcp/providers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- casual_mcp/providers/abstract_provider.py,sha256=kACSVgPY9qTqD1IgIWY9HkS8US2B0Nm7MyGJk0GLfDk,307
- casual_mcp/providers/ollama_provider.py,sha256=DKX9QTDl9DspWJSghuQgOzHZgjmTVtf5uyRH_DeOgQc,2601
- casual_mcp/providers/openai_provider.py,sha256=VIymU3Jimncne0c1fyowy4BFBpyfK70eG-2rP_YGDzc,6153
- casual_mcp/providers/provider_factory.py,sha256=Bub4y4uHFc23VCOeRicT_Fi54IdcjHUgYAWPo1oiSo4,1519
- casual_mcp-0.1.0.dist-info/licenses/LICENSE,sha256=U3Zu2tkrh5vXdy7gIdE8WJGM9D4gGp3hohAAWdre-yo,1058
- casual_mcp-0.1.0.dist-info/METADATA,sha256=PCJXGER8P8MgnqNBNRPVSysqumU4654KfUpUUyNLEdE,11576
- casual_mcp-0.1.0.dist-info/WHEEL,sha256=zaaOINJESkSfm_4HQVc5ssNzHCPXhJm0kEUakpsEHaU,91
- casual_mcp-0.1.0.dist-info/entry_points.txt,sha256=X48Np2cwl-SlRQdV26y2vPZ-2tJaODgZeVtfpHho-zg,50
- casual_mcp-0.1.0.dist-info/top_level.txt,sha256=K4CiI0Jf8PHICjuQVm32HuNMB44kp8Lb02bbbdiH5bo,11
- casual_mcp-0.1.0.dist-info/RECORD,,