casual-mcp 0.3.0__tar.gz → 0.4.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (29)
  1. {casual_mcp-0.3.0/src/casual_mcp.egg-info → casual_mcp-0.4.0}/PKG-INFO +3 -2
  2. {casual_mcp-0.3.0 → casual_mcp-0.4.0}/pyproject.toml +5 -4
  3. {casual_mcp-0.3.0 → casual_mcp-0.4.0}/src/casual_mcp/cli.py +19 -3
  4. {casual_mcp-0.3.0 → casual_mcp-0.4.0}/src/casual_mcp/main.py +6 -3
  5. {casual_mcp-0.3.0 → casual_mcp-0.4.0}/src/casual_mcp/mcp_tool_chat.py +2 -1
  6. {casual_mcp-0.3.0 → casual_mcp-0.4.0}/src/casual_mcp/providers/openai_provider.py +2 -1
  7. {casual_mcp-0.3.0 → casual_mcp-0.4.0/src/casual_mcp.egg-info}/PKG-INFO +3 -2
  8. {casual_mcp-0.3.0 → casual_mcp-0.4.0}/src/casual_mcp.egg-info/requires.txt +2 -1
  9. {casual_mcp-0.3.0 → casual_mcp-0.4.0}/LICENSE +0 -0
  10. {casual_mcp-0.3.0 → casual_mcp-0.4.0}/README.md +0 -0
  11. {casual_mcp-0.3.0 → casual_mcp-0.4.0}/setup.cfg +0 -0
  12. {casual_mcp-0.3.0 → casual_mcp-0.4.0}/src/casual_mcp/__init__.py +0 -0
  13. {casual_mcp-0.3.0 → casual_mcp-0.4.0}/src/casual_mcp/logging.py +0 -0
  14. {casual_mcp-0.3.0 → casual_mcp-0.4.0}/src/casual_mcp/models/__init__.py +0 -0
  15. {casual_mcp-0.3.0 → casual_mcp-0.4.0}/src/casual_mcp/models/config.py +0 -0
  16. {casual_mcp-0.3.0 → casual_mcp-0.4.0}/src/casual_mcp/models/generation_error.py +0 -0
  17. {casual_mcp-0.3.0 → casual_mcp-0.4.0}/src/casual_mcp/models/mcp_server_config.py +0 -0
  18. {casual_mcp-0.3.0 → casual_mcp-0.4.0}/src/casual_mcp/models/messages.py +0 -0
  19. {casual_mcp-0.3.0 → casual_mcp-0.4.0}/src/casual_mcp/models/model_config.py +0 -0
  20. {casual_mcp-0.3.0 → casual_mcp-0.4.0}/src/casual_mcp/models/tool_call.py +0 -0
  21. {casual_mcp-0.3.0 → casual_mcp-0.4.0}/src/casual_mcp/providers/__init__.py +0 -0
  22. {casual_mcp-0.3.0 → casual_mcp-0.4.0}/src/casual_mcp/providers/abstract_provider.py +0 -0
  23. {casual_mcp-0.3.0 → casual_mcp-0.4.0}/src/casual_mcp/providers/ollama_provider.py +0 -0
  24. {casual_mcp-0.3.0 → casual_mcp-0.4.0}/src/casual_mcp/providers/provider_factory.py +0 -0
  25. {casual_mcp-0.3.0 → casual_mcp-0.4.0}/src/casual_mcp/utils.py +0 -0
  26. {casual_mcp-0.3.0 → casual_mcp-0.4.0}/src/casual_mcp.egg-info/SOURCES.txt +0 -0
  27. {casual_mcp-0.3.0 → casual_mcp-0.4.0}/src/casual_mcp.egg-info/dependency_links.txt +0 -0
  28. {casual_mcp-0.3.0 → casual_mcp-0.4.0}/src/casual_mcp.egg-info/entry_points.txt +0 -0
  29. {casual_mcp-0.3.0 → casual_mcp-0.4.0}/src/casual_mcp.egg-info/top_level.txt +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: casual-mcp
3
- Version: 0.3.0
3
+ Version: 0.4.0
4
4
  Summary: Multi-server MCP client for LLM tool orchestration
5
5
  Author: Alex Stansfield
6
6
  License: MIT
@@ -12,13 +12,14 @@ Description-Content-Type: text/markdown
12
12
  License-File: LICENSE
13
13
  Requires-Dist: dateparser>=1.2.1
14
14
  Requires-Dist: fastapi>=0.115.12
15
- Requires-Dist: fastmcp>=2.5.1
15
+ Requires-Dist: fastmcp>=2.12.4
16
16
  Requires-Dist: jinja2>=3.1.6
17
17
  Requires-Dist: ollama>=0.4.8
18
18
  Requires-Dist: openai>=1.78.0
19
19
  Requires-Dist: python-dotenv>=1.1.0
20
20
  Requires-Dist: requests>=2.32.3
21
21
  Requires-Dist: rich>=14.0.0
22
+ Requires-Dist: typer>=0.19.2
22
23
  Requires-Dist: uvicorn>=0.34.2
23
24
  Provides-Extra: dev
24
25
  Requires-Dist: ruff; extra == "dev"
@@ -1,6 +1,6 @@
1
1
  [project]
2
2
  name = "casual-mcp"
3
- version = "0.3.0"
3
+ version = "0.4.0"
4
4
  description = "Multi-server MCP client for LLM tool orchestration"
5
5
  readme = "README.md"
6
6
  requires-python = ">=3.10"
@@ -9,13 +9,14 @@ authors = [{ name = "Alex Stansfield" }]
9
9
  dependencies = [
10
10
  "dateparser>=1.2.1",
11
11
  "fastapi>=0.115.12",
12
- "fastmcp>=2.5.1",
12
+ "fastmcp>=2.12.4",
13
13
  "jinja2>=3.1.6",
14
14
  "ollama>=0.4.8",
15
15
  "openai>=1.78.0",
16
16
  "python-dotenv>=1.1.0",
17
17
  "requests>=2.32.3",
18
18
  "rich>=14.0.0",
19
+ "typer>=0.19.2",
19
20
  "uvicorn>=0.34.2",
20
21
  ]
21
22
 
@@ -65,10 +66,10 @@ dev = [
65
66
  ]
66
67
 
67
68
  [tool.bumpversion]
68
- current_version = "0.3.0"
69
+ current_version = "0.4.0"
69
70
  commit = true
70
71
  tag = true
71
72
 
72
73
  [tool.bumpversion.file."pyproject.toml"]
73
74
  search = 'version = "{current_version}"'
74
- replace = 'version = "{new_version}"'
75
+ replace = 'version = "{new_version}"'
@@ -1,10 +1,11 @@
1
+ import asyncio
1
2
  import typer
2
3
  import uvicorn
3
4
  from rich.console import Console
4
5
  from rich.table import Table
5
6
 
6
7
  from casual_mcp.models.mcp_server_config import RemoteServerConfig
7
- from casual_mcp.utils import load_config
8
+ from casual_mcp.utils import load_config, load_mcp_client
8
9
 
9
10
  app = typer.Typer()
10
11
  console = Console()
@@ -31,7 +32,7 @@ def servers():
31
32
  table = Table("Name", "Type", "Command / Url", "Env")
32
33
 
33
34
  for name, server in config.servers.items():
34
- type = 'local'
35
+ type = 'stdio'
35
36
  if isinstance(server, RemoteServerConfig):
36
37
  type = 'remote'
37
38
 
@@ -39,7 +40,7 @@ def servers():
39
40
  if isinstance(server, RemoteServerConfig):
40
41
  path = server.url
41
42
  else:
42
- path = f"{server.command} {" ".join(server.args)}"
43
+ path = f"{server.command} {' '.join(server.args)}"
43
44
  env = ''
44
45
 
45
46
  table.add_row(name, type, path, env)
@@ -63,6 +64,21 @@ def models():
63
64
 
64
65
  console.print(table)
65
66
 
67
+ @app.command()
68
+ def tools():
69
+ config = load_config('casual_mcp_config.json')
70
+ mcp_client = load_mcp_client(config)
71
+ table = Table("Name", "Description")
72
+ # async with mcp_client:
73
+ tools = asyncio.run(get_tools(mcp_client))
74
+ for tool in tools:
75
+ table.add_row(tool.name, tool.description)
76
+ console.print(table)
77
+
78
+
79
+ async def get_tools(client):
80
+ async with client:
81
+ return await client.list_tools()
66
82
 
67
83
  if __name__ == "__main__":
68
84
  app()
@@ -13,6 +13,11 @@ from casual_mcp.providers.provider_factory import ProviderFactory
13
13
  from casual_mcp.utils import load_config, load_mcp_client, render_system_prompt
14
14
 
15
15
  load_dotenv()
16
+
17
+ # Configure logging
18
+ configure_logging(os.getenv("LOG_LEVEL", 'INFO'))
19
+ logger = get_logger("main")
20
+
16
21
  config = load_config("casual_mcp_config.json")
17
22
  mcp_client = load_mcp_client(config)
18
23
  provider_factory = ProviderFactory(mcp_client)
@@ -61,9 +66,7 @@ class ChatRequest(BaseModel):
61
66
 
62
67
  sys.path.append(str(Path(__file__).parent.resolve()))
63
68
 
64
- # Configure logging
65
- configure_logging(os.getenv("LOG_LEVEL", 'INFO'))
66
- logger = get_logger("main")
69
+
67
70
 
68
71
 
69
72
  @app.post("/chat")
@@ -84,6 +84,7 @@ class McpToolChat:
84
84
  has_system_message = any(message.role == 'system' for message in messages)
85
85
  if self.system and not has_system_message:
86
86
  # Insert the system message at the start of the messages
87
+ logger.debug(f"Adding System Message")
87
88
  messages.insert(0, SystemMessage(content=self.system))
88
89
 
89
90
  logger.info("Start Chat")
@@ -144,7 +145,7 @@ class McpToolChat:
144
145
  logger.debug(f"Tool Call Result: {result}")
145
146
 
146
147
  result_format = os.getenv('TOOL_RESULT_FORMAT', 'result')
147
- content = format_tool_call_result(tool_call, result[0].text, style=result_format)
148
+ content = format_tool_call_result(tool_call, result.content[0].text, style=result_format)
148
149
 
149
150
  return ToolResultMessage(
150
151
  name=tool_call.function.name,
@@ -153,10 +153,10 @@ class OpenAiProvider(CasualMcpProvider):
153
153
  # Convert Messages to OpenAI format
154
154
  converted_messages = convert_messages(messages)
155
155
  logger.debug(f"Converted Messages: {converted_messages}")
156
- logger.info(f"Adding {len(converted_messages)} messages")
157
156
 
158
157
  # Call OpenAi API
159
158
  try:
159
+ logger.info(f"Calling LLM with {len(converted_messages)} messages")
160
160
  result = self.client.chat.completions.create(
161
161
  model=self.model, messages=converted_messages, tools=self.tools
162
162
  )
@@ -166,6 +166,7 @@ class OpenAiProvider(CasualMcpProvider):
166
166
  logger.warning(f"Error in Generation: {e}")
167
167
  raise GenerationError(str(e))
168
168
 
169
+ logger.info(f"LLM Response received")
169
170
  logger.debug(response)
170
171
 
171
172
  # Convert any tool calls
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: casual-mcp
3
- Version: 0.3.0
3
+ Version: 0.4.0
4
4
  Summary: Multi-server MCP client for LLM tool orchestration
5
5
  Author: Alex Stansfield
6
6
  License: MIT
@@ -12,13 +12,14 @@ Description-Content-Type: text/markdown
12
12
  License-File: LICENSE
13
13
  Requires-Dist: dateparser>=1.2.1
14
14
  Requires-Dist: fastapi>=0.115.12
15
- Requires-Dist: fastmcp>=2.5.1
15
+ Requires-Dist: fastmcp>=2.12.4
16
16
  Requires-Dist: jinja2>=3.1.6
17
17
  Requires-Dist: ollama>=0.4.8
18
18
  Requires-Dist: openai>=1.78.0
19
19
  Requires-Dist: python-dotenv>=1.1.0
20
20
  Requires-Dist: requests>=2.32.3
21
21
  Requires-Dist: rich>=14.0.0
22
+ Requires-Dist: typer>=0.19.2
22
23
  Requires-Dist: uvicorn>=0.34.2
23
24
  Provides-Extra: dev
24
25
  Requires-Dist: ruff; extra == "dev"
@@ -1,12 +1,13 @@
1
1
  dateparser>=1.2.1
2
2
  fastapi>=0.115.12
3
- fastmcp>=2.5.1
3
+ fastmcp>=2.12.4
4
4
  jinja2>=3.1.6
5
5
  ollama>=0.4.8
6
6
  openai>=1.78.0
7
7
  python-dotenv>=1.1.0
8
8
  requests>=2.32.3
9
9
  rich>=14.0.0
10
+ typer>=0.19.2
10
11
  uvicorn>=0.34.2
11
12
 
12
13
  [dev]
File without changes
File without changes
File without changes