casual-mcp 0.3.1__tar.gz → 0.4.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (29)
  1. {casual_mcp-0.3.1/src/casual_mcp.egg-info → casual_mcp-0.4.0}/PKG-INFO +3 -2
  2. {casual_mcp-0.3.1 → casual_mcp-0.4.0}/pyproject.toml +4 -3
  3. {casual_mcp-0.3.1 → casual_mcp-0.4.0}/src/casual_mcp/main.py +6 -3
  4. {casual_mcp-0.3.1 → casual_mcp-0.4.0}/src/casual_mcp/mcp_tool_chat.py +1 -1
  5. {casual_mcp-0.3.1 → casual_mcp-0.4.0}/src/casual_mcp/providers/openai_provider.py +2 -1
  6. {casual_mcp-0.3.1 → casual_mcp-0.4.0/src/casual_mcp.egg-info}/PKG-INFO +3 -2
  7. {casual_mcp-0.3.1 → casual_mcp-0.4.0}/src/casual_mcp.egg-info/requires.txt +2 -1
  8. {casual_mcp-0.3.1 → casual_mcp-0.4.0}/LICENSE +0 -0
  9. {casual_mcp-0.3.1 → casual_mcp-0.4.0}/README.md +0 -0
  10. {casual_mcp-0.3.1 → casual_mcp-0.4.0}/setup.cfg +0 -0
  11. {casual_mcp-0.3.1 → casual_mcp-0.4.0}/src/casual_mcp/__init__.py +0 -0
  12. {casual_mcp-0.3.1 → casual_mcp-0.4.0}/src/casual_mcp/cli.py +0 -0
  13. {casual_mcp-0.3.1 → casual_mcp-0.4.0}/src/casual_mcp/logging.py +0 -0
  14. {casual_mcp-0.3.1 → casual_mcp-0.4.0}/src/casual_mcp/models/__init__.py +0 -0
  15. {casual_mcp-0.3.1 → casual_mcp-0.4.0}/src/casual_mcp/models/config.py +0 -0
  16. {casual_mcp-0.3.1 → casual_mcp-0.4.0}/src/casual_mcp/models/generation_error.py +0 -0
  17. {casual_mcp-0.3.1 → casual_mcp-0.4.0}/src/casual_mcp/models/mcp_server_config.py +0 -0
  18. {casual_mcp-0.3.1 → casual_mcp-0.4.0}/src/casual_mcp/models/messages.py +0 -0
  19. {casual_mcp-0.3.1 → casual_mcp-0.4.0}/src/casual_mcp/models/model_config.py +0 -0
  20. {casual_mcp-0.3.1 → casual_mcp-0.4.0}/src/casual_mcp/models/tool_call.py +0 -0
  21. {casual_mcp-0.3.1 → casual_mcp-0.4.0}/src/casual_mcp/providers/__init__.py +0 -0
  22. {casual_mcp-0.3.1 → casual_mcp-0.4.0}/src/casual_mcp/providers/abstract_provider.py +0 -0
  23. {casual_mcp-0.3.1 → casual_mcp-0.4.0}/src/casual_mcp/providers/ollama_provider.py +0 -0
  24. {casual_mcp-0.3.1 → casual_mcp-0.4.0}/src/casual_mcp/providers/provider_factory.py +0 -0
  25. {casual_mcp-0.3.1 → casual_mcp-0.4.0}/src/casual_mcp/utils.py +0 -0
  26. {casual_mcp-0.3.1 → casual_mcp-0.4.0}/src/casual_mcp.egg-info/SOURCES.txt +0 -0
  27. {casual_mcp-0.3.1 → casual_mcp-0.4.0}/src/casual_mcp.egg-info/dependency_links.txt +0 -0
  28. {casual_mcp-0.3.1 → casual_mcp-0.4.0}/src/casual_mcp.egg-info/entry_points.txt +0 -0
  29. {casual_mcp-0.3.1 → casual_mcp-0.4.0}/src/casual_mcp.egg-info/top_level.txt +0 -0

PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: casual-mcp
- Version: 0.3.1
+ Version: 0.4.0
  Summary: Multi-server MCP client for LLM tool orchestration
  Author: Alex Stansfield
  License: MIT
@@ -12,13 +12,14 @@ Description-Content-Type: text/markdown
  License-File: LICENSE
  Requires-Dist: dateparser>=1.2.1
  Requires-Dist: fastapi>=0.115.12
- Requires-Dist: fastmcp>=2.5.2
+ Requires-Dist: fastmcp>=2.12.4
  Requires-Dist: jinja2>=3.1.6
  Requires-Dist: ollama>=0.4.8
  Requires-Dist: openai>=1.78.0
  Requires-Dist: python-dotenv>=1.1.0
  Requires-Dist: requests>=2.32.3
  Requires-Dist: rich>=14.0.0
+ Requires-Dist: typer>=0.19.2
  Requires-Dist: uvicorn>=0.34.2
  Provides-Extra: dev
  Requires-Dist: ruff; extra == "dev"

pyproject.toml
@@ -1,6 +1,6 @@
  [project]
  name = "casual-mcp"
- version = "0.3.1"
+ version = "0.4.0"
  description = "Multi-server MCP client for LLM tool orchestration"
  readme = "README.md"
  requires-python = ">=3.10"
@@ -9,13 +9,14 @@ authors = [{ name = "Alex Stansfield" }]
  dependencies = [
  "dateparser>=1.2.1",
  "fastapi>=0.115.12",
- "fastmcp>=2.5.2",
+ "fastmcp>=2.12.4",
  "jinja2>=3.1.6",
  "ollama>=0.4.8",
  "openai>=1.78.0",
  "python-dotenv>=1.1.0",
  "requests>=2.32.3",
  "rich>=14.0.0",
+ "typer>=0.19.2",
  "uvicorn>=0.34.2",
  ]

@@ -65,7 +66,7 @@ dev = [
  ]

  [tool.bumpversion]
- current_version = "0.3.1"
+ current_version = "0.4.0"
  commit = true
  tag = true


src/casual_mcp/main.py
@@ -13,6 +13,11 @@ from casual_mcp.providers.provider_factory import ProviderFactory
  from casual_mcp.utils import load_config, load_mcp_client, render_system_prompt

  load_dotenv()
+
+ # Configure logging
+ configure_logging(os.getenv("LOG_LEVEL", 'INFO'))
+ logger = get_logger("main")
+
  config = load_config("casual_mcp_config.json")
  mcp_client = load_mcp_client(config)
  provider_factory = ProviderFactory(mcp_client)
@@ -61,9 +66,7 @@ class ChatRequest(BaseModel):

  sys.path.append(str(Path(__file__).parent.resolve()))

- # Configure logging
- configure_logging(os.getenv("LOG_LEVEL", 'INFO'))
- logger = get_logger("main")
+


  @app.post("/chat")
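
The main.py change moves the logging setup from further down the module to just after load_dotenv(), so the LOG_LEVEL read from the environment takes effect before the config file and MCP client are loaded. A minimal sketch of how the top of the module likely reads in 0.4.0 (assuming configure_logging and get_logger come from casual_mcp.logging, as the hunk context suggests; this is an illustration, not a verbatim excerpt of the released file):

    import os

    from dotenv import load_dotenv

    # Assumed import location for the logging helpers referenced in the diff.
    from casual_mcp.logging import configure_logging, get_logger
    from casual_mcp.providers.provider_factory import ProviderFactory
    from casual_mcp.utils import load_config, load_mcp_client, render_system_prompt

    load_dotenv()

    # Logging is now configured at import time, before the config and MCP
    # client are created, so those steps run with logging already in place.
    configure_logging(os.getenv("LOG_LEVEL", "INFO"))
    logger = get_logger("main")

    config = load_config("casual_mcp_config.json")
    mcp_client = load_mcp_client(config)
    provider_factory = ProviderFactory(mcp_client)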

src/casual_mcp/mcp_tool_chat.py
@@ -145,7 +145,7 @@ class McpToolChat:
  logger.debug(f"Tool Call Result: {result}")

  result_format = os.getenv('TOOL_RESULT_FORMAT', 'result')
- content = format_tool_call_result(tool_call, result[0].text, style=result_format)
+ content = format_tool_call_result(tool_call, result.content[0].text, style=result_format)

  return ToolResultMessage(
  name=tool_call.function.name,
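
The one-line change above tracks the fastmcp bump (2.5.2 → 2.12.4): in recent fastmcp releases, Client.call_tool returns a CallToolResult object whose content blocks sit under .content, rather than a bare list of blocks. A minimal before/after sketch (the tool name and arguments are placeholders, and the exact return type should be confirmed against the fastmcp changelog):

    # Hypothetical tool call; "get_time" and its arguments are placeholders.
    result = await client.call_tool("get_time", {"timezone": "UTC"})

    # casual-mcp 0.3.1 (older fastmcp): the result behaved like a list of blocks.
    text = result[0].text

    # casual-mcp 0.4.0 (fastmcp >= 2.12): the result is a CallToolResult, so
    # the text content is reached through .content.
    text = result.content[0].text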

src/casual_mcp/providers/openai_provider.py
@@ -153,10 +153,10 @@ class OpenAiProvider(CasualMcpProvider):
  # Convert Messages to OpenAI format
  converted_messages = convert_messages(messages)
  logger.debug(f"Converted Messages: {converted_messages}")
- logger.info(f"Adding {len(converted_messages)} messages")

  # Call OpenAi API
  try:
+ logger.info(f"Calling LLM with {len(converted_messages)} messages")
  result = self.client.chat.completions.create(
  model=self.model, messages=converted_messages, tools=self.tools
  )
@@ -166,6 +166,7 @@ class OpenAiProvider(CasualMcpProvider):
  logger.warning(f"Error in Generation: {e}")
  raise GenerationError(str(e))

+ logger.info(f"LLM Response received")
  logger.debug(response)

  # Convert any tool calls

src/casual_mcp.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: casual-mcp
- Version: 0.3.1
+ Version: 0.4.0
  Summary: Multi-server MCP client for LLM tool orchestration
  Author: Alex Stansfield
  License: MIT
@@ -12,13 +12,14 @@ Description-Content-Type: text/markdown
  License-File: LICENSE
  Requires-Dist: dateparser>=1.2.1
  Requires-Dist: fastapi>=0.115.12
- Requires-Dist: fastmcp>=2.5.2
+ Requires-Dist: fastmcp>=2.12.4
  Requires-Dist: jinja2>=3.1.6
  Requires-Dist: ollama>=0.4.8
  Requires-Dist: openai>=1.78.0
  Requires-Dist: python-dotenv>=1.1.0
  Requires-Dist: requests>=2.32.3
  Requires-Dist: rich>=14.0.0
+ Requires-Dist: typer>=0.19.2
  Requires-Dist: uvicorn>=0.34.2
  Provides-Extra: dev
  Requires-Dist: ruff; extra == "dev"

src/casual_mcp.egg-info/requires.txt
@@ -1,12 +1,13 @@
  dateparser>=1.2.1
  fastapi>=0.115.12
- fastmcp>=2.5.2
+ fastmcp>=2.12.4
  jinja2>=3.1.6
  ollama>=0.4.8
  openai>=1.78.0
  python-dotenv>=1.1.0
  requests>=2.32.3
  rich>=14.0.0
+ typer>=0.19.2
  uvicorn>=0.34.2

  [dev]