mcp-ollama-python 1.0.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1 @@
1
+ __all__ = ["main"]
@@ -0,0 +1,8 @@
1
"""
Entry point for running as: python -m mcp_ollama_python
"""

from .main import run

# Delegate to the console-script entry point so `python -m mcp_ollama_python`
# and the installed `mcp-ollama-python` command behave identically.
if __name__ == "__main__":
    run()
@@ -0,0 +1,97 @@
1
+ """
2
+ Tool autoloader - dynamically discovers and loads tools from the tools directory
3
+ """
4
+
5
+ import importlib
6
+ import os
7
+ import pkgutil
8
from typing import Any, Callable, Dict, List, Optional
9
+
10
+ try:
11
+ from mcp_ollama_python.ollama_client import OllamaClient
12
+ from mcp_ollama_python.models import ToolDefinition, ResponseFormat
13
+ except ImportError:
14
+ from .ollama_client import OllamaClient
15
+ from .models import ToolDefinition, ResponseFormat
16
+
17
+ # Type for tool handler function
18
+ ToolHandler = Callable[[OllamaClient, Dict[str, Any], ResponseFormat], str]
19
+
20
+
21
class ToolRegistry:
    """Registry for tool definitions and their handlers"""

    def __init__(self):
        # Ordered list of tool metadata, plus a name -> handler lookup table.
        self.tools: List[ToolDefinition] = []
        self.handlers: Dict[str, ToolHandler] = {}

    def register(self, tool_def: ToolDefinition, handler: ToolHandler) -> None:
        """Register a tool definition with its handler.

        Registering the same name twice overwrites the handler but appends a
        duplicate entry to ``tools``; callers are expected to register each
        tool once.
        """
        self.tools.append(tool_def)
        self.handlers[tool_def.name] = handler

    def get_handler(self, tool_name: str) -> Optional[ToolHandler]:
        """Return the handler for *tool_name*, or None if it is not registered.

        Fix: the previous annotation claimed a ``ToolHandler`` was always
        returned, but ``dict.get`` yields ``None`` on a miss.
        """
        return self.handlers.get(tool_name)
36
+
37
+
38
async def discover_tools() -> List[ToolDefinition]:
    """Discover and load all tools from the tools directory (backward compatibility)"""
    # Thin wrapper kept for older callers; the registry carries the handlers.
    return (await discover_tools_with_handlers()).tools
42
+
43
+
44
async def discover_tools_with_handlers() -> ToolRegistry:
    """Discover and load all tools with their handlers from the tools directory.

    Scans the ``mcp_ollama_python.tools`` package for modules exporting a
    ``tool_definition`` (dict or ToolDefinition) and a callable whose name
    ends in ``_handler``. Modules that fail to load are skipped with a
    warning rather than aborting discovery.

    NOTE: declared ``async`` for API symmetry; it performs no awaits itself.
    NOTE(review): warnings go to stdout via print(); in an MCP stdio server
    that stream carries the protocol — confirm this is intentional.
    """

    registry = ToolRegistry()

    # Get the tools package path
    tools_package = importlib.import_module(".tools", package="mcp_ollama_python")

    # Get the directory path
    if hasattr(tools_package, "__path__"):
        tools_dir = tools_package.__path__[0]
    else:
        # Fallback for different Python versions
        tools_dir = os.path.dirname(tools_package.__file__)

    # Iterate through all Python files in the tools directory
    for _, module_name, _ in pkgutil.iter_modules([tools_dir]):
        # Skip dunder modules such as __init__.
        if module_name.startswith("__"):
            continue

        try:
            # Import the module
            module = importlib.import_module(
                f".tools.{module_name}", package="mcp_ollama_python"
            )

            # Check if module exports tool_definition
            if hasattr(module, "tool_definition"):
                tool_def = getattr(module, "tool_definition")

                # Convert dict to ToolDefinition if needed
                if isinstance(tool_def, dict):
                    tool_def = ToolDefinition(**tool_def)

                if isinstance(tool_def, ToolDefinition):
                    # Find the handler function (should end with _handler).
                    # dir() is alphabetical, so with multiple *_handler
                    # functions the first alphabetically wins.
                    handler = None
                    for attr_name in dir(module):
                        if attr_name.endswith("_handler") and callable(
                            getattr(module, attr_name)
                        ):
                            handler = getattr(module, attr_name)
                            break

                    if handler:
                        registry.register(tool_def, handler)
                    else:
                        print(f"Warning: Tool {tool_def.name} has no handler function")

        except Exception as e:
            # Best-effort loading: one broken tool must not break the rest.
            print(f"Warning: Failed to load tool {module_name}: {e}")
            continue

    return registry
@@ -0,0 +1,59 @@
1
+ """
2
+ Get a Spanish poem using local MCP Ollama server with gpt-oss model
3
+ """
4
+
5
+ import asyncio
6
+ import json
7
+ from mcp_ollama_python.ollama_client import OllamaClient
8
+ from mcp_ollama_python.models import ChatMessage
9
+
10
+
11
async def get_spanish_poem():
    """Generate a Spanish poem using gpt-oss model.

    Returns the poem text on success, or None if the request failed or the
    response had no "message" key. Requires a local Ollama server with the
    ``gpt-oss`` model available.
    """
    print("Connecting to Ollama server...")
    client = OllamaClient()

    print("Requesting Spanish poem from gpt-oss model...")

    # Create chat message
    messages = [
        ChatMessage(
            role="user",
            content="Escribe un poema bonito en español sobre la vida, el amor y la esperanza. Hazlo corto pero emotivo.",
        )
    ]

    try:
        # Call Ollama chat API (non-streaming; returns a single dict)
        result = await client.chat(model="gpt-oss", messages=messages)

        # Extract the response
        if result and "message" in result:
            poem = result["message"].get("content", "")
            print("\n" + "=" * 60)
            print("POEMA EN ESPAÑOL (Spanish Poem)")
            print("=" * 60)
            print(poem)
            print("=" * 60)
            return poem
        else:
            print("Error: No response received")
            print(f"Result: {json.dumps(result, indent=2)}")
            return None
    except Exception as e:
        print(f"Error calling Ollama: {e}")
        return None
    finally:
        # Reaches into the wrapped httpx client to close the connection pool.
        await client.client.aclose()
48
+
49
+
50
# Script entry: run the demo coroutine and report errors with a traceback.
if __name__ == "__main__":
    try:
        asyncio.run(get_spanish_poem())
    except KeyboardInterrupt:
        print("\n\nInterrupted by user")
    except Exception as e:
        print(f"\nError: {e}")
        import traceback

        traceback.print_exc()
@@ -0,0 +1,214 @@
1
+ """
2
+ Ollama MCP Server - Main entry point
3
+ """
4
+
5
+ import asyncio
6
+ import signal
7
+ import sys
8
+ from typing import Optional
9
+
10
+ # Use absolute imports so PyInstaller/standalone execution works even when __package__ is not set
11
+ try:
12
+ from mcp_ollama_python.server import OllamaMCPServer
13
+ from mcp_ollama_python.ollama_client import OllamaClient
14
+ except ImportError:
15
+ from .server import OllamaMCPServer
16
+ from .ollama_client import OllamaClient
17
+
18
+ try:
19
+ from mcp.server import Server
20
+ from mcp.types import (
21
+ TextContent,
22
+ Tool as MCPTool,
23
+ Resource,
24
+ Prompt,
25
+ )
26
+ from mcp.server.stdio import stdio_server
27
+ except ImportError as e:
28
+ print(f"Error: mcp package import failed: {e}")
29
+ print("Please install it with: pip install mcp")
30
+ sys.exit(1)
31
+
32
+
33
+ # Global server instance for signal handling
34
+ _server_instance: Optional[Server] = None
35
+ _shutdown_event: Optional[asyncio.Event] = None
36
+
37
+
38
def safe_print(*args, **kwargs):
    """Print *args, silently ignoring errors from a closed stdout/stderr."""
    try:
        print(*args, **kwargs)
    except (OSError, ValueError):
        # The streams may already be closed during interpreter shutdown.
        pass
45
+
46
+
47
def signal_handler(signum, frame):
    """Handle shutdown signals gracefully.

    Sets the module-level shutdown event that main() is waiting on.
    *frame* is unused but required by the signal.signal callback signature.
    """
    safe_print(f"\nReceived signal {signum}, shutting down gracefully...")
    # _shutdown_event is None until main() has created it.
    if _shutdown_event:
        _shutdown_event.set()
52
+
53
+
54
async def main():
    """Main function to start the MCP server.

    Wires an OllamaMCPServer into the `mcp` stdio transport, registers the
    MCP request handlers, and runs until the server task finishes or a
    SIGINT/SIGTERM sets the shutdown event. Always closes the Ollama HTTP
    client on exit.

    NOTE(review): safe_print writes to stdout, which is also the MCP stdio
    transport channel — confirm the mcp library tolerates this.
    """
    global _server_instance, _shutdown_event

    # Create shutdown event
    _shutdown_event = asyncio.Event()

    # Register signal handlers for graceful shutdown
    signal.signal(signal.SIGINT, signal_handler)
    signal.signal(signal.SIGTERM, signal_handler)

    # Initialize Ollama client
    ollama_client = OllamaClient()

    # Initialize our MCP server (project-level request logic)
    server = OllamaMCPServer(ollama_client)

    # Create MCP server instance (protocol layer)
    mcp_server = Server("ollama-mcp")
    _server_instance = mcp_server

    safe_print("Starting Ollama MCP Server...")
    safe_print("Press Ctrl+C to stop the server")

    @mcp_server.list_tools()
    async def handle_list_tools() -> list[MCPTool]:
        """Handle list_tools request: adapt dicts from OllamaMCPServer to MCPTool."""
        result = await server.handle_list_tools()
        tools = []
        for tool_data in result["tools"]:
            tools.append(
                MCPTool(
                    name=tool_data["name"],
                    description=tool_data["description"],
                    inputSchema=tool_data["inputSchema"],
                )
            )
        return tools

    @mcp_server.call_tool()
    async def handle_call_tool(name: str, arguments: dict) -> list[TextContent]:
        """Handle call_tool request; only "text" content items are forwarded."""
        result = await server.handle_call_tool(name, arguments)

        content = []
        for item in result["content"]:
            if item["type"] == "text":
                content.append(TextContent(type="text", text=item["text"]))

        return content

    @mcp_server.list_resources()
    async def handle_list_resources() -> list[Resource]:
        """Handle list_resources request, defaulting mimeType to text/plain."""
        result = await server.handle_list_resources()
        resources = []
        for resource_data in result["resources"]:
            resources.append(
                Resource(
                    uri=resource_data["uri"],
                    name=resource_data["name"],
                    description=resource_data.get("description"),
                    mimeType=resource_data.get("mimeType", "text/plain"),
                )
            )
        return resources

    @mcp_server.read_resource()
    async def handle_read_resource(uri: str) -> str:
        """Handle read_resource request; returns first content's text or ""."""
        result = await server.handle_read_resource(uri)
        if result.get("contents"):
            return result["contents"][0].get("text", "")
        return ""

    @mcp_server.list_prompts()
    async def handle_list_prompts() -> list[Prompt]:
        """Handle list_prompts request."""
        result = await server.handle_list_prompts()
        prompts = []
        for prompt_data in result["prompts"]:
            prompts.append(
                Prompt(
                    name=prompt_data["name"],
                    description=prompt_data.get("description"),
                    arguments=prompt_data.get("arguments", []),
                )
            )
        return prompts

    @mcp_server.get_prompt()
    async def handle_get_prompt(name: str, arguments: Optional[dict] = None) -> dict:
        """Handle get_prompt request (passed through unchanged)."""
        result = await server.handle_get_prompt(name, arguments)
        return result

    # Run the server with graceful shutdown support
    try:
        async with stdio_server() as (read_stream, write_stream):
            safe_print("Server started successfully!")
            safe_print("Waiting for MCP client connections...")

            # Create server task
            server_task = asyncio.create_task(
                mcp_server.run(
                    read_stream,
                    write_stream,
                    mcp_server.create_initialization_options(),
                )
            )

            # Wait for either server completion or shutdown signal
            shutdown_task = asyncio.create_task(_shutdown_event.wait())
            done, pending = await asyncio.wait(
                [server_task, shutdown_task], return_when=asyncio.FIRST_COMPLETED
            )

            # Cancel whichever task did not finish and await its cancellation
            for task in pending:
                task.cancel()
                try:
                    await task
                except asyncio.CancelledError:
                    pass

            safe_print("Server stopped.")

    except KeyboardInterrupt:
        safe_print("\nServer interrupted by user")
    except Exception as e:
        safe_print(f"Server error: {e}")
        raise
    finally:
        # Cleanup: always close the httpx connection pool
        await ollama_client.client.aclose()
        safe_print("Cleanup completed.")
190
+
191
+
192
def run():
    """Entry point for the mcp-ollama-python command.

    Runs the async server to completion; exits 0 on Ctrl+C, 1 on any
    other fatal error.
    """
    exit_code = 0
    try:
        asyncio.run(main())
    except KeyboardInterrupt:
        safe_print("\nShutdown complete.")
    except Exception as e:
        safe_print(f"Fatal error: {e}")
        exit_code = 1
    else:
        # Normal completion: fall through without forcing an exit code.
        return
    sys.exit(exit_code)
202
+
203
+
204
def stop():
    """Stop the running server (for programmatic control)"""
    # The event only exists after main() has started.
    if _shutdown_event is None:
        safe_print("No server instance running")
        return
    _shutdown_event.set()
    safe_print("Stop signal sent to server")
211
+
212
+
213
+ if __name__ == "__main__":
214
+ run()
@@ -0,0 +1,119 @@
1
+ """
2
+ Core types and enums for Ollama MCP Server
3
+ """
4
+
5
+ from enum import Enum
6
+ from typing import Any, Dict, List, Optional
7
+ from pydantic import BaseModel, Field
8
+
9
+
10
class ResponseFormat(str, Enum):
    """Response format for tool outputs.

    str subclass so values compare/serialize as plain strings.
    """

    MARKDOWN = "markdown"
    JSON = "json"
+
16
+
17
class GenerationOptions(BaseModel):
    """Generation options that can be passed to Ollama models.

    All fields default to None; callers serialize with exclude_unset=True
    so only explicitly-set options reach the API.
    """

    temperature: Optional[float] = None
    top_p: Optional[float] = None
    top_k: Optional[int] = None
    num_predict: Optional[int] = None
    repeat_penalty: Optional[float] = None
    seed: Optional[int] = None
    stop: Optional[List[str]] = None
27
+
28
+
29
class MessageRole(str, Enum):
    """Message role for chat (str subclass: serializes as the plain value)."""

    SYSTEM = "system"
    USER = "user"
    ASSISTANT = "assistant"
35
+
36
+
37
class ChatMessage(BaseModel):
    """Chat message structure.

    NOTE(review): "ToolCall" is a forward reference to a class defined later
    in this module — assumes pydantic resolves it at first validation;
    confirm with the pydantic version in use.
    """

    role: MessageRole
    # Message body text.
    content: str
    # Optional base64-encoded images for multimodal models.
    images: Optional[List[str]] = None
    tool_calls: Optional[List["ToolCall"]] = None
44
+
45
+
46
class Tool(BaseModel):
    """Tool definition for function calling."""

    # Tool kind discriminator (e.g. "function").
    type: str
    # Raw function spec passed through to the Ollama API.
    function: Dict[str, Any]
51
+
52
+
53
class ToolCall(BaseModel):
    """Tool call made by the model."""

    # Raw function-call payload as returned by the model.
    function: Dict[str, Any]
57
+
58
+
59
class ToolDefinition(BaseModel):
    """Represents a tool's metadata and handler function."""

    name: str
    description: str
    # JSON-Schema for the tool's arguments; defaults to an empty object schema.
    input_schema: Dict[str, Any] = Field(
        default_factory=lambda: {"type": "object", "properties": {}}
    )
67
+
68
+
69
class ToolContext(BaseModel):
    """Base tool context passed to all tool implementations.

    Intentionally empty: dependencies are injected directly into handlers.
    """

    pass  # In Python, we'll use dependency injection
73
+
74
+
75
class ToolResult(BaseModel):
    """Tool result with content and format."""

    # Rendered output text.
    content: str
    # How `content` should be interpreted (markdown or json).
    format: ResponseFormat
80
+
81
+
82
+ # Error types
83
class OllamaError(Exception):
    """Base error for Ollama operations.

    Keeps the originating low-level exception on ``cause`` (in addition to
    any ``raise ... from`` chaining done by callers).
    """

    def __init__(self, message: str, cause: Optional[Exception] = None):
        super().__init__(message)
        self.cause = cause
89
+
90
+
91
class ModelNotFoundError(OllamaError):
    """Error when a model is not found."""

    def __init__(self, model_name: str):
        # Message embeds the model name and points users at the listing tool.
        super().__init__(
            f"Model not found: {model_name}. Use ollama_list to see available models."
        )
98
+
99
+
100
class NetworkError(OllamaError):
    """Network-related error (connection/transport failures, not HTTP errors)."""

    pass
104
+
105
+
106
class WebSearchResult(BaseModel):
    """Web search result."""

    title: str
    url: str
    # Snippet or page body text for the result.
    content: str
112
+
113
+
114
class WebFetchResult(BaseModel):
    """Web fetch result."""

    title: str
    content: str
    # Hyperlinks extracted from the fetched page.
    links: List[str]
@@ -0,0 +1,169 @@
1
+ """
2
+ Ollama HTTP client wrapper
3
+ """
4
+
5
+ import os
6
+ import httpx
7
+ from typing import Any, Dict, List, Optional, Union
8
+
9
+ try:
10
+ from mcp_ollama_python.models import (
11
+ GenerationOptions,
12
+ ChatMessage,
13
+ Tool,
14
+ OllamaError,
15
+ NetworkError,
16
+ )
17
+ except ImportError:
18
+ from .models import GenerationOptions, ChatMessage, Tool, OllamaError, NetworkError
19
+
20
+
21
class OllamaClient:
    """HTTP client for Ollama API.

    Thin async wrapper over httpx that funnels all failures into
    OllamaError (HTTP status errors) or NetworkError (everything else).
    """

    def __init__(self, host: Optional[str] = None, api_key: Optional[str] = None):
        """Create the client.

        Args:
            host: Base URL of the Ollama server; falls back to the
                OLLAMA_HOST env var, then http://127.0.0.1:11434.
            api_key: Optional bearer token; falls back to OLLAMA_API_KEY.
        """
        self.host = host or os.getenv("OLLAMA_HOST", "http://127.0.0.1:11434")
        self.api_key = api_key or os.getenv("OLLAMA_API_KEY")

        # Create httpx client; only send Authorization when a key is set.
        headers = {}
        if self.api_key:
            headers["Authorization"] = f"Bearer {self.api_key}"

        self.client = httpx.AsyncClient(
            base_url=self.host,
            headers=headers,
            timeout=300.0,  # 5 minute timeout (pulls/generation can be slow)
            follow_redirects=True,  # Follow HTTP redirects (301, 302, etc.)
        )

    async def __aenter__(self):
        return self

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        # Close the connection pool when used as an async context manager.
        await self.client.aclose()

    async def _get(self, endpoint: str) -> Dict[str, Any]:
        """Make a GET request to Ollama API and return the parsed JSON body.

        Raises:
            OllamaError: on a non-2xx HTTP status.
            NetworkError: on connection/transport failures.
        """
        try:
            response = await self.client.get(endpoint)
            response.raise_for_status()
            return response.json()
        except httpx.HTTPStatusError as e:
            raise OllamaError(
                f"Ollama API error: {e.response.status_code} - {e.response.text}",
                cause=e,
            ) from e
        # Fix: re-raise OllamaError untouched, matching _post/_delete, so it
        # can never be re-wrapped as a NetworkError by the catch-all below.
        except OllamaError:
            raise
        except Exception as e:
            raise NetworkError(f"Failed to connect to Ollama: {str(e)}", cause=e) from e

    async def _post(self, endpoint: str, data: Dict[str, Any]) -> Dict[str, Any]:
        """Make a POST request to Ollama API and return the parsed JSON body.

        Raises:
            OllamaError: on a non-2xx HTTP status.
            NetworkError: on connection/transport failures.
        """
        try:
            response = await self.client.post(endpoint, json=data)
            response.raise_for_status()
            return response.json()
        except httpx.HTTPStatusError as e:
            raise OllamaError(
                f"Ollama API error: {e.response.status_code} - {e.response.text}",
                cause=e,
            ) from e
        except OllamaError:
            raise
        except Exception as e:
            raise NetworkError(f"Failed to connect to Ollama: {str(e)}", cause=e) from e

    async def _delete(self, endpoint: str, data: Dict[str, Any]) -> Dict[str, Any]:
        """Make a DELETE request to Ollama API.

        Returns {"status": "success"} when the server replies with an empty
        body (common for DELETE), otherwise the parsed JSON body.
        """
        try:
            response = await self.client.request("DELETE", endpoint, json=data)
            response.raise_for_status()
            # DELETE may return empty body on success
            if response.headers.get("content-length") == "0" or not response.content:
                return {"status": "success"}
            return response.json()
        except httpx.HTTPStatusError as e:
            raise OllamaError(
                f"Ollama API error: {e.response.status_code} - {e.response.text}",
                cause=e,
            ) from e
        except OllamaError:
            raise
        except Exception as e:
            raise NetworkError(f"Failed to connect to Ollama: {str(e)}", cause=e) from e

    async def list(self) -> Dict[str, Any]:
        """List all available models."""
        return await self._get("/api/tags")

    async def show(self, model: str) -> Dict[str, Any]:
        """Show model information."""
        return await self._post("/api/show", {"name": model})

    async def pull(self, model: str) -> Dict[str, Any]:
        """Pull a model (non-streaming; blocks until the pull completes)."""
        return await self._post("/api/pull", {"name": model, "stream": False})

    async def push(self, model: str) -> Dict[str, Any]:
        """Push a model (non-streaming)."""
        return await self._post("/api/push", {"name": model, "stream": False})

    async def copy(self, source: str, destination: str) -> Dict[str, Any]:
        """Copy a model."""
        return await self._post(
            "/api/copy", {"source": source, "destination": destination}
        )

    async def delete(self, model: str) -> Dict[str, Any]:
        """Delete a model."""
        return await self._delete("/api/delete", {"name": model})

    async def create(
        self, name: str, modelfile: str, stream: bool = False
    ) -> Dict[str, Any]:
        """Create a model from Modelfile.

        The "stream" key is only included when streaming is requested.
        """
        data = {"name": name, "modelfile": modelfile}
        if stream:
            data["stream"] = True
        return await self._post("/api/create", data)

    async def generate(
        self,
        model: str,
        prompt: str,
        options: Optional[GenerationOptions] = None,
        stream: bool = False,
    ) -> Dict[str, Any]:
        """Generate text from *prompt* with *model*.

        Only explicitly-set generation options are forwarded
        (exclude_unset=True).
        """
        data = {"model": model, "prompt": prompt, "stream": stream}
        if options:
            data["options"] = options.model_dump(exclude_unset=True)
        return await self._post("/api/generate", data)

    async def chat(
        self,
        model: str,
        messages: List[ChatMessage],
        tools: Optional[List[Tool]] = None,
        options: Optional[GenerationOptions] = None,
        stream: bool = False,
    ) -> Dict[str, Any]:
        """Chat with a model, optionally advertising function-calling tools."""
        data = {
            "model": model,
            "messages": [msg.model_dump(exclude_unset=True) for msg in messages],
            "stream": stream,
        }
        if tools:
            data["tools"] = [tool.model_dump() for tool in tools]
        if options:
            data["options"] = options.model_dump(exclude_unset=True)
        return await self._post("/api/chat", data)

    async def embed(self, model: str, input: Union[str, List[str]]) -> Dict[str, Any]:
        """Generate embeddings for a string or list of strings.

        NOTE: parameter name `input` shadows the builtin but is kept for
        interface compatibility with existing callers.
        """
        return await self._post("/api/embed", {"model": model, "input": input})

    async def ps(self) -> Dict[str, Any]:
        """List running models."""
        return await self._get("/api/ps")