mcp-ollama-python 1.0.2__tar.gz → 1.0.4__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- mcp_ollama_python-1.0.4/PKG-INFO +121 -0
- mcp_ollama_python-1.0.4/README.md +95 -0
- {mcp_ollama_python-1.0.2 → mcp_ollama_python-1.0.4}/pyproject.toml +9 -1
- {mcp_ollama_python-1.0.2 → mcp_ollama_python-1.0.4}/src/mcp_ollama_python/models.py +1 -0
- {mcp_ollama_python-1.0.2 → mcp_ollama_python-1.0.4}/src/mcp_ollama_python/ollama_client.py +6 -3
- mcp_ollama_python-1.0.4/src/mcp_ollama_python/scripts/__init__.py +0 -0
- mcp_ollama_python-1.0.4/src/mcp_ollama_python/scripts/mcp_interactive.py +826 -0
- mcp_ollama_python-1.0.4/src/mcp_ollama_python/scripts/server_control.py +276 -0
- mcp_ollama_python-1.0.2/PKG-INFO +0 -554
- mcp_ollama_python-1.0.2/README.md +0 -531
- {mcp_ollama_python-1.0.2 → mcp_ollama_python-1.0.4}/LICENSE +0 -0
- {mcp_ollama_python-1.0.2 → mcp_ollama_python-1.0.4}/src/mcp_ollama_python/__init__.py +0 -0
- {mcp_ollama_python-1.0.2 → mcp_ollama_python-1.0.4}/src/mcp_ollama_python/__main__.py +0 -0
- {mcp_ollama_python-1.0.2 → mcp_ollama_python-1.0.4}/src/mcp_ollama_python/autoloader.py +0 -0
- {mcp_ollama_python-1.0.2 → mcp_ollama_python-1.0.4}/src/mcp_ollama_python/main.py +0 -0
- {mcp_ollama_python-1.0.2 → mcp_ollama_python-1.0.4}/src/mcp_ollama_python/response_formatter.py +0 -0
- {mcp_ollama_python-1.0.2 → mcp_ollama_python-1.0.4}/src/mcp_ollama_python/server.py +0 -0
- {mcp_ollama_python-1.0.2 → mcp_ollama_python-1.0.4}/src/mcp_ollama_python/tools/__init__.py +0 -0
- {mcp_ollama_python-1.0.2 → mcp_ollama_python-1.0.4}/src/mcp_ollama_python/tools/chat.py +0 -0
- {mcp_ollama_python-1.0.2 → mcp_ollama_python-1.0.4}/src/mcp_ollama_python/tools/delete.py +0 -0
- {mcp_ollama_python-1.0.2 → mcp_ollama_python-1.0.4}/src/mcp_ollama_python/tools/embed.py +0 -0
- {mcp_ollama_python-1.0.2 → mcp_ollama_python-1.0.4}/src/mcp_ollama_python/tools/execute.py +0 -0
- {mcp_ollama_python-1.0.2 → mcp_ollama_python-1.0.4}/src/mcp_ollama_python/tools/generate.py +0 -0
- {mcp_ollama_python-1.0.2 → mcp_ollama_python-1.0.4}/src/mcp_ollama_python/tools/list.py +0 -0
- {mcp_ollama_python-1.0.2 → mcp_ollama_python-1.0.4}/src/mcp_ollama_python/tools/ps.py +0 -0
- {mcp_ollama_python-1.0.2 → mcp_ollama_python-1.0.4}/src/mcp_ollama_python/tools/pull.py +0 -0
- {mcp_ollama_python-1.0.2 → mcp_ollama_python-1.0.4}/src/mcp_ollama_python/tools/show.py +0 -0
mcp_ollama_python-1.0.4/PKG-INFO

@@ -0,0 +1,121 @@
+Metadata-Version: 2.4
+Name: mcp-ollama-python
+Version: 1.0.4
+Summary: Model Context Protocol server that proxies local Ollama to MCP clients like Windsurf and VS Code
+License: MIT
+License-File: LICENSE
+Author: Pedja Blagojevic
+Author-email: pb@internetics.net
+Requires-Python: >=3.10,<3.15
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
+Classifier: Programming Language :: Python :: 3.14
+Requires-Dist: httpx (>=0.27.0,<0.28.0)
+Requires-Dist: mcp (>=1.0.0,<2.0.0)
+Requires-Dist: psutil (>=7.1.3,<8.0.0)
+Requires-Dist: pydantic (>=2.7.0,<3.0.0)
+Requires-Dist: rich (>=13.7.0,<14.0.0)
+Requires-Dist: tzdata (>=2024.1,<2025.0)
+Project-URL: Homepage, https://github.com/pblagoje/mcp-ollama-python
+Description-Content-Type: text/markdown
+
+# 🦙 Ollama MCP Server (Python)
+
+**Supercharge your AI assistant with local LLM access**
+
+[](https://python.org)
+[](https://python-poetry.org)
+[](https://github.com/anthropics/model-context-protocol)
+[](LICENSE)
+
+A Python [MCP](https://github.com/anthropics/model-context-protocol) server that exposes your local [Ollama](https://ollama.ai) models as tools for AI assistants like **Windsurf**, **VS Code**, **Claude Desktop**, and more.
+
+📚 **[Full Documentation](https://pblagoje.github.io/mcp-ollama-python/)**
+
+---
+
+## What It Does
+
+Connect your local LLMs to any MCP-compatible AI assistant. No cloud APIs needed.
+
+| Tool | What it does |
+|------|-------------|
+| `ollama_chat` | Chat with any local model (multi-turn, tool-calling) |
+| `ollama_generate` | Generate text completions |
+| `ollama_embed` | Create vector embeddings |
+| `ollama_list` | List installed models |
+| `ollama_show` | Inspect model details |
+| `ollama_pull` | Download new models |
+| `ollama_delete` | Remove models |
+| `ollama_ps` | List running models |
+
+## Quick Start
+
+**Prerequisites:** Python 3.10+, [Ollama](https://ollama.ai) running locally
+
+```bash
+pip install mcp-ollama-python
+```
+
+### Windsurf / VS Code
+
+Add to your MCP config (`mcp_config.json`):
+
+```json
+{
+  "mcpServers": {
+    "ollama": {
+      "command": "py",
+      "args": ["-m", "mcp_ollama_python"],
+      "disabled": false
+    }
+  }
+}
+```
+
+Restart your editor — done. Your AI assistant can now use local Ollama models.
+
+### Try It
+
+Type in your AI assistant's chat:
+
+> **MCP Tool: ollama / ollama_chat** — Use model llama3.1 and explain quantum computing
+
+## Key Features
+
+- 🔧 **8 MCP tools** — Full Ollama SDK access
+- 🔄 **Hot-swap architecture** — Drop a file in `tools/`, it's auto-discovered
+- 🎯 **Type-safe** — Pydantic models throughout
+- 🚀 **Lightweight** — Minimal dependencies, fast startup
+- 🔌 **Universal** — Works with any MCP client
+
+## Documentation
+
+| Guide | Description |
+|-------|-------------|
+| [Installation](https://pblagoje.github.io/mcp-ollama-python/installation/) | Setup and prerequisites |
+| [Available Tools](https://pblagoje.github.io/mcp-ollama-python/tools/) | All tools with examples |
+| [Configuration](https://pblagoje.github.io/mcp-ollama-python/configuration/) | Environment variables, model config |
+| [Windsurf Integration](https://pblagoje.github.io/mcp-ollama-python/windsurf/) | Complete Windsurf setup guide |
+| [VS Code Integration](https://pblagoje.github.io/mcp-ollama-python/vscode/) | VS Code setup |
+| [Architecture](https://pblagoje.github.io/mcp-ollama-python/architecture/) | How it works, adding tools |
+| [Server Control](https://pblagoje.github.io/mcp-ollama-python/SERVER_CONTROL/) | Start/stop/manage the server |
+| [Interactive Manager](https://pblagoje.github.io/mcp-ollama-python/mcp_interactive/) | Menu-driven management UI |
+| [Development](https://pblagoje.github.io/mcp-ollama-python/development/) | Contributing, code quality |
+
+## License
+
+[MIT](LICENSE)
+
+---
+
+<div align="center">
+
+Made with ❤️ using Python, Poetry, and Ollama
+
+</div>
+
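The Quick Start above wires the server into Windsurf or VS Code by launch command; the same stdio server can also be exercised directly with the `mcp` client SDK, which is a declared dependency of this package. The sketch below is illustrative only: it launches the module with `python` rather than the Windows `py` launcher used in the packaged config, and the `ollama_chat` argument names (`model`, `messages`) are assumptions that this diff does not confirm.

```python
# Illustrative sketch: start the packaged server over stdio and call one of its
# tools with the mcp client SDK. The ollama_chat argument names below are
# assumptions, not taken from this diff.
import asyncio

from mcp import ClientSession, StdioServerParameters
from mcp.client.stdio import stdio_client

params = StdioServerParameters(command="python", args=["-m", "mcp_ollama_python"])


async def main() -> None:
    async with stdio_client(params) as (read, write):
        async with ClientSession(read, write) as session:
            await session.initialize()
            tools = await session.list_tools()
            print([tool.name for tool in tools.tools])  # expect the eight ollama_* tools
            result = await session.call_tool(
                "ollama_chat",
                {
                    "model": "llama3.1",
                    "messages": [{"role": "user", "content": "Explain quantum computing"}],
                },
            )
            print(result.content)


asyncio.run(main())
```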
mcp_ollama_python-1.0.4/README.md

@@ -0,0 +1,95 @@
+# 🦙 Ollama MCP Server (Python)
+
+**Supercharge your AI assistant with local LLM access**
+
+[](https://python.org)
+[](https://python-poetry.org)
+[](https://github.com/anthropics/model-context-protocol)
+[](LICENSE)
+
+A Python [MCP](https://github.com/anthropics/model-context-protocol) server that exposes your local [Ollama](https://ollama.ai) models as tools for AI assistants like **Windsurf**, **VS Code**, **Claude Desktop**, and more.
+
+📚 **[Full Documentation](https://pblagoje.github.io/mcp-ollama-python/)**
+
+---
+
+## What It Does
+
+Connect your local LLMs to any MCP-compatible AI assistant. No cloud APIs needed.
+
+| Tool | What it does |
+|------|-------------|
+| `ollama_chat` | Chat with any local model (multi-turn, tool-calling) |
+| `ollama_generate` | Generate text completions |
+| `ollama_embed` | Create vector embeddings |
+| `ollama_list` | List installed models |
+| `ollama_show` | Inspect model details |
+| `ollama_pull` | Download new models |
+| `ollama_delete` | Remove models |
+| `ollama_ps` | List running models |
+
+## Quick Start
+
+**Prerequisites:** Python 3.10+, [Ollama](https://ollama.ai) running locally
+
+```bash
+pip install mcp-ollama-python
+```
+
+### Windsurf / VS Code
+
+Add to your MCP config (`mcp_config.json`):
+
+```json
+{
+  "mcpServers": {
+    "ollama": {
+      "command": "py",
+      "args": ["-m", "mcp_ollama_python"],
+      "disabled": false
+    }
+  }
+}
+```
+
+Restart your editor — done. Your AI assistant can now use local Ollama models.
+
+### Try It
+
+Type in your AI assistant's chat:
+
+> **MCP Tool: ollama / ollama_chat** — Use model llama3.1 and explain quantum computing
+
+## Key Features
+
+- 🔧 **8 MCP tools** — Full Ollama SDK access
+- 🔄 **Hot-swap architecture** — Drop a file in `tools/`, it's auto-discovered
+- 🎯 **Type-safe** — Pydantic models throughout
+- 🚀 **Lightweight** — Minimal dependencies, fast startup
+- 🔌 **Universal** — Works with any MCP client
+
+## Documentation
+
+| Guide | Description |
+|-------|-------------|
+| [Installation](https://pblagoje.github.io/mcp-ollama-python/installation/) | Setup and prerequisites |
+| [Available Tools](https://pblagoje.github.io/mcp-ollama-python/tools/) | All tools with examples |
+| [Configuration](https://pblagoje.github.io/mcp-ollama-python/configuration/) | Environment variables, model config |
+| [Windsurf Integration](https://pblagoje.github.io/mcp-ollama-python/windsurf/) | Complete Windsurf setup guide |
+| [VS Code Integration](https://pblagoje.github.io/mcp-ollama-python/vscode/) | VS Code setup |
+| [Architecture](https://pblagoje.github.io/mcp-ollama-python/architecture/) | How it works, adding tools |
+| [Server Control](https://pblagoje.github.io/mcp-ollama-python/SERVER_CONTROL/) | Start/stop/manage the server |
+| [Interactive Manager](https://pblagoje.github.io/mcp-ollama-python/mcp_interactive/) | Menu-driven management UI |
+| [Development](https://pblagoje.github.io/mcp-ollama-python/development/) | Contributing, code quality |
+
+## License
+
+[MIT](LICENSE)
+
+---
+
+<div align="center">
+
+Made with ❤️ using Python, Poetry, and Ollama
+
+</div>
{mcp_ollama_python-1.0.2 → mcp_ollama_python-1.0.4}/pyproject.toml

@@ -1,10 +1,12 @@
 [tool.poetry]
 name = "mcp-ollama-python"
-version = "1.0.2"
+version = "1.0.4"
 description = "Model Context Protocol server that proxies local Ollama to MCP clients like Windsurf and VS Code"
 authors = ["Pedja Blagojevic <pb@internetics.net>"]
 readme = "README.md"
 packages = [{include = "mcp_ollama_python", from = "src"}]
+license = "MIT"
+homepage = "https://github.com/pblagoje/mcp-ollama-python"
 
 [tool.poetry.dependencies]
 python = ">=3.10,<3.15"
@@ -17,6 +19,8 @@ tzdata = "^2024.1"
 
 [tool.poetry.scripts]
 mcp-ollama-python = "mcp_ollama_python.main:run"
+mcp-interactive = "mcp_ollama_python.scripts.mcp_interactive:main"
+mcp-server-control = "mcp_ollama_python.scripts.server_control:main"
 
 [tool.poetry.group.dev.dependencies]
 pytest = "^8.0.0"
@@ -26,6 +30,10 @@ pre-commit = "^4.5.1"
 black = "^26.1.0"
 flake8 = "^7.3.0"
 
+[tool.poetry.group.docs.dependencies]
+mkdocs = "^1.6.0"
+mkdocs-material = "^9.5.0"
+
 [tool.ruff]
 line-length = 100
 target-version = "py310"
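The two `[tool.poetry.scripts]` additions register `mcp-interactive` and `mcp-server-control` as console commands alongside the existing `mcp-ollama-python` once the 1.0.4 wheel is installed. A minimal sketch for checking that, using only the standard library; the entry-point names and `module:function` targets are copied from the diff above.

```python
# Minimal sketch: confirm the console-script entry points declared in
# [tool.poetry.scripts] exist in the installed distribution's metadata.
from importlib.metadata import entry_points

# Names and "module:function" targets copied from the pyproject.toml diff above.
expected = {
    "mcp-ollama-python": "mcp_ollama_python.main:run",
    "mcp-interactive": "mcp_ollama_python.scripts.mcp_interactive:main",
    "mcp-server-control": "mcp_ollama_python.scripts.server_control:main",
}

installed = {ep.name: ep.value for ep in entry_points(group="console_scripts")}
for name, target in expected.items():
    status = "ok" if installed.get(name) == target else "missing or mismatched"
    print(f"{name} -> {target}: {status}")
```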
{mcp_ollama_python-1.0.2 → mcp_ollama_python-1.0.4}/src/mcp_ollama_python/ollama_client.py

@@ -3,9 +3,10 @@ Ollama HTTP client wrapper
 """
 
 import os
-import httpx
 from typing import Any, Dict, List, Optional, Union
 
+import httpx
+
 try:
     from mcp_ollama_python.models import (
         GenerationOptions,
@@ -160,9 +161,11 @@ class OllamaClient:
             data["options"] = options.model_dump(exclude_unset=True)
         return await self._post("/api/chat", data)
 
-    async def embed(
+    async def embed(
+        self, model: str, input_text: Union[str, List[str]]
+    ) -> Dict[str, Any]:
         """Generate embeddings"""
-        return await self._post("/api/embed", {"model": model, "input":
+        return await self._post("/api/embed", {"model": model, "input": input_text})
 
     async def ps(self) -> Dict[str, Any]:
         """List running models"""
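The second hunk reflows `embed()` and completes its payload: the client POSTs `{"model": ..., "input": ...}` to Ollama's `/api/embed` endpoint. Below is a self-contained sketch of the same request using `httpx` (a declared dependency); the default base URL and the example embedding model are assumptions, not taken from the diff.

```python
# Self-contained sketch of the request embed() issues per the diff above:
# POST /api/embed with {"model": ..., "input": ...}. Assumes Ollama is running
# at its default local address and that an embedding model has already been
# pulled (e.g. `ollama pull nomic-embed-text`).
import asyncio
from typing import Any, Dict, List, Union

import httpx


async def embed(model: str, input_text: Union[str, List[str]]) -> Dict[str, Any]:
    async with httpx.AsyncClient(base_url="http://localhost:11434", timeout=60.0) as client:
        resp = await client.post("/api/embed", json={"model": model, "input": input_text})
        resp.raise_for_status()
        return resp.json()


if __name__ == "__main__":
    result = asyncio.run(embed("nomic-embed-text", ["hello", "world"]))
    print(len(result.get("embeddings", [])))  # one vector per input string
```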
File without changes