mcpsdk 0.1.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- mcpsdk-0.1.0/PKG-INFO +129 -0
- mcpsdk-0.1.0/README.md +108 -0
- mcpsdk-0.1.0/mcpeasy/__init__.py +7 -0
- mcpsdk-0.1.0/mcpeasy/client.py +168 -0
- mcpsdk-0.1.0/mcpeasy/server.py +55 -0
- mcpsdk-0.1.0/mcpsdk.egg-info/PKG-INFO +129 -0
- mcpsdk-0.1.0/mcpsdk.egg-info/SOURCES.txt +10 -0
- mcpsdk-0.1.0/mcpsdk.egg-info/dependency_links.txt +1 -0
- mcpsdk-0.1.0/mcpsdk.egg-info/requires.txt +2 -0
- mcpsdk-0.1.0/mcpsdk.egg-info/top_level.txt +1 -0
- mcpsdk-0.1.0/pyproject.toml +35 -0
- mcpsdk-0.1.0/setup.cfg +4 -0
mcpsdk-0.1.0/PKG-INFO
ADDED
|
@@ -0,0 +1,129 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: mcpsdk
|
|
3
|
+
Version: 0.1.0
|
|
4
|
+
Summary: The simplest way to build and connect MCP servers and clients.
|
|
5
|
+
License: MIT
|
|
6
|
+
Project-URL: Homepage, https://github.com/yourname/mcpeasy
|
|
7
|
+
Project-URL: Bug Tracker, https://github.com/yourname/mcpeasy/issues
|
|
8
|
+
Keywords: mcp,ai,agent,llm,tools,model-context-protocol
|
|
9
|
+
Classifier: Programming Language :: Python :: 3
|
|
10
|
+
Classifier: Programming Language :: Python :: 3.11
|
|
11
|
+
Classifier: Programming Language :: Python :: 3.12
|
|
12
|
+
Classifier: Programming Language :: Python :: 3.13
|
|
13
|
+
Classifier: License :: OSI Approved :: MIT License
|
|
14
|
+
Classifier: Operating System :: OS Independent
|
|
15
|
+
Classifier: Intended Audience :: Developers
|
|
16
|
+
Classifier: Topic :: Software Development :: Libraries :: Python Modules
|
|
17
|
+
Requires-Python: >=3.11
|
|
18
|
+
Description-Content-Type: text/markdown
|
|
19
|
+
Requires-Dist: mcp>=1.0.0
|
|
20
|
+
Requires-Dist: httpx>=0.27.0
|
|
21
|
+
|
|
22
|
+
# MCPeasy
|
|
23
|
+
|
|
24
|
+
The simplest way to build and connect MCP (Model Context Protocol) servers and clients in Python.
|
|
25
|
+
|
|
26
|
+
No async. No boilerplate. Just define your tools and go.
|
|
27
|
+
|
|
28
|
+
## Install
|
|
29
|
+
|
|
30
|
+
```bash
|
|
31
|
+
pip install mcpsdk
|
|
32
|
+
```
|
|
33
|
+
|
|
34
|
+
## Quickstart
|
|
35
|
+
|
|
36
|
+
### Build a server
|
|
37
|
+
|
|
38
|
+
```python
|
|
39
|
+
from mcpeasy import MCPServer
|
|
40
|
+
|
|
41
|
+
server = MCPServer("my-server")
|
|
42
|
+
|
|
43
|
+
@server.tool
|
|
44
|
+
def get_weather(city: str) -> str:
|
|
45
|
+
"""Get the current weather for a city."""
|
|
46
|
+
return f"Sunny in {city}, 22C"
|
|
47
|
+
|
|
48
|
+
@server.tool
|
|
49
|
+
def calculate(expression: str) -> str:
|
|
50
|
+
"""Evaluate a math expression."""
|
|
51
|
+
return str(eval(expression))
|
|
52
|
+
|
|
53
|
+
server.run() # stdio (default)
|
|
54
|
+
server.run(mode="sse") # SSE on http://0.0.0.0:8000
|
|
55
|
+
```
|
|
56
|
+
|
|
57
|
+
### Connect a client
|
|
58
|
+
|
|
59
|
+
```python
|
|
60
|
+
from mcpeasy import MCPClient
|
|
61
|
+
|
|
62
|
+
# SSE — server already running
|
|
63
|
+
with MCPClient.from_sse("http://localhost:8000/sse") as client:
|
|
64
|
+
tools = client.list_tools()
|
|
65
|
+
result = client.call_tool("get_weather", {"city": "Tokyo"})
|
|
66
|
+
print(result)
|
|
67
|
+
|
|
68
|
+
# stdio — client spawns the server
|
|
69
|
+
with MCPClient.from_stdio("python", ["server.py"]) as client:
|
|
70
|
+
tools = client.list_tools()
|
|
71
|
+
result = client.call_tool("calculate", {"expression": "12 * 7"})
|
|
72
|
+
print(result)
|
|
73
|
+
```
|
|
74
|
+
|
|
75
|
+
### Use with any LLM
|
|
76
|
+
|
|
77
|
+
`list_tools()` returns raw MCP format — convert to your LLM's format yourself:
|
|
78
|
+
|
|
79
|
+
```python
|
|
80
|
+
from mcpeasy import MCPClient
|
|
81
|
+
|
|
82
|
+
with MCPClient.from_sse("http://localhost:8000/sse") as client:
|
|
83
|
+
tools = client.list_tools()
|
|
84
|
+
# tools → [{ "name", "description", "parameters" }]
|
|
85
|
+
|
|
86
|
+
# Anthropic
|
|
87
|
+
anthropic_tools = [{"name": t["name"], "description": t["description"], "input_schema": t["parameters"]} for t in tools]
|
|
88
|
+
|
|
89
|
+
# OpenAI
|
|
90
|
+
openai_tools = [{"type": "function", "function": {"name": t["name"], "description": t["description"], "parameters": t["parameters"]}} for t in tools]
|
|
91
|
+
|
|
92
|
+
# when LLM picks a tool:
|
|
93
|
+
result = client.call_tool(tool_name, params)
|
|
94
|
+
```
|
|
95
|
+
|
|
96
|
+
## API
|
|
97
|
+
|
|
98
|
+
### MCPServer
|
|
99
|
+
|
|
100
|
+
| Method | Description |
|
|
101
|
+
|---|---|
|
|
102
|
+
| `MCPServer(name)` | Create a server |
|
|
103
|
+
| `@server.tool` | Register a function as a tool |
|
|
104
|
+
| `server.run(mode, host, port)` | Start the server (`mode="stdio"` or `"sse"`) |
|
|
105
|
+
|
|
106
|
+
### MCPClient
|
|
107
|
+
|
|
108
|
+
| Method | Description |
|
|
109
|
+
|---|---|
|
|
110
|
+
| `MCPClient.from_sse(url)` | Create client for remote SSE server |
|
|
111
|
+
| `MCPClient.from_stdio(command, args)` | Create client for local stdio server |
|
|
112
|
+
| `client.connect()` | Open connection |
|
|
113
|
+
| `client.disconnect()` | Close connection |
|
|
114
|
+
| `client.list_tools()` | Get all tools from server |
|
|
115
|
+
| `client.call_tool(name, params)` | Call a tool, get result as string |
|
|
116
|
+
| `client.tool_info(name)` | Get details of one tool |
|
|
117
|
+
|
|
118
|
+
## Transport modes
|
|
119
|
+
|
|
120
|
+
| | stdio | SSE |
|
|
121
|
+
|---|---|---|
|
|
122
|
+
| Where | Same machine | Anywhere |
|
|
123
|
+
| Server startup | Client spawns it | Already running |
|
|
124
|
+
| Connect with | `from_stdio(cmd, args)` | `from_sse(url)` |
|
|
125
|
+
| Best for | Local tools | Remote / shared servers |
|
|
126
|
+
|
|
127
|
+
## License
|
|
128
|
+
|
|
129
|
+
MIT
|
mcpsdk-0.1.0/README.md
ADDED
|
@@ -0,0 +1,108 @@
|
|
|
1
|
+
# MCPeasy
|
|
2
|
+
|
|
3
|
+
The simplest way to build and connect MCP (Model Context Protocol) servers and clients in Python.
|
|
4
|
+
|
|
5
|
+
No async. No boilerplate. Just define your tools and go.
|
|
6
|
+
|
|
7
|
+
## Install
|
|
8
|
+
|
|
9
|
+
```bash
|
|
10
|
+
pip install mcpsdk
|
|
11
|
+
```
|
|
12
|
+
|
|
13
|
+
## Quickstart
|
|
14
|
+
|
|
15
|
+
### Build a server
|
|
16
|
+
|
|
17
|
+
```python
|
|
18
|
+
from mcpeasy import MCPServer
|
|
19
|
+
|
|
20
|
+
server = MCPServer("my-server")
|
|
21
|
+
|
|
22
|
+
@server.tool
|
|
23
|
+
def get_weather(city: str) -> str:
|
|
24
|
+
"""Get the current weather for a city."""
|
|
25
|
+
return f"Sunny in {city}, 22C"
|
|
26
|
+
|
|
27
|
+
@server.tool
|
|
28
|
+
def calculate(expression: str) -> str:
|
|
29
|
+
"""Evaluate a math expression."""
|
|
30
|
+
return str(eval(expression))
|
|
31
|
+
|
|
32
|
+
server.run() # stdio (default)
|
|
33
|
+
server.run(mode="sse") # SSE on http://0.0.0.0:8000
|
|
34
|
+
```
|
|
35
|
+
|
|
36
|
+
### Connect a client
|
|
37
|
+
|
|
38
|
+
```python
|
|
39
|
+
from mcpeasy import MCPClient
|
|
40
|
+
|
|
41
|
+
# SSE — server already running
|
|
42
|
+
with MCPClient.from_sse("http://localhost:8000/sse") as client:
|
|
43
|
+
tools = client.list_tools()
|
|
44
|
+
result = client.call_tool("get_weather", {"city": "Tokyo"})
|
|
45
|
+
print(result)
|
|
46
|
+
|
|
47
|
+
# stdio — client spawns the server
|
|
48
|
+
with MCPClient.from_stdio("python", ["server.py"]) as client:
|
|
49
|
+
tools = client.list_tools()
|
|
50
|
+
result = client.call_tool("calculate", {"expression": "12 * 7"})
|
|
51
|
+
print(result)
|
|
52
|
+
```
|
|
53
|
+
|
|
54
|
+
### Use with any LLM
|
|
55
|
+
|
|
56
|
+
`list_tools()` returns raw MCP format — convert to your LLM's format yourself:
|
|
57
|
+
|
|
58
|
+
```python
|
|
59
|
+
from mcpeasy import MCPClient
|
|
60
|
+
|
|
61
|
+
with MCPClient.from_sse("http://localhost:8000/sse") as client:
|
|
62
|
+
tools = client.list_tools()
|
|
63
|
+
# tools → [{ "name", "description", "parameters" }]
|
|
64
|
+
|
|
65
|
+
# Anthropic
|
|
66
|
+
anthropic_tools = [{"name": t["name"], "description": t["description"], "input_schema": t["parameters"]} for t in tools]
|
|
67
|
+
|
|
68
|
+
# OpenAI
|
|
69
|
+
openai_tools = [{"type": "function", "function": {"name": t["name"], "description": t["description"], "parameters": t["parameters"]}} for t in tools]
|
|
70
|
+
|
|
71
|
+
# when LLM picks a tool:
|
|
72
|
+
result = client.call_tool(tool_name, params)
|
|
73
|
+
```
|
|
74
|
+
|
|
75
|
+
## API
|
|
76
|
+
|
|
77
|
+
### MCPServer
|
|
78
|
+
|
|
79
|
+
| Method | Description |
|
|
80
|
+
|---|---|
|
|
81
|
+
| `MCPServer(name)` | Create a server |
|
|
82
|
+
| `@server.tool` | Register a function as a tool |
|
|
83
|
+
| `server.run(mode, host, port)` | Start the server (`mode="stdio"` or `"sse"`) |
|
|
84
|
+
|
|
85
|
+
### MCPClient
|
|
86
|
+
|
|
87
|
+
| Method | Description |
|
|
88
|
+
|---|---|
|
|
89
|
+
| `MCPClient.from_sse(url)` | Create client for remote SSE server |
|
|
90
|
+
| `MCPClient.from_stdio(command, args)` | Create client for local stdio server |
|
|
91
|
+
| `client.connect()` | Open connection |
|
|
92
|
+
| `client.disconnect()` | Close connection |
|
|
93
|
+
| `client.list_tools()` | Get all tools from server |
|
|
94
|
+
| `client.call_tool(name, params)` | Call a tool, get result as string |
|
|
95
|
+
| `client.tool_info(name)` | Get details of one tool |
|
|
96
|
+
|
|
97
|
+
## Transport modes
|
|
98
|
+
|
|
99
|
+
| | stdio | SSE |
|
|
100
|
+
|---|---|---|
|
|
101
|
+
| Where | Same machine | Anywhere |
|
|
102
|
+
| Server startup | Client spawns it | Already running |
|
|
103
|
+
| Connect with | `from_stdio(cmd, args)` | `from_sse(url)` |
|
|
104
|
+
| Best for | Local tools | Remote / shared servers |
|
|
105
|
+
|
|
106
|
+
## License
|
|
107
|
+
|
|
108
|
+
MIT
|
|
@@ -0,0 +1,168 @@
|
|
|
1
|
+
# mcpeasy/client.py
|
|
2
|
+
|
|
3
|
+
import asyncio
|
|
4
|
+
import threading
|
|
5
|
+
import queue
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
class MCPClient:
|
|
9
|
+
"""
|
|
10
|
+
A simple MCP client. Connect to a server, list tools, call them.
|
|
11
|
+
|
|
12
|
+
Usage — SSE (remote or local server already running):
|
|
13
|
+
with MCPClient.from_sse("http://localhost:8000/sse") as client:
|
|
14
|
+
print(client.list_tools())
|
|
15
|
+
print(client.call_tool("get_weather", {"city": "Tokyo"}))
|
|
16
|
+
|
|
17
|
+
Usage — stdio (client spawns the server as a subprocess):
|
|
18
|
+
with MCPClient.from_stdio("python", ["server.py"]) as client:
|
|
19
|
+
print(client.list_tools())
|
|
20
|
+
print(client.call_tool("get_weather", {"city": "Tokyo"}))
|
|
21
|
+
"""
|
|
22
|
+
|
|
23
|
+
def __init__(self, transport: str, **kwargs):
|
|
24
|
+
self._transport = transport
|
|
25
|
+
self._kwargs = kwargs
|
|
26
|
+
self._session = None
|
|
27
|
+
self._loop = None
|
|
28
|
+
self._thread = None
|
|
29
|
+
self._ready = threading.Event()
|
|
30
|
+
self._stop = threading.Event()
|
|
31
|
+
self._error = None
|
|
32
|
+
self._tools_cache = None
|
|
33
|
+
|
|
34
|
+
@classmethod
|
|
35
|
+
def from_sse(cls, url: str) -> "MCPClient":
|
|
36
|
+
return cls(transport="sse", url=url)
|
|
37
|
+
|
|
38
|
+
@classmethod
|
|
39
|
+
def from_stdio(cls, command: str, args: list = None) -> "MCPClient":
|
|
40
|
+
return cls(transport="stdio", command=command, args=args or [])
|
|
41
|
+
|
|
42
|
+
# ── Lifecycle ─────────────────────────────────────────────
|
|
43
|
+
|
|
44
|
+
def connect(self):
|
|
45
|
+
self._ready = threading.Event()
|
|
46
|
+
self._stop = threading.Event()
|
|
47
|
+
self._error = None
|
|
48
|
+
|
|
49
|
+
def run():
|
|
50
|
+
self._loop = asyncio.new_event_loop()
|
|
51
|
+
asyncio.set_event_loop(self._loop)
|
|
52
|
+
self._loop.run_until_complete(self._run_session())
|
|
53
|
+
|
|
54
|
+
self._thread = threading.Thread(target=run, daemon=True)
|
|
55
|
+
self._thread.start()
|
|
56
|
+
self._ready.wait()
|
|
57
|
+
if self._error:
|
|
58
|
+
raise self._error
|
|
59
|
+
|
|
60
|
+
def disconnect(self):
|
|
61
|
+
if self._stop:
|
|
62
|
+
self._stop.set()
|
|
63
|
+
if self._thread:
|
|
64
|
+
self._thread.join(timeout=5)
|
|
65
|
+
self._loop = None
|
|
66
|
+
self._thread = None
|
|
67
|
+
|
|
68
|
+
def __enter__(self):
|
|
69
|
+
self.connect()
|
|
70
|
+
return self
|
|
71
|
+
|
|
72
|
+
def __exit__(self, *_):
|
|
73
|
+
self.disconnect()
|
|
74
|
+
|
|
75
|
+
# ── Public API ────────────────────────────────────────────
|
|
76
|
+
|
|
77
|
+
def list_tools(self) -> list[dict]:
|
|
78
|
+
self._ensure_connected()
|
|
79
|
+
if self._tools_cache is None:
|
|
80
|
+
self._tools_cache = self._call(self._async_list_tools())
|
|
81
|
+
return self._tools_cache
|
|
82
|
+
|
|
83
|
+
def call_tool(self, tool_name: str, params: dict = None) -> str:
|
|
84
|
+
self._ensure_connected()
|
|
85
|
+
return self._call(self._async_call_tool(tool_name, params or {}))
|
|
86
|
+
|
|
87
|
+
def tool_info(self, tool_name: str) -> dict | None:
|
|
88
|
+
return next((t for t in self.list_tools() if t["name"] == tool_name), None)
|
|
89
|
+
|
|
90
|
+
# ── Internal ──────────────────────────────────────────────
|
|
91
|
+
|
|
92
|
+
def _call(self, coro):
|
|
93
|
+
"""Submit coroutine to background loop, block until done."""
|
|
94
|
+
result_q = queue.Queue()
|
|
95
|
+
|
|
96
|
+
async def runner():
|
|
97
|
+
try:
|
|
98
|
+
result_q.put(("ok", await coro))
|
|
99
|
+
except Exception as e:
|
|
100
|
+
result_q.put(("err", e))
|
|
101
|
+
|
|
102
|
+
asyncio.run_coroutine_threadsafe(runner(), self._loop)
|
|
103
|
+
tag, value = result_q.get()
|
|
104
|
+
if tag == "err":
|
|
105
|
+
raise value
|
|
106
|
+
return value
|
|
107
|
+
|
|
108
|
+
async def _run_session(self):
|
|
109
|
+
"""
|
|
110
|
+
Opens transport + session in ONE continuous async context.
|
|
111
|
+
Signals ready, then stays alive until disconnect() is called.
|
|
112
|
+
This is required by anyio — open and close must be in same task.
|
|
113
|
+
"""
|
|
114
|
+
from mcp import ClientSession
|
|
115
|
+
|
|
116
|
+
try:
|
|
117
|
+
if self._transport == "sse":
|
|
118
|
+
from mcp.client.sse import sse_client
|
|
119
|
+
cm = sse_client(self._kwargs["url"])
|
|
120
|
+
elif self._transport == "stdio":
|
|
121
|
+
from mcp import StdioServerParameters
|
|
122
|
+
from mcp.client.stdio import stdio_client
|
|
123
|
+
cm = stdio_client(StdioServerParameters(
|
|
124
|
+
command=self._kwargs["command"],
|
|
125
|
+
args=self._kwargs["args"],
|
|
126
|
+
))
|
|
127
|
+
else:
|
|
128
|
+
raise ValueError(f"Unknown transport: {self._transport!r}")
|
|
129
|
+
|
|
130
|
+
async with cm as (read, write):
|
|
131
|
+
async with ClientSession(read, write) as session:
|
|
132
|
+
await session.initialize()
|
|
133
|
+
self._session = session
|
|
134
|
+
self._ready.set() # unblock connect()
|
|
135
|
+
|
|
136
|
+
# stay alive until disconnect() fires
|
|
137
|
+
while not self._stop.is_set():
|
|
138
|
+
await asyncio.sleep(0.05)
|
|
139
|
+
|
|
140
|
+
except Exception as e:
|
|
141
|
+
self._error = e
|
|
142
|
+
self._ready.set() # unblock connect() so it raises
|
|
143
|
+
finally:
|
|
144
|
+
self._session = None
|
|
145
|
+
|
|
146
|
+
async def _async_list_tools(self) -> list[dict]:
|
|
147
|
+
response = await self._session.list_tools()
|
|
148
|
+
return [
|
|
149
|
+
{
|
|
150
|
+
"name": t.name,
|
|
151
|
+
"description": t.description or "",
|
|
152
|
+
"parameters": t.inputSchema or {},
|
|
153
|
+
}
|
|
154
|
+
for t in response.tools
|
|
155
|
+
]
|
|
156
|
+
|
|
157
|
+
async def _async_call_tool(self, tool_name: str, params: dict) -> str:
|
|
158
|
+
result = await self._session.call_tool(tool_name, params)
|
|
159
|
+
return "\n".join(
|
|
160
|
+
block.text for block in result.content if hasattr(block, "text")
|
|
161
|
+
)
|
|
162
|
+
|
|
163
|
+
def _ensure_connected(self):
|
|
164
|
+
if self._session is None:
|
|
165
|
+
raise RuntimeError(
|
|
166
|
+
"Not connected. Call client.connect() first, "
|
|
167
|
+
"or use: with MCPClient.from_sse(...) as client: ..."
|
|
168
|
+
)
|
|
@@ -0,0 +1,55 @@
|
|
|
1
|
+
# mcpeasy/server.py
|
|
2
|
+
|
|
3
|
+
from mcp.server.fastmcp import FastMCP
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
class MCPServer:
    """
    A simple MCP server. Define tools on it, then run it.

    Usage:
        server = MCPServer("my-server")

        @server.tool
        def get_weather(city: str) -> str:
            \"\"\"Get weather for a city.\"\"\"
            return f"Sunny in {city}"

        server.run()            # stdio (default)
        server.run(mode="sse")  # SSE on http://0.0.0.0:8000
    """

    def __init__(self, name: str = "mcp-server"):
        """Create a server backed by FastMCP, identified as *name*."""
        self._mcp = FastMCP(name)
        self.name = name

    def tool(self, fn):
        """
        Decorator to register a function as an MCP tool.

        Example:
            @server.tool
            def add(a: int, b: int) -> str:
                \"\"\"Add two numbers.\"\"\"
                return str(a + b)
        """
        self._mcp.tool()(fn)
        return fn  # return the original so it stays directly callable

    def run(self, mode: str = "stdio", host: str = "0.0.0.0", port: int = 8000):
        """
        Start the server.

        Args:
            mode: "stdio" (default) or "sse"
            host: host to bind to in SSE mode (default 0.0.0.0)
            port: port to listen on in SSE mode (default 8000)

        Raises:
            ValueError: if *mode* is neither "stdio" nor "sse" (previously
                any unknown mode silently fell back to stdio, hiding typos).

        stdio → client spawns this process directly
        sse   → server runs independently, clients connect via HTTP
        """
        if mode == "sse":
            print(f"[{self.name}] running in SSE mode on http://{host}:{port}/sse")
            # NOTE(review): assumes this mcp version's FastMCP.run() accepts
            # host/port keywords — confirm against the installed SDK.
            self._mcp.run(transport="sse", host=host, port=port)
        elif mode == "stdio":
            self._mcp.run()
        else:
            raise ValueError(f"Unknown mode: {mode!r} (expected 'stdio' or 'sse')")
|
|
@@ -0,0 +1,129 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: mcpsdk
|
|
3
|
+
Version: 0.1.0
|
|
4
|
+
Summary: The simplest way to build and connect MCP servers and clients.
|
|
5
|
+
License: MIT
|
|
6
|
+
Project-URL: Homepage, https://github.com/yourname/mcpeasy
|
|
7
|
+
Project-URL: Bug Tracker, https://github.com/yourname/mcpeasy/issues
|
|
8
|
+
Keywords: mcp,ai,agent,llm,tools,model-context-protocol
|
|
9
|
+
Classifier: Programming Language :: Python :: 3
|
|
10
|
+
Classifier: Programming Language :: Python :: 3.11
|
|
11
|
+
Classifier: Programming Language :: Python :: 3.12
|
|
12
|
+
Classifier: Programming Language :: Python :: 3.13
|
|
13
|
+
Classifier: License :: OSI Approved :: MIT License
|
|
14
|
+
Classifier: Operating System :: OS Independent
|
|
15
|
+
Classifier: Intended Audience :: Developers
|
|
16
|
+
Classifier: Topic :: Software Development :: Libraries :: Python Modules
|
|
17
|
+
Requires-Python: >=3.11
|
|
18
|
+
Description-Content-Type: text/markdown
|
|
19
|
+
Requires-Dist: mcp>=1.0.0
|
|
20
|
+
Requires-Dist: httpx>=0.27.0
|
|
21
|
+
|
|
22
|
+
# MCPeasy
|
|
23
|
+
|
|
24
|
+
The simplest way to build and connect MCP (Model Context Protocol) servers and clients in Python.
|
|
25
|
+
|
|
26
|
+
No async. No boilerplate. Just define your tools and go.
|
|
27
|
+
|
|
28
|
+
## Install
|
|
29
|
+
|
|
30
|
+
```bash
|
|
31
|
+
pip install mcpsdk
|
|
32
|
+
```
|
|
33
|
+
|
|
34
|
+
## Quickstart
|
|
35
|
+
|
|
36
|
+
### Build a server
|
|
37
|
+
|
|
38
|
+
```python
|
|
39
|
+
from mcpeasy import MCPServer
|
|
40
|
+
|
|
41
|
+
server = MCPServer("my-server")
|
|
42
|
+
|
|
43
|
+
@server.tool
|
|
44
|
+
def get_weather(city: str) -> str:
|
|
45
|
+
"""Get the current weather for a city."""
|
|
46
|
+
return f"Sunny in {city}, 22C"
|
|
47
|
+
|
|
48
|
+
@server.tool
|
|
49
|
+
def calculate(expression: str) -> str:
|
|
50
|
+
"""Evaluate a math expression."""
|
|
51
|
+
return str(eval(expression))
|
|
52
|
+
|
|
53
|
+
server.run() # stdio (default)
|
|
54
|
+
server.run(mode="sse") # SSE on http://0.0.0.0:8000
|
|
55
|
+
```
|
|
56
|
+
|
|
57
|
+
### Connect a client
|
|
58
|
+
|
|
59
|
+
```python
|
|
60
|
+
from mcpeasy import MCPClient
|
|
61
|
+
|
|
62
|
+
# SSE — server already running
|
|
63
|
+
with MCPClient.from_sse("http://localhost:8000/sse") as client:
|
|
64
|
+
tools = client.list_tools()
|
|
65
|
+
result = client.call_tool("get_weather", {"city": "Tokyo"})
|
|
66
|
+
print(result)
|
|
67
|
+
|
|
68
|
+
# stdio — client spawns the server
|
|
69
|
+
with MCPClient.from_stdio("python", ["server.py"]) as client:
|
|
70
|
+
tools = client.list_tools()
|
|
71
|
+
result = client.call_tool("calculate", {"expression": "12 * 7"})
|
|
72
|
+
print(result)
|
|
73
|
+
```
|
|
74
|
+
|
|
75
|
+
### Use with any LLM
|
|
76
|
+
|
|
77
|
+
`list_tools()` returns raw MCP format — convert to your LLM's format yourself:
|
|
78
|
+
|
|
79
|
+
```python
|
|
80
|
+
from mcpeasy import MCPClient
|
|
81
|
+
|
|
82
|
+
with MCPClient.from_sse("http://localhost:8000/sse") as client:
|
|
83
|
+
tools = client.list_tools()
|
|
84
|
+
# tools → [{ "name", "description", "parameters" }]
|
|
85
|
+
|
|
86
|
+
# Anthropic
|
|
87
|
+
anthropic_tools = [{"name": t["name"], "description": t["description"], "input_schema": t["parameters"]} for t in tools]
|
|
88
|
+
|
|
89
|
+
# OpenAI
|
|
90
|
+
openai_tools = [{"type": "function", "function": {"name": t["name"], "description": t["description"], "parameters": t["parameters"]}} for t in tools]
|
|
91
|
+
|
|
92
|
+
# when LLM picks a tool:
|
|
93
|
+
result = client.call_tool(tool_name, params)
|
|
94
|
+
```
|
|
95
|
+
|
|
96
|
+
## API
|
|
97
|
+
|
|
98
|
+
### MCPServer
|
|
99
|
+
|
|
100
|
+
| Method | Description |
|
|
101
|
+
|---|---|
|
|
102
|
+
| `MCPServer(name)` | Create a server |
|
|
103
|
+
| `@server.tool` | Register a function as a tool |
|
|
104
|
+
| `server.run(mode, host, port)` | Start the server (`mode="stdio"` or `"sse"`) |
|
|
105
|
+
|
|
106
|
+
### MCPClient
|
|
107
|
+
|
|
108
|
+
| Method | Description |
|
|
109
|
+
|---|---|
|
|
110
|
+
| `MCPClient.from_sse(url)` | Create client for remote SSE server |
|
|
111
|
+
| `MCPClient.from_stdio(command, args)` | Create client for local stdio server |
|
|
112
|
+
| `client.connect()` | Open connection |
|
|
113
|
+
| `client.disconnect()` | Close connection |
|
|
114
|
+
| `client.list_tools()` | Get all tools from server |
|
|
115
|
+
| `client.call_tool(name, params)` | Call a tool, get result as string |
|
|
116
|
+
| `client.tool_info(name)` | Get details of one tool |
|
|
117
|
+
|
|
118
|
+
## Transport modes
|
|
119
|
+
|
|
120
|
+
| | stdio | SSE |
|
|
121
|
+
|---|---|---|
|
|
122
|
+
| Where | Same machine | Anywhere |
|
|
123
|
+
| Server startup | Client spawns it | Already running |
|
|
124
|
+
| Connect with | `from_stdio(cmd, args)` | `from_sse(url)` |
|
|
125
|
+
| Best for | Local tools | Remote / shared servers |
|
|
126
|
+
|
|
127
|
+
## License
|
|
128
|
+
|
|
129
|
+
MIT
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
mcpeasy
|
|
@@ -0,0 +1,35 @@
|
|
|
1
|
+
[build-system]
|
|
2
|
+
requires = ["setuptools>=68", "wheel"]
|
|
3
|
+
build-backend = "setuptools.build_meta"
|
|
4
|
+
|
|
5
|
+
[project]
|
|
6
|
+
name = "mcpsdk"
|
|
7
|
+
version = "0.1.0"
|
|
8
|
+
description = "The simplest way to build and connect MCP servers and clients."
|
|
9
|
+
readme = "README.md"
|
|
10
|
+
requires-python = ">=3.11"
|
|
11
|
+
license = { text = "MIT" }
|
|
12
|
+
keywords = ["mcp", "ai", "agent", "llm", "tools", "model-context-protocol"]
|
|
13
|
+
classifiers = [
|
|
14
|
+
"Programming Language :: Python :: 3",
|
|
15
|
+
"Programming Language :: Python :: 3.11",
|
|
16
|
+
"Programming Language :: Python :: 3.12",
|
|
17
|
+
"Programming Language :: Python :: 3.13",
|
|
18
|
+
"License :: OSI Approved :: MIT License",
|
|
19
|
+
"Operating System :: OS Independent",
|
|
20
|
+
"Intended Audience :: Developers",
|
|
21
|
+
"Topic :: Software Development :: Libraries :: Python Modules",
|
|
22
|
+
]
|
|
23
|
+
|
|
24
|
+
dependencies = [
|
|
25
|
+
"mcp>=1.0.0",
|
|
26
|
+
"httpx>=0.27.0",
|
|
27
|
+
]
|
|
28
|
+
|
|
29
|
+
[project.urls]
|
|
30
|
+
Homepage = "https://github.com/yourname/mcpeasy"
|
|
31
|
+
"Bug Tracker" = "https://github.com/yourname/mcpeasy/issues"
|
|
32
|
+
|
|
33
|
+
[tool.setuptools.packages.find]
|
|
34
|
+
where = ["."]
|
|
35
|
+
include = ["mcpeasy*"]
|
mcpsdk-0.1.0/setup.cfg
ADDED