mbxai-0.1.0.tar.gz
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as published in their public registries.
- mbxai-0.1.0/LICENSE +21 -0
- mbxai-0.1.0/PKG-INFO +168 -0
- mbxai-0.1.0/README.md +134 -0
- mbxai-0.1.0/pyproject.toml +86 -0
- mbxai-0.1.0/setup.py +47 -0
- mbxai-0.1.0/src/mbxai/__init__.py +5 -0
- mbxai-0.1.0/src/mbxai/core.py +12 -0
- mbxai-0.1.0/src/mbxai/mcp/__init__.py +6 -0
- mbxai-0.1.0/src/mbxai/mcp/client.py +116 -0
- mbxai-0.1.0/src/mbxai/mcp/example.py +84 -0
- mbxai-0.1.0/src/mbxai/mcp/server.py +92 -0
- mbxai-0.1.0/src/mbxai/openrouter/__init__.py +14 -0
- mbxai-0.1.0/src/mbxai/openrouter/client.py +269 -0
- mbxai-0.1.0/src/mbxai/openrouter/config.py +71 -0
- mbxai-0.1.0/src/mbxai/openrouter/models.py +87 -0
- mbxai-0.1.0/src/mbxai/tools/__init__.py +12 -0
- mbxai-0.1.0/src/mbxai/tools/client.py +172 -0
- mbxai-0.1.0/src/mbxai/tools/example.py +75 -0
- mbxai-0.1.0/src/mbxai/tools/types.py +33 -0
- mbxai-0.1.0/tests/test_core.py +9 -0
- mbxai-0.1.0/tests/test_mcp.py +355 -0
- mbxai-0.1.0/tests/test_openrouter.py +485 -0
- mbxai-0.1.0/tests/test_tools.py +286 -0
- mbxai-0.1.0/uv.lock +1013 -0
mbxai-0.1.0/LICENSE
ADDED
@@ -0,0 +1,21 @@
MIT License

Copyright (c) 2024 Mike Bertram

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
mbxai-0.1.0/PKG-INFO
ADDED
@@ -0,0 +1,168 @@
Metadata-Version: 2.4
Name: mbxai
Version: 0.1.0
Summary: MBX AI SDK
Project-URL: Homepage, https://www.mibexx.de
Project-URL: Documentation, https://www.mibexx.de
Project-URL: Repository, https://github.com/yourusername/mbxai.git
Author: MBX AI
License: MIT
License-File: LICENSE
Classifier: Development Status :: 4 - Beta
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 3.12
Requires-Python: >=3.12
Requires-Dist: fastapi>=0.115.12
Requires-Dist: httpx>=0.27.0
Requires-Dist: mcp>=1.7.1
Requires-Dist: openai>=1.77.0
Requires-Dist: pydantic-settings>=2.9.1
Requires-Dist: pydantic>=2.9.1
Requires-Dist: python-multipart>=0.0.20
Requires-Dist: sse-starlette>=2.3.4
Requires-Dist: starlette>=0.46.2
Requires-Dist: uvicorn>=0.34.2
Provides-Extra: dev
Requires-Dist: black>=24.3.0; extra == 'dev'
Requires-Dist: isort>=5.13.2; extra == 'dev'
Requires-Dist: mypy>=1.8.0; extra == 'dev'
Requires-Dist: pytest-asyncio>=0.26.0; extra == 'dev'
Requires-Dist: pytest-cov>=6.1.1; extra == 'dev'
Requires-Dist: pytest>=8.3.5; extra == 'dev'
Description-Content-Type: text/markdown

# MBX AI

A Python library for building AI applications with LLMs.

## Features

- **OpenRouter Integration**: Connect to various LLM providers through OpenRouter
- **Tool Integration**: Easily integrate tools with LLMs using the Model Context Protocol (MCP)
- **Structured Output**: Get structured, typed responses from LLMs
- **Chat Interface**: Simple chat interface for interacting with LLMs
- **FastAPI Server**: Built-in FastAPI server for tool integration

## Installation

```bash
pip install mbxai
```

## Quick Start

### Basic Usage

```python
from mbxai import OpenRouterClient

# Initialize the client
client = OpenRouterClient(api_key="your-api-key")

# Chat with an LLM
response = await client.chat([
    {"role": "user", "content": "Hello, how are you?"}
])
print(response.choices[0].message.content)
```

### Using Tools

```python
from mbxai import OpenRouterClient, ToolClient
from pydantic import BaseModel

# Define your tool's input and output models
class CalculatorInput(BaseModel):
    a: float
    b: float

class CalculatorOutput(BaseModel):
    result: float

# Create a calculator tool
async def calculator(input: CalculatorInput) -> CalculatorOutput:
    return CalculatorOutput(result=input.a + input.b)

# Initialize the client with tools
client = ToolClient(OpenRouterClient(api_key="your-api-key"))
client.add_tool(calculator)

# Use the tool in a chat
response = await client.chat([
    {"role": "user", "content": "What is 2 + 3?"}
])
print(response.choices[0].message.content)
```

### Using MCP (Model Context Protocol)

```python
from mbxai import OpenRouterClient, MCPClient
from mbxai.mcp import MCPServer
from mcp.server.fastmcp import FastMCP
from pydantic import BaseModel

# Define your tool's input and output models
class CalculatorInput(BaseModel):
    a: float
    b: float

class CalculatorOutput(BaseModel):
    result: float

# Create a FastMCP instance
mcp = FastMCP("calculator-service")

# Create a calculator tool
@mcp.tool()
async def calculator(argument: CalculatorInput) -> CalculatorOutput:
    return CalculatorOutput(result=argument.a + argument.b)

# Start the MCP server
server = MCPServer("calculator-service")
await server.add_tool(calculator)
await server.start()

# Initialize the MCP client
client = MCPClient(OpenRouterClient(api_key="your-api-key"))
await client.register_mcp_server("calculator-service", "http://localhost:8000")

# Use the tool in a chat
response = await client.chat([
    {"role": "user", "content": "What is 2 + 3?"}
])
print(response.choices[0].message.content)
```

## Development

### Setup

1. Clone the repository:
```bash
git clone https://github.com/yourusername/mbxai.git
cd mbxai
```

2. Create a virtual environment:
```bash
python -m venv .venv
source .venv/bin/activate  # On Windows: .venv\Scripts\activate
```

3. Install dependencies:
```bash
pip install -e ".[dev]"
```

### Running Tests

```bash
pytest tests/
```

## License

MIT License
mbxai-0.1.0/README.md
ADDED
@@ -0,0 +1,134 @@
# MBX AI

A Python library for building AI applications with LLMs.

## Features

- **OpenRouter Integration**: Connect to various LLM providers through OpenRouter
- **Tool Integration**: Easily integrate tools with LLMs using the Model Context Protocol (MCP)
- **Structured Output**: Get structured, typed responses from LLMs
- **Chat Interface**: Simple chat interface for interacting with LLMs
- **FastAPI Server**: Built-in FastAPI server for tool integration

## Installation

```bash
pip install mbxai
```

## Quick Start

### Basic Usage

```python
from mbxai import OpenRouterClient

# Initialize the client
client = OpenRouterClient(api_key="your-api-key")

# Chat with an LLM
response = await client.chat([
    {"role": "user", "content": "Hello, how are you?"}
])
print(response.choices[0].message.content)
```

### Using Tools

```python
from mbxai import OpenRouterClient, ToolClient
from pydantic import BaseModel

# Define your tool's input and output models
class CalculatorInput(BaseModel):
    a: float
    b: float

class CalculatorOutput(BaseModel):
    result: float

# Create a calculator tool
async def calculator(input: CalculatorInput) -> CalculatorOutput:
    return CalculatorOutput(result=input.a + input.b)

# Initialize the client with tools
client = ToolClient(OpenRouterClient(api_key="your-api-key"))
client.add_tool(calculator)

# Use the tool in a chat
response = await client.chat([
    {"role": "user", "content": "What is 2 + 3?"}
])
print(response.choices[0].message.content)
```

### Using MCP (Model Context Protocol)

```python
from mbxai import OpenRouterClient, MCPClient
from mbxai.mcp import MCPServer
from mcp.server.fastmcp import FastMCP
from pydantic import BaseModel

# Define your tool's input and output models
class CalculatorInput(BaseModel):
    a: float
    b: float

class CalculatorOutput(BaseModel):
    result: float

# Create a FastMCP instance
mcp = FastMCP("calculator-service")

# Create a calculator tool
@mcp.tool()
async def calculator(argument: CalculatorInput) -> CalculatorOutput:
    return CalculatorOutput(result=argument.a + argument.b)

# Start the MCP server
server = MCPServer("calculator-service")
await server.add_tool(calculator)
await server.start()

# Initialize the MCP client
client = MCPClient(OpenRouterClient(api_key="your-api-key"))
await client.register_mcp_server("calculator-service", "http://localhost:8000")

# Use the tool in a chat
response = await client.chat([
    {"role": "user", "content": "What is 2 + 3?"}
])
print(response.choices[0].message.content)
```
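
### Structured Output

`parse()` returns the model's reply as a validated Pydantic object instead of plain text. A minimal sketch, following the pattern in `src/mbxai/mcp/example.py` (the `WeatherOutput` model and the prompt are illustrative):

```python
from pydantic import BaseModel

class WeatherOutput(BaseModel):
    location: str
    temperature: float
    units: str
    condition: str
    humidity: float

# `client` is the MCPClient created in the example above
messages = [{"role": "user", "content": "What's the weather like in New York?"}]
response = await client.parse(messages, WeatherOutput)

# The parsed attribute holds a WeatherOutput instance
weather = response.choices[0].message.parsed
print(f"{weather.location}: {weather.temperature}° {weather.condition}")
```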

## Development

### Setup

1. Clone the repository:
```bash
git clone https://github.com/yourusername/mbxai.git
cd mbxai
```

2. Create a virtual environment:
```bash
python -m venv .venv
source .venv/bin/activate  # On Windows: .venv\Scripts\activate
```

3. Install dependencies:
```bash
pip install -e ".[dev]"
```

### Running Tests

```bash
pytest tests/
```

## License

MIT License
mbxai-0.1.0/pyproject.toml
ADDED
@@ -0,0 +1,86 @@
[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"

[project]
name = "mbxai"
version = "0.1.0"
authors = [
    { name = "MBX AI" }
]
description = "MBX AI SDK"
readme = "README.md"
requires-python = ">=3.12"
license = { text = "MIT" }
keywords = []
classifiers = [
    "Development Status :: 4 - Beta",
    "Programming Language :: Python",
    "Programming Language :: Python :: 3.12",
    "Operating System :: OS Independent",
]
dependencies = [
    "httpx>=0.27.0",
    "pydantic>=2.9.1",
    "fastapi>=0.115.12",
    "mcp>=1.7.1",
    "openai>=1.77.0",
    "python-multipart>=0.0.20",
    "sse-starlette>=2.3.4",
    "starlette>=0.46.2",
    "uvicorn>=0.34.2",
    "pydantic-settings>=2.9.1"
]

[project.urls]
Homepage = "https://www.mibexx.de"
Documentation = "https://www.mibexx.de"
Repository = "https://github.com/yourusername/mbxai.git"

[project.optional-dependencies]
dev = [
    "pytest>=8.3.5",
    "pytest-asyncio>=0.26.0",
    "pytest-cov>=6.1.1",
    "black>=24.3.0",
    "isort>=5.13.2",
    "mypy>=1.8.0"
]

[tool.hatch.build.targets.wheel]
packages = ["src/mbxai"]

[tool.pytest.ini_options]
testpaths = ["tests"]
python_files = ["test_*.py"]
addopts = "-v"

[tool.black]
line-length = 100
target-version = ["py312"]

[tool.isort]
profile = "black"
multi_line_output = 3
line_length = 100

[tool.mypy]
python_version = "3.12"
warn_return_any = true
warn_unused_configs = true
disallow_untyped_defs = true
disallow_incomplete_defs = true
check_untyped_defs = true
disallow_untyped_decorators = true
no_implicit_optional = true
warn_redundant_casts = true
warn_unused_ignores = true
warn_no_return = true
warn_unreachable = true
strict_equality = true

[dependency-groups]
dev = [
    "build>=1.2.2.post1",
    "twine>=6.1.0",
]
mbxai-0.1.0/setup.py
ADDED
@@ -0,0 +1,47 @@
from setuptools import setup, find_packages

setup(
    name="mbxai",
    version="0.1.0",
    author="MBX AI",
    description="MBX AI SDK",
    long_description=open("README.md").read(),
    long_description_content_type="text/markdown",
    python_requires=">=3.12",
    license="MIT",
    classifiers=[
        "Development Status :: 4 - Beta",
        "Programming Language :: Python",
        "Programming Language :: Python :: 3.12",
        "Operating System :: OS Independent",
    ],
    package_dir={"": "src"},
    packages=find_packages(where="src"),
    install_requires=[
        "httpx>=0.27.0",
        "pydantic>=2.9.1",
        "fastapi>=0.115.12",
        "mcp>=1.7.1",
        "openai>=1.77.0",
        "python-multipart>=0.0.20",
        "sse-starlette>=2.3.4",
        "starlette>=0.46.2",
        "uvicorn>=0.34.2",
        "pydantic-settings>=2.9.1"
    ],
    extras_require={
        "dev": [
            "pytest>=8.3.5",
            "pytest-asyncio>=0.26.0",
            "pytest-cov>=6.1.1",
            "black>=24.3.0",
            "isort>=5.13.2",
            "mypy>=1.8.0"
        ]
    },
    project_urls={
        "Homepage": "https://www.mibexx.de",
        "Documentation": "https://www.mibexx.de",
        "Repository": "https://gitlab.com/mbxai/mbxai-sdk.git"
    }
)
mbxai-0.1.0/src/mbxai/mcp/client.py
ADDED
@@ -0,0 +1,116 @@
"""MCP client implementation."""

from typing import Any, TypeVar, Callable
import httpx
from pydantic import BaseModel, Field

from ..tools import ToolClient, Tool
from ..openrouter import OpenRouterClient


T = TypeVar("T", bound=BaseModel)


class MCPTool(Tool):
    """MCP tool definition."""
    internal_url: str | None = Field(default=None, description="The internal URL to invoke the tool")
    service: str = Field(description="The service that provides the tool")
    strict: bool = Field(default=True, description="Whether the tool response is strictly validated")
    input_schema: dict[str, Any] = Field(description="The input schema for the tool")

    def to_openai_function(self) -> dict[str, Any]:
        """Convert the tool to an OpenAI function definition."""
        return {
            "name": self.name,
            "description": self.description,
            "parameters": self._convert_to_openai_schema(self.input_schema)
        }

    def _convert_to_openai_schema(self, mcp_schema: dict[str, Any]) -> dict[str, Any]:
        """Convert MCP schema to OpenAI schema format."""
        if not mcp_schema:
            return {"type": "object", "properties": {}}

        # If schema has a $ref, resolve it
        if "$ref" in mcp_schema:
            ref = mcp_schema["$ref"].split("/")[-1]
            mcp_schema = mcp_schema.get("$defs", {}).get(ref, {})

        # If schema has an input wrapper, unwrap it
        if "properties" in mcp_schema and "input" in mcp_schema["properties"]:
            input_schema = mcp_schema["properties"]["input"]
            if "$ref" in input_schema:
                ref = input_schema["$ref"].split("/")[-1]
                input_schema = mcp_schema.get("$defs", {}).get(ref, {})
            return input_schema

        return mcp_schema


class MCPClient(ToolClient):
    """MCP client that extends ToolClient to support MCP tool servers."""

    def __init__(self, openrouter_client: OpenRouterClient):
        """Initialize the MCP client."""
        super().__init__(openrouter_client)
        self._mcp_servers: dict[str, str] = {}
        self._http_client = httpx.AsyncClient()

    async def __aenter__(self):
        """Enter the async context."""
        return self

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        """Exit the async context."""
        await self._http_client.aclose()

    def _create_tool_function(self, tool: MCPTool) -> Callable[..., Any]:
        """Create a function that invokes an MCP tool."""
        async def tool_function(**kwargs: Any) -> Any:
            # If kwargs has an input wrapper, unwrap it
            if "input" in kwargs:
                kwargs = kwargs["input"]

            # Get the URL to use for the tool
            url = tool.internal_url
            if url is None:
                # Use the MCP server URL as fallback
                server_url = self._mcp_servers.get(tool.service)
                if server_url is None:
                    raise ValueError(f"No MCP server found for service {tool.service}")
                url = f"{server_url}/tools/{tool.name}/invoke"

            # Make the HTTP request to the tool's URL
            response = await self._http_client.post(
                url,
                json={"input": kwargs} if tool.strict else kwargs
            )
            return response.json()

        # Create a sync wrapper for the async function
        def sync_tool_function(**kwargs: Any) -> Any:
            import asyncio
            loop = asyncio.get_event_loop()
            return loop.run_until_complete(tool_function(**kwargs))

        return sync_tool_function

    async def register_mcp_server(self, name: str, base_url: str) -> None:
        """Register an MCP server and load its tools."""
        self._mcp_servers[name] = base_url.rstrip("/")

        # Fetch tools from the server
        response = await self._http_client.get(f"{base_url}/tools")
        tools_data = response.json()

        # Register each tool
        for tool_data in tools_data:
            # Create MCPTool instance
            tool = MCPTool(**tool_data)

            # Create the tool function
            tool_function = self._create_tool_function(tool)

            # Register the tool with ToolClient
            self._tools[tool.name] = tool
            tool.function = tool_function
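For reference, `register_mcp_server` builds each `MCPTool` directly from the JSON entries returned by `GET {base_url}/tools`. A hypothetical entry shaped to match the model above (all values are illustrative, not taken from this package):

```python
# Hypothetical tool definition as an MCP server's /tools endpoint might return it.
# Field names mirror MCPTool above; name/description come from the base Tool model.
tool_data = {
    "name": "get_weather",
    "description": "Get weather information for a location.",
    "service": "weather-service",
    "internal_url": None,   # None -> client falls back to {server_url}/tools/get_weather/invoke
    "strict": True,         # arguments are wrapped as {"input": {...}} when invoking
    "input_schema": {
        "type": "object",
        "properties": {
            "location": {"type": "string"},
            "units": {"type": "string", "default": "celsius"},
        },
        "required": ["location"],
    },
}
```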
mbxai-0.1.0/src/mbxai/mcp/example.py
ADDED
@@ -0,0 +1,84 @@
"""Example usage of MCP client and server."""

import asyncio
from typing import Any
from pydantic import BaseModel
from mcp.server.fastmcp import FastMCP

from ..openrouter import OpenRouterClient
from .client import MCPClient
from .server import MCPServer


# Create a FastMCP instance for this module
mcp = FastMCP("weather-service")


# Define input/output models
class WeatherInput(BaseModel):
    location: str
    units: str = "celsius"  # Default to celsius, can be "fahrenheit" or "celsius"


class WeatherOutput(BaseModel):
    location: str
    temperature: float
    units: str
    condition: str
    humidity: float


@mcp.tool()
async def get_weather(input: WeatherInput) -> dict[str, Any]:
    """Get weather information for a location.

    Args:
        input: WeatherInput model containing location and units preference
    """
    # This is a mock implementation
    temperature = 20 if input.units == "celsius" else 68  # Convert to fahrenheit if needed

    return {
        "location": input.location,
        "temperature": temperature,
        "units": input.units,
        "condition": "sunny",
        "humidity": 65,
    }


async def main():
    # Create and start the MCP server
    server = MCPServer("weather-service")

    # Register the tool with the MCP server
    server.mcp_server.add_tool(get_weather)

    # Create the OpenRouter client
    openrouter_client = OpenRouterClient(token="your-api-key")

    # Create the MCP client
    mcp_client = MCPClient(openrouter_client)

    # Register the MCP server
    await mcp_client.register_mcp_server(
        name="weather-service",
        base_url="http://localhost:8000"
    )

    # Use the tool in a chat
    messages = [{"role": "user", "content": "What's the weather like in New York?"}]
    response = await mcp_client.chat(messages)
    print(response.choices[0].message.content)

    # Use the tool with structured output
    response = await mcp_client.parse(messages, WeatherOutput)
    weather_info = response.choices[0].message.parsed
    print(f"Location: {weather_info.location}")
    print(f"Temperature: {weather_info.temperature}°{weather_info.units.upper()}")
    print(f"Condition: {weather_info.condition}")
    print(f"Humidity: {weather_info.humidity}%")


if __name__ == "__main__":
    asyncio.run(main())
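Note that `main()` registers `get_weather` with the `MCPServer` but never starts it, so `register_mcp_server` assumes a server is already listening on `http://localhost:8000`. A standalone runner might look like the sketch below; it follows the README's `MCPServer` usage, and the exact behaviour of `start()` is an assumption rather than something confirmed by this diff:

```python
# run_weather_server.py -- sketch of a standalone server process, assuming
# MCPServer.start() serves HTTP on localhost:8000 as the README suggests.
import asyncio

from mbxai.mcp import MCPServer
from mbxai.mcp.example import get_weather


async def serve() -> None:
    server = MCPServer("weather-service")
    server.mcp_server.add_tool(get_weather)  # same registration call as example.py
    await server.start()  # assumed to block while serving requests


if __name__ == "__main__":
    asyncio.run(serve())
```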