mbxai-0.1.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
mbxai/__init__.py ADDED
@@ -0,0 +1,5 @@
+ """
+ MBX AI package.
+ """
+
+ __version__ = "0.1.0"
mbxai/core.py ADDED
@@ -0,0 +1,12 @@
+ """
+ Core functionality for MBX AI.
+ """
+
+ def hello_world() -> str:
+     """
+     Returns a greeting message.
+
+     Returns:
+         str: A greeting message
+     """
+     return "Hello from MBX AI!"
mbxai/mcp/__init__.py ADDED
@@ -0,0 +1,6 @@
+ """Model Context Protocol (MCP) implementation for MBX AI."""
+
+ from .client import MCPClient
+ from .server import MCPServer, Tool
+
+ __all__ = ["MCPClient", "MCPServer", "Tool"]
mbxai/mcp/client.py ADDED
@@ -0,0 +1,116 @@
+ """MCP client implementation."""
+
+ from typing import Any, TypeVar, Callable
+ import httpx
+ from pydantic import BaseModel, Field
+
+ from ..tools import ToolClient, Tool
+ from ..openrouter import OpenRouterClient
+
+
+ T = TypeVar("T", bound=BaseModel)
+
+
+ class MCPTool(Tool):
+     """MCP tool definition."""
+     internal_url: str | None = Field(default=None, description="The internal URL to invoke the tool")
+     service: str = Field(description="The service that provides the tool")
+     strict: bool = Field(default=True, description="Whether the tool response is strictly validated")
+     input_schema: dict[str, Any] = Field(description="The input schema for the tool")
+
+     def to_openai_function(self) -> dict[str, Any]:
+         """Convert the tool to an OpenAI function definition."""
+         return {
+             "name": self.name,
+             "description": self.description,
+             "parameters": self._convert_to_openai_schema(self.input_schema)
+         }
+
+     def _convert_to_openai_schema(self, mcp_schema: dict[str, Any]) -> dict[str, Any]:
+         """Convert MCP schema to OpenAI schema format."""
+         if not mcp_schema:
+             return {"type": "object", "properties": {}}
+
+         # If schema has a $ref, resolve it
+         if "$ref" in mcp_schema:
+             ref = mcp_schema["$ref"].split("/")[-1]
+             mcp_schema = mcp_schema.get("$defs", {}).get(ref, {})
+
+         # If schema has an input wrapper, unwrap it
+         if "properties" in mcp_schema and "input" in mcp_schema["properties"]:
+             input_schema = mcp_schema["properties"]["input"]
+             if "$ref" in input_schema:
+                 ref = input_schema["$ref"].split("/")[-1]
+                 input_schema = mcp_schema.get("$defs", {}).get(ref, {})
+             return input_schema
+
+         return mcp_schema
+
+
+ class MCPClient(ToolClient):
+     """MCP client that extends ToolClient to support MCP tool servers."""
+
+     def __init__(self, openrouter_client: OpenRouterClient):
+         """Initialize the MCP client."""
+         super().__init__(openrouter_client)
+         self._mcp_servers: dict[str, str] = {}
+         self._http_client = httpx.AsyncClient()
+
+     async def __aenter__(self):
+         """Enter the async context."""
+         return self
+
+     async def __aexit__(self, exc_type, exc_val, exc_tb):
+         """Exit the async context."""
+         await self._http_client.aclose()
+
+     def _create_tool_function(self, tool: MCPTool) -> Callable[..., Any]:
+         """Create a function that invokes an MCP tool."""
+         async def tool_function(**kwargs: Any) -> Any:
+             # If kwargs has an input wrapper, unwrap it
+             if "input" in kwargs:
+                 kwargs = kwargs["input"]
+
+             # Get the URL to use for the tool
+             url = tool.internal_url
+             if url is None:
+                 # Use the MCP server URL as fallback
+                 server_url = self._mcp_servers.get(tool.service)
+                 if server_url is None:
+                     raise ValueError(f"No MCP server found for service {tool.service}")
+                 url = f"{server_url}/tools/{tool.name}/invoke"
+
+             # Make the HTTP request to the tool's URL
+             response = await self._http_client.post(
+                 url,
+                 json={"input": kwargs} if tool.strict else kwargs
+             )
+             return response.json()
+
+         # Create a sync wrapper for the async function
+         def sync_tool_function(**kwargs: Any) -> Any:
+             import asyncio
+             loop = asyncio.get_event_loop()
+             return loop.run_until_complete(tool_function(**kwargs))
+
+         return sync_tool_function
+
+     async def register_mcp_server(self, name: str, base_url: str) -> None:
+         """Register an MCP server and load its tools."""
+         self._mcp_servers[name] = base_url.rstrip("/")
+
+         # Fetch tools from the server
+         response = await self._http_client.get(f"{base_url}/tools")
+         tools_data = response.json()
+
+         # Register each tool
+         for tool_data in tools_data:
+             # Create MCPTool instance
+             tool = MCPTool(**tool_data)
+
+             # Create the tool function
+             tool_function = self._create_tool_function(tool)
+
+             # Register the tool with ToolClient
+             self._tools[tool.name] = tool
+             tool.function = tool_function
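The `_convert_to_openai_schema` method above resolves `$ref` pointers and unwraps FastMCP's `input` wrapper into plain OpenAI function parameters. A minimal standalone sketch of that unwrapping; the `WeatherInput` schema here is illustrative, not taken from the package:

```python
# Reproduces the $ref resolution and "input" unwrapping MCPTool performs.
mcp_schema = {
    "properties": {"input": {"$ref": "#/$defs/WeatherInput"}},
    "$defs": {
        "WeatherInput": {
            "type": "object",
            "properties": {"location": {"type": "string"}},
            "required": ["location"],
        }
    },
}

# Resolve the $ref behind the "input" wrapper against the top-level $defs.
ref = mcp_schema["properties"]["input"]["$ref"].split("/")[-1]
parameters = mcp_schema["$defs"][ref]
print(parameters)  # {'type': 'object', 'properties': {'location': ...}, 'required': ['location']}
```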
mbxai/mcp/example.py ADDED
@@ -0,0 +1,84 @@
+ """Example usage of MCP client and server."""
+
+ import asyncio
+ from typing import Any
+ from pydantic import BaseModel
+ from mcp.server.fastmcp import FastMCP
+
+ from ..openrouter import OpenRouterClient
+ from .client import MCPClient
+ from .server import MCPServer
+
+
+ # Create a FastMCP instance for this module
+ mcp = FastMCP("weather-service")
+
+
+ # Define input/output models
+ class WeatherInput(BaseModel):
+     location: str
+     units: str = "celsius"  # Default to celsius, can be "fahrenheit" or "celsius"
+
+
+ class WeatherOutput(BaseModel):
+     location: str
+     temperature: float
+     units: str
+     condition: str
+     humidity: float
+
+
+ @mcp.tool()
+ async def get_weather(input: WeatherInput) -> dict[str, Any]:
+     """Get weather information for a location.
+
+     Args:
+         input: WeatherInput model containing location and units preference
+     """
+     # This is a mock implementation
+     temperature = 20 if input.units == "celsius" else 68  # Convert to fahrenheit if needed
+
+     return {
+         "location": input.location,
+         "temperature": temperature,
+         "units": input.units,
+         "condition": "sunny",
+         "humidity": 65,
+     }
+
+
+ async def main():
+     # Create and start the MCP server
+     server = MCPServer("weather-service")
+
+     # Register the tool with the MCP server
+     server.mcp_server.add_tool(get_weather)
+
+     # Create the OpenRouter client
+     openrouter_client = OpenRouterClient(token="your-api-key")
+
+     # Create the MCP client
+     mcp_client = MCPClient(openrouter_client)
+
+     # Register the MCP server
+     await mcp_client.register_mcp_server(
+         name="weather-service",
+         base_url="http://localhost:8000"
+     )
+
+     # Use the tool in a chat (ToolClient.chat is synchronous)
+     messages = [{"role": "user", "content": "What's the weather like in New York?"}]
+     response = mcp_client.chat(messages)
+     print(response.choices[0].message.content)
+
+     # Use the tool with structured output (ToolClient.parse is synchronous)
+     response = mcp_client.parse(messages, WeatherOutput)
+     weather_info = response.choices[0].message.parsed
+     print(f"Location: {weather_info.location}")
+     print(f"Temperature: {weather_info.temperature}°{weather_info.units.upper()}")
+     print(f"Condition: {weather_info.condition}")
+     print(f"Humidity: {weather_info.humidity}%")
+
+
+ if __name__ == "__main__":
+     asyncio.run(main())
mbxai/mcp/server.py ADDED
@@ -0,0 +1,92 @@
+ """MCP server implementation."""
+
+ from typing import Any, Callable, TypeVar
+ from fastapi import FastAPI, Body
+ from mcp.server.fastmcp import FastMCP
+ from pydantic import BaseModel, Field, ConfigDict
+
+
+ T = TypeVar("T", bound=BaseModel)
+
+
+ class Tool(BaseModel):
+     """MCP tool definition."""
+     model_config = ConfigDict(strict=True)
+     name: str = Field(description="The name of the tool")
+     description: str = Field(description="The description of what the tool does")
+     input_schema: dict[str, Any] = Field(description="The input schema for the tool")
+     strict: bool = Field(default=True, description="Whether the tool response is strictly validated")
+     function: Callable[..., Any] = Field(description="The tool function", exclude=True)
+
+
+ class MCPServer:
+     """MCP server implementation."""
+
+     def __init__(self, name: str, description: str | None = None):
+         """Initialize the MCP server."""
+         self.name = name
+         self.description = description or f"A Model Context Protocol (MCP) tool server for {name}"
+
+         # Create FastAPI app
+         self.app = FastAPI(
+             title=self.name,
+             description=self.description,
+             version="0.1.0",
+         )
+
+         # Initialize MCP server
+         self.mcp_server = FastMCP(self.name)
+
+         # Register endpoints
+         self._register_endpoints()
+
+         # Store registered tools
+         self._tools: dict[str, Tool] = {}
+
+     def _register_endpoints(self) -> None:
+         """Register FastAPI endpoints."""
+         @self.app.get("/tools", response_model=list[Tool])
+         async def get_tools():
+             """Get all available MCP tools."""
+             return list(self._tools.values())
+
+         @self.app.post("/tools/{tool_name}/invoke")
+         async def invoke_tool(tool_name: str, arguments: dict[str, Any] = Body(...)):
+             """Invoke a specific MCP tool."""
+             try:
+                 result = await self.mcp_server.call_tool(tool_name, arguments=arguments)
+                 return result
+             except Exception as e:
+                 return {"error": f"Error invoking tool {tool_name}: {str(e)}"}
+
+     async def add_tool(self, tool: Callable[..., Any]) -> None:
+         """Add a tool to the MCP server."""
+         # Add tool to MCP server
+         self.mcp_server.add_tool(tool)
+
+         # Get tool metadata
+         tools = await self.mcp_server.list_tools()
+         tool_metadata = tools[-1]
+
+         # Convert FastMCP schema to our schema format
+         input_schema = tool_metadata.inputSchema
+         if isinstance(input_schema, dict):
+             if "$ref" in input_schema:
+                 ref = input_schema["$ref"].split("/")[-1]
+                 input_schema = tool_metadata.inputSchema.get("$defs", {}).get(ref, {})
+
+             # Handle Pydantic model input
+             if "properties" in input_schema and "input" in input_schema["properties"]:
+                 input_schema = input_schema["properties"]["input"]
+                 if "$ref" in input_schema:
+                     ref = input_schema["$ref"].split("/")[-1]
+                     input_schema = tool_metadata.inputSchema.get("$defs", {}).get(ref, {})
+
+         # Create and store Tool instance
+         self._tools[tool_metadata.name] = Tool(
+             name=tool_metadata.name,
+             description=tool_metadata.description,
+             input_schema=input_schema,
+             strict=True,
+             function=tool
+         )
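`MCPServer` only builds the FastAPI app; nothing in the class starts an HTTP listener. A sketch of serving it with uvicorn (a declared dependency of the package); the `echo` tool here is made up:

```python
# Hypothetical: run an MCPServer over HTTP so MCPClient can fetch /tools.
import asyncio
import uvicorn
from mbxai.mcp.server import MCPServer

async def echo(text: str) -> str:
    """Return the given text unchanged."""
    return text

async def build() -> MCPServer:
    server = MCPServer("echo-service")
    await server.add_tool(echo)  # registers with FastMCP and the /tools listing
    return server

server = asyncio.run(build())
# GET /tools lists registered tools; POST /tools/echo/invoke calls one.
uvicorn.run(server.app, host="127.0.0.1", port=8000)
```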
mbxai/openrouter/__init__.py ADDED
@@ -0,0 +1,14 @@
+ """
+ OpenRouter client module for MBX AI.
+ """
+
+ from .client import OpenRouterClient
+ from .config import OpenRouterConfig
+ from .models import OpenRouterModel, OpenRouterModelRegistry
+
+ __all__ = [
+     "OpenRouterClient",
+     "OpenRouterConfig",
+     "OpenRouterModel",
+     "OpenRouterModelRegistry",
+ ]
mbxai/openrouter/client.py ADDED
@@ -0,0 +1,269 @@
+ """
+ OpenRouter client implementation.
+ """
+
+ from typing import Any, Optional, Union
+ from openai import OpenAI, OpenAIError
+ from pydantic import BaseModel, TypeAdapter
+ from .models import OpenRouterModel, OpenRouterModelRegistry
+ from .config import OpenRouterConfig
+
+
+ class OpenRouterError(Exception):
+     """Base exception for OpenRouter client errors."""
+     pass
+
+
+ class OpenRouterConnectionError(OpenRouterError):
+     """Raised when there is a connection error."""
+     pass
+
+
+ class OpenRouterAPIError(OpenRouterError):
+     """Raised when the API returns an error."""
+     pass
+
+
+ class OpenRouterClient:
+     """Client for interacting with the OpenRouter API."""
+
+     def __init__(
+         self,
+         token: str,
+         model: Union[str, OpenRouterModel] = OpenRouterModel.GPT4_TURBO,
+         base_url: Optional[str] = None,
+         default_headers: Optional[dict[str, str]] = None,
+     ) -> None:
+         """Initialize the OpenRouter client.
+
+         Args:
+             token: The OpenRouter API token
+             model: The model to use (default: GPT4_TURBO)
+             base_url: Optional custom base URL for the API
+             default_headers: Optional default headers for API requests
+
+         Raises:
+             OpenRouterError: If initialization fails
+         """
+         try:
+             self.config = OpenRouterConfig(
+                 token=token,
+                 model=model,
+                 base_url=base_url or "https://openrouter.ai/api/v1",
+                 default_headers=default_headers or {
+                     "HTTP-Referer": "https://github.com/mibexx/mbxai",
+                     "X-Title": "MBX AI",
+                 }
+             )
+
+             self._client = OpenAI(
+                 api_key=token,
+                 base_url=self.config.base_url,
+                 default_headers=self.config.default_headers,
+             )
+         except Exception as e:
+             raise OpenRouterError(f"Failed to initialize client: {str(e)}")
+
+     def _handle_api_error(self, operation: str, error: Exception) -> None:
+         """Handle API errors.
+
+         Args:
+             operation: The operation being performed
+             error: The error that occurred
+
+         Raises:
+             OpenRouterConnectionError: For connection issues
+             OpenRouterAPIError: For API errors
+             OpenRouterError: For other errors
+         """
+         if isinstance(error, OpenAIError):
+             raise OpenRouterAPIError(f"API error during {operation}: {str(error)}")
+         elif "Connection" in str(error):
+             raise OpenRouterConnectionError(f"Connection error during {operation}: {str(error)}")
+         else:
+             raise OpenRouterError(f"Error during {operation}: {str(error)}")
+
+     @property
+     def model(self) -> str:
+         """Get the current model."""
+         return str(self.config.model)
+
+     @model.setter
+     def model(self, value: Union[str, OpenRouterModel]) -> None:
+         """Set a new model.
+
+         Args:
+             value: The new model to use
+         """
+         self.config.model = value
+
+     def set_model(self, value: Union[str, OpenRouterModel]) -> None:
+         """Set a new model.
+
+         Args:
+             value: The new model to use
+         """
+         self.model = value
+
+     def chat_completion(
+         self,
+         messages: list[dict[str, Any]],
+         *,
+         model: Optional[Union[str, OpenRouterModel]] = None,
+         stream: bool = False,
+         **kwargs: Any,
+     ) -> Any:
+         """Create a chat completion.
+
+         Args:
+             messages: list of messages
+             model: Optional model override
+             stream: Whether to stream the response
+             **kwargs: Additional parameters
+
+         Returns:
+             Completion response
+
+         Raises:
+             OpenRouterConnectionError: For connection issues
+             OpenRouterAPIError: For API errors
+             OpenRouterError: For other errors
+         """
+         try:
+             # Remove any incompatible parameters
+             kwargs.pop("parse", None)  # Remove parse parameter if present
+
+             return self._client.chat.completions.create(
+                 model=str(model or self.model),
+                 messages=messages,
+                 stream=stream,
+                 **kwargs,
+             )
+         except Exception as e:
+             self._handle_api_error("chat completion", e)
+
+     def chat_completion_parse(
+         self,
+         messages: list[dict[str, Any]],
+         response_format: type[BaseModel],
+         *,
+         model: Optional[Union[str, OpenRouterModel]] = None,
+         stream: bool = False,
+         **kwargs: Any,
+     ) -> Any:
+         """Create a chat completion and parse the response.
+
+         Args:
+             messages: list of messages
+             response_format: Pydantic model to parse the response into
+             model: Optional model override
+             stream: Whether to stream the response
+             **kwargs: Additional parameters
+
+         Returns:
+             Parsed completion response
+
+         Raises:
+             OpenRouterConnectionError: For connection issues
+             OpenRouterAPIError: For API errors
+             OpenRouterError: For other errors
+             ValueError: If response parsing fails
+         """
+         try:
+             # Add system message to enforce JSON output if not present
+             if not any(msg.get("role") == "system" for msg in messages):
+                 messages.insert(0, {
+                     "role": "system",
+                     "content": "You are a helpful assistant that responds in valid JSON format."
+                 })
+
+             # Add format instructions to the last user message
+             last_user_msg = next((msg for msg in reversed(messages) if msg.get("role") == "user"), None)
+             if last_user_msg:
+                 format_desc = f"Respond with valid JSON matching this Pydantic model: {response_format.__name__}"
+                 last_user_msg["content"] = f"{format_desc}\n\n{last_user_msg['content']}"
+
+             response = self.chat_completion(
+                 messages,
+                 model=model,
+                 stream=stream,
+                 response_format={"type": "json_object"},  # Force JSON response
+                 **kwargs
+             )
+
+             if stream:
+                 return response
+
+             # Parse the response content into the specified format
+             content = response.choices[0].message.content
+             adapter = TypeAdapter(response_format)
+             try:
+                 parsed = adapter.validate_json(content)
+                 response.choices[0].message.parsed = parsed
+                 return response
+             except Exception as e:
+                 raise ValueError(f"Failed to parse response as {response_format.__name__}: {str(e)}")
+         except ValueError as e:
+             raise e
+         except Exception as e:
+             self._handle_api_error("chat completion parse", e)
+
+     def embeddings(
+         self,
+         input: Union[str, list[str]],
+         *,
+         model: Optional[Union[str, OpenRouterModel]] = None,
+         **kwargs: Any,
+     ) -> Any:
+         """Create embeddings.
+
+         Args:
+             input: Text to embed
+             model: Optional model override
+             **kwargs: Additional parameters
+
+         Returns:
+             Embeddings response
+
+         Raises:
+             OpenRouterConnectionError: For connection issues
+             OpenRouterAPIError: For API errors
+             OpenRouterError: For other errors
+         """
+         try:
+             # Remove any incompatible parameters
+             kwargs.pop("parse", None)  # Remove parse parameter if present
+
+             # Default to text-embedding-ada-002 for embeddings
+             embeddings_model = "openai/text-embedding-ada-002"
+
+             return self._client.embeddings.create(
+                 model=str(model or embeddings_model),
+                 input=input if isinstance(input, list) else [input],
+                 encoding_format="float",  # Use float format instead of base64
+                 **kwargs,
+             )
+         except Exception as e:
+             self._handle_api_error("embeddings", e)
+
+     @classmethod
+     def register_model(cls, name: str, value: str) -> None:
+         """Register a new model.
+
+         Args:
+             name: The name of the model (e.g., "CUSTOM_MODEL")
+             value: The model identifier (e.g., "provider/model-name")
+
+         Raises:
+             ValueError: If the model name is already registered.
+         """
+         OpenRouterModelRegistry.register_model(name, value)
+
+     @classmethod
+     def list_models(cls) -> dict[str, str]:
+         """List all available models.
+
+         Returns:
+             A dictionary mapping model names to their identifiers.
+         """
+         return OpenRouterModelRegistry.list_models()
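A short sketch of the structured-output path above; the `City` model and token are placeholders:

```python
from pydantic import BaseModel
from mbxai.openrouter import OpenRouterClient

class City(BaseModel):
    name: str
    country: str

client = OpenRouterClient(token="your-api-key")
response = client.chat_completion_parse(
    [{"role": "user", "content": "Name one city and its country."}],
    response_format=City,
)
# chat_completion_parse validates the JSON body and attaches the model instance.
city = response.choices[0].message.parsed
print(city.name, city.country)
```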
mbxai/openrouter/config.py ADDED
@@ -0,0 +1,71 @@
+ from pydantic import BaseModel, Field, field_validator
+ from typing import Union
+ from .models import OpenRouterModel, OpenRouterModelRegistry
+
+ class OpenRouterConfig(BaseModel):
+     """Configuration for OpenRouter client."""
+
+     token: str = Field(..., description="OpenRouter API token")
+     model: Union[str, OpenRouterModel] = Field(
+         default=OpenRouterModel.GPT4_TURBO,
+         description="Model to use for completions"
+     )
+     base_url: str = Field(
+         default="https://openrouter.ai/api/v1",
+         description="Base URL for the OpenRouter API"
+     )
+     default_headers: dict[str, str] = Field(
+         default_factory=lambda: {
+             "HTTP-Referer": "https://github.com/mibexx/mbxai",
+             "X-Title": "MBX AI",
+         },
+         description="Default headers to include in all requests"
+     )
+
+     @field_validator("token")
+     def validate_token(cls, v: str) -> str:
+         """Validate that the token is not empty."""
+         if not v or not v.strip():
+             raise ValueError("Token cannot be empty")
+         return v.strip()
+
+     @field_validator("base_url")
+     def validate_base_url(cls, v: str) -> str:
+         """Validate that the base URL is a valid URL."""
+         if not v:
+             raise ValueError("Base URL cannot be empty")
+         if not v.startswith(("http://", "https://")):
+             raise ValueError("Base URL must start with http:// or https://")
+         return v.rstrip("/") + "/"  # Always include trailing slash
+
+     @field_validator("model")
+     def validate_model(cls, v: Union[str, OpenRouterModel]) -> str:
+         """Validate and convert model to string."""
+         if isinstance(v, OpenRouterModel):
+             return v.value
+         if not v:
+             raise ValueError("Model cannot be empty")
+
+         # Try to get model from registry first
+         try:
+             return OpenRouterModelRegistry.get_model(v)
+         except ValueError:
+             # If not in registry, check if it's a valid provider/model format
+             if "/" not in v:
+                 raise ValueError("Custom model must be in format 'provider/model-name'")
+             return str(v)
+
+     @field_validator("default_headers")
+     def validate_headers(cls, v: dict[str, str]) -> dict[str, str]:
+         """Validate that required headers are present and valid."""
+         required_headers = {"HTTP-Referer", "X-Title"}
+         missing_headers = required_headers - set(v.keys())
+         if missing_headers:
+             raise ValueError(f"Missing required headers: {', '.join(missing_headers)}")
+
+         # Ensure header values are not empty
+         empty_headers = [k for k, val in v.items() if not val or not val.strip()]
+         if empty_headers:
+             raise ValueError(f"Empty values for headers: {', '.join(empty_headers)}")
+
+         return {k: val.strip() for k, val in v.items()}
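The validators above normalize input at construction time. A small sketch of the observable behavior (values passed explicitly so the validators run; the token is a placeholder):

```python
from mbxai.openrouter import OpenRouterConfig

config = OpenRouterConfig(
    token="your-api-key",
    model="GPT4",
    base_url="https://openrouter.ai/api/v1",
)
print(config.base_url)  # "https://openrouter.ai/api/v1/"  (trailing slash enforced)
print(config.model)     # "openai/gpt-4"  (registry name resolved to its identifier)

try:
    OpenRouterConfig(token="   ")
except ValueError as exc:  # pydantic's ValidationError subclasses ValueError
    print(exc)
```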
mbxai/openrouter/models.py ADDED
@@ -0,0 +1,87 @@
+ """OpenRouter models and model registry."""
+
+ from enum import Enum
+ from typing import ClassVar
+
+ class OpenRouterModel(str, Enum):
+     """Built-in OpenRouter models."""
+
+     GPT4_TURBO = "openai/gpt-4-turbo-preview"
+     GPT4 = "openai/gpt-4"
+     GPT41 = "openai/gpt-4.1"
+     GPT35_TURBO = "openai/gpt-3.5-turbo"
+     CLAUDE_3_OPUS = "anthropic/claude-3-opus"
+     CLAUDE_3_SONNET = "anthropic/claude-3-sonnet"
+     CLAUDE_3_HAIKU = "anthropic/claude-3-haiku"
+     GEMINI_PRO = "google/gemini-pro"
+     MIXTRAL_8X7B = "mistral/mixtral-8x7b"
+     MISTRAL_MEDIUM = "mistral/mistral-medium"
+     MISTRAL_SMALL = "mistral/mistral-small"
+     MISTRAL_TINY = "mistral/mistral-tiny"
+
+ class OpenRouterModelRegistry:
+     """Registry for OpenRouter models."""
+
+     _custom_models: ClassVar[dict[str, str]] = {}
+     _initialized: ClassVar[bool] = False
+
+     @classmethod
+     def _initialize(cls) -> None:
+         """Initialize the registry if not already initialized."""
+         if not cls._initialized:
+             cls._custom_models = {}
+             cls._initialized = True
+
+     @classmethod
+     def register_model(cls, name: str, value: str) -> None:
+         """Register a new model.
+
+         Args:
+             name: The name of the model (e.g., "CUSTOM_MODEL")
+             value: The model identifier (e.g., "provider/model-name")
+
+         Raises:
+             ValueError: If the model name is already registered with a different value.
+         """
+         cls._initialize()
+         if name in cls._custom_models:
+             # If the value is the same, just return
+             if cls._custom_models[name] == value:
+                 return
+             raise ValueError(f"Model {name} is already registered")
+         cls._custom_models[name] = value
+
+     @classmethod
+     def get_model(cls, name: str) -> str:
+         """Get a model by name.
+
+         Args:
+             name: The name of the model
+
+         Returns:
+             The model identifier
+
+         Raises:
+             ValueError: If the model is not found.
+         """
+         cls._initialize()
+         try:
+             return OpenRouterModel[name].value
+         except KeyError:
+             try:
+                 return cls._custom_models[name]
+             except KeyError:
+                 raise ValueError(f"Model {name} not found")
+
+     @classmethod
+     def list_models(cls) -> dict[str, str]:
+         """List all available models.
+
+         Returns:
+             A dictionary mapping model names to their identifiers.
+         """
+         cls._initialize()
+         return {
+             **{model.name: model.value for model in OpenRouterModel},
+             **cls._custom_models,
+         }
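A sketch of the registry round trip; the custom identifier is made up:

```python
from mbxai.openrouter.models import OpenRouterModelRegistry

OpenRouterModelRegistry.register_model("MY_MODEL", "provider/my-model")
print(OpenRouterModelRegistry.get_model("MY_MODEL"))  # "provider/my-model"
print(OpenRouterModelRegistry.get_model("GPT4"))      # "openai/gpt-4" (built-in enum)

# Re-registering the same name with the same value is a no-op;
# a different value raises ValueError.
OpenRouterModelRegistry.register_model("MY_MODEL", "provider/my-model")
print("MY_MODEL" in OpenRouterModelRegistry.list_models())  # True
```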
mbxai/tools/__init__.py ADDED
@@ -0,0 +1,12 @@
+ """
+ Tools module for MBX AI.
+ """
+
+ from .client import ToolClient
+ from .types import Tool, ToolCall
+
+ __all__ = [
+     "ToolClient",
+     "Tool",
+     "ToolCall",
+ ]
mbxai/tools/client.py ADDED
@@ -0,0 +1,172 @@
+ """
+ ToolClient implementation for MBX AI.
+ """
+
+ import json
+ from typing import Any, Callable, TypeVar, cast
+ from pydantic import BaseModel
+ from ..openrouter import OpenRouterClient
+ from .types import Tool, ToolCall
+
+ T = TypeVar("T", bound=BaseModel)
+
+ class ToolClient:
+     """Client for handling tool calls with OpenRouter."""
+
+     def __init__(self, openrouter_client: OpenRouterClient) -> None:
+         """Initialize the ToolClient.
+
+         Args:
+             openrouter_client: The OpenRouter client to use
+         """
+         self._client = openrouter_client
+         self._tools: dict[str, Tool] = {}
+
+     def register_tool(
+         self,
+         name: str,
+         description: str,
+         function: Callable[..., Any],
+         schema: dict[str, Any],
+     ) -> None:
+         """Register a new tool.
+
+         Args:
+             name: The name of the tool
+             description: A description of what the tool does
+             function: The function to call when the tool is used
+             schema: The JSON schema for the tool's parameters
+         """
+         tool = Tool(
+             name=name,
+             description=description,
+             function=function,
+             schema=schema,
+         )
+         self._tools[name] = tool
+
+     def chat(
+         self,
+         messages: list[dict[str, Any]],
+         *,
+         model: str | None = None,
+         stream: bool = False,
+         **kwargs: Any,
+     ) -> Any:
+         """Chat with the model, handling tool calls.
+
+         Args:
+             messages: The conversation messages
+             model: Optional model override
+             stream: Whether to stream the response
+             **kwargs: Additional parameters for the chat completion
+
+         Returns:
+             The final response from the model
+         """
+         tools = [tool.to_openai_function() for tool in self._tools.values()]
+
+         while True:
+             # Add tools to the request if we have any
+             if tools:
+                 kwargs["tools"] = tools
+                 kwargs["tool_choice"] = "auto"
+
+             # Get the model's response
+             response = self._client.chat_completion(
+                 messages=messages,
+                 model=model,
+                 stream=stream,
+                 **kwargs,
+             )
+
+             if stream:
+                 return response
+
+             message = response.choices[0].message
+             messages.append({"role": "assistant", "content": message.content})
+
+             # If there are no tool calls, we're done
+             if not message.tool_calls:
+                 return response
+
+             # Handle each tool call
+             for tool_call in message.tool_calls:
+                 tool = self._tools.get(tool_call.function.name)
+                 if not tool:
+                     raise ValueError(f"Unknown tool: {tool_call.function.name}")
+
+                 # Call the tool (the API delivers arguments as a JSON string)
+                 arguments = json.loads(tool_call.function.arguments)
+                 result = tool.function(**arguments)
+
+                 # Add the tool response to the messages
+                 messages.append({
+                     "role": "tool",
+                     "tool_call_id": tool_call.id,
+                     "name": tool_call.function.name,
+                     "content": str(result),
+                 })
+
+     def parse(
+         self,
+         messages: list[dict[str, Any]],
+         response_format: type[T],
+         *,
+         model: str | None = None,
+         stream: bool = False,
+         **kwargs: Any,
+     ) -> Any:
+         """Chat with the model and parse the response into a Pydantic model.
+
+         Args:
+             messages: The conversation messages
+             response_format: The Pydantic model to parse the response into
+             model: Optional model override
+             stream: Whether to stream the response
+             **kwargs: Additional parameters for the chat completion
+
+         Returns:
+             The parsed response from the model
+         """
+         tools = [tool.to_openai_function() for tool in self._tools.values()]
+
+         while True:
+             # Add tools to the request if we have any
+             if tools:
+                 kwargs["tools"] = tools
+                 kwargs["tool_choice"] = "auto"
+
+             # Get the model's response
+             response = self._client.chat_completion_parse(
+                 messages=messages,
+                 response_format=response_format,
+                 model=model,
+                 stream=stream,
+                 **kwargs,
+             )
+
+             if stream:
+                 return response
+
+             message = response.choices[0].message
+             messages.append({"role": "assistant", "content": message.content})
+
+             # If there are no tool calls, we're done
+             if not message.tool_calls:
+                 return response
+
+             # Handle each tool call
+             for tool_call in message.tool_calls:
+                 tool = self._tools.get(tool_call.function.name)
+                 if not tool:
+                     raise ValueError(f"Unknown tool: {tool_call.function.name}")
+
+                 # Call the tool (the API delivers arguments as a JSON string)
+                 arguments = json.loads(tool_call.function.arguments)
+                 result = tool.function(**arguments)
+
+                 # Add the tool response to the messages
+                 messages.append({
+                     "role": "tool",
+                     "tool_call_id": tool_call.id,
+                     "name": tool_call.function.name,
+                     "content": str(result),
+                 })
mbxai/tools/example.py ADDED
@@ -0,0 +1,75 @@
+ """
+ Example usage of the ToolClient.
+ """
+
+ from pydantic import BaseModel
+ from ..openrouter import OpenRouterClient, OpenRouterModel
+ from .client import ToolClient
+
+ # Define a Pydantic model for structured output
+ class WeatherInfo(BaseModel):
+     """Weather information for a location."""
+     location: str
+     temperature: float
+     conditions: str
+     humidity: float
+
+ # Example tool function
+ def get_weather(location: str) -> str:
+     """Get the current weather for a location.
+
+     Args:
+         location: The location to get weather for
+
+     Returns:
+         A string describing the current weather
+     """
+     # In a real implementation, this would call a weather API
+     return f"The weather in {location} is sunny with a temperature of 25°C and 60% humidity."
+
+ def main() -> None:
+     # Initialize the OpenRouter client
+     client = OpenRouterClient(token="your-api-key")
+
+     # Initialize the ToolClient
+     tool_client = ToolClient(client)
+
+     # Register the weather tool
+     tool_client.register_tool(
+         name="get_weather",
+         description="Get the current weather for a location",
+         function=get_weather,
+         schema={
+             "type": "object",
+             "properties": {
+                 "location": {
+                     "type": "string",
+                     "description": "The location to get weather for"
+                 }
+             },
+             "required": ["location"]
+         }
+     )
+
+     # Example 1: Simple chat with tool usage
+     messages = [
+         {"role": "user", "content": "What's the weather like in New York?"}
+     ]
+
+     response = tool_client.chat(messages)
+     print(response.choices[0].message.content)
+
+     # Example 2: Structured output with tool usage
+     messages = [
+         {"role": "user", "content": "Get the weather in London and format it as structured data"}
+     ]
+
+     response = tool_client.parse(messages, WeatherInfo)
+     weather_info = response.choices[0].message.parsed
+     print(f"Location: {weather_info.location}")
+     print(f"Temperature: {weather_info.temperature}°C")
+     print(f"Conditions: {weather_info.conditions}")
+     print(f"Humidity: {weather_info.humidity}%")
+
+ if __name__ == "__main__":
+     main()
mbxai/tools/types.py ADDED
@@ -0,0 +1,33 @@
+ """
+ Type definitions for the tools package.
+ """
+
+ from typing import Any, Callable, TypedDict, NotRequired
+ from pydantic import BaseModel
+
+ class ToolFunction(TypedDict):
+     """OpenAI function definition for a tool."""
+     name: str
+     description: str
+     parameters: dict[str, Any]
+
+ class ToolCall(BaseModel):
+     """A tool call from the model."""
+     id: str
+     name: str
+     arguments: dict[str, Any]
+
+ class Tool(BaseModel):
+     """A tool that can be used by the model."""
+     name: str
+     description: str
+     function: Callable[..., Any]
+     schema: dict[str, Any]
+
+     def to_openai_function(self) -> ToolFunction:
+         """Convert the tool to an OpenAI function definition."""
+         return {
+             "name": self.name,
+             "description": self.description,
+             "parameters": self.schema,
+         }
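A minimal sketch of how a `Tool` surfaces as an OpenAI function spec; the `add` tool itself is illustrative (note that a field named `schema` shadows a deprecated `BaseModel` attribute, which pydantic allows with a warning):

```python
from mbxai.tools.types import Tool

tool = Tool(
    name="add",
    description="Add two integers.",
    function=lambda a, b: a + b,
    schema={
        "type": "object",
        "properties": {"a": {"type": "integer"}, "b": {"type": "integer"}},
        "required": ["a", "b"],
    },
)
print(tool.to_openai_function())
# {'name': 'add', 'description': 'Add two integers.', 'parameters': {...}}
```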
mbxai-0.1.0.dist-info/METADATA ADDED
@@ -0,0 +1,168 @@
+ Metadata-Version: 2.4
+ Name: mbxai
+ Version: 0.1.0
+ Summary: MBX AI SDK
+ Project-URL: Homepage, https://www.mibexx.de
+ Project-URL: Documentation, https://www.mibexx.de
+ Project-URL: Repository, https://github.com/yourusername/mbxai.git
+ Author: MBX AI
+ License: MIT
+ License-File: LICENSE
+ Classifier: Development Status :: 4 - Beta
+ Classifier: Operating System :: OS Independent
+ Classifier: Programming Language :: Python
+ Classifier: Programming Language :: Python :: 3.12
+ Requires-Python: >=3.12
+ Requires-Dist: fastapi>=0.115.12
+ Requires-Dist: httpx>=0.27.0
+ Requires-Dist: mcp>=1.7.1
+ Requires-Dist: openai>=1.77.0
+ Requires-Dist: pydantic-settings>=2.9.1
+ Requires-Dist: pydantic>=2.9.1
+ Requires-Dist: python-multipart>=0.0.20
+ Requires-Dist: sse-starlette>=2.3.4
+ Requires-Dist: starlette>=0.46.2
+ Requires-Dist: uvicorn>=0.34.2
+ Provides-Extra: dev
+ Requires-Dist: black>=24.3.0; extra == 'dev'
+ Requires-Dist: isort>=5.13.2; extra == 'dev'
+ Requires-Dist: mypy>=1.8.0; extra == 'dev'
+ Requires-Dist: pytest-asyncio>=0.26.0; extra == 'dev'
+ Requires-Dist: pytest-cov>=6.1.1; extra == 'dev'
+ Requires-Dist: pytest>=8.3.5; extra == 'dev'
+ Description-Content-Type: text/markdown
+
+ # MBX AI
+
+ A Python library for building AI applications with LLMs.
+
+ ## Features
+
+ - **OpenRouter Integration**: Connect to various LLM providers through OpenRouter
+ - **Tool Integration**: Easily integrate tools with LLMs using the Model Context Protocol (MCP)
+ - **Structured Output**: Get structured, typed responses from LLMs
+ - **Chat Interface**: Simple chat interface for interacting with LLMs
+ - **FastAPI Server**: Built-in FastAPI server for tool integration
+
+ ## Installation
+
+ ```bash
+ pip install mbxai
+ ```
+
+ ## Quick Start
+
+ ### Basic Usage
+
+ ```python
+ from mbxai.openrouter import OpenRouterClient
+
+ # Initialize the client
+ client = OpenRouterClient(token="your-api-key")
+
+ # Chat with an LLM
+ response = client.chat_completion([
+     {"role": "user", "content": "Hello, how are you?"}
+ ])
+ print(response.choices[0].message.content)
+ ```
+
+ ### Using Tools
+
+ ```python
+ from mbxai.openrouter import OpenRouterClient
+ from mbxai.tools import ToolClient
+
+ # Create a calculator tool
+ def calculator(a: float, b: float) -> float:
+     return a + b
+
+ # Initialize the client and register the tool with its JSON schema
+ client = ToolClient(OpenRouterClient(token="your-api-key"))
+ client.register_tool(
+     name="calculator",
+     description="Add two numbers",
+     function=calculator,
+     schema={
+         "type": "object",
+         "properties": {
+             "a": {"type": "number"},
+             "b": {"type": "number"}
+         },
+         "required": ["a", "b"]
+     },
+ )
+
+ # Use the tool in a chat
+ response = client.chat([
+     {"role": "user", "content": "What is 2 + 3?"}
+ ])
+ print(response.choices[0].message.content)
+ ```
+
+ ### Using MCP (Model Context Protocol)
+
+ ```python
+ from mbxai.openrouter import OpenRouterClient
+ from mbxai.mcp import MCPClient, MCPServer
+ from mcp.server.fastmcp import FastMCP
+ from pydantic import BaseModel
+
+ # Define your tool's input and output models
+ class CalculatorInput(BaseModel):
+     a: float
+     b: float
+
+ class CalculatorOutput(BaseModel):
+     result: float
+
+ # Create a FastMCP instance
+ mcp = FastMCP("calculator-service")
+
+ # Create a calculator tool
+ @mcp.tool()
+ async def calculator(argument: CalculatorInput) -> CalculatorOutput:
+     return CalculatorOutput(result=argument.a + argument.b)
+
+ # Register the tool with the MCP server
+ server = MCPServer("calculator-service")
+ await server.add_tool(calculator)
+ # Serve the FastAPI app, e.g.: uvicorn.run(server.app, port=8000)
+
+ # Initialize the MCP client
+ client = MCPClient(OpenRouterClient(token="your-api-key"))
+ await client.register_mcp_server("calculator-service", "http://localhost:8000")
+
+ # Use the tool in a chat (chat is synchronous)
+ response = client.chat([
+     {"role": "user", "content": "What is 2 + 3?"}
+ ])
+ print(response.choices[0].message.content)
+ ```
+
+ ## Development
+
+ ### Setup
+
+ 1. Clone the repository:
+ ```bash
+ git clone https://github.com/yourusername/mbxai.git
+ cd mbxai
+ ```
+
+ 2. Create a virtual environment:
+ ```bash
+ python -m venv .venv
+ source .venv/bin/activate  # On Windows: .venv\Scripts\activate
+ ```
+
+ 3. Install dependencies:
+ ```bash
+ pip install -e ".[dev]"
+ ```
+
+ ### Running Tests
+
+ ```bash
+ pytest tests/
+ ```
+
+ ## License
+
+ MIT License
mbxai-0.1.0.dist-info/RECORD ADDED
@@ -0,0 +1,18 @@
+ mbxai/__init__.py,sha256=K4smKVZXrhcJYppSx5ZVow5BySoCQFVW6fgtPb9MUec,47
+ mbxai/core.py,sha256=WMvmU9TTa7M_m-qWsUew4xH8Ul6xseCZ2iBCXJTW-Bs,196
+ mbxai/mcp/__init__.py,sha256=_ek9iYdYqW5saKetj4qDci11jxesQDiHPJRpHMKkxgU,175
+ mbxai/mcp/client.py,sha256=rVYYsWSSUWmF9lWHFeFiPOnqD5yU-N1A9ApXekfhwVg,4411
+ mbxai/mcp/example.py,sha256=oaol7AvvZnX86JWNz64KvPjab5gg1VjVN3G8eFSzuaE,2350
+ mbxai/mcp/server.py,sha256=pV6GskuQvutmXw0bsazlZdGktAcw9u0tF_o-AYFIGQg,3495
+ mbxai/openrouter/__init__.py,sha256=Ito9Qp_B6q-RLGAQcYyTJVWwR2YAZvNqE-HIYXxhtD8,298
+ mbxai/openrouter/client.py,sha256=NPccB7Slou5tSc8DvXvItLdoDMgU0pcxW1lKcRKQXp0,8865
+ mbxai/openrouter/config.py,sha256=MTX_YHsFrM7JYqovJSkEF6JzVyIdajeI5Dja2CALH58,2874
+ mbxai/openrouter/models.py,sha256=b3IjjtZAjeGOf2rLsdnCD1HacjTnS8jmv_ZXorc-KJQ,2604
+ mbxai/tools/__init__.py,sha256=QUFaXhDm-UKcuAtT1rbKzhBkvyRBVokcQIOf9cxIuwc,160
+ mbxai/tools/client.py,sha256=PDkDTvpwAs8-F8CcF6wjjq2Z-VhYpjA9RXTK64Rwa3Y,5418
+ mbxai/tools/example.py,sha256=1HgKK39zzUuwFbnp3f0ThyWVfA_8P28PZcTwaUw5K78,2232
+ mbxai/tools/types.py,sha256=ZHnmiDXpH6wZhiZ-Tj9PiPMJaW1aiDAq5It2gpiwNp0,831
+ mbxai-0.1.0.dist-info/METADATA,sha256=_pSF7GYV38BFNkaHUR0FUj4NAzH2fH49R32n0HXqYWI,4107
+ mbxai-0.1.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ mbxai-0.1.0.dist-info/licenses/LICENSE,sha256=hEyhc4FxwYo3NQ40yNgZ7STqwVk-1_XcTXOnAPbGJAw,1069
+ mbxai-0.1.0.dist-info/RECORD,,
mbxai-0.1.0.dist-info/WHEEL ADDED
@@ -0,0 +1,4 @@
+ Wheel-Version: 1.0
+ Generator: hatchling 1.27.0
+ Root-Is-Purelib: true
+ Tag: py3-none-any
mbxai-0.1.0.dist-info/licenses/LICENSE ADDED
@@ -0,0 +1,21 @@
+ MIT License
+
+ Copyright (c) 2024 Mike Bertram
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.