mbxai 0.7.3__tar.gz → 0.8.1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (28)
  1. {mbxai-0.7.3 → mbxai-0.8.1}/PKG-INFO +1 -1
  2. {mbxai-0.7.3 → mbxai-0.8.1}/pyproject.toml +1 -1
  3. {mbxai-0.7.3 → mbxai-0.8.1}/setup.py +1 -1
  4. {mbxai-0.7.3 → mbxai-0.8.1}/src/mbxai/__init__.py +1 -1
  5. mbxai-0.8.1/src/mbxai/examples/openrouter_example.py +45 -0
  6. mbxai-0.8.1/src/mbxai/examples/parse_example.py +99 -0
  7. mbxai-0.8.1/src/mbxai/examples/parse_tool_example.py +144 -0
  8. mbxai-0.8.1/src/mbxai/examples/tool_client_example.py +126 -0
  9. {mbxai-0.7.3 → mbxai-0.8.1}/src/mbxai/mcp/client.py +2 -2
  10. {mbxai-0.7.3 → mbxai-0.8.1}/src/mbxai/mcp/server.py +1 -1
  11. {mbxai-0.7.3 → mbxai-0.8.1}/src/mbxai/openrouter/client.py +21 -94
  12. {mbxai-0.7.3 → mbxai-0.8.1}/src/mbxai/tools/client.py +54 -76
  13. mbxai-0.7.3/tests/test_core.py +0 -9
  14. mbxai-0.7.3/tests/test_mcp.py +0 -355
  15. mbxai-0.7.3/tests/test_openrouter.py +0 -485
  16. mbxai-0.7.3/tests/test_tools.py +0 -319
  17. {mbxai-0.7.3 → mbxai-0.8.1}/.gitignore +0 -0
  18. {mbxai-0.7.3 → mbxai-0.8.1}/LICENSE +0 -0
  19. {mbxai-0.7.3 → mbxai-0.8.1}/README.md +0 -0
  20. {mbxai-0.7.3 → mbxai-0.8.1}/src/mbxai/core.py +0 -0
  21. {mbxai-0.7.3 → mbxai-0.8.1}/src/mbxai/mcp/__init__.py +0 -0
  22. {mbxai-0.7.3 → mbxai-0.8.1}/src/mbxai/mcp/example.py +0 -0
  23. {mbxai-0.7.3 → mbxai-0.8.1}/src/mbxai/openrouter/__init__.py +0 -0
  24. {mbxai-0.7.3 → mbxai-0.8.1}/src/mbxai/openrouter/config.py +0 -0
  25. {mbxai-0.7.3 → mbxai-0.8.1}/src/mbxai/openrouter/models.py +0 -0
  26. {mbxai-0.7.3 → mbxai-0.8.1}/src/mbxai/tools/__init__.py +0 -0
  27. {mbxai-0.7.3 → mbxai-0.8.1}/src/mbxai/tools/example.py +0 -0
  28. {mbxai-0.7.3 → mbxai-0.8.1}/src/mbxai/tools/types.py +0 -0
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: mbxai
- Version: 0.7.3
+ Version: 0.8.1
  Summary: MBX AI SDK
  Project-URL: Homepage, https://www.mibexx.de
  Project-URL: Documentation, https://www.mibexx.de
@@ -4,7 +4,7 @@ build-backend = "hatchling.build"
 
  [project]
  name = "mbxai"
- version = "0.7.3"
+ version = "0.8.1"
  authors = [
      { name = "MBX AI" }
  ]
@@ -2,7 +2,7 @@ from setuptools import setup, find_packages
 
  setup(
      name="mbxai",
-     version="0.7.3",
+     version="0.8.1",
      author="MBX AI",
      description="MBX AI SDK",
      long_description=open("README.md").read(),
@@ -2,4 +2,4 @@
  MBX AI package.
  """
 
- __version__ = "0.7.3"
+ __version__ = "0.8.1"
@@ -0,0 +1,45 @@
+ """
+ Example script demonstrating basic usage of the OpenRouterClient.
+ """
+
+ import os
+ import logging
+ from mbxai.openrouter.client import OpenRouterClient, OpenRouterModel
+
+ # Configure logging
+ logging.basicConfig(
+     level=logging.INFO,
+     format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
+ )
+ logger = logging.getLogger(__name__)
+
+ def main():
+     # Get API token from environment variable
+     token = os.getenv("OPENROUTER_API_KEY")
+     if not token:
+         logger.error("OPENROUTER_API_KEY environment variable not set")
+         raise ValueError("Please set the OPENROUTER_API_KEY environment variable")
+
+     logger.info("Initializing OpenRouterClient with GPT-4 Turbo")
+     # Initialize the client
+     client = OpenRouterClient(
+         token=token,
+         model=OpenRouterModel.GPT4_TURBO # Using GPT-4 Turbo as default
+     )
+
+     # Example messages
+     messages = [
+         {"role": "system", "content": "You are a helpful assistant."},
+         {"role": "user", "content": "What is the capital of France?"}
+     ]
+
+     logger.info("Sending request to OpenRouter API")
+     # Send the request
+     response = client.create(messages=messages)
+
+     # Log the response
+     logger.info("Received response from OpenRouter API")
+     logger.info(f"Response: {response}")
+
+ if __name__ == "__main__":
+     main()
@@ -0,0 +1,99 @@
+ """
+ Example script demonstrating how to use the parse function with OpenRouterClient.
+ """
+
+ import os
+ import logging
+ from typing import Any
+ from pydantic import BaseModel, Field
+ from mbxai.openrouter.client import OpenRouterClient, OpenRouterModel
+
+ # Configure logging
+ logging.basicConfig(
+     level=logging.INFO,
+     format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
+ )
+ logger = logging.getLogger(__name__)
+
+ # Define a Pydantic model for structured weather data
+ class WeatherData(BaseModel):
+     """Weather data for a location."""
+     location: str = Field(..., description="The city name")
+     temperature: float = Field(..., description="Temperature in Celsius")
+     condition: str = Field(..., description="Weather condition (e.g., sunny, cloudy)")
+     humidity: float = Field(..., description="Humidity percentage")
+     wind_speed: float = Field(..., description="Wind speed in km/h")
+     feels_like: float = Field(..., description="Feels like temperature in Celsius")
+     precipitation_chance: float = Field(..., description="Chance of precipitation as a percentage")
+
+ async def main():
+     # Get API token from environment variable
+     token = os.getenv("OPENROUTER_API_KEY")
+     if not token:
+         raise ValueError("Please set the OPENROUTER_API_KEY environment variable")
+
+     # Initialize the OpenRouter client
+     logger.info("Initializing OpenRouter client")
+     client = OpenRouterClient(
+         token=token,
+         model=OpenRouterModel.GPT41
+     )
+
+     # Example 1: Parse weather data for a single location
+     logger.info("Parsing weather data for New York")
+     messages = [
+         {
+             "role": "user",
+             "content": "What's the current weather in New York? Please provide temperature, condition, humidity, wind speed, feels like temperature, and precipitation chance."
+         }
+     ]
+
+     response = client.parse(
+         messages=messages,
+         response_format=WeatherData,
+         timeout=30.0,
+     )
+
+     weather_data = response.choices[0].message.parsed
+     print("\nWeather data for New York:")
+     print(f"Location: {weather_data.location}")
+     print(f"Temperature: {weather_data.temperature}°C")
+     print(f"Condition: {weather_data.condition}")
+     print(f"Humidity: {weather_data.humidity}%")
+     print(f"Wind Speed: {weather_data.wind_speed} km/h")
+     print(f"Feels Like: {weather_data.feels_like}°C")
+     print(f"Precipitation Chance: {weather_data.precipitation_chance}%")
+
+     # Example 2: Parse weather data for multiple locations
+     logger.info("\nParsing weather data for multiple locations")
+     messages = [
+         {
+             "role": "user",
+             "content": "Compare the weather in London and Tokyo. For each city, provide temperature, condition, humidity, wind speed, feels like temperature, and precipitation chance."
+         }
+     ]
+
+     class MultiLocationWeather(BaseModel):
+         """Weather data for multiple locations."""
+         locations: list[WeatherData] = Field(..., description="List of weather data for different locations")
+
+     response = client.parse(
+         messages=messages,
+         response_format=MultiLocationWeather,
+         timeout=30.0,
+     )
+
+     multi_weather = response.choices[0].message.parsed
+     print("\nWeather comparison:")
+     for location_data in multi_weather.locations:
+         print(f"\n{location_data.location}:")
+         print(f"Temperature: {location_data.temperature}°C")
+         print(f"Condition: {location_data.condition}")
+         print(f"Humidity: {location_data.humidity}%")
+         print(f"Wind Speed: {location_data.wind_speed} km/h")
+         print(f"Feels Like: {location_data.feels_like}°C")
+         print(f"Precipitation Chance: {location_data.precipitation_chance}%")
+
+ if __name__ == "__main__":
+     import asyncio
+     asyncio.run(main())
@@ -0,0 +1,144 @@
+ """
+ Example script demonstrating how to use both parse and tools with OpenRouterClient.
+ """
+
+ import os
+ import logging
+ import random
+ from typing import Any
+ from pydantic import BaseModel, Field
+ from mbxai.openrouter.client import OpenRouterClient, OpenRouterModel
+ from mbxai.tools.client import ToolClient
+
+ # Configure logging
+ logging.basicConfig(
+     level=logging.INFO,
+     format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
+ )
+ logger = logging.getLogger(__name__)
+
+ # Define a Pydantic model for structured weather data
+ class WeatherData(BaseModel):
+     """Weather data for a location."""
+     location: str = Field(..., description="The city name")
+     temperature: float = Field(..., description="Temperature in Celsius")
+     condition: str = Field(..., description="Weather condition (e.g., sunny, cloudy)")
+     humidity: float = Field(..., description="Humidity percentage")
+     wind_speed: float = Field(..., description="Wind speed in km/h")
+     feels_like: float = Field(..., description="Feels like temperature in Celsius")
+     precipitation_chance: float = Field(..., description="Chance of precipitation as a percentage")
+
+ # Mock weather data for demonstration
+ WEATHER_DATA = {
+     "new york": {"temperature": 22.5, "condition": "sunny", "humidity": 65, "wind_speed": 12, "feels_like": 23.0, "precipitation_chance": 10},
+     "london": {"temperature": 18.2, "condition": "cloudy", "humidity": 75, "wind_speed": 8, "feels_like": 17.5, "precipitation_chance": 40},
+     "tokyo": {"temperature": 25.7, "condition": "clear", "humidity": 60, "wind_speed": 5, "feels_like": 26.0, "precipitation_chance": 5},
+     "paris": {"temperature": 20.1, "condition": "partly cloudy", "humidity": 70, "wind_speed": 10, "feels_like": 19.5, "precipitation_chance": 20},
+ }
+
+ def get_weather(location: str) -> dict[str, Any]:
+     """Get weather information for a location.
+
+     Args:
+         location: The city name to get weather for
+
+     Returns:
+         Weather information including temperature, condition, humidity, and wind speed
+     """
+     logger.info(f"Getting weather for location: {location}")
+
+     # Convert location to lowercase for case-insensitive matching
+     location = location.lower()
+
+     # Get weather data or generate random data for unknown locations
+     if location in WEATHER_DATA:
+         weather = WEATHER_DATA[location]
+     else:
+         logger.warning(f"No weather data for {location}, generating random data")
+         weather = {
+             "temperature": round(random.uniform(15, 30), 1),
+             "condition": random.choice(["sunny", "cloudy", "clear", "partly cloudy"]),
+             "humidity": round(random.uniform(50, 90)),
+             "wind_speed": round(random.uniform(5, 20)),
+             "feels_like": round(random.uniform(15, 30), 1),
+             "precipitation_chance": round(random.uniform(0, 100))
+         }
+
+     # Create WeatherData instance
+     weather_data = WeatherData(
+         location=location.title(),
+         temperature=weather["temperature"],
+         condition=weather["condition"],
+         humidity=weather["humidity"],
+         wind_speed=weather["wind_speed"],
+         feels_like=weather["feels_like"],
+         precipitation_chance=weather["precipitation_chance"]
+     )
+
+     logger.info(f"Weather data retrieved: {weather_data}")
+     return weather_data.model_dump()
+
+ async def main():
+     # Get API token from environment variable
+     token = os.getenv("OPENROUTER_API_KEY")
+     if not token:
+         raise ValueError("Please set the OPENROUTER_API_KEY environment variable")
+
+     # Initialize the OpenRouter client
+     logger.info("Initializing OpenRouter client")
+     openrouter_client = OpenRouterClient(
+         token=token,
+         model=OpenRouterModel.GPT41
+     )
+
+     # Initialize the ToolClient
+     logger.info("Initializing ToolClient")
+     tool_client = ToolClient(openrouter_client)
+
+     # Register the weather tool
+     logger.info("Registering weather tool")
+     tool_client.register_tool(
+         name="get_weather",
+         description="Get the current weather for a location",
+         function=get_weather,
+         schema={
+             "type": "object",
+             "properties": {
+                 "location": {
+                     "type": "string",
+                     "description": "The city name to get weather for"
+                 }
+             },
+             "required": ["location"]
+         }
+     )
+
+     # Example 1: Get weather for a single location using tools and parse
+     logger.info("Getting weather for New York using tools and parse")
+     messages = [
+         {
+             "role": "user",
+             "content": "What's the current weather in New York? Use the get_weather tool and format the response according to the WeatherData model."
+         }
+     ]
+
+     response = tool_client.parse(
+         messages=messages,
+         response_format=WeatherData,
+         timeout=30.0,
+     )
+
+     weather_data = response.choices[0].message.parsed
+     print("\nWeather data for New York:")
+     print(f"Location: {weather_data.location}")
+     print(f"Temperature: {weather_data.temperature}°C")
+     print(f"Condition: {weather_data.condition}")
+     print(f"Humidity: {weather_data.humidity}%")
+     print(f"Wind Speed: {weather_data.wind_speed} km/h")
+     print(f"Feels Like: {weather_data.feels_like}°C")
+     print(f"Precipitation Chance: {weather_data.precipitation_chance}%")
+
+
+ if __name__ == "__main__":
+     import asyncio
+     asyncio.run(main())
@@ -0,0 +1,126 @@
+ """
+ Example script demonstrating how to use the ToolClient with a custom Weather Tool.
+ """
+
+ import os
+ import logging
+ import random
+ from typing import Any
+ from pydantic import BaseModel
+ from mbxai.openrouter.client import OpenRouterClient, OpenRouterModel
+ from mbxai.tools.client import ToolClient
+
+ # Configure logging
+ logging.basicConfig(
+     level=logging.INFO,
+     format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
+ )
+ logger = logging.getLogger(__name__)
+
+ # Define the weather data model
+ class WeatherData(BaseModel):
+     """Weather data for a location."""
+     location: str
+     temperature: float
+     condition: str
+     humidity: float
+     wind_speed: float
+
+ # Mock weather data for demonstration
+ WEATHER_DATA = {
+     "new york": {"temperature": 22.5, "condition": "sunny", "humidity": 65, "wind_speed": 12},
+     "london": {"temperature": 18.2, "condition": "cloudy", "humidity": 75, "wind_speed": 8},
+     "tokyo": {"temperature": 25.7, "condition": "clear", "humidity": 60, "wind_speed": 5},
+     "paris": {"temperature": 20.1, "condition": "partly cloudy", "humidity": 70, "wind_speed": 10},
+ }
+
+ def get_weather(location: str) -> dict[str, Any]:
+     """Get weather information for a location.
+
+     Args:
+         location: The city name to get weather for
+
+     Returns:
+         Weather information including temperature, condition, humidity, and wind speed
+     """
+     logger.info(f"Getting weather for location: {location}")
+
+     # Convert location to lowercase for case-insensitive matching
+     location = location.lower()
+
+     # Get weather data or generate random data for unknown locations
+     if location in WEATHER_DATA:
+         weather = WEATHER_DATA[location]
+     else:
+         logger.warning(f"No weather data for {location}, generating random data")
+         weather = {
+             "temperature": round(random.uniform(15, 30), 1),
+             "condition": random.choice(["sunny", "cloudy", "clear", "partly cloudy"]),
+             "humidity": round(random.uniform(50, 90)),
+             "wind_speed": round(random.uniform(5, 20))
+         }
+
+     # Create WeatherData instance
+     weather_data = WeatherData(
+         location=location.title(),
+         temperature=weather["temperature"],
+         condition=weather["condition"],
+         humidity=weather["humidity"],
+         wind_speed=weather["wind_speed"]
+     )
+
+     logger.info(f"Weather data retrieved: {weather_data}")
+     return weather_data.model_dump()
+
+ async def main():
+     # Get API token from environment variable
+     token = os.getenv("OPENROUTER_API_KEY")
+     if not token:
+         raise ValueError("Please set the OPENROUTER_API_KEY environment variable")
+
+     # Initialize the OpenRouter client
+     logger.info("Initializing OpenRouter client")
+     openrouter_client = OpenRouterClient(
+         token=token,
+         model=OpenRouterModel.GPT35_TURBO
+     )
+
+     # Initialize the ToolClient
+     logger.info("Initializing ToolClient")
+     tool_client = ToolClient(openrouter_client)
+
+     # Register the weather tool
+     logger.info("Registering weather tool")
+     tool_client.register_tool(
+         name="get_weather",
+         description="Get the current weather for a location",
+         function=get_weather,
+         schema={
+             "type": "object",
+             "properties": {
+                 "location": {
+                     "type": "string",
+                     "description": "The city name to get weather for"
+                 }
+             },
+             "required": ["location"]
+         }
+     )
+
+     # Example 1: Simple weather query
+     logger.info("Sending weather query for New York")
+     messages = [
+         {"role": "user", "content": "What's the weather like in New York?"}
+     ]
+
+     response = tool_client.chat(
+         messages,
+         timeout=30.0,
+     )
+     logger.info("Received response from model")
+     print("\nResponse for New York weather:")
+     print(response)
+
+ if __name__ == "__main__":
+     import asyncio
+     asyncio.run(main())
@@ -60,7 +60,7 @@ class MCPClient(ToolClient):
 
      def _create_tool_function(self, tool: MCPTool) -> Callable[..., Any]:
          """Create a function that invokes an MCP tool."""
-         async def tool_function(**kwargs: Any) -> Any:
+         def tool_function(**kwargs: Any) -> Any:
              # If kwargs has input wrapper, unwrap it
              if "input" in kwargs:
                  kwargs = kwargs["input"]
@@ -75,7 +75,7 @@ class MCPClient(ToolClient):
              url = f"{server_url}/tools/{tool.name}/invoke"
 
              # Make the HTTP request to the tool's URL
-             response = await self._http_client.post(
+             response = self._http_client.post(
                  url,
                  json={"input": kwargs} if tool.strict else kwargs,
                  timeout=300.0 # 5 minutes timeout
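Taken on its own, this +2/-2 change makes the MCP tool wrapper fully synchronous: `tool_function` is a plain function and the POST is no longer awaited, which only works if `_http_client` is a synchronous HTTP client. A minimal sketch of the resulting call path, assuming an `httpx.Client` and a server exposing `/tools/<name>/invoke` (both assumptions not shown in this diff; the helper name is hypothetical):

    import httpx

    # Hypothetical, trimmed-down version of the synchronous tool wrapper.
    def make_tool_function(http_client: httpx.Client, server_url: str, name: str, strict: bool):
        def tool_function(**kwargs):
            if "input" in kwargs:  # unwrap the optional input wrapper, as in the diff above
                kwargs = kwargs["input"]
            response = http_client.post(
                f"{server_url}/tools/{name}/invoke",
                json={"input": kwargs} if strict else kwargs,
                timeout=300.0,  # 5 minutes, matching the diff
            )
            response.raise_for_status()
            return response.json()
        return tool_function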
@@ -31,7 +31,7 @@ class MCPServer:
          self.app = FastAPI(
              title=self.name,
              description=self.description,
-             version="0.7.3",
+             version="0.8.1",
          )
 
          # Initialize MCP server
@@ -4,7 +4,6 @@ OpenRouter client implementation.
 
  from typing import Any, Optional, Union
  from openai import OpenAI, OpenAIError
- from pydantic import BaseModel, TypeAdapter, Field
  from .models import OpenRouterModel, OpenRouterModelRegistry
  from .config import OpenRouterConfig
  import logging
@@ -95,7 +94,7 @@ class OpenRouterClient:
      def __init__(
          self,
          token: str,
-         model: Union[str, OpenRouterModel] = OpenRouterModel.GPT4_TURBO,
+         model: Union[str, OpenRouterModel] = OpenRouterModel.GPT35_TURBO,
          base_url: Optional[str] = None,
          default_headers: Optional[dict[str, str]] = None,
          max_retries: int = 3,
@@ -106,7 +105,7 @@ class OpenRouterClient:
 
          Args:
              token: The OpenRouter API token
-             model: The model to use (default: GPT4_TURBO)
+             model: The model to use (default: GPT35_TURBO)
              base_url: Optional custom base URL for the API
              default_headers: Optional default headers for API requests
              max_retries: Maximum number of retry attempts (default: 3)
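Note that the default model drops from GPT-4 Turbo to GPT-3.5 Turbo here, so code that relied on the old default silently changes behavior on upgrade. A minimal sketch of pinning the model explicitly, using enum members that appear elsewhere in this diff (the token value is a placeholder):

    from mbxai.openrouter.client import OpenRouterClient, OpenRouterModel

    # Pin the model instead of relying on the changed default.
    client = OpenRouterClient(token="sk-...", model=OpenRouterModel.GPT4_TURBO)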
@@ -187,7 +186,7 @@ class OpenRouterClient:
          self.model = value
 
      @with_retry()
-     def chat_completion(
+     def create(
          self,
          messages: list[dict[str, Any]],
          *,
@@ -205,21 +204,15 @@ class OpenRouterClient:
              total_size = sum(len(str(msg)) for msg in messages)
              logger.info(f"Total message size: {total_size} bytes")
 
-             response = self._client.responses.create(
-                 input=messages,
-                 model=model or self.model,
-                 stream=stream,
+             request = {
+                 "model": model or self.model,
+                 "messages": messages,
+                 "stream": stream,
                  **kwargs,
-             )
+             }
 
-             # Log response details
-             logger.info("Received response from OpenRouter")
-             # Log the actual response content
-             logger.info(f"Response content: {response}")
-             if hasattr(response, 'output'):
-                 logger.info(f"Response output length: {len(response.output) if response.output else 0}")
-             if hasattr(response, 'output_text'):
-                 logger.info(f"Response output_text length: {len(response.output_text) if response.output_text else 0}")
+             response = self._client.chat.completions.create(**request)
+             logger.info(f"Received response from OpenRouter: {len(response.choices)} choices")
 
              return response
 
@@ -237,31 +230,15 @@ class OpenRouterClient:
              self._handle_api_error("chat completion", e)
 
      @with_retry()
-     def chat_completion_parse(
+     def parse(
          self,
          messages: list[dict[str, Any]],
-         response_format: type[BaseModel],
+         response_format: object,
          *,
-         model: Optional[Union[str, OpenRouterModel]] = None,
+         model: str | None = None,
          **kwargs: Any,
      ) -> Any:
-         """Create a chat completion and parse the response.
-
-         Args:
-             messages: list of messages
-             response_format: Pydantic model to parse the response into
-             model: Optional model override
-             **kwargs: Additional parameters
-
-         Returns:
-             Parsed completion response with output and output_parsed fields
-
-         Raises:
-             OpenRouterConnectionError: For connection issues
-             OpenRouterAPIError: For API errors
-             OpenRouterError: For other errors
-             ValueError: If response parsing fails
-         """
+         """Get a chat completion from OpenRouter."""
          try:
              # Log the request details
              logger.info(f"Sending chat completion request to OpenRouter with model: {model or self.model}")
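Together with the `chat_completion` → `create` rename above, this hunk renames `chat_completion_parse` to `parse`; the surrounding hunks switch both methods from the `responses` API to the standard chat-completions endpoints, so structured output lands on `choices[0].message.parsed` instead of `output_parsed`, and the `embeddings()` helper is removed entirely in the final hunk with no replacement in this diff. A rough migration sketch for callers, with names taken from this diff and the response shapes hedged to what the new example scripts use:

    import os
    from pydantic import BaseModel
    from mbxai.openrouter.client import OpenRouterClient

    class City(BaseModel):
        name: str
        country: str

    client = OpenRouterClient(token=os.environ["OPENROUTER_API_KEY"])
    messages = [{"role": "user", "content": "Which city is the capital of France?"}]

    # 0.7.3 (old names, responses-API shapes):
    #   text = client.chat_completion(messages).output_text
    #   city = client.chat_completion_parse(messages, response_format=City).output_parsed

    # 0.8.1 (new names, OpenAI chat-completions shapes):
    text = client.create(messages=messages).choices[0].message.content
    city = client.parse(messages=messages, response_format=City).choices[0].message.parsed
    print(text, city)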
@@ -271,26 +248,15 @@ class OpenRouterClient:
              total_size = sum(len(str(msg)) for msg in messages)
              logger.info(f"Total message size: {total_size} bytes")
 
-             # Use responses.parse for structured output
-             response = self._client.responses.parse(
-                 model=model or self.model,
-                 input=messages,
-                 text_format=response_format,
+             request = {
+                 "model": model or self.model,
+                 "messages": messages,
+                 "response_format": response_format,
                  **kwargs,
-             )
-
-             if not response:
-                 logger.error(f"Full response content: {response}")
-                 raise OpenRouterAPIError("Invalid response from OpenRouter: empty response")
+             }
 
-             # Log response details
-             logger.info("Received response from OpenRouter")
-             if hasattr(response, 'output'):
-                 logger.info(f"Response output length: {len(response.output) if response.output else 0}")
-             if hasattr(response, 'output_parsed'):
-                 logger.info("Response includes parsed output")
-             if hasattr(response, 'tool_calls'):
-                 logger.info(f"Response includes {len(response.tool_calls)} tool calls")
+             response = self._client.beta.chat.completions.parse(**request)
+             logger.info(f"Received response from OpenRouter: {len(response.choices)} choices")
 
              return response
 
@@ -307,45 +273,6 @@ class OpenRouterClient:
                  logger.error("Could not read response content")
              self._handle_api_error("chat completion", e)
 
-     @with_retry()
-     def embeddings(
-         self,
-         input: Union[str, list[str]],
-         *,
-         model: Optional[Union[str, OpenRouterModel]] = None,
-         **kwargs: Any,
-     ) -> Any:
-         """Create embeddings.
-
-         Args:
-             input: Text to embed
-             model: Optional model override
-             **kwargs: Additional parameters
-
-         Returns:
-             Embeddings response
-
-         Raises:
-             OpenRouterConnectionError: For connection issues
-             OpenRouterAPIError: For API errors
-             OpenRouterError: For other errors
-         """
-         try:
-             # Remove any incompatible parameters
-             kwargs.pop("parse", None) # Remove parse parameter if present
-
-             # Use text-embedding-ada-002 for embeddings
-             embeddings_model = "openai/text-embedding-ada-002"
-
-             return self._client.embeddings.create(
-                 model=str(model or embeddings_model),
-                 input=input if isinstance(input, list) else [input],
-                 encoding_format="float", # Use float format instead of base64
-                 **kwargs,
-             )
-         except Exception as e:
-             self._handle_api_error("embeddings", e)
-
      @classmethod
      def register_model(cls, name: str, value: str) -> None:
          """Register a new model.