openrouter-provider 0.0.5__py3-none-any.whl → 1.0.10__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,430 @@
1
+ Metadata-Version: 2.4
2
+ Name: openrouter-provider
3
+ Version: 1.0.10
4
+ Summary: This is an unofficial wrapper of OpenRouter.
5
+ Author-email: Keisuke Miyamto <aichiboyhighschool@gmail.com>
6
+ Requires-Python: >=3.7
7
+ Description-Content-Type: text/markdown
8
+ Requires-Dist: annotated-types
9
+ Requires-Dist: anyio
10
+ Requires-Dist: build
11
+ Requires-Dist: certifi
12
+ Requires-Dist: charset-normalizer
13
+ Requires-Dist: distro
14
+ Requires-Dist: docutils
15
+ Requires-Dist: h11
16
+ Requires-Dist: httpcore
17
+ Requires-Dist: httpx
18
+ Requires-Dist: id
19
+ Requires-Dist: idna
20
+ Requires-Dist: jaraco.classes
21
+ Requires-Dist: jaraco.context
22
+ Requires-Dist: jaraco.functools
23
+ Requires-Dist: jiter
24
+ Requires-Dist: keyring
25
+ Requires-Dist: markdown-it-py
26
+ Requires-Dist: mdurl
27
+ Requires-Dist: more-itertools
28
+ Requires-Dist: nh3
29
+ Requires-Dist: openai
30
+ Requires-Dist: packaging
31
+ Requires-Dist: pillow
32
+ Requires-Dist: pydantic
33
+ Requires-Dist: pydantic_core
34
+ Requires-Dist: Pygments
35
+ Requires-Dist: pyproject_hooks
36
+ Requires-Dist: python-dotenv
37
+ Requires-Dist: readme_renderer
38
+ Requires-Dist: requests
39
+ Requires-Dist: requests-toolbelt
40
+ Requires-Dist: rfc3986
41
+ Requires-Dist: rich
42
+ Requires-Dist: sniffio
43
+ Requires-Dist: tqdm
44
+ Requires-Dist: twine
45
+ Requires-Dist: typing-inspection
46
+ Requires-Dist: typing_extensions
47
+ Requires-Dist: urllib3
48
+
49
+ # OpenRouter Provider
50
+
51
+ [![Python 3.7+](https://img.shields.io/badge/python-3.7+-blue.svg)](https://www.python.org/downloads/)
52
+ [![PyPI version](https://badge.fury.io/py/openrouter-provider.svg)](https://badge.fury.io/py/openrouter-provider)
53
+ [![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT)
54
+
55
+ An unofficial Python wrapper for the OpenRouter API that provides a simple, intuitive interface for interacting with multiple LLM models. OpenRouter Provider supports chat conversations, image processing, tool integration, streaming responses, and structured output generation.
56
+
57
+ ## Features
58
+
59
+ - **Multi-Model Support**: Access 40+ models from OpenAI, Anthropic, Google, DeepSeek, xAI, Microsoft, and Meta
60
+ - **Conversation Memory**: Automatic chat history management with easy memory control
61
+ - **Image Processing**: Built-in image resizing and base64 encoding for multimodal interactions
62
+ - **Tool Integration**: Decorator-based function calling with automatic tool execution
63
+ - **Streaming Support**: Real-time response streaming for both sync and async operations
64
+ - **Structured Output**: JSON schema-based response formatting using Pydantic models
65
+ - **Async Support**: Full async/await support for non-blocking operations
66
+ - **Provider Configuration**: OpenRouter-specific routing and fallback options
67
+
68
+ ## Installation
69
+
70
+ ### From PyPI (Recommended)
71
+
72
+ ```bash
73
+ pip install openrouter-provider
74
+ ```
75
+
76
+ ### From Source
77
+
78
+ ```bash
79
+ git clone https://github.com/yourusername/openrouter-provider.git
80
+ cd openrouter-provider
81
+ pip install .
82
+ ```
83
+
84
+ ## Configuration
85
+
86
+ 1. Get your API key from [OpenRouter](https://openrouter.ai/)
87
+ 2. Set up your environment:
88
+
89
+ **Option 1: Environment Variable**
90
+ ```bash
91
+ export OPENROUTER_API_KEY="your-api-key-here"
92
+ ```
93
+
94
+ **Option 2: .env File**
95
+ ```bash
96
+ # Create .env file in your project root
97
+ echo "OPENROUTER_API_KEY=your-api-key-here" > .env
98
+ ```
99
+
100
+ ## Quick Start
101
+
102
+ ```python
103
+ from openrouter import *
104
+
105
+ # Create client
106
+ ai = OpenRouterClient(system_prompt="You are a helpful assistant.")
107
+
108
+ # Send a message
109
+ query = Message(text="What's the capital of France?")
110
+ response = ai.invoke(model=gpt_4o_mini, query=query)
111
+ print(response.text)
112
+ ```
113
+
114
+ ## Usage Examples
115
+
116
+ ### Basic Chat Conversation
117
+
118
+ ```python
119
+ from openrouter import *
120
+
121
+ # Initialize client with system prompt
122
+ ai = OpenRouterClient(system_prompt="You are a friendly coding assistant.")
123
+
124
+ # First message
125
+ query = Message(text="Explain what Python is in simple terms.")
126
+ response = ai.invoke(model=claude_3_7_sonnet, query=query)
127
+ print(response.text)
128
+
129
+ # Follow-up message (conversation history is automatically maintained)
130
+ query = Message(text="Give me a simple Python example.")
131
+ response = ai.invoke(model=claude_3_7_sonnet, query=query)
132
+ print(response.text)
133
+
134
+ # View conversation history
135
+ ai.print_memory()
136
+
137
+ # Clear conversation history
138
+ ai.clear_memory()
139
+ ```
140
+
141
+ ### Image Processing
142
+
143
+ ```python
144
+ from openrouter import *
145
+ from PIL import Image
146
+
147
+ # Load images
148
+ dog_image = Image.open("dog.jpg")
149
+ cat_image = Image.open("cat.jpg")
150
+
151
+ # Create client
152
+ ai = OpenRouterClient(system_prompt="You are an image analysis expert.")
153
+
154
+ # Send message with images
155
+ query = Message(
156
+ text="Compare these two animals. What are the key differences?",
157
+ images=[dog_image, cat_image]
158
+ )
159
+ response = ai.invoke(model=gpt_4o, query=query)
160
+ print(response.text)
161
+ ```
162
+
163
+ ### Tool Integration
164
+
165
+ ```python
166
+ from openrouter import *
167
+
168
+ @tool_model
169
+ def get_weather(city: str, country: str = "US") -> str:
170
+ """
171
+ Get current weather information for a specific city.
172
+
173
+ Args:
174
+ city: Name of the city
175
+ country: Country code (default: US)
176
+ """
177
+ # In real implementation, you'd call a weather API
178
+ return f"The weather in {city}, {country} is sunny with 22°C"
179
+
180
+ @tool_model
181
+ def calculate_tip(bill_amount: float, tip_percentage: float = 15.0) -> str:
182
+ """
183
+ Calculate tip amount and total bill.
184
+
185
+ Args:
186
+ bill_amount: The original bill amount
187
+ tip_percentage: Tip percentage (default: 15%)
188
+ """
189
+ tip = bill_amount * (tip_percentage / 100)
190
+ total = bill_amount + tip
191
+ return f"Tip: ${tip:.2f}, Total: ${total:.2f}"
192
+
193
+ # Create client with tools
194
+ ai = OpenRouterClient(
195
+ system_prompt="You are a helpful assistant with access to weather and calculator tools.",
196
+ tools=[get_weather, calculate_tip]
197
+ )
198
+
199
+ # The AI will automatically use tools when needed
200
+ query = Message(text="What's the weather in Tokyo and calculate a 20% tip on a $50 bill?")
201
+ response = ai.invoke(model=gpt_4o_mini, query=query)
202
+ print(response.text)
203
+ ```
204
+
205
+ ### Streaming Responses
206
+
207
+ ```python
208
+ from openrouter import *
209
+
210
+ ai = OpenRouterClient(system_prompt="You are a storyteller.")
211
+ query = Message(text="Tell me a short story about a magical forest.")
212
+
213
+ # Stream the response
214
+ for token in ai.invoke_stream(model=claude_3_7_sonnet, query=query):
215
+ print(token, end="", flush=True)
216
+ ```
217
+
218
+ ### Async Operations
219
+
220
+ ```python
221
+ import asyncio
222
+ from openrouter import *
223
+
224
+ async def main():
225
+ ai = OpenRouterClient(system_prompt="You are a helpful assistant.")
226
+
227
+ # Async invoke
228
+ query = Message(text="Explain quantum computing in simple terms.")
229
+ response = await ai.async_invoke(model=gpt_4o_mini, query=query)
230
+ print(response.text)
231
+
232
+ # Async streaming
233
+ query = Message(text="Write a poem about the ocean.")
234
+ async for token in ai.async_invoke_stream(model=gpt_4o_mini, query=query):
235
+ print(token, end="", flush=True)
236
+
237
+ # Run async function
238
+ asyncio.run(main())
239
+ ```
240
+
241
+ ### Structured Output
242
+
243
+ ```python
244
+ from openrouter import *
245
+ from pydantic import BaseModel, Field
246
+ from typing import List
247
+
248
+ class BookRecommendation(BaseModel):
249
+ title: str = Field(description="Title of the book")
250
+ author: str = Field(description="Author of the book")
251
+ genre: str = Field(description="Genre of the book")
252
+ rating: float = Field(description="Rating out of 5.0")
253
+ summary: str = Field(description="Brief summary of the book")
254
+
255
+ class BookList(BaseModel):
256
+ recommendations: List[BookRecommendation] = Field(description="List of book recommendations")
257
+ total_count: int = Field(description="Total number of recommendations")
258
+
259
+ ai = OpenRouterClient(system_prompt="You are a book recommendation expert.")
260
+ query = Message(text="Recommend 3 science fiction books for beginners.")
261
+
262
+ # Get structured output
263
+ result: BookList = ai.structured_output(
264
+ model=gpt_4o_mini,
265
+ query=query,
266
+ json_schema=BookList
267
+ )
268
+
269
+ print(f"Found {result.total_count} recommendations:")
270
+ for book in result.recommendations:
271
+ print(f"- {book.title} by {book.author} ({book.rating}/5)")
272
+ ```
273
+
274
+ ## Available Models
275
+
276
+ The library provides pre-configured models from major providers:
277
+
278
+ ### OpenAI
279
+ - `gpt_4o` - GPT-4 Omni
280
+ - `gpt_4o_mini` - GPT-4 Omni Mini
281
+ - `o3` - OpenAI o3 Reasoning Model
282
+
283
+ ### Anthropic
284
+ - `claude_3_7_sonnet` - Claude 3.7 Sonnet
285
+ - `claude_3_5_haiku` - Claude 3.5 Haiku
286
+
287
+ ### Google
288
+ - `gemini_2_0_flash` - Gemini 2.0 Flash
289
+ - `gemini_2_5_pro` - Gemini 2.5 Pro
290
+
291
+ ### DeepSeek
292
+ - `deepseek_v3` - DeepSeek V3
293
+ - `deepseek_r1` - DeepSeek R1
294
+
295
+ ### Others
296
+ - `grok_3` - xAI Grok 3
297
+ - `llama_4_scout` - Meta Llama 4 Scout
298
+
299
+ ### Custom Models
300
+
301
+ You can also use any model available on OpenRouter:
302
+
303
+ ```python
304
+ from openrouter import *
305
+
306
+ # Define custom model
307
+ custom_model = LLMModel(
308
+ name="anthropic/claude-3-haiku",
309
+ input_cost=0.25, # Optional: cost per 1M input tokens
310
+ output_cost=1.25 # Optional: cost per 1M output tokens
311
+ )
312
+
313
+ # Use custom model
314
+ response = ai.invoke(model=custom_model, query=query)
315
+ ```
316
+
317
+ ## Advanced Configuration
318
+
319
+ ### Provider Configuration
320
+
321
+ ```python
322
+ from openrouter import *
323
+
324
+ # Configure provider preferences
325
+ provider_config = ProviderConfig(
326
+ order=["OpenAI", "Anthropic"], # Preferred provider order
327
+ allow_fallbacks=True, # Allow fallback providers
328
+ data_collection="deny" # Opt out of data collection
329
+ )
330
+
331
+ response = ai.invoke(
332
+ model=gpt_4o_mini,
333
+ query=query,
334
+ provider=provider_config
335
+ )
336
+ ```
337
+
338
+ ### Temperature Control
339
+
340
+ ```python
341
+ # More creative responses
342
+ response = ai.invoke(
343
+ model=claude_3_7_sonnet,
344
+ query=query,
345
+ temperature=0.9
346
+ )
347
+
348
+ # More deterministic responses
349
+ response = ai.invoke(
350
+ model=claude_3_7_sonnet,
351
+ query=query,
352
+ temperature=0.1
353
+ )
354
+ ```
355
+
356
+ ## API Reference
357
+
358
+ ### OpenRouterClient
359
+
360
+ ```python
361
+ class OpenRouterClient:
362
+ def __init__(self, system_prompt: str = "", tools: List[tool_model] = None)
363
+
364
+ def invoke(self, model: LLMModel, query: Message, tools: List[tool_model] = None,
365
+ provider: ProviderConfig = None, temperature: float = 0.3) -> Message
366
+
367
+ def invoke_stream(self, model: LLMModel, query: Message, tools: List[tool_model] = None,
368
+ provider: ProviderConfig = None, temperature: float = 0.3) -> Iterator[str]
369
+
370
+ async def async_invoke(self, model: LLMModel, query: Message, tools: List[tool_model] = None,
371
+ provider: ProviderConfig = None, temperature: float = 0.3) -> Message
372
+
373
+ async def async_invoke_stream(self, model: LLMModel, query: Message, tools: List[tool_model] = None,
374
+ provider: ProviderConfig = None, temperature: float = 0.3) -> AsyncIterator[str]
375
+
376
+ def structured_output(self, model: LLMModel, query: Message, provider: ProviderConfig = None,
377
+ json_schema: BaseModel = None, temperature: float = 0.3) -> BaseModel
378
+
379
+ def clear_memory(self) -> None
380
+ def print_memory(self) -> None
381
+ def set_system_prompt(self, prompt: str) -> None
382
+ ```
383
+
384
+ ### Message
385
+
386
+ ```python
387
+ class Message:
388
+ def __init__(self, text: str, images: Optional[List[Image.Image]] = None,
389
+ role: Role = Role.user, answered_by: Optional[LLMModel] = None)
390
+ ```
391
+
392
+ ### Tool Decorator
393
+
394
+ ```python
395
+ @tool_model
396
+ def your_function(param1: str, param2: int = 10) -> str:
397
+ """Function description for the AI."""
398
+ return "result"
399
+ ```
400
+
401
+ ## Development
402
+
403
+ ### Running Tests
404
+
405
+ ```bash
406
+ # Basic functionality
407
+ python -m tests.basic
408
+
409
+ # Image processing
410
+ python -m tests.image
411
+
412
+ # Tool integration
413
+ python -m tests.tool
414
+
415
+ # Streaming
416
+ python -m tests.stream
417
+
418
+ # Async operations
419
+ python -m tests.async
420
+
421
+ # Structured output
422
+ python -m tests.structured_output
423
+ ```
424
+
425
+ ### Building from Source
426
+
427
+ ```bash
428
+ pip install build
429
+ python -m build
430
+ ```
@@ -0,0 +1,10 @@
1
+ openrouter/__init__.py,sha256=xuIzdm8-l3Tmc-zrNIXTicv05c9HCMxTS9xynKpWK-Q,123
2
+ openrouter/llms.py,sha256=zmujFW5BmQA0Fm6z8skuO3ipLpaUMy1VBZxYkPE9OcM,3391
3
+ openrouter/message.py,sha256=ESI4YT6x0TuPZ0AY29ZPlBCv72KrQad1-IeNmrfGD0w,2978
4
+ openrouter/openrouter.py,sha256=T1n3Mi26StAMzJ5yNNY5ww6fympiHBfp5M4pzYPmaV8,8993
5
+ openrouter/openrouter_provider.py,sha256=NHKZjpTTBKWTYcH67XTXh8vTotV-zsuuQl2xzAjXq24,9898
6
+ openrouter/tool.py,sha256=tUUNLosz1XhzPIwY1zHXWNM3ePs7hcVD1a_W5hWTCWk,1975
7
+ openrouter_provider-1.0.10.dist-info/METADATA,sha256=NFw7weaQNpo-sEJbxrsC_DqrkJsLSJ7_bk4APhRecVY,11504
8
+ openrouter_provider-1.0.10.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
9
+ openrouter_provider-1.0.10.dist-info/top_level.txt,sha256=0jnlCcRirGeYZLm5ZbWQRUonIp4tTPl_9mq-ds_1SEo,11
10
+ openrouter_provider-1.0.10.dist-info/RECORD,,
@@ -1,5 +1,5 @@
1
1
  Wheel-Version: 1.0
2
- Generator: setuptools (80.3.1)
2
+ Generator: setuptools (80.9.0)
3
3
  Root-Is-Purelib: true
4
4
  Tag: py3-none-any
5
5
 
@@ -0,0 +1 @@
1
+ openrouter
@@ -1,142 +0,0 @@
1
- from .LLMs import LLMModel
2
-
3
- from enum import Enum
4
- from PIL import Image
5
- import base64
6
- from io import BytesIO
7
- from dataclasses import dataclass
8
-
9
- from openai.types.chat import ChatCompletion
10
-
11
-
12
- class Role(Enum):
13
- system = "system"
14
- user = "user"
15
- ai = "assistant"
16
- agent = "agent"
17
- tool = "tool"
18
-
19
-
20
- @dataclass
21
- class ToolCall:
22
- id: str
23
- name: str
24
- arguments: dict
25
- result: any = ""
26
-
27
-
28
- class Chat_message:
29
- def __init__(self,
30
- text: str,
31
- images: list[Image.Image]=None,
32
- role: Role=Role.user,
33
- answerdBy: LLMModel=None,
34
- raw_response: ChatCompletion=None
35
- ) -> None:
36
- self.role = role
37
- self.text = text
38
- self.images = self._process_image(images=images)
39
- self.answeredBy: LLMModel = answerdBy
40
-
41
- self.tool_calls: list[ToolCall] = []
42
- self.raw_resoonse: ChatCompletion = raw_response
43
-
44
-
45
- def __str__(self) -> str:
46
- # ANSI color codes for blue, green, and reset (to default)
47
- BLUE = "\033[34m"
48
- GREEN = "\033[32m"
49
- RESET = "\033[0m"
50
-
51
- message = ""
52
-
53
- if self.role == Role.system:
54
- message = "---------------------- System ----------------------\n"
55
- elif self.role == Role.user:
56
- message = BLUE + "----------------------- User -----------------------\n" + RESET
57
- elif self.role == Role.ai:
58
- message = GREEN + "--------------------- Assistant --------------------\n" + RESET
59
-
60
- # Append text and reset color formatting at the end
61
- message += self.text + RESET + "\n"
62
-
63
- return message
64
-
65
- def _process_image(self, images: list):
66
- """
67
- Process a list of images by resizing them to maintain aspect ratio and then converting them to base64 format.
68
-
69
- Args:
70
- images (list): A list of image objects to be processed.
71
-
72
- Returns:
73
- list: A list of base64-encoded image strings if input is not None/empty, otherwise `None`.
74
-
75
- Note:
76
- - Images should be provided as a "list" even if there is only a single image to process.
77
- """
78
- if images == None:
79
- return None
80
-
81
- base64_images = []
82
- for image in images:
83
- if image.mode == "RGBA":
84
- image = image.convert("RGB")
85
-
86
- image = self._resize_image_aspect_ratio(image=image)
87
- image = self._convert_to_base64(image=image)
88
- base64_images.append(image)
89
-
90
- return base64_images
91
-
92
- def _convert_to_base64(self, image: Image) -> str:
93
- """
94
- Convert an image to a base64-encoded string.
95
-
96
- Args:
97
- image (Image): The image object to be converted to base64 format.
98
-
99
- Returns:
100
- str: The base64-encoded string representation of the image.
101
-
102
- Note:
103
- - The image format will default to 'JPEG' if the format is not specified.
104
- """
105
- buffered = BytesIO()
106
- format = image.format if image.format else 'JPEG'
107
- image.save(buffered, format=format)
108
- img_bytes = buffered.getvalue()
109
- img_base64 = base64.b64encode(img_bytes).decode('utf-8')
110
-
111
- return img_base64
112
-
113
- def _resize_image_aspect_ratio(self, image: Image, target_length=1024):
114
- """
115
- Resize an image to a target length while maintaining its aspect ratio.
116
-
117
- Args:
118
- image (Image): The image object to be resized.
119
- target_length (int, optional): The target length for the larger dimension (default is 1024).
120
-
121
- Returns:
122
- Image: The resized image object with maintained aspect ratio.
123
-
124
- Note:
125
- - The smaller dimension is scaled proportionally based on the larger dimension to maintain aspect ratio.
126
- - If the image's aspect ratio is non-square, the target_length is applied to the larger dimension.
127
- """
128
-
129
- width, height = image.size
130
-
131
- if width > height:
132
- new_width = target_length
133
- new_height = int((target_length / width) * height)
134
- else:
135
- new_height = target_length
136
- new_width = int((target_length / height) * width)
137
-
138
- resized_image = image.resize((new_width, new_height))
139
-
140
- return resized_image
141
-
142
-