openrouter-provider 0.0.1__tar.gz → 0.0.2__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of openrouter-provider might be problematic. Click here for more details.
- openrouter_provider-0.0.2/PKG-INFO +232 -0
- openrouter_provider-0.0.2/README.md +206 -0
- {openrouter_provider-0.0.1 → openrouter_provider-0.0.2}/pyproject.toml +1 -1
- {openrouter_provider-0.0.1 → openrouter_provider-0.0.2}/src/OpenRouterProvider/Chatbot_manager.py +5 -3
- {openrouter_provider-0.0.1 → openrouter_provider-0.0.2}/src/OpenRouterProvider/OpenRouterProvider.py +23 -4
- openrouter_provider-0.0.2/src/openrouter_provider.egg-info/PKG-INFO +232 -0
- openrouter_provider-0.0.1/PKG-INFO +0 -25
- openrouter_provider-0.0.1/README.md +0 -0
- openrouter_provider-0.0.1/src/openrouter_provider.egg-info/PKG-INFO +0 -25
- {openrouter_provider-0.0.1 → openrouter_provider-0.0.2}/setup.cfg +0 -0
- {openrouter_provider-0.0.1 → openrouter_provider-0.0.2}/src/OpenRouterProvider/Chat_message.py +0 -0
- {openrouter_provider-0.0.1 → openrouter_provider-0.0.2}/src/OpenRouterProvider/LLMs.py +0 -0
- {openrouter_provider-0.0.1 → openrouter_provider-0.0.2}/src/OpenRouterProvider/Tool.py +0 -0
- {openrouter_provider-0.0.1 → openrouter_provider-0.0.2}/src/__init__.py +0 -0
- {openrouter_provider-0.0.1 → openrouter_provider-0.0.2}/src/openrouter_provider.egg-info/SOURCES.txt +0 -0
- {openrouter_provider-0.0.1 → openrouter_provider-0.0.2}/src/openrouter_provider.egg-info/dependency_links.txt +0 -0
- {openrouter_provider-0.0.1 → openrouter_provider-0.0.2}/src/openrouter_provider.egg-info/requires.txt +0 -0
- {openrouter_provider-0.0.1 → openrouter_provider-0.0.2}/src/openrouter_provider.egg-info/top_level.txt +0 -0
|
@@ -0,0 +1,232 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: openrouter-provider
|
|
3
|
+
Version: 0.0.2
|
|
4
|
+
Summary: This is an unofficial wrapper of OpenRouter.
|
|
5
|
+
Author-email: Keisuke Miyamto <aichiboyhighschool@gmail.com>
|
|
6
|
+
Requires-Python: >=3.7
|
|
7
|
+
Description-Content-Type: text/markdown
|
|
8
|
+
Requires-Dist: annotated-types
|
|
9
|
+
Requires-Dist: anyio
|
|
10
|
+
Requires-Dist: certifi
|
|
11
|
+
Requires-Dist: distro
|
|
12
|
+
Requires-Dist: h11
|
|
13
|
+
Requires-Dist: httpcore
|
|
14
|
+
Requires-Dist: httpx
|
|
15
|
+
Requires-Dist: idna
|
|
16
|
+
Requires-Dist: jiter
|
|
17
|
+
Requires-Dist: openai
|
|
18
|
+
Requires-Dist: pillow
|
|
19
|
+
Requires-Dist: pydantic
|
|
20
|
+
Requires-Dist: pydantic_core
|
|
21
|
+
Requires-Dist: python-dotenv
|
|
22
|
+
Requires-Dist: sniffio
|
|
23
|
+
Requires-Dist: tqdm
|
|
24
|
+
Requires-Dist: typing-inspection
|
|
25
|
+
Requires-Dist: typing_extensions
|
|
26
|
+
|
|
27
|
+
## Introduction
|
|
28
|
+
|
|
29
|
+
Welcome to **openrouter-provider**, an unofficial Python wrapper for the OpenRouter API. This library lets you easily integrate with OpenRouter models, manage chat sessions, process images, and call tools within your Python application.
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
## Features
|
|
33
|
+
|
|
34
|
+
* Simple chat interface with system, user, assistant, and tool roles
|
|
35
|
+
* Automatic image resizing and Base64 encoding
|
|
36
|
+
* Built-in tool decorator for defining custom functions
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
## Installation
|
|
40
|
+
|
|
41
|
+
### From PyPI
|
|
42
|
+
|
|
43
|
+
```bash
|
|
44
|
+
pip3 install openrouter-provider
|
|
45
|
+
```
|
|
46
|
+
|
|
47
|
+
### From Source
|
|
48
|
+
|
|
49
|
+
```bash
|
|
50
|
+
git clone https://github.com/yourusername/openrouter-provider.git
|
|
51
|
+
cd openrouter-provider
|
|
52
|
+
pip3 install .
|
|
53
|
+
```
|
|
54
|
+
|
|
55
|
+
|
|
56
|
+
|
|
57
|
+
## Configuration
|
|
58
|
+
|
|
59
|
+
1. Create a `.env` file in your project root.
|
|
60
|
+
2. Add your OpenRouter API key:
|
|
61
|
+
|
|
62
|
+
```bash
|
|
63
|
+
OPENROUTER_API_KEY=your_api_key_here
|
|
64
|
+
```
|
|
65
|
+
|
|
66
|
+
|
|
67
|
+
|
|
68
|
+
## Usage
|
|
69
|
+
|
|
70
|
+
### Basic chat bot
|
|
71
|
+
Chat history is automatically sent by Chatbot_manager. If you want to delete the chat history, use the `clear_memory` method.
|
|
72
|
+
|
|
73
|
+
```python
|
|
74
|
+
from OpenRouterProvider.Chatbot_manager import Chat_message, Chatbot_manager
|
|
75
|
+
from OpenRouterProvider.LLMs import gpt_4o_mini
|
|
76
|
+
|
|
77
|
+
# Declare chat bot
|
|
78
|
+
ai = Chatbot_manager(system_prompt="Please answer in English.")
|
|
79
|
+
|
|
80
|
+
# Send query
|
|
81
|
+
query = Chat_message(text="Introduce yourself, please.")
|
|
82
|
+
response = ai.invoke(model=gpt_4o_mini, query=query)
|
|
83
|
+
print(response.text)
|
|
84
|
+
|
|
85
|
+
# Send the next query. Chatbot_manager automatically handles chat history.
|
|
86
|
+
query = Chat_message(text="Tell me a short story.")
|
|
87
|
+
response = ai.invoke(model=gpt_4o_mini, query=query)
|
|
88
|
+
print(response.text)
|
|
89
|
+
|
|
90
|
+
# Print all chat history
|
|
91
|
+
ai.print_memory()
|
|
92
|
+
|
|
93
|
+
# Delete all chat history
|
|
94
|
+
ai.clear_memory()
|
|
95
|
+
```
|
|
96
|
+
|
|
97
|
+
### Chat bot with images
|
|
98
|
+
You can use images in the chat.
|
|
99
|
+
|
|
100
|
+
```python
|
|
101
|
+
from OpenRouterProvider.Chatbot_manager import Chat_message, Chatbot_manager
|
|
102
|
+
from OpenRouterProvider.LLMs import gpt_4o_mini
|
|
103
|
+
from PIL import Image
|
|
104
|
+
|
|
105
|
+
dog = Image.open("dog.jpg")
|
|
106
|
+
cat = Image.open("cat.jpg")
|
|
107
|
+
|
|
108
|
+
# Send query with images
|
|
109
|
+
ai = Chatbot_manager(system_prompt="Please answer in English.")
|
|
110
|
+
query = Chat_message(text="What can you see in the images?", images=[dog, cat])
|
|
111
|
+
response = ai.invoke(model=gpt_4o_mini, query=query)
|
|
112
|
+
print(response.text)
|
|
113
|
+
```
|
|
114
|
+
|
|
115
|
+
### With tools
|
|
116
|
+
|
|
117
|
+
Use the `@tool_model` decorator to expose Python functions as callable tools in the chat. Tool calls are automatically processed by Chatbot_manager, so you don't need to handle them yourself.
|
|
118
|
+
|
|
119
|
+
```python
|
|
120
|
+
from OpenRouterProvider.Chatbot_manager import Chat_message, Chatbot_manager
|
|
121
|
+
from OpenRouterProvider.LLMs import gpt_4o_mini
|
|
122
|
+
from OpenRouterProvider.Tool import tool_model
|
|
123
|
+
|
|
124
|
+
@tool_model
|
|
125
|
+
def get_user_info():
|
|
126
|
+
"""
|
|
127
|
+
Return user's personal info: name, age, and address.
|
|
128
|
+
"""
|
|
129
|
+
return "name: Alice\nage: 30\naddress: Wonderland"
|
|
130
|
+
|
|
131
|
+
ai = Chatbot_manager(system_prompt="Please answer in English.", tools=[get_user_info])
|
|
132
|
+
query = Chat_message(text="What is the name, age, address of the user?")
|
|
133
|
+
response = ai.invoke(model=gpt_4o_mini, query=query)
|
|
134
|
+
ai.print_memory()
|
|
135
|
+
```
|
|
136
|
+
|
|
137
|
+
## Advanced Usage
|
|
138
|
+
### Prebuilt and Custom Model Usage
|
|
139
|
+
|
|
140
|
+
You can use the prebuilt models provided by this library or easily declare your own custom models.
|
|
141
|
+
This library provides many ready-to-use models from OpenAI, Anthropic, Google, and others.
|
|
142
|
+
|
|
143
|
+
```python
|
|
144
|
+
from OpenRouterProvider.Chatbot_manager import Chat_message, Chatbot_manager
|
|
145
|
+
from OpenRouterProvider.LLMs import gpt_4o, claude_3_7_sonnet
|
|
146
|
+
|
|
147
|
+
# Use OpenAI GPT-4o
|
|
148
|
+
ai = Chatbot_manager(system_prompt="Please answer in English.")
|
|
149
|
+
query = Chat_message(text="Tell me a joke.")
|
|
150
|
+
response = ai.invoke(model=gpt_4o, query=query)
|
|
151
|
+
print(response.text)
|
|
152
|
+
|
|
153
|
+
# Use Anthropic Claude 3.7 Sonnet
|
|
154
|
+
query = Chat_message(text="Summarize the story of Hamlet.")
|
|
155
|
+
response = ai.invoke(model=claude_3_7_sonnet, query=query)
|
|
156
|
+
print(response.text)
|
|
157
|
+
```
|
|
158
|
+
|
|
159
|
+
Available prebuilt models include:
|
|
160
|
+
|
|
161
|
+
#### **OpenAI**
|
|
162
|
+
|
|
163
|
+
* `gpt_4o`
|
|
164
|
+
* `gpt_4o_mini`
|
|
165
|
+
* `gpt_4_1`
|
|
166
|
+
* `gpt_4_1_mini`
|
|
167
|
+
* `gpt_4_1_nano`
|
|
168
|
+
* `o4_mini`
|
|
169
|
+
* `o4_mini_high`
|
|
170
|
+
* `o3`
|
|
171
|
+
|
|
172
|
+
#### **Anthropic**
|
|
173
|
+
|
|
174
|
+
* `claude_3_7_sonnet`
|
|
175
|
+
* `claude_3_7_sonnet_thinking`
|
|
176
|
+
* `claude_3_5_haiku`
|
|
177
|
+
|
|
178
|
+
#### **Google**
|
|
179
|
+
|
|
180
|
+
* `gemini_2_0_flash`
|
|
181
|
+
* `gemini_2_0_flash_free`
|
|
182
|
+
* `gemini_2_5_flash`
|
|
183
|
+
* `gemini_2_5_flash_thinking`
|
|
184
|
+
* `gemini_2_5_pro`
|
|
185
|
+
|
|
186
|
+
#### **Deepseek**
|
|
187
|
+
|
|
188
|
+
* `deepseek_v3_free`
|
|
189
|
+
* `deepseek_v3`
|
|
190
|
+
* `deepseek_r1_free`
|
|
191
|
+
* `deepseek_r1`
|
|
192
|
+
|
|
193
|
+
#### **xAI**
|
|
194
|
+
|
|
195
|
+
* `grok_3_mini`
|
|
196
|
+
* `grok_3`
|
|
197
|
+
|
|
198
|
+
#### **Microsoft**
|
|
199
|
+
|
|
200
|
+
* `mai_ds_r1_free`
|
|
201
|
+
|
|
202
|
+
#### **Others**
|
|
203
|
+
|
|
204
|
+
* `llama_4_maverick_free`
|
|
205
|
+
* `llama_4_scout`
|
|
206
|
+
* `mistral_small_3_1_24B_free`
|
|
207
|
+
|
|
208
|
+
All of them are instances of `LLMModel`, which includes cost and model name settings.
|
|
209
|
+
|
|
210
|
+
### Using Custom Models
|
|
211
|
+
|
|
212
|
+
You can define and use your own custom model if it's available on OpenRouter.
|
|
213
|
+
|
|
214
|
+
```python
|
|
215
|
+
from OpenRouterProvider.Chatbot_manager import Chat_message, Chatbot_manager
|
|
216
|
+
from OpenRouterProvider.LLMs import LLMModel
|
|
217
|
+
|
|
218
|
+
# Declare a custom model
|
|
219
|
+
my_model = LLMModel(
|
|
220
|
+
name="my-org/my-custom-model", # Model name for OpenRouter
|
|
221
|
+
input_cost=0.5, # Optional: cost per 1M input tokens
|
|
222
|
+
output_cost=2.0 # Optional: cost per 1M output tokens
|
|
223
|
+
)
|
|
224
|
+
|
|
225
|
+
# Use the custom model
|
|
226
|
+
ai = Chatbot_manager(system_prompt="Please answer in English.")
|
|
227
|
+
query = Chat_message(text="Explain black holes simply.")
|
|
228
|
+
response = ai.invoke(model=my_model, query=query)
|
|
229
|
+
print(response.text)
|
|
230
|
+
```
|
|
231
|
+
|
|
232
|
+
You only need to know the model name as used on OpenRouter. `input_cost` and `output_cost` are optional and are not currently used by this library; they are reserved for a future update.
|
|
@@ -0,0 +1,206 @@
|
|
|
1
|
+
## Introduction
|
|
2
|
+
|
|
3
|
+
Welcome to **openrouter-provider**, an unofficial Python wrapper for the OpenRouter API. This library lets you easily integrate with OpenRouter models, manage chat sessions, process images, and call tools within your Python application.
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
## Features
|
|
7
|
+
|
|
8
|
+
* Simple chat interface with system, user, assistant, and tool roles
|
|
9
|
+
* Automatic image resizing and Base64 encoding
|
|
10
|
+
* Built-in tool decorator for defining custom functions
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
## Installation
|
|
14
|
+
|
|
15
|
+
### From PyPI
|
|
16
|
+
|
|
17
|
+
```bash
|
|
18
|
+
pip3 install openrouter-provider
|
|
19
|
+
```
|
|
20
|
+
|
|
21
|
+
### From Source
|
|
22
|
+
|
|
23
|
+
```bash
|
|
24
|
+
git clone https://github.com/yourusername/openrouter-provider.git
|
|
25
|
+
cd openrouter-provider
|
|
26
|
+
pip3 install .
|
|
27
|
+
```
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
## Configuration
|
|
32
|
+
|
|
33
|
+
1. Create a `.env` file in your project root.
|
|
34
|
+
2. Add your OpenRouter API key:
|
|
35
|
+
|
|
36
|
+
```bash
|
|
37
|
+
OPENROUTER_API_KEY=your_api_key_here
|
|
38
|
+
```
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
|
|
42
|
+
## Usage
|
|
43
|
+
|
|
44
|
+
### Basic chat bot
|
|
45
|
+
Chat history is automatically sent by Chatbot_manager. If you want to delete the chat history, use the `clear_memory` method.
|
|
46
|
+
|
|
47
|
+
```python
|
|
48
|
+
from OpenRouterProvider.Chatbot_manager import Chat_message, Chatbot_manager
|
|
49
|
+
from OpenRouterProvider.LLMs import gpt_4o_mini
|
|
50
|
+
|
|
51
|
+
# Declare chat bot
|
|
52
|
+
ai = Chatbot_manager(system_prompt="Please answer in English.")
|
|
53
|
+
|
|
54
|
+
# Send query
|
|
55
|
+
query = Chat_message(text="Introduce yourself, please.")
|
|
56
|
+
response = ai.invoke(model=gpt_4o_mini, query=query)
|
|
57
|
+
print(response.text)
|
|
58
|
+
|
|
59
|
+
# Send the next query. Chatbot_manager automatically handles chat history.
|
|
60
|
+
query = Chat_message(text="Tell me a short story.")
|
|
61
|
+
response = ai.invoke(model=gpt_4o_mini, query=query)
|
|
62
|
+
print(response.text)
|
|
63
|
+
|
|
64
|
+
# Print all chat history
|
|
65
|
+
ai.print_memory()
|
|
66
|
+
|
|
67
|
+
# Delete all chat history
|
|
68
|
+
ai.clear_memory()
|
|
69
|
+
```
|
|
70
|
+
|
|
71
|
+
### Chat bot with images
|
|
72
|
+
You can use images in the chat.
|
|
73
|
+
|
|
74
|
+
```python
|
|
75
|
+
from OpenRouterProvider.Chatbot_manager import Chat_message, Chatbot_manager
|
|
76
|
+
from OpenRouterProvider.LLMs import gpt_4o_mini
|
|
77
|
+
from PIL import Image
|
|
78
|
+
|
|
79
|
+
dog = Image.open("dog.jpg")
|
|
80
|
+
cat = Image.open("cat.jpg")
|
|
81
|
+
|
|
82
|
+
# Send query with images
|
|
83
|
+
ai = Chatbot_manager(system_prompt="Please answer in English.")
|
|
84
|
+
query = Chat_message(text="What can you see in the images?", images=[dog, cat])
|
|
85
|
+
response = ai.invoke(model=gpt_4o_mini, query=query)
|
|
86
|
+
print(response.text)
|
|
87
|
+
```
|
|
88
|
+
|
|
89
|
+
### With tools
|
|
90
|
+
|
|
91
|
+
Use the `@tool_model` decorator to expose Python functions as callable tools in the chat. Tool calls are automatically processed by Chatbot_manager, so you don't need to handle them yourself.
|
|
92
|
+
|
|
93
|
+
```python
|
|
94
|
+
from OpenRouterProvider.Chatbot_manager import Chat_message, Chatbot_manager
|
|
95
|
+
from OpenRouterProvider.LLMs import gpt_4o_mini
|
|
96
|
+
from OpenRouterProvider.Tool import tool_model
|
|
97
|
+
|
|
98
|
+
@tool_model
|
|
99
|
+
def get_user_info():
|
|
100
|
+
"""
|
|
101
|
+
Return user's personal info: name, age, and address.
|
|
102
|
+
"""
|
|
103
|
+
return "name: Alice\nage: 30\naddress: Wonderland"
|
|
104
|
+
|
|
105
|
+
ai = Chatbot_manager(system_prompt="Please answer in English.", tools=[get_user_info])
|
|
106
|
+
query = Chat_message(text="What is the name, age, address of the user?")
|
|
107
|
+
response = ai.invoke(model=gpt_4o_mini, query=query)
|
|
108
|
+
ai.print_memory()
|
|
109
|
+
```
|
|
110
|
+
|
|
111
|
+
## Advanced Usage
|
|
112
|
+
### Prebuilt and Custom Model Usage
|
|
113
|
+
|
|
114
|
+
You can use the prebuilt models provided by this library or easily declare your own custom models.
|
|
115
|
+
This library provides many ready-to-use models from OpenAI, Anthropic, Google, and others.
|
|
116
|
+
|
|
117
|
+
```python
|
|
118
|
+
from OpenRouterProvider.Chatbot_manager import Chat_message, Chatbot_manager
|
|
119
|
+
from OpenRouterProvider.LLMs import gpt_4o, claude_3_7_sonnet
|
|
120
|
+
|
|
121
|
+
# Use OpenAI GPT-4o
|
|
122
|
+
ai = Chatbot_manager(system_prompt="Please answer in English.")
|
|
123
|
+
query = Chat_message(text="Tell me a joke.")
|
|
124
|
+
response = ai.invoke(model=gpt_4o, query=query)
|
|
125
|
+
print(response.text)
|
|
126
|
+
|
|
127
|
+
# Use Anthropic Claude 3.7 Sonnet
|
|
128
|
+
query = Chat_message(text="Summarize the story of Hamlet.")
|
|
129
|
+
response = ai.invoke(model=claude_3_7_sonnet, query=query)
|
|
130
|
+
print(response.text)
|
|
131
|
+
```
|
|
132
|
+
|
|
133
|
+
Available prebuilt models include:
|
|
134
|
+
|
|
135
|
+
#### **OpenAI**
|
|
136
|
+
|
|
137
|
+
* `gpt_4o`
|
|
138
|
+
* `gpt_4o_mini`
|
|
139
|
+
* `gpt_4_1`
|
|
140
|
+
* `gpt_4_1_mini`
|
|
141
|
+
* `gpt_4_1_nano`
|
|
142
|
+
* `o4_mini`
|
|
143
|
+
* `o4_mini_high`
|
|
144
|
+
* `o3`
|
|
145
|
+
|
|
146
|
+
#### **Anthropic**
|
|
147
|
+
|
|
148
|
+
* `claude_3_7_sonnet`
|
|
149
|
+
* `claude_3_7_sonnet_thinking`
|
|
150
|
+
* `claude_3_5_haiku`
|
|
151
|
+
|
|
152
|
+
#### **Google**
|
|
153
|
+
|
|
154
|
+
* `gemini_2_0_flash`
|
|
155
|
+
* `gemini_2_0_flash_free`
|
|
156
|
+
* `gemini_2_5_flash`
|
|
157
|
+
* `gemini_2_5_flash_thinking`
|
|
158
|
+
* `gemini_2_5_pro`
|
|
159
|
+
|
|
160
|
+
#### **Deepseek**
|
|
161
|
+
|
|
162
|
+
* `deepseek_v3_free`
|
|
163
|
+
* `deepseek_v3`
|
|
164
|
+
* `deepseek_r1_free`
|
|
165
|
+
* `deepseek_r1`
|
|
166
|
+
|
|
167
|
+
#### **xAI**
|
|
168
|
+
|
|
169
|
+
* `grok_3_mini`
|
|
170
|
+
* `grok_3`
|
|
171
|
+
|
|
172
|
+
#### **Microsoft**
|
|
173
|
+
|
|
174
|
+
* `mai_ds_r1_free`
|
|
175
|
+
|
|
176
|
+
#### **Others**
|
|
177
|
+
|
|
178
|
+
* `llama_4_maverick_free`
|
|
179
|
+
* `llama_4_scout`
|
|
180
|
+
* `mistral_small_3_1_24B_free`
|
|
181
|
+
|
|
182
|
+
All of them are instances of `LLMModel`, which includes cost and model name settings.
|
|
183
|
+
|
|
184
|
+
### Using Custom Models
|
|
185
|
+
|
|
186
|
+
You can define and use your own custom model if it's available on OpenRouter.
|
|
187
|
+
|
|
188
|
+
```python
|
|
189
|
+
from OpenRouterProvider.Chatbot_manager import Chat_message, Chatbot_manager
|
|
190
|
+
from OpenRouterProvider.LLMs import LLMModel
|
|
191
|
+
|
|
192
|
+
# Declare a custom model
|
|
193
|
+
my_model = LLMModel(
|
|
194
|
+
name="my-org/my-custom-model", # Model name for OpenRouter
|
|
195
|
+
input_cost=0.5, # Optional: cost per 1M input tokens
|
|
196
|
+
output_cost=2.0 # Optional: cost per 1M output tokens
|
|
197
|
+
)
|
|
198
|
+
|
|
199
|
+
# Use the custom model
|
|
200
|
+
ai = Chatbot_manager(system_prompt="Please answer in English.")
|
|
201
|
+
query = Chat_message(text="Explain black holes simply.")
|
|
202
|
+
response = ai.invoke(model=my_model, query=query)
|
|
203
|
+
print(response.text)
|
|
204
|
+
```
|
|
205
|
+
|
|
206
|
+
You only need to know the model name as used on OpenRouter. `input_cost` and `output_cost` are optional and are not currently used by this library; they are reserved for a future update.
|
{openrouter_provider-0.0.1 → openrouter_provider-0.0.2}/src/OpenRouterProvider/Chatbot_manager.py
RENAMED
|
@@ -80,14 +80,15 @@ class Chatbot_manager:
|
|
|
80
80
|
|
|
81
81
|
print("----------------------------------------------------------\n")
|
|
82
82
|
|
|
83
|
-
def invoke(self, model: LLMModel, query: Chat_message, tools: list[tool_model]=[]) -> Chat_message:
|
|
83
|
+
def invoke(self, model: LLMModel, query: Chat_message, tools: list[tool_model]=[], provider:ProviderConfig=None) -> Chat_message:
|
|
84
84
|
self._memory.append(query)
|
|
85
85
|
client = OpenRouterProvider()
|
|
86
86
|
reply = client.invoke(
|
|
87
87
|
model=model,
|
|
88
88
|
system_prompt=self._system_prompt,
|
|
89
89
|
querys=self._memory,
|
|
90
|
-
tools=self.tools + tools
|
|
90
|
+
tools=self.tools + tools,
|
|
91
|
+
provider=provider
|
|
91
92
|
)
|
|
92
93
|
reply.answeredBy = model
|
|
93
94
|
self._memory.append(reply)
|
|
@@ -111,7 +112,8 @@ class Chatbot_manager:
|
|
|
111
112
|
model=model,
|
|
112
113
|
system_prompt=self._system_prompt,
|
|
113
114
|
querys=self._memory,
|
|
114
|
-
tools=self.tools + tools
|
|
115
|
+
tools=self.tools + tools,
|
|
116
|
+
provider=provider
|
|
115
117
|
)
|
|
116
118
|
|
|
117
119
|
reply.answeredBy = model
|
{openrouter_provider-0.0.1 → openrouter_provider-0.0.2}/src/OpenRouterProvider/OpenRouterProvider.py
RENAMED
|
@@ -5,6 +5,25 @@ from .LLMs import *
|
|
|
5
5
|
from openai import OpenAI
|
|
6
6
|
from dotenv import load_dotenv
|
|
7
7
|
import os
|
|
8
|
+
from dataclasses import dataclass, field, asdict
|
|
9
|
+
from typing import List, Optional, Literal
|
|
10
|
+
import json
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
@dataclass
|
|
14
|
+
class ProviderConfig:
|
|
15
|
+
order: Optional[List[str]] = None
|
|
16
|
+
allow_fallbacks: bool = None
|
|
17
|
+
require_parameters: bool = None
|
|
18
|
+
data_collection: Literal["allow", "deny"] = None
|
|
19
|
+
only: Optional[List[str]] = None
|
|
20
|
+
ignore: Optional[List[str]] = None
|
|
21
|
+
quantizations: Optional[List[str]] = None
|
|
22
|
+
sort: Optional[Literal["price", "throughput"]] = None
|
|
23
|
+
max_price: Optional[dict] = None
|
|
24
|
+
|
|
25
|
+
def to_dict(self) -> dict:
|
|
26
|
+
return {k: v for k, v in asdict(self).items() if v is not None}
|
|
8
27
|
|
|
9
28
|
|
|
10
29
|
class OpenRouterProvider:
|
|
@@ -62,15 +81,14 @@ class OpenRouterProvider:
|
|
|
62
81
|
return messages
|
|
63
82
|
|
|
64
83
|
|
|
65
|
-
def invoke(self, model: LLMModel, system_prompt: Chat_message, querys: list[Chat_message], tools:list[tool_model]=[]) -> Chat_message:
|
|
84
|
+
def invoke(self, model: LLMModel, system_prompt: Chat_message, querys: list[Chat_message], tools:list[tool_model]=[], provider:ProviderConfig=None) -> Chat_message:
|
|
85
|
+
print(provider.to_dict())
|
|
66
86
|
response = self.client.chat.completions.create(
|
|
67
87
|
model=model.name,
|
|
68
88
|
messages=self.make_prompt(system_prompt, querys),
|
|
69
89
|
tools=[tool.tool_definition for tool in tools],
|
|
70
90
|
extra_body={
|
|
71
|
-
"provider":
|
|
72
|
-
"order": ["Groq"]
|
|
73
|
-
}
|
|
91
|
+
"provider": provider.to_dict() if provider else None
|
|
74
92
|
}
|
|
75
93
|
)
|
|
76
94
|
reply = Chat_message(text=response.choices[0].message.content, role=Role.ai)
|
|
@@ -80,5 +98,6 @@ class OpenRouterProvider:
|
|
|
80
98
|
for tool in response.choices[0].message.tool_calls:
|
|
81
99
|
reply.tool_calls.append(ToolCall(id=tool.id, name=tool.function.name, arguments=tool.function.arguments))
|
|
82
100
|
|
|
101
|
+
print(response)
|
|
83
102
|
return reply
|
|
84
103
|
|
|
@@ -0,0 +1,232 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: openrouter-provider
|
|
3
|
+
Version: 0.0.2
|
|
4
|
+
Summary: This is an unofficial wrapper of OpenRouter.
|
|
5
|
+
Author-email: Keisuke Miyamto <aichiboyhighschool@gmail.com>
|
|
6
|
+
Requires-Python: >=3.7
|
|
7
|
+
Description-Content-Type: text/markdown
|
|
8
|
+
Requires-Dist: annotated-types
|
|
9
|
+
Requires-Dist: anyio
|
|
10
|
+
Requires-Dist: certifi
|
|
11
|
+
Requires-Dist: distro
|
|
12
|
+
Requires-Dist: h11
|
|
13
|
+
Requires-Dist: httpcore
|
|
14
|
+
Requires-Dist: httpx
|
|
15
|
+
Requires-Dist: idna
|
|
16
|
+
Requires-Dist: jiter
|
|
17
|
+
Requires-Dist: openai
|
|
18
|
+
Requires-Dist: pillow
|
|
19
|
+
Requires-Dist: pydantic
|
|
20
|
+
Requires-Dist: pydantic_core
|
|
21
|
+
Requires-Dist: python-dotenv
|
|
22
|
+
Requires-Dist: sniffio
|
|
23
|
+
Requires-Dist: tqdm
|
|
24
|
+
Requires-Dist: typing-inspection
|
|
25
|
+
Requires-Dist: typing_extensions
|
|
26
|
+
|
|
27
|
+
## Introduction
|
|
28
|
+
|
|
29
|
+
Welcome to **openrouter-provider**, an unofficial Python wrapper for the OpenRouter API. This library lets you easily integrate with OpenRouter models, manage chat sessions, process images, and call tools within your Python application.
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
## Features
|
|
33
|
+
|
|
34
|
+
* Simple chat interface with system, user, assistant, and tool roles
|
|
35
|
+
* Automatic image resizing and Base64 encoding
|
|
36
|
+
* Built-in tool decorator for defining custom functions
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
## Installation
|
|
40
|
+
|
|
41
|
+
### From PyPI
|
|
42
|
+
|
|
43
|
+
```bash
|
|
44
|
+
pip3 install openrouter-provider
|
|
45
|
+
```
|
|
46
|
+
|
|
47
|
+
### From Source
|
|
48
|
+
|
|
49
|
+
```bash
|
|
50
|
+
git clone https://github.com/yourusername/openrouter-provider.git
|
|
51
|
+
cd openrouter-provider
|
|
52
|
+
pip3 install .
|
|
53
|
+
```
|
|
54
|
+
|
|
55
|
+
|
|
56
|
+
|
|
57
|
+
## Configuration
|
|
58
|
+
|
|
59
|
+
1. Create a `.env` file in your project root.
|
|
60
|
+
2. Add your OpenRouter API key:
|
|
61
|
+
|
|
62
|
+
```bash
|
|
63
|
+
OPENROUTER_API_KEY=your_api_key_here
|
|
64
|
+
```
|
|
65
|
+
|
|
66
|
+
|
|
67
|
+
|
|
68
|
+
## Usage
|
|
69
|
+
|
|
70
|
+
### Basic chat bot
|
|
71
|
+
Chat history is automatically sent by Chatbot_manager. If you want to delete the chat history, use the `clear_memory` method.
|
|
72
|
+
|
|
73
|
+
```python
|
|
74
|
+
from OpenRouterProvider.Chatbot_manager import Chat_message, Chatbot_manager
|
|
75
|
+
from OpenRouterProvider.LLMs import gpt_4o_mini
|
|
76
|
+
|
|
77
|
+
# Declare chat bot
|
|
78
|
+
ai = Chatbot_manager(system_prompt="Please answer in English.")
|
|
79
|
+
|
|
80
|
+
# Send query
|
|
81
|
+
query = Chat_message(text="Introduce yourself, please.")
|
|
82
|
+
response = ai.invoke(model=gpt_4o_mini, query=query)
|
|
83
|
+
print(response.text)
|
|
84
|
+
|
|
85
|
+
# Send the next query. Chatbot_manager automatically handles chat history.
|
|
86
|
+
query = Chat_message(text="Tell me a short story.")
|
|
87
|
+
response = ai.invoke(model=gpt_4o_mini, query=query)
|
|
88
|
+
print(response.text)
|
|
89
|
+
|
|
90
|
+
# Print all chat history
|
|
91
|
+
ai.print_memory()
|
|
92
|
+
|
|
93
|
+
# Delete all chat history
|
|
94
|
+
ai.clear_memory()
|
|
95
|
+
```
|
|
96
|
+
|
|
97
|
+
### Chat bot with images
|
|
98
|
+
You can use images in the chat.
|
|
99
|
+
|
|
100
|
+
```python
|
|
101
|
+
from OpenRouterProvider.Chatbot_manager import Chat_message, Chatbot_manager
|
|
102
|
+
from OpenRouterProvider.LLMs import gpt_4o_mini
|
|
103
|
+
from PIL import Image
|
|
104
|
+
|
|
105
|
+
dog = Image.open("dog.jpg")
|
|
106
|
+
cat = Image.open("cat.jpg")
|
|
107
|
+
|
|
108
|
+
# Send query with images
|
|
109
|
+
ai = Chatbot_manager(system_prompt="Please answer in English.")
|
|
110
|
+
query = Chat_message(text="What can you see in the images?", images=[dog, cat])
|
|
111
|
+
response = ai.invoke(model=gpt_4o_mini, query=query)
|
|
112
|
+
print(response.text)
|
|
113
|
+
```
|
|
114
|
+
|
|
115
|
+
### With tools
|
|
116
|
+
|
|
117
|
+
Use the `@tool_model` decorator to expose Python functions as callable tools in the chat. Tool calls are automatically processed by Chatbot_manager, so you don't need to handle them yourself.
|
|
118
|
+
|
|
119
|
+
```python
|
|
120
|
+
from OpenRouterProvider.Chatbot_manager import Chat_message, Chatbot_manager
|
|
121
|
+
from OpenRouterProvider.LLMs import gpt_4o_mini
|
|
122
|
+
from OpenRouterProvider.Tool import tool_model
|
|
123
|
+
|
|
124
|
+
@tool_model
|
|
125
|
+
def get_user_info():
|
|
126
|
+
"""
|
|
127
|
+
Return user's personal info: name, age, and address.
|
|
128
|
+
"""
|
|
129
|
+
return "name: Alice\nage: 30\naddress: Wonderland"
|
|
130
|
+
|
|
131
|
+
ai = Chatbot_manager(system_prompt="Please answer in English.", tools=[get_user_info])
|
|
132
|
+
query = Chat_message(text="What is the name, age, address of the user?")
|
|
133
|
+
response = ai.invoke(model=gpt_4o_mini, query=query)
|
|
134
|
+
ai.print_memory()
|
|
135
|
+
```
|
|
136
|
+
|
|
137
|
+
## Advanced Usage
|
|
138
|
+
### Prebuilt and Custom Model Usage
|
|
139
|
+
|
|
140
|
+
You can use the prebuilt models provided by this library or easily declare your own custom models.
|
|
141
|
+
This library provides many ready-to-use models from OpenAI, Anthropic, Google, and others.
|
|
142
|
+
|
|
143
|
+
```python
|
|
144
|
+
from OpenRouterProvider.Chatbot_manager import Chat_message, Chatbot_manager
|
|
145
|
+
from OpenRouterProvider.LLMs import gpt_4o, claude_3_7_sonnet
|
|
146
|
+
|
|
147
|
+
# Use OpenAI GPT-4o
|
|
148
|
+
ai = Chatbot_manager(system_prompt="Please answer in English.")
|
|
149
|
+
query = Chat_message(text="Tell me a joke.")
|
|
150
|
+
response = ai.invoke(model=gpt_4o, query=query)
|
|
151
|
+
print(response.text)
|
|
152
|
+
|
|
153
|
+
# Use Anthropic Claude 3.7 Sonnet
|
|
154
|
+
query = Chat_message(text="Summarize the story of Hamlet.")
|
|
155
|
+
response = ai.invoke(model=claude_3_7_sonnet, query=query)
|
|
156
|
+
print(response.text)
|
|
157
|
+
```
|
|
158
|
+
|
|
159
|
+
Available prebuilt models include:
|
|
160
|
+
|
|
161
|
+
#### **OpenAI**
|
|
162
|
+
|
|
163
|
+
* `gpt_4o`
|
|
164
|
+
* `gpt_4o_mini`
|
|
165
|
+
* `gpt_4_1`
|
|
166
|
+
* `gpt_4_1_mini`
|
|
167
|
+
* `gpt_4_1_nano`
|
|
168
|
+
* `o4_mini`
|
|
169
|
+
* `o4_mini_high`
|
|
170
|
+
* `o3`
|
|
171
|
+
|
|
172
|
+
#### **Anthropic**
|
|
173
|
+
|
|
174
|
+
* `claude_3_7_sonnet`
|
|
175
|
+
* `claude_3_7_sonnet_thinking`
|
|
176
|
+
* `claude_3_5_haiku`
|
|
177
|
+
|
|
178
|
+
#### **Google**
|
|
179
|
+
|
|
180
|
+
* `gemini_2_0_flash`
|
|
181
|
+
* `gemini_2_0_flash_free`
|
|
182
|
+
* `gemini_2_5_flash`
|
|
183
|
+
* `gemini_2_5_flash_thinking`
|
|
184
|
+
* `gemini_2_5_pro`
|
|
185
|
+
|
|
186
|
+
#### **Deepseek**
|
|
187
|
+
|
|
188
|
+
* `deepseek_v3_free`
|
|
189
|
+
* `deepseek_v3`
|
|
190
|
+
* `deepseek_r1_free`
|
|
191
|
+
* `deepseek_r1`
|
|
192
|
+
|
|
193
|
+
#### **xAI**
|
|
194
|
+
|
|
195
|
+
* `grok_3_mini`
|
|
196
|
+
* `grok_3`
|
|
197
|
+
|
|
198
|
+
#### **Microsoft**
|
|
199
|
+
|
|
200
|
+
* `mai_ds_r1_free`
|
|
201
|
+
|
|
202
|
+
#### **Others**
|
|
203
|
+
|
|
204
|
+
* `llama_4_maverick_free`
|
|
205
|
+
* `llama_4_scout`
|
|
206
|
+
* `mistral_small_3_1_24B_free`
|
|
207
|
+
|
|
208
|
+
All of them are instances of `LLMModel`, which includes cost and model name settings.
|
|
209
|
+
|
|
210
|
+
### Using Custom Models
|
|
211
|
+
|
|
212
|
+
You can define and use your own custom model if it's available on OpenRouter.
|
|
213
|
+
|
|
214
|
+
```python
|
|
215
|
+
from OpenRouterProvider.Chatbot_manager import Chat_message, Chatbot_manager
|
|
216
|
+
from OpenRouterProvider.LLMs import LLMModel
|
|
217
|
+
|
|
218
|
+
# Declare a custom model
|
|
219
|
+
my_model = LLMModel(
|
|
220
|
+
name="my-org/my-custom-model", # Model name for OpenRouter
|
|
221
|
+
input_cost=0.5, # Optional: cost per 1M input tokens
|
|
222
|
+
output_cost=2.0 # Optional: cost per 1M output tokens
|
|
223
|
+
)
|
|
224
|
+
|
|
225
|
+
# Use the custom model
|
|
226
|
+
ai = Chatbot_manager(system_prompt="Please answer in English.")
|
|
227
|
+
query = Chat_message(text="Explain black holes simply.")
|
|
228
|
+
response = ai.invoke(model=my_model, query=query)
|
|
229
|
+
print(response.text)
|
|
230
|
+
```
|
|
231
|
+
|
|
232
|
+
You only need to know the model name as used on OpenRouter. `input_cost` and `output_cost` are optional and are not currently used by this library; they are reserved for a future update.
|
|
@@ -1,25 +0,0 @@
|
|
|
1
|
-
Metadata-Version: 2.4
|
|
2
|
-
Name: openrouter-provider
|
|
3
|
-
Version: 0.0.1
|
|
4
|
-
Summary: This is an unofficial wrapper of OpenRouter.
|
|
5
|
-
Author-email: Keisuke Miyamto <aichiboyhighschool@gmail.com>
|
|
6
|
-
Requires-Python: >=3.7
|
|
7
|
-
Description-Content-Type: text/markdown
|
|
8
|
-
Requires-Dist: annotated-types
|
|
9
|
-
Requires-Dist: anyio
|
|
10
|
-
Requires-Dist: certifi
|
|
11
|
-
Requires-Dist: distro
|
|
12
|
-
Requires-Dist: h11
|
|
13
|
-
Requires-Dist: httpcore
|
|
14
|
-
Requires-Dist: httpx
|
|
15
|
-
Requires-Dist: idna
|
|
16
|
-
Requires-Dist: jiter
|
|
17
|
-
Requires-Dist: openai
|
|
18
|
-
Requires-Dist: pillow
|
|
19
|
-
Requires-Dist: pydantic
|
|
20
|
-
Requires-Dist: pydantic_core
|
|
21
|
-
Requires-Dist: python-dotenv
|
|
22
|
-
Requires-Dist: sniffio
|
|
23
|
-
Requires-Dist: tqdm
|
|
24
|
-
Requires-Dist: typing-inspection
|
|
25
|
-
Requires-Dist: typing_extensions
|
|
File without changes
|
|
@@ -1,25 +0,0 @@
|
|
|
1
|
-
Metadata-Version: 2.4
|
|
2
|
-
Name: openrouter-provider
|
|
3
|
-
Version: 0.0.1
|
|
4
|
-
Summary: This is an unofficial wrapper of OpenRouter.
|
|
5
|
-
Author-email: Keisuke Miyamto <aichiboyhighschool@gmail.com>
|
|
6
|
-
Requires-Python: >=3.7
|
|
7
|
-
Description-Content-Type: text/markdown
|
|
8
|
-
Requires-Dist: annotated-types
|
|
9
|
-
Requires-Dist: anyio
|
|
10
|
-
Requires-Dist: certifi
|
|
11
|
-
Requires-Dist: distro
|
|
12
|
-
Requires-Dist: h11
|
|
13
|
-
Requires-Dist: httpcore
|
|
14
|
-
Requires-Dist: httpx
|
|
15
|
-
Requires-Dist: idna
|
|
16
|
-
Requires-Dist: jiter
|
|
17
|
-
Requires-Dist: openai
|
|
18
|
-
Requires-Dist: pillow
|
|
19
|
-
Requires-Dist: pydantic
|
|
20
|
-
Requires-Dist: pydantic_core
|
|
21
|
-
Requires-Dist: python-dotenv
|
|
22
|
-
Requires-Dist: sniffio
|
|
23
|
-
Requires-Dist: tqdm
|
|
24
|
-
Requires-Dist: typing-inspection
|
|
25
|
-
Requires-Dist: typing_extensions
|
|
File without changes
|
{openrouter_provider-0.0.1 → openrouter_provider-0.0.2}/src/OpenRouterProvider/Chat_message.py
RENAMED
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
{openrouter_provider-0.0.1 → openrouter_provider-0.0.2}/src/openrouter_provider.egg-info/SOURCES.txt
RENAMED
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|