openrouter-provider 0.0.8__tar.gz → 0.1.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of openrouter-provider might be problematic.
- {openrouter_provider-0.0.8 → openrouter_provider-0.1.0}/PKG-INFO +1 -1
- {openrouter_provider-0.0.8 → openrouter_provider-0.1.0}/pyproject.toml +1 -1
- {openrouter_provider-0.0.8 → openrouter_provider-0.1.0}/src/OpenRouterProvider/Chatbot_manager.py +45 -5
- {openrouter_provider-0.0.8 → openrouter_provider-0.1.0}/src/OpenRouterProvider/OpenRouterProvider.py +49 -8
- {openrouter_provider-0.0.8 → openrouter_provider-0.1.0}/src/openrouter_provider.egg-info/PKG-INFO +1 -1
- {openrouter_provider-0.0.8 → openrouter_provider-0.1.0}/README.md +0 -0
- {openrouter_provider-0.0.8 → openrouter_provider-0.1.0}/setup.cfg +0 -0
- {openrouter_provider-0.0.8 → openrouter_provider-0.1.0}/src/OpenRouterProvider/Chat_message.py +0 -0
- {openrouter_provider-0.0.8 → openrouter_provider-0.1.0}/src/OpenRouterProvider/LLMs.py +0 -0
- {openrouter_provider-0.0.8 → openrouter_provider-0.1.0}/src/OpenRouterProvider/Tool.py +0 -0
- {openrouter_provider-0.0.8 → openrouter_provider-0.1.0}/src/__init__.py +0 -0
- {openrouter_provider-0.0.8 → openrouter_provider-0.1.0}/src/openrouter_provider.egg-info/SOURCES.txt +0 -0
- {openrouter_provider-0.0.8 → openrouter_provider-0.1.0}/src/openrouter_provider.egg-info/dependency_links.txt +0 -0
- {openrouter_provider-0.0.8 → openrouter_provider-0.1.0}/src/openrouter_provider.egg-info/requires.txt +0 -0
- {openrouter_provider-0.0.8 → openrouter_provider-0.1.0}/src/openrouter_provider.egg-info/top_level.txt +0 -0
{openrouter_provider-0.0.8 → openrouter_provider-0.1.0}/src/OpenRouterProvider/Chatbot_manager.py
RENAMED
@@ -83,11 +83,19 @@ class Chatbot_manager:
 
         print("----------------------------------------------------------\n")
 
-    def invoke(self, model: LLMModel, query: Chat_message, tools: list[tool_model]=[], provider:ProviderConfig=None) -> Chat_message:
+    def invoke(
+        self,
+        model: LLMModel,
+        query: Chat_message,
+        tools: list[tool_model]=[],
+        provider:ProviderConfig=None,
+        temperature: float=0.3
+    ) -> Chat_message:
         self._memory.append(query)
         client = OpenRouterProvider()
         reply = client.invoke(
             model=model,
+            temperature=temperature,
             system_prompt=self._system_prompt,
             querys=self._memory,
             tools=self.tools + tools,
@@ -124,11 +132,19 @@
 
         return reply
 
-    def invoke_stream(self, model: LLMModel, query: Chat_message, tools: list[tool_model]=[], provider:ProviderConfig=None) -> Iterator[str]:
+    def invoke_stream(
+        self,
+        model: LLMModel,
+        query: Chat_message,
+        tools: list[tool_model]=[],
+        provider:ProviderConfig=None,
+        temperature: float=0.3
+    ) -> Iterator[str]:
         self._memory.append(query)
         client = OpenRouterProvider()
         generator = client.invoke_stream(
             model=model,
+            temperature=temperature,
             system_prompt=self._system_prompt,
             querys=self._memory,
             tools=self.tools + tools,
@@ -142,11 +158,19 @@
 
         self._memory.append(Chat_message(text=text, role=Role.ai, answerdBy=LLMModel))
 
-    async def async_invoke(self, model: LLMModel, query: Chat_message, tools: list[tool_model] = [], provider: ProviderConfig = None) -> Chat_message:
+    async def async_invoke(
+        self,
+        model: LLMModel,
+        query: Chat_message,
+        tools: list[tool_model] = [],
+        provider: ProviderConfig = None,
+        temperature: float=0.3
+    ) -> Chat_message:
         self._memory.append(query)
         client = OpenRouterProvider()
         reply = await client.async_invoke(
             model=model,
+            temperature=temperature,
             system_prompt=self._system_prompt,
             querys=self._memory,
             tools=self.tools + tools,
@@ -182,12 +206,20 @@
 
         return reply
 
-    async def async_invoke_stream(self, model: LLMModel, query: Chat_message, tools: list[tool_model] = [], provider: ProviderConfig = None) -> AsyncIterator[str]:
+    async def async_invoke_stream(
+        self,
+        model: LLMModel,
+        query: Chat_message,
+        tools: list[tool_model] = [],
+        provider: ProviderConfig = None,
+        temperature: float=0.3
+    ) -> AsyncIterator[str]:
         self._memory.append(query)
         client = OpenRouterProvider()
 
         stream = client.async_invoke_stream(
             model=model,
+            temperature=temperature,
             system_prompt=self._system_prompt,
             querys=self._memory,
             tools=self.tools + tools,
@@ -202,11 +234,19 @@
 
         self._memory.append(Chat_message(text=text, role=Role.ai, answerdBy=model))
 
-    def structured_output(self, model: LLMModel, query: Chat_message, provider:ProviderConfig=None, json_schema: BaseModel=None) -> BaseModel:
+    def structured_output(
+        self,
+        model: LLMModel,
+        query: Chat_message,
+        provider:ProviderConfig=None,
+        json_schema: BaseModel=None,
+        temperature: float=0.3
+    ) -> BaseModel:
         self._memory.append(query)
         client = OpenRouterProvider()
         reply = client.structured_output(
             model=model,
+            temperature=temperature,
             system_prompt=self._system_prompt,
             querys=self._memory,
             provider=provider,
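The practical effect of these Chatbot_manager changes: every invocation method (invoke, invoke_stream, async_invoke, async_invoke_stream, structured_output) now takes a `temperature` keyword, defaulting to 0.3, and forwards it to the underlying OpenRouterProvider call. A minimal usage sketch follows; only the method names, parameter names, and the `temperature` default are confirmed by this diff, while the import paths (inferred from the source layout above), the no-argument `Chatbot_manager()` constructor, `Role.user`, and the `gpt_4o_mini` model constant are assumptions.

```python
# Hedged sketch of the 0.1.0 Chatbot_manager API with the new
# `temperature` parameter. Imports, `Role.user`, and `gpt_4o_mini`
# are assumptions not confirmed by this diff.
from OpenRouterProvider.Chatbot_manager import Chatbot_manager
from OpenRouterProvider.Chat_message import Chat_message, Role
from OpenRouterProvider.LLMs import gpt_4o_mini  # hypothetical model constant

manager = Chatbot_manager()  # assumes a no-argument constructor

query = Chat_message(text="Explain OpenRouter in one sentence.", role=Role.user)

# New in 0.1.0: temperature is forwarded to chat.completions.create
# (defaults to 0.3 when omitted).
reply = manager.invoke(model=gpt_4o_mini, query=query, temperature=0.7)
print(reply.text)

# Streaming variant; invoke_stream yields strings per its Iterator[str] hint.
for chunk in manager.invoke_stream(model=gpt_4o_mini, query=query, temperature=0.0):
    print(chunk, end="")
```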
{openrouter_provider-0.0.8 → openrouter_provider-0.1.0}/src/OpenRouterProvider/OpenRouterProvider.py
RENAMED
@@ -53,8 +53,11 @@ class OpenRouterProvider:
             api_key=api_key,
         )
 
-    def make_prompt(self, system_prompt: Chat_message, querys: list[Chat_message]) -> list[dict]:
-
+    def make_prompt(
+        self,
+        system_prompt: Chat_message,
+        querys: list[Chat_message]
+    ) -> list[dict]:
         messages = [{"role": "system", "content": system_prompt.text}]
 
         for query in querys:
@@ -96,7 +99,15 @@
 
         return messages
 
-    def invoke(self, model: LLMModel, system_prompt: Chat_message, querys: list[Chat_message], tools: list[tool_model] = [], provider: ProviderConfig = None) -> Chat_message:
+    def invoke(
+        self,
+        model: LLMModel,
+        system_prompt: Chat_message,
+        querys: list[Chat_message],
+        tools: list[tool_model] = [],
+        provider: ProviderConfig = None,
+        temperature: float = 0.3
+    ) -> Chat_message:
         try:
             messages = self.make_prompt(system_prompt, querys)
 
@@ -105,6 +116,7 @@
 
             response = self.client.chat.completions.create(
                 model=model.name,
+                temperature=temperature,
                 messages=messages,
                 tools=tool_defs,
                 extra_body={"provider": provider_dict},
@@ -122,7 +134,15 @@
             logger.exception(f"An error occurred while invoking the model: {e.__class__.__name__}: {str(e)}")
             return Chat_message(text="Fail to get response. Please see the error message.", role=Role.ai, raw_response=None)
 
-    def invoke_stream(self, model: LLMModel, system_prompt: Chat_message, querys: list[Chat_message], tools: list[tool_model] = [], provider: ProviderConfig = None) -> Iterator[ChatCompletionChunk]:
+    def invoke_stream(
+        self,
+        model: LLMModel,
+        system_prompt: Chat_message,
+        querys: list[Chat_message],
+        tools: list[tool_model] = [],
+        provider: ProviderConfig = None,
+        temperature: float = 0.3
+    ) -> Iterator[ChatCompletionChunk]:
         # chunk example
         # ChatCompletionChunk(id='gen-1746748260-mdKZLTs9QY7MmUxWKb8V', choices=[Choice(delta=ChoiceDelta(content='!', function_call=None, refusal=None, role='assistant', tool_calls=None), finish_reason=None, index=0, logprobs=None, native_finish_reason=None)], created=1746748260, model='openai/gpt-4o-mini', object='chat.completion.chunk', service_tier=None, system_fingerprint='fp_e2f22fdd96', usage=None, provider='OpenAI')
 
@@ -138,6 +158,7 @@
 
             response = self.client.chat.completions.create(
                 model=model.name,
+                temperature=temperature,
                 messages=messages,
                 tools=tool_defs,
                 extra_body={"provider": provider_dict},
@@ -150,7 +171,14 @@
             logger.exception(f"An error occurred while invoking the model: {e.__class__.__name__}: {str(e)}")
             return Chat_message(text="Fail to get response. Please see the error message.", role=Role.ai, raw_response=None)
 
-    async def async_invoke(self, model: LLMModel, system_prompt: Chat_message, querys: list[Chat_message], tools: list[tool_model] = [], provider: ProviderConfig = None) -> Chat_message:
+    async def async_invoke(
+        self, model: LLMModel,
+        system_prompt: Chat_message,
+        querys: list[Chat_message],
+        tools: list[tool_model] = [],
+        provider: ProviderConfig = None,
+        temperature: float = 0.3
+    ) -> Chat_message:
         try:
             messages = self.make_prompt(system_prompt, querys)
 
@@ -159,6 +187,7 @@
 
             response = await self.async_client.chat.completions.create(
                 model=model.name,
+                temperature=temperature,
                 messages=messages,
                 tools=tool_defs,
                 extra_body={"provider": provider_dict}
@@ -182,7 +211,8 @@
         system_prompt: Chat_message,
         querys: list[Chat_message],
         tools: list[tool_model] = [],
-        provider: ProviderConfig = None
+        provider: ProviderConfig = None,
+        temperature: float = 0.3
     ) -> AsyncIterator[ChatCompletionChunk]:
         try:
             messages = self.make_prompt(system_prompt, querys)
@@ -192,6 +222,7 @@
 
             response = await self.async_client.chat.completions.create(
                 model=model.name,
+                temperature=temperature,
                 messages=messages,
                 tools=tool_defs,
                 extra_body={"provider": provider_dict},
@@ -205,13 +236,22 @@
             logger.exception(f"An error occurred while asynchronously streaming the model: {e.__class__.__name__}: {str(e)}")
             return
 
-    def structured_output(self, model: LLMModel, system_prompt: Chat_message, querys: list[Chat_message], provider: ProviderConfig = None, json_schema: BaseModel = None) -> BaseModel:
+    def structured_output(
+        self,
+        model: LLMModel,
+        system_prompt: Chat_message,
+        querys: list[Chat_message],
+        provider: ProviderConfig = None,
+        json_schema: BaseModel = None,
+        temperature: float = 0.3
+    ) -> BaseModel:
         try:
             messages = self.make_prompt(system_prompt, querys)
             provider_dict = provider.to_dict() if provider else None
 
             response = self.client.chat.completions.create(
                 model=model.name,
+                temperature=temperature,
                 messages=messages,
                 response_format={"type": "json_schema", "json_schema": {"name": json_schema.__name__, "schema": json_schema.model_json_schema()}},
                 extra_body={"provider": provider_dict},
@@ -222,4 +262,5 @@
         except Exception as e:
             logger.exception(f"An error occurred while invoking structured output: {e.__class__.__name__}: {str(e)}")
             return Chat_message(text="Fail to get response. Please see the error message.", role=Role.ai, raw_response=None)
-
+
+
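The provider-level changes mirror the manager: each method gains `temperature` and passes it straight into `self.client.chat.completions.create`. One detail visible in the structured_output hunk: although `json_schema` is annotated as `BaseModel`, the body calls `json_schema.__name__` and `json_schema.model_json_schema()`, so the expected argument is a pydantic model class, not an instance. A hedged sketch follows; the parameter names and that class-vs-instance behavior are confirmed by this diff, while the imports, `Role.system`/`Role.user`, the `gpt_4o_mini` constant, and API-key handling are assumptions.

```python
# Hedged sketch of OpenRouterProvider.structured_output in 0.1.0.
# Imports, Role members, and the model constant are assumptions.
from pydantic import BaseModel

from OpenRouterProvider.OpenRouterProvider import OpenRouterProvider
from OpenRouterProvider.Chat_message import Chat_message, Role
from OpenRouterProvider.LLMs import gpt_4o_mini  # hypothetical model constant

class CityFacts(BaseModel):
    name: str
    country: str
    population: int

client = OpenRouterProvider()  # presumably reads the API key from the environment

result = client.structured_output(
    model=gpt_4o_mini,
    system_prompt=Chat_message(text="Answer with structured data.", role=Role.system),
    querys=[Chat_message(text="Give me facts about Paris.", role=Role.user)],
    json_schema=CityFacts,  # pass the class itself, despite the BaseModel annotation
    temperature=0.0,        # new in 0.1.0; defaults to 0.3
)
```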
{openrouter_provider-0.0.8 → openrouter_provider-0.1.0}/README.md
RENAMED
File without changes
{openrouter_provider-0.0.8 → openrouter_provider-0.1.0}/setup.cfg
RENAMED
File without changes
{openrouter_provider-0.0.8 → openrouter_provider-0.1.0}/src/OpenRouterProvider/Chat_message.py
RENAMED
File without changes
{openrouter_provider-0.0.8 → openrouter_provider-0.1.0}/src/OpenRouterProvider/LLMs.py
RENAMED
File without changes
{openrouter_provider-0.0.8 → openrouter_provider-0.1.0}/src/OpenRouterProvider/Tool.py
RENAMED
File without changes
{openrouter_provider-0.0.8 → openrouter_provider-0.1.0}/src/__init__.py
RENAMED
File without changes
{openrouter_provider-0.0.8 → openrouter_provider-0.1.0}/src/openrouter_provider.egg-info/SOURCES.txt
RENAMED
File without changes
{openrouter_provider-0.0.8 → openrouter_provider-0.1.0}/src/openrouter_provider.egg-info/dependency_links.txt
RENAMED
File without changes
{openrouter_provider-0.0.8 → openrouter_provider-0.1.0}/src/openrouter_provider.egg-info/requires.txt
RENAMED
File without changes
{openrouter_provider-0.0.8 → openrouter_provider-0.1.0}/src/openrouter_provider.egg-info/top_level.txt
RENAMED
File without changes