openrouter-provider 0.0.6.tar.gz → 0.0.7.tar.gz

This diff shows the changes between two publicly released versions of this package, as they appear in their public registry. It is provided for informational purposes only.


@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: openrouter-provider
-Version: 0.0.6
+Version: 0.0.7
 Summary: This is an unofficial wrapper of OpenRouter.
 Author-email: Keisuke Miyamto <aichiboyhighschool@gmail.com>
 Requires-Python: >=3.7
@@ -1,6 +1,6 @@
 [project]
 name = "openrouter-provider"
-version = "0.0.6"
+version = "0.0.7"
 description = "This is an unofficial wrapper of OpenRouter."
 readme = "README.md"
 requires-python = ">=3.7"
@@ -5,7 +5,7 @@ from .LLMs import LLMModel
 from dotenv import load_dotenv
 import time
 import json
-from typing import Iterator
+from typing import Iterator, AsyncIterator
 
 
 _base_system_prompt = """
@@ -141,4 +141,64 @@ class Chatbot_manager:
 
         self._memory.append(Chat_message(text=text, role=Role.ai, answerdBy=LLMModel))
 
+    async def async_invoke(self, model: LLMModel, query: Chat_message, tools: list[tool_model] = [], provider: ProviderConfig = None) -> Chat_message:
+        self._memory.append(query)
+        client = OpenRouterProvider()
+        reply = await client.async_invoke(
+            model=model,
+            system_prompt=self._system_prompt,
+            querys=self._memory,
+            tools=self.tools + tools,
+            provider=provider
+        )
+        reply.answeredBy = model
+        self._memory.append(reply)
+
+        if reply.tool_calls:
+            for requested_tool in reply.tool_calls:
+                args = requested_tool.arguments
+                if isinstance(args, str):
+                    args = json.loads(args)
+
+                for tool in (self.tools + tools):
+                    if tool.name == requested_tool.name:
+                        result = tool(**args)
+                        requested_tool.result = result
+                        break
+                else:
+                    print("Tool Not found", requested_tool.name)
+                    return reply
+
+            reply = await client.async_invoke(
+                model=model,
+                system_prompt=self._system_prompt,
+                querys=self._memory,
+                tools=self.tools + tools,
+                provider=provider
+            )
+            reply.answeredBy = model
+            self._memory.append(reply)
+
+        return reply
+
+    async def async_invoke_stream(self, model: LLMModel, query: Chat_message, tools: list[tool_model] = [], provider: ProviderConfig = None) -> AsyncIterator[str]:
+        self._memory.append(query)
+        client = OpenRouterProvider()
+
+        stream = client.async_invoke_stream(
+            model=model,
+            system_prompt=self._system_prompt,
+            querys=self._memory,
+            tools=self.tools + tools,
+            provider=provider
+        )
+
+        text = ""
+        async for chunk in stream:
+            delta = chunk.choices[0].delta.content or ""
+            text += delta
+            yield delta
+
+        self._memory.append(Chat_message(text=text, role=Role.ai, answerdBy=model))
+
 
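The manager-level API added in this hunk could be exercised roughly as below. This is a minimal sketch, not taken from the package docs: the import path, the Chatbot_manager constructor signature, the Role members, and the LLMModel member are all assumptions inferred from the diff; only async_invoke and async_invoke_stream themselves appear above.

import asyncio
# Hypothetical import path; the diff does not show the package's public exports.
from openrouter_provider import Chatbot_manager, Chat_message, Role, LLMModel

async def main():
    # Assumed constructor; the diff only shows the new methods on the class.
    bot = Chatbot_manager(system_prompt=Chat_message(text="You are a helpful assistant.", role=Role.system))

    # async_invoke records the query, runs any tool calls the model requests,
    # re-invokes the model with the tool results, and returns the final reply.
    reply = await bot.async_invoke(
        model=LLMModel.gpt_4o_mini,  # placeholder; use whatever members LLMModel actually defines
        query=Chat_message(text="Hello!", role=Role.user),
    )
    print(reply.text)

    # async_invoke_stream yields text deltas, then appends the assembled
    # reply to memory once the stream is exhausted.
    async for delta in bot.async_invoke_stream(
        model=LLMModel.gpt_4o_mini,
        query=Chat_message(text="Now stream a haiku.", role=Role.user),
    ):
        print(delta, end="", flush=True)

asyncio.run(main())

Note that async_invoke_stream is an async generator and does not execute tool calls; only async_invoke runs the tool loop.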
@@ -3,12 +3,12 @@ from .Chat_message import *
 from .Tool import tool_model
 from .LLMs import *
 
-from openai import OpenAI
+from openai import OpenAI, AsyncOpenAI
 from openai.types.chat import ChatCompletionChunk
 from dotenv import load_dotenv
 import os, time
 from dataclasses import dataclass, field, asdict
-from typing import List, Optional, Literal, Iterator
+from typing import List, Optional, Literal, Iterator, AsyncIterator
 from pprint import pprint
 
 # Show errors only, with detailed traceback
@@ -42,6 +42,10 @@ class OpenRouterProvider:
             base_url="https://openrouter.ai/api/v1",
             api_key=api_key,
         )
+        self.async_client = AsyncOpenAI(
+            base_url="https://openrouter.ai/api/v1",
+            api_key=api_key,
+        )
 
     def make_prompt(self, system_prompt: Chat_message,
                     querys: list[Chat_message]) -> list[dict]:
@@ -139,3 +143,60 @@ class OpenRouterProvider:
         except Exception as e:
             logger.exception(f"An error occurred while invoking the model: {e.__class__.__name__}: {str(e)}")
             return Chat_message(text="Fail to get response. Please see the error message.", role=Role.ai, raw_response=None)
+
+    async def async_invoke(self, model: LLMModel, system_prompt: Chat_message, querys: list[Chat_message], tools: list[tool_model] = [], provider: ProviderConfig = None) -> Chat_message:
+        try:
+            messages = self.make_prompt(system_prompt, querys)
+
+            tool_defs = [tool.tool_definition for tool in tools] if tools else None
+            provider_dict = provider.to_dict() if provider else None
+
+            response = await self.async_client.chat.completions.create(
+                model=model.name,
+                messages=messages,
+                tools=tool_defs,
+                extra_body={"provider": provider_dict}
+            )
+
+            reply = Chat_message(text=response.choices[0].message.content, role=Role.ai, raw_response=response)
+
+            if response.choices[0].message.tool_calls:
+                reply.role = Role.tool
+                for tool in response.choices[0].message.tool_calls:
+                    reply.tool_calls.append(ToolCall(id=tool.id, name=tool.function.name, arguments=tool.function.arguments))
+            return reply
+
+        except Exception as e:
+            logger.exception(f"An error occurred while asynchronously invoking the model: {e.__class__.__name__}: {str(e)}")
+            return Chat_message(text="Fail to get response. Please see the error message.", role=Role.ai, raw_response=None)
+
+    async def async_invoke_stream(
+        self,
+        model: LLMModel,
+        system_prompt: Chat_message,
+        querys: list[Chat_message],
+        tools: list[tool_model] = [],
+        provider: ProviderConfig = None
+    ) -> AsyncIterator[ChatCompletionChunk]:
+        try:
+            messages = self.make_prompt(system_prompt, querys)
+
+            tool_defs = [tool.tool_definition for tool in tools] if tools else None
+            provider_dict = provider.to_dict() if provider else None
+
+            response = await self.async_client.chat.completions.create(
+                model=model.name,
+                messages=messages,
+                tools=tool_defs,
+                extra_body={"provider": provider_dict},
+                stream=True
+            )
+
+            async for chunk in response:
+                yield chunk
+
+        except Exception as e:
+            logger.exception(f"An error occurred while asynchronously streaming the model: {e.__class__.__name__}: {str(e)}")
+            return
+
+
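The provider-level methods added here can also be driven directly, bypassing Chatbot_manager. Again a sketch under the same assumptions about the import path and enum members; the OpenRouterProvider constructor is assumed to read the API key from the environment, as the load_dotenv-based client setup above suggests.

import asyncio
# Hypothetical import path, as in the previous sketch.
from openrouter_provider import OpenRouterProvider, Chat_message, Role, LLMModel

async def main():
    provider = OpenRouterProvider()  # assumed to pick up the API key from the environment
    reply = await provider.async_invoke(
        model=LLMModel.gpt_4o_mini,  # placeholder model member
        system_prompt=Chat_message(text="Answer in one sentence.", role=Role.system),
        querys=[Chat_message(text="What does OpenRouter do?", role=Role.user)],
    )
    print(reply.text)

asyncio.run(main())

Unlike the manager method, async_invoke here returns any requested tool calls unexecuted in reply.tool_calls, leaving the tool loop to the caller. And because async_invoke_stream is an async generator, its except branch can only log and end the stream early; it cannot return a fallback Chat_message the way async_invoke does.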
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: openrouter-provider
-Version: 0.0.6
+Version: 0.0.7
 Summary: This is an unofficial wrapper of OpenRouter.
 Author-email: Keisuke Miyamto <aichiboyhighschool@gmail.com>
 Requires-Python: >=3.7