openrouter-provider 0.0.4__py3-none-any.whl → 0.0.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of openrouter-provider might be problematic; see the package registry listing for more details.

@@ -6,6 +6,8 @@ import base64
6
6
  from io import BytesIO
7
7
  from dataclasses import dataclass
8
8
 
9
+ from openai.types.chat import ChatCompletion
10
+
9
11
 
10
12
  class Role(Enum):
11
13
  system = "system"
@@ -24,15 +26,21 @@ class ToolCall:
24
26
 
25
27
 
26
28
  class Chat_message:
27
- def __init__(self, text: str, images: list[Image.Image]=None, role: Role=Role.user, answerdBy: LLMModel=None, token :int=0, cost: float=0) -> None:
29
+ def __init__(self,
30
+ text: str,
31
+ images: list[Image.Image]=None,
32
+ role: Role=Role.user,
33
+ answerdBy: LLMModel=None,
34
+ raw_response: ChatCompletion=None
35
+ ) -> None:
28
36
  self.role = role
29
37
  self.text = text
30
38
  self.images = self._process_image(images=images)
31
- self.token = token
32
- self.cost = cost
33
39
  self.answeredBy: LLMModel = answerdBy
34
40
 
35
41
  self.tool_calls: list[ToolCall] = []
42
+ self.raw_resoonse: ChatCompletion = raw_response
43
+
36
44
 
37
45
  def __str__(self) -> str:
38
46
  # ANSI color codes for blue, green, and reset (to default)
@@ -5,6 +5,8 @@ from .LLMs import LLMModel
5
5
  from dotenv import load_dotenv
6
6
  import time
7
7
  import json
8
+ from typing import Iterator
9
+
8
10
 
9
11
  _base_system_prompt = """
10
12
  It's [TIME] today.
@@ -120,4 +122,23 @@ class Chatbot_manager:
120
122
  self._memory.append(reply)
121
123
 
122
124
  return reply
123
-
125
+
126
+ def invoke_stream(self, model: LLMModel, query: Chat_message, tools: list[tool_model]=[], provider:ProviderConfig=None) -> Iterator[str]:
127
+ self._memory.append(query)
128
+ client = OpenRouterProvider()
129
+ generator = client.invoke_stream(
130
+ model=model,
131
+ system_prompt=self._system_prompt,
132
+ querys=self._memory,
133
+ tools=self.tools + tools,
134
+ provider=provider
135
+ )
136
+
137
+ text = ""
138
+ for token in generator:
139
+ text += token.choices[0].delta.content
140
+ yield token.choices[0].delta.content
141
+
142
+ self._memory.append(Chat_message(text=text, role=Role.ai, answerdBy=LLMModel))
143
+
144
+
@@ -1,13 +1,19 @@
1
+ import logging
1
2
  from .Chat_message import *
2
3
  from .Tool import tool_model
3
4
  from .LLMs import *
4
5
 
5
6
  from openai import OpenAI
7
+ from openai.types.chat import ChatCompletionChunk
6
8
  from dotenv import load_dotenv
7
- import os
9
+ import os, time
8
10
  from dataclasses import dataclass, field, asdict
9
- from typing import List, Optional, Literal
10
- import json
11
+ from typing import List, Optional, Literal, Iterator
12
+ from pprint import pprint
13
+
14
+ # エラーのみ表示、詳細なトレースバック付き
15
+ logging.basicConfig(level=logging.ERROR, format="%(asctime)s - %(levelname)s - %(message)s")
16
+ logger = logging.getLogger(__name__)
11
17
 
12
18
 
13
19
  @dataclass
@@ -21,7 +27,7 @@ class ProviderConfig:
21
27
  quantizations: Optional[List[str]] = None
22
28
  sort: Optional[Literal["price", "throughput"]] = None
23
29
  max_price: Optional[dict] = None
24
-
30
+
25
31
  def to_dict(self) -> dict:
26
32
  return {k: v for k, v in asdict(self).items() if v is not None}
27
33
 
@@ -29,17 +35,19 @@ class ProviderConfig:
29
35
  class OpenRouterProvider:
30
36
  def __init__(self) -> None:
31
37
  load_dotenv()
38
+ api_key = os.getenv("OPENROUTER_API_KEY")
39
+ if not api_key:
40
+ logger.error("OPENROUTER_API_KEY is not set in environment variables.")
32
41
  self.client = OpenAI(
33
42
  base_url="https://openrouter.ai/api/v1",
34
- api_key=os.getenv("OPENROUTER_API_KEY"),
43
+ api_key=api_key,
35
44
  )
36
45
 
37
46
  def make_prompt(self, system_prompt: Chat_message,
38
- querys: list[Chat_message]) -> list[dict]:
47
+ querys: list[Chat_message]) -> list[dict]:
39
48
  messages = [{"role": "system", "content": system_prompt.text}]
40
49
 
41
50
  for query in querys:
42
- # ----- USER -----
43
51
  if query.role == Role.user:
44
52
  if query.images is None:
45
53
  messages.append({"role": "user", "content": query.text})
@@ -48,15 +56,13 @@ class OpenRouterProvider:
48
56
  for img in query.images[:50]:
49
57
  content.append(
50
58
  {"type": "image_url",
51
- "image_url": {"url": f"data:image/jpeg;base64,{img}"}})
59
+ "image_url": {"url": f"data:image/jpeg;base64,{img}"}})
52
60
  messages.append({"role": "user", "content": content})
53
61
 
54
- # ----- ASSISTANT -----
55
62
  elif query.role == Role.ai or query.role == Role.tool:
56
63
  assistant_msg = {"role": "assistant"}
57
- assistant_msg["content"] = query.text or None # ← content は明示必須
64
+ assistant_msg["content"] = query.text or None
58
65
 
59
- # ① tool_calls を付与(あれば)
60
66
  if query.tool_calls:
61
67
  assistant_msg["tool_calls"] = [
62
68
  {
@@ -64,38 +70,72 @@ class OpenRouterProvider:
64
70
  "type": "function",
65
71
  "function": {
66
72
  "name": t.name,
67
- "arguments": t.arguments # JSON 文字列
73
+ "arguments": t.arguments
68
74
  }
69
75
  }
70
76
  for t in query.tool_calls
71
77
  ]
72
78
  messages.append(assistant_msg)
73
79
 
74
- # ② tool メッセージを assistant の直後に並べる
75
80
  for t in query.tool_calls:
76
81
  messages.append({
77
82
  "role": "tool",
78
83
  "tool_call_id": str(t.id),
79
- "content": str(t.result) # 実行結果(文字列)
84
+ "content": str(t.result)
80
85
  })
86
+
81
87
  return messages
82
88
 
89
+ def invoke(self, model: LLMModel, system_prompt: Chat_message, querys: list[Chat_message], tools: list[tool_model] = [], provider: ProviderConfig = None) -> Chat_message:
90
+ try:
91
+ messages = self.make_prompt(system_prompt, querys)
83
92
 
84
- def invoke(self, model: LLMModel, system_prompt: Chat_message, querys: list[Chat_message], tools:list[tool_model]=[], provider:ProviderConfig=None) -> Chat_message:
85
- response = self.client.chat.completions.create(
86
- model=model.name,
87
- messages=self.make_prompt(system_prompt, querys),
88
- tools=[tool.tool_definition for tool in tools] if tools else None,
89
- extra_body={
90
- "provider": provider.to_dict() if provider else None
91
- }
92
- )
93
- reply = Chat_message(text=response.choices[0].message.content, role=Role.ai)
93
+ tool_defs = [tool.tool_definition for tool in tools] if tools else None
94
+ provider_dict = provider.to_dict() if provider else None
95
+
96
+ response = self.client.chat.completions.create(
97
+ model=model.name,
98
+ messages=messages,
99
+ tools=tool_defs,
100
+ extra_body={"provider": provider_dict}
101
+ )
102
+
103
+ reply = Chat_message(text=response.choices[0].message.content, role=Role.ai, raw_response=response)
104
+
105
+ if response.choices[0].message.tool_calls:
106
+ reply.role = Role.tool
107
+ for tool in response.choices[0].message.tool_calls:
108
+ reply.tool_calls.append(ToolCall(id=tool.id, name=tool.function.name, arguments=tool.function.arguments))
109
+ return reply
110
+
111
+ except Exception as e:
112
+ logger.exception(f"An error occurred while invoking the model: {e.__class__.__name__}: {str(e)}")
113
+ return Chat_message(text="Fail to get response. Please see the error message.", role=Role.ai, raw_response=None)
114
+
115
+ def invoke_stream(self, model: LLMModel, system_prompt: Chat_message, querys: list[Chat_message], tools: list[tool_model] = [], provider: ProviderConfig = None) -> Iterator[ChatCompletionChunk]:
116
+ # chunk example
117
+ # ChatCompletionChunk(id='gen-1746748260-mdKZLTs9QY7MmUxWKb8V', choices=[Choice(delta=ChoiceDelta(content='!', function_call=None, refusal=None, role='assistant', tool_calls=None), finish_reason=None, index=0, logprobs=None, native_finish_reason=None)], created=1746748260, model='openai/gpt-4o-mini', object='chat.completion.chunk', service_tier=None, system_fingerprint='fp_e2f22fdd96', usage=None, provider='OpenAI')
118
+
119
+ # ChatCompletionChunk(id='gen-1746748260-mdKZLTs9QY7MmUxWKb8V', choices=[Choice(delta=ChoiceDelta(content='', function_call=None, refusal=None, role='assistant', tool_calls=None), finish_reason='stop', index=0, logprobs=None, native_finish_reason='stop')], created=1746748260, model='openai/gpt-4o-mini', object='chat.completion.chunk', service_tier=None, system_fingerprint='fp_e2f22fdd96', usage=None, provider='OpenAI')
94
120
 
95
- if response.choices[0].message.tool_calls:
96
- reply.role = Role.tool
97
- for tool in response.choices[0].message.tool_calls:
98
- reply.tool_calls.append(ToolCall(id=tool.id, name=tool.function.name, arguments=tool.function.arguments))
99
-
100
- return reply
121
+ # ChatCompletionChunk(id='gen-1746748260-mdKZLTs9QY7MmUxWKb8V', choices=[Choice(delta=ChoiceDelta(content='', function_call=None, refusal=None, role='assistant', tool_calls=None), finish_reason=None, index=0, logprobs=None, native_finish_reason=None)], created=1746748260, model='openai/gpt-4o-mini', object='chat.completion.chunk', service_tier=None, system_fingerprint=None, usage=CompletionUsage(completion_tokens=54, prompt_tokens=61, total_tokens=115, completion_tokens_details=CompletionTokensDetails(reasoning_tokens=0), prompt_tokens_details={'cached_tokens': 0}), provider='OpenAI')
122
+
123
+ try:
124
+ messages = self.make_prompt(system_prompt, querys)
125
+
126
+ tool_defs = [tool.tool_definition for tool in tools] if tools else None
127
+ provider_dict = provider.to_dict() if provider else None
128
+
129
+ response = self.client.chat.completions.create(
130
+ model=model.name,
131
+ messages=messages,
132
+ tools=tool_defs,
133
+ extra_body={"provider": provider_dict},
134
+ stream=True
135
+ )
136
+
137
+ return response
101
138
 
139
+ except Exception as e:
140
+ logger.exception(f"An error occurred while invoking the model: {e.__class__.__name__}: {str(e)}")
141
+ return Chat_message(text="Fail to get response. Please see the error message.", role=Role.ai, raw_response=None)
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: openrouter-provider
3
- Version: 0.0.4
3
+ Version: 0.0.6
4
4
  Summary: This is an unofficial wrapper of OpenRouter.
5
5
  Author-email: Keisuke Miyamto <aichiboyhighschool@gmail.com>
6
6
  Requires-Python: >=3.7
@@ -0,0 +1,10 @@
1
+ __init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
2
+ OpenRouterProvider/Chat_message.py,sha256=lQd8bFp7OHOgeOrcpcVZMdkV2Mb4reUsv5Ixo6WecYY,4424
3
+ OpenRouterProvider/Chatbot_manager.py,sha256=RzRIrCwCV0PqvjvGZQFB6xJlWOZEgsyZdLAgsH01meg,5091
4
+ OpenRouterProvider/LLMs.py,sha256=-0ELd6fqmdDvsdaPIElRsluiK85-Y6USwvQb2b4M8TA,2607
5
+ OpenRouterProvider/OpenRouterProvider.py,sha256=B8ni2KdbVXZVwMhaetZnl6aA25J3s6oMoAPpuuenx1U,6994
6
+ OpenRouterProvider/Tool.py,sha256=QeeWOD2oaYjB9tjF-Jvcjd_G_qSUIuKwFgyh20Ne06I,2010
7
+ openrouter_provider-0.0.6.dist-info/METADATA,sha256=5CKWKftpXU_T-Y78MP98yVmTBhr9cTWeEYBQYXhRhqg,5995
8
+ openrouter_provider-0.0.6.dist-info/WHEEL,sha256=DnLRTWE75wApRYVsjgc6wsVswC54sMSJhAEd4xhDpBk,91
9
+ openrouter_provider-0.0.6.dist-info/top_level.txt,sha256=I5BMEzkQFEnEYTqOY1Ktmnp7r1rrZQyeWdclKyyyHKs,28
10
+ openrouter_provider-0.0.6.dist-info/RECORD,,
@@ -1,5 +1,5 @@
1
1
  Wheel-Version: 1.0
2
- Generator: setuptools (80.3.1)
2
+ Generator: setuptools (80.4.0)
3
3
  Root-Is-Purelib: true
4
4
  Tag: py3-none-any
5
5
 
@@ -1,10 +0,0 @@
1
- __init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
2
- OpenRouterProvider/Chat_message.py,sha256=EG6CGfe3qLdczehA6wkX9FWTEFdKteAKWprQ5VSRbbU,4259
3
- OpenRouterProvider/Chatbot_manager.py,sha256=EpLWhxx7xnRa-q7xqP2Ur9dmYb9Mzv_UF6BChwpcbYk,4357
4
- OpenRouterProvider/LLMs.py,sha256=-0ELd6fqmdDvsdaPIElRsluiK85-Y6USwvQb2b4M8TA,2607
5
- OpenRouterProvider/OpenRouterProvider.py,sha256=XbS6-mhv7tzNgfzQrNeZRNDtpFC4DevhfCde4UxPw_o,3948
6
- OpenRouterProvider/Tool.py,sha256=QeeWOD2oaYjB9tjF-Jvcjd_G_qSUIuKwFgyh20Ne06I,2010
7
- openrouter_provider-0.0.4.dist-info/METADATA,sha256=nO3cxDrVZdqQIiKX18o0mIqTWWp2kQuVWENt3bt9JWg,5995
8
- openrouter_provider-0.0.4.dist-info/WHEEL,sha256=0CuiUZ_p9E4cD6NyLD6UG80LBXYyiSYZOKDm5lp32xk,91
9
- openrouter_provider-0.0.4.dist-info/top_level.txt,sha256=I5BMEzkQFEnEYTqOY1Ktmnp7r1rrZQyeWdclKyyyHKs,28
10
- openrouter_provider-0.0.4.dist-info/RECORD,,