LLM-Bridge 1.15.7__py3-none-any.whl → 1.15.8__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- llm_bridge/logic/chat_generate/model_message_converter/openai_responses_message_converter.py +13 -7
- {llm_bridge-1.15.7.dist-info → llm_bridge-1.15.8.dist-info}/METADATA +2 -2
- {llm_bridge-1.15.7.dist-info → llm_bridge-1.15.8.dist-info}/RECORD +5 -5
- {llm_bridge-1.15.7.dist-info → llm_bridge-1.15.8.dist-info}/WHEEL +0 -0
- {llm_bridge-1.15.7.dist-info → llm_bridge-1.15.8.dist-info}/licenses/LICENSE +0 -0
llm_bridge/logic/chat_generate/model_message_converter/openai_responses_message_converter.py
CHANGED
|
@@ -3,18 +3,19 @@ from openai.types.responses import ResponseInputTextParam, ResponseInputImagePar
|
|
|
3
3
|
|
|
4
4
|
from llm_bridge.logic.chat_generate import media_processor
|
|
5
5
|
from llm_bridge.logic.message_preprocess.file_type_checker import get_file_type, get_filename_without_timestamp
|
|
6
|
-
from llm_bridge.type.message import Message, ContentType
|
|
6
|
+
from llm_bridge.type.message import Message, ContentType, Role
|
|
7
7
|
from llm_bridge.type.model_message.openai_responses_message import OpenAIResponsesMessage
|
|
8
8
|
|
|
9
9
|
|
|
10
10
|
async def convert_message_to_openai_responses(message: Message) -> OpenAIResponsesMessage:
|
|
11
|
-
role = message.role
|
|
11
|
+
role = message.role
|
|
12
12
|
content: list[ResponseInputContentParam | ResponseOutputTextParam] = []
|
|
13
|
+
contains_pdf = False
|
|
13
14
|
|
|
14
15
|
for content_item in message.contents:
|
|
15
16
|
if content_item.type == ContentType.Text:
|
|
16
|
-
if role ==
|
|
17
|
-
text_content = ResponseOutputTextParam(type="output_text", text=content_item.data)
|
|
17
|
+
if role == Role.Assistant:
|
|
18
|
+
text_content = ResponseOutputTextParam(type="output_text", text=content_item.data, annotations=[])
|
|
18
19
|
else:
|
|
19
20
|
text_content = ResponseInputTextParam(type="input_text", text=content_item.data)
|
|
20
21
|
content.append(text_content)
|
|
@@ -30,6 +31,7 @@ async def convert_message_to_openai_responses(message: Message) -> OpenAIRespons
|
|
|
30
31
|
)
|
|
31
32
|
content.append(image_content)
|
|
32
33
|
elif sub_type == "pdf":
|
|
34
|
+
contains_pdf = True
|
|
33
35
|
file_data, _ = await media_processor.get_base64_content_from_url(file_url)
|
|
34
36
|
pdf_content = ResponseInputFileParam(
|
|
35
37
|
type="input_file",
|
|
@@ -52,7 +54,11 @@ async def convert_message_to_openai_responses(message: Message) -> OpenAIRespons
|
|
|
52
54
|
)
|
|
53
55
|
content.append(text_content)
|
|
54
56
|
|
|
55
|
-
|
|
56
|
-
|
|
57
|
+
# Force system role to user if the message contains a PDF
|
|
58
|
+
if role == Role.System and contains_pdf:
|
|
59
|
+
role = Role.User
|
|
60
|
+
|
|
61
|
+
if role in (Role.User, Role.System):
|
|
62
|
+
return EasyInputMessageParam(role=role.value, content=content)
|
|
57
63
|
else:
|
|
58
|
-
return ResponseOutputMessageParam(role=role, content=content)
|
|
64
|
+
return ResponseOutputMessageParam(role=role.value, content=content)
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.4
|
|
2
2
|
Name: LLM-Bridge
|
|
3
|
-
Version: 1.15.7
|
|
3
|
+
Version: 1.15.8
|
|
4
4
|
Summary: A Bridge for LLMs
|
|
5
5
|
Author-email: windsnow1025 <windsnow1025@gmail.com>
|
|
6
6
|
License-Expression: MIT
|
|
@@ -24,7 +24,7 @@ Description-Content-Type: text/markdown
|
|
|
24
24
|
|
|
25
25
|
# LLM Bridge
|
|
26
26
|
|
|
27
|
-
LLM Bridge is a unified
|
|
27
|
+
LLM Bridge is a unified API wrapper for native interactions with various LLM providers.
|
|
28
28
|
|
|
29
29
|
GitHub: [https://github.com/windsnow1025/LLM-Bridge](https://github.com/windsnow1025/LLM-Bridge)
|
|
30
30
|
|
|
@@ -40,7 +40,7 @@ llm_bridge/logic/chat_generate/model_message_converter/__init__.py,sha256=47DEQp
|
|
|
40
40
|
llm_bridge/logic/chat_generate/model_message_converter/claude_message_converter.py,sha256=YiPqMkybCXrsAJOFcUfPOHXdMkn3mZxq7gft_W449dA,2439
|
|
41
41
|
llm_bridge/logic/chat_generate/model_message_converter/gemini_message_converter.py,sha256=m6IeeQ_-yKcyBwLcEO_1HOoQAXDR5nl0mz_DNSsjieo,1529
|
|
42
42
|
llm_bridge/logic/chat_generate/model_message_converter/openai_message_converter.py,sha256=lmc-lUVZ_LgHcJZVB-l989TgrB4FtbCyGlRDp4eXycE,2179
|
|
43
|
-
llm_bridge/logic/chat_generate/model_message_converter/openai_responses_message_converter.py,sha256=
|
|
43
|
+
llm_bridge/logic/chat_generate/model_message_converter/openai_responses_message_converter.py,sha256=YscYmB9B5JRQFgrw3Mxk5Y9BkGnJtMt3tHjQzCHib3A,3242
|
|
44
44
|
llm_bridge/logic/message_preprocess/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
45
45
|
llm_bridge/logic/message_preprocess/code_file_extensions.py,sha256=5bsnSKC9PGbl6ZMy80sXfagAbz77pGjt6Z2-qwzUw48,9306
|
|
46
46
|
llm_bridge/logic/message_preprocess/document_processor.py,sha256=IsVqoFgWNa9i8cRsDAfmCynJMdlvBqiCKIT9kbx96kg,2861
|
|
@@ -57,7 +57,7 @@ llm_bridge/type/model_message/claude_message.py,sha256=gYJUTbLUeifQMva3Axarc-VFe
|
|
|
57
57
|
llm_bridge/type/model_message/gemini_message.py,sha256=mh8pf929g7_NkBzSOwnLXyrwSzTT4yt2FmyX7NZn0sM,4302
|
|
58
58
|
llm_bridge/type/model_message/openai_message.py,sha256=xFaLY-cZoSwNd7E9BSWQjBNcRfCVH11X9s2yxXlctR0,453
|
|
59
59
|
llm_bridge/type/model_message/openai_responses_message.py,sha256=be1q2euA0ybjj4NO6NxOGIRB9eJuXSb4ssUm_bM4Ocs,1529
|
|
60
|
-
llm_bridge-1.15.
|
|
61
|
-
llm_bridge-1.15.
|
|
62
|
-
llm_bridge-1.15.
|
|
63
|
-
llm_bridge-1.15.
|
|
60
|
+
llm_bridge-1.15.8.dist-info/METADATA,sha256=UGiRIxJ13auRHJJjEzu0zFCP4w44wx-u41iiEyjRAC8,3309
|
|
61
|
+
llm_bridge-1.15.8.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
|
|
62
|
+
llm_bridge-1.15.8.dist-info/licenses/LICENSE,sha256=m6uon-6P_CaiqcBfApMfjG9YRtDxcr40Z52JcqUCEAE,1069
|
|
63
|
+
llm_bridge-1.15.8.dist-info/RECORD,,
|
|
File without changes
|
|
File without changes
|