LLM-Bridge 1.12.4__py3-none-any.whl → 1.12.6__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- llm_bridge/logic/chat_generate/chat_client_factory.py +4 -0
- llm_bridge/logic/chat_generate/model_client_factory/gemini_client_factory.py +5 -0
- llm_bridge/resources/model_prices.json +5 -29
- {llm_bridge-1.12.4.dist-info → llm_bridge-1.12.6.dist-info}/METADATA +7 -7
- {llm_bridge-1.12.4.dist-info → llm_bridge-1.12.6.dist-info}/RECORD +8 -8
- {llm_bridge-1.12.4.dist-info → llm_bridge-1.12.6.dist-info}/WHEEL +0 -0
- {llm_bridge-1.12.4.dist-info → llm_bridge-1.12.6.dist-info}/licenses/LICENSE +0 -0
- {llm_bridge-1.12.4.dist-info → llm_bridge-1.12.6.dist-info}/top_level.txt +0 -0
|
@@ -16,6 +16,7 @@ async def create_chat_client(
|
|
|
16
16
|
stream: bool,
|
|
17
17
|
thought: bool,
|
|
18
18
|
code_execution: bool,
|
|
19
|
+
structured_output_schema: dict | None = None,
|
|
19
20
|
) -> ChatClient:
|
|
20
21
|
if api_type == 'OpenAI':
|
|
21
22
|
return await create_openai_client(
|
|
@@ -74,6 +75,7 @@ async def create_chat_client(
|
|
|
74
75
|
stream=stream,
|
|
75
76
|
thought=thought,
|
|
76
77
|
code_execution=code_execution,
|
|
78
|
+
structured_output_schema=structured_output_schema,
|
|
77
79
|
)
|
|
78
80
|
elif api_type == 'Gemini-Paid':
|
|
79
81
|
return await create_gemini_client(
|
|
@@ -85,6 +87,7 @@ async def create_chat_client(
|
|
|
85
87
|
stream=stream,
|
|
86
88
|
thought=thought,
|
|
87
89
|
code_execution=code_execution,
|
|
90
|
+
structured_output_schema=structured_output_schema,
|
|
88
91
|
)
|
|
89
92
|
elif api_type == 'Gemini-Vertex':
|
|
90
93
|
return await create_gemini_client(
|
|
@@ -96,6 +99,7 @@ async def create_chat_client(
|
|
|
96
99
|
thought=thought,
|
|
97
100
|
code_execution=code_execution,
|
|
98
101
|
vertexai=True,
|
|
102
|
+
structured_output_schema=structured_output_schema,
|
|
99
103
|
)
|
|
100
104
|
elif api_type == 'Claude':
|
|
101
105
|
return await create_claude_client(
|
|
@@ -18,6 +18,7 @@ async def create_gemini_client(
|
|
|
18
18
|
stream: bool,
|
|
19
19
|
thought: bool,
|
|
20
20
|
code_execution: bool,
|
|
21
|
+
structured_output_schema: dict | None = None,
|
|
21
22
|
):
|
|
22
23
|
client = genai.Client(
|
|
23
24
|
vertexai=vertexai,
|
|
@@ -86,6 +87,10 @@ async def create_gemini_client(
|
|
|
86
87
|
response_modalities=response_modalities,
|
|
87
88
|
)
|
|
88
89
|
|
|
90
|
+
if structured_output_schema is not None:
|
|
91
|
+
config.response_mime_type = "application/json"
|
|
92
|
+
config.response_json_schema = structured_output_schema
|
|
93
|
+
|
|
89
94
|
gemini_messages = await convert_messages_to_gemini(messages)
|
|
90
95
|
|
|
91
96
|
if stream:
|
|
@@ -139,38 +139,14 @@
|
|
|
139
139
|
},
|
|
140
140
|
{
|
|
141
141
|
"apiType": "Grok",
|
|
142
|
-
"model": "grok-4-
|
|
143
|
-
"input":
|
|
144
|
-
"output": 15
|
|
145
|
-
},
|
|
146
|
-
{
|
|
147
|
-
"apiType": "Grok",
|
|
148
|
-
"model": "grok-3-fast-latest",
|
|
149
|
-
"input": 5,
|
|
142
|
+
"model": "grok-4-1-fast-reasoning",
|
|
143
|
+
"input": 0.4,
|
|
150
144
|
"output": 25
|
|
151
145
|
},
|
|
152
146
|
{
|
|
153
147
|
"apiType": "Grok",
|
|
154
|
-
"model": "grok-
|
|
155
|
-
"input":
|
|
156
|
-
"output":
|
|
157
|
-
},
|
|
158
|
-
{
|
|
159
|
-
"apiType": "Grok",
|
|
160
|
-
"model": "grok-3-mini-fast-latest",
|
|
161
|
-
"input": 0.6,
|
|
162
|
-
"output": 4
|
|
163
|
-
},
|
|
164
|
-
{
|
|
165
|
-
"apiType": "Grok",
|
|
166
|
-
"model": "grok-3-mini-latest",
|
|
167
|
-
"input": 0.3,
|
|
168
|
-
"output": 0.5
|
|
169
|
-
},
|
|
170
|
-
{
|
|
171
|
-
"apiType": "Grok",
|
|
172
|
-
"model": "grok-2-vision-latest",
|
|
173
|
-
"input": 2,
|
|
174
|
-
"output": 10
|
|
148
|
+
"model": "grok-4-1-fast-non-reasoning",
|
|
149
|
+
"input": 0.4,
|
|
150
|
+
"output": 25
|
|
175
151
|
}
|
|
176
152
|
]
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.4
|
|
2
2
|
Name: LLM-Bridge
|
|
3
|
-
Version: 1.12.4
|
|
3
|
+
Version: 1.12.6
|
|
4
4
|
Summary: A Bridge for LLMs
|
|
5
5
|
Author-email: windsnow1025 <windsnow1025@gmail.com>
|
|
6
6
|
License-Expression: MIT
|
|
@@ -52,12 +52,12 @@ PyPI: [https://pypi.org/project/LLM-Bridge/](https://pypi.org/project/LLM-Bridge
|
|
|
52
52
|
|
|
53
53
|
The features listed represent the maximum capabilities of each API type supported by LLM Bridge.
|
|
54
54
|
|
|
55
|
-
| API Type | Input Format | Capabilities
|
|
56
|
-
|
|
57
|
-
| OpenAI | Text, Image, PDF | Thinking, Web Search, Code Execution
|
|
58
|
-
| Gemini | Text, Image, Video, Audio, PDF | Thinking, Web Search + Citations, Code Execution | Text, Image, File |
|
|
59
|
-
| Claude | Text, Image, PDF | Thinking, Web Search, Code Execution
|
|
60
|
-
| Grok | Text, Image |
|
|
55
|
+
| API Type | Input Format | Capabilities | Output Format |
|
|
56
|
+
|----------|--------------------------------|---------------------------------------------------------------------|-------------------|
|
|
57
|
+
| OpenAI | Text, Image, PDF | Thinking, Web Search, Code Execution | Text |
|
|
58
|
+
| Gemini | Text, Image, Video, Audio, PDF | Thinking, Web Search + Citations, Code Execution, Structured Output | Text, Image, File |
|
|
59
|
+
| Claude | Text, Image, PDF | Thinking, Web Search, Code Execution | Text |
|
|
60
|
+
| Grok | Text, Image | | Text |
|
|
61
61
|
|
|
62
62
|
#### Planned Features
|
|
63
63
|
|
|
@@ -27,12 +27,12 @@ llm_bridge/logic/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,
|
|
|
27
27
|
llm_bridge/logic/file_fetch.py,sha256=Q8PGNj76E25sKD70TmlnSIdPgAxpNlb89syk87DbAGg,1341
|
|
28
28
|
llm_bridge/logic/model_prices.py,sha256=hiXVbki3004Rrm5LQrmVfdm0lLABeygxtFB-Qn9_mm0,1219
|
|
29
29
|
llm_bridge/logic/chat_generate/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
30
|
-
llm_bridge/logic/chat_generate/chat_client_factory.py,sha256=
|
|
30
|
+
llm_bridge/logic/chat_generate/chat_client_factory.py,sha256=x1x5rYUCSMvK2r7lbtwBEHI_-jAcBfoOQbuUY4kZano,4043
|
|
31
31
|
llm_bridge/logic/chat_generate/chat_message_converter.py,sha256=40VTBOPXg_ocrEZMdt1ObYlm-mhRL35zWzzxv8m2xRc,1538
|
|
32
32
|
llm_bridge/logic/chat_generate/media_processor.py,sha256=ZR8G24EHwZZr2T9iFDRmScDGyJ_kvThApABzSzK0CL0,702
|
|
33
33
|
llm_bridge/logic/chat_generate/model_client_factory/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
34
34
|
llm_bridge/logic/chat_generate/model_client_factory/claude_client_factory.py,sha256=bdO-4LBSwe1x8_5kamVg6dpRkxGB8_FXgRaaNH53qUs,3059
|
|
35
|
-
llm_bridge/logic/chat_generate/model_client_factory/gemini_client_factory.py,sha256=
|
|
35
|
+
llm_bridge/logic/chat_generate/model_client_factory/gemini_client_factory.py,sha256=KjxU3NkHRvQS4G8FXDRlKRM0Kdunol-gTZhUakBwngQ,3666
|
|
36
36
|
llm_bridge/logic/chat_generate/model_client_factory/openai_client_factory.py,sha256=EoU5kyccnwOKjGdFi5yTozNVPrq402jRtWPjSmBJs7M,4517
|
|
37
37
|
llm_bridge/logic/chat_generate/model_message_converter/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
38
38
|
llm_bridge/logic/chat_generate/model_message_converter/claude_message_converter.py,sha256=SfDhQXR7L5nCPHS4MIjwgzK_wER7qOUCc8gh-K77kKY,2441
|
|
@@ -45,7 +45,7 @@ llm_bridge/logic/message_preprocess/document_processor.py,sha256=IsVqoFgWNa9i8cR
|
|
|
45
45
|
llm_bridge/logic/message_preprocess/file_type_checker.py,sha256=nkrVki1a2udCeVqUnfIVi7Wxx8OMKbBuHw3FOlm17uo,1603
|
|
46
46
|
llm_bridge/logic/message_preprocess/message_preprocessor.py,sha256=VR4__ip4ytAo62DHn9HeeYdbcx5lWItBnKsm9l3gmY4,1924
|
|
47
47
|
llm_bridge/resources/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
48
|
-
llm_bridge/resources/model_prices.json,sha256=
|
|
48
|
+
llm_bridge/resources/model_prices.json,sha256=Sf-knxXYkM2taRLAStzJbw-Ps5zxG9lMSAzwSB78MVc,2588
|
|
49
49
|
llm_bridge/type/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
50
50
|
llm_bridge/type/chat_response.py,sha256=zEw-my_I0-7msmlTySdBGE2vWUIPILex0UrUPqTJiYY,754
|
|
51
51
|
llm_bridge/type/message.py,sha256=NyWmSSrciFfvF81aBwAH8qFpo5IpRhh8QXMselbYen8,370
|
|
@@ -55,8 +55,8 @@ llm_bridge/type/model_message/claude_message.py,sha256=gYJUTbLUeifQMva3Axarc-VFe
|
|
|
55
55
|
llm_bridge/type/model_message/gemini_message.py,sha256=mh8pf929g7_NkBzSOwnLXyrwSzTT4yt2FmyX7NZn0sM,4302
|
|
56
56
|
llm_bridge/type/model_message/openai_message.py,sha256=xFaLY-cZoSwNd7E9BSWQjBNcRfCVH11X9s2yxXlctR0,453
|
|
57
57
|
llm_bridge/type/model_message/openai_responses_message.py,sha256=be1q2euA0ybjj4NO6NxOGIRB9eJuXSb4ssUm_bM4Ocs,1529
|
|
58
|
-
llm_bridge-1.12.4.dist-info/licenses/LICENSE,sha256=m6uon-6P_CaiqcBfApMfjG9YRtDxcr40Z52JcqUCEAE,1069
|
|
59
|
-
llm_bridge-1.12.
|
|
60
|
-
llm_bridge-1.12.4.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
|
|
61
|
-
llm_bridge-1.12.4.dist-info/top_level.txt,sha256=PtxyrgNX1lSa1Ab_qswg0sekSXejG5zrS6b_v3Po05g,11
|
|
62
|
-
llm_bridge-1.12.4.dist-info/RECORD,,
|
|
58
|
+
llm_bridge-1.12.6.dist-info/licenses/LICENSE,sha256=m6uon-6P_CaiqcBfApMfjG9YRtDxcr40Z52JcqUCEAE,1069
|
|
59
|
+
llm_bridge-1.12.6.dist-info/METADATA,sha256=4izHWE6c6BYkFoMPjsJaTW6SxpqOsjT3ZCFAONCZVtE,3502
|
|
60
|
+
llm_bridge-1.12.6.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
|
|
61
|
+
llm_bridge-1.12.6.dist-info/top_level.txt,sha256=PtxyrgNX1lSa1Ab_qswg0sekSXejG5zrS6b_v3Po05g,11
|
|
62
|
+
llm_bridge-1.12.6.dist-info/RECORD,,
|
|
File without changes
|
|
File without changes
|
|
File without changes
|