LLM-Bridge 1.14.0a0__py3-none-any.whl → 1.14.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- llm_bridge/logic/chat_generate/model_client_factory/openai_client_factory.py +8 -9
- llm_bridge/resources/model_prices.json +20 -32
- {llm_bridge-1.14.0a0.dist-info → llm_bridge-1.14.1.dist-info}/METADATA +16 -24
- {llm_bridge-1.14.0a0.dist-info → llm_bridge-1.14.1.dist-info}/RECORD +6 -7
- {llm_bridge-1.14.0a0.dist-info → llm_bridge-1.14.1.dist-info}/WHEEL +1 -2
- llm_bridge-1.14.0a0.dist-info/top_level.txt +0 -1
- {llm_bridge-1.14.0a0.dist-info → llm_bridge-1.14.1.dist-info}/licenses/LICENSE +0 -0
llm_bridge/logic/chat_generate/model_client_factory/openai_client_factory.py

@@ -65,7 +65,7 @@ async def create_openai_client(
     tools = []
     reasoning = None
 
-    if model not in ["gpt-5-
+    if model not in ["gpt-5-pro", "gpt-5.2-pro"]:
         if code_execution:
             tools.append(
                 CodeInterpreter(
@@ -73,16 +73,15 @@ async def create_openai_client(
                     container=CodeInterpreterContainerCodeInterpreterToolAuto(type="auto")
                 )
             )
-
-
-
-
-                search_context_size="high",
-            )
+        tools.append(
+            WebSearchToolParam(
+                type="web_search",
+                search_context_size="high",
             )
-
+        )
+    if re.match(r"gpt-5.*", model):
         temperature = 1
-    if re.match(r"gpt-5.*", model)
+    if re.match(r"gpt-5.*", model):
         if thought:
             reasoning = Reasoning(
                 effort="high",
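Read together, the two hunks above change how the factory assembles the Responses API tool list: gpt-5-pro and gpt-5.2-pro now skip hosted tools entirely, web search is appended as a `WebSearchToolParam`, and GPT-5-family models pin `temperature` and optionally request high-effort reasoning. The sketch below only illustrates that flow and is not the package's actual function: the flag names (`code_execution`, `thought`), tool types, and the exclusion list come from the diff, while the helper name `build_tool_config`, the import paths, and the `type="code_interpreter"` field are assumptions.

```python
import re

# NOTE: a minimal sketch, not LLM-Bridge's implementation. Import paths assume
# the openai 2.x SDK layout and may differ between SDK versions.
from openai.types.responses import WebSearchToolParam
from openai.types.responses.tool_param import (
    CodeInterpreter,
    CodeInterpreterContainerCodeInterpreterToolAuto,
)
from openai.types.shared_params import Reasoning


def build_tool_config(model: str, code_execution: bool, thought: bool):
    """Hypothetical helper mirroring the 1.14.1 tool setup in create_openai_client."""
    tools = []
    reasoning = None
    temperature = None  # assumed default; the diff only shows the GPT-5 override

    # gpt-5-pro and gpt-5.2-pro skip the hosted tools entirely (new exclusion list).
    if model not in ["gpt-5-pro", "gpt-5.2-pro"]:
        if code_execution:
            tools.append(
                CodeInterpreter(
                    type="code_interpreter",  # assumed; this field is elided in the diff
                    container=CodeInterpreterContainerCodeInterpreterToolAuto(type="auto"),
                )
            )
        tools.append(
            WebSearchToolParam(
                type="web_search",
                search_context_size="high",
            )
        )

    # GPT-5-family models pin temperature and, when `thought` is set,
    # request high-effort reasoning.
    if re.match(r"gpt-5.*", model):
        temperature = 1
        if thought:
            reasoning = Reasoning(effort="high")

    return tools, reasoning, temperature
```

In the real factory these values presumably feed the eventual Responses API call, which lies outside the hunks shown here.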
llm_bridge/resources/model_prices.json

@@ -7,39 +7,27 @@
   },
   {
     "apiType": "Gemini-Vertex",
-    "model": "gemini-3-
-    "input": 2,
-    "output": 120
-  },
-  {
-    "apiType": "Gemini-Vertex",
-    "model": "gemini-2.5-flash",
+    "model": "gemini-3-flash-preview",
     "input": 1,
-    "output":
+    "output": 3
   },
   {
     "apiType": "Gemini-Vertex",
-    "model": "gemini-
-    "input": 2
-    "output":
-  },
-  {
-    "apiType": "Gemini-Free",
-    "model": "gemini-flash-latest",
-    "input": 0,
-    "output": 0
+    "model": "gemini-3-pro-image-preview",
+    "input": 2,
+    "output": 120
   },
   {
     "apiType": "Gemini-Free",
-    "model": "gemini-
+    "model": "gemini-3-flash-preview",
     "input": 0,
     "output": 0
   },
   {
-    "apiType": "Gemini-
-    "model": "gemini-
-    "input":
-    "output":
+    "apiType": "Gemini-Paid",
+    "model": "gemini-3-flash-preview",
+    "input": 1,
+    "output": 3
   },
   {
     "apiType": "Gemini-Paid",
@@ -60,16 +48,10 @@
     "output": 2.5
   },
   {
-    "apiType": "
-    "model": "
-    "input": 1,
-    "output":
-  },
-  {
-    "apiType": "Gemini-Paid",
-    "model": "gemini-2.5-pro",
-    "input": 2.5,
-    "output": 15
+    "apiType": "OpenAI",
+    "model": "gpt-5.2",
+    "input": 1.75,
+    "output": 14
   },
   {
     "apiType": "OpenAI",
@@ -89,6 +71,12 @@
     "input": 0.25,
     "output": 2
   },
+  {
+    "apiType": "OpenAI",
+    "model": "gpt-5.2-pro",
+    "input": 21,
+    "output": 168
+  },
   {
     "apiType": "OpenAI",
     "model": "gpt-5-pro",
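The pricing hunks add rows for the Gemini 3 preview models, gpt-5.2, and gpt-5.2-pro while dropping the retired 2.5-era entries. As a rough illustration of how such a table is typically consumed, here is a sketch that looks up a row and prices a request; it is not LLM-Bridge's API. The keys (`apiType`, `model`, `input`, `output`), the resource path, and the gpt-5.2 figures come from the diff; the helper name `estimate_cost`, the flat-JSON-array assumption, and the reading of the numbers as USD per million tokens are assumptions.

```python
import json
from importlib import resources


def estimate_cost(api_type: str, model: str, input_tokens: int, output_tokens: int) -> float:
    """Hypothetical lookup against the bundled model_prices.json.

    Assumes the file is a flat JSON array of rows like the ones in the hunks
    above, and that `input`/`output` are USD per million tokens.
    """
    # Path mirrors the wheel's RECORD entry: llm_bridge/resources/model_prices.json
    price_file = resources.files("llm_bridge.resources").joinpath("model_prices.json")
    with price_file.open() as f:
        rows = json.load(f)

    row = next(r for r in rows if r["apiType"] == api_type and r["model"] == model)
    return (input_tokens * row["input"] + output_tokens * row["output"]) / 1_000_000


# Example with the new gpt-5.2 row (input 1.75, output 14):
# 10_000 input + 2_000 output tokens -> 0.0175 + 0.028 = 0.0455
```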
{llm_bridge-1.14.0a0.dist-info → llm_bridge-1.14.1.dist-info}/METADATA

@@ -1,27 +1,26 @@
 Metadata-Version: 2.4
 Name: LLM-Bridge
-Version: 1.14.0a0
+Version: 1.14.1
 Summary: A Bridge for LLMs
 Author-email: windsnow1025 <windsnow1025@gmail.com>
 License-Expression: MIT
-
+License-File: LICENSE
+Keywords: ai,llm
 Classifier: Framework :: FastAPI
 Classifier: Programming Language :: Python :: 3
 Requires-Python: >=3.12
-
-
+Requires-Dist: anthropic==0.75.0
+Requires-Dist: docxlatex>=1.1.1
 Requires-Dist: fastapi
+Requires-Dist: google-genai==1.46.0
 Requires-Dist: httpx
-Requires-Dist: tenacity
 Requires-Dist: openai==2.9.0
-Requires-Dist: tiktoken==0.11.0
-Requires-Dist: google-genai==1.46.0
-Requires-Dist: anthropic==0.75.0
-Requires-Dist: PyMuPDF
-Requires-Dist: docxlatex>=1.1.1
 Requires-Dist: openpyxl
+Requires-Dist: pymupdf
 Requires-Dist: python-pptx
-
+Requires-Dist: tenacity
+Requires-Dist: tiktoken==0.11.0
+Description-Content-Type: text/markdown
 
 # LLM Bridge
 
@@ -60,12 +59,6 @@ The features listed represent the maximum capabilities of each API type supporte
 - More features for API Types
 - Native support for Grok
 
-## Installation
-
-```bash
-pip install --upgrade llm_bridge
-```
-
 ## Development
 
 ### Python uv
@@ -74,16 +67,15 @@ pip install --upgrade llm_bridge
 2. Install Python in uv: `uv python install 3.12`; upgrade Python in uv: `uv python install 3.12`
 3. Configure requirements:
 ```bash
-uv sync
+uv sync --refresh
 ```
 
-### Pycharm
-
-Add New Configuration >> uv run
-- script: `./usage/main.py`
-- Paths to ".env" files: `./usage/.env`
+### Pycharm Professional
 
-
+1. Add New Interpreter >> Add Local Interpreter
+   - Environment: Select existing
+   - Type: uv
+2. Add New Configuration >> uv run >> script: `./usage/main.py`
 
 ### Usage
 
{llm_bridge-1.14.0a0.dist-info → llm_bridge-1.14.1.dist-info}/RECORD

@@ -33,7 +33,7 @@ llm_bridge/logic/chat_generate/media_processor.py,sha256=ZR8G24EHwZZr2T9iFDRmScD
 llm_bridge/logic/chat_generate/model_client_factory/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 llm_bridge/logic/chat_generate/model_client_factory/claude_client_factory.py,sha256=EMXEpNTmhOv_D8pQIPboW3taRX5DnLa1QIGJn0VjO4M,3597
 llm_bridge/logic/chat_generate/model_client_factory/gemini_client_factory.py,sha256=6CuacST1bLUGhY-rGH0bm5tu3r9iQQhIweN32TgqLCc,3692
-llm_bridge/logic/chat_generate/model_client_factory/openai_client_factory.py,sha256=
+llm_bridge/logic/chat_generate/model_client_factory/openai_client_factory.py,sha256=zQyDC3beuUeC8czU22U_Xg8VGxvuQxxuaxWgghCknWg,4889
 llm_bridge/logic/chat_generate/model_client_factory/schema_converter.py,sha256=kQ3RGyg_9vbe7oYzbl11Dzu-tHPY1z2-SBBSgHHwPfM,143
 llm_bridge/logic/chat_generate/model_message_converter/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 llm_bridge/logic/chat_generate/model_message_converter/claude_message_converter.py,sha256=SfDhQXR7L5nCPHS4MIjwgzK_wER7qOUCc8gh-K77kKY,2441
@@ -46,7 +46,7 @@ llm_bridge/logic/message_preprocess/document_processor.py,sha256=IsVqoFgWNa9i8cR
 llm_bridge/logic/message_preprocess/file_type_checker.py,sha256=nkrVki1a2udCeVqUnfIVi7Wxx8OMKbBuHw3FOlm17uo,1603
 llm_bridge/logic/message_preprocess/message_preprocessor.py,sha256=VR4__ip4ytAo62DHn9HeeYdbcx5lWItBnKsm9l3gmY4,1924
 llm_bridge/resources/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-llm_bridge/resources/model_prices.json,sha256=
+llm_bridge/resources/model_prices.json,sha256=mDAZxdj34F9VVRxS3E-lKHx_JB-jxOGadWOWnwzvsIs,2372
 llm_bridge/type/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 llm_bridge/type/chat_response.py,sha256=zEw-my_I0-7msmlTySdBGE2vWUIPILex0UrUPqTJiYY,754
 llm_bridge/type/message.py,sha256=NyWmSSrciFfvF81aBwAH8qFpo5IpRhh8QXMselbYen8,370
@@ -56,8 +56,7 @@ llm_bridge/type/model_message/claude_message.py,sha256=gYJUTbLUeifQMva3Axarc-VFe
 llm_bridge/type/model_message/gemini_message.py,sha256=mh8pf929g7_NkBzSOwnLXyrwSzTT4yt2FmyX7NZn0sM,4302
 llm_bridge/type/model_message/openai_message.py,sha256=xFaLY-cZoSwNd7E9BSWQjBNcRfCVH11X9s2yxXlctR0,453
 llm_bridge/type/model_message/openai_responses_message.py,sha256=be1q2euA0ybjj4NO6NxOGIRB9eJuXSb4ssUm_bM4Ocs,1529
-llm_bridge-1.14.
-llm_bridge-1.14.
-llm_bridge-1.14.
-llm_bridge-1.14.
-llm_bridge-1.14.0a0.dist-info/RECORD,,
+llm_bridge-1.14.1.dist-info/METADATA,sha256=eDO1HuwlmAq2d21xbIlCAtvkYLoPEqbCkP0HzfslPTA,3541
+llm_bridge-1.14.1.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+llm_bridge-1.14.1.dist-info/licenses/LICENSE,sha256=m6uon-6P_CaiqcBfApMfjG9YRtDxcr40Z52JcqUCEAE,1069
+llm_bridge-1.14.1.dist-info/RECORD,,
llm_bridge-1.14.0a0.dist-info/top_level.txt

@@ -1 +0,0 @@
-llm_bridge
{llm_bridge-1.14.0a0.dist-info → llm_bridge-1.14.1.dist-info}/licenses/LICENSE

File without changes