LLM-Bridge 1.14.0__py3-none-any.whl → 1.14.0a0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- llm_bridge/logic/chat_generate/model_client_factory/openai_client_factory.py +9 -8
- llm_bridge/resources/model_prices.json +0 -12
- {llm_bridge-1.14.0.dist-info → llm_bridge-1.14.0a0.dist-info}/METADATA +17 -15
- {llm_bridge-1.14.0.dist-info → llm_bridge-1.14.0a0.dist-info}/RECORD +7 -6
- {llm_bridge-1.14.0.dist-info → llm_bridge-1.14.0a0.dist-info}/WHEEL +2 -1
- llm_bridge-1.14.0a0.dist-info/top_level.txt +1 -0
- {llm_bridge-1.14.0.dist-info → llm_bridge-1.14.0a0.dist-info}/licenses/LICENSE +0 -0
llm_bridge/logic/chat_generate/model_client_factory/openai_client_factory.py

@@ -65,7 +65,7 @@ async def create_openai_client(
     tools = []
     reasoning = None

-    if model not in ["gpt-5-
+    if model not in ["gpt-5-chat-latest", "gpt-5-pro"]:
         if code_execution:
             tools.append(
                 CodeInterpreter(
@@ -73,15 +73,16 @@ async def create_openai_client(
                     container=CodeInterpreterContainerCodeInterpreterToolAuto(type="auto")
                 )
             )
-    tools.append(
-        WebSearchToolParam(
-            type="web_search",
-            search_context_size="high",
+    if model not in ["gpt-5-chat-latest"]:
+        tools.append(
+            WebSearchToolParam(
+                type="web_search",
+                search_context_size="high",
+            )
         )
-    )
-    if re.match(r"gpt-5.*", model):
+    if re.match(r"gpt-5.*", model) and model != "gpt-5-chat-latest":
         temperature = 1
-    if re.match(r"gpt-5.*", model):
+    if re.match(r"gpt-5.*", model) and model != "gpt-5-chat-latest":
         if thought:
             reasoning = Reasoning(
                 effort="high",
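Net effect of the two hunks above, as a minimal runnable sketch: plain dicts stand in for the SDK's CodeInterpreter, WebSearchToolParam, and Reasoning types, and `plan_openai_request` is a hypothetical helper name, not part of LLM-Bridge.

```python
import re


# Hypothetical helper (not part of LLM-Bridge): mirrors the gating logic in the
# updated openai_client_factory.py, with plain dicts in place of the OpenAI SDK types.
def plan_openai_request(model: str, code_execution: bool, thought: bool) -> dict:
    tools: list[dict] = []
    reasoning = None
    temperature = None

    # Code interpreter is skipped for gpt-5-chat-latest and gpt-5-pro.
    if model not in ["gpt-5-chat-latest", "gpt-5-pro"]:
        if code_execution:
            tools.append({"type": "code_interpreter", "container": {"type": "auto"}})

    # Web search is now gated: only gpt-5-chat-latest goes without it.
    if model not in ["gpt-5-chat-latest"]:
        tools.append({"type": "web_search", "search_context_size": "high"})

    # gpt-5* models other than gpt-5-chat-latest pin temperature and may request reasoning.
    if re.match(r"gpt-5.*", model) and model != "gpt-5-chat-latest":
        temperature = 1
        if thought:
            reasoning = {"effort": "high"}

    return {"tools": tools, "reasoning": reasoning, "temperature": temperature}


if __name__ == "__main__":
    # gpt-5-pro now gets web search (and reasoning) but no code interpreter.
    print(plan_openai_request("gpt-5-pro", code_execution=True, thought=True))
    # gpt-5-chat-latest gets neither tool and no reasoning block.
    print(plan_openai_request("gpt-5-chat-latest", code_execution=True, thought=True))
```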
llm_bridge/resources/model_prices.json

@@ -71,12 +71,6 @@
     "input": 2.5,
     "output": 15
   },
-  {
-    "apiType": "OpenAI",
-    "model": "gpt-5.2",
-    "input": 1.75,
-    "output": 14
-  },
   {
     "apiType": "OpenAI",
     "model": "gpt-5.1",
@@ -95,12 +89,6 @@
     "input": 0.25,
     "output": 2
   },
-  {
-    "apiType": "OpenAI",
-    "model": "gpt-5.2-pro",
-    "input": 21,
-    "output": 168
-  },
   {
     "apiType": "OpenAI",
     "model": "gpt-5-pro",
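The two hunks above remove the gpt-5.2 and gpt-5.2-pro price entries. For orientation, a hedged sketch of looking an entry up from the packaged resource: `find_price` is a hypothetical helper, and the assumption that the file's top level is a flat JSON array of these objects comes only from the fragments visible here.

```python
import json
from importlib import resources


# Illustrative only (not LLM-Bridge API). Assumes model_prices.json is a flat JSON
# array of {"apiType", "model", "input", "output"} objects, as the hunks suggest.
def find_price(api_type: str, model: str) -> dict | None:
    raw = resources.files("llm_bridge.resources").joinpath("model_prices.json").read_text()
    for entry in json.loads(raw):
        if entry["apiType"] == api_type and entry["model"] == model:
            return entry
    return None


if __name__ == "__main__":
    print(find_price("OpenAI", "gpt-5.2"))  # removed in 1.14.0a0, so None
    print(find_price("OpenAI", "gpt-5.1"))  # still present
```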
{llm_bridge-1.14.0.dist-info → llm_bridge-1.14.0a0.dist-info}/METADATA

@@ -1,26 +1,27 @@
 Metadata-Version: 2.4
 Name: LLM-Bridge
-Version: 1.14.0
+Version: 1.14.0a0
 Summary: A Bridge for LLMs
 Author-email: windsnow1025 <windsnow1025@gmail.com>
 License-Expression: MIT
-
-Keywords: ai,llm
+Keywords: llm,ai
 Classifier: Framework :: FastAPI
 Classifier: Programming Language :: Python :: 3
 Requires-Python: >=3.12
-
-
+Description-Content-Type: text/markdown
+License-File: LICENSE
 Requires-Dist: fastapi
-Requires-Dist: google-genai==1.46.0
 Requires-Dist: httpx
+Requires-Dist: tenacity
 Requires-Dist: openai==2.9.0
+Requires-Dist: tiktoken==0.11.0
+Requires-Dist: google-genai==1.46.0
+Requires-Dist: anthropic==0.75.0
+Requires-Dist: PyMuPDF
+Requires-Dist: docxlatex>=1.1.1
 Requires-Dist: openpyxl
-Requires-Dist: pymupdf
 Requires-Dist: python-pptx
-
-Requires-Dist: tiktoken==0.11.0
-Description-Content-Type: text/markdown
+Dynamic: license-file

 # LLM Bridge

@@ -76,12 +77,13 @@ pip install --upgrade llm_bridge
 uv sync
 ```

-### Pycharm
+### Pycharm
+
+Add New Configuration >> uv run
+- script: `./usage/main.py`
+- Paths to ".env" files: `./usage/.env`

-
-- Environment: Select existing
-- Type: uv
-2. Add New Configuration >> uv run >> script: `./usage/main.py`
+If uv interpreter is not found, create a new project with uv.

 ### Usage

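The METADATA hunks above list several pinned dependencies (openai==2.9.0, tiktoken==0.11.0, google-genai==1.46.0, anthropic==0.75.0). A small illustrative check, not part of the package, for comparing an installed environment against those pins:

```python
from importlib import metadata

# Pins taken from the Requires-Dist lines in the new METADATA above.
PINS = {
    "openai": "2.9.0",
    "tiktoken": "0.11.0",
    "google-genai": "1.46.0",
    "anthropic": "0.75.0",
}

for dist, expected in PINS.items():
    try:
        installed = metadata.version(dist)
    except metadata.PackageNotFoundError:
        installed = "not installed"
    status = "ok" if installed == expected else "mismatch"
    print(f"{dist}: expected {expected}, found {installed} ({status})")
```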
{llm_bridge-1.14.0.dist-info → llm_bridge-1.14.0a0.dist-info}/RECORD

@@ -33,7 +33,7 @@ llm_bridge/logic/chat_generate/media_processor.py,sha256=ZR8G24EHwZZr2T9iFDRmScD
 llm_bridge/logic/chat_generate/model_client_factory/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 llm_bridge/logic/chat_generate/model_client_factory/claude_client_factory.py,sha256=EMXEpNTmhOv_D8pQIPboW3taRX5DnLa1QIGJn0VjO4M,3597
 llm_bridge/logic/chat_generate/model_client_factory/gemini_client_factory.py,sha256=6CuacST1bLUGhY-rGH0bm5tu3r9iQQhIweN32TgqLCc,3692
-llm_bridge/logic/chat_generate/model_client_factory/openai_client_factory.py,sha256=
+llm_bridge/logic/chat_generate/model_client_factory/openai_client_factory.py,sha256=xRnqYal5lBS76tCJKJHVvI0UsLCM49hTlqN6xYmzPco,5028
 llm_bridge/logic/chat_generate/model_client_factory/schema_converter.py,sha256=kQ3RGyg_9vbe7oYzbl11Dzu-tHPY1z2-SBBSgHHwPfM,143
 llm_bridge/logic/chat_generate/model_message_converter/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 llm_bridge/logic/chat_generate/model_message_converter/claude_message_converter.py,sha256=SfDhQXR7L5nCPHS4MIjwgzK_wER7qOUCc8gh-K77kKY,2441
@@ -46,7 +46,7 @@ llm_bridge/logic/message_preprocess/document_processor.py,sha256=IsVqoFgWNa9i8cR
 llm_bridge/logic/message_preprocess/file_type_checker.py,sha256=nkrVki1a2udCeVqUnfIVi7Wxx8OMKbBuHw3FOlm17uo,1603
 llm_bridge/logic/message_preprocess/message_preprocessor.py,sha256=VR4__ip4ytAo62DHn9HeeYdbcx5lWItBnKsm9l3gmY4,1924
 llm_bridge/resources/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-llm_bridge/resources/model_prices.json,sha256=
+llm_bridge/resources/model_prices.json,sha256=Sf-knxXYkM2taRLAStzJbw-Ps5zxG9lMSAzwSB78MVc,2588
 llm_bridge/type/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 llm_bridge/type/chat_response.py,sha256=zEw-my_I0-7msmlTySdBGE2vWUIPILex0UrUPqTJiYY,754
 llm_bridge/type/message.py,sha256=NyWmSSrciFfvF81aBwAH8qFpo5IpRhh8QXMselbYen8,370
@@ -56,7 +56,8 @@ llm_bridge/type/model_message/claude_message.py,sha256=gYJUTbLUeifQMva3Axarc-VFe
 llm_bridge/type/model_message/gemini_message.py,sha256=mh8pf929g7_NkBzSOwnLXyrwSzTT4yt2FmyX7NZn0sM,4302
 llm_bridge/type/model_message/openai_message.py,sha256=xFaLY-cZoSwNd7E9BSWQjBNcRfCVH11X9s2yxXlctR0,453
 llm_bridge/type/model_message/openai_responses_message.py,sha256=be1q2euA0ybjj4NO6NxOGIRB9eJuXSb4ssUm_bM4Ocs,1529
-llm_bridge-1.14.
-llm_bridge-1.14.
-llm_bridge-1.14.
-llm_bridge-1.14.
+llm_bridge-1.14.0a0.dist-info/licenses/LICENSE,sha256=m6uon-6P_CaiqcBfApMfjG9YRtDxcr40Z52JcqUCEAE,1069
+llm_bridge-1.14.0a0.dist-info/METADATA,sha256=sw7QoaHd3F3rkGPNB5bsa_KUR3hZp_y34Rzbn__zBS4,3616
+llm_bridge-1.14.0a0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+llm_bridge-1.14.0a0.dist-info/top_level.txt,sha256=PtxyrgNX1lSa1Ab_qswg0sekSXejG5zrS6b_v3Po05g,11
+llm_bridge-1.14.0a0.dist-info/RECORD,,
llm_bridge-1.14.0a0.dist-info/top_level.txt

@@ -0,0 +1 @@
+llm_bridge

{llm_bridge-1.14.0.dist-info → llm_bridge-1.14.0a0.dist-info}/licenses/LICENSE

File without changes